nordugrid-arc-6.14.0/Makefile.am

# /opt/local is the location for macports on MacOS X
ACLOCAL_AMFLAGS = -I m4 `test -d /opt/local/share/aclocal && echo -I /opt/local/share/aclocal`

if SWIG_ENABLED
SWIG_SD = swig
endif

SUBDIRS = src include $(SWIG_SD) python $(POSUB) debian
DIST_SUBDIRS = src include swig python po debian
EXTRA_DIST = nordugrid-arc.spec autogen.sh LICENSE NOTICE

nordugrid-arc-6.14.0/configure

#! /bin/sh
# Guess values for system-dependent variables and create Makefiles.
# Generated by GNU Autoconf 2.69 for nordugrid-arc 6.14.0.
#
# Report bugs to <http://bugzilla.nordugrid.org/>.
#
#
# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc.
#
#
# This configure script is free software; the Free Software Foundation
# gives unlimited permission to copy, distribute and modify it.

## -------------------- ##
## M4sh Initialization. ##
## -------------------- ##

# Be more Bourne compatible
DUALCASE=1; export DUALCASE # for MKS sh
if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
  emulate sh
  NULLCMD=:
  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
  # is contrary to our usage.  Disable this feature.
  alias -g '${1+"$@"}'='"$@"'
  setopt NO_GLOB_SUBST
else
  case `(set -o) 2>/dev/null` in #(
  *posix*) :
    set -o posix ;; #(
  *) :
     ;;
esac
fi


as_nl='
'
export as_nl
# Printing a long string crashes Solaris 7 /usr/bin/printf.
as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
# Prefer a ksh shell builtin over an external printf program on Solaris,
# but without wasting forks for bash or zsh.
if test -z "$BASH_VERSION$ZSH_VERSION" \
     && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
  as_echo='print -r --'
  as_echo_n='print -rn --'
elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
  as_echo='printf %s\n'
  as_echo_n='printf %s'
else
  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
    as_echo_n='/usr/ucb/echo -n'
  else
    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
    as_echo_n_body='eval
      arg=$1;
      case $arg in #(
      *"$as_nl"*)
        expr "X$arg" : "X\\(.*\\)$as_nl";
        arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
      esac;
      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
    '
    export as_echo_n_body
    as_echo_n='sh -c $as_echo_n_body as_echo'
  fi
  export as_echo_body
  as_echo='sh -c $as_echo_body as_echo'
fi

# The user is always right.
if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # Use a proper internal environment variable to ensure we don't fall # into an infinite loop, continuously re-executing ourselves. if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then _as_can_reexec=no; export _as_can_reexec; # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. $as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 as_fn_exit 255 fi # We don't want this to propagate to other subprocesses. { _as_can_reexec=; unset _as_can_reexec;} if test "x$CONFIG_SHELL" = x; then as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which # is contrary to our usage. Disable this feature. 
alias -g '\${1+\"\$@\"}'='\"\$@\"' setopt NO_GLOB_SUBST else case \`(set -o) 2>/dev/null\` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi " as_required="as_fn_return () { (exit \$1); } as_fn_success () { as_fn_return 0; } as_fn_failure () { as_fn_return 1; } as_fn_ret_success () { return 0; } as_fn_ret_failure () { return 1; } exitcode=0 as_fn_success || { exitcode=1; echo as_fn_success failed.; } as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : else exitcode=1; echo positional parameters were not saved. fi test x\$exitcode = x0 || exit 1 test -x / || exit 1" as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 test \$(( 1 + 1 )) = 2 || exit 1 test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || ( ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO PATH=/empty FPATH=/empty; export PATH FPATH test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\ || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1" if (eval "$as_required") 2>/dev/null; then : as_have_required=yes else as_have_required=no fi if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_found=false for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. as_found=: case $as_dir in #( /*) for as_base in sh bash ksh sh5; do # Try only shells that exist, to save several forks. as_shell=$as_dir/$as_base if { test -f "$as_shell" || test -f "$as_shell.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : CONFIG_SHELL=$as_shell as_have_required=yes if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : break 2 fi fi done;; esac as_found=false done $as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : CONFIG_SHELL=$SHELL as_have_required=yes fi; } IFS=$as_save_IFS if test "x$CONFIG_SHELL" != x; then : export CONFIG_SHELL # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. 
$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 exit 255 fi if test x$as_have_required = xno; then : $as_echo "$0: This script requires a shell more modern than all" $as_echo "$0: the shells that I found on your system." if test x${ZSH_VERSION+set} = xset ; then $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" $as_echo "$0: be upgraded to zsh 4.3.4 or later." else $as_echo "$0: Please tell bug-autoconf@gnu.org and $0: http://bugzilla.nordugrid.org/ about your system, $0: including any error possibly output before this $0: message. Then install a modern shell, or manually run $0: the script under such a shell if you do have one." fi exit 1 fi fi fi SHELL=${CONFIG_SHELL-/bin/sh} export SHELL # Unset more variables known to interfere with behavior of common tools. CLICOLOR_FORCE= GREP_OPTIONS= unset CLICOLOR_FORCE GREP_OPTIONS ## --------------------- ## ## M4sh Shell Functions. ## ## --------------------- ## # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. 
If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits as_lineno_1=$LINENO as_lineno_1a=$LINENO as_lineno_2=$LINENO as_lineno_2a=$LINENO eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) sed -n ' p /[$]LINENO/= ' <$as_myself | sed ' s/[$]LINENO.*/&-/ t lineno b :lineno N :loop s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ t loop s/-\n.*// ' >$as_me.lineno && chmod +x "$as_me.lineno" || { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } # If we had to re-execute with $CONFIG_SHELL, we're ensured to have # already done that, so ensure we don't try to do so again and fall # in an infinite loop. This has already happened in practice. _as_can_reexec=no; export _as_can_reexec # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). . "./$as_me.lineno" # Exit status is that of the last command. exit } ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null if mkdir -p . 
2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" SHELL=${CONFIG_SHELL-/bin/sh} test -n "$DJDIR" || exec 7<&0 &1 # Name of the host. # hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, # so uname gets run too. ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` # # Initializations. # ac_default_prefix=/usr/local ac_clean_files= ac_config_libobj_dir=. LIBOBJS= cross_compiling=no subdirs= MFLAGS= MAKEFLAGS= # Identity of this package. PACKAGE_NAME='nordugrid-arc' PACKAGE_TARNAME='nordugrid-arc' PACKAGE_VERSION='6.14.0' PACKAGE_STRING='nordugrid-arc 6.14.0' PACKAGE_BUGREPORT='http://bugzilla.nordugrid.org/' PACKAGE_URL='' ac_unique_file="Makefile.am" # Factoring default headers for most tests. ac_includes_default="\ #include #ifdef HAVE_SYS_TYPES_H # include #endif #ifdef HAVE_SYS_STAT_H # include #endif #ifdef STDC_HEADERS # include # include #else # ifdef HAVE_STDLIB_H # include # endif #endif #ifdef HAVE_STRING_H # if !defined STDC_HEADERS && defined HAVE_MEMORY_H # include # endif # include #endif #ifdef HAVE_STRINGS_H # include #endif #ifdef HAVE_INTTYPES_H # include #endif #ifdef HAVE_STDINT_H # include #endif #ifdef HAVE_UNISTD_H # include #endif" gt_needs= ac_header_list= ac_func_list= ac_subst_vars='am__EXEEXT_FALSE am__EXEEXT_TRUE LTLIBOBJS SPECDATE DATER DATE posix_shell nodename gnu_time tmp_dir arc_location HED_ENABLED_FALSE HED_ENABLED_TRUE ARCREST_ENABLED_FALSE ARCREST_ENABLED_TRUE EMIES_ENABLED_FALSE EMIES_ENABLED_TRUE DATA_CLIENT_ENABLED_FALSE DATA_CLIENT_ENABLED_TRUE CREDENTIALS_CLIENT_ENABLED_FALSE CREDENTIALS_CLIENT_ENABLED_TRUE COMPUTE_CLIENT_ENABLED_FALSE COMPUTE_CLIENT_ENABLED_TRUE ACIX_TESTS_ENABLED_FALSE ACIX_TESTS_ENABLED_TRUE TRIAL ACIX_ENABLED_FALSE ACIX_ENABLED_TRUE TWISTD DATADELIVERY_SERVICE_ENABLED_FALSE DATADELIVERY_SERVICE_ENABLED_TRUE CANDYPOND_ENABLED_FALSE CANDYPOND_ENABLED_TRUE MONITOR_ENABLED_FALSE MONITOR_ENABLED_TRUE LDAP_SERVICE_ENABLED_FALSE LDAP_SERVICE_ENABLED_TRUE GRIDFTPD_SERVICE_ENABLED_FALSE GRIDFTPD_SERVICE_ENABLED_TRUE INTERNAL_ENABLED_FALSE INTERNAL_ENABLED_TRUE A_REX_SERVICE_ENABLED_FALSE A_REX_SERVICE_ENABLED_TRUE ALTPYDOXYGEN_FALSE ALTPYDOXYGEN_TRUE PYDOXYGEN_FALSE PYDOXYGEN_TRUE DOC_ENABLED_FALSE DOC_ENABLED_TRUE DOT DOXYGEN PDFLATEX LIBRESOLV LIBOBJS bashcompdir BASH_COMPLETION_LIBS BASH_COMPLETION_CFLAGS DLOPEN_LIBS UUID_LIBS PYTHON_LRMS_ENABLED_FALSE PYTHON_LRMS_ENABLED_TRUE PERL5LIB_INLINE_PYTHON PERL_TEST_DIR LDAP_ENABLED_FALSE LDAP_ENABLED_TRUE LDAP_LIBS SRM_DMC_ENABLED_FALSE SRM_DMC_ENABLED_TRUE CPPUNIT_ENABLED_FALSE CPPUNIT_ENABLED_TRUE XMLSEC_ENABLED_FALSE XMLSEC_ENABLED_TRUE XROOTD_ENABLED_FALSE XROOTD_ENABLED_TRUE S3_DMC_ENABLED_FALSE S3_DMC_ENABLED_TRUE GFAL_ENABLED_FALSE GFAL_ENABLED_TRUE MOCK_DMC_ENABLED_FALSE MOCK_DMC_ENABLED_TRUE GRIDFTP_ENABLED_FALSE GRIDFTP_ENABLED_TRUE GLOBUSUTILS_ENABLED_FALSE GLOBUSUTILS_ENABLED_TRUE XROOTD_LIBS XROOTD_CPPFLAGS S3_LIBS S3_CPPFLAGS GFAL2_LIBS GFAL2_CFLAGS LCMAPS_LIBS LCMAPS_CFLAGS LCMAPS_LOCATION LCAS_LIBS LCAS_CFLAGS LCAS_LOCATION DEFAULT_GLOBUS_LOCATION GLOBUS_OPENSSL_LIBS GLOBUS_OPENSSL_CFLAGS GLOBUS_OPENSSL_MODULE_LIBS GLOBUS_OPENSSL_MODULE_CFLAGS GLOBUS_GSI_CREDENTIAL_LIBS 
GLOBUS_GSI_CREDENTIAL_CFLAGS GLOBUS_GSI_CERT_UTILS_LIBS GLOBUS_GSI_CERT_UTILS_CFLAGS GLOBUS_IO_LIBS GLOBUS_IO_CFLAGS GLOBUS_FTP_CONTROL_LIBS GLOBUS_FTP_CONTROL_CFLAGS GLOBUS_FTP_CLIENT_LIBS GLOBUS_FTP_CLIENT_CFLAGS GLOBUS_GSI_CALLBACK_LIBS GLOBUS_GSI_CALLBACK_CFLAGS GLOBUS_GSS_ASSIST_LIBS GLOBUS_GSS_ASSIST_CFLAGS GLOBUS_GSSAPI_GSI_LIBS GLOBUS_GSSAPI_GSI_CFLAGS GPT_QUERY GPT_FLAVOR_CONFIGURATION GLOBUS_MAKEFILE_HEADER GLOBUS_COMMON_LIBS GLOBUS_COMMON_CFLAGS SQLITEJSTORE_ENABLED_FALSE SQLITEJSTORE_ENABLED_TRUE DBJSTORE_ENABLED_FALSE DBJSTORE_ENABLED_TRUE DBCXX_LIBS DBCXX_CPPFLAGS ARGUS_ENABLED_FALSE ARGUS_ENABLED_TRUE ARGUS_LIBS ARGUS_CFLAGS ZLIB_LIBS ZLIB_CFLAGS monitor_prefix MYSQL_LIBRARY_ENABLED_FALSE MYSQL_LIBRARY_ENABLED_TRUE MYSQL_CFLAGS MYSQL_LIBS XMLSEC_OPENSSL_LIBS XMLSEC_OPENSSL_CFLAGS XMLSEC_LIBS XMLSEC_CFLAGS MACOSX_FALSE MACOSX_TRUE LDNS_ENABLED_FALSE LDNS_ENABLED_TRUE LDNS_CONFIG LDNS_LIBS LDNS_CFLAGS TEST_DIR CPPUNIT_CONFIG CPPUNIT_LIBS CPPUNIT_CFLAGS SQLITE_ENABLED_FALSE SQLITE_ENABLED_TRUE SQLITE_LIBS SQLITE_CFLAGS NSS_ENABLED_FALSE NSS_ENABLED_TRUE NSS_LIBS NSS_CFLAGS OPENSSL_1_1_LIBS OPENSSL_1_1_CFLAGS OPENSSL_LIBS OPENSSL_CFLAGS LIBXML2_LIBS LIBXML2_CFLAGS GLIBMM_LIBS GLIBMM_CFLAGS GTHREAD_LIBS GTHREAD_CFLAGS SYSTEMD_DAEMON_LIBS PYLINT_ENABLED_FALSE PYLINT_ENABLED_TRUE PYLINT_ARGS_ARGUMENTS_DIFFER PYLINT_ARGS PYLINT ALTPYTHON3_FALSE ALTPYTHON3_TRUE ALTPYTHON_ENABLED_FALSE ALTPYTHON_ENABLED_TRUE ALTPYTHON_SITE_LIB ALTPYTHON_SITE_ARCH ALTPYTHON_EXT_SUFFIX ALTPYTHON_VERSION ALTPYTHON_LIBS ALTPYTHON_CFLAGS ALTPYTHON PYTHON_SERVICE_FALSE PYTHON_SERVICE_TRUE PYTHON_SWIG_ENABLED_FALSE PYTHON_SWIG_ENABLED_TRUE PYTHON3_FALSE PYTHON3_TRUE PYTHON_ENABLED_FALSE PYTHON_ENABLED_TRUE PYTHON_SITE_LIB PYTHON_SITE_ARCH PYTHON_EXT_SUFFIX PYTHON_VERSION PYTHON_LIBS PYTHON_CFLAGS PKG_CONFIG_LIBDIR PKG_CONFIG_PATH PYTHON SWIG_ENABLED_FALSE SWIG_ENABLED_TRUE SWIG_PYTHON_NAMING SWIG2 SWIG PEDANTIC_COMPILE_FALSE PEDANTIC_COMPILE_TRUE AM_CXXFLAGS pkgconfigdir PKG_CONFIG POSUB LTLIBINTL LIBINTL INTLLIBS LTLIBICONV LIBICONV INTL_MACOSX_LIBS XGETTEXT_EXTRA_OPTIONS MSGMERGE XGETTEXT_015 XGETTEXT GMSGFMT_015 MSGFMT_015 GMSGFMT MSGFMT GETTEXT_MACRO_VERSION USE_NLS cronddir SYSV_SCRIPTS_ENABLED_FALSE SYSV_SCRIPTS_ENABLED_TRUE initddir SYSTEMD_UNITS_ENABLED_FALSE SYSTEMD_UNITS_ENABLED_TRUE unitsdir pkgdatasubdir pkgdatadir_rel_to_pkglibexecdir bindir_rel_to_pkglibexecdir sbindir_rel_to_pkglibexecdir pkglibdir_rel_to_pkglibexecdir pkglibexecsubdir pkglibsubdir libsubdir ARCXMLSEC_CFLAGS ARCXMLSEC_LIBS ARCWSSECURITY_CFLAGS ARCWSSECURITY_LIBS ARCWSADDRESSING_CFLAGS ARCWSADDRESSING_LIBS ARCINFOSYS_CFLAGS ARCINFOSYS_LIBS ARCOTOKENS_CFLAGS ARCOTOKENS_LIBS ARCSECURITY_CFLAGS ARCSECURITY_LIBS ARCMESSAGE_CFLAGS ARCMESSAGE_LIBS ARCLOADER_CFLAGS ARCLOADER_LIBS ARCJOB_CFLAGS ARCJOB_LIBS ARCDATA_CFLAGS ARCDATA_LIBS ARCCREDENTIAL_CFLAGS ARCCREDENTIAL_LIBS ARCCOMMON_CFLAGS ARCCOMMON_LIBS ARCCLIENT_CFLAGS ARCCLIENT_LIBS pkglibexecdir extpkglibdir pkglibdir pkgincludedir pkgdatadir PERL CXXCPP OTOOL64 OTOOL LIPO NMEDIT DSYMUTIL MANIFEST_TOOL RANLIB ac_ct_AR AR DLLTOOL OBJDUMP NM ac_ct_DUMPBIN DUMPBIN LD FGREP SED host_os host_vendor host_cpu host build_os build_vendor build_cpu build LIBTOOL LN_S EGREP GREP CPP am__fastdepCC_FALSE am__fastdepCC_TRUE CCDEPMODE ac_ct_CC CFLAGS CC am__fastdepCXX_FALSE am__fastdepCXX_TRUE CXXDEPMODE am__nodep AMDEPBACKSLASH AMDEP_FALSE AMDEP_TRUE am__quote am__include DEPDIR OBJEXT EXEEXT ac_ct_CXX CPPFLAGS LDFLAGS CXXFLAGS CXX ARC_VERSION ARC_VERSION_NUM ARC_VERSION_PATCH ARC_VERSION_MINOR 
ARC_VERSION_MAJOR debianversion fedorasetupopts fedorarelease preversion baseversion AM_BACKSLASH AM_DEFAULT_VERBOSITY AM_DEFAULT_V AM_V am__untar am__tar AMTAR am__leading_dot SET_MAKE AWK mkdir_p MKDIR_P INSTALL_STRIP_PROGRAM STRIP install_sh MAKEINFO AUTOHEADER AUTOMAKE AUTOCONF ACLOCAL VERSION PACKAGE CYGPATH_W am__isrc INSTALL_DATA INSTALL_SCRIPT INSTALL_PROGRAM target_alias host_alias build_alias LIBS ECHO_T ECHO_N ECHO_C DEFS mandir localedir libdir psdir pdfdir dvidir htmldir infodir docdir oldincludedir includedir localstatedir sharedstatedir sysconfdir datadir datarootdir libexecdir sbindir bindir program_transform_name prefix exec_prefix PACKAGE_URL PACKAGE_BUGREPORT PACKAGE_STRING PACKAGE_VERSION PACKAGE_TARNAME PACKAGE_NAME PATH_SEPARATOR SHELL' ac_subst_files='' ac_user_opts=' enable_option_checking enable_silent_rules enable_dependency_tracking enable_static enable_shared with_pic enable_fast_install with_gnu_ld with_sysroot enable_libtool_lock with_systemd_units_location with_sysv_scripts_location with_cron_scripts_prefix enable_nls enable_rpath with_libiconv_prefix with_libintl_prefix enable_largefile enable_all enable_all_clients enable_all_data_clients enable_all_services enable_pedantic_compile enable_swig_python enable_swig enable_hed enable_python with_python with_python_site_arch with_python_site_lib enable_altpython with_altpython with_altpython_site_arch with_altpython_site_lib enable_pylint enable_systemd enable_nss enable_cppunit enable_ldns enable_xmlsec1 with_xmlsec1 enable_mysql with_mysql enable_monitor with_monitor with_zlib enable_argus with_argus with_dbcxx_include with_db4_library_path enable_dbjstore enable_sqlitejstore with_flavor with_lcas_location with_lcmaps_location enable_mock_dmc enable_gfal enable_s3 with_s3 enable_xrootd with_xrootd enable_ldap with_inline_python enable_doc enable_a_rex_service enable_internal enable_gridftpd_service enable_ldap_service enable_candypond enable_datadelivery_service enable_acix enable_compute_client enable_credentials_client enable_data_client enable_emies_client enable_arcrest_client ' ac_precious_vars='build_alias host_alias target_alias CXX CXXFLAGS LDFLAGS LIBS CPPFLAGS CCC CC CFLAGS CPP CXXCPP PKG_CONFIG PKG_CONFIG_PATH PKG_CONFIG_LIBDIR PYTHON_CFLAGS PYTHON_LIBS ALTPYTHON_CFLAGS ALTPYTHON_LIBS GTHREAD_CFLAGS GTHREAD_LIBS GLIBMM_CFLAGS GLIBMM_LIBS LIBXML2_CFLAGS LIBXML2_LIBS OPENSSL_CFLAGS OPENSSL_LIBS OPENSSL_1_1_CFLAGS OPENSSL_1_1_LIBS NSS_CFLAGS NSS_LIBS SQLITE_CFLAGS SQLITE_LIBS CPPUNIT_CFLAGS CPPUNIT_LIBS LDNS_CFLAGS LDNS_LIBS XMLSEC_CFLAGS XMLSEC_LIBS XMLSEC_OPENSSL_CFLAGS XMLSEC_OPENSSL_LIBS ARGUS_CFLAGS ARGUS_LIBS GLOBUS_COMMON_CFLAGS GLOBUS_COMMON_LIBS GLOBUS_MAKEFILE_HEADER GPT_FLAVOR_CONFIGURATION GPT_QUERY GLOBUS_GSSAPI_GSI_CFLAGS GLOBUS_GSSAPI_GSI_LIBS GLOBUS_GSS_ASSIST_CFLAGS GLOBUS_GSS_ASSIST_LIBS GLOBUS_GSI_CALLBACK_CFLAGS GLOBUS_GSI_CALLBACK_LIBS GLOBUS_FTP_CLIENT_CFLAGS GLOBUS_FTP_CLIENT_LIBS GLOBUS_FTP_CONTROL_CFLAGS GLOBUS_FTP_CONTROL_LIBS GLOBUS_IO_CFLAGS GLOBUS_IO_LIBS GLOBUS_GSI_CERT_UTILS_CFLAGS GLOBUS_GSI_CERT_UTILS_LIBS GLOBUS_GSI_CREDENTIAL_CFLAGS GLOBUS_GSI_CREDENTIAL_LIBS GLOBUS_OPENSSL_MODULE_CFLAGS GLOBUS_OPENSSL_MODULE_LIBS GLOBUS_OPENSSL_CFLAGS GLOBUS_OPENSSL_LIBS GFAL2_CFLAGS GFAL2_LIBS BASH_COMPLETION_CFLAGS BASH_COMPLETION_LIBS' # Initialize some variables set by options. ac_init_help= ac_init_version=false ac_unrecognized_opts= ac_unrecognized_sep= # The variables have the same names as the options, with # dashes changed to underlines. 
cache_file=/dev/null exec_prefix=NONE no_create= no_recursion= prefix=NONE program_prefix=NONE program_suffix=NONE program_transform_name=s,x,x, silent= site= srcdir= verbose= x_includes=NONE x_libraries=NONE # Installation directory options. # These are left unexpanded so users can "make install exec_prefix=/foo" # and all the variables that are supposed to be based on exec_prefix # by default will actually change. # Use braces instead of parens because sh, perl, etc. also accept them. # (The list follows the same order as the GNU Coding Standards.) bindir='${exec_prefix}/bin' sbindir='${exec_prefix}/sbin' libexecdir='${exec_prefix}/libexec' datarootdir='${prefix}/share' datadir='${datarootdir}' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' includedir='${prefix}/include' oldincludedir='/usr/include' docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' infodir='${datarootdir}/info' htmldir='${docdir}' dvidir='${docdir}' pdfdir='${docdir}' psdir='${docdir}' libdir='${exec_prefix}/lib' localedir='${datarootdir}/locale' mandir='${datarootdir}/man' ac_prev= ac_dashdash= for ac_option do # If the previous option needs an argument, assign it. if test -n "$ac_prev"; then eval $ac_prev=\$ac_option ac_prev= continue fi case $ac_option in *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; *=) ac_optarg= ;; *) ac_optarg=yes ;; esac # Accept the important Cygnus configure options, so we can diagnose typos. case $ac_dashdash$ac_option in --) ac_dashdash=yes ;; -bindir | --bindir | --bindi | --bind | --bin | --bi) ac_prev=bindir ;; -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) bindir=$ac_optarg ;; -build | --build | --buil | --bui | --bu) ac_prev=build_alias ;; -build=* | --build=* | --buil=* | --bui=* | --bu=*) build_alias=$ac_optarg ;; -cache-file | --cache-file | --cache-fil | --cache-fi \ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) ac_prev=cache_file ;; -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) cache_file=$ac_optarg ;; --config-cache | -C) cache_file=config.cache ;; -datadir | --datadir | --datadi | --datad) ac_prev=datadir ;; -datadir=* | --datadir=* | --datadi=* | --datad=*) datadir=$ac_optarg ;; -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ | --dataroo | --dataro | --datar) ac_prev=datarootdir ;; -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) datarootdir=$ac_optarg ;; -disable-* | --disable-*) ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? 
"invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=no ;; -docdir | --docdir | --docdi | --doc | --do) ac_prev=docdir ;; -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) docdir=$ac_optarg ;; -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) ac_prev=dvidir ;; -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) dvidir=$ac_optarg ;; -enable-* | --enable-*) ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=\$ac_optarg ;; -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ | --exec | --exe | --ex) ac_prev=exec_prefix ;; -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ | --exec=* | --exe=* | --ex=*) exec_prefix=$ac_optarg ;; -gas | --gas | --ga | --g) # Obsolete; use --with-gas. with_gas=yes ;; -help | --help | --hel | --he | -h) ac_init_help=long ;; -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) ac_init_help=recursive ;; -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) ac_init_help=short ;; -host | --host | --hos | --ho) ac_prev=host_alias ;; -host=* | --host=* | --hos=* | --ho=*) host_alias=$ac_optarg ;; -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) ac_prev=htmldir ;; -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ | --ht=*) htmldir=$ac_optarg ;; -includedir | --includedir | --includedi | --included | --include \ | --includ | --inclu | --incl | --inc) ac_prev=includedir ;; -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ | --includ=* | --inclu=* | --incl=* | --inc=*) includedir=$ac_optarg ;; -infodir | --infodir | --infodi | --infod | --info | --inf) ac_prev=infodir ;; -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) infodir=$ac_optarg ;; -libdir | --libdir | --libdi | --libd) ac_prev=libdir ;; -libdir=* | --libdir=* | --libdi=* | --libd=*) libdir=$ac_optarg ;; -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ | --libexe | --libex | --libe) ac_prev=libexecdir ;; -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ | --libexe=* | --libex=* | --libe=*) libexecdir=$ac_optarg ;; -localedir | --localedir | --localedi | --localed | --locale) ac_prev=localedir ;; -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) localedir=$ac_optarg ;; -localstatedir | --localstatedir | --localstatedi | --localstated \ | --localstate | --localstat | --localsta | --localst | --locals) ac_prev=localstatedir ;; -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) localstatedir=$ac_optarg ;; -mandir | --mandir | --mandi | --mand | --man | --ma | --m) 
ac_prev=mandir ;; -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) mandir=$ac_optarg ;; -nfp | --nfp | --nf) # Obsolete; use --without-fp. with_fp=no ;; -no-create | --no-create | --no-creat | --no-crea | --no-cre \ | --no-cr | --no-c | -n) no_create=yes ;; -no-recursion | --no-recursion | --no-recursio | --no-recursi \ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) no_recursion=yes ;; -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ | --oldin | --oldi | --old | --ol | --o) ac_prev=oldincludedir ;; -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) oldincludedir=$ac_optarg ;; -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) ac_prev=prefix ;; -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) prefix=$ac_optarg ;; -program-prefix | --program-prefix | --program-prefi | --program-pref \ | --program-pre | --program-pr | --program-p) ac_prev=program_prefix ;; -program-prefix=* | --program-prefix=* | --program-prefi=* \ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) program_prefix=$ac_optarg ;; -program-suffix | --program-suffix | --program-suffi | --program-suff \ | --program-suf | --program-su | --program-s) ac_prev=program_suffix ;; -program-suffix=* | --program-suffix=* | --program-suffi=* \ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) program_suffix=$ac_optarg ;; -program-transform-name | --program-transform-name \ | --program-transform-nam | --program-transform-na \ | --program-transform-n | --program-transform- \ | --program-transform | --program-transfor \ | --program-transfo | --program-transf \ | --program-trans | --program-tran \ | --progr-tra | --program-tr | --program-t) ac_prev=program_transform_name ;; -program-transform-name=* | --program-transform-name=* \ | --program-transform-nam=* | --program-transform-na=* \ | --program-transform-n=* | --program-transform-=* \ | --program-transform=* | --program-transfor=* \ | --program-transfo=* | --program-transf=* \ | --program-trans=* | --program-tran=* \ | --progr-tra=* | --program-tr=* | --program-t=*) program_transform_name=$ac_optarg ;; -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) ac_prev=pdfdir ;; -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) pdfdir=$ac_optarg ;; -psdir | --psdir | --psdi | --psd | --ps) ac_prev=psdir ;; -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) psdir=$ac_optarg ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) silent=yes ;; -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ | --sbi=* | --sb=*) sbindir=$ac_optarg ;; -sharedstatedir | --sharedstatedir | --sharedstatedi \ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ | --sharedst | --shareds | --shared | --share | --shar \ | --sha | --sh) ac_prev=sharedstatedir ;; -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ | --sha=* | --sh=*) sharedstatedir=$ac_optarg ;; -site | --site | --sit) ac_prev=site ;; -site=* | --site=* | --sit=*) site=$ac_optarg ;; 
-srcdir | --srcdir | --srcdi | --srcd | --src | --sr) ac_prev=srcdir ;; -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) srcdir=$ac_optarg ;; -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ | --syscon | --sysco | --sysc | --sys | --sy) ac_prev=sysconfdir ;; -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) sysconfdir=$ac_optarg ;; -target | --target | --targe | --targ | --tar | --ta | --t) ac_prev=target_alias ;; -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) target_alias=$ac_optarg ;; -v | -verbose | --verbose | --verbos | --verbo | --verb) verbose=yes ;; -version | --version | --versio | --versi | --vers | -V) ac_init_version=: ;; -with-* | --with-*) ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=\$ac_optarg ;; -without-* | --without-*) ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=no ;; --x) # Obsolete; use --with-x. with_x=yes ;; -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ | --x-incl | --x-inc | --x-in | --x-i) ac_prev=x_includes ;; -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) x_includes=$ac_optarg ;; -x-libraries | --x-libraries | --x-librarie | --x-librari \ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) ac_prev=x_libraries ;; -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) x_libraries=$ac_optarg ;; -*) as_fn_error $? "unrecognized option: \`$ac_option' Try \`$0 --help' for more information" ;; *=*) ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` # Reject names that are not valid shell variable names. case $ac_envvar in #( '' | [0-9]* | *[!_$as_cr_alnum]* ) as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; esac eval $ac_envvar=\$ac_optarg export $ac_envvar ;; *) # FIXME: should be removed in autoconf 3.0. $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" ;; esac done if test -n "$ac_prev"; then ac_option=--`echo $ac_prev | sed 's/_/-/g'` as_fn_error $? "missing argument to $ac_option" fi if test -n "$ac_unrecognized_opts"; then case $enable_option_checking in no) ;; fatal) as_fn_error $? 
"unrecognized options: $ac_unrecognized_opts" ;; *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; esac fi # Check all directory arguments for consistency. for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ datadir sysconfdir sharedstatedir localstatedir includedir \ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ libdir localedir mandir do eval ac_val=\$$ac_var # Remove trailing slashes. case $ac_val in */ ) ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` eval $ac_var=\$ac_val;; esac # Be sure to have absolute directory names. case $ac_val in [\\/$]* | ?:[\\/]* ) continue;; NONE | '' ) case $ac_var in *prefix ) continue;; esac;; esac as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" done # There might be people who depend on the old broken behavior: `$host' # used to hold the argument of --host etc. # FIXME: To remove some day. build=$build_alias host=$host_alias target=$target_alias # FIXME: To remove some day. if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi fi ac_tool_prefix= test -n "$host_alias" && ac_tool_prefix=$host_alias- test "$silent" = yes && exec 6>/dev/null ac_pwd=`pwd` && test -n "$ac_pwd" && ac_ls_di=`ls -di .` && ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || as_fn_error $? "working directory cannot be determined" test "X$ac_ls_di" = "X$ac_pwd_ls_di" || as_fn_error $? "pwd does not report name of working directory" # Find the source files, if location was not specified. if test -z "$srcdir"; then ac_srcdir_defaulted=yes # Try the directory containing this script, then the parent directory. ac_confdir=`$as_dirname -- "$as_myself" || $as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_myself" : 'X\(//\)[^/]' \| \ X"$as_myself" : 'X\(//\)$' \| \ X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_myself" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` srcdir=$ac_confdir if test ! -r "$srcdir/$ac_unique_file"; then srcdir=.. fi else ac_srcdir_defaulted=no fi if test ! -r "$srcdir/$ac_unique_file"; then test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" fi ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" ac_abs_confdir=`( cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" pwd)` # When building in place, set srcdir=. if test "$ac_abs_confdir" = "$ac_pwd"; then srcdir=. fi # Remove unnecessary trailing slashes from srcdir. # Double slashes in file names in object file debugging info # mess up M-x gdb in Emacs. case $srcdir in */) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; esac for ac_var in $ac_precious_vars; do eval ac_env_${ac_var}_set=\${${ac_var}+set} eval ac_env_${ac_var}_value=\$${ac_var} eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} eval ac_cv_env_${ac_var}_value=\$${ac_var} done # # Report the --help message. # if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF \`configure' configures nordugrid-arc 6.14.0 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... To assign environment variables (e.g., CC, CFLAGS...), specify them as VAR=VALUE. 
See below for descriptions of some of the useful variables. Defaults for the options are specified in brackets. Configuration: -h, --help display this help and exit --help=short display options specific to this package --help=recursive display the short help of all the included packages -V, --version display version information and exit -q, --quiet, --silent do not print \`checking ...' messages --cache-file=FILE cache test results in FILE [disabled] -C, --config-cache alias for \`--cache-file=config.cache' -n, --no-create do not create output files --srcdir=DIR find the sources in DIR [configure dir or \`..'] Installation directories: --prefix=PREFIX install architecture-independent files in PREFIX [$ac_default_prefix] --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX [PREFIX] By default, \`make install' will install all the files in \`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify an installation prefix other than \`$ac_default_prefix' using \`--prefix', for instance \`--prefix=\$HOME'. For better control, use the options below. Fine tuning of the installation directories: --bindir=DIR user executables [EPREFIX/bin] --sbindir=DIR system admin executables [EPREFIX/sbin] --libexecdir=DIR program executables [EPREFIX/libexec] --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] --datadir=DIR read-only architecture-independent data [DATAROOTDIR] --infodir=DIR info documentation [DATAROOTDIR/info] --localedir=DIR locale-dependent data [DATAROOTDIR/locale] --mandir=DIR man documentation [DATAROOTDIR/man] --docdir=DIR documentation root [DATAROOTDIR/doc/nordugrid-arc] --htmldir=DIR html documentation [DOCDIR] --dvidir=DIR dvi documentation [DOCDIR] --pdfdir=DIR pdf documentation [DOCDIR] --psdir=DIR ps documentation [DOCDIR] _ACEOF cat <<\_ACEOF Program names: --program-prefix=PREFIX prepend PREFIX to installed program names --program-suffix=SUFFIX append SUFFIX to installed program names --program-transform-name=PROGRAM run sed PROGRAM on installed program names System types: --build=BUILD configure for building on BUILD [guessed] --host=HOST cross-compile to build programs to run on HOST [BUILD] _ACEOF fi if test -n "$ac_init_help"; then case $ac_init_help in short | recursive ) echo "Configuration of nordugrid-arc 6.14.0:";; esac cat <<\_ACEOF Optional Features: --disable-option-checking ignore unrecognized --enable/--with options --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) --enable-FEATURE[=ARG] include FEATURE [ARG=yes] --enable-silent-rules less verbose build output (undo: "make V=1") --disable-silent-rules verbose build output (undo: "make V=0") --enable-dependency-tracking do not reject slow dependency extractors --disable-dependency-tracking speeds up one-time build --enable-static[=PKGS] build static libraries [default=no] --enable-shared[=PKGS] build shared libraries [default=yes] --enable-fast-install[=PKGS] optimize for fast installation [default=yes] --disable-libtool-lock avoid locking (might break parallel builds) --disable-nls do not use Native Language Support --disable-rpath do not hardcode runtime library paths --disable-largefile 
omit support for large files --disable-all disables all buildable components. Can be overwritten with --enable-* for group or specific component. It is also possible to use --enable-all to overwrite defaults for most of components. --disable-all-clients disables all buildable client components. Can be overwritten with --enable-* for specific component. It is also possible to use --enable-all-clients to overwrite defaults and --enable-all. --disable-all-data-clients disables all buildable client components providing data handling abilities. Can be overwritten with --enable-* for specific component. It is also possible to use --enable-all-data-clients to overwrite defaults, --enable-all and --enable-all-clients. --disable-all-services disables all buildable service componets. Can be overwritten with --enable-* for specific component. It is also possible to use --enable-all-services to overwrite defaults and --enable-all. --enable-pedantic-compile add pedantic compiler flags --disable-swig-python disable SWIG python bindings --disable-swig disable all bindings through SWIG --disable-hed disable building HED libraries and plugins. Do not do that unless You do not want to build anything. Even in that case better use --disable-all. --disable-python disable Python components --disable-altpython enable alternative Python binding --disable-pylint disable python example checking using pylint --enable-systemd enable use of the systemd daemon integration features --disable-nss disable use of the mozilla nss library --disable-cppunit disable cppunit-based UNIT testing of code --disable-ldns disable ldns library usage (makes ARCHERY client unavailable) --disable-xmlsec1 disable features which need xmlsec1 library --enable-mysql enable use of the MySQL client library --enable-monitor enable use of the monitor --enable-argus enable use of Argus PEP V2 libraries --disable-dbjstore disable storing local jobs information in BDB --disable-sqlitejstore disable storing local jobs information in SQLite --enable-mock-dmc enable mock DMC, default is disable --enable-gfal enable the GFAL support, default is disable --enable-s3 enable the S3 support, default is disable --disable-xrootd disable the xrootd support, default is enable --disable-ldap disable the LDAP support - requires OpenLDAP --disable-doc disable building documentation (requires doxygen and pdflatex) --disable-a-rex-service disable building A-Rex service --enable-internal enable building the internal job plugin --disable-gridftpd-service disable building Gridftpd service --disable-ldap-service disable building LDAP Infosystem Service --disable-monitor disable building LDAP Monitor --disable-candypond disable building candypond --disable-datadelivery-service disable building DataDelivery service --disable-acix disable building ACIX service --disable-compute-client disable building compute (job management) client tools --disable-credentials-client disable building client tools for handling X.509 credentials --disable-data-client disable building generic client tools for handling data --disable-emies-client disables building EMI ES-related client plugins. --disable-arcrest-client disables building ARC REST interface client plugins. 
Optional Packages: --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) --with-pic[=PKGS] try to use only PIC/non-PIC objects [default=use both] --with-gnu-ld assume the C compiler uses GNU ld [default=no] --with-sysroot=DIR Search for dependent libraries within DIR (or the compiler's sysroot if not specified). --with-systemd-units-location= Location of the systemd unit files. [[None]] --with-sysv-scripts-location= Location of the SYSV init scripts. [[autodetect]] --with-cron-scripts-prefix= Specify the location of the cron directory. [[SYSCONFDIR/cron.d]] --with-gnu-ld assume the C compiler uses GNU ld default=no --with-libiconv-prefix[=DIR] search for libiconv in DIR/include and DIR/lib --without-libiconv-prefix don't search for libiconv in includedir and libdir --with-libintl-prefix[=DIR] search for libintl in DIR/include and DIR/lib --without-libintl-prefix don't search for libintl in includedir and libdir --with-python=(PYTHON) specify python program from PATH --with-python-site-arch=directory Direcory where Python modules will be installed - defaults is to query the Python binary --with-python-site-lib=directory Direcory where Python modules will be installed - defaults is to query the Python binary --with-altpython=(PYTHON) specify alternative python program from PATH --with-altpython-site-arch=directory Direcory where Python modules will be installed - defaults is to query the Python binary --with-altpython-site-lib=directory Direcory where Python modules will be installed - defaults is to query the Python binary --with-xmlsec1=(PATH) xmlsec1 location --with-mysql=(PATH) prefix of MySQL installation. e.g. /usr/local or /usr --with-monitor=(PATH) where to install the monitor, eg /var/www/monitor or /usr/share/arc/monitor --with-zlib=PATH where zlib is installed --with-argus=PATH ARGUS PEP installation path --with-dbcxx-include=PATH Specify path to db_cxx.h --with-db4-library-path=PATH Specify path to DB4 library --with-flavor=(flavor) Specify the gpt build flavor [[autodetect]] --with-lcas-location= Specify the LCAS installation path. [/opt/glite] --with-lcmaps-location= Specify the LCMAPS installation path. [/opt/glite] --with-s3=(PATH) libs3 location --with-xrootd=(PATH) Xrootd location --with-inline-python= Location of the Perl module Inline::Python. Some influential environment variables: CXX C++ compiler command CXXFLAGS C++ compiler flags LDFLAGS linker flags, e.g. -L if you have libraries in a nonstandard directory LIBS libraries to pass to the linker, e.g. -l CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. 
-I if you have headers in a nonstandard directory CC C compiler command CFLAGS C compiler flags CPP C preprocessor CXXCPP C++ preprocessor PKG_CONFIG path to pkg-config utility PKG_CONFIG_PATH directories to add to pkg-config's search path PKG_CONFIG_LIBDIR path overriding pkg-config's built-in search path PYTHON_CFLAGS C compiler flags for PYTHON, overriding pkg-config PYTHON_LIBS linker flags for PYTHON, overriding pkg-config ALTPYTHON_CFLAGS C compiler flags for ALTPYTHON, overriding pkg-config ALTPYTHON_LIBS linker flags for ALTPYTHON, overriding pkg-config GTHREAD_CFLAGS C compiler flags for GTHREAD, overriding pkg-config GTHREAD_LIBS linker flags for GTHREAD, overriding pkg-config GLIBMM_CFLAGS C compiler flags for GLIBMM, overriding pkg-config GLIBMM_LIBS linker flags for GLIBMM, overriding pkg-config LIBXML2_CFLAGS C compiler flags for LIBXML2, overriding pkg-config LIBXML2_LIBS linker flags for LIBXML2, overriding pkg-config OPENSSL_CFLAGS C compiler flags for OPENSSL, overriding pkg-config OPENSSL_LIBS linker flags for OPENSSL, overriding pkg-config OPENSSL_1_1_CFLAGS C compiler flags for OPENSSL_1_1, overriding pkg-config OPENSSL_1_1_LIBS linker flags for OPENSSL_1_1, overriding pkg-config NSS_CFLAGS C compiler flags for NSS, overriding pkg-config NSS_LIBS linker flags for NSS, overriding pkg-config SQLITE_CFLAGS C compiler flags for SQLITE, overriding pkg-config SQLITE_LIBS linker flags for SQLITE, overriding pkg-config CPPUNIT_CFLAGS C compiler flags for CPPUNIT, overriding pkg-config CPPUNIT_LIBS linker flags for CPPUNIT, overriding pkg-config LDNS_CFLAGS C compiler flags for LDNS, overriding pkg-config LDNS_LIBS linker flags for LDNS, overriding pkg-config XMLSEC_CFLAGS C compiler flags for XMLSEC, overriding pkg-config XMLSEC_LIBS linker flags for XMLSEC, overriding pkg-config XMLSEC_OPENSSL_CFLAGS C compiler flags for XMLSEC_OPENSSL, overriding pkg-config XMLSEC_OPENSSL_LIBS linker flags for XMLSEC_OPENSSL, overriding pkg-config ARGUS_CFLAGS C compiler flags for ARGUS, overriding pkg-config ARGUS_LIBS linker flags for ARGUS, overriding pkg-config GLOBUS_COMMON_CFLAGS C compiler flags for GLOBUS_COMMON, overriding pkg-config GLOBUS_COMMON_LIBS linker flags for GLOBUS_COMMON, overriding pkg-config GLOBUS_MAKEFILE_HEADER path to globus-makefile-header GPT_FLAVOR_CONFIGURATION path to gpt-flavor-configuration GPT_QUERY path to gpt-query GLOBUS_GSSAPI_GSI_CFLAGS C compiler flags for GLOBUS_GSSAPI_GSI, overriding pkg-config GLOBUS_GSSAPI_GSI_LIBS linker flags for GLOBUS_GSSAPI_GSI, overriding pkg-config GLOBUS_GSS_ASSIST_CFLAGS C compiler flags for GLOBUS_GSS_ASSIST, overriding pkg-config GLOBUS_GSS_ASSIST_LIBS linker flags for GLOBUS_GSS_ASSIST, overriding pkg-config GLOBUS_GSI_CALLBACK_CFLAGS C compiler flags for GLOBUS_GSI_CALLBACK, overriding pkg-config GLOBUS_GSI_CALLBACK_LIBS linker flags for GLOBUS_GSI_CALLBACK, overriding pkg-config GLOBUS_FTP_CLIENT_CFLAGS C compiler flags for GLOBUS_FTP_CLIENT, overriding pkg-config GLOBUS_FTP_CLIENT_LIBS linker flags for GLOBUS_FTP_CLIENT, overriding pkg-config GLOBUS_FTP_CONTROL_CFLAGS C compiler flags for GLOBUS_FTP_CONTROL, overriding pkg-config GLOBUS_FTP_CONTROL_LIBS linker flags for GLOBUS_FTP_CONTROL, overriding pkg-config GLOBUS_IO_CFLAGS C compiler flags for GLOBUS_IO, overriding pkg-config GLOBUS_IO_LIBS linker flags for GLOBUS_IO, overriding pkg-config GLOBUS_GSI_CERT_UTILS_CFLAGS C compiler flags for GLOBUS_GSI_CERT_UTILS, overriding pkg-config GLOBUS_GSI_CERT_UTILS_LIBS linker flags for GLOBUS_GSI_CERT_UTILS, overriding 
pkg-config GLOBUS_GSI_CREDENTIAL_CFLAGS C compiler flags for GLOBUS_GSI_CREDENTIAL, overriding pkg-config GLOBUS_GSI_CREDENTIAL_LIBS linker flags for GLOBUS_GSI_CREDENTIAL, overriding pkg-config GLOBUS_OPENSSL_MODULE_CFLAGS C compiler flags for GLOBUS_OPENSSL_MODULE, overriding pkg-config GLOBUS_OPENSSL_MODULE_LIBS linker flags for GLOBUS_OPENSSL_MODULE, overriding pkg-config GLOBUS_OPENSSL_CFLAGS C compiler flags for GLOBUS_OPENSSL, overriding pkg-config GLOBUS_OPENSSL_LIBS linker flags for GLOBUS_OPENSSL, overriding pkg-config GFAL2_CFLAGS C compiler flags for GFAL2, overriding pkg-config GFAL2_LIBS linker flags for GFAL2, overriding pkg-config BASH_COMPLETION_CFLAGS C compiler flags for BASH_COMPLETION, overriding pkg-config BASH_COMPLETION_LIBS linker flags for BASH_COMPLETION, overriding pkg-config Use these variables to override the choices made by `configure' or to help it to find libraries and programs with nonstandard names/locations. Report bugs to . _ACEOF ac_status=$? fi if test "$ac_init_help" = "recursive"; then # If there are subdirs, report their specific --help. for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue test -d "$ac_dir" || { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || continue ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix cd "$ac_dir" || { ac_status=$?; continue; } # Check for guested configure. if test -f "$ac_srcdir/configure.gnu"; then echo && $SHELL "$ac_srcdir/configure.gnu" --help=recursive elif test -f "$ac_srcdir/configure"; then echo && $SHELL "$ac_srcdir/configure" --help=recursive else $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 fi || ac_status=$? cd "$ac_pwd" || { ac_status=$?; break; } done fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF nordugrid-arc configure 6.14.0 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF exit fi ## ------------------------ ## ## Autoconf initialization. ## ## ------------------------ ## # ac_fn_cxx_try_compile LINENO # ---------------------------- # Try to compile conftest.$ac_ext, and return whether this succeeded. 
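# As an illustration of the options and influential variables listed in the
# help text above, a hypothetical invocation might look like the following
# (the prefix, paths and flag values are illustrative assumptions, not
# project defaults):
#
#   ./configure --prefix=/opt/nordugrid \
#               --with-python=python3 \
#               --with-monitor=/usr/share/arc/monitor \
#               GLIBMM_CFLAGS="-I/opt/glibmm/include/glibmm-2.4" \
#               GLIBMM_LIBS="-L/opt/glibmm/lib -lglibmm-2.4"
#
# Passing a FOO_CFLAGS/FOO_LIBS pair on the command line overrides the
# corresponding pkg-config query, which helps when a dependency lives in a
# nonstandard location.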
ac_fn_cxx_try_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_cxx_werror_flag" || test ! -s conftest.err } && test -s conftest.$ac_objext; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_compile # ac_fn_c_try_compile LINENO # -------------------------- # Try to compile conftest.$ac_ext, and return whether this succeeded. ac_fn_c_try_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_c_werror_flag" || test ! -s conftest.err } && test -s conftest.$ac_objext; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_compile # ac_fn_c_try_cpp LINENO # ---------------------- # Try to preprocess conftest.$ac_ext, and return whether this succeeded. ac_fn_c_try_cpp () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_cpp conftest.$ac_ext" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } > conftest.i && { test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || test ! -s conftest.err }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_cpp # ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES # ------------------------------------------------------- # Tests whether HEADER exists, giving a warning if it cannot be compiled using # the include files in INCLUDES and setting the cache variable VAR # accordingly. 
ac_fn_c_check_header_mongrel () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if eval \${$3+:} false; then : { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } else # Is the header compilable? { $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 $as_echo_n "checking $2 usability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_header_compiler=yes else ac_header_compiler=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 $as_echo "$ac_header_compiler" >&6; } # Is the header present? { $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 $as_echo_n "checking $2 presence... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <$2> _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : ac_header_preproc=yes else ac_header_preproc=no fi rm -f conftest.err conftest.i conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 $as_echo "$ac_header_preproc" >&6; } # So? What about this header? case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( yes:no: ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 $as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ;; no:yes:* ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 $as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 $as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 $as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 $as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ( $as_echo "## --------------------------------------------- ## ## Report this to http://bugzilla.nordugrid.org/ ## ## --------------------------------------------- ##" ) | sed "s/^/$as_me: WARNING: /" >&2 ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else eval "$3=\$ac_header_compiler" fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_header_mongrel # ac_fn_c_try_run LINENO # ---------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. Assumes # that executables *can* be run. 
ac_fn_c_try_run () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; }; then : ac_retval=0 else $as_echo "$as_me: program exited with status $ac_status" >&5 $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=$ac_status fi rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_run # ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES # ------------------------------------------------------- # Tests whether HEADER exists and can be compiled using the include files in # INCLUDES, setting the cache variable VAR accordingly. ac_fn_c_check_header_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_c_try_compile "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_header_compile # ac_fn_c_try_link LINENO # ----------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. ac_fn_c_try_link () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext conftest$ac_exeext if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_c_werror_flag" || test ! -s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || test -x conftest$ac_exeext }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would # interfere with the next link command; also delete a directory that is # left behind by Apple's compiler. We do this before executing the actions. 
rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_link # ac_fn_c_check_func LINENO FUNC VAR # ---------------------------------- # Tests whether FUNC exists, setting the cache variable VAR accordingly ac_fn_c_check_func () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Define $2 to an innocuous variant, in case declares $2. For example, HP-UX 11i declares gettimeofday. */ #define $2 innocuous_$2 /* System header to define __stub macros and hopefully few prototypes, which can conflict with char $2 (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef $2 /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char $2 (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined __stub_$2 || defined __stub___$2 choke me #endif int main () { return $2 (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_func # ac_fn_cxx_try_cpp LINENO # ------------------------ # Try to preprocess conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_cpp () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_cpp conftest.$ac_ext" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } > conftest.i && { test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || test ! -s conftest.err }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_cpp # ac_fn_cxx_try_link LINENO # ------------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_link () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext conftest$ac_exeext if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>conftest.err ac_status=$? 
if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_cxx_werror_flag" || test ! -s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || test -x conftest$ac_exeext }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would # interfere with the next link command; also delete a directory that is # left behind by Apple's compiler. We do this before executing the actions. rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_link # ac_fn_cxx_check_header_mongrel LINENO HEADER VAR INCLUDES # --------------------------------------------------------- # Tests whether HEADER exists, giving a warning if it cannot be compiled using # the include files in INCLUDES and setting the cache variable VAR # accordingly. ac_fn_cxx_check_header_mongrel () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if eval \${$3+:} false; then : { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } else # Is the header compilable? { $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 $as_echo_n "checking $2 usability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_header_compiler=yes else ac_header_compiler=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 $as_echo "$ac_header_compiler" >&6; } # Is the header present? { $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 $as_echo_n "checking $2 presence... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <$2> _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : ac_header_preproc=yes else ac_header_preproc=no fi rm -f conftest.err conftest.i conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 $as_echo "$ac_header_preproc" >&6; } # So? What about this header? case $ac_header_compiler:$ac_header_preproc:$ac_cxx_preproc_warn_flag in #(( yes:no: ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 $as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ;; no:yes:* ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 $as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 $as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" 
>&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 $as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 $as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ( $as_echo "## --------------------------------------------- ## ## Report this to http://bugzilla.nordugrid.org/ ## ## --------------------------------------------- ##" ) | sed "s/^/$as_me: WARNING: /" >&2 ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else eval "$3=\$ac_header_compiler" fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_cxx_check_header_mongrel # ac_fn_c_check_type LINENO TYPE VAR INCLUDES # ------------------------------------------- # Tests whether TYPE exists after having included INCLUDES, setting cache # variable VAR accordingly. ac_fn_c_check_type () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else eval "$3=no" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { if (sizeof ($2)) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { if (sizeof (($2))) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else eval "$3=yes" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_type # ac_fn_c_check_member LINENO AGGR MEMBER VAR INCLUDES # ---------------------------------------------------- # Tries to find if the field MEMBER exists in type AGGR, after including # INCLUDES, setting cache variable VAR accordingly. ac_fn_c_check_member () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2.$3" >&5 $as_echo_n "checking for $2.$3... " >&6; } if eval \${$4+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $5 int main () { static $2 ac_aggr; if (ac_aggr.$3) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : eval "$4=yes" else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ $5 int main () { static $2 ac_aggr; if (sizeof ac_aggr.$3) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : eval "$4=yes" else eval "$4=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$4 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_member # ac_fn_c_check_decl LINENO SYMBOL VAR INCLUDES # --------------------------------------------- # Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR # accordingly. ac_fn_c_check_decl () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack as_decl_name=`echo $2|sed 's/ *(.*//'` as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'` { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5 $as_echo_n "checking whether $as_decl_name is declared... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { #ifndef $as_decl_name #ifdef __cplusplus (void) $as_decl_use; #else (void) $as_decl_name; #endif #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_decl cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. It was created by nordugrid-arc $as_me 6.14.0, which was generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ _ACEOF exec 5>>config.log { cat <<_ASUNAME ## --------- ## ## Platform. ## ## --------- ## hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` /bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` /bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` /usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` /bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` /bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` _ASUNAME as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. $as_echo "PATH: $as_dir" done IFS=$as_save_IFS } >&5 cat >&5 <<_ACEOF ## ----------- ## ## Core tests. ## ## ----------- ## _ACEOF # Keep a trace of the command line. # Strip out --no-create and --no-recursion so they do not pile up. # Strip out --silent because we don't want to record it for future runs. # Also quote any args containing shell meta-characters. # Make two passes to allow for proper duplicate-argument suppression. 
ac_configure_args= ac_configure_args0= ac_configure_args1= ac_must_keep_next=false for ac_pass in 1 2 do for ac_arg do case $ac_arg in -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) continue ;; *\'*) ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; esac case $ac_pass in 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; 2) as_fn_append ac_configure_args1 " '$ac_arg'" if test $ac_must_keep_next = true; then ac_must_keep_next=false # Got value, back to normal. else case $ac_arg in *=* | --config-cache | -C | -disable-* | --disable-* \ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ | -with-* | --with-* | -without-* | --without-* | --x) case "$ac_configure_args0 " in "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; esac ;; -* ) ac_must_keep_next=true ;; esac fi as_fn_append ac_configure_args " '$ac_arg'" ;; esac done done { ac_configure_args0=; unset ac_configure_args0;} { ac_configure_args1=; unset ac_configure_args1;} # When interrupted or exit'd, cleanup temporary files, and complete # config.log. We remove comments because anyway the quotes in there # would cause problems or look ugly. # WARNING: Use '\'' to represent an apostrophe within the trap. # WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. trap 'exit_status=$? # Save into config.log some information that might help in debugging. { echo $as_echo "## ---------------- ## ## Cache variables. ## ## ---------------- ##" echo # The following way of writing the cache mishandles newlines in values, ( for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( *${as_nl}ac_space=\ *) sed -n \ "s/'\''/'\''\\\\'\'''\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" ;; #( *) sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) echo $as_echo "## ----------------- ## ## Output variables. ## ## ----------------- ##" echo for ac_var in $ac_subst_vars do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo if test -n "$ac_subst_files"; then $as_echo "## ------------------- ## ## File substitutions. ## ## ------------------- ##" echo for ac_var in $ac_subst_files do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo fi if test -s confdefs.h; then $as_echo "## ----------- ## ## confdefs.h. 
## ## ----------- ##" echo cat confdefs.h echo fi test "$ac_signal" != 0 && $as_echo "$as_me: caught signal $ac_signal" $as_echo "$as_me: exit $exit_status" } >&5 rm -f core *.core core.conftest.* && rm -f -r conftest* confdefs* conf$$* $ac_clean_files && exit $exit_status ' 0 for ac_signal in 1 2 13 15; do trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal done ac_signal=0 # confdefs.h avoids OS command line length limits that DEFS can exceed. rm -f -r conftest* confdefs.h $as_echo "/* confdefs.h */" > confdefs.h # Predefined preprocessor variables. cat >>confdefs.h <<_ACEOF #define PACKAGE_NAME "$PACKAGE_NAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_TARNAME "$PACKAGE_TARNAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_VERSION "$PACKAGE_VERSION" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_STRING "$PACKAGE_STRING" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_URL "$PACKAGE_URL" _ACEOF # Let the site file select an alternate cache file if it wants to. # Prefer an explicitly selected file to automatically selected ones. ac_site_file1=NONE ac_site_file2=NONE if test -n "$CONFIG_SITE"; then # We do not want a PATH search for config.site. case $CONFIG_SITE in #(( -*) ac_site_file1=./$CONFIG_SITE;; */*) ac_site_file1=$CONFIG_SITE;; *) ac_site_file1=./$CONFIG_SITE;; esac elif test "x$prefix" != xNONE; then ac_site_file1=$prefix/share/config.site ac_site_file2=$prefix/etc/config.site else ac_site_file1=$ac_default_prefix/share/config.site ac_site_file2=$ac_default_prefix/etc/config.site fi for ac_site_file in "$ac_site_file1" "$ac_site_file2" do test "x$ac_site_file" = xNONE && continue if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 $as_echo "$as_me: loading site script $ac_site_file" >&6;} sed 's/^/| /' "$ac_site_file" >&5 . "$ac_site_file" \ || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "failed to load site script $ac_site_file See \`config.log' for more details" "$LINENO" 5; } fi done if test -r "$cache_file"; then # Some versions of bash will fail to source /dev/null (special files # actually), so we avoid doing that. DJGPP emulates it as a regular file. if test /dev/null != "$cache_file" && test -f "$cache_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 $as_echo "$as_me: loading cache $cache_file" >&6;} case $cache_file in [\\/]* | ?:[\\/]* ) . "$cache_file";; *) . "./$cache_file";; esac fi else { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 $as_echo "$as_me: creating cache $cache_file" >&6;} >$cache_file fi gt_needs="$gt_needs " as_fn_append ac_header_list " sys/time.h" as_fn_append ac_header_list " unistd.h" as_fn_append ac_func_list " alarm" # Check that the precious variables saved in the cache have kept the same # value. 
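# A hypothetical sequence illustrating the precious-variable check below
# (-C enables the configuration cache; the CXXFLAGS values are arbitrary):
#
#   ./configure -C CXXFLAGS="-O2"   # first run records CXXFLAGS in the cache
#   ./configure -C CXXFLAGS="-O0"   # the changed value trips this check,
#                                   # which aborts and suggests running
#                                   # `make distclean' and/or removing the
#                                   # cache file before starting over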
ac_cache_corrupted=false for ac_var in $ac_precious_vars; do eval ac_old_set=\$ac_cv_env_${ac_var}_set eval ac_new_set=\$ac_env_${ac_var}_set eval ac_old_val=\$ac_cv_env_${ac_var}_value eval ac_new_val=\$ac_env_${ac_var}_value case $ac_old_set,$ac_new_set in set,) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} ac_cache_corrupted=: ;; ,set) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} ac_cache_corrupted=: ;; ,);; *) if test "x$ac_old_val" != "x$ac_new_val"; then # differences in whitespace do not lead to failure. ac_old_val_w=`echo x $ac_old_val` ac_new_val_w=`echo x $ac_new_val` if test "$ac_old_val_w" != "$ac_new_val_w"; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 $as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} ac_cache_corrupted=: else { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 $as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} eval $ac_var=\$ac_old_val fi { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 $as_echo "$as_me: former value: \`$ac_old_val'" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 $as_echo "$as_me: current value: \`$ac_new_val'" >&2;} fi;; esac # Pass precious variables to config.status. if test "$ac_new_set" = set; then case $ac_new_val in *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; *) ac_arg=$ac_var=$ac_new_val ;; esac case " $ac_configure_args " in *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. *) as_fn_append ac_configure_args " '$ac_arg'" ;; esac fi done if $ac_cache_corrupted; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 $as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 fi ## -------------------- ## ## Main body of script. ## ## -------------------- ## ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu am__api_version='1.13' ac_aux_dir= for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do if test -f "$ac_dir/install-sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install-sh -c" break elif test -f "$ac_dir/install.sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install.sh -c" break elif test -f "$ac_dir/shtool"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/shtool install -c" break fi done if test -z "$ac_aux_dir"; then as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5 fi # These three variables are undocumented and unsupported, # and are intended to be withdrawn in a future Autoconf release. 
# They can cause serious problems if a builder's source tree is in a directory # whose full name contains unusual characters. ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. # Find a good install program. We prefer a C program (faster), # so one script is as good as another. But avoid the broken or # incompatible versions: # SysV /etc/install, /usr/sbin/install # SunOS /usr/etc/install # IRIX /sbin/install # AIX /bin/install # AmigaOS /C/install, which installs bootblocks on floppy discs # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag # AFS /usr/afsws/bin/install, which mishandles nonexistent args # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" # OS/2's system install, which has a completely different semantic # ./install, which can be erroneously created by make from ./install.sh. # Reject install programs that cannot install multiple files. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 $as_echo_n "checking for a BSD-compatible install... " >&6; } if test -z "$INSTALL"; then if ${ac_cv_path_install+:} false; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. # Account for people who put trailing slashes in PATH elements. case $as_dir/ in #(( ./ | .// | /[cC]/* | \ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ /usr/ucb/* ) ;; *) # OSF1 and SCO ODT 3.0 have their own names for install. # Don't use installbsd from OSF since it installs stuff as root # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. : elif test $ac_prog = install && grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # program-specific install script used by HP pwplus--don't use. : else rm -rf conftest.one conftest.two conftest.dir echo one > conftest.one echo two > conftest.two mkdir conftest.dir if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && test -s conftest.one && test -s conftest.two && test -s conftest.dir/conftest.one && test -s conftest.dir/conftest.two then ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" break 3 fi fi fi done done ;; esac done IFS=$as_save_IFS rm -rf conftest.one conftest.two conftest.dir fi if test "${ac_cv_path_install+set}" = set; then INSTALL=$ac_cv_path_install else # As a last resort, use the slow shell script. Don't cache a # value for INSTALL within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. INSTALL=$ac_install_sh fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 $as_echo "$INSTALL" >&6; } # Use test -z because SunOS4 sh mishandles braces in ${var-val}. # It thinks the first close brace ends the variable substitution. 
test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 $as_echo_n "checking whether build environment is sane... " >&6; } # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' ' case `pwd` in *[\\\"\#\$\&\'\`$am_lf]*) as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;; esac case $srcdir in *[\\\"\#\$\&\'\`$am_lf\ \ ]*) as_fn_error $? "unsafe srcdir value: '$srcdir'" "$LINENO" 5;; esac # Do 'set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( am_has_slept=no for am_try in 1 2; do echo "timestamp, slept: $am_has_slept" > conftest.file set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$*" = "X"; then # -L didn't work. set X `ls -t "$srcdir/configure" conftest.file` fi if test "$*" != "X $srcdir/configure conftest.file" \ && test "$*" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". as_fn_error $? "ls -t appears to fail. Make sure there is not a broken alias in your environment" "$LINENO" 5 fi if test "$2" = conftest.file || test $am_try -eq 2; then break fi # Just in case. sleep 1 am_has_slept=yes done test "$2" = conftest.file ) then # Ok. : else as_fn_error $? "newly created file is older than distributed files! Check your system clock" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } # If we didn't sleep, we still need to ensure time stamps of config.status and # generated files are strictly newer. am_sleep_pid= if grep 'slept: no' conftest.file >/dev/null 2>&1; then ( sleep 1 ) & am_sleep_pid=$! fi rm -f conftest.file test "$program_prefix" != NONE && program_transform_name="s&^&$program_prefix&;$program_transform_name" # Use a double $ so make ignores it. test "$program_suffix" != NONE && program_transform_name="s&\$&$program_suffix&;$program_transform_name" # Double any \ or $. # By default was `s,x,x', remove it if useless. ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` # expand $ac_aux_dir to an absolute path am_aux_dir=`cd $ac_aux_dir && pwd` if test x"${MISSING+set}" != xset; then case $am_aux_dir in *\ * | *\ *) MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; *) MISSING="\${SHELL} $am_aux_dir/missing" ;; esac fi # Use eval to expand $SHELL if eval "$MISSING --is-lightweight"; then am_missing_run="$MISSING " else am_missing_run= { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: 'missing' script is too old or missing" >&5 $as_echo "$as_me: WARNING: 'missing' script is too old or missing" >&2;} fi if test x"${install_sh}" != xset; then case $am_aux_dir in *\ * | *\ *) install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; *) install_sh="\${SHELL} $am_aux_dir/install-sh" esac fi # Installed binaries are usually stripped using 'strip' when the user # run "make install-strip". 
However 'strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the 'STRIP' environment variable to overrule this program. if test "$cross_compiling" != no; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 $as_echo "$STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 $as_echo "$ac_ct_STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_STRIP" = x; then STRIP=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac STRIP=$ac_ct_STRIP fi else STRIP="$ac_cv_prog_STRIP" fi fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 $as_echo_n "checking for a thread-safe mkdir -p... " >&6; } if test -z "$MKDIR_P"; then if ${ac_cv_path_mkdir+:} false; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_prog in mkdir gmkdir; do for ac_exec_ext in '' $ac_executable_extensions; do as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( 'mkdir (GNU coreutils) '* | \ 'mkdir (coreutils) '* | \ 'mkdir (fileutils) '4.1*) ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext break 3;; esac done done done IFS=$as_save_IFS fi test -d ./--version && rmdir ./--version if test "${ac_cv_path_mkdir+set}" = set; then MKDIR_P="$ac_cv_path_mkdir -p" else # As a last resort, use the slow shell script. Don't cache a # value for MKDIR_P within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. MKDIR_P="$ac_install_sh -d" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 $as_echo "$MKDIR_P" >&6; } for ac_prog in gawk mawk nawk awk do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_AWK+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$AWK"; then ac_cv_prog_AWK="$AWK" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AWK="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi AWK=$ac_cv_prog_AWK if test -n "$AWK"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 $as_echo "$AWK" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$AWK" && break done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 $as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } set x ${MAKE-make} ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : $as_echo_n "(cached) " >&6 else cat >conftest.make <<\_ACEOF SHELL = /bin/sh all: @echo '@@@%%%=$(MAKE)=@@@%%%' _ACEOF # GNU make sometimes prints "make[1]: Entering ...", which would confuse us. case `${MAKE-make} -f conftest.make 2>/dev/null` in *@@@%%%=?*=@@@%%%*) eval ac_cv_prog_make_${ac_make}_set=yes;; *) eval ac_cv_prog_make_${ac_make}_set=no;; esac rm -f conftest.make fi if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } SET_MAKE= else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } SET_MAKE="MAKE=${MAKE-make}" fi rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null # Check whether --enable-silent-rules was given. if test "${enable_silent_rules+set}" = set; then : enableval=$enable_silent_rules; fi case $enable_silent_rules in # ((( yes) AM_DEFAULT_VERBOSITY=0;; no) AM_DEFAULT_VERBOSITY=1;; *) AM_DEFAULT_VERBOSITY=1;; esac am_make=${MAKE-make} { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5 $as_echo_n "checking whether $am_make supports nested variables... 
" >&6; } if ${am_cv_make_support_nested_variables+:} false; then : $as_echo_n "(cached) " >&6 else if $as_echo 'TRUE=$(BAR$(V)) BAR0=false BAR1=true V=1 am__doit: @$(TRUE) .PHONY: am__doit' | $am_make -f - >/dev/null 2>&1; then am_cv_make_support_nested_variables=yes else am_cv_make_support_nested_variables=no fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_make_support_nested_variables" >&5 $as_echo "$am_cv_make_support_nested_variables" >&6; } if test $am_cv_make_support_nested_variables = yes; then AM_V='$(V)' AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' else AM_V=$AM_DEFAULT_VERBOSITY AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY fi AM_BACKSLASH='\' if test "`cd $srcdir && pwd`" != "`pwd`"; then # Use -I$(srcdir) only when $(srcdir) != ., so that make's output # is not polluted with repeated "-I." am__isrc=' -I$(srcdir)' # test to see if srcdir already configured if test -f $srcdir/config.status; then as_fn_error $? "source directory already configured; run \"make distclean\" there first" "$LINENO" 5 fi fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi # Define the identity of the package. PACKAGE='nordugrid-arc' VERSION='6.14.0' cat >>confdefs.h <<_ACEOF #define PACKAGE "$PACKAGE" _ACEOF cat >>confdefs.h <<_ACEOF #define VERSION "$VERSION" _ACEOF # Some tools Automake needs. ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} # For better backward compatibility. To be removed once Automake 1.9.x # dies out for good. For more background, see: # # mkdir_p='$(MKDIR_P)' # We need awk for the "check" target. The system "awk" is bad on # some platforms. # Always define AMTAR for backward compatibility. Yes, it's still used # in the wild :-( We should find a proper way to deprecate it ... AMTAR='$${TAR-tar}' # We'll loop over all known methods to create a tar archive until one works. _am_tools='gnutar pax cpio none' { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to create a pax tar archive" >&5 $as_echo_n "checking how to create a pax tar archive... " >&6; } # Go ahead even if we have the value already cached. We do so because we # need to set the values for the 'am__tar' and 'am__untar' variables. _am_tools=${am_cv_prog_tar_pax-$_am_tools} for _am_tool in $_am_tools; do case $_am_tool in gnutar) for _am_tar in tar gnutar gtar; do { echo "$as_me:$LINENO: $_am_tar --version" >&5 ($_am_tar --version) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && break done am__tar="$_am_tar --format=posix -chf - "'"$$tardir"' am__tar_="$_am_tar --format=posix -chf - "'"$tardir"' am__untar="$_am_tar -xf -" ;; plaintar) # Must skip GNU tar: if it does not support --format= it doesn't create # ustar tarball either. (tar --version) >/dev/null 2>&1 && continue am__tar='tar chf - "$$tardir"' am__tar_='tar chf - "$tardir"' am__untar='tar xf -' ;; pax) am__tar='pax -L -x pax -w "$$tardir"' am__tar_='pax -L -x pax -w "$tardir"' am__untar='pax -r' ;; cpio) am__tar='find "$$tardir" -print | cpio -o -H pax -L' am__tar_='find "$tardir" -print | cpio -o -H pax -L' am__untar='cpio -i -H pax -d' ;; none) am__tar=false am__tar_=false am__untar=false ;; esac # If the value was cached, stop now. 
We just wanted to have am__tar # and am__untar set. test -n "${am_cv_prog_tar_pax}" && break # tar/untar a dummy directory, and stop if the command works. rm -rf conftest.dir mkdir conftest.dir echo GrepMe > conftest.dir/file { echo "$as_me:$LINENO: tardir=conftest.dir && eval $am__tar_ >conftest.tar" >&5 (tardir=conftest.dir && eval $am__tar_ >conftest.tar) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } rm -rf conftest.dir if test -s conftest.tar; then { echo "$as_me:$LINENO: $am__untar &5 ($am__untar &5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } { echo "$as_me:$LINENO: cat conftest.dir/file" >&5 (cat conftest.dir/file) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } grep GrepMe conftest.dir/file >/dev/null 2>&1 && break fi done rm -rf conftest.dir if ${am_cv_prog_tar_pax+:} false; then : $as_echo_n "(cached) " >&6 else am_cv_prog_tar_pax=$_am_tool fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_tar_pax" >&5 $as_echo "$am_cv_prog_tar_pax" >&6; } ac_config_headers="$ac_config_headers config.h" baseversion=`echo $VERSION | sed 's/[^0-9.].*//'` preversion=`echo $VERSION | sed 's/^[0-9.]*//'` if test "x$baseversion" = "x" ; then baseversion=$VERSION preversion="" fi if test "x$preversion" = "x" ; then fedorarelease="1" fedorasetupopts="-q" debianversion="$baseversion" else fedorarelease="0.$preversion" fedorasetupopts="-q -n %{name}-%{version}$preversion" debianversion="$baseversion~$preversion" fi # numeric ARC_VERSION_* used for API fall back to current release seriese (e.g. when 'master' is specified in VESRION file, the "6.0.0" will be used) ARC_VERSION_MAJOR=`echo $VERSION | awk -F. '{print match($1, /^[0-9]+$/) ? $1 : "6"}'` ARC_VERSION_MINOR=`echo $VERSION | awk -F. '{print match($2, /[^ ]/) ? $2 : "0"}'` ARC_VERSION_PATCH=`echo $VERSION | awk -F. '{print match($3, /[^ ]/) ? $3 : "0"}'` ARC_VERSION_NUM=`printf "0x%02x%02x%02x" $ARC_VERSION_MAJOR $ARC_VERSION_MINOR $ARC_VERSION_PATCH` ARC_VERSION=`echo $ARC_VERSION_MAJOR.$ARC_VERSION_MINOR.$ARC_VERSION_PATCH` # This macro was introduced in autoconf 2.57g? but we currently only require 2.56 ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu if test -z "$CXX"; then if test -n "$CCC"; then CXX=$CCC else if test -n "$ac_tool_prefix"; then for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CXX+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CXX"; then ac_cv_prog_CXX="$CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CXX=$ac_cv_prog_CXX if test -n "$CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 $as_echo "$CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CXX" && break done fi if test -z "$CXX"; then ac_ct_CXX=$CXX for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CXX+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CXX"; then ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CXX="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CXX=$ac_cv_prog_ac_ct_CXX if test -n "$ac_ct_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5 $as_echo "$ac_ct_CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CXX" && break done if test "x$ac_ct_CXX" = x; then CXX="g++" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CXX=$ac_ct_CXX fi fi fi fi # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" # Try to create an executable without -o first, disregard a.out. # It will help us diagnose broken compilers, and finding out an intuition # of exeext. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C++ compiler works" >&5 $as_echo_n "checking whether the C++ compiler works... 
" >&6; } ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` # The possible output files: ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" ac_rmfiles= for ac_file in $ac_files do case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; * ) ac_rmfiles="$ac_rmfiles $ac_file";; esac done rm -f $ac_rmfiles if { { ac_try="$ac_link_default" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link_default") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then : # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. # So ignore a value of `no', otherwise this would lead to `EXEEXT = no' # in a Makefile. We should not override ac_cv_exeext if it was cached, # so that the user can short-circuit this test for compilers unknown to # Autoconf. for ac_file in $ac_files '' do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; [ab].out ) # We found the default executable, but exeext='' is most # certainly right. break;; *.* ) if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; then :; else ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` fi # We set ac_cv_exeext here because the later test for it is not # safe: cross compilers may not add the suffix if given an `-o' # argument, so we may need to know it at that point already. # Even if this section looks crufty: it has the advantage of # actually working. break;; * ) break;; esac done test "$ac_cv_exeext" = no && ac_cv_exeext= else ac_file='' fi if test -z "$ac_file"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "C++ compiler cannot create executables See \`config.log' for more details" "$LINENO" 5; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler default output file name" >&5 $as_echo_n "checking for C++ compiler default output file name... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 $as_echo "$ac_file" >&6; } ac_exeext=$ac_cv_exeext rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out ac_clean_files=$ac_clean_files_save { $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 $as_echo_n "checking for suffix of executables... " >&6; } if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then : # If both `conftest.exe' and `conftest' are `present' (well, observable) # catch `conftest.exe'. For instance with Cygwin, `ls conftest' will # work properly (i.e., refer to `conftest.exe'), while it won't with # `rm'. 
for ac_file in conftest.exe conftest conftest.*; do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` break;; * ) break;; esac done else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot compute suffix of executables: cannot compile and link See \`config.log' for more details" "$LINENO" 5; } fi rm -f conftest conftest$ac_cv_exeext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 $as_echo "$ac_cv_exeext" >&6; } rm -f conftest.$ac_ext EXEEXT=$ac_cv_exeext ac_exeext=$EXEEXT cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { FILE *f = fopen ("conftest.out", "w"); return ferror (f) || fclose (f) != 0; ; return 0; } _ACEOF ac_clean_files="$ac_clean_files conftest.out" # Check that the compiler produces executables we can run. If not, either # the compiler is broken, or we cross compile. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 $as_echo_n "checking whether we are cross compiling... " >&6; } if test "$cross_compiling" != yes; then { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } if { ac_try='./conftest$ac_cv_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; }; then cross_compiling=no else if test "$cross_compiling" = maybe; then cross_compiling=yes else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot run C++ compiled programs. If you meant to cross compile, use \`--host'. See \`config.log' for more details" "$LINENO" 5; } fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 $as_echo "$cross_compiling" >&6; } rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out ac_clean_files=$ac_clean_files_save { $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 $as_echo_n "checking for suffix of object files... " >&6; } if ${ac_cv_objext+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.o conftest.obj if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; then : for ac_file in conftest.o conftest.obj conftest.*; do test -f "$ac_file" || continue; case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` break;; esac done else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot compute suffix of object files: cannot compile See \`config.log' for more details" "$LINENO" 5; } fi rm -f conftest.$ac_cv_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 $as_echo "$ac_cv_objext" >&6; } OBJEXT=$ac_cv_objext ac_objext=$OBJEXT { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5 $as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; } if ${ac_cv_cxx_compiler_gnu+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_cxx_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5 $as_echo "$ac_cv_cxx_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GXX=yes else GXX= fi ac_test_CXXFLAGS=${CXXFLAGS+set} ac_save_CXXFLAGS=$CXXFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5 $as_echo_n "checking whether $CXX accepts -g... " >&6; } if ${ac_cv_prog_cxx_g+:} false; then : $as_echo_n "(cached) " >&6 else ac_save_cxx_werror_flag=$ac_cxx_werror_flag ac_cxx_werror_flag=yes ac_cv_prog_cxx_g=no CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes else CXXFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : else ac_cxx_werror_flag=$ac_save_cxx_werror_flag CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cxx_werror_flag=$ac_save_cxx_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5 $as_echo "$ac_cv_prog_cxx_g" >&6; } if test "$ac_test_CXXFLAGS" = set; then CXXFLAGS=$ac_save_CXXFLAGS elif test $ac_cv_prog_cxx_g = yes; then if test "$GXX" = yes; then CXXFLAGS="-g -O2" else CXXFLAGS="-g" fi else if test "$GXX" = yes; then CXXFLAGS="-O2" else CXXFLAGS= fi fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu DEPDIR="${am__leading_dot}deps" ac_config_commands="$ac_config_commands depfiles" am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo this is the am__doit target .PHONY: am__doit END # If we don't find an include directive, just comment out the code. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5 $as_echo_n "checking for style of include used by $am_make... " >&6; } am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # Ignore all kinds of additional output from 'make'. case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=include am__quote= _am_result=GNU ;; esac # Now try BSD make style include. if test "$am__include" = "#"; then echo '.include "confinc"' > confmf case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=.include am__quote="\"" _am_result=BSD ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5 $as_echo "$_am_result" >&6; } rm -f confinc confmf # Check whether --enable-dependency-tracking was given. if test "${enable_dependency_tracking+set}" = set; then : enableval=$enable_dependency_tracking; fi if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' am__nodep='_no' fi if test "x$enable_dependency_tracking" != xno; then AMDEP_TRUE= AMDEP_FALSE='#' else AMDEP_TRUE='#' AMDEP_FALSE= fi depcc="$CXX" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if ${am_cv_CXX_dependencies_compiler_type+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. 
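# Probe which dependency-tracking mode depcomp can use with this C++
# compiler: a dummy source including six generated headers is compiled
# under each candidate depmode, and the first mode whose sub/conftest.Po
# lists those headers is cached as am_cv_CXX_dependencies_compiler_type.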
mkdir sub am_cv_CXX_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CXX_dependencies_compiler_type=$depmode break fi fi done cd .. 
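# Leave and remove the scratch directory; the detected mode feeds
# CXXDEPMODE and the am__fastdepCXX_TRUE/FALSE conditionals below, which
# let Automake use the fast "gcc3" dependency style when it is available.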
rm -rf conftest.dir else am_cv_CXX_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5 $as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; } CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CXX_dependencies_compiler_type" = gcc3; then am__fastdepCXX_TRUE= am__fastdepCXX_FALSE='#' else am__fastdepCXX_TRUE='#' am__fastdepCXX_FALSE= fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. set dummy ${ac_tool_prefix}gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "gcc", so it can be a program name with args. set dummy gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi else CC="$ac_cv_prog_CC" fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. set dummy ${ac_tool_prefix}cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi fi if test -z "$CC"; then # Extract the first word of "cc", so it can be a program name with args. set dummy cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else ac_prog_rejected=no as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue fi ac_cv_prog_CC="cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS if test $ac_prog_rejected = yes; then # We found a bogon in the path, so make sure we never use it. set dummy $ac_cv_prog_CC shift if test $# != 0; then # We chose a different compiler from the bogus one. # However, it has the same basename, so the bogon will be chosen # first if we set CC to just the basename; use the full file name. shift ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" fi fi fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then for ac_prog in cl.exe do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CC" && break done fi if test -z "$CC"; then ac_ct_CC=$CC for ac_prog in cl.exe do # Extract the first word of "$ac_prog", so it can be a program name with args. 
set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CC" && break done if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi fi fi test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "no acceptable C compiler found in \$PATH See \`config.log' for more details" "$LINENO" 5; } # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 $as_echo_n "checking whether we are using the GNU C compiler... " >&6; } if ${ac_cv_c_compiler_gnu+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_c_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 $as_echo "$ac_cv_c_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GCC=yes else GCC= fi ac_test_CFLAGS=${CFLAGS+set} ac_save_CFLAGS=$CFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 $as_echo_n "checking whether $CC accepts -g... " >&6; } if ${ac_cv_prog_cc_g+:} false; then : $as_echo_n "(cached) " >&6 else ac_save_c_werror_flag=$ac_c_werror_flag ac_c_werror_flag=yes ac_cv_prog_cc_g=no CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes else CFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else ac_c_werror_flag=$ac_save_c_werror_flag CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_c_werror_flag=$ac_save_c_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 $as_echo "$ac_cv_prog_cc_g" >&6; } if test "$ac_test_CFLAGS" = set; then CFLAGS=$ac_save_CFLAGS elif test $ac_cv_prog_cc_g = yes; then if test "$GCC" = yes; then CFLAGS="-g -O2" else CFLAGS="-g" fi else if test "$GCC" = yes; then CFLAGS="-O2" else CFLAGS= fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 $as_echo_n "checking for $CC option to accept ISO C89... " >&6; } if ${ac_cv_prog_cc_c89+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_prog_cc_c89=no ac_save_CC=$CC cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include struct stat; /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); static char *e (p, i) char **p; int i; { return p[i]; } static char *f (char * (*g) (char **, int), char **p, ...) { char *s; va_list v; va_start (v,p); s = g (p, va_arg (v,int)); va_end (v); return s; } /* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has function prototypes and stuff, but not '\xHH' hex character constants. These don't provoke an error unfortunately, instead are silently treated as 'x'. The following induces an error, until -std is added to get proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an array size at least. It's necessary to write '\x00'==0 to get something that's true only with -std. */ int osf4_cc_array ['\x00' == 0 ? 1 : -1]; /* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters inside strings and character constants. */ #define FOO(x) 'x' int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; int test (int i, double x); struct s1 {int (*f) (int a);}; struct s2 {int (*f) (double a);}; int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); int argc; char **argv; int main () { return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ; return 0; } _ACEOF for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_c89=$ac_arg fi rm -f core conftest.err conftest.$ac_objext test "x$ac_cv_prog_cc_c89" != "xno" && break done rm -f conftest.$ac_ext CC=$ac_save_CC fi # AC_CACHE_VAL case "x$ac_cv_prog_cc_c89" in x) { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 $as_echo "none needed" >&6; } ;; xno) { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 $as_echo "unsupported" >&6; } ;; *) CC="$CC $ac_cv_prog_cc_c89" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 $as_echo "$ac_cv_prog_cc_c89" >&6; } ;; esac if test "x$ac_cv_prog_cc_c89" != xno; then : fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu depcc="$CC" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if ${am_cv_CC_dependencies_compiler_type+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CC_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. 
It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CC_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_CC_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 $as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then am__fastdepCC_TRUE= am__fastdepCC_FALSE='#' else am__fastdepCC_TRUE='#' am__fastdepCC_FALSE= fi case $ac_cv_prog_cc_stdc in #( no) : ac_cv_prog_cc_c99=no; ac_cv_prog_cc_c89=no ;; #( *) : { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C99" >&5 $as_echo_n "checking for $CC option to accept ISO C99... " >&6; } if ${ac_cv_prog_cc_c99+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_prog_cc_c99=no ac_save_CC=$CC cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include #include #include // Check varargs macros. These examples are taken from C99 6.10.3.5. #define debug(...) fprintf (stderr, __VA_ARGS__) #define showlist(...) puts (#__VA_ARGS__) #define report(test,...) ((test) ? puts (#test) : printf (__VA_ARGS__)) static void test_varargs_macros (void) { int x = 1234; int y = 5678; debug ("Flag"); debug ("X = %d\n", x); showlist (The first, second, and third items.); report (x>y, "x is %d but y is %d", x, y); } // Check long long types. 
#define BIG64 18446744073709551615ull #define BIG32 4294967295ul #define BIG_OK (BIG64 / BIG32 == 4294967297ull && BIG64 % BIG32 == 0) #if !BIG_OK your preprocessor is broken; #endif #if BIG_OK #else your preprocessor is broken; #endif static long long int bignum = -9223372036854775807LL; static unsigned long long int ubignum = BIG64; struct incomplete_array { int datasize; double data[]; }; struct named_init { int number; const wchar_t *name; double average; }; typedef const char *ccp; static inline int test_restrict (ccp restrict text) { // See if C++-style comments work. // Iterate through items via the restricted pointer. // Also check for declarations in for loops. for (unsigned int i = 0; *(text+i) != '\0'; ++i) continue; return 0; } // Check varargs and va_copy. static void test_varargs (const char *format, ...) { va_list args; va_start (args, format); va_list args_copy; va_copy (args_copy, args); const char *str; int number; float fnumber; while (*format) { switch (*format++) { case 's': // string str = va_arg (args_copy, const char *); break; case 'd': // int number = va_arg (args_copy, int); break; case 'f': // float fnumber = va_arg (args_copy, double); break; default: break; } } va_end (args_copy); va_end (args); } int main () { // Check bool. _Bool success = false; // Check restrict. if (test_restrict ("String literal") == 0) success = true; char *restrict newvar = "Another string"; // Check varargs. test_varargs ("s, d' f .", "string", 65, 34.234); test_varargs_macros (); // Check flexible array members. struct incomplete_array *ia = malloc (sizeof (struct incomplete_array) + (sizeof (double) * 10)); ia->datasize = 10; for (int i = 0; i < ia->datasize; ++i) ia->data[i] = i * 1.234; // Check named initializers. struct named_init ni = { .number = 34, .name = L"Test wide string", .average = 543.34343, }; ni.number = 58; int dynamic_array[ni.number]; dynamic_array[ni.number - 1] = 543; // work around unused variable warnings return (!success || bignum == 0LL || ubignum == 0uLL || newvar[0] == 'x' || dynamic_array[ni.number - 1] != 543); ; return 0; } _ACEOF for ac_arg in '' -std=gnu99 -std=c99 -c99 -AC99 -D_STDC_C99= -qlanglvl=extc99 do CC="$ac_save_CC $ac_arg" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_c99=$ac_arg fi rm -f core conftest.err conftest.$ac_objext test "x$ac_cv_prog_cc_c99" != "xno" && break done rm -f conftest.$ac_ext CC=$ac_save_CC fi # AC_CACHE_VAL case "x$ac_cv_prog_cc_c99" in x) { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 $as_echo "none needed" >&6; } ;; xno) { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 $as_echo "unsupported" >&6; } ;; *) CC="$CC $ac_cv_prog_cc_c99" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c99" >&5 $as_echo "$ac_cv_prog_cc_c99" >&6; } ;; esac if test "x$ac_cv_prog_cc_c99" != xno; then : ac_cv_prog_cc_stdc=$ac_cv_prog_cc_c99 else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 $as_echo_n "checking for $CC option to accept ISO C89... " >&6; } if ${ac_cv_prog_cc_c89+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_prog_cc_c89=no ac_save_CC=$CC cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include struct stat; /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); static char *e (p, i) char **p; int i; { return p[i]; } static char *f (char * (*g) (char **, int), char **p, ...) 
{ char *s; va_list v; va_start (v,p); s = g (p, va_arg (v,int)); va_end (v); return s; } /* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has function prototypes and stuff, but not '\xHH' hex character constants. These don't provoke an error unfortunately, instead are silently treated as 'x'. The following induces an error, until -std is added to get proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an array size at least. It's necessary to write '\x00'==0 to get something that's true only with -std. */ int osf4_cc_array ['\x00' == 0 ? 1 : -1]; /* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters inside strings and character constants. */ #define FOO(x) 'x' int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1]; int test (int i, double x); struct s1 {int (*f) (int a);}; struct s2 {int (*f) (double a);}; int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); int argc; char **argv; int main () { return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ; return 0; } _ACEOF for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_c89=$ac_arg fi rm -f core conftest.err conftest.$ac_objext test "x$ac_cv_prog_cc_c89" != "xno" && break done rm -f conftest.$ac_ext CC=$ac_save_CC fi # AC_CACHE_VAL case "x$ac_cv_prog_cc_c89" in x) { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 $as_echo "none needed" >&6; } ;; xno) { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 $as_echo "unsupported" >&6; } ;; *) CC="$CC $ac_cv_prog_cc_c89" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 $as_echo "$ac_cv_prog_cc_c89" >&6; } ;; esac if test "x$ac_cv_prog_cc_c89" != xno; then : ac_cv_prog_cc_stdc=$ac_cv_prog_cc_c89 else ac_cv_prog_cc_stdc=no fi fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO Standard C" >&5 $as_echo_n "checking for $CC option to accept ISO Standard C... " >&6; } if ${ac_cv_prog_cc_stdc+:} false; then : $as_echo_n "(cached) " >&6 fi case $ac_cv_prog_cc_stdc in #( no) : { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 $as_echo "unsupported" >&6; } ;; #( '') : { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 $as_echo "none needed" >&6; } ;; #( *) : { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_stdc" >&5 $as_echo "$ac_cv_prog_cc_stdc" >&6; } ;; esac ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 $as_echo_n "checking how to run the C preprocessor... " >&6; } # On Suns, sometimes $CPP names a directory. if test -n "$CPP" && test -d "$CPP"; then CPP= fi if test -z "$CPP"; then if ${ac_cv_prog_CPP+:} false; then : $as_echo_n "(cached) " >&6 else # Double quotes because CPP needs to be expanded for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" do ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. 
"Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CPP=$CPP fi CPP=$ac_cv_prog_CPP else ac_cv_prog_CPP=$CPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 $as_echo "$CPP" >&6; } ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "C preprocessor \"$CPP\" fails sanity check See \`config.log' for more details" "$LINENO" 5; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 $as_echo_n "checking for grep that handles long lines and -e... " >&6; } if ${ac_cv_path_GREP+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$GREP"; then ac_path_GREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in grep ggrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_GREP" || continue # Check for GNU ac_path_GREP and select it if it is found. 
# Check for GNU $ac_path_GREP case `"$ac_path_GREP" --version 2>&1` in *GNU*) ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'GREP' >> "conftest.nl" "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_GREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_GREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_GREP"; then as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_GREP=$GREP fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 $as_echo "$ac_cv_path_GREP" >&6; } GREP="$ac_cv_path_GREP" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 $as_echo_n "checking for egrep... " >&6; } if ${ac_cv_path_EGREP+:} false; then : $as_echo_n "(cached) " >&6 else if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 then ac_cv_path_EGREP="$GREP -E" else if test -z "$EGREP"; then ac_path_EGREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in egrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_EGREP" || continue # Check for GNU ac_path_EGREP and select it if it is found. # Check for GNU $ac_path_EGREP case `"$ac_path_EGREP" --version 2>&1` in *GNU*) ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'EGREP' >> "conftest.nl" "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_EGREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_EGREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_EGREP"; then as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_EGREP=$EGREP fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 $as_echo "$ac_cv_path_EGREP" >&6; } EGREP="$ac_cv_path_EGREP" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 $as_echo_n "checking for ANSI C header files... " >&6; } if ${ac_cv_header_stdc+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #include #include #include int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_stdc=yes else ac_cv_header_stdc=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test $ac_cv_header_stdc = yes; then # SunOS 4.x string.h does not declare mem*, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "memchr" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "free" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. if test "$cross_compiling" = yes; then : : else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #if ((' ' & 0x0FF) == 0x020) # define ISLOWER(c) ('a' <= (c) && (c) <= 'z') # define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) #else # define ISLOWER(c) \ (('a' <= (c) && (c) <= 'i') \ || ('j' <= (c) && (c) <= 'r') \ || ('s' <= (c) && (c) <= 'z')) # define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) #endif #define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) int main () { int i; for (i = 0; i < 256; i++) if (XOR (islower (i), ISLOWER (i)) || toupper (i) != TOUPPER (i)) return 2; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : else ac_cv_header_stdc=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 $as_echo "$ac_cv_header_stdc" >&6; } if test $ac_cv_header_stdc = yes; then $as_echo "#define STDC_HEADERS 1" >>confdefs.h fi # On IRIX 5.3, sys/types and inttypes.h are conflicting. for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ inttypes.h stdint.h unistd.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default " if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done ac_fn_c_check_header_mongrel "$LINENO" "minix/config.h" "ac_cv_header_minix_config_h" "$ac_includes_default" if test "x$ac_cv_header_minix_config_h" = xyes; then : MINIX=yes else MINIX= fi if test "$MINIX" = yes; then $as_echo "#define _POSIX_SOURCE 1" >>confdefs.h $as_echo "#define _POSIX_1_SOURCE 2" >>confdefs.h $as_echo "#define _MINIX 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether it is safe to define __EXTENSIONS__" >&5 $as_echo_n "checking whether it is safe to define __EXTENSIONS__... " >&6; } if ${ac_cv_safe_to_define___extensions__+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ # define __EXTENSIONS__ 1 $ac_includes_default int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_safe_to_define___extensions__=yes else ac_cv_safe_to_define___extensions__=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_safe_to_define___extensions__" >&5 $as_echo "$ac_cv_safe_to_define___extensions__" >&6; } test $ac_cv_safe_to_define___extensions__ = yes && $as_echo "#define __EXTENSIONS__ 1" >>confdefs.h $as_echo "#define _ALL_SOURCE 1" >>confdefs.h $as_echo "#define _GNU_SOURCE 1" >>confdefs.h $as_echo "#define _POSIX_PTHREAD_SEMANTICS 1" >>confdefs.h $as_echo "#define _TANDEM_SOURCE 1" >>confdefs.h for ac_prog in gawk mawk nawk awk do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_AWK+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$AWK"; then ac_cv_prog_AWK="$AWK" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AWK="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi AWK=$ac_cv_prog_AWK if test -n "$AWK"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 $as_echo "$AWK" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$AWK" && break done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 $as_echo_n "checking whether ln -s works... " >&6; } LN_S=$as_ln_s if test "$LN_S" = "ln -s"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 $as_echo "no, using $LN_S" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 $as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } set x ${MAKE-make} ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : $as_echo_n "(cached) " >&6 else cat >conftest.make <<\_ACEOF SHELL = /bin/sh all: @echo '@@@%%%=$(MAKE)=@@@%%%' _ACEOF # GNU make sometimes prints "make[1]: Entering ...", which would confuse us. case `${MAKE-make} -f conftest.make 2>/dev/null` in *@@@%%%=?*=@@@%%%*) eval ac_cv_prog_make_${ac_make}_set=yes;; *) eval ac_cv_prog_make_${ac_make}_set=no;; esac rm -f conftest.make fi if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } SET_MAKE= else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } SET_MAKE="MAKE=${MAKE-make}" fi # Check whether --enable-static was given. if test "${enable_static+set}" = set; then : enableval=$enable_static; p=${PACKAGE-default} case $enableval in yes) enable_static=yes ;; no) enable_static=no ;; *) enable_static=no # Look at the argument we got. We use all the common list separators. 
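# --enable-static may name specific packages (separated by spaces, commas
# or $PATH_SEPARATOR); static libraries are enabled only if this package
# ($PACKAGE) appears in that list, otherwise the default "no" stands.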
lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_static=yes fi done IFS="$lt_save_ifs" ;; esac else enable_static=no fi case `pwd` in *\ * | *\ *) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 $as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;; esac macro_version='2.4.2' macro_revision='1.3337' ltmain="$ac_aux_dir/ltmain.sh" # Make sure we can run config.sub. $SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 $as_echo_n "checking build system type... " >&6; } if ${ac_cv_build+:} false; then : $as_echo_n "(cached) " >&6 else ac_build_alias=$build_alias test "x$ac_build_alias" = x && ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` test "x$ac_build_alias" = x && as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5 ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 $as_echo "$ac_cv_build" >&6; } case $ac_cv_build in *-*-*) ;; *) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;; esac build=$ac_cv_build ac_save_IFS=$IFS; IFS='-' set x $ac_cv_build shift build_cpu=$1 build_vendor=$2 shift; shift # Remember, the first character of IFS is used to create $*, # except with old shells: build_os=$* IFS=$ac_save_IFS case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 $as_echo_n "checking host system type... " >&6; } if ${ac_cv_host+:} false; then : $as_echo_n "(cached) " >&6 else if test "x$host_alias" = x; then ac_cv_host=$ac_cv_build else ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 $as_echo "$ac_cv_host" >&6; } case $ac_cv_host in *-*-*) ;; *) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;; esac host=$ac_cv_host ac_save_IFS=$IFS; IFS='-' set x $ac_cv_host shift host_cpu=$1 host_vendor=$2 shift; shift # Remember, the first character of IFS is used to create $*, # except with old shells: host_os=$* IFS=$ac_save_IFS case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac # Backslashify metacharacters that are still active within # double-quoted strings. sed_quote_subst='s/\(["`$\\]\)/\\\1/g' # Same as above, but do not quote variable references. double_quote_subst='s/\(["`\\]\)/\\\1/g' # Sed substitution to delay expansion of an escaped shell variable in a # double_quote_subst'ed string. delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' # Sed substitution to delay expansion of an escaped single quote. delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' # Sed substitution to avoid accidental globbing in evaled expressions no_glob_subst='s/\*/\\\*/g' ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 $as_echo_n "checking how to print strings... 
" >&6; } # Test print first, because it will be a builtin if present. if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='print -r --' elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='printf %s\n' else # Use this function as a fallback that always works. func_fallback_echo () { eval 'cat <<_LTECHO_EOF $1 _LTECHO_EOF' } ECHO='func_fallback_echo' fi # func_echo_all arg... # Invoke $ECHO with all args, space-separated. func_echo_all () { $ECHO "" } case "$ECHO" in printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5 $as_echo "printf" >&6; } ;; print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5 $as_echo "print -r" >&6; } ;; *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5 $as_echo "cat" >&6; } ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 $as_echo_n "checking for a sed that does not truncate output... " >&6; } if ${ac_cv_path_SED+:} false; then : $as_echo_n "(cached) " >&6 else ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ for ac_i in 1 2 3 4 5 6 7; do ac_script="$ac_script$as_nl$ac_script" done echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed { ac_script=; unset ac_script;} if test -z "$SED"; then ac_path_SED_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in sed gsed; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_SED" || continue # Check for GNU ac_path_SED and select it if it is found. # Check for GNU $ac_path_SED case `"$ac_path_SED" --version 2>&1` in *GNU*) ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo '' >> "conftest.nl" "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_SED_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_SED="$ac_path_SED" ac_path_SED_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_SED_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_SED"; then as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5 fi else ac_cv_path_SED=$SED fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 $as_echo "$ac_cv_path_SED" >&6; } SED="$ac_cv_path_SED" rm -f conftest.sed test -z "$SED" && SED=sed Xsed="$SED -e 1s/^X//" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5 $as_echo_n "checking for fgrep... " >&6; } if ${ac_cv_path_FGREP+:} false; then : $as_echo_n "(cached) " >&6 else if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1 then ac_cv_path_FGREP="$GREP -F" else if test -z "$FGREP"; then ac_path_FGREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_prog in fgrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_FGREP" || continue # Check for GNU ac_path_FGREP and select it if it is found. # Check for GNU $ac_path_FGREP case `"$ac_path_FGREP" --version 2>&1` in *GNU*) ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'FGREP' >> "conftest.nl" "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_FGREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_FGREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_FGREP"; then as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_FGREP=$FGREP fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5 $as_echo "$ac_cv_path_FGREP" >&6; } FGREP="$ac_cv_path_FGREP" test -z "$GREP" && GREP=grep # Check whether --with-gnu-ld was given. if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes else with_gnu_ld=no fi ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 $as_echo_n "checking for ld used by $CC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [\\/]* | ?:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 $as_echo_n "checking for GNU ld... " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 $as_echo_n "checking for non-GNU ld... " >&6; } fi if ${lt_cv_path_LD+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$LD"; then lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in *GNU* | *'with BFD'*) test "$with_gnu_ld" != no && break ;; *) test "$with_gnu_ld" != yes && break ;; esac fi done IFS="$lt_save_ifs" else lt_cv_path_LD="$LD" # Let the user override the test with a path. fi fi LD="$lt_cv_path_LD" if test -n "$LD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5 $as_echo "$LD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -z "$LD" && as_fn_error $?
"no acceptable ld found in \$PATH" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 $as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } if ${lt_cv_prog_gnu_ld+:} false; then : $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 &5 $as_echo "$lt_cv_prog_gnu_ld" >&6; } with_gnu_ld=$lt_cv_prog_gnu_ld { $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5 $as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; } if ${lt_cv_path_NM+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$NM"; then # Let the user override the test. lt_cv_path_NM="$NM" else lt_nm_to_check="${ac_tool_prefix}nm" if test -n "$ac_tool_prefix" && test "$build" = "$host"; then lt_nm_to_check="$lt_nm_to_check nm" fi for lt_tmp_nm in $lt_nm_to_check; do lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. tmp_nm="$ac_dir/$lt_tmp_nm" if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then # Check to see if the nm accepts a BSD-compat flag. # Adding the `sed 1q' prevents false positives on HP-UX, which says: # nm: unknown option "B" ignored # Tru64's nm complains that /dev/null is an invalid object file case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in */dev/null* | *'Invalid file or object type'*) lt_cv_path_NM="$tmp_nm -B" break ;; *) case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in */dev/null*) lt_cv_path_NM="$tmp_nm -p" break ;; *) lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but continue # so that we can try to find one that supports BSD flags ;; esac ;; esac fi done IFS="$lt_save_ifs" done : ${lt_cv_path_NM=no} fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 $as_echo "$lt_cv_path_NM" >&6; } if test "$lt_cv_path_NM" != "no"; then NM="$lt_cv_path_NM" else # Didn't find any BSD compatible name lister, look for dumpbin. if test -n "$DUMPBIN"; then : # Let the user override the test. else if test -n "$ac_tool_prefix"; then for ac_prog in dumpbin "link -dump" do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_DUMPBIN+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$DUMPBIN"; then ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DUMPBIN=$ac_cv_prog_DUMPBIN if test -n "$DUMPBIN"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5 $as_echo "$DUMPBIN" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$DUMPBIN" && break done fi if test -z "$DUMPBIN"; then ac_ct_DUMPBIN=$DUMPBIN for ac_prog in dumpbin "link -dump" do # Extract the first word of "$ac_prog", so it can be a program name with args. 
set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DUMPBIN"; then ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DUMPBIN="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN if test -n "$ac_ct_DUMPBIN"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5 $as_echo "$ac_ct_DUMPBIN" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_DUMPBIN" && break done if test "x$ac_ct_DUMPBIN" = x; then DUMPBIN=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DUMPBIN=$ac_ct_DUMPBIN fi fi case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in *COFF*) DUMPBIN="$DUMPBIN -symbols" ;; *) DUMPBIN=: ;; esac fi if test "$DUMPBIN" != ":"; then NM="$DUMPBIN" fi fi test -z "$NM" && NM=nm { $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5 $as_echo_n "checking the name lister ($NM) interface... " >&6; } if ${lt_cv_nm_interface+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_nm_interface="BSD nm" echo "int some_variable = 0;" > conftest.$ac_ext (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5) (eval "$ac_compile" 2>conftest.err) cat conftest.err >&5 (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5) (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) cat conftest.err >&5 (eval echo "\"\$as_me:$LINENO: output\"" >&5) cat conftest.out >&5 if $GREP 'External.*some_variable' conftest.out > /dev/null; then lt_cv_nm_interface="MS dumpbin" fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5 $as_echo "$lt_cv_nm_interface" >&6; } # find the maximum length of command line arguments { $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5 $as_echo_n "checking the maximum length of command line arguments... " >&6; } if ${lt_cv_sys_max_cmd_len+:} false; then : $as_echo_n "(cached) " >&6 else i=0 teststring="ABCD" case $build_os in msdosdjgpp*) # On DJGPP, this test can blow up pretty badly due to problems in libc # (any single argument exceeding 2000 bytes causes a buffer overrun # during glob expansion). Even if it were fixed, the result of this # check would be larger than it should be. lt_cv_sys_max_cmd_len=12288; # 12K is about right ;; gnu*) # Under GNU Hurd, this test is not required because there is # no limit to the length of command line arguments. # Libtool will interpret -1 as no limit whatsoever lt_cv_sys_max_cmd_len=-1; ;; cygwin* | mingw* | cegcc*) # On Win9x/ME, this test blows up -- it succeeds, but takes # about 5 minutes as the teststring grows exponentially. 
# Worse, since 9x/ME are not pre-emptively multitasking, # you end up with a "frozen" computer, even though with patience # the test eventually succeeds (with a max line length of 256k). # Instead, let's just punt: use the minimum linelength reported by # all of the supported platforms: 8192 (on NT/2K/XP). lt_cv_sys_max_cmd_len=8192; ;; mint*) # On MiNT this can take a long time and run out of memory. lt_cv_sys_max_cmd_len=8192; ;; amigaos*) # On AmigaOS with pdksh, this test takes hours, literally. # So we just punt and use a minimum line length of 8192. lt_cv_sys_max_cmd_len=8192; ;; netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) # This has been around since 386BSD, at least. Likely further. if test -x /sbin/sysctl; then lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` elif test -x /usr/sbin/sysctl; then lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` else lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs fi # And add a safety zone lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` ;; interix*) # We know the value 262144 and hardcode it with a safety zone (like BSD) lt_cv_sys_max_cmd_len=196608 ;; os2*) # The test takes a long time on OS/2. lt_cv_sys_max_cmd_len=8192 ;; osf*) # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not # nice to cause kernel panics so lets avoid the loop below. # First set a reasonable default. lt_cv_sys_max_cmd_len=16384 # if test -x /sbin/sysconfig; then case `/sbin/sysconfig -q proc exec_disable_arg_limit` in *1*) lt_cv_sys_max_cmd_len=-1 ;; esac fi ;; sco3.2v5*) lt_cv_sys_max_cmd_len=102400 ;; sysv5* | sco5v6* | sysv4.2uw2*) kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` if test -n "$kargmax"; then lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[ ]//'` else lt_cv_sys_max_cmd_len=32768 fi ;; *) lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` if test -n "$lt_cv_sys_max_cmd_len"; then lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` else # Make teststring a little bigger before we do anything with it. # a 1K string should be a reasonable start. for i in 1 2 3 4 5 6 7 8 ; do teststring=$teststring$teststring done SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} # If test is not a shell built-in, we'll probably end up computing a # maximum length that is only half of the actual maximum length, but # we can't tell. while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \ = "X$teststring$teststring"; } >/dev/null 2>&1 && test $i != 17 # 1/2 MB should be enough do i=`expr $i + 1` teststring=$teststring$teststring done # Only check the string length outside the loop. lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` teststring= # Add a significant safety factor because C++ compilers can tack on # massive amounts of additional arguments before passing them to the # linker. It appears as though 1/2 is a usable value. 
lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` fi ;; esac fi if test -n $lt_cv_sys_max_cmd_len ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 $as_echo "$lt_cv_sys_max_cmd_len" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5 $as_echo "none" >&6; } fi max_cmd_len=$lt_cv_sys_max_cmd_len : ${CP="cp -f"} : ${MV="mv -f"} : ${RM="rm -f"} { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5 $as_echo_n "checking whether the shell understands some XSI constructs... " >&6; } # Try some XSI features xsi_shell=no ( _lt_dummy="a/b/c" test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ = c,a/b,b/c, \ && eval 'test $(( 1 + 1 )) -eq 2 \ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ && xsi_shell=yes { $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5 $as_echo "$xsi_shell" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5 $as_echo_n "checking whether the shell understands \"+=\"... " >&6; } lt_shell_append=no ( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \ >/dev/null 2>&1 \ && lt_shell_append=yes { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5 $as_echo "$lt_shell_append" >&6; } if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then lt_unset=unset else lt_unset=false fi # test EBCDIC or ASCII case `echo X|tr X '\101'` in A) # ASCII based system # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr lt_SP2NL='tr \040 \012' lt_NL2SP='tr \015\012 \040\040' ;; *) # EBCDIC based system lt_SP2NL='tr \100 \n' lt_NL2SP='tr \r\n \100\100' ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 $as_echo_n "checking how to convert $build file names to $host format... " >&6; } if ${lt_cv_to_host_file_cmd+:} false; then : $as_echo_n "(cached) " >&6 else case $host in *-*-mingw* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ;; *-*-cygwin* ) lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ;; * ) # otherwise, assume *nix lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ;; esac ;; *-*-cygwin* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ;; *-*-cygwin* ) lt_cv_to_host_file_cmd=func_convert_file_noop ;; * ) # otherwise, assume *nix lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ;; esac ;; * ) # unhandled hosts (and "normal" native builds) lt_cv_to_host_file_cmd=func_convert_file_noop ;; esac fi to_host_file_cmd=$lt_cv_to_host_file_cmd { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 $as_echo "$lt_cv_to_host_file_cmd" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 $as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } if ${lt_cv_to_tool_file_cmd+:} false; then : $as_echo_n "(cached) " >&6 else #assume ordinary cross tools, or native build. 
lt_cv_to_tool_file_cmd=func_convert_file_noop case $host in *-*-mingw* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ;; esac ;; esac fi to_tool_file_cmd=$lt_cv_to_tool_file_cmd { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 $as_echo "$lt_cv_to_tool_file_cmd" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 $as_echo_n "checking for $LD option to reload object files... " >&6; } if ${lt_cv_ld_reload_flag+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ld_reload_flag='-r' fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5 $as_echo "$lt_cv_ld_reload_flag" >&6; } reload_flag=$lt_cv_ld_reload_flag case $reload_flag in "" | " "*) ;; *) reload_flag=" $reload_flag" ;; esac reload_cmds='$LD$reload_flag -o $output$reload_objs' case $host_os in cygwin* | mingw* | pw32* | cegcc*) if test "$GCC" != yes; then reload_cmds=false fi ;; darwin*) if test "$GCC" = yes; then reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' else reload_cmds='$LD$reload_flag -o $output$reload_objs' fi ;; esac if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args. set dummy ${ac_tool_prefix}objdump; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_OBJDUMP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$OBJDUMP"; then ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OBJDUMP=$ac_cv_prog_OBJDUMP if test -n "$OBJDUMP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5 $as_echo "$OBJDUMP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OBJDUMP"; then ac_ct_OBJDUMP=$OBJDUMP # Extract the first word of "objdump", so it can be a program name with args. set dummy objdump; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OBJDUMP"; then ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OBJDUMP="objdump" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP if test -n "$ac_ct_OBJDUMP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5 $as_echo "$ac_ct_OBJDUMP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OBJDUMP" = x; then OBJDUMP="false" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OBJDUMP=$ac_ct_OBJDUMP fi else OBJDUMP="$ac_cv_prog_OBJDUMP" fi test -z "$OBJDUMP" && OBJDUMP=objdump { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5 $as_echo_n "checking how to recognize dependent libraries... " >&6; } if ${lt_cv_deplibs_check_method+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_file_magic_cmd='$MAGIC_CMD' lt_cv_file_magic_test_file= lt_cv_deplibs_check_method='unknown' # Need to set the preceding variable on all platforms that support # interlibrary dependencies. # 'none' -- dependencies not supported. # `unknown' -- same as none, but documents that we really don't know. # 'pass_all' -- all dependencies passed with no checks. # 'test_compile' -- check by making test program. # 'file_magic [[regex]]' -- check by looking for files in library path # which responds to the $file_magic_cmd with a given extended regex. # If you have `file' or equivalent on your system and you're not sure # whether `pass_all' will *always* work, you probably want this one. case $host_os in aix[4-9]*) lt_cv_deplibs_check_method=pass_all ;; beos*) lt_cv_deplibs_check_method=pass_all ;; bsdi[45]*) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)' lt_cv_file_magic_cmd='/usr/bin/file -L' lt_cv_file_magic_test_file=/shlib/libc.so ;; cygwin*) # func_win32_libid is a shell function defined in ltmain.sh lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' ;; mingw* | pw32*) # Base MSYS/MinGW do not provide the 'file' command needed by # func_win32_libid shell function, so use a weaker test based on 'objdump', # unless we find 'file', for example because we are cross-compiling. # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin. if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' else # Keep this pattern in sync with the one in func_win32_libid. lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' lt_cv_file_magic_cmd='$OBJDUMP -f' fi ;; cegcc*) # use the weaker test based on 'objdump'. See mingw*. lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' lt_cv_file_magic_cmd='$OBJDUMP -f' ;; darwin* | rhapsody*) lt_cv_deplibs_check_method=pass_all ;; freebsd* | dragonfly*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then case $host_cpu in i*86 ) # Not sure whether the presence of OpenBSD here was a mistake. # Let's accept both of them until this is cleared up. 
lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` ;; esac else lt_cv_deplibs_check_method=pass_all fi ;; gnu*) lt_cv_deplibs_check_method=pass_all ;; haiku*) lt_cv_deplibs_check_method=pass_all ;; hpux10.20* | hpux11*) lt_cv_file_magic_cmd=/usr/bin/file case $host_cpu in ia64*) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64' lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so ;; hppa*64*) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]' lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl ;; *) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library' lt_cv_file_magic_test_file=/usr/lib/libc.sl ;; esac ;; interix[3-9]*) # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$' ;; irix5* | irix6* | nonstopux*) case $LD in *-32|*"-32 ") libmagic=32-bit;; *-n32|*"-n32 ") libmagic=N32;; *-64|*"-64 ") libmagic=64-bit;; *) libmagic=never-match;; esac lt_cv_deplibs_check_method=pass_all ;; # This must be glibc/ELF. linux* | k*bsd*-gnu | kopensolaris*-gnu) lt_cv_deplibs_check_method=pass_all ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$' fi ;; newos6*) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=/usr/lib/libnls.so ;; *nto* | *qnx*) lt_cv_deplibs_check_method=pass_all ;; openbsd*) if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' fi ;; osf3* | osf4* | osf5*) lt_cv_deplibs_check_method=pass_all ;; rdos*) lt_cv_deplibs_check_method=pass_all ;; solaris*) lt_cv_deplibs_check_method=pass_all ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) lt_cv_deplibs_check_method=pass_all ;; sysv4 | sysv4.3*) case $host_vendor in motorola) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]' lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` ;; ncr) lt_cv_deplibs_check_method=pass_all ;; sequent) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )' ;; sni) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib" lt_cv_file_magic_test_file=/lib/libc.so ;; siemens) lt_cv_deplibs_check_method=pass_all ;; pc) lt_cv_deplibs_check_method=pass_all ;; esac ;; tpf*) lt_cv_deplibs_check_method=pass_all ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 $as_echo "$lt_cv_deplibs_check_method" >&6; } file_magic_glob= want_nocaseglob=no if test "$build" = "$host"; then case $host_os in mingw* | pw32*) if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then want_nocaseglob=yes else file_magic_glob=`echo 
aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` fi ;; esac fi file_magic_cmd=$lt_cv_file_magic_cmd deplibs_check_method=$lt_cv_deplibs_check_method test -z "$deplibs_check_method" && deplibs_check_method=unknown if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. set dummy ${ac_tool_prefix}dlltool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_DLLTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$DLLTOOL"; then ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DLLTOOL=$ac_cv_prog_DLLTOOL if test -n "$DLLTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 $as_echo "$DLLTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_DLLTOOL"; then ac_ct_DLLTOOL=$DLLTOOL # Extract the first word of "dlltool", so it can be a program name with args. set dummy dlltool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DLLTOOL"; then ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DLLTOOL="dlltool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL if test -n "$ac_ct_DLLTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 $as_echo "$ac_ct_DLLTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_DLLTOOL" = x; then DLLTOOL="false" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DLLTOOL=$ac_ct_DLLTOOL fi else DLLTOOL="$ac_cv_prog_DLLTOOL" fi test -z "$DLLTOOL" && DLLTOOL=dlltool { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 $as_echo_n "checking how to associate runtime and link libraries... 
" >&6; } if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_sharedlib_from_linklib_cmd='unknown' case $host_os in cygwin* | mingw* | pw32* | cegcc*) # two different shell functions defined in ltmain.sh # decide which to use based on capabilities of $DLLTOOL case `$DLLTOOL --help 2>&1` in *--identify-strict*) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ;; *) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ;; esac ;; *) # fallback: assume linklib IS sharedlib lt_cv_sharedlib_from_linklib_cmd="$ECHO" ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 $as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO if test -n "$ac_tool_prefix"; then for ac_prog in ar do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_AR+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$AR"; then ac_cv_prog_AR="$AR" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AR="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi AR=$ac_cv_prog_AR if test -n "$AR"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5 $as_echo "$AR" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$AR" && break done fi if test -z "$AR"; then ac_ct_AR=$AR for ac_prog in ar do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_AR+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_AR"; then ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_AR="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_AR=$ac_cv_prog_ac_ct_AR if test -n "$ac_ct_AR"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5 $as_echo "$ac_ct_AR" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_AR" && break done if test "x$ac_ct_AR" = x; then AR="false" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac AR=$ac_ct_AR fi fi : ${AR=ar} : ${AR_FLAGS=cru} { $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 $as_echo_n "checking for archiver @FILE support... 
" >&6; } if ${lt_cv_ar_at_file+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ar_at_file=no cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : echo conftest.$ac_objext > conftest.lst lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 (eval $lt_ar_try) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } if test "$ac_status" -eq 0; then # Ensure the archiver fails upon bogus file names. rm -f conftest.$ac_objext libconftest.a { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 (eval $lt_ar_try) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } if test "$ac_status" -ne 0; then lt_cv_ar_at_file=@ fi fi rm -f conftest.* libconftest.a fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 $as_echo "$lt_cv_ar_at_file" >&6; } if test "x$lt_cv_ar_at_file" = xno; then archiver_list_spec= else archiver_list_spec=$lt_cv_ar_at_file fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 $as_echo "$STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 $as_echo "$ac_ct_STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_STRIP" = x; then STRIP=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac STRIP=$ac_ct_STRIP fi else STRIP="$ac_cv_prog_STRIP" fi test -z "$STRIP" && STRIP=: if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. set dummy ${ac_tool_prefix}ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_RANLIB+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$RANLIB"; then ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi RANLIB=$ac_cv_prog_RANLIB if test -n "$RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 $as_echo "$RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_RANLIB"; then ac_ct_RANLIB=$RANLIB # Extract the first word of "ranlib", so it can be a program name with args. set dummy ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_RANLIB+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_RANLIB"; then ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_RANLIB="ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB if test -n "$ac_ct_RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 $as_echo "$ac_ct_RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_RANLIB" = x; then RANLIB=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac RANLIB=$ac_ct_RANLIB fi else RANLIB="$ac_cv_prog_RANLIB" fi test -z "$RANLIB" && RANLIB=: # Determine commands to create old-style static archives. 
old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' old_postinstall_cmds='chmod 644 $oldlib' old_postuninstall_cmds= if test -n "$RANLIB"; then case $host_os in openbsd*) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" ;; *) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib" ;; esac old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib" fi case $host_os in darwin*) lock_old_archive_extraction=yes ;; *) lock_old_archive_extraction=no ;; esac # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC # Check for command to grab the raw symbol name followed by C symbol from nm. { $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5 $as_echo_n "checking command to parse $NM output from $compiler object... " >&6; } if ${lt_cv_sys_global_symbol_pipe+:} false; then : $as_echo_n "(cached) " >&6 else # These are sane defaults that work on at least a few old systems. # [They come from Ultrix. What could be older than Ultrix?!! ;)] # Character class describing NM global symbol codes. symcode='[BCDEGRST]' # Regexp to match symbols that can be accessed directly from C. sympat='\([_A-Za-z][_A-Za-z0-9]*\)' # Define system-specific variables. case $host_os in aix*) symcode='[BCDT]' ;; cygwin* | mingw* | pw32* | cegcc*) symcode='[ABCDGISTW]' ;; hpux*) if test "$host_cpu" = ia64; then symcode='[ABCDEGRST]' fi ;; irix* | nonstopux*) symcode='[BCDEGRST]' ;; osf*) symcode='[BCDEGQRST]' ;; solaris*) symcode='[BDRT]' ;; sco3.2v5*) symcode='[DT]' ;; sysv4.2uw2*) symcode='[DT]' ;; sysv5* | sco5v6* | unixware* | OpenUNIX*) symcode='[ABDT]' ;; sysv4) symcode='[DFNSTU]' ;; esac # If we're using GNU nm, then use its standard symbol codes. case `$NM -V 2>&1` in *GNU* | *'with BFD'*) symcode='[ABCDGIRSTW]' ;; esac # Transform an extracted symbol line into a proper C declaration. # Some systems (esp. on ia64) link data and code symbols differently, # so use this general approach. lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" # Transform an extracted symbol line into symbol name and symbol address lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" # Handle CRLF in mingw tool chain opt_cr= case $build_os in mingw*) opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp ;; esac # Try without a prefix underscore, then with it. for ac_symprfx in "" "_"; do # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. symxfrm="\\1 $ac_symprfx\\2 \\2" # Write the raw and C identifiers. if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Fake it for dumpbin and say T for any non-static function # and D for any global variable. # Also find C++ and __fastcall symbols from MSVC++, # which start with @ or ?. 
lt_cv_sys_global_symbol_pipe="$AWK '"\ " {last_section=section; section=\$ 3};"\ " /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ " \$ 0!~/External *\|/{next};"\ " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ " {if(hide[section]) next};"\ " {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\ " {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ " s[1]~/^[@?]/{print s[1], s[1]; next};"\ " s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ " ' prfx=^$ac_symprfx" else lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" fi lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" # Check to see that the pipe works correctly. pipe_works=no rm -f conftest* cat > conftest.$ac_ext <<_LT_EOF #ifdef __cplusplus extern "C" { #endif char nm_test_var; void nm_test_func(void); void nm_test_func(void){} #ifdef __cplusplus } #endif int main(){nm_test_var='a';nm_test_func();return(0);} _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then # Now try to grab the symbols. nlist=conftest.nm if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5 (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s "$nlist"; then # Try sorting and uniquifying the output. if sort "$nlist" | uniq > "$nlist"T; then mv -f "$nlist"T "$nlist" else rm -f "$nlist"T fi # Make sure that we snagged all the symbols we need. if $GREP ' nm_test_var$' "$nlist" >/dev/null; then if $GREP ' nm_test_func$' "$nlist" >/dev/null; then cat <<_LT_EOF > conftest.$ac_ext /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ #if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) /* DATA imports from DLLs on WIN32 con't be const, because runtime relocations are performed -- see ld's documentation on pseudo-relocs. */ # define LT_DLSYM_CONST #elif defined(__osf__) /* This system does not cope well with relocations in const data. */ # define LT_DLSYM_CONST #else # define LT_DLSYM_CONST const #endif #ifdef __cplusplus extern "C" { #endif _LT_EOF # Now generate the symbol file. eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' cat <<_LT_EOF >> conftest.$ac_ext /* The mapping between symbol names and symbols. */ LT_DLSYM_CONST struct { const char *name; void *address; } lt__PROGRAM__LTX_preloaded_symbols[] = { { "@PROGRAM@", (void *) 0 }, _LT_EOF $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext cat <<\_LT_EOF >> conftest.$ac_ext {0, (void *) 0} }; /* This works around a problem in FreeBSD linker */ #ifdef FREEBSD_WORKAROUND static const void *lt_preloaded_setup() { return lt__PROGRAM__LTX_preloaded_symbols; } #endif #ifdef __cplusplus } #endif _LT_EOF # Now try linking the two files. mv conftest.$ac_objext conftstm.$ac_objext lt_globsym_save_LIBS=$LIBS lt_globsym_save_CFLAGS=$CFLAGS LIBS="conftstm.$ac_objext" CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? 
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s conftest${ac_exeext}; then pipe_works=yes fi LIBS=$lt_globsym_save_LIBS CFLAGS=$lt_globsym_save_CFLAGS else echo "cannot find nm_test_func in $nlist" >&5 fi else echo "cannot find nm_test_var in $nlist" >&5 fi else echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5 fi else echo "$progname: failed program was:" >&5 cat conftest.$ac_ext >&5 fi rm -rf conftest* conftst* # Do not use the global_symbol_pipe unless it works. if test "$pipe_works" = yes; then break else lt_cv_sys_global_symbol_pipe= fi done fi if test -z "$lt_cv_sys_global_symbol_pipe"; then lt_cv_sys_global_symbol_to_cdecl= fi if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 $as_echo "failed" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } fi # Response file support. if test "$lt_cv_nm_interface" = "MS dumpbin"; then nm_file_list_spec='@' elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then nm_file_list_spec='@' fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 $as_echo_n "checking for sysroot... " >&6; } # Check whether --with-sysroot was given. if test "${with_sysroot+set}" = set; then : withval=$with_sysroot; else with_sysroot=no fi lt_sysroot= case ${with_sysroot} in #( yes) if test "$GCC" = yes; then lt_sysroot=`$CC --print-sysroot 2>/dev/null` fi ;; #( /*) lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"` ;; #( no|'') ;; #( *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_sysroot}" >&5 $as_echo "${with_sysroot}" >&6; } as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5 ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 $as_echo "${lt_sysroot:-no}" >&6; } # Check whether --enable-libtool-lock was given. if test "${enable_libtool_lock+set}" = set; then : enableval=$enable_libtool_lock; fi test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes # Some flags need to be propagated to the compiler or linker for good # libtool support. case $host in ia64-*-hpux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.$ac_objext` in *ELF-32*) HPUX_IA64_MODE="32" ;; *ELF-64*) HPUX_IA64_MODE="64" ;; esac fi rm -rf conftest* ;; *-*-irix6*) # Find out which ABI we are using. echo '#line '$LINENO' "configure"' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then if test "$lt_cv_prog_gnu_ld" = yes; then case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -melf32bsmip" ;; *N32*) LD="${LD-ld} -melf32bmipn32" ;; *64-bit*) LD="${LD-ld} -melf64bmip" ;; esac else case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -32" ;; *N32*) LD="${LD-ld} -n32" ;; *64-bit*) LD="${LD-ld} -64" ;; esac fi fi rm -rf conftest* ;; x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ s390*-*linux*|s390*-*tpf*|sparc*-*linux*) # Find out which ABI we are using. 
echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.o` in *32-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_i386_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_i386" ;; ppc64-*linux*|powerpc64-*linux*) LD="${LD-ld} -m elf32ppclinux" ;; s390x-*linux*) LD="${LD-ld} -m elf_s390" ;; sparc64-*linux*) LD="${LD-ld} -m elf32_sparc" ;; esac ;; *64-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_x86_64_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_x86_64" ;; ppc*-*linux*|powerpc*-*linux*) LD="${LD-ld} -m elf64ppc" ;; s390*-*linux*|s390*-*tpf*) LD="${LD-ld} -m elf64_s390" ;; sparc*-*linux*) LD="${LD-ld} -m elf64_sparc" ;; esac ;; esac fi rm -rf conftest* ;; *-*-sco3.2v5*) # On SCO OpenServer 5, we need -belf to get full-featured binaries. SAVE_CFLAGS="$CFLAGS" CFLAGS="$CFLAGS -belf" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 $as_echo_n "checking whether the C compiler needs -belf... " >&6; } if ${lt_cv_cc_needs_belf+:} false; then : $as_echo_n "(cached) " >&6 else ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_cv_cc_needs_belf=yes else lt_cv_cc_needs_belf=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 $as_echo "$lt_cv_cc_needs_belf" >&6; } if test x"$lt_cv_cc_needs_belf" != x"yes"; then # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf CFLAGS="$SAVE_CFLAGS" fi ;; *-*solaris*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.o` in *64-bit*) case $lt_cv_prog_gnu_ld in yes*) case $host in i?86-*-solaris*) LD="${LD-ld} -m elf_x86_64" ;; sparc*-*-solaris*) LD="${LD-ld} -m elf64_sparc" ;; esac # GNU ld 2.21 introduced _sol2 emulations. Use them if available. if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then LD="${LD-ld}_sol2" fi ;; *) if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then LD="${LD-ld} -64" fi ;; esac ;; esac fi rm -rf conftest* ;; esac need_locks="$enable_libtool_lock" if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. set dummy ${ac_tool_prefix}mt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_MANIFEST_TOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$MANIFEST_TOOL"; then ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. 
else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL if test -n "$MANIFEST_TOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 $as_echo "$MANIFEST_TOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_MANIFEST_TOOL"; then ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL # Extract the first word of "mt", so it can be a program name with args. set dummy mt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_MANIFEST_TOOL"; then ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL if test -n "$ac_ct_MANIFEST_TOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 $as_echo "$ac_ct_MANIFEST_TOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_MANIFEST_TOOL" = x; then MANIFEST_TOOL=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL fi else MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" fi test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 $as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } if ${lt_cv_path_mainfest_tool+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_path_mainfest_tool=no echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out cat conftest.err >&5 if $GREP 'Manifest Tool' conftest.out > /dev/null; then lt_cv_path_mainfest_tool=yes fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 $as_echo "$lt_cv_path_mainfest_tool" >&6; } if test "x$lt_cv_path_mainfest_tool" != xyes; then MANIFEST_TOOL=: fi case $host_os in rhapsody* | darwin*) if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args. set dummy ${ac_tool_prefix}dsymutil; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_DSYMUTIL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$DSYMUTIL"; then ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test. 
else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DSYMUTIL=$ac_cv_prog_DSYMUTIL if test -n "$DSYMUTIL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5 $as_echo "$DSYMUTIL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_DSYMUTIL"; then ac_ct_DSYMUTIL=$DSYMUTIL # Extract the first word of "dsymutil", so it can be a program name with args. set dummy dsymutil; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DSYMUTIL"; then ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL if test -n "$ac_ct_DSYMUTIL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5 $as_echo "$ac_ct_DSYMUTIL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_DSYMUTIL" = x; then DSYMUTIL=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DSYMUTIL=$ac_ct_DSYMUTIL fi else DSYMUTIL="$ac_cv_prog_DSYMUTIL" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args. set dummy ${ac_tool_prefix}nmedit; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_NMEDIT+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$NMEDIT"; then ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi NMEDIT=$ac_cv_prog_NMEDIT if test -n "$NMEDIT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5 $as_echo "$NMEDIT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_NMEDIT"; then ac_ct_NMEDIT=$NMEDIT # Extract the first word of "nmedit", so it can be a program name with args. set dummy nmedit; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_NMEDIT"; then ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_NMEDIT="nmedit" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT if test -n "$ac_ct_NMEDIT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5 $as_echo "$ac_ct_NMEDIT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_NMEDIT" = x; then NMEDIT=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac NMEDIT=$ac_ct_NMEDIT fi else NMEDIT="$ac_cv_prog_NMEDIT" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args. set dummy ${ac_tool_prefix}lipo; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_LIPO+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$LIPO"; then ac_cv_prog_LIPO="$LIPO" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_LIPO="${ac_tool_prefix}lipo" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi LIPO=$ac_cv_prog_LIPO if test -n "$LIPO"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5 $as_echo "$LIPO" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_LIPO"; then ac_ct_LIPO=$LIPO # Extract the first word of "lipo", so it can be a program name with args. set dummy lipo; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_LIPO+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_LIPO"; then ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_LIPO="lipo" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO if test -n "$ac_ct_LIPO"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5 $as_echo "$ac_ct_LIPO" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_LIPO" = x; then LIPO=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac LIPO=$ac_ct_LIPO fi else LIPO="$ac_cv_prog_LIPO" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args. set dummy ${ac_tool_prefix}otool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_OTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$OTOOL"; then ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OTOOL="${ac_tool_prefix}otool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OTOOL=$ac_cv_prog_OTOOL if test -n "$OTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5 $as_echo "$OTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OTOOL"; then ac_ct_OTOOL=$OTOOL # Extract the first word of "otool", so it can be a program name with args. set dummy otool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_OTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OTOOL"; then ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OTOOL="otool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL if test -n "$ac_ct_OTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5 $as_echo "$ac_ct_OTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OTOOL" = x; then OTOOL=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OTOOL=$ac_ct_OTOOL fi else OTOOL="$ac_cv_prog_OTOOL" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args. 
set dummy ${ac_tool_prefix}otool64; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_OTOOL64+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$OTOOL64"; then ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OTOOL64=$ac_cv_prog_OTOOL64 if test -n "$OTOOL64"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5 $as_echo "$OTOOL64" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OTOOL64"; then ac_ct_OTOOL64=$OTOOL64 # Extract the first word of "otool64", so it can be a program name with args. set dummy otool64; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OTOOL64"; then ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OTOOL64="otool64" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64 if test -n "$ac_ct_OTOOL64"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5 $as_echo "$ac_ct_OTOOL64" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OTOOL64" = x; then OTOOL64=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OTOOL64=$ac_ct_OTOOL64 fi else OTOOL64="$ac_cv_prog_OTOOL64" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5 $as_echo_n "checking for -single_module linker flag... " >&6; } if ${lt_cv_apple_cc_single_mod+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_apple_cc_single_mod=no if test -z "${LT_MULTI_MODULE}"; then # By default we will add the -single_module flag. You can override # by either setting the environment variable LT_MULTI_MODULE # non-empty at configure time, or by adding -multi_module to the # link flags. rm -rf libconftest.dylib* echo "int foo(void){return 1;}" > conftest.c echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c" >&5 $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c 2>conftest.err _lt_result=$? # If there is a non-empty error log, and "single_module" # appears in it, assume the flag caused a linker warning if test -s conftest.err && $GREP single_module conftest.err; then cat conftest.err >&5 # Otherwise, if the output was created with a 0 exit code from # the compiler, it worked. 
elif test -f libconftest.dylib && test $_lt_result -eq 0; then lt_cv_apple_cc_single_mod=yes else cat conftest.err >&5 fi rm -rf libconftest.dylib* rm -f conftest.* fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5 $as_echo "$lt_cv_apple_cc_single_mod" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5 $as_echo_n "checking for -exported_symbols_list linker flag... " >&6; } if ${lt_cv_ld_exported_symbols_list+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ld_exported_symbols_list=no save_LDFLAGS=$LDFLAGS echo "_main" > conftest.sym LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_cv_ld_exported_symbols_list=yes else lt_cv_ld_exported_symbols_list=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS="$save_LDFLAGS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 $as_echo "$lt_cv_ld_exported_symbols_list" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5 $as_echo_n "checking for -force_load linker flag... " >&6; } if ${lt_cv_ld_force_load+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ld_force_load=no cat > conftest.c << _LT_EOF int forced_loaded() { return 2;} _LT_EOF echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5 $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 echo "$AR cru libconftest.a conftest.o" >&5 $AR cru libconftest.a conftest.o 2>&5 echo "$RANLIB libconftest.a" >&5 $RANLIB libconftest.a 2>&5 cat > conftest.c << _LT_EOF int main() { return 0;} _LT_EOF echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5 $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err _lt_result=$? if test -s conftest.err && $GREP force_load conftest.err; then cat conftest.err >&5 elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then lt_cv_ld_force_load=yes else cat conftest.err >&5 fi rm -f conftest.err libconftest.a conftest conftest.c rm -rf conftest.dSYM fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5 $as_echo "$lt_cv_ld_force_load" >&6; } case $host_os in rhapsody* | darwin1.[012]) _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; darwin1.*) _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; darwin*) # darwin 5.x on # if running on 10.5 or later, the deployment target defaults # to the OS version, if on x86, and 10.4, the deployment # target defaults to 10.4. Don't you love it? 
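# Illustrative note (comments only, added by the editor): the case statement
# just below maps ${MACOSX_DEPLOYMENT_TARGET} (defaulting to 10.0 when unset)
# plus $host onto the undefined-symbols handling.  Reading that case: an
# explicit target of 10.0-10.2 selects '-flat_namespace -undefined suppress',
# targets of 10.3 and later select '-undefined dynamic_lookup', and the
# unset/10.0 default still gets dynamic_lookup on x86 darwin8 and on darwin9
# or later hosts.  A user could, hypothetically, force the older behaviour at
# configure time with e.g.:
#
#   MACOSX_DEPLOYMENT_TARGET=10.2 ./configure
#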
case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in 10.0,*86*-darwin8*|10.0,*-darwin[91]*) _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; 10.[012]*) _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; 10.*) _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; esac ;; esac if test "$lt_cv_apple_cc_single_mod" = "yes"; then _lt_dar_single_mod='$single_module' fi if test "$lt_cv_ld_exported_symbols_list" = "yes"; then _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' else _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' fi if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then _lt_dsymutil='~$DSYMUTIL $lib || :' else _lt_dsymutil= fi ;; esac for ac_header in dlfcn.h do : ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default " if test "x$ac_cv_header_dlfcn_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_DLFCN_H 1 _ACEOF fi done func_stripname_cnf () { case ${2} in .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; esac } # func_stripname_cnf # Set options enable_dlopen=no enable_win32_dll=no # Check whether --enable-shared was given. if test "${enable_shared+set}" = set; then : enableval=$enable_shared; p=${PACKAGE-default} case $enableval in yes) enable_shared=yes ;; no) enable_shared=no ;; *) enable_shared=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_shared=yes fi done IFS="$lt_save_ifs" ;; esac else enable_shared=yes fi # Check whether --with-pic was given. if test "${with_pic+set}" = set; then : withval=$with_pic; lt_p=${PACKAGE-default} case $withval in yes|no) pic_mode=$withval ;; *) pic_mode=default # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for lt_pkg in $withval; do IFS="$lt_save_ifs" if test "X$lt_pkg" = "X$lt_p"; then pic_mode=yes fi done IFS="$lt_save_ifs" ;; esac else pic_mode=default fi test -z "$pic_mode" && pic_mode=default # Check whether --enable-fast-install was given. if test "${enable_fast_install+set}" = set; then : enableval=$enable_fast_install; p=${PACKAGE-default} case $enableval in yes) enable_fast_install=yes ;; no) enable_fast_install=no ;; *) enable_fast_install=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_fast_install=yes fi done IFS="$lt_save_ifs" ;; esac else enable_fast_install=yes fi # This can be used to rebuild libtool when needed LIBTOOL_DEPS="$ltmain" # Always use our own libtool. LIBTOOL='$(SHELL) $(top_builddir)/libtool' test -z "$LN_S" && LN_S="ln -s" if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5 $as_echo_n "checking for objdir... " >&6; } if ${lt_cv_objdir+:} false; then : $as_echo_n "(cached) " >&6 else rm -f .libs 2>/dev/null mkdir .libs 2>/dev/null if test -d .libs; then lt_cv_objdir=.libs else # MS-DOS does not allow filenames that begin with a dot. 
lt_cv_objdir=_libs fi rmdir .libs 2>/dev/null fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5 $as_echo "$lt_cv_objdir" >&6; } objdir=$lt_cv_objdir cat >>confdefs.h <<_ACEOF #define LT_OBJDIR "$lt_cv_objdir/" _ACEOF case $host_os in aix3*) # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test "X${COLLECT_NAMES+set}" != Xset; then COLLECT_NAMES= export COLLECT_NAMES fi ;; esac # Global variables: ofile=libtool can_build_shared=yes # All known linkers require a `.a' archive for static linking (except MSVC, # which needs '.lib'). libext=a with_gnu_ld="$lt_cv_prog_gnu_ld" old_CC="$CC" old_CFLAGS="$CFLAGS" # Set sane defaults for various variables test -z "$CC" && CC=cc test -z "$LTCC" && LTCC=$CC test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS test -z "$LD" && LD=ld test -z "$ac_objext" && ac_objext=o for cc_temp in $compiler""; do case $cc_temp in compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; \-*) ;; *) break;; esac done cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` # Only perform the check for file, if the check method requires it test -z "$MAGIC_CMD" && MAGIC_CMD=file case $deplibs_check_method in file_magic*) if test "$file_magic_cmd" = '$MAGIC_CMD'; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5 $as_echo_n "checking for ${ac_tool_prefix}file... " >&6; } if ${lt_cv_path_MAGIC_CMD+:} false; then : $as_echo_n "(cached) " >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD="$MAGIC_CMD" lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" for ac_dir in $ac_dummy; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f $ac_dir/${ac_tool_prefix}file; then lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS="$lt_save_ifs" MAGIC_CMD="$lt_save_MAGIC_CMD" ;; esac fi MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if test -n "$MAGIC_CMD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 $as_echo "$MAGIC_CMD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test -z "$lt_cv_path_MAGIC_CMD"; then if test -n "$ac_tool_prefix"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5 $as_echo_n "checking for file... 
" >&6; } if ${lt_cv_path_MAGIC_CMD+:} false; then : $as_echo_n "(cached) " >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD="$MAGIC_CMD" lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" for ac_dir in $ac_dummy; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f $ac_dir/file; then lt_cv_path_MAGIC_CMD="$ac_dir/file" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS="$lt_save_ifs" MAGIC_CMD="$lt_save_MAGIC_CMD" ;; esac fi MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if test -n "$MAGIC_CMD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 $as_echo "$MAGIC_CMD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else MAGIC_CMD=: fi fi fi ;; esac # Use C for the default configuration in the libtool script lt_save_CC="$CC" ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu # Source file extension for C test sources. ac_ext=c # Object file extension for compiled C test sources. objext=o objext=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(){return(0);}' # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC # Save the default compiler, since it gets overwritten when the other # tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. compiler_DEFAULT=$CC # save warnings/boilerplate of simple test code ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... 
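# ---------------------------------------------------------------------------
# Illustrative sketch of the probe pattern used by the compiler checks that
# follow (guarded by "if false" so it never runs as part of configure).  The
# real tests below compile $lt_simple_compile_test_code with a candidate
# flag, compare the resulting stderr against the boilerplate captured above,
# and accept the flag only if no extra diagnostics appear.  This simplified,
# hypothetical version skips the boilerplate diff and just tests for an empty
# stderr; lt_probe_flag is an invented name, not part of libtool.
if false; then
  lt_probe_flag='-fno-builtin'
  echo "$lt_simple_compile_test_code" > conftest.$ac_ext
  if eval "$ac_compile $lt_probe_flag" 2>conftest.err && test ! -s conftest.err; then
    echo "candidate flag $lt_probe_flag appears to be accepted"
  fi
  $RM conftest*
fi
# ---------------------------------------------------------------------------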
if test -n "$compiler"; then lt_prog_compiler_no_builtin_flag= if test "$GCC" = yes; then case $cc_basename in nvcc*) lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;; *) lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 $as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; } if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_rtti_exceptions=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-fno-rtti -fno-exceptions" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_rtti_exceptions=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 $as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" else : fi fi lt_prog_compiler_wl= lt_prog_compiler_pic= lt_prog_compiler_static= if test "$GCC" = yes; then lt_prog_compiler_wl='-Wl,' lt_prog_compiler_static='-static' case $host_os in aix*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static='-Bstatic' fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support lt_prog_compiler_pic='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries lt_prog_compiler_pic='-DDLL_EXPORT' ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic='-fno-common' ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. lt_prog_compiler_static= ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. 
On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) # +Z the default ;; *) lt_prog_compiler_pic='-fPIC' ;; esac ;; interix[3-9]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; msdosdjgpp*) # Just because we use GCC doesn't mean we suddenly get shared libraries # on systems that don't support them. lt_prog_compiler_can_build_shared=no enable_shared=no ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic='-fPIC -shared' ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic=-Kconform_pic fi ;; *) lt_prog_compiler_pic='-fPIC' ;; esac case $cc_basename in nvcc*) # Cuda Compiler Driver 2.2 lt_prog_compiler_wl='-Xlinker ' if test -n "$lt_prog_compiler_pic"; then lt_prog_compiler_pic="-Xcompiler $lt_prog_compiler_pic" fi ;; esac else # PORTME Check for flag to pass linker flags through the system compiler. case $host_os in aix*) lt_prog_compiler_wl='-Wl,' if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static='-Bstatic' else lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp' fi ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic='-DDLL_EXPORT' ;; hpux9* | hpux10* | hpux11*) lt_prog_compiler_wl='-Wl,' # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic='+Z' ;; esac # Is there a better lt_prog_compiler_static that works with the bundled CC? lt_prog_compiler_static='${wl}-a ${wl}archive' ;; irix5* | irix6* | nonstopux*) lt_prog_compiler_wl='-Wl,' # PIC (with -KPIC) is the default. lt_prog_compiler_static='-non_shared' ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in # old Intel for x86_64 which still supported -KPIC. ecc*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-static' ;; # icc used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. icc* | ifort*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fPIC' lt_prog_compiler_static='-static' ;; # Lahey Fortran 8.1. lf95*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='--shared' lt_prog_compiler_static='--static' ;; nagfor*) # NAG Fortran compiler lt_prog_compiler_wl='-Wl,-Wl,,' lt_prog_compiler_pic='-PIC' lt_prog_compiler_static='-Bstatic' ;; pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group compilers (*not* the Pentium gcc compiler, # which looks to be a dead project) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fpic' lt_prog_compiler_static='-Bstatic' ;; ccc*) lt_prog_compiler_wl='-Wl,' # All Alpha code is PIC. 
lt_prog_compiler_static='-non_shared' ;; xl* | bgxl* | bgf* | mpixl*) # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-qpic' lt_prog_compiler_static='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*) # Sun Fortran 8.3 passes all unrecognized flags to the linker lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' lt_prog_compiler_wl='' ;; *Sun\ F* | *Sun*Fortran*) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' lt_prog_compiler_wl='-Qoption ld ' ;; *Sun\ C*) # Sun C 5.9 lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' lt_prog_compiler_wl='-Wl,' ;; *Intel*\ [CF]*Compiler*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fPIC' lt_prog_compiler_static='-static' ;; *Portland\ Group*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fpic' lt_prog_compiler_static='-Bstatic' ;; esac ;; esac ;; newsos6) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic='-fPIC -shared' ;; osf3* | osf4* | osf5*) lt_prog_compiler_wl='-Wl,' # All OSF/1 code is PIC. lt_prog_compiler_static='-non_shared' ;; rdos*) lt_prog_compiler_static='-non_shared' ;; solaris*) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' case $cc_basename in f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) lt_prog_compiler_wl='-Qoption ld ';; *) lt_prog_compiler_wl='-Wl,';; esac ;; sunos4*) lt_prog_compiler_wl='-Qoption ld ' lt_prog_compiler_pic='-PIC' lt_prog_compiler_static='-Bstatic' ;; sysv4 | sysv4.2uw2* | sysv4.3*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; sysv4*MP*) if test -d /usr/nec ;then lt_prog_compiler_pic='-Kconform_pic' lt_prog_compiler_static='-Bstatic' fi ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; unicos*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_can_build_shared=no ;; uts4*) lt_prog_compiler_pic='-pic' lt_prog_compiler_static='-Bstatic' ;; *) lt_prog_compiler_can_build_shared=no ;; esac fi case $host_os in # For platforms which do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic= ;; *) lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 $as_echo_n "checking for $compiler option to produce PIC... " >&6; } if ${lt_cv_prog_compiler_pic+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic=$lt_prog_compiler_pic fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 $as_echo "$lt_cv_prog_compiler_pic" >&6; } lt_prog_compiler_pic=$lt_cv_prog_compiler_pic # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 $as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... 
" >&6; } if ${lt_cv_prog_compiler_pic_works+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_works=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic -DPIC" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_pic_works=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 $as_echo "$lt_cv_prog_compiler_pic_works" >&6; } if test x"$lt_cv_prog_compiler_pic_works" = xyes; then case $lt_prog_compiler_pic in "" | " "*) ;; *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; esac else lt_prog_compiler_pic= lt_prog_compiler_can_build_shared=no fi fi # # Check to make sure the static flag actually works. # wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 $as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } if ${lt_cv_prog_compiler_static_works+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_static_works=no save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS $lt_tmp_static_flag" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. cat conftest.err 1>&5 $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_static_works=yes fi else lt_cv_prog_compiler_static_works=yes fi fi $RM -r conftest* LDFLAGS="$save_LDFLAGS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 $as_echo "$lt_cv_prog_compiler_static_works" >&6; } if test x"$lt_cv_prog_compiler_static_works" = xyes; then : else lt_prog_compiler_static= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... 
" >&6; } if ${lt_cv_prog_compiler_c_o+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 $as_echo "$lt_cv_prog_compiler_c_o" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if ${lt_cv_prog_compiler_c_o+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. 
$RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 $as_echo "$lt_cv_prog_compiler_c_o" >&6; } hard_links="nottested" if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 $as_echo_n "checking if we can lock with hard links... " >&6; } hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 $as_echo "$hard_links" >&6; } if test "$hard_links" = no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 $as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } runpath_var= allow_undefined_flag= always_export_symbols=no archive_cmds= archive_expsym_cmds= compiler_needs_object=no enable_shared_with_static_runtimes=no export_dynamic_flag_spec= export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' hardcode_automatic=no hardcode_direct=no hardcode_direct_absolute=no hardcode_libdir_flag_spec= hardcode_libdir_separator= hardcode_minus_L=no hardcode_shlibpath_var=unsupported inherit_rpath=no link_all_deplibs=unknown module_cmds= module_expsym_cmds= old_archive_from_new_cmds= old_archive_from_expsyms_cmds= thread_safe_flag_spec= whole_archive_flag_spec= # include_expsyms should be a list of space-separated symbols to be *always* # included in the symbol list include_expsyms= # exclude_expsyms can be an extended regexp of symbols to exclude # it will be wrapped by ` (' and `)$', so one must not match beginning or # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', # as well as any symbol that contains `d'. exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out # platforms (ab)use it in PIC code, but their linkers get confused if # the symbol is explicitly referenced. Since portable code cannot # rely on this symbol name, it's probably fine to never include it in # preloaded symbol tables. # Exclude shared library initialization/finalization symbols. extract_expsyms_cmds= case $host_os in cygwin* | mingw* | pw32* | cegcc*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. if test "$GCC" != yes; then with_gnu_ld=no fi ;; interix*) # we just hope/assume this is gcc and not c89 (= MSVC++) with_gnu_ld=yes ;; openbsd*) with_gnu_ld=no ;; esac ld_shlibs=yes # On some targets, GNU ld is compatible enough with the native linker # that we're better off using the native interface for both. lt_use_gnu_ld_interface=no if test "$with_gnu_ld" = yes; then case $host_os in aix*) # The AIX port of GNU ld has always aspired to compatibility # with the native linker. 
However, as the warning in the GNU ld # block says, versions before 2.19.5* couldn't really create working # shared libraries, regardless of the interface used. case `$LD -v 2>&1` in *\ \(GNU\ Binutils\)\ 2.19.5*) ;; *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;; *\ \(GNU\ Binutils\)\ [3-9]*) ;; *) lt_use_gnu_ld_interface=yes ;; esac ;; *) lt_use_gnu_ld_interface=yes ;; esac fi if test "$lt_use_gnu_ld_interface" = yes; then # If archive_cmds runs LD, not CC, wlarc should be empty wlarc='${wl}' # Set some defaults for GNU ld with shared library support. These # are reset later if shared libraries are not supported. Putting them # here allows them to be overridden if necessary. runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' export_dynamic_flag_spec='${wl}--export-dynamic' # ancient GNU ld didn't support --whole-archive et. al. if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else whole_archive_flag_spec= fi supports_anon_versioning=no case `$LD -v 2>&1` in *GNU\ gold*) supports_anon_versioning=yes ;; *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... *\ 2.11.*) ;; # other 2.11 versions *) supports_anon_versioning=yes ;; esac # See if GNU ld supports shared libraries. case $host_os in aix[3-9]*) # On AIX/PPC, the GNU linker is very broken if test "$host_cpu" != ia64; then ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: the GNU linker, at least up to release 2.19, is reported *** to be unable to reliably create shared libraries on AIX. *** Therefore, libtool is disabling shared libraries support. If you *** really care for shared libraries, you may want to install binutils *** 2.20 or above, or modify your PATH so that a non-GNU linker is found. *** You will then need to restart the configuration process. _LT_EOF fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='' ;; m68k) archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then allow_undefined_flag=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' else ld_shlibs=no fi ;; cygwin* | mingw* | pw32* | cegcc*) # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, # as there is no search path for DLLs. 
hardcode_libdir_flag_spec='-L$libdir' export_dynamic_flag_spec='${wl}--export-all-symbols' allow_undefined_flag=unsupported always_export_symbols=no enable_shared_with_static_runtimes=yes export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else ld_shlibs=no fi ;; haiku*) archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' link_all_deplibs=yes ;; interix[3-9]*) hardcode_direct=no hardcode_shlibpath_var=no hardcode_libdir_flag_spec='${wl}-rpath,$libdir' export_dynamic_flag_spec='${wl}-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
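# Worked example (comments only) of the image-base expression used in the
# archive_cmds just below: `expr ${RANDOM-$$} % 4096 / 2 \* 262144 +
# 1342177280` reduces a random value to 0..2047, multiplies it by 262144
# (256 KiB) and adds 1342177280 (0x50000000), so the chosen base is
# 256 KiB-aligned and lies between 0x50000000 and 0x6FFC0000
# (2047 * 262144 + 1342177280 = 1878786048 = 0x6FFC0000), which matches the
# range described in the comment above.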
archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) tmp_diet=no if test "$host_os" = linux-dietlibc; then case $cc_basename in diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) esac fi if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ && test "$tmp_diet" = no then tmp_addflag=' $pic_flag' tmp_sharedflag='-shared' case $cc_basename,$host_cpu in pgcc*) # Portland Group C compiler whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' tmp_addflag=' $pic_flag' ;; pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group f77 and f90 compilers whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' tmp_addflag=' $pic_flag -Mnomain' ;; ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 tmp_addflag=' -i_dynamic' ;; efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 tmp_addflag=' -i_dynamic -nofor_main' ;; ifc* | ifort*) # Intel Fortran compiler tmp_addflag=' -nofor_main' ;; lf95*) # Lahey Fortran 8.1 whole_archive_flag_spec= tmp_sharedflag='--shared' ;; xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) tmp_sharedflag='-qmkshrobj' tmp_addflag= ;; nvcc*) # Cuda Compiler Driver 2.2 whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' compiler_needs_object=yes ;; esac case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C 5.9 whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' compiler_needs_object=yes tmp_sharedflag='-G' ;; *Sun\ F*) # Sun Fortran 8.3 tmp_sharedflag='-G' ;; esac archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' if test "x$supports_anon_versioning" = xyes; then archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' fi case $cc_basename in xlf* | bgf* | bgxlf* | mpixlf*) # IBM XL Fortran 10.1 on PPC cannot create shared libs itself whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' if test "x$supports_anon_versioning" = xyes; then archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ cat 
$export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' fi ;; esac else ld_shlibs=no fi ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' fi ;; solaris*) if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: The releases 2.8.* of the GNU linker cannot reliably *** create shared libraries on Solaris systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.9.1 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not *** reliably create shared libraries on SCO systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.16.91.0.3 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF ;; *) # For security reasons, it is highly recommended that you always # use absolute paths for naming shared libraries, and exclude the # DT_RUNPATH tag from executables and libraries. But doing so # requires that you compile everything twice, which is a pain. 
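# Hedged aside (comments only): the linker version check above keys off the
# first line of `$LD -v`.  To see which branch a given system will take, the
# same probe can be run by hand before configure, e.g.:
#
#   ${LD:-ld} -v 2>&1 | head -1
#
# and the reported version compared against the 2.16.91.0.3 threshold quoted
# in the warning above.  The exact wording of the output differs between
# GNU ld releases and vendor linkers.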
if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; esac ;; sunos4*) archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' wlarc= hardcode_direct=yes hardcode_shlibpath_var=no ;; *) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; esac if test "$ld_shlibs" = no; then runpath_var= hardcode_libdir_flag_spec= export_dynamic_flag_spec= whole_archive_flag_spec= fi else # PORTME fill in a description of your system's linker (not GNU ld) case $host_os in aix3*) allow_undefined_flag=unsupported always_export_symbols=yes archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. hardcode_minus_L=yes if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. hardcode_direct=unsupported fi ;; aix[4-9]*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to AIX nm, but means don't demangle with GNU nm # Also, AIX nm treats weak defined symbols like other global # defined symbols, whereas GNU nm marks them as "W". if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' else export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' fi aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) for ld_flag in $LDFLAGS; do if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then aix_use_runtimelinking=yes break fi done ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. 
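# Hedged example (comments only), following the note above: if linking on AIX
# fails with "TOC overflow", the suggested flags can be supplied when
# configure is invoked, for instance:
#
#   ./configure CFLAGS="-mminimal-toc" LDFLAGS="-Wl,-bbigtoc"
#
# Both flags are taken from the comment above; -mminimal-toc applies to
# gcc/g++ compilations, and -Wl,-bbigtoc is only needed when the smaller-TOC
# option alone is not enough.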
archive_cmds='' hardcode_direct=yes hardcode_direct_absolute=yes hardcode_libdir_separator=':' link_all_deplibs=yes file_list_spec='${wl}-f,' if test "$GCC" = yes; then case $host_os in aix4.[012]|aix4.[012].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L=yes hardcode_libdir_flag_spec='-L$libdir' hardcode_libdir_separator= fi ;; esac shared_flag='-shared' if test "$aix_use_runtimelinking" = yes; then shared_flag="$shared_flag "'${wl}-G' fi else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi export_dynamic_flag_spec='${wl}-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. always_export_symbols=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. allow_undefined_flag='-berok' # Determine the default libpath from the value encoded in an # empty executable. if test "${lt_cv_aix_libpath+set}" = set; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath_+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_="/usr/lib:/lib" fi fi aix_libpath=$lt_cv_aix_libpath_ fi hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' allow_undefined_flag="-z nodefs" archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. if test "${lt_cv_aix_libpath+set}" = set; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath_+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_="/usr/lib:/lib" fi fi aix_libpath=$lt_cv_aix_libpath_ fi hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag=' ${wl}-bernotok' allow_undefined_flag=' ${wl}-berok' if test "$with_gnu_ld" = yes; then # We only use this code for GNU lds that support --whole-archive. whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec='$convenience' fi archive_cmds_need_lc=yes # This is similar to how AIX traditionally builds its shared libraries. archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='' ;; m68k) archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; bsdi[45]*) export_dynamic_flag_spec=-rdynamic ;; cygwin* | mingw* | pw32* | cegcc*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. case $cc_basename in cl*) # Native MSVC hardcode_libdir_flag_spec=' ' allow_undefined_flag=unsupported always_export_symbols=yes file_list_spec='@' # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=".dll" # FIXME: Setting linknames here is a bad hack. archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; else sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. 
# _LT_TAGVAR(old_archive_from_new_cmds, )='true' enable_shared_with_static_runtimes=yes exclude_expsyms='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' # Don't use ranlib old_postinstall_cmds='chmod 644 $oldlib' postlink_cmds='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile="$lt_outputfile.exe" lt_tool_outputfile="$lt_tool_outputfile.exe" ;; esac~ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # Assume MSVC wrapper hardcode_libdir_flag_spec=' ' allow_undefined_flag=unsupported # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=".dll" # FIXME: Setting linknames here is a bad hack. archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' # The linker will automatically build a .lib file if we build a DLL. old_archive_from_new_cmds='true' # FIXME: Should let the user specify the lib program. old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' enable_shared_with_static_runtimes=yes ;; esac ;; darwin* | rhapsody*) archive_cmds_need_lc=no hardcode_direct=no hardcode_automatic=yes hardcode_shlibpath_var=unsupported if test "$lt_cv_ld_force_load" = "yes"; then whole_archive_flag_spec='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' else whole_archive_flag_spec='' fi link_all_deplibs=yes allow_undefined_flag="$_lt_dar_allow_undefined" case $cc_basename in ifort*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test "$_lt_dar_can_shared" = "yes"; then output_verbose_link_cmd=func_echo_all archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" else ld_shlibs=no fi ;; dgux*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-L$libdir' hardcode_shlibpath_var=no ;; # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor # support. Future versions do this automatically, but an explicit c++rt0.o # does not break anything, and helps significantly (at the cost of a little # extra space). 
freebsd2.2*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; # Unfortunately, older versions of FreeBSD 2 do not have this feature. freebsd2.*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes hardcode_minus_L=yes hardcode_shlibpath_var=no ;; # FreeBSD 3 and greater uses gcc -shared to do shared libraries. freebsd* | dragonfly*) archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; hpux9*) if test "$GCC" = yes; then archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' fi hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes export_dynamic_flag_spec='${wl}-E' ;; hpux10*) if test "$GCC" = yes && test "$with_gnu_ld" = no; then archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes hardcode_direct_absolute=yes export_dynamic_flag_spec='${wl}-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes fi ;; hpux11*) if test "$GCC" = yes && test "$with_gnu_ld" = no; then case $host_cpu in hppa*64*) archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else case $host_cpu in hppa*64*) archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) # Older versions of the 11.00 compiler do not understand -b yet # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5 $as_echo_n "checking if $CC understands -b... " >&6; } if ${lt_cv_prog_compiler__b+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler__b=no save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS -b" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. 
cat conftest.err 1>&5 $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler__b=yes fi else lt_cv_prog_compiler__b=yes fi fi $RM -r conftest* LDFLAGS="$save_LDFLAGS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5 $as_echo "$lt_cv_prog_compiler__b" >&6; } if test x"$lt_cv_prog_compiler__b" = xyes; then archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi ;; esac fi if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: case $host_cpu in hppa*64*|ia64*) hardcode_direct=no hardcode_shlibpath_var=no ;; *) hardcode_direct=yes hardcode_direct_absolute=yes export_dynamic_flag_spec='${wl}-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) if test "$GCC" = yes; then archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' # Try to use the -exported_symbol ld option, if it does not # work, assume that -exports_file does not work either and # implicitly export all symbols. # This should be the same for all languages, so no per-tag cache variable. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 $as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; } if ${lt_cv_irix_exported_symbol+:} false; then : $as_echo_n "(cached) " >&6 else save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int foo (void) { return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_cv_irix_exported_symbol=yes else lt_cv_irix_exported_symbol=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS="$save_LDFLAGS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 $as_echo "$lt_cv_irix_exported_symbol" >&6; } if test "$lt_cv_irix_exported_symbol" = yes; then archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' fi else archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' fi archive_cmds_need_lc='no' hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: inherit_rpath=yes link_all_deplibs=yes ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF fi hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; newsos6) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: hardcode_shlibpath_var=no ;; *nto* | *qnx*) ;; openbsd*) if test -f /usr/libexec/ld.so; then hardcode_direct=yes hardcode_shlibpath_var=no hardcode_direct_absolute=yes if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' hardcode_libdir_flag_spec='${wl}-rpath,$libdir' export_dynamic_flag_spec='${wl}-E' else case $host_os in openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-R$libdir' ;; *) archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ;; esac fi else ld_shlibs=no fi ;; os2*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes allow_undefined_flag=unsupported archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' ;; osf3*) if test "$GCC" = yes; then allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test 
-n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else allow_undefined_flag=' -expect_unresolved \*' archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' fi archive_cmds_need_lc='no' hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; osf4* | osf5*) # as osf3* with the addition of -msym flag if test "$GCC" = yes; then allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' else allow_undefined_flag=' -expect_unresolved \*' archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' # Both c and cxx compiler support -rpath directly hardcode_libdir_flag_spec='-rpath $libdir' fi archive_cmds_need_lc='no' hardcode_libdir_separator=: ;; solaris*) no_undefined_flag=' -z defs' if test "$GCC" = yes; then wlarc='${wl}' archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' else case `$CC -V 2>&1` in *"Compilers 5.0"*) wlarc='' archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' ;; *) wlarc='${wl}' archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ;; esac fi hardcode_libdir_flag_spec='-R$libdir' hardcode_shlibpath_var=no case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands `-z linker_flag'. GCC discards it without `$wl', # but is careful enough not to reorder. # Supported since Solaris 2.6 (maybe 2.5.1?) 
if test "$GCC" = yes; then whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' else whole_archive_flag_spec='-z allextract$convenience -z defaultextract' fi ;; esac link_all_deplibs=yes ;; sunos4*) if test "x$host_vendor" = xsequent; then # Use $CC to link under sequent, because it throws in some extra .o # files that make .init and .fini sections work. archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' fi hardcode_libdir_flag_spec='-L$libdir' hardcode_direct=yes hardcode_minus_L=yes hardcode_shlibpath_var=no ;; sysv4) case $host_vendor in sni) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes # is this really true??? ;; siemens) ## LD is ld it makes a PLAMLIB ## CC just makes a GrossModule. archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags' reload_cmds='$CC -r -o $output$reload_objs' hardcode_direct=no ;; motorola) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=no #Motorola manual says yes, but my tests say they lie ;; esac runpath_var='LD_RUN_PATH' hardcode_shlibpath_var=no ;; sysv4.3*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var=no export_dynamic_flag_spec='-Bexport' ;; sysv4*MP*) if test -d /usr/nec; then archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var=no runpath_var=LD_RUN_PATH hardcode_runpath_var=yes ld_shlibs=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) no_undefined_flag='${wl}-z,text' archive_cmds_need_lc=no hardcode_shlibpath_var=no runpath_var='LD_RUN_PATH' if test "$GCC" = yes; then archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We can NOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. 
no_undefined_flag='${wl}-z,text' allow_undefined_flag='${wl}-z,nodefs' archive_cmds_need_lc=no hardcode_shlibpath_var=no hardcode_libdir_flag_spec='${wl}-R,$libdir' hardcode_libdir_separator=':' link_all_deplibs=yes export_dynamic_flag_spec='${wl}-Bexport' runpath_var='LD_RUN_PATH' if test "$GCC" = yes; then archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; uts4*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-L$libdir' hardcode_shlibpath_var=no ;; *) ld_shlibs=no ;; esac if test x$host_vendor = xsni; then case $host in sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) export_dynamic_flag_spec='${wl}-Blargedynsym' ;; esac fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 $as_echo "$ld_shlibs" >&6; } test "$ld_shlibs" = no && can_build_shared=no with_gnu_ld=$with_gnu_ld # # Do we need to explicitly link libc? # case "x$archive_cmds_need_lc" in x|xyes) # Assume -lc should be added archive_cmds_need_lc=yes if test "$enable_shared" = yes && test "$GCC" = yes; then case $archive_cmds in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 $as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } if ${lt_cv_archive_cmds_need_lc+:} false; then : $as_echo_n "(cached) " >&6 else $RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl pic_flag=$lt_prog_compiler_pic compiler_flags=-v linker_flags=-v verstring= output_objdir=. libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag allow_undefined_flag= if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } then lt_cv_archive_cmds_need_lc=no else lt_cv_archive_cmds_need_lc=yes fi allow_undefined_flag=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5 $as_echo "$lt_cv_archive_cmds_need_lc" >&6; } archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc ;; esac fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 $as_echo_n "checking dynamic linker characteristics... 
" >&6; } if test "$GCC" = yes; then case $host_os in darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; *) lt_awk_arg="/^libraries:/" ;; esac case $host_os in mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;; *) lt_sed_strip_eq="s,=/,/,g" ;; esac lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` case $lt_search_path_spec in *\;*) # if the path contains ";" then we assume it to be the separator # otherwise default to the standard path separator (i.e. ":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'` ;; *) lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"` ;; esac # Ok, now we have the path, separated by spaces, we can step through it # and add multilib dir if necessary. lt_tmp_lt_search_path_spec= lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` for lt_sys_path in $lt_search_path_spec; do if test -d "$lt_sys_path/$lt_multi_os_dir"; then lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" else test -d "$lt_sys_path" && \ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" fi done lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' BEGIN {RS=" "; FS="/|\n";} { lt_foo=""; lt_count=0; for (lt_i = NF; lt_i > 0; lt_i--) { if ($lt_i != "" && $lt_i != ".") { if ($lt_i == "..") { lt_count++; } else { if (lt_count == 0) { lt_foo="/" $lt_i lt_foo; } else { lt_count--; } } } } if (lt_foo != "") { lt_freq[lt_foo]++; } if (lt_freq[lt_foo] == 1) { print lt_foo; } }'` # AWK program above erroneously prepends '/' to C:/dos/paths # for these hosts. case $host_os in mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ $SED 's,/\([A-Za-z]:\),\1,g'` ;; esac sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=".so" postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='${libname}${release}${shared_ext}$major' ;; aix[4-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no hardcode_into_libs=yes if test "$host_cpu" = ia64; then # AIX 5 supports IA64 library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line `#! .'. This would cause the generated library to # depend on `.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. 
case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # AIX (on Power*) has no versioning support, so currently we can not hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. if test "$aix_use_runtimelinking" = yes; then # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' else # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='${libname}${release}.a $libname.a' soname_spec='${libname}${release}${shared_ext}$major' fi shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='${libname}${shared_ext}' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[45]*) version_type=linux # correct to gnu/linux during the next big refactor need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=".dll" need_version=no need_lib_prefix=no case $GCC,$cc_basename in yes,*) # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api" ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ;; esac dynamic_linker='Win32 ld.exe' ;; *,cl*) # Native MSVC libname_spec='$name' soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' library_names_spec='${libname}.dll.lib' case $build_os in mingw*) sys_lib_search_path_spec= lt_save_ifs=$IFS IFS=';' for lt_path in $LIB do IFS=$lt_save_ifs # Let DOS variable expansion print the short 8.3 style file name. lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" done IFS=$lt_save_ifs # Convert to MSYS style. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ;; cygwin*) # Convert to unix form, then to dos form, then back to unix form # but this time dos style (no spaces!) so that the unix form looks # like /cygdrive/c/PROGRA~1:/cygdr... sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ;; *) sys_lib_search_path_spec="$LIB" if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi # FIXME: find the short name or the path components, as spaces are # common. (e.g. "Program Files" -> "PROGRA~1") ;; esac # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes dynamic_linker='Win32 link.exe' ;; *) # Assume MSVC wrapper library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' dynamic_linker='Win32 ld.exe' ;; esac # FIXME: first we should search . 
and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib" sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[23].*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2.*) shlibpath_overrides_runpath=yes ;; freebsd3.[01]* | freebsdelf3.[01]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; gnu*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; haiku*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no dynamic_linker="$host_os runtime_loader" library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LIBRARY_PATH shlibpath_overrides_runpath=yes sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
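    # HPUX_IA64_MODE, set earlier from the detected object format, selects
    # between the hpux32 and hpux64 library directories for the default
    # search path set just below.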
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' if test "X$HPUX_IA64_MODE" = X32; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" fi sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555, ... postinstall_cmds='chmod 555 $lib' # or fails outright, so override atomically: install_override_mode=555 ;; interix[3-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test "$lt_cv_prog_gnu_ld" = yes; then version_type=linux # correct to gnu/linux during the next big refactor else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; # This must be glibc/ELF. 
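  # The glibc/ELF branch below links a throwaway program with an rpath of
  # /foo and greps `$OBJDUMP -p' output for a RUNPATH entry: binutils that
  # emit DT_RUNPATH let LD_LIBRARY_PATH override the baked-in path, so the
  # answer is cached in lt_cv_shlibpath_overrides_runpath.  /etc/ld.so.conf
  # (including its `include' lines) is then folded into
  # sys_lib_dlsearch_path_spec.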
linux* | k*bsd*-gnu | kopensolaris*-gnu) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH if ${lt_cv_shlibpath_overrides_runpath+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_shlibpath_overrides_runpath=no save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \ LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\"" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : lt_cv_shlibpath_overrides_runpath=yes fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS libdir=$save_libdir fi shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes # Add ABI-specific directories to the system library path. sys_lib_dlsearch_path_spec="/lib64 /usr/lib64 /lib /usr/lib" # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. 
dynamic_linker='GNU/Linux ld.so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd*) version_type=sunos sys_lib_dlsearch_path_spec="/usr/lib" need_lib_prefix=no # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. case $host_os in openbsd3.3 | openbsd3.3.*) need_version=yes ;; *) need_version=no ;; esac library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then case $host_os in openbsd2.[89] | openbsd2.[89].*) shlibpath_overrides_runpath=no ;; *) shlibpath_overrides_runpath=yes ;; esac else shlibpath_overrides_runpath=yes fi ;; os2*) libname_spec='$name' shrext_cmds=".dll" need_lib_prefix=no library_names_spec='$libname${shared_ext} $libname.a' dynamic_linker='OS/2 ld.exe' shlibpath_var=LIBPATH ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test "$with_gnu_ld" = yes; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux # correct to gnu/linux during the next big refactor 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec ;then version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' soname_spec='$libname${shared_ext}.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=freebsd-elf need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test "$with_gnu_ld" = yes; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 $as_echo "$dynamic_linker" >&6; } test "$dynamic_linker" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test "$GCC" = yes; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" fi if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 $as_echo_n "checking how to hardcode library paths into programs... " >&6; } hardcode_action= if test -n "$hardcode_libdir_flag_spec" || test -n "$runpath_var" || test "X$hardcode_automatic" = "Xyes" ; then # We can hardcode non-existent directories. if test "$hardcode_direct" != no && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no && test "$hardcode_minus_L" != no; then # Linking always hardcodes the temporary library directory. 
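  # hardcode_action ends up as one of: relink (the temporary libdir gets
  # hardcoded, so objects are relinked at install time and fast_install is
  # switched off further down), immediate (nonexistent directories can be
  # hardcoded safely), or unsupported (only existing directories can be).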
hardcode_action=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. hardcode_action=unsupported fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 $as_echo "$hardcode_action" >&6; } if test "$hardcode_action" = relink || test "$inherit_rpath" = yes; then # Fast installation is not supported enable_fast_install=no elif test "$shlibpath_overrides_runpath" = yes || test "$enable_shared" = no; then # Fast installation is not necessary enable_fast_install=needless fi if test "x$enable_dlopen" != xyes; then enable_dlopen=unknown enable_dlopen_self=unknown enable_dlopen_self_static=unknown else lt_cv_dlopen=no lt_cv_dlopen_libs= case $host_os in beos*) lt_cv_dlopen="load_add_on" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ;; mingw* | pw32* | cegcc*) lt_cv_dlopen="LoadLibrary" lt_cv_dlopen_libs= ;; cygwin*) lt_cv_dlopen="dlopen" lt_cv_dlopen_libs= ;; darwin*) # if libdl is installed we need to link against it { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 $as_echo_n "checking for dlopen in -ldl... " >&6; } if ${ac_cv_lib_dl_dlopen+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else ac_cv_lib_dl_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 $as_echo "$ac_cv_lib_dl_dlopen" >&6; } if test "x$ac_cv_lib_dl_dlopen" = xyes; then : lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else lt_cv_dlopen="dyld" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes fi ;; *) ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" if test "x$ac_cv_func_shl_load" = xyes; then : lt_cv_dlopen="shl_load" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 $as_echo_n "checking for shl_load in -ldld... " >&6; } if ${ac_cv_lib_dld_shl_load+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char shl_load (); int main () { return shl_load (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dld_shl_load=yes else ac_cv_lib_dld_shl_load=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 $as_echo "$ac_cv_lib_dld_shl_load" >&6; } if test "x$ac_cv_lib_dld_shl_load" = xyes; then : lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld" else ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" if test "x$ac_cv_func_dlopen" = xyes; then : lt_cv_dlopen="dlopen" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 $as_echo_n "checking for dlopen in -ldl... " >&6; } if ${ac_cv_lib_dl_dlopen+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else ac_cv_lib_dl_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 $as_echo "$ac_cv_lib_dl_dlopen" >&6; } if test "x$ac_cv_lib_dl_dlopen" = xyes; then : lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 $as_echo_n "checking for dlopen in -lsvld... " >&6; } if ${ac_cv_lib_svld_dlopen+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lsvld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_svld_dlopen=yes else ac_cv_lib_svld_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 $as_echo "$ac_cv_lib_svld_dlopen" >&6; } if test "x$ac_cv_lib_svld_dlopen" = xyes; then : lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 $as_echo_n "checking for dld_link in -ldld... " >&6; } if ${ac_cv_lib_dld_dld_link+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char dld_link (); int main () { return dld_link (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dld_dld_link=yes else ac_cv_lib_dld_dld_link=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 $as_echo "$ac_cv_lib_dld_dld_link" >&6; } if test "x$ac_cv_lib_dld_dld_link" = xyes; then : lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld" fi fi fi fi fi fi ;; esac if test "x$lt_cv_dlopen" != xno; then enable_dlopen=yes else enable_dlopen=no fi case $lt_cv_dlopen in dlopen) save_CPPFLAGS="$CPPFLAGS" test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" save_LDFLAGS="$LDFLAGS" wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" save_LIBS="$LIBS" LIBS="$lt_cv_dlopen_libs $LIBS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 $as_echo_n "checking whether a program can dlopen itself... " >&6; } if ${lt_cv_dlopen_self+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF #line $LINENO "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif /* When -fvisbility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. */ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) int fnord () __attribute__((visibility("default"))); #endif int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else { if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; else puts (dlerror ()); } /* dlclose (self); */ } else puts (dlerror ()); return status; } _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) >&5 2>/dev/null lt_status=$? 
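  # Map the test program's exit status back onto the cache variable:
  # 1 (lt_dlno_uscore) and 2 (lt_dlneed_uscore) both mean self-dlopen works,
  # anything else is treated as no.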
case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;; esac else : # compilation failed lt_cv_dlopen_self=no fi fi rm -fr conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 $as_echo "$lt_cv_dlopen_self" >&6; } if test "x$lt_cv_dlopen_self" = xyes; then wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 $as_echo_n "checking whether a statically linked program can dlopen itself... " >&6; } if ${lt_cv_dlopen_self_static+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self_static=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF #line $LINENO "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif /* When -fvisbility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. */ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) int fnord () __attribute__((visibility("default"))); #endif int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else { if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; else puts (dlerror ()); } /* dlclose (self); */ } else puts (dlerror ()); return status; } _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) >&5 2>/dev/null lt_status=$? case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;; esac else : # compilation failed lt_cv_dlopen_self_static=no fi fi rm -fr conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5 $as_echo "$lt_cv_dlopen_self_static" >&6; } fi CPPFLAGS="$save_CPPFLAGS" LDFLAGS="$save_LDFLAGS" LIBS="$save_LIBS" ;; esac case $lt_cv_dlopen_self in yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; *) enable_dlopen_self=unknown ;; esac case $lt_cv_dlopen_self_static in yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; *) enable_dlopen_self_static=unknown ;; esac fi striplib= old_striplib= { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5 $as_echo_n "checking whether stripping libraries is possible... 
" >&6; } if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP" ; then striplib="$STRIP -x" old_striplib="$STRIP -S" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi ;; *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ;; esac fi # Report which library types will actually be built { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 $as_echo_n "checking if libtool supports shared libraries... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 $as_echo "$can_build_shared" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 $as_echo_n "checking whether to build shared libraries... " >&6; } test "$can_build_shared" = "no" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test "$enable_shared" = yes && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[4-9]*) if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then test "$enable_shared" = yes && enable_static=no fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 $as_echo "$enable_shared" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 $as_echo_n "checking whether to build static libraries... " >&6; } # Make sure either enable_shared or enable_static is yes. test "$enable_shared" = yes || enable_static=yes { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 $as_echo "$enable_static" >&6; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu CC="$lt_save_CC" if test -n "$CXX" && ( test "X$CXX" != "Xno" && ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || (test "X$CXX" != "Xg++"))) ; then ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C++ preprocessor" >&5 $as_echo_n "checking how to run the C++ preprocessor... " >&6; } if test -z "$CXXCPP"; then if ${ac_cv_prog_CXXCPP+:} false; then : $as_echo_n "(cached) " >&6 else # Double quotes because CXXCPP needs to be expanded for CXXCPP in "$CXX -E" "/lib/cpp" do ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CXXCPP=$CXXCPP fi CXXCPP=$ac_cv_prog_CXXCPP else ac_cv_prog_CXXCPP=$CXXCPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXXCPP" >&5 $as_echo "$CXXCPP" >&6; } ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "C++ preprocessor \"$CXXCPP\" fails sanity check See \`config.log' for more details" "$LINENO" 5; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu else _lt_caught_CXX_error=yes fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu archive_cmds_need_lc_CXX=no allow_undefined_flag_CXX= always_export_symbols_CXX=no archive_expsym_cmds_CXX= compiler_needs_object_CXX=no export_dynamic_flag_spec_CXX= hardcode_direct_CXX=no hardcode_direct_absolute_CXX=no hardcode_libdir_flag_spec_CXX= hardcode_libdir_separator_CXX= hardcode_minus_L_CXX=no hardcode_shlibpath_var_CXX=unsupported hardcode_automatic_CXX=no inherit_rpath_CXX=no module_cmds_CXX= module_expsym_cmds_CXX= link_all_deplibs_CXX=unknown old_archive_cmds_CXX=$old_archive_cmds reload_flag_CXX=$reload_flag reload_cmds_CXX=$reload_cmds no_undefined_flag_CXX= whole_archive_flag_spec_CXX= enable_shared_with_static_runtimes_CXX=no # Source file extension for C++ test sources. ac_ext=cpp # Object file extension for compiled C++ test sources. 
objext=o objext_CXX=$objext # No sense in running all these tests if we already determined that # the CXX compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test "$_lt_caught_CXX_error" != yes; then # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(int, char *[]) { return(0); }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC # save warnings/boilerplate of simple test code ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_CFLAGS=$CFLAGS lt_save_LD=$LD lt_save_GCC=$GCC GCC=$GXX lt_save_with_gnu_ld=$with_gnu_ld lt_save_path_LD=$lt_cv_path_LD if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx else $as_unset lt_cv_prog_gnu_ld fi if test -n "${lt_cv_path_LDCXX+set}"; then lt_cv_path_LD=$lt_cv_path_LDCXX else $as_unset lt_cv_path_LD fi test -z "${LDCXX+set}" || LD=$LDCXX CC=${CXX-"c++"} CFLAGS=$CXXFLAGS compiler=$CC compiler_CXX=$CC for cc_temp in $compiler""; do case $cc_temp in compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; \-*) ;; *) break;; esac done cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` if test -n "$compiler"; then # We don't want -fno-exception when compiling C++ code, so set the # no_builtin_flag separately if test "$GXX" = yes; then lt_prog_compiler_no_builtin_flag_CXX=' -fno-builtin' else lt_prog_compiler_no_builtin_flag_CXX= fi if test "$GXX" = yes; then # Set up default GNU C++ configuration # Check whether --with-gnu-ld was given. if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes else with_gnu_ld=no fi ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 $as_echo_n "checking for ld used by $CC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [\\/]* | ?:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. 
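    # A relative name cannot be canonicalized here, so leave the GNU-ld
    # question open and let the PATH scan below pick an ld to test.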
with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 $as_echo_n "checking for GNU ld... " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 $as_echo_n "checking for non-GNU ld... " >&6; } fi if ${lt_cv_path_LD+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$LD"; then lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 &5 $as_echo "$LD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 $as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } if ${lt_cv_prog_gnu_ld+:} false; then : $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 &5 $as_echo "$lt_cv_prog_gnu_ld" >&6; } with_gnu_ld=$lt_cv_prog_gnu_ld # Check if GNU C++ uses GNU ld as the underlying linker, since the # archiving commands below assume that GNU ld is being used. if test "$with_gnu_ld" = yes; then archive_cmds_CXX='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' # If archive_cmds runs LD, not CC, wlarc should be empty # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to # investigate it a little bit more. (MM) wlarc='${wl}' # ancient GNU ld didn't support --whole-archive et. al. if eval "`$CC -print-prog-name=ld` --help 2>&1" | $GREP 'no-whole-archive' > /dev/null; then whole_archive_flag_spec_CXX="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else whole_archive_flag_spec_CXX= fi else with_gnu_ld=no wlarc= # A generic and very simple default shared library creation # command for GNU C++ for the case where it uses the native # linker, instead of GNU ld. If possible, this setting should # overridden to take advantage of the native linker features on # the platform it is being used on. archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' fi # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else GXX=no with_gnu_ld=no wlarc= fi # PORTME: fill in a description of your system's C++ link characteristics { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... 
" >&6; } ld_shlibs_CXX=yes case $host_os in aix3*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aix[4-9]*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) for ld_flag in $LDFLAGS; do case $ld_flag in *-brtl*) aix_use_runtimelinking=yes break ;; esac done ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. archive_cmds_CXX='' hardcode_direct_CXX=yes hardcode_direct_absolute_CXX=yes hardcode_libdir_separator_CXX=':' link_all_deplibs_CXX=yes file_list_spec_CXX='${wl}-f,' if test "$GXX" = yes; then case $host_os in aix4.[012]|aix4.[012].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct_CXX=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L_CXX=yes hardcode_libdir_flag_spec_CXX='-L$libdir' hardcode_libdir_separator_CXX= fi esac shared_flag='-shared' if test "$aix_use_runtimelinking" = yes; then shared_flag="$shared_flag "'${wl}-G' fi else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi export_dynamic_flag_spec_CXX='${wl}-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to # export. always_export_symbols_CXX=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. allow_undefined_flag_CXX='-berok' # Determine the default libpath from the value encoded in an empty # executable. if test "${lt_cv_aix_libpath+set}" = set; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath__CXX+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. 
if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX="/usr/lib:/lib" fi fi aix_libpath=$lt_cv_aix_libpath__CXX fi hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec_CXX='${wl}-R $libdir:/usr/lib:/lib' allow_undefined_flag_CXX="-z nodefs" archive_expsym_cmds_CXX="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. if test "${lt_cv_aix_libpath+set}" = set; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath__CXX+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX="/usr/lib:/lib" fi fi aix_libpath=$lt_cv_aix_libpath__CXX fi hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag_CXX=' ${wl}-bernotok' allow_undefined_flag_CXX=' ${wl}-berok' if test "$with_gnu_ld" = yes; then # We only use this code for GNU lds that support --whole-archive. whole_archive_flag_spec_CXX='${wl}--whole-archive$convenience ${wl}--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec_CXX='$convenience' fi archive_cmds_need_lc_CXX=yes # This is similar to how AIX traditionally builds its shared # libraries. archive_expsym_cmds_CXX="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then allow_undefined_flag_CXX=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. 
FIXME archive_cmds_CXX='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' else ld_shlibs_CXX=no fi ;; chorus*) case $cc_basename in *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; cygwin* | mingw* | pw32* | cegcc*) case $GXX,$cc_basename in ,cl* | no,cl*) # Native MSVC # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. hardcode_libdir_flag_spec_CXX=' ' allow_undefined_flag_CXX=unsupported always_export_symbols_CXX=yes file_list_spec_CXX='@' # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=".dll" # FIXME: Setting linknames here is a bad hack. archive_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; else $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. # _LT_TAGVAR(old_archive_from_new_cmds, CXX)='true' enable_shared_with_static_runtimes_CXX=yes # Don't use ranlib old_postinstall_cmds_CXX='chmod 644 $oldlib' postlink_cmds_CXX='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile="$lt_outputfile.exe" lt_tool_outputfile="$lt_tool_outputfile.exe" ;; esac~ func_to_tool_file "$lt_outputfile"~ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # g++ # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless, # as there is no search path for DLLs. hardcode_libdir_flag_spec_CXX='-L$libdir' export_dynamic_flag_spec_CXX='${wl}--export-all-symbols' allow_undefined_flag_CXX=unsupported always_export_symbols_CXX=no enable_shared_with_static_runtimes_CXX=yes if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... 
archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else ld_shlibs_CXX=no fi ;; esac ;; darwin* | rhapsody*) archive_cmds_need_lc_CXX=no hardcode_direct_CXX=no hardcode_automatic_CXX=yes hardcode_shlibpath_var_CXX=unsupported if test "$lt_cv_ld_force_load" = "yes"; then whole_archive_flag_spec_CXX='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' else whole_archive_flag_spec_CXX='' fi link_all_deplibs_CXX=yes allow_undefined_flag_CXX="$_lt_dar_allow_undefined" case $cc_basename in ifort*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test "$_lt_dar_can_shared" = "yes"; then output_verbose_link_cmd=func_echo_all archive_cmds_CXX="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" module_cmds_CXX="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" archive_expsym_cmds_CXX="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" module_expsym_cmds_CXX="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" if test "$lt_cv_apple_cc_single_mod" != "yes"; then archive_cmds_CXX="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}" archive_expsym_cmds_CXX="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}" fi else ld_shlibs_CXX=no fi ;; dgux*) case $cc_basename in ec++*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; ghcx*) # Green Hills C++ Compiler # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; freebsd2.*) # C++ shared libraries reported to be fairly broken before # switch to ELF ld_shlibs_CXX=no ;; freebsd-elf*) archive_cmds_need_lc_CXX=no ;; freebsd* | dragonfly*) # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF # conventions ld_shlibs_CXX=yes ;; gnu*) ;; haiku*) archive_cmds_CXX='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' link_all_deplibs_CXX=yes ;; hpux9*) hardcode_libdir_flag_spec_CXX='${wl}+b ${wl}$libdir' hardcode_libdir_separator_CXX=: export_dynamic_flag_spec_CXX='${wl}-E' hardcode_direct_CXX=yes hardcode_minus_L_CXX=yes # Not in the search PATH, # but as the default # location of the library. 
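    # Pick link commands by compiler below: aCC uses its native -b command,
    # g++ a -shared -nostdlib variant; the old CC driver stays unsupported.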
case $cc_basename in CC*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aCC*) archive_cmds_CXX='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test "$GXX" = yes; then archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; hpux10*|hpux11*) if test $with_gnu_ld = no; then hardcode_libdir_flag_spec_CXX='${wl}+b ${wl}$libdir' hardcode_libdir_separator_CXX=: case $host_cpu in hppa*64*|ia64*) ;; *) export_dynamic_flag_spec_CXX='${wl}-E' ;; esac fi case $host_cpu in hppa*64*|ia64*) hardcode_direct_CXX=no hardcode_shlibpath_var_CXX=no ;; *) hardcode_direct_CXX=yes hardcode_direct_absolute_CXX=yes hardcode_minus_L_CXX=yes # Not in the search PATH, # but as the default # location of the library. ;; esac case $cc_basename in CC*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aCC*) case $host_cpu in hppa*64*) archive_cmds_CXX='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) archive_cmds_CXX='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) archive_cmds_CXX='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test "$GXX" = yes; then if test $with_gnu_ld = no; then case $host_cpu in hppa*64*) archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac fi else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; interix[3-9]*) hardcode_direct_CXX=no hardcode_shlibpath_var_CXX=no hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' export_dynamic_flag_spec_CXX='${wl}-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. archive_cmds_CXX='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' archive_expsym_cmds_CXX='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; irix5* | irix6*) case $cc_basename in CC*) # SGI C++ archive_cmds_CXX='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' # Archives containing C++ object files must be created using # "CC -ar", where "CC" is the IRIX C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. old_archive_cmds_CXX='$CC -ar -WR,-u -o $oldlib $oldobjs' ;; *) if test "$GXX" = yes; then if test "$with_gnu_ld" = no; then archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' fi fi link_all_deplibs_CXX=yes ;; esac hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_CXX=: inherit_rpath_CXX=yes ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in KCC*) # Kuck and Associates, Inc. 
(KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' archive_expsym_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' # Archives containing C++ object files must be created using # "CC -Bstatic", where "CC" is the KAI C++ compiler. old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs' ;; icpc* | ecpc* ) # Intel C++ with_gnu_ld=yes # version 8.0 and above of icpc choke on multiply defined symbols # if we add $predep_objects and $postdep_objects, however 7.1 and # earlier do not add the objects themselves. 
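        # The version sniff below keys off the `$CC -V` banner: a "Version 7."
        # match keeps the old-style commands, anything else takes the 8.0+ path.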
case `$CC -V 2>&1` in *"Version 7."*) archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 8.0 or newer tmp_idyn= case $host_cpu in ia64*) tmp_idyn=' -i_dynamic';; esac archive_cmds_CXX='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ;; esac archive_cmds_need_lc_CXX=no hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' whole_archive_flag_spec_CXX='${wl}--whole-archive$convenience ${wl}--no-whole-archive' ;; pgCC* | pgcpp*) # Portland Group C++ compiler case `$CC -V` in *pgCC\ [1-5].* | *pgcpp\ [1-5].*) prelink_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' old_archive_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ $RANLIB $oldlib' archive_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' archive_expsym_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' ;; *) # Version 6 and above use weak symbols archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' ;; esac hardcode_libdir_flag_spec_CXX='${wl}--rpath ${wl}$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' whole_archive_flag_spec_CXX='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' ;; cxx*) # Compaq C++ archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file $wl$export_symbols' runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec_CXX='-rpath $libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
# # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed' ;; xl* | mpixl* | bgxl*) # IBM XL 8.0 on PPC, with GNU ld hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' archive_cmds_CXX='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' if test "x$supports_anon_versioning" = xyes; then archive_expsym_cmds_CXX='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' fi ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 no_undefined_flag_CXX=' -zdefs' archive_cmds_CXX='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' archive_expsym_cmds_CXX='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols' hardcode_libdir_flag_spec_CXX='-R$libdir' whole_archive_flag_spec_CXX='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' compiler_needs_object_CXX=yes # Not sure whether something based on # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 # would be better. output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. 
old_archive_cmds_CXX='$CC -xar -o $oldlib $oldobjs' ;; esac ;; esac ;; lynxos*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; m88k*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; mvs*) case $cc_basename in cxx*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds_CXX='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags' wlarc= hardcode_libdir_flag_spec_CXX='-R$libdir' hardcode_direct_CXX=yes hardcode_shlibpath_var_CXX=no fi # Workaround some broken pre-1.5 toolchains output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"' ;; *nto* | *qnx*) ld_shlibs_CXX=yes ;; openbsd2*) # C++ shared libraries are fairly broken ld_shlibs_CXX=no ;; openbsd*) if test -f /usr/libexec/ld.so; then hardcode_direct_CXX=yes hardcode_shlibpath_var_CXX=no hardcode_direct_absolute_CXX=yes archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then archive_expsym_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib' export_dynamic_flag_spec_CXX='${wl}-E' whole_archive_flag_spec_CXX="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' fi output_verbose_link_cmd=func_echo_all else ld_shlibs_CXX=no fi ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' hardcode_libdir_separator_CXX=: # Archives containing C++ object files must be created using # the KAI C++ compiler. 
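        # Only osf3 seems to need the -Bstatic form for archiving; later
        # OSF releases use the plain compiler driver.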
case $host in osf3*) old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs' ;; *) old_archive_cmds_CXX='$CC -o $oldlib $oldobjs' ;; esac ;; RCC*) # Rational C++ 2.4.1 # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; cxx*) case $host in osf3*) allow_undefined_flag_CXX=' ${wl}-expect_unresolved ${wl}\*' archive_cmds_CXX='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && func_echo_all "${wl}-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' ;; *) allow_undefined_flag_CXX=' -expect_unresolved \*' archive_cmds_CXX='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' archive_expsym_cmds_CXX='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ echo "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~ $RM $lib.exp' hardcode_libdir_flag_spec_CXX='-rpath $libdir' ;; esac hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test "$GXX" = yes && test "$with_gnu_ld" = no; then allow_undefined_flag_CXX=' ${wl}-expect_unresolved ${wl}\*' case $host in osf3*) archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; *) archive_cmds_CXX='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; esac hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; psos*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; lcc*) # Lucid # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ archive_cmds_need_lc_CXX=yes no_undefined_flag_CXX=' -zdefs' archive_cmds_CXX='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' hardcode_libdir_flag_spec_CXX='-R$libdir' hardcode_shlibpath_var_CXX=no case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands `-z linker_flag'. # Supported since Solaris 2.6 (maybe 2.5.1?) whole_archive_flag_spec_CXX='-z allextract$convenience -z defaultextract' ;; esac link_all_deplibs_CXX=yes output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. old_archive_cmds_CXX='$CC -xar -o $oldlib $oldobjs' ;; gcx*) # Green Hills C++ Compiler archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' # The C++ compiler must be used to create the archive. old_archive_cmds_CXX='$CC $LDFLAGS -archive -o $oldlib $oldobjs' ;; *) # GNU C++ compiler with Solaris linker if test "$GXX" = yes && test "$with_gnu_ld" = no; then no_undefined_flag_CXX=' ${wl}-z ${wl}defs' if $CC --version | $GREP -v '^2\.7' > /dev/null; then archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # g++ 2.7 appears to require `-G' NOT `-shared' on this # platform. 
archive_cmds_CXX='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' fi hardcode_libdir_flag_spec_CXX='${wl}-R $wl$libdir' case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) whole_archive_flag_spec_CXX='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' ;; esac fi ;; esac ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) no_undefined_flag_CXX='${wl}-z,text' archive_cmds_need_lc_CXX=no hardcode_shlibpath_var_CXX=no runpath_var='LD_RUN_PATH' case $cc_basename in CC*) archive_cmds_CXX='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds_CXX='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We can NOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. 
no_undefined_flag_CXX='${wl}-z,text' allow_undefined_flag_CXX='${wl}-z,nodefs' archive_cmds_need_lc_CXX=no hardcode_shlibpath_var_CXX=no hardcode_libdir_flag_spec_CXX='${wl}-R,$libdir' hardcode_libdir_separator_CXX=':' link_all_deplibs_CXX=yes export_dynamic_flag_spec_CXX='${wl}-Bexport' runpath_var='LD_RUN_PATH' case $cc_basename in CC*) archive_cmds_CXX='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' old_archive_cmds_CXX='$CC -Tprelink_objects $oldobjs~ '"$old_archive_cmds_CXX" reload_cmds_CXX='$CC -Tprelink_objects $reload_objs~ '"$reload_cmds_CXX" ;; *) archive_cmds_CXX='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; vxworks*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5 $as_echo "$ld_shlibs_CXX" >&6; } test "$ld_shlibs_CXX" = no && can_build_shared=no GCC_CXX="$GXX" LD_CXX="$LD" ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... # Dependencies to place before and after the object being linked: predep_objects_CXX= postdep_objects_CXX= predeps_CXX= postdeps_CXX= compiler_lib_search_path_CXX= cat > conftest.$ac_ext <<_LT_EOF class Foo { public: Foo (void) { a = 0; } private: int a; }; _LT_EOF _lt_libdeps_save_CFLAGS=$CFLAGS case "$CC $CFLAGS " in #( *\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; *\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; *\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;; esac if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then # Parse the compiler output and extract the necessary # objects, libraries and library flags. # Sentinel used to keep track of whether or not we are before # the conftest object file. pre_test_object_deps_done=no for p in `eval "$output_verbose_link_cmd"`; do case ${prev}${p} in -L* | -R* | -l*) # Some compilers place space between "-{L,R}" and the path. # Remove the space. if test $p = "-L" || test $p = "-R"; then prev=$p continue fi # Expand the sysroot to ease extracting the directories later. if test -z "$prev"; then case $p in -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; esac fi case $p in =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; esac if test "$pre_test_object_deps_done" = no; then case ${prev} in -L | -R) # Internal compiler library paths should come after those # provided the user. The postdeps already come after the # user supplied libs so there is no need to process them. 
if test -z "$compiler_lib_search_path_CXX"; then compiler_lib_search_path_CXX="${prev}${p}" else compiler_lib_search_path_CXX="${compiler_lib_search_path_CXX} ${prev}${p}" fi ;; # The "-l" case would never come before the object being # linked, so don't bother handling this case. esac else if test -z "$postdeps_CXX"; then postdeps_CXX="${prev}${p}" else postdeps_CXX="${postdeps_CXX} ${prev}${p}" fi fi prev= ;; *.lto.$objext) ;; # Ignore GCC LTO objects *.$objext) # This assumes that the test object file only shows up # once in the compiler output. if test "$p" = "conftest.$objext"; then pre_test_object_deps_done=yes continue fi if test "$pre_test_object_deps_done" = no; then if test -z "$predep_objects_CXX"; then predep_objects_CXX="$p" else predep_objects_CXX="$predep_objects_CXX $p" fi else if test -z "$postdep_objects_CXX"; then postdep_objects_CXX="$p" else postdep_objects_CXX="$postdep_objects_CXX $p" fi fi ;; *) ;; # Ignore the rest. esac done # Clean up. rm -f a.out a.exe else echo "libtool.m4: error: problem compiling CXX test program" fi $RM -f confest.$objext CFLAGS=$_lt_libdeps_save_CFLAGS # PORTME: override above test on systems where it is broken case $host_os in interix[3-9]*) # Interix 3.5 installs completely hosed .la files for C++, so rather than # hack all around it, let's just trust "g++" to DTRT. predep_objects_CXX= postdep_objects_CXX= postdeps_CXX= ;; linux*) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 # The more standards-conforming stlport4 library is # incompatible with the Cstd library. Avoid specifying # it if it's in CXXFLAGS. Ignore libCrun as # -library=stlport4 depends on it. case " $CXX $CXXFLAGS " in *" -library=stlport4 "*) solaris_use_stlport4=yes ;; esac if test "$solaris_use_stlport4" != yes; then postdeps_CXX='-library=Cstd -library=Crun' fi ;; esac ;; solaris*) case $cc_basename in CC* | sunCC*) # The more standards-conforming stlport4 library is # incompatible with the Cstd library. Avoid specifying # it if it's in CXXFLAGS. Ignore libCrun as # -library=stlport4 depends on it. case " $CXX $CXXFLAGS " in *" -library=stlport4 "*) solaris_use_stlport4=yes ;; esac # Adding this requires a known-good setup of shared libraries for # Sun compiler versions before 5.6, else PIC objects from an old # archive will be linked into the output, leading to subtle bugs. if test "$solaris_use_stlport4" != yes; then postdeps_CXX='-library=Cstd -library=Crun' fi ;; esac ;; esac case " $postdeps_CXX " in *" -lc "*) archive_cmds_need_lc_CXX=no ;; esac compiler_lib_search_dirs_CXX= if test -n "${compiler_lib_search_path_CXX}"; then compiler_lib_search_dirs_CXX=`echo " ${compiler_lib_search_path_CXX}" | ${SED} -e 's! -L! !g' -e 's!^ !!'` fi lt_prog_compiler_wl_CXX= lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX= # C++ specific cases for pic, static, wl, etc. if test "$GXX" = yes; then lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='-static' case $host_os in aix*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_CXX='-Bstatic' fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support lt_prog_compiler_pic_CXX='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. lt_prog_compiler_pic_CXX='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. 
;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries lt_prog_compiler_pic_CXX='-DDLL_EXPORT' ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic_CXX='-fno-common' ;; *djgpp*) # DJGPP does not support shared libraries at all lt_prog_compiler_pic_CXX= ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. lt_prog_compiler_static_CXX= ;; interix[3-9]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic_CXX=-Kconform_pic fi ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) ;; *) lt_prog_compiler_pic_CXX='-fPIC' ;; esac ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic_CXX='-fPIC -shared' ;; *) lt_prog_compiler_pic_CXX='-fPIC' ;; esac else case $host_os in aix[4-9]*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_CXX='-Bstatic' else lt_prog_compiler_static_CXX='-bnso -bI:/lib/syscalls.exp' fi ;; chorus*) case $cc_basename in cxch68*) # Green Hills C++ Compiler # _LT_TAGVAR(lt_prog_compiler_static, CXX)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a" ;; esac ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic_CXX='-DDLL_EXPORT' ;; dgux*) case $cc_basename in ec++*) lt_prog_compiler_pic_CXX='-KPIC' ;; ghcx*) # Green Hills C++ Compiler lt_prog_compiler_pic_CXX='-pic' ;; *) ;; esac ;; freebsd* | dragonfly*) # FreeBSD uses GNU C++ ;; hpux9* | hpux10* | hpux11*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='${wl}-a ${wl}archive' if test "$host_cpu" != ia64; then lt_prog_compiler_pic_CXX='+Z' fi ;; aCC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='${wl}-a ${wl}archive' case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic_CXX='+Z' ;; esac ;; *) ;; esac ;; interix*) # This is c89, which is MS Visual C++ (no shared libs) # Anyone wants to do a port? ;; irix5* | irix6* | nonstopux*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='-non_shared' # CC pic flag -KPIC is the default. ;; *) ;; esac ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in KCC*) # KAI C++ Compiler lt_prog_compiler_wl_CXX='--backend -Wl,' lt_prog_compiler_pic_CXX='-fPIC' ;; ecpc* ) # old Intel C++ for x86_64 which still supported -KPIC. lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-static' ;; icpc* ) # Intel C++, used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. 
lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-fPIC' lt_prog_compiler_static_CXX='-static' ;; pgCC* | pgcpp*) # Portland Group C++ compiler lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-fpic' lt_prog_compiler_static_CXX='-Bstatic' ;; cxx*) # Compaq C++ # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX='-non_shared' ;; xlc* | xlC* | bgxl[cC]* | mpixl[cC]*) # IBM XL 8.0, 9.0 on PPC and BlueGene lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-qpic' lt_prog_compiler_static_CXX='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' lt_prog_compiler_wl_CXX='-Qoption ld ' ;; esac ;; esac ;; lynxos*) ;; m88k*) ;; mvs*) case $cc_basename in cxx*) lt_prog_compiler_pic_CXX='-W c,exportall' ;; *) ;; esac ;; netbsd*) ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic_CXX='-fPIC -shared' ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) lt_prog_compiler_wl_CXX='--backend -Wl,' ;; RCC*) # Rational C++ 2.4.1 lt_prog_compiler_pic_CXX='-pic' ;; cxx*) # Digital/Compaq C++ lt_prog_compiler_wl_CXX='-Wl,' # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX='-non_shared' ;; *) ;; esac ;; psos*) ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' lt_prog_compiler_wl_CXX='-Qoption ld ' ;; gcx*) # Green Hills C++ Compiler lt_prog_compiler_pic_CXX='-PIC' ;; *) ;; esac ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x lt_prog_compiler_pic_CXX='-pic' lt_prog_compiler_static_CXX='-Bstatic' ;; lcc*) # Lucid lt_prog_compiler_pic_CXX='-pic' ;; *) ;; esac ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 lt_prog_compiler_pic_CXX='-KPIC' ;; *) ;; esac ;; vxworks*) ;; *) lt_prog_compiler_can_build_shared_CXX=no ;; esac fi case $host_os in # For platforms which do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic_CXX= ;; *) lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC" ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 $as_echo_n "checking for $compiler option to produce PIC... " >&6; } if ${lt_cv_prog_compiler_pic_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_CXX=$lt_prog_compiler_pic_CXX fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_CXX" >&5 $as_echo "$lt_cv_prog_compiler_pic_CXX" >&6; } lt_prog_compiler_pic_CXX=$lt_cv_prog_compiler_pic_CXX # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works" >&5 $as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works... 
" >&6; } if ${lt_cv_prog_compiler_pic_works_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_works_CXX=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic_CXX -DPIC" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_pic_works_CXX=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_CXX" >&5 $as_echo "$lt_cv_prog_compiler_pic_works_CXX" >&6; } if test x"$lt_cv_prog_compiler_pic_works_CXX" = xyes; then case $lt_prog_compiler_pic_CXX in "" | " "*) ;; *) lt_prog_compiler_pic_CXX=" $lt_prog_compiler_pic_CXX" ;; esac else lt_prog_compiler_pic_CXX= lt_prog_compiler_can_build_shared_CXX=no fi fi # # Check to make sure the static flag actually works. # wl=$lt_prog_compiler_wl_CXX eval lt_tmp_static_flag=\"$lt_prog_compiler_static_CXX\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 $as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } if ${lt_cv_prog_compiler_static_works_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_static_works_CXX=no save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS $lt_tmp_static_flag" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. cat conftest.err 1>&5 $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_static_works_CXX=yes fi else lt_cv_prog_compiler_static_works_CXX=yes fi fi $RM -r conftest* LDFLAGS="$save_LDFLAGS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_CXX" >&5 $as_echo "$lt_cv_prog_compiler_static_works_CXX" >&6; } if test x"$lt_cv_prog_compiler_static_works_CXX" = xyes; then : else lt_prog_compiler_static_CXX= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... 
" >&6; } if ${lt_cv_prog_compiler_c_o_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o_CXX=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o_CXX=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5 $as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if ${lt_cv_prog_compiler_c_o_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o_CXX=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o_CXX=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. 
$RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5 $as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; } hard_links="nottested" if test "$lt_cv_prog_compiler_c_o_CXX" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 $as_echo_n "checking if we can lock with hard links... " >&6; } hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 $as_echo "$hard_links" >&6; } if test "$hard_links" = no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 $as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' case $host_os in aix[4-9]*) # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to AIX nm, but means don't demangle with GNU nm # Also, AIX nm treats weak defined symbols like other global defined # symbols, whereas GNU nm marks them as "W". if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' else export_symbols_cmds_CXX='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' fi ;; pw32*) export_symbols_cmds_CXX="$ltdll_cmds" ;; cygwin* | mingw* | cegcc*) case $cc_basename in cl*) exclude_expsyms_CXX='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' ;; *) export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' exclude_expsyms_CXX='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' ;; esac ;; *) export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5 $as_echo "$ld_shlibs_CXX" >&6; } test "$ld_shlibs_CXX" = no && can_build_shared=no with_gnu_ld_CXX=$with_gnu_ld # # Do we need to explicitly link libc? # case "x$archive_cmds_need_lc_CXX" in x|xyes) # Assume -lc should be added archive_cmds_need_lc_CXX=yes if test "$enable_shared" = yes && test "$GCC" = yes; then case $archive_cmds_CXX in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. 
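# The check just below replays the C++ archive command with verbose flags and
# greps its output for " -lc ".  The helper here is an illustrative, hand-runnable
# sketch of the same idea; it is never called by configure, and its name, its
# temporary file names and the ${CXX:-c++} fallback are examples only, not
# upstream libtool interfaces.
arc_sketch_cxx_passes_lc () {
  arc_sketch_cxx=${1:-${CXX:-c++}}
  arc_sketch_base=arc_sketch_lc$$
  echo 'extern "C" int arc_sketch_dummy () { return 0; }' > $arc_sketch_base.cpp
  $arc_sketch_cxx -fPIC -c $arc_sketch_base.cpp -o $arc_sketch_base.o 2>/dev/null || return 1
  if $arc_sketch_cxx -shared -v -o $arc_sketch_base.so $arc_sketch_base.o 2>&1 \
       | grep ' -lc ' >/dev/null 2>&1; then
    arc_sketch_result=no    # the driver already passes -lc; no need to add it
  else
    arc_sketch_result=yes   # -lc would have to be added explicitly
  fi
  rm -f $arc_sketch_base.cpp $arc_sketch_base.o $arc_sketch_base.so
  echo "archive_cmds_need_lc (sketch): $arc_sketch_result"
}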
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 $as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } if ${lt_cv_archive_cmds_need_lc_CXX+:} false; then : $as_echo_n "(cached) " >&6 else $RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl_CXX pic_flag=$lt_prog_compiler_pic_CXX compiler_flags=-v linker_flags=-v verstring= output_objdir=. libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag_CXX allow_undefined_flag_CXX= if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 (eval $archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } then lt_cv_archive_cmds_need_lc_CXX=no else lt_cv_archive_cmds_need_lc_CXX=yes fi allow_undefined_flag_CXX=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc_CXX" >&5 $as_echo "$lt_cv_archive_cmds_need_lc_CXX" >&6; } archive_cmds_need_lc_CXX=$lt_cv_archive_cmds_need_lc_CXX ;; esac fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 $as_echo_n "checking dynamic linker characteristics... " >&6; } library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=".so" postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='${libname}${release}${shared_ext}$major' ;; aix[4-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no hardcode_into_libs=yes if test "$host_cpu" = ia64; then # AIX 5 supports IA64 library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line `#! .'. This would cause the generated library to # depend on `.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # AIX (on Power*) has no versioning support, so currently we can not hardcode correct # soname into executable. 
Probably we can add versioning support to # collect2, so additional links can be useful in future. if test "$aix_use_runtimelinking" = yes; then # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' else # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='${libname}${release}.a $libname.a' soname_spec='${libname}${release}${shared_ext}$major' fi shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='${libname}${shared_ext}' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[45]*) version_type=linux # correct to gnu/linux during the next big refactor need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=".dll" need_version=no need_lib_prefix=no case $GCC,$cc_basename in yes,*) # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ;; esac dynamic_linker='Win32 ld.exe' ;; *,cl*) # Native MSVC libname_spec='$name' soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' library_names_spec='${libname}.dll.lib' case $build_os in mingw*) sys_lib_search_path_spec= lt_save_ifs=$IFS IFS=';' for lt_path in $LIB do IFS=$lt_save_ifs # Let DOS variable expansion print the short 8.3 style file name. lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" done IFS=$lt_save_ifs # Convert to MSYS style. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ;; cygwin*) # Convert to unix form, then to dos form, then back to unix form # but this time dos style (no spaces!) so that the unix form looks # like /cygdrive/c/PROGRA~1:/cygdr... sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ;; *) sys_lib_search_path_spec="$LIB" if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi # FIXME: find the short name or the path components, as spaces are # common. (e.g. "Program Files" -> "PROGRA~1") ;; esac # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes dynamic_linker='Win32 link.exe' ;; *) # Assume MSVC wrapper library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' dynamic_linker='Win32 ld.exe' ;; esac # FIXME: first we should search . 
and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[23].*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2.*) shlibpath_overrides_runpath=yes ;; freebsd3.[01]* | freebsdelf3.[01]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; gnu*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; haiku*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no dynamic_linker="$host_os runtime_loader" library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LIBRARY_PATH shlibpath_overrides_runpath=yes sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' if test "X$HPUX_IA64_MODE" = X32; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" fi sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555, ... postinstall_cmds='chmod 555 $lib' # or fails outright, so override atomically: install_override_mode=555 ;; interix[3-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test "$lt_cv_prog_gnu_ld" = yes; then version_type=linux # correct to gnu/linux during the next big refactor else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; # This must be glibc/ELF. 
linux* | k*bsd*-gnu | kopensolaris*-gnu) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH if ${lt_cv_shlibpath_overrides_runpath+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_shlibpath_overrides_runpath=no save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$lt_prog_compiler_wl_CXX\"; \ LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec_CXX\"" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : lt_cv_shlibpath_overrides_runpath=yes fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS libdir=$save_libdir fi shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes # Add ABI-specific directories to the system library path. sys_lib_dlsearch_path_spec="/lib64 /usr/lib64 /lib /usr/lib" # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. 
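# Illustrative sketch of the /etc/ld.so.conf handling above; it is never called
# by configure and its name is an example only.  The awk inlines the files named
# by "include" globs, the sed strips comments, hwcap lines and key=value
# settings, and tr joins the surviving directories into one space-separated
# list that is appended to sys_lib_dlsearch_path_spec.  A simplified,
# hand-runnable equivalent (one level of include expansion, absolute globs):
arc_sketch_ld_so_conf_dirs () {
  arc_sketch_conf=${1:-/etc/ld.so.conf}
  test -r "$arc_sketch_conf" || return 0
  { sed -n 's/^include  *//p' "$arc_sketch_conf" | while read arc_sketch_glob; do
      cat $arc_sketch_glob 2>/dev/null
    done
    sed '/^include /d' "$arc_sketch_conf"
  } | sed -e 's/#.*//' -e '/hwcap/d' -e 's/=.*$//' -e '/^ *$/d' | tr '\n' ' '
  echo
}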
dynamic_linker='GNU/Linux ld.so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd*) version_type=sunos sys_lib_dlsearch_path_spec="/usr/lib" need_lib_prefix=no # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. case $host_os in openbsd3.3 | openbsd3.3.*) need_version=yes ;; *) need_version=no ;; esac library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then case $host_os in openbsd2.[89] | openbsd2.[89].*) shlibpath_overrides_runpath=no ;; *) shlibpath_overrides_runpath=yes ;; esac else shlibpath_overrides_runpath=yes fi ;; os2*) libname_spec='$name' shrext_cmds=".dll" need_lib_prefix=no library_names_spec='$libname${shared_ext} $libname.a' dynamic_linker='OS/2 ld.exe' shlibpath_var=LIBPATH ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test "$with_gnu_ld" = yes; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux # correct to gnu/linux during the next big refactor 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec ;then version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' soname_spec='$libname${shared_ext}.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=freebsd-elf need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test "$with_gnu_ld" = yes; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 $as_echo "$dynamic_linker" >&6; } test "$dynamic_linker" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test "$GCC" = yes; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" fi if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 $as_echo_n "checking how to hardcode library paths into programs... " >&6; } hardcode_action_CXX= if test -n "$hardcode_libdir_flag_spec_CXX" || test -n "$runpath_var_CXX" || test "X$hardcode_automatic_CXX" = "Xyes" ; then # We can hardcode non-existent directories. if test "$hardcode_direct_CXX" != no && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test "$_LT_TAGVAR(hardcode_shlibpath_var, CXX)" != no && test "$hardcode_minus_L_CXX" != no; then # Linking always hardcodes the temporary library directory. 
hardcode_action_CXX=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action_CXX=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. hardcode_action_CXX=unsupported fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_CXX" >&5 $as_echo "$hardcode_action_CXX" >&6; } if test "$hardcode_action_CXX" = relink || test "$inherit_rpath_CXX" = yes; then # Fast installation is not supported enable_fast_install=no elif test "$shlibpath_overrides_runpath" = yes || test "$enable_shared" = no; then # Fast installation is not necessary enable_fast_install=needless fi fi # test -n "$compiler" CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS LDCXX=$LD LD=$lt_save_LD GCC=$lt_save_GCC with_gnu_ld=$lt_save_with_gnu_ld lt_cv_path_LDCXX=$lt_cv_path_LD lt_cv_path_LD=$lt_save_path_LD lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld fi # test "$_lt_caught_CXX_error" != yes ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu ac_config_commands="$ac_config_commands libtool" # Only expand once: # Extract the first word of "perl", so it can be a program name with args. set dummy perl; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_PERL+:} false; then : $as_echo_n "(cached) " >&6 else case $PERL in [\\/]* | ?:[\\/]*) ac_cv_path_PERL="$PERL" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_dummy=":" for as_dir in $as_dummy do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PERL="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path_PERL" && ac_cv_path_PERL="/usr/bin/perl" ;; esac fi PERL=$ac_cv_path_PERL if test -n "$PERL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PERL" >&5 $as_echo "$PERL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi # EL-5 compatibility. $(mkdir_p) is now obsolete. 
test -n "$MKDIR_P" || MKDIR_P="$mkdir_p" # Use arc for "pkgdir" instead of nordugrid-arc (@PACKAGE@) pkgdatadir='${datadir}/arc' pkgincludedir='${includedir}/arc' pkglibdir='${libdir}/arc' extpkglibdir='${libdir}/arc/external' pkglibexecdir='${libexecdir}/arc' ARCCLIENT_LIBS='$(top_builddir)/src/hed/libs/compute/libarccompute.la' ARCCLIENT_CFLAGS='-I$(top_srcdir)/include' ARCCOMMON_LIBS='$(top_builddir)/src/hed/libs/common/libarccommon.la' ARCCOMMON_CFLAGS='-I$(top_srcdir)/include' ARCCREDENTIAL_LIBS='$(top_builddir)/src/hed/libs/credential/libarccredential.la' ARCCREDENTIAL_CFLAGS='-I$(top_srcdir)/include' ARCDATA_LIBS='$(top_builddir)/src/hed/libs/data/libarcdata.la' ARCDATA_CFLAGS='-I$(top_srcdir)/include' ARCJOB_LIBS='$(top_builddir)/src/hed/libs/job/libarcjob.la' ARCJOB_CFLAGS='-I$(top_srcdir)/include' ARCLOADER_LIBS='$(top_builddir)/src/hed/libs/loader/libarcloader.la' ARCLOADER_CFLAGS='-I$(top_srcdir)/include' ARCMESSAGE_LIBS='$(top_builddir)/src/hed/libs/message/libarcmessage.la' ARCMESSAGE_CFLAGS='-I$(top_srcdir)/include' ARCSECURITY_LIBS='$(top_builddir)/src/hed/libs/security/libarcsecurity.la' ARCSECURITY_CFLAGS='-I$(top_srcdir)/include' ARCOTOKENS_LIBS='$(top_builddir)/src/hed/libs/security/libarcotokens.la' ARCOTOKENS_CFLAGS='-I$(top_srcdir)/include' ARCINFOSYS_LIBS='$(top_builddir)/src/hed/libs/infosys/libarcinfosys.la' ARCINFOSYS_CFLAGS='-I$(top_srcdir)/include' ARCWSADDRESSING_LIBS='$(top_builddir)/src/hed/libs/ws-addressing/libarcwsaddressing.la' ARCWSADDRESSING_CFLAGS='-I$(top_srcdir)/include' ARCWSSECURITY_LIBS='$(top_builddir)/src/hed/libs/ws-security/libarcwssecurity.la' ARCWSSECURITY_CFLAGS='-I$(top_srcdir)/include' ARCXMLSEC_LIBS='$(top_builddir)/src/hed/libs/xmlsec/libarcxmlsec.la' ARCXMLSEC_CFLAGS='-I$(top_srcdir)/include' get_relative_path() { olddir=`echo $1 | sed -e 's|/+|/|g' -e 's|^/||' -e 's|/*$|/|'` newdir=`echo $2 | sed -e 's|/+|/|g' -e 's|^/||' -e 's|/*$|/|'` O_IFS=$IFS IFS=/ relative="" common="" for i in $olddir; do if echo "$newdir" | grep -q "^$common$i/"; then common="$common$i/" else relative="../$relative" fi done IFS=$O_IFS echo $newdir | sed "s|^$common|$relative|" | sed 's|/*$||' } if test "X$prefix" = "XNONE"; then acl_final_prefix="$ac_default_prefix" else acl_final_prefix="$prefix" fi if test "X$exec_prefix" = "XNONE"; then acl_final_exec_prefix='${prefix}' else acl_final_exec_prefix="$exec_prefix" fi acl_save_prefix="$prefix" prefix="$acl_final_prefix" eval acl_final_exec_prefix=\"$acl_final_exec_prefix\" prefix="$acl_save_prefix" acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval instprefix="\"${exec_prefix}\"" eval arc_libdir="\"${libdir}\"" eval arc_bindir="\"${bindir}\"" eval arc_sbindir="\"${sbindir}\"" eval arc_pkglibdir="\"${libdir}/arc\"" eval arc_pkglibexecdir="\"${libexecdir}/arc\"" # It seems arc_datadir should be evaluated twice to be expanded fully. 
eval arc_datadir="\"${datadir}/arc\"" eval arc_datadir="\"${arc_datadir}\"" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" libsubdir=`get_relative_path "$instprefix" "$arc_libdir"` pkglibsubdir=`get_relative_path "$instprefix" "$arc_pkglibdir"` pkglibexecsubdir=`get_relative_path "$instprefix" "$arc_pkglibexecdir"` pkgdatasubdir=`get_relative_path "$instprefix" "$arc_datadir"` pkglibdir_rel_to_pkglibexecdir=`get_relative_path "$arc_pkglibexecdir" "$arc_pkglibdir"` sbindir_rel_to_pkglibexecdir=`get_relative_path "$arc_pkglibexecdir" "$arc_sbindir"` bindir_rel_to_pkglibexecdir=`get_relative_path "$arc_pkglibexecdir" "$arc_bindir"` pkgdatadir_rel_to_pkglibexecdir=`get_relative_path "$arc_pkglibexecdir" "$arc_datadir"` { $as_echo "$as_me:${as_lineno-$LINENO}: pkglib subdirectory is: $pkglibsubdir" >&5 $as_echo "$as_me: pkglib subdirectory is: $pkglibsubdir" >&6;} { $as_echo "$as_me:${as_lineno-$LINENO}: pkglibexec subdirectory is: $pkglibexecsubdir" >&5 $as_echo "$as_me: pkglibexec subdirectory is: $pkglibexecsubdir" >&6;} { $as_echo "$as_me:${as_lineno-$LINENO}: relative path of pkglib to pkglibexec is: $pkglibdir_rel_to_pkglibexecdir" >&5 $as_echo "$as_me: relative path of pkglib to pkglibexec is: $pkglibdir_rel_to_pkglibexecdir" >&6;} cat >>confdefs.h <<_ACEOF #define INSTPREFIX "${instprefix}" _ACEOF cat >>confdefs.h <<_ACEOF #define LIBSUBDIR "${libsubdir}" _ACEOF cat >>confdefs.h <<_ACEOF #define PKGLIBSUBDIR "${pkglibsubdir}" _ACEOF cat >>confdefs.h <<_ACEOF #define PKGLIBEXECSUBDIR "${pkglibexecsubdir}" _ACEOF cat >>confdefs.h <<_ACEOF #define PKGDATASUBDIR "${pkgdatasubdir}" _ACEOF # Check whether --with-systemd-units-location was given. if test "${with_systemd_units_location+set}" = set; then : withval=$with_systemd_units_location; unitsdir="$withval" else unitsdir= fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $unitsdir" >&5 $as_echo "$unitsdir" >&6; } if test "x$unitsdir" != "x"; then SYSTEMD_UNITS_ENABLED_TRUE= SYSTEMD_UNITS_ENABLED_FALSE='#' else SYSTEMD_UNITS_ENABLED_TRUE='#' SYSTEMD_UNITS_ENABLED_FALSE= fi # Check whether --with-sysv-scripts-location was given. if test "${with_sysv_scripts_location+set}" = set; then : withval=$with_sysv_scripts_location; initddirauto="no" initddir="$withval" else initddirauto="yes" initddir= case "${host}" in *linux* | *kfreebsd* | *gnu* ) for i in init.d rc.d/init.d rc.d; do if test -d "/etc/$i" -a ! -h "/etc/$i" ; then initddir="$sysconfdir/$i" break fi done if test -z "$initddir"; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: could not find a suitable location for the SYSV init scripts - not installing" >&5 $as_echo "$as_me: WARNING: could not find a suitable location for the SYSV init scripts - not installing" >&2;} fi ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $initddir" >&5 $as_echo "$initddir" >&6; } if ( test "x$initddirauto" == "xno" || test "x$unitsdir" = "x" ) && test "x$initddir" != "x"; then SYSV_SCRIPTS_ENABLED_TRUE= SYSV_SCRIPTS_ENABLED_FALSE='#' else SYSV_SCRIPTS_ENABLED_TRUE='#' SYSV_SCRIPTS_ENABLED_FALSE= fi # Check whether --with-cron-scripts-prefix was given. if test "${with_cron_scripts_prefix+set}" = set; then : withval=$with_cron_scripts_prefix; cronddir="$withval" else cronddir="$sysconfdir/cron.d" fi # gettext mkdir_p="$MKDIR_P" case $mkdir_p in [\\/$]* | ?:[\\/]*) ;; */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether NLS is requested" >&5 $as_echo_n "checking whether NLS is requested... 
" >&6; } # Check whether --enable-nls was given. if test "${enable_nls+set}" = set; then : enableval=$enable_nls; USE_NLS=$enableval else USE_NLS=yes fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_NLS" >&5 $as_echo "$USE_NLS" >&6; } GETTEXT_MACRO_VERSION=0.17 # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "msgfmt", so it can be a program name with args. set dummy msgfmt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_MSGFMT+:} false; then : $as_echo_n "(cached) " >&6 else case "$MSGFMT" in [\\/]* | ?:[\\/]*) ac_cv_path_MSGFMT="$MSGFMT" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." >&5 if $ac_dir/$ac_word --statistics /dev/null >&5 2>&1 && (if $ac_dir/$ac_word --statistics /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi); then ac_cv_path_MSGFMT="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" test -z "$ac_cv_path_MSGFMT" && ac_cv_path_MSGFMT=":" ;; esac fi MSGFMT="$ac_cv_path_MSGFMT" if test "$MSGFMT" != ":"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MSGFMT" >&5 $as_echo "$MSGFMT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi # Extract the first word of "gmsgfmt", so it can be a program name with args. set dummy gmsgfmt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_GMSGFMT+:} false; then : $as_echo_n "(cached) " >&6 else case $GMSGFMT in [\\/]* | ?:[\\/]*) ac_cv_path_GMSGFMT="$GMSGFMT" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_GMSGFMT="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path_GMSGFMT" && ac_cv_path_GMSGFMT="$MSGFMT" ;; esac fi GMSGFMT=$ac_cv_path_GMSGFMT if test -n "$GMSGFMT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $GMSGFMT" >&5 $as_echo "$GMSGFMT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi case `$MSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) MSGFMT_015=: ;; *) MSGFMT_015=$MSGFMT ;; esac case `$GMSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) GMSGFMT_015=: ;; *) GMSGFMT_015=$GMSGFMT ;; esac # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "xgettext", so it can be a program name with args. set dummy xgettext; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_XGETTEXT+:} false; then : $as_echo_n "(cached) " >&6 else case "$XGETTEXT" in [\\/]* | ?:[\\/]*) ac_cv_path_XGETTEXT="$XGETTEXT" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." >&5 if $ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null >&5 2>&1 && (if $ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi); then ac_cv_path_XGETTEXT="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" test -z "$ac_cv_path_XGETTEXT" && ac_cv_path_XGETTEXT=":" ;; esac fi XGETTEXT="$ac_cv_path_XGETTEXT" if test "$XGETTEXT" != ":"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $XGETTEXT" >&5 $as_echo "$XGETTEXT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi rm -f messages.po case `$XGETTEXT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) XGETTEXT_015=: ;; *) XGETTEXT_015=$XGETTEXT ;; esac # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. 
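# Sketch of the probe below, for orientation only: a tiny two-line shell
# script is written to conf$$.file and made executable; if 'test -x' then
# recognises it, 'test -x' is used to detect programs, otherwise the probe
# falls back to the weaker 'test -f'. Roughly:
#   echo '#! /bin/sh' > probe; echo 'exit 0' >> probe; chmod +x probe
#   if test -x probe; then ac_executable_p="test -x"; else ac_executable_p="test -f"; fi
# ('probe' is a placeholder name; the real code uses conf$$.file.)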
cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "msgmerge", so it can be a program name with args. set dummy msgmerge; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_MSGMERGE+:} false; then : $as_echo_n "(cached) " >&6 else case "$MSGMERGE" in [\\/]* | ?:[\\/]*) ac_cv_path_MSGMERGE="$MSGMERGE" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." >&5 if $ac_dir/$ac_word --update -q /dev/null /dev/null >&5 2>&1; then ac_cv_path_MSGMERGE="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" test -z "$ac_cv_path_MSGMERGE" && ac_cv_path_MSGMERGE=":" ;; esac fi MSGMERGE="$ac_cv_path_MSGMERGE" if test "$MSGMERGE" != ":"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MSGMERGE" >&5 $as_echo "$MSGMERGE" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$localedir" || localedir='${datadir}/locale' test -n "${XGETTEXT_EXTRA_OPTIONS+set}" || XGETTEXT_EXTRA_OPTIONS= ac_config_commands="$ac_config_commands po-directories" # Check whether --with-gnu-ld was given. if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes else with_gnu_ld=no fi # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by GCC" >&5 $as_echo_n "checking for ld used by GCC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [\\/]* | [A-Za-z]:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the path of ld ac_prog=`echo $ac_prog| sed 's%\\\\%/%g'` while echo $ac_prog | grep "$re_direlt" > /dev/null 2>&1; do ac_prog=`echo $ac_prog| sed "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 $as_echo_n "checking for GNU ld... " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 $as_echo_n "checking for non-GNU ld... " >&6; } fi if ${acl_cv_path_LD+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$LD"; then IFS="${IFS= }"; ac_save_ifs="$IFS"; IFS="${IFS}${PATH_SEPARATOR-:}" for ac_dir in $PATH; do test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then acl_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. 
I'd rather use --version, # but apparently some GNU ld's only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$acl_cv_path_LD" -v 2>&1 < /dev/null` in *GNU* | *'with BFD'*) test "$with_gnu_ld" != no && break ;; *) test "$with_gnu_ld" != yes && break ;; esac fi done IFS="$ac_save_ifs" else acl_cv_path_LD="$LD" # Let the user override the test with a path. fi fi LD="$acl_cv_path_LD" if test -n "$LD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5 $as_echo "$LD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 $as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } if ${acl_cv_prog_gnu_ld+:} false; then : $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU ld's only accept -v. case `$LD -v 2>&1 &5 $as_echo "$acl_cv_prog_gnu_ld" >&6; } with_gnu_ld=$acl_cv_prog_gnu_ld { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shared library run path origin" >&5 $as_echo_n "checking for shared library run path origin... " >&6; } if ${acl_cv_rpath+:} false; then : $as_echo_n "(cached) " >&6 else CC="$CC" GCC="$GCC" LDFLAGS="$LDFLAGS" LD="$LD" with_gnu_ld="$with_gnu_ld" \ ${CONFIG_SHELL-/bin/sh} "$ac_aux_dir/config.rpath" "$host" > conftest.sh . ./conftest.sh rm -f ./conftest.sh acl_cv_rpath=done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $acl_cv_rpath" >&5 $as_echo "$acl_cv_rpath" >&6; } wl="$acl_cv_wl" acl_libext="$acl_cv_libext" acl_shlibext="$acl_cv_shlibext" acl_libname_spec="$acl_cv_libname_spec" acl_library_names_spec="$acl_cv_library_names_spec" acl_hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec" acl_hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator" acl_hardcode_direct="$acl_cv_hardcode_direct" acl_hardcode_minus_L="$acl_cv_hardcode_minus_L" # Check whether --enable-rpath was given. if test "${enable_rpath+set}" = set; then : enableval=$enable_rpath; : else enable_rpath=yes fi acl_libdirstem=lib searchpath=`(LC_ALL=C $CC -print-search-dirs) 2>/dev/null | sed -n -e 's,^libraries: ,,p' | sed -e 's,^=,,'` if test -n "$searchpath"; then acl_save_IFS="${IFS= }"; IFS=":" for searchdir in $searchpath; do if test -d "$searchdir"; then case "$searchdir" in */lib64/ | */lib64 ) acl_libdirstem=lib64 ;; *) searchdir=`cd "$searchdir" && pwd` case "$searchdir" in */lib64 ) acl_libdirstem=lib64 ;; esac ;; esac fi done IFS="$acl_save_IFS" fi use_additional=yes acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" # Check whether --with-libiconv-prefix was given. 
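# Illustrative usage of this option (the install layout is an assumption,
# not a requirement): passing --with-libiconv-prefix=/opt/libiconv makes the
# search below look in
#   additional_includedir=/opt/libiconv/include
#   additional_libdir=/opt/libiconv/$acl_libdirstem   (lib or lib64)
# while --without-libiconv-prefix disables the additional search path
# entirely.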
if test "${with_libiconv_prefix+set}" = set; then : withval=$with_libiconv_prefix; if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" fi fi fi LIBICONV= LTLIBICONV= INCICONV= LIBICONV_PREFIX= rpathdirs= ltrpathdirs= names_already_handled= names_next_round='iconv ' while test -n "$names_next_round"; do names_this_round="$names_next_round" names_next_round= for name in $names_this_round; do already_handled= for n in $names_already_handled; do if test "$n" = "$name"; then already_handled=yes break fi done if test -z "$already_handled"; then names_already_handled="$names_already_handled $name" uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` eval value=\"\$HAVE_LIB$uppername\" if test -n "$value"; then if test "$value" = yes; then eval value=\"\$LIB$uppername\" test -z "$value" || LIBICONV="${LIBICONV}${LIBICONV:+ }$value" eval value=\"\$LTLIB$uppername\" test -z "$value" || LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }$value" else : fi else found_dir= found_la= found_so= found_a= eval libname=\"$acl_libname_spec\" # typically: libname=lib$name if test -n "$acl_shlibext"; then shrext=".$acl_shlibext" # typically: shrext=.so else shrext= fi if test $use_additional = yes; then dir="$additional_libdir" if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi fi if test "X$found_dir" = "X"; then for x in $LDFLAGS $LTLIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" case "$x" in -L*) dir=`echo "X$x" | sed -e 's/^X-L//'` if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi ;; esac if test "X$found_dir" != "X"; then break fi done fi if test "X$found_dir" != "X"; then LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-L$found_dir -l$name" if test "X$found_so" != "X"; then if test "$enable_rpath" = no || test "X$found_dir" = "X/usr/$acl_libdirstem"; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" else haveit= for x in $ltrpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $found_dir" fi if test "$acl_hardcode_direct" = yes; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" else if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" haveit= for x in $rpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $found_dir" fi else haveit= for x in $LDFLAGS $LIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then LIBICONV="${LIBICONV}${LIBICONV:+ }-L$found_dir" fi if test "$acl_hardcode_minus_L" != no; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" else LIBICONV="${LIBICONV}${LIBICONV:+ }-l$name" fi fi fi fi else if test "X$found_a" != "X"; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_a" else LIBICONV="${LIBICONV}${LIBICONV:+ }-L$found_dir -l$name" fi fi additional_includedir= case "$found_dir" in */$acl_libdirstem | */$acl_libdirstem/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` LIBICONV_PREFIX="$basedir" additional_includedir="$basedir/include" ;; esac if test "X$additional_includedir" != "X"; then if test "X$additional_includedir" != "X/usr/include"; then haveit= if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then for x in $CPPFLAGS $INCICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_includedir"; then INCICONV="${INCICONV}${INCICONV:+ }-I$additional_includedir" fi fi fi fi fi if test -n "$found_la"; then save_libdir="$libdir" case "$found_la" in */* | *\\*) . "$found_la" ;; *) . 
"./$found_la" ;; esac libdir="$save_libdir" for dep in $dependency_libs; do case "$dep" in -L*) additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then haveit= if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then haveit= for x in $LDFLAGS $LIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LIBICONV="${LIBICONV}${LIBICONV:+ }-L$additional_libdir" fi fi haveit= for x in $LDFLAGS $LTLIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-L$additional_libdir" fi fi fi fi ;; -R*) dir=`echo "X$dep" | sed -e 's/^X-R//'` if test "$enable_rpath" != no; then haveit= for x in $rpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $dir" fi haveit= for x in $ltrpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $dir" fi fi ;; -l*) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` ;; *.la) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` ;; *) LIBICONV="${LIBICONV}${LIBICONV:+ }$dep" LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }$dep" ;; esac done fi else LIBICONV="${LIBICONV}${LIBICONV:+ }-l$name" LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-l$name" fi fi fi done done if test "X$rpathdirs" != "X"; then if test -n "$acl_hardcode_libdir_separator"; then alldirs= for found_dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" done acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBICONV="${LIBICONV}${LIBICONV:+ }$flag" else for found_dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$found_dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBICONV="${LIBICONV}${LIBICONV:+ }$flag" done fi fi if test "X$ltrpathdirs" != "X"; then for found_dir in $ltrpathdirs; do LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-R$found_dir" done fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CFPreferencesCopyAppValue" >&5 $as_echo_n "checking for CFPreferencesCopyAppValue... " >&6; } if ${gt_cv_func_CFPreferencesCopyAppValue+:} false; then : $as_echo_n "(cached) " >&6 else gt_save_LIBS="$LIBS" LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include int main () { CFPreferencesCopyAppValue(NULL, NULL) ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : gt_cv_func_CFPreferencesCopyAppValue=yes else gt_cv_func_CFPreferencesCopyAppValue=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$gt_save_LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gt_cv_func_CFPreferencesCopyAppValue" >&5 $as_echo "$gt_cv_func_CFPreferencesCopyAppValue" >&6; } if test $gt_cv_func_CFPreferencesCopyAppValue = yes; then $as_echo "#define HAVE_CFPREFERENCESCOPYAPPVALUE 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CFLocaleCopyCurrent" >&5 $as_echo_n "checking for CFLocaleCopyCurrent... " >&6; } if ${gt_cv_func_CFLocaleCopyCurrent+:} false; then : $as_echo_n "(cached) " >&6 else gt_save_LIBS="$LIBS" LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { CFLocaleCopyCurrent(); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : gt_cv_func_CFLocaleCopyCurrent=yes else gt_cv_func_CFLocaleCopyCurrent=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$gt_save_LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gt_cv_func_CFLocaleCopyCurrent" >&5 $as_echo "$gt_cv_func_CFLocaleCopyCurrent" >&6; } if test $gt_cv_func_CFLocaleCopyCurrent = yes; then $as_echo "#define HAVE_CFLOCALECOPYCURRENT 1" >>confdefs.h fi INTL_MACOSX_LIBS= if test $gt_cv_func_CFPreferencesCopyAppValue = yes || test $gt_cv_func_CFLocaleCopyCurrent = yes; then INTL_MACOSX_LIBS="-Wl,-framework -Wl,CoreFoundation" fi LIBINTL= LTLIBINTL= POSUB= case " $gt_needs " in *" need-formatstring-macros "*) gt_api_version=3 ;; *" need-ngettext "*) gt_api_version=2 ;; *) gt_api_version=1 ;; esac gt_func_gnugettext_libc="gt_cv_func_gnugettext${gt_api_version}_libc" gt_func_gnugettext_libintl="gt_cv_func_gnugettext${gt_api_version}_libintl" if test "$USE_NLS" = "yes"; then gt_use_preinstalled_gnugettext=no if test $gt_api_version -ge 3; then gt_revision_test_code=' #ifndef __GNU_GETTEXT_SUPPORTED_REVISION #define __GNU_GETTEXT_SUPPORTED_REVISION(major) ((major) == 0 ? 0 : -1) #endif typedef int array [2 * (__GNU_GETTEXT_SUPPORTED_REVISION(0) >= 1) - 1]; ' else gt_revision_test_code= fi if test $gt_api_version -ge 2; then gt_expression_test_code=' + * ngettext ("", "", 0)' else gt_expression_test_code= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU gettext in libc" >&5 $as_echo_n "checking for GNU gettext in libc... " >&6; } if eval \${$gt_func_gnugettext_libc+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern int *_nl_domain_bindings; int main () { bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_domain_bindings ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$gt_func_gnugettext_libc=yes" else eval "$gt_func_gnugettext_libc=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi eval ac_res=\$$gt_func_gnugettext_libc { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then am_save_CPPFLAGS="$CPPFLAGS" for element in $INCICONV; do haveit= for x in $CPPFLAGS; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for iconv" >&5 $as_echo_n "checking for iconv... " >&6; } if ${am_cv_func_iconv+:} false; then : $as_echo_n "(cached) " >&6 else am_cv_func_iconv="no, consider installing GNU libiconv" am_cv_lib_iconv=no cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : am_cv_func_iconv=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test "$am_cv_func_iconv" != yes; then am_save_LIBS="$LIBS" LIBS="$LIBS $LIBICONV" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : am_cv_lib_iconv=yes am_cv_func_iconv=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$am_save_LIBS" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_func_iconv" >&5 $as_echo "$am_cv_func_iconv" >&6; } if test "$am_cv_func_iconv" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working iconv" >&5 $as_echo_n "checking for working iconv... " >&6; } if ${am_cv_func_iconv_works+:} false; then : $as_echo_n "(cached) " >&6 else am_save_LIBS="$LIBS" if test $am_cv_lib_iconv = yes; then LIBS="$LIBS $LIBICONV" fi if test "$cross_compiling" = yes; then : case "$host_os" in aix* | hpux*) am_cv_func_iconv_works="guessing no" ;; *) am_cv_func_iconv_works="guessing yes" ;; esac else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { /* Test against AIX 5.1 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_utf8_to_88591 = iconv_open ("ISO8859-1", "UTF-8"); if (cd_utf8_to_88591 != (iconv_t)(-1)) { static const char input[] = "\342\202\254"; /* EURO SIGN */ char buf[10]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_utf8_to_88591, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) return 1; } } #if 0 /* This bug could be worked around by the caller. */ /* Test against HP-UX 11.11 bug: Positive return value instead of 0. 
*/ { iconv_t cd_88591_to_utf8 = iconv_open ("utf8", "iso88591"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static const char input[] = "\304rger mit b\366sen B\374bchen ohne Augenma\337"; char buf[50]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_88591_to_utf8, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if ((int)res > 0) return 1; } } #endif /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is provided. */ if (/* Try standardized names. */ iconv_open ("UTF-8", "EUC-JP") == (iconv_t)(-1) /* Try IRIX, OSF/1 names. */ && iconv_open ("UTF-8", "eucJP") == (iconv_t)(-1) /* Try AIX names. */ && iconv_open ("UTF-8", "IBM-eucJP") == (iconv_t)(-1) /* Try HP-UX names. */ && iconv_open ("utf8", "eucJP") == (iconv_t)(-1)) return 1; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : am_cv_func_iconv_works=yes else am_cv_func_iconv_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi LIBS="$am_save_LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_func_iconv_works" >&5 $as_echo "$am_cv_func_iconv_works" >&6; } case "$am_cv_func_iconv_works" in *no) am_func_iconv=no am_cv_lib_iconv=no ;; *) am_func_iconv=yes ;; esac else am_func_iconv=no am_cv_lib_iconv=no fi if test "$am_func_iconv" = yes; then $as_echo "#define HAVE_ICONV 1" >>confdefs.h fi if test "$am_cv_lib_iconv" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libiconv" >&5 $as_echo_n "checking how to link with libiconv... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBICONV" >&5 $as_echo "$LIBICONV" >&6; } else CPPFLAGS="$am_save_CPPFLAGS" LIBICONV= LTLIBICONV= fi use_additional=yes acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" # Check whether --with-libintl-prefix was given. 
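# The block below repeats, for libintl, the same search machinery used for
# libiconv above: it walks names_next_round (starting from 'intl'), inspects
# any libtool .la file it finds, and feeds the dependency_libs entries back
# into the next round. For example (hypothetical .la content), a line such as
#   dependency_libs=' -L/opt/gettext/lib -liconv'
# would add /opt/gettext/lib to the -L search path and queue 'iconv' for the
# next round of the loop.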
if test "${with_libintl_prefix+set}" = set; then : withval=$with_libintl_prefix; if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" fi fi fi LIBINTL= LTLIBINTL= INCINTL= LIBINTL_PREFIX= rpathdirs= ltrpathdirs= names_already_handled= names_next_round='intl ' while test -n "$names_next_round"; do names_this_round="$names_next_round" names_next_round= for name in $names_this_round; do already_handled= for n in $names_already_handled; do if test "$n" = "$name"; then already_handled=yes break fi done if test -z "$already_handled"; then names_already_handled="$names_already_handled $name" uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` eval value=\"\$HAVE_LIB$uppername\" if test -n "$value"; then if test "$value" = yes; then eval value=\"\$LIB$uppername\" test -z "$value" || LIBINTL="${LIBINTL}${LIBINTL:+ }$value" eval value=\"\$LTLIB$uppername\" test -z "$value" || LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }$value" else : fi else found_dir= found_la= found_so= found_a= eval libname=\"$acl_libname_spec\" # typically: libname=lib$name if test -n "$acl_shlibext"; then shrext=".$acl_shlibext" # typically: shrext=.so else shrext= fi if test $use_additional = yes; then dir="$additional_libdir" if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi fi if test "X$found_dir" = "X"; then for x in $LDFLAGS $LTLIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" case "$x" in -L*) dir=`echo "X$x" | sed -e 's/^X-L//'` if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi ;; esac if test "X$found_dir" != "X"; then break fi done fi if test "X$found_dir" != "X"; then LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-L$found_dir -l$name" if test "X$found_so" != "X"; then if test "$enable_rpath" = no || test "X$found_dir" = "X/usr/$acl_libdirstem"; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" else haveit= for x in $ltrpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $found_dir" fi if test "$acl_hardcode_direct" = yes; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" else if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" haveit= for x in $rpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $found_dir" fi else haveit= for x in $LDFLAGS $LIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then LIBINTL="${LIBINTL}${LIBINTL:+ }-L$found_dir" fi if test "$acl_hardcode_minus_L" != no; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" else LIBINTL="${LIBINTL}${LIBINTL:+ }-l$name" fi fi fi fi else if test "X$found_a" != "X"; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_a" else LIBINTL="${LIBINTL}${LIBINTL:+ }-L$found_dir -l$name" fi fi additional_includedir= case "$found_dir" in */$acl_libdirstem | */$acl_libdirstem/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` LIBINTL_PREFIX="$basedir" additional_includedir="$basedir/include" ;; esac if test "X$additional_includedir" != "X"; then if test "X$additional_includedir" != "X/usr/include"; then haveit= if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then for x in $CPPFLAGS $INCINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_includedir"; then INCINTL="${INCINTL}${INCINTL:+ }-I$additional_includedir" fi fi fi fi fi if test -n "$found_la"; then save_libdir="$libdir" case "$found_la" in */* | *\\*) . "$found_la" ;; *) . 
"./$found_la" ;; esac libdir="$save_libdir" for dep in $dependency_libs; do case "$dep" in -L*) additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then haveit= if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then haveit= for x in $LDFLAGS $LIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LIBINTL="${LIBINTL}${LIBINTL:+ }-L$additional_libdir" fi fi haveit= for x in $LDFLAGS $LTLIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-L$additional_libdir" fi fi fi fi ;; -R*) dir=`echo "X$dep" | sed -e 's/^X-R//'` if test "$enable_rpath" != no; then haveit= for x in $rpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $dir" fi haveit= for x in $ltrpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $dir" fi fi ;; -l*) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` ;; *.la) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` ;; *) LIBINTL="${LIBINTL}${LIBINTL:+ }$dep" LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }$dep" ;; esac done fi else LIBINTL="${LIBINTL}${LIBINTL:+ }-l$name" LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-l$name" fi fi fi done done if test "X$rpathdirs" != "X"; then if test -n "$acl_hardcode_libdir_separator"; then alldirs= for found_dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" done acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBINTL="${LIBINTL}${LIBINTL:+ }$flag" else for found_dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$found_dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBINTL="${LIBINTL}${LIBINTL:+ }$flag" done fi fi if test "X$ltrpathdirs" != "X"; then for found_dir in $ltrpathdirs; do LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-R$found_dir" done fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU gettext in libintl" >&5 $as_echo_n "checking for GNU gettext in libintl... " >&6; } if eval \${$gt_func_gnugettext_libintl+:} false; then : $as_echo_n "(cached) " >&6 else gt_save_CPPFLAGS="$CPPFLAGS" CPPFLAGS="$CPPFLAGS $INCINTL" gt_save_LIBS="$LIBS" LIBS="$LIBS $LIBINTL" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *); int main () { bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_expand_alias ("") ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$gt_func_gnugettext_libintl=yes" else eval "$gt_func_gnugettext_libintl=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" != yes; } && test -n "$LIBICONV"; then LIBS="$LIBS $LIBICONV" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *); int main () { bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_expand_alias ("") ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : LIBINTL="$LIBINTL $LIBICONV" LTLIBINTL="$LTLIBINTL $LTLIBICONV" eval "$gt_func_gnugettext_libintl=yes" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi CPPFLAGS="$gt_save_CPPFLAGS" LIBS="$gt_save_LIBS" fi eval ac_res=\$$gt_func_gnugettext_libintl { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } fi if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" = "yes"; } \ || { { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; } \ && test "$PACKAGE" != gettext-runtime \ && test "$PACKAGE" != gettext-tools; }; then gt_use_preinstalled_gnugettext=yes else LIBINTL= LTLIBINTL= INCINTL= fi if test -n "$INTL_MACOSX_LIBS"; then if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then LIBINTL="$LIBINTL $INTL_MACOSX_LIBS" LTLIBINTL="$LTLIBINTL $INTL_MACOSX_LIBS" fi fi if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then $as_echo "#define ENABLE_NLS 1" >>confdefs.h else USE_NLS=no fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to use NLS" >&5 $as_echo_n "checking whether to use NLS... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_NLS" >&5 $as_echo "$USE_NLS" >&6; } if test "$USE_NLS" = "yes"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking where the gettext function comes from" >&5 $as_echo_n "checking where the gettext function comes from... " >&6; } if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then gt_source="external libintl" else gt_source="libc" fi else gt_source="included intl directory" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gt_source" >&5 $as_echo "$gt_source" >&6; } fi if test "$USE_NLS" = "yes"; then if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libintl" >&5 $as_echo_n "checking how to link with libintl... 
" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBINTL" >&5 $as_echo "$LIBINTL" >&6; } for element in $INCINTL; do haveit= for x in $CPPFLAGS; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" fi done fi $as_echo "#define HAVE_GETTEXT 1" >>confdefs.h $as_echo "#define HAVE_DCGETTEXT 1" >>confdefs.h fi POSUB=po fi INTLLIBS="$LIBINTL" [ -r $srcdir/po/POTFILES.in ] || touch $srcdir/po/POTFILES.in # Portable 64bit file offsets # Check whether --enable-largefile was given. if test "${enable_largefile+set}" = set; then : enableval=$enable_largefile; fi if test "$enable_largefile" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for special C compiler options needed for large files" >&5 $as_echo_n "checking for special C compiler options needed for large files... " >&6; } if ${ac_cv_sys_largefile_CC+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_sys_largefile_CC=no if test "$GCC" != yes; then ac_save_CC=$CC while :; do # IRIX 6.2 and later do not support large files by default, # so use the C compiler's -n32 option if that helps. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : break fi rm -f core conftest.err conftest.$ac_objext CC="$CC -n32" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_sys_largefile_CC=' -n32'; break fi rm -f core conftest.err conftest.$ac_objext break done CC=$ac_save_CC rm -f conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_largefile_CC" >&5 $as_echo "$ac_cv_sys_largefile_CC" >&6; } if test "$ac_cv_sys_largefile_CC" != no; then CC=$CC$ac_cv_sys_largefile_CC fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _FILE_OFFSET_BITS value needed for large files" >&5 $as_echo_n "checking for _FILE_OFFSET_BITS value needed for large files... " >&6; } if ${ac_cv_sys_file_offset_bits+:} false; then : $as_echo_n "(cached) " >&6 else while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_sys_file_offset_bits=no; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #define _FILE_OFFSET_BITS 64 #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. 
*/ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_sys_file_offset_bits=64; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_sys_file_offset_bits=unknown break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_file_offset_bits" >&5 $as_echo "$ac_cv_sys_file_offset_bits" >&6; } case $ac_cv_sys_file_offset_bits in #( no | unknown) ;; *) cat >>confdefs.h <<_ACEOF #define _FILE_OFFSET_BITS $ac_cv_sys_file_offset_bits _ACEOF ;; esac rm -rf conftest* if test $ac_cv_sys_file_offset_bits = unknown; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _LARGE_FILES value needed for large files" >&5 $as_echo_n "checking for _LARGE_FILES value needed for large files... " >&6; } if ${ac_cv_sys_large_files+:} false; then : $as_echo_n "(cached) " >&6 else while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_sys_large_files=no; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #define _LARGE_FILES 1 #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_sys_large_files=1; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_sys_large_files=unknown break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_large_files" >&5 $as_echo "$ac_cv_sys_large_files" >&6; } case $ac_cv_sys_large_files in #( no | unknown) ;; *) cat >>confdefs.h <<_ACEOF #define _LARGE_FILES $ac_cv_sys_large_files _ACEOF ;; esac rm -rf conftest* fi fi # pkg-config needed for many checks if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args. set dummy ${ac_tool_prefix}pkg-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_PKG_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $PKG_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_PKG_CONFIG="$PKG_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi PKG_CONFIG=$ac_cv_path_PKG_CONFIG if test -n "$PKG_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PKG_CONFIG" >&5 $as_echo "$PKG_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_path_PKG_CONFIG"; then ac_pt_PKG_CONFIG=$PKG_CONFIG # Extract the first word of "pkg-config", so it can be a program name with args. set dummy pkg-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_ac_pt_PKG_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $ac_pt_PKG_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_ac_pt_PKG_CONFIG="$ac_pt_PKG_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi ac_pt_PKG_CONFIG=$ac_cv_path_ac_pt_PKG_CONFIG if test -n "$ac_pt_PKG_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_PKG_CONFIG" >&5 $as_echo "$ac_pt_PKG_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_pt_PKG_CONFIG" = x; then PKG_CONFIG="no" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac PKG_CONFIG=$ac_pt_PKG_CONFIG fi else PKG_CONFIG="$ac_cv_path_PKG_CONFIG" fi if test "x$PKG_CONFIG" = "xno"; then as_fn_error $? " *** pkg-config not found" "$LINENO" 5 else pkgconfigdir=${libdir}/pkgconfig fi # Default enable/disable switches # Features enables_ldap=yes enables_mysql=no enables_systemd=no enables_swig_python=yes # Features directly related to components enables_cppunit=yes enables_python=yes enables_altpython=yes enables_pylint=yes enables_mock_dmc=no enables_gfal=no enables_s3=no enables_xrootd=yes enables_argus=no enables_xmlsec1=yes enables_dbjstore=yes enables_sqlitejstore=yes enables_ldns=yes # Libraries and plugins # Currently no fine-grained choice is supported. # Also this variable is used to check if source # build is needed at all because no component can # be built without HED. enables_hed=yes # Services enables_a_rex_service=yes enables_internal=no enables_gridftpd_service=yes enables_ldap_service=yes enables_candypond=yes enables_datadelivery_service=yes enables_monitor=yes # Clients enables_compute_client=yes enables_credentials_client=yes enables_data_client=yes enables_emies_client=yes enables_arcrest_client=yes # Documentation enables_doc=yes # ACIX cache index enables_acix=yes # Handle group enable/disable switches # Check whether --enable-all was given. 
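# Illustrative invocations (assumptions about typical intent, not tested
# commands):
#   ./configure --disable-all --enable-hed        # build only the HED core
#   ./configure --disable-all-services            # keep clients, drop services
# The group switches below only preset the enables_* defaults; the individual
# --enable-*/--disable-* options handled later in this script still override
# them per component.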
if test "${enable_all+set}" = set; then : enableval=$enable_all; enables_a_rex_service=$enableval enables_internal=$enableval enables_gridftpd_service=$enableval enables_ldap_service=$enableval enables_monitor=$enableval enables_candypond=$enableval enables_datadelivery_service=$enableval enables_compute_client=$enableval enables_credentials_client=$enableval enables_echo_client=$enableval enables_data_client=$enableval enables_emies_client=$enableval enables_arcrest_client=$enableval enables_hed=$enableval enables_python=$enableval enables_altpython=$enableval enables_pylint=$enableval enables_mock_dmc=$enableval enables_gfal=$enableval enables_s3=$enableval enables_xrootd=$enableval enables_xmlsec1=$enableval enables_argus=$enableval enables_cppunit=$enableval enables_doc=$enableval enables_acix=$enableval enables_dbjstore=$enableval enables_ldns=$enableval enables_sqlitejstore=$enableval fi # Check whether --enable-all-clients was given. if test "${enable_all_clients+set}" = set; then : enableval=$enable_all_clients; enables_compute_client=$enableval enables_credentials_client=$enableval enables_echo_client=$enableval enables_data_client=$enableval enables_emies_client=$enableval enables_arcrest_client=$enableval enables_doc=$enableval fi # Check whether --enable-all-data-clients was given. if test "${enable_all_data_clients+set}" = set; then : enableval=$enable_all_data_clients; enables_data_client=$enableval fi # Check whether --enable-all-services was given. if test "${enable_all_services+set}" = set; then : enableval=$enable_all_services; enables_a_rex_service=$enableval enables_gridftpd_service=$enableval enables_ldap_service=$enableval enables_monitor=$enableval enables_candypond=$enableval enables_datadelivery_service=$enableval enables_acix=$enableval fi # Be pedantic about compiler warnings. # Check whether --enable-pedantic-compile was given. if test "${enable_pedantic_compile+set}" = set; then : enableval=$enable_pedantic_compile; enables_pedantic_compile="yes" else enables_pedantic_compile="no" fi if test "x$enables_pedantic_compile" = "xyes"; then # This check need to be enhanced. It won't work in case of cross-compilation # and if path to compiler is explicitly specified. if test x"$CXX" = x"g++"; then # GNU C/C++ flags AM_CXXFLAGS="-Wall -Wextra -Werror -Wno-sign-compare -Wno-unused" SAVE_CPPFLAGS=$CPPFLAGS ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu CPPFLAGS="$CPPFLAGS -Wno-unused-result" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : AM_CXXFLAGS="$AM_CXXFLAGS -Wno-unused-result" else { $as_echo "$as_me:${as_lineno-$LINENO}: compilation flag -Wno-unused-result is not supported" >&5 $as_echo "$as_me: compilation flag -Wno-unused-result is not supported" >&6;} fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu CPPFLAGS=$SAVE_CPPFLAGS else # TODO: set generic flags for generic compiler AM_CXXFLAGS="" fi fi if test "x$enables_pedantic_compile" = "xyes"; then PEDANTIC_COMPILE_TRUE= PEDANTIC_COMPILE_FALSE='#' else PEDANTIC_COMPILE_TRUE='#' PEDANTIC_COMPILE_FALSE= fi # Enable/disable switches for third-party. # Swig # Check whether --enable-swig-python was given. if test "${enable_swig_python+set}" = set; then : enableval=$enable_swig_python; enables_swig_python=$enableval fi # Check whether --enable-swig was given. if test "${enable_swig+set}" = set; then : enableval=$enable_swig; enables_swig_python=$enableval fi if test "$enables_swig_python" = "yes"; then for ac_prog in swig do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_SWIG+:} false; then : $as_echo_n "(cached) " >&6 else case $SWIG in [\\/]* | ?:[\\/]*) ac_cv_path_SWIG="$SWIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_SWIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi SWIG=$ac_cv_path_SWIG if test -n "$SWIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $SWIG" >&5 $as_echo "$SWIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$SWIG" && break done if test "x$SWIG" = "x"; then enables_swig="no" else swigver=`$SWIG -version 2>&1 | grep Version | sed 's/.* //'` swigver1=`echo $swigver | cut -d. -f1` swigver2=`echo $swigver | cut -d. -f2` swigver3=`echo $swigver | cut -d. -f3` if test $swigver1 -lt 1 || ( test $swigver1 -eq 1 && ( \ test $swigver2 -lt 3 || ( test $swigver2 -eq 3 && ( \ test $swigver3 -lt 25 ) ) ) ) ; then { $as_echo "$as_me:${as_lineno-$LINENO}: swig is too old (< 1.3.25)" >&5 $as_echo "$as_me: swig is too old (< 1.3.25)" >&6;} SWIG="" enables_swig="no" elif test $swigver1 -eq 1 && test $swigver2 -eq 3 && test $swigver3 -eq 38 ; then { $as_echo "$as_me:${as_lineno-$LINENO}: swig version 1.3.38 has bug which prevents it from being used for this software. Please upgrade or downgrade." >&5 $as_echo "$as_me: swig version 1.3.38 has bug which prevents it from being used for this software. Please upgrade or downgrade." >&6;} SWIG="" enables_swig="no" else SWIG2="no" if test $swigver1 -ge 2 then SWIG2="yes" fi SWIG_PYTHON_NAMING="SwigPy" # In SWIG version 1.3.37 naming was changed from "PySwig" to "SwigPy". 
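# The comparison below splits the reported version on dots into
# swigver1/swigver2/swigver3 and, for anything older than 1.3.37, switches
# the naming prefix back to "PySwig". Worked examples (illustrative only):
#   1.3.36 -> PySwig      2.0.12 -> SwigPy      4.0.2 -> SwigPy
# (1.3.38 never reaches this point: it is rejected above as unusable, and
# versions older than 1.3.25 are rejected as too old.)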
if test $swigver1 -lt 1 || ( test $swigver1 -eq 1 && ( \ test $swigver2 -lt 3 || ( test $swigver2 -eq 3 && ( \ test $swigver3 -lt 37 ) ) ) ) ; then SWIG_PYTHON_NAMING="PySwig" fi fi fi else SWIG="" fi if test "x$enables_swig" = "xyes"; then SWIG_ENABLED_TRUE= SWIG_ENABLED_FALSE='#' else SWIG_ENABLED_TRUE='#' SWIG_ENABLED_FALSE= fi # Check whether --enable-hed was given. if test "${enable_hed+set}" = set; then : enableval=$enable_hed; enables_hed=$enableval fi # Python if test "$enables_hed" = "yes"; then # Check whether --enable-python was given. if test "${enable_python+set}" = set; then : enableval=$enable_python; enables_python=$enableval enables_swig_python=$enableval fi if test "$enables_python" = "yes"; then # Check whether --with-python was given. if test "${with_python+set}" = set; then : withval=$with_python; fi # We do not look for python binary when cross-compiling # but we need to make the variable non-empty if test "${build}" = "${host}"; then for ac_prog in $with_python python do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_PYTHON+:} false; then : $as_echo_n "(cached) " >&6 else case $PYTHON in [\\/]* | ?:[\\/]*) ac_cv_path_PYTHON="$PYTHON" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PYTHON="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi PYTHON=$ac_cv_path_PYTHON if test -n "$PYTHON"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYTHON" >&5 $as_echo "$PYTHON" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$PYTHON" && break done else PYTHON=/usr/bin/python fi if test "X$PYTHON" != "X"; then PYNAME=`basename $PYTHON` if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args. set dummy ${ac_tool_prefix}pkg-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_PKG_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $PKG_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_PKG_CONFIG="$PKG_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi PKG_CONFIG=$ac_cv_path_PKG_CONFIG if test -n "$PKG_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PKG_CONFIG" >&5 $as_echo "$PKG_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_path_PKG_CONFIG"; then ac_pt_PKG_CONFIG=$PKG_CONFIG # Extract the first word of "pkg-config", so it can be a program name with args. 
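# The PYTHON checks that follow probe pkg-config for "$PYNAME-embed" first
# (the module carrying embedding flags on Python >= 3.8), then plain
# "$PYNAME", then "python-X.Y", and only then fall back to querying
# distutils sysconfig for the include and library paths.  Roughly the same
# lookup done by hand (illustrative only, assuming a python3-named module
# and pkg-config on PATH; nothing here is executed by configure):
#
#   pkg-config --cflags --libs python3-embed 2>/dev/null \
#     || pkg-config --cflags --libs python3 2>/dev/null \
#     || python3 -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'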
set dummy pkg-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_ac_pt_PKG_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $ac_pt_PKG_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_ac_pt_PKG_CONFIG="$ac_pt_PKG_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi ac_pt_PKG_CONFIG=$ac_cv_path_ac_pt_PKG_CONFIG if test -n "$ac_pt_PKG_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_PKG_CONFIG" >&5 $as_echo "$ac_pt_PKG_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_pt_PKG_CONFIG" = x; then PKG_CONFIG="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac PKG_CONFIG=$ac_pt_PKG_CONFIG fi else PKG_CONFIG="$ac_cv_path_PKG_CONFIG" fi fi if test -n "$PKG_CONFIG"; then _pkg_min_version=0.9.0 { $as_echo "$as_me:${as_lineno-$LINENO}: checking pkg-config is at least version $_pkg_min_version" >&5 $as_echo_n "checking pkg-config is at least version $_pkg_min_version... " >&6; } if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PKG_CONFIG="" fi fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME-embed" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME-embed" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME-embed" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 PYNAME=python-`$PYTHON -c 'import sys; print(sys.version[:3])'` pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME-embed" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME-embed" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... 
" >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME-embed` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PYNAME=python-`$PYTHON -c 'import sys; print(sys.version[:3])'` pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME-embed" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME-embed" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... 
" >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... 
" >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME-embed` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 PYNAME=python-`$PYTHON -c 'import sys; print(sys.version[:3])'` pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME-embed" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME-embed" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... 
" >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... 
" >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME-embed` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PYNAME=python-`$PYTHON -c 'import sys; print(sys.version[:3])'` pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME-embed" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME-embed" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME-embed" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for PYTHON" >&5 $as_echo_n "checking for PYTHON... " >&6; } if test -n "$PYTHON_CFLAGS"; then pkg_cv_PYTHON_CFLAGS="$PYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_CFLAGS=`$PKG_CONFIG --cflags "$PYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$PYTHON_LIBS"; then pkg_cv_PYTHON_LIBS="$PYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$PYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$PYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_PYTHON_LIBS=`$PKG_CONFIG --libs "$PYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then PYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$PYNAME" 2>&1` else PYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$PYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$PYTHON_PKG_ERRORS" >&5 PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[:3])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[0])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS" else LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$PYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$PYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$PYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$PYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS" else PYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME-embed` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi else PYTHON_CFLAGS=$pkg_cv_PYTHON_CFLAGS PYTHON_LIBS=$pkg_cv_PYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME-embed` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` fi if test "${build}" = "${host}"; then PYTHON_EXT_SUFFIX=`$PYTHON -c "from distutils import sysconfig; v = sysconfig.get_config_vars(); print(v.get('EXT_SUFFIX', v.get('SO')))" | sed s/None//` else PYTHON_EXT_SUFFIX="" fi # Check whether --with-python-site-arch was given. if test "${with_python_site_arch+set}" = set; then : withval=$with_python_site_arch; fi if test "X$PYTHON_SITE_ARCH" = "X"; then if test "${build}" = "${host}"; then PYTHON_SITE_ARCH=`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_lib(1,0,"${prefix}"))'` else PYTHON_SITE_ARCH="${libdir}/python${PYTHON_VERSION}/site-packages" fi fi # Check whether --with-python-site-lib was given. if test "${with_python_site_lib+set}" = set; then : withval=$with_python_site_lib; fi if test "X$PYTHON_SITE_LIB" = "X"; then if test "${build}" = "${host}"; then PYTHON_SITE_LIB=`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_lib(0,0,"${prefix}"))'` else PYTHON_SITE_LIB="${libdir}/python${PYTHON_VERSION}/site-packages" fi fi SAVE_LDFLAGS=$LDFLAGS SAVE_CPPFLAGS=$CPPFLAGS LDFLAGS="$LDFLAGS $PYTHON_LIBS" CPPFLAGS="$CPPFLAGS $PYTHON_CFLAGS" ac_fn_c_check_header_mongrel "$LINENO" "Python.h" "ac_cv_header_Python_h" "$ac_includes_default" if test "x$ac_cv_header_Python_h" = xyes; then : pythonh="yes" else pythonh="no" fi cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include <Python.h> int main () { Py_InitializeEx(0) ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Python includes functionality of skipping initialization registration of signal handlers" >&5 $as_echo "$as_me: Python includes functionality of skipping initialization registration of signal handlers" >&6;} $as_echo "#define HAVE_PYTHON_INITIALIZE_EX 1" >>confdefs.h enables_python_service="yes" else { $as_echo "$as_me:${as_lineno-$LINENO}: Python does not include functionality of skipping initialization registration of signal handlers, since its version is below 2.4" >&5 $as_echo "$as_me: Python does not include functionality of skipping initialization registration of signal handlers, since its version is below 2.4" >&6;} enables_python_service="no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext LDFLAGS=$SAVE_LDFLAGS CPPFLAGS=$SAVE_CPPFLAGS fi if test "X$PYTHON" = "X"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing Python - skipping Python components" >&5 $as_echo "$as_me: Missing Python - skipping Python components" >&6;} enables_python=no elif test "X$PYTHON_SITE_ARCH" = "X" || test "X$PYTHON_SITE_LIB" = "X"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing python site packages location - skipping Python components" >&5 $as_echo "$as_me: Missing python site packages location - skipping Python components" >&6;} enables_python=no else { $as_echo "$as_me:${as_lineno-$LINENO}: Python available: $PYTHON_VERSION" >&5 $as_echo "$as_me: Python available: $PYTHON_VERSION" >&6;} fi if test "x$enables_python" != "xyes"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing Python - skipping Python bindings" >&5 $as_echo "$as_me: Missing Python - skipping Python bindings" >&6;} enables_swig_python=no elif test "X$PYTHON_LIBS" = "X"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing Python library - skipping Python bindings" >&5 $as_echo "$as_me: Missing Python library - skipping Python bindings" >&6;} enables_swig_python=no elif test "X$pythonh" != "Xyes"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing Python header - skipping Python bindings" >&5 $as_echo "$as_me: Missing Python header - skipping Python bindings" >&6;} enables_swig_python=no elif !
test -f python/arc_wrap.cpp && test "x$enables_swig_python" != "xyes"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing pre-compiled Python wrapper and SWIG - skipping Python bindings" >&5 $as_echo "$as_me: Missing pre-compiled Python wrapper and SWIG - skipping Python bindings" >&6;} enables_swig_python=no fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: Python enabled: $enables_python" >&5 $as_echo "$as_me: Python enabled: $enables_python" >&6;} { $as_echo "$as_me:${as_lineno-$LINENO}: Python SWIG bindings enabled: $enables_swig_python" >&5 $as_echo "$as_me: Python SWIG bindings enabled: $enables_swig_python" >&6;} if test "x$enables_python" = "xyes"; then PYTHON_ENABLED_TRUE= PYTHON_ENABLED_FALSE='#' else PYTHON_ENABLED_TRUE='#' PYTHON_ENABLED_FALSE= fi if test "x$enables_python" = "xyes" && test "x$PYTHON_MAJOR" = "x3"; then PYTHON3_TRUE= PYTHON3_FALSE='#' else PYTHON3_TRUE='#' PYTHON3_FALSE= fi if test "x$enables_swig_python" = "xyes"; then PYTHON_SWIG_ENABLED_TRUE= PYTHON_SWIG_ENABLED_FALSE='#' else PYTHON_SWIG_ENABLED_TRUE='#' PYTHON_SWIG_ENABLED_FALSE= fi if test "x$enables_swig_python" = "xyes" && test "x$enables_python_service" = "xyes"; then PYTHON_SERVICE_TRUE= PYTHON_SERVICE_FALSE='#' else PYTHON_SERVICE_TRUE='#' PYTHON_SERVICE_FALSE= fi # Alternative Python if test "$enables_hed" = "yes"; then # Check whether --enable-altpython was given. if test "${enable_altpython+set}" = set; then : enableval=$enable_altpython; enables_altpython=$enableval fi if test "$enables_altpython" = "yes"; then # Check whether --with-altpython was given. if test "${with_altpython+set}" = set; then : withval=$with_altpython; fi for ac_prog in $with_altpython do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_ALTPYTHON+:} false; then : $as_echo_n "(cached) " >&6 else case $ALTPYTHON in [\\/]* | ?:[\\/]*) ac_cv_path_ALTPYTHON="$ALTPYTHON" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ALTPYTHON="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi ALTPYTHON=$ac_cv_path_ALTPYTHON if test -n "$ALTPYTHON"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ALTPYTHON" >&5 $as_echo "$ALTPYTHON" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ALTPYTHON" && break done if test "X$ALTPYTHON" != "X"; then ALTPYNAME=`basename $ALTPYTHON` pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME-embed" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME-embed" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME-embed" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 ALTPYNAME=python-`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... 
" >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME-embed" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME-embed" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME-embed` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ALTPYNAME=python-`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME-embed" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME-embed" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME-embed" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... 
" >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME-embed` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 ALTPYNAME=python-`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME-embed" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME-embed" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME-embed` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ALTPYNAME=python-`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... 
" >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME-embed\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME-embed") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME-embed" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME-embed" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME-embed" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ALTPYTHON" >&5 $as_echo_n "checking for ALTPYTHON... " >&6; } if test -n "$ALTPYTHON_CFLAGS"; then pkg_cv_ALTPYTHON_CFLAGS="$ALTPYTHON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_CFLAGS=`$PKG_CONFIG --cflags "$ALTPYNAME" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ALTPYTHON_LIBS"; then pkg_cv_ALTPYTHON_LIBS="$ALTPYTHON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"\$ALTPYNAME\""; } >&5 ($PKG_CONFIG --exists --print-errors "$ALTPYNAME") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ALTPYTHON_LIBS=`$PKG_CONFIG --libs "$ALTPYNAME" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$ALTPYNAME" 2>&1` else ALTPYTHON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$ALTPYNAME" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ALTPYTHON_PKG_ERRORS" >&5 ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[:3])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[0])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Initialize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Initialize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Initialize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Initialize (); int main () { return Py_Initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: No additional path to python library needed" >&5 $as_echo "$as_me: No additional path to python library needed" >&6;} ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value as_ac_Lib=`$as_echo "ac_cv_lib_python$ALTPYTHON_VERSION''_Py_Finalize" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Py_Finalize in -lpython$ALTPYTHON_VERSION" >&5 $as_echo_n "checking for Py_Finalize in -lpython$ALTPYTHON_VERSION... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpython$ALTPYTHON_VERSION $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char Py_Finalize (); int main () { return Py_Finalize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: Adding path to python library" >&5 $as_echo "$as_me: Adding path to python library" >&6;} ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS" else ALTPYTHON_LIBS="" fi fi LDFLAGS=$SAVE_LDFLAGS else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME-embed` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi else ALTPYTHON_CFLAGS=$pkg_cv_ALTPYTHON_CFLAGS ALTPYTHON_LIBS=$pkg_cv_ALTPYTHON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME-embed` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` fi ALTPYTHON_EXT_SUFFIX=`$ALTPYTHON -c "from distutils import sysconfig; v = sysconfig.get_config_vars(); print(v.get('EXT_SUFFIX', v.get('SO')))" | sed s/None//` # Check whether --with-altpython-site-arch was given. if test "${with_altpython_site_arch+set}" = set; then : withval=$with_altpython_site_arch; fi if test "X$ALTPYTHON_SITE_ARCH" = "X"; then ALTPYTHON_SITE_ARCH=`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_lib(1,0,"${prefix}"))'` fi # Check whether --with-altpython-site-lib was given. 
if test "${with_altpython_site_lib+set}" = set; then : withval=$with_altpython_site_lib; fi if test "X$ALTPYTHON_SITE_LIB" = "X"; then ALTPYTHON_SITE_LIB=`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_lib(0,0,"${prefix}"))'` fi SAVE_LDFLAGS=$LDFLAGS SAVE_CPPFLAGS=$CPPFLAGS LDFLAGS="$LDFLAGS $ALTPYTHON_LIBS" CPPFLAGS="$CPPFLAGS $ALTPYTHON_CFLAGS" ac_fn_c_check_header_mongrel "$LINENO" "Python.h" "ac_cv_header_Python_h" "$ac_includes_default" if test "x$ac_cv_header_Python_h" = xyes; then : altpythonh="yes" else altpythonh="no" fi LDFLAGS=$SAVE_LDFLAGS CPPFLAGS=$SAVE_CPPFLAGS fi if test "X$ALTPYTHON" = "X"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing alternative Python - skipping alternative Python" >&5 $as_echo "$as_me: Missing alternative Python - skipping alternative Python" >&6;} enables_altpython=no elif test "X$ALTPYTHON_LIBS" = "X"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing alternative Python library - skipping alternative Python bindings" >&5 $as_echo "$as_me: Missing alternative Python library - skipping alternative Python bindings" >&6;} enables_altpython=no elif test "X$altpythonh" != "Xyes"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing alternative Python header - skipping alternative Python bindings" >&5 $as_echo "$as_me: Missing alternative Python header - skipping alternative Python bindings" >&6;} enables_altpython=no elif test "X$ALTPYTHON_SITE_ARCH" = "X" || test "X$ALTPYTHON_SITE_LIB" = "X"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing python site packages location - skipping Python bindings" >&5 $as_echo "$as_me: Missing python site packages location - skipping Python bindings" >&6;} enables_altpython=no else { $as_echo "$as_me:${as_lineno-$LINENO}: Alternative Python available: $ALTPYTHON_VERSION" >&5 $as_echo "$as_me: Alternative Python available: $ALTPYTHON_VERSION" >&6;} fi if test "x$enables_altpython" != "xyes"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing alternative Python - skipping alternative Python bindings" >&5 $as_echo "$as_me: Missing alternative Python - skipping alternative Python bindings" >&6;} enables_altpython=no elif ! test -f python/arc_wrap.cpp && test "x$enables_swig_python" != "xyes"; then { $as_echo "$as_me:${as_lineno-$LINENO}: Missing pre-compiled Python wrapper and SWIG - skipping alternative Python bindings" >&5 $as_echo "$as_me: Missing pre-compiled Python wrapper and SWIG - skipping alternative Python bindings" >&6;} enables_altpython=no fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: Alternative Python enabled: $enables_altpython" >&5 $as_echo "$as_me: Alternative Python enabled: $enables_altpython" >&6;} if test "x$enables_altpython" = "xyes"; then ALTPYTHON_ENABLED_TRUE= ALTPYTHON_ENABLED_FALSE='#' else ALTPYTHON_ENABLED_TRUE='#' ALTPYTHON_ENABLED_FALSE= fi if test "x$enables_altpython" = "xyes" && test "x$ALTPYTHON_MAJOR" = "x3"; then ALTPYTHON3_TRUE= ALTPYTHON3_FALSE='#' else ALTPYTHON3_TRUE='#' ALTPYTHON3_FALSE= fi # check for pylint if test "$enables_hed" = "yes"; then # Check whether --enable-pylint was given. if test "${enable_pylint+set}" = set; then : enableval=$enable_pylint; enables_pylint=$enableval fi for ac_prog in pylint do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_path_PYLINT+:} false; then : $as_echo_n "(cached) " >&6 else case $PYLINT in [\\/]* | ?:[\\/]*) ac_cv_path_PYLINT="$PYLINT" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PYLINT="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi PYLINT=$ac_cv_path_PYLINT if test -n "$PYLINT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYLINT" >&5 $as_echo "$PYLINT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$PYLINT" && break done if test "x$PYLINT" = "x"; then enables_pylint="no" else PYLINT_VERSION=`$PYLINT --version 2> /dev/null | sed -n 's/^pylint \([0-9.]*\).*/\1/p'` # Check if pylint supports the following arguments, otherwise disable pylint (python example checking). # Do not generate report # Disable convention and recommendation messages - we are only interested in fatals, errors and warnings. PYLINT_ARGS="--reports=no --disable=C,R" if $PYLINT $PYLINT_ARGS /dev/null > /dev/null 2>&1 ; then { $as_echo "$as_me:${as_lineno-$LINENO}: pylint version $PYLINT_VERSION found - version ok" >&5 $as_echo "$as_me: pylint version $PYLINT_VERSION found - version ok" >&6;} enables_pylint="yes" else { $as_echo "$as_me:${as_lineno-$LINENO}: pylint version $PYLINT_VERSION found - bad version" >&5 $as_echo "$as_me: pylint version $PYLINT_VERSION found - bad version" >&6;} enables_pylint="no" PYLINT_ARGS="" fi fi # Check if the --disable=W0221 option is supported # W0221: Disable arguments differ messages since Swig uses tuple syntax (*args). if test "$enables_pylint" = "yes"; then PYLINT_ARGS_ARGUMENTS_DIFFER="--disable=W0221" if ! $PYLINT $PYLINT_ARGS $PYLINT_ARGS_ARGUMENTS_DIFFER /dev/null > /dev/null 2>&1 ; then PYLINT_ARGS_ARGUMENTS_DIFFER="" fi fi fi if test "x$enables_pylint" = "xyes"; then PYLINT_ENABLED_TRUE= PYLINT_ENABLED_FALSE='#' else PYLINT_ENABLED_TRUE='#' PYLINT_ENABLED_FALSE= fi { $as_echo "$as_me:${as_lineno-$LINENO}: Python example checking with pylint enabled: $enables_pylint" >&5 $as_echo "$as_me: Python example checking with pylint enabled: $enables_pylint" >&6;} # check systemd daemon integration # Check whether --enable-systemd was given. if test "${enable_systemd+set}" = set; then : enableval=$enable_systemd; enables_systemd="$enableval" fi if test "x$enables_systemd" = "xyes"; then systemd_daemon_save_LIBS=$LIBS LIBS= { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing sd_listen_fds" >&5 $as_echo_n "checking for library containing sd_listen_fds... " >&6; } if ${ac_cv_search_sd_listen_fds+:} false; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char sd_listen_fds (); int main () { return sd_listen_fds (); ; return 0; } _ACEOF for ac_lib in '' systemd systemd-daemon; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $systemd_daemon_save_LIBS $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_sd_listen_fds=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if ${ac_cv_search_sd_listen_fds+:} false; then : break fi done if ${ac_cv_search_sd_listen_fds+:} false; then : else ac_cv_search_sd_listen_fds=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_sd_listen_fds" >&5 $as_echo "$ac_cv_search_sd_listen_fds" >&6; } ac_res=$ac_cv_search_sd_listen_fds if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" have_sd_listen_fds=yes else have_sd_listen_fds=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing sd_notify" >&5 $as_echo_n "checking for library containing sd_notify... " >&6; } if ${ac_cv_search_sd_notify+:} false; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char sd_notify (); int main () { return sd_notify (); ; return 0; } _ACEOF for ac_lib in '' systemd systemd-daemon; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $systemd_daemon_save_LIBS $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_sd_notify=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if ${ac_cv_search_sd_notify+:} false; then : break fi done if ${ac_cv_search_sd_notify+:} false; then : else ac_cv_search_sd_notify=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_sd_notify" >&5 $as_echo "$ac_cv_search_sd_notify" >&6; } ac_res=$ac_cv_search_sd_notify if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" have_sd_notify=yes else have_sd_notify=no fi for ac_header in systemd/sd-daemon.h do : ac_fn_c_check_header_mongrel "$LINENO" "systemd/sd-daemon.h" "ac_cv_header_systemd_sd_daemon_h" "$ac_includes_default" if test "x$ac_cv_header_systemd_sd_daemon_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_SYSTEMD_SD_DAEMON_H 1 _ACEOF have_systemd_sd_daemon_h=yes else have_systemd_sd_daemon_h=no fi done if test x"$have_sd_listen_fds" = x"yes" && \ test x"$have_sd_notify" = x"yes" && \ test x"$have_systemd_sd_daemon_h" = x"yes"; then $as_echo "#define HAVE_SYSTEMD_DAEMON 1" >>confdefs.h SYSTEMD_DAEMON_LIBS=$LIBS else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "--enable-systemd was given, but test for systemd libraries had failed See \`config.log' for more details" "$LINENO" 5; } fi LIBS=$systemd_daemon_save_LIBS fi # check gthread if test "$enables_hed" = "yes"; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GTHREAD" >&5 $as_echo_n "checking for GTHREAD... 
" >&6; } if test -n "$GTHREAD_CFLAGS"; then pkg_cv_GTHREAD_CFLAGS="$GTHREAD_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"gthread-2.0 >= 2.4.7\""; } >&5 ($PKG_CONFIG --exists --print-errors "gthread-2.0 >= 2.4.7") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GTHREAD_CFLAGS=`$PKG_CONFIG --cflags "gthread-2.0 >= 2.4.7" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GTHREAD_LIBS"; then pkg_cv_GTHREAD_LIBS="$GTHREAD_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"gthread-2.0 >= 2.4.7\""; } >&5 ($PKG_CONFIG --exists --print-errors "gthread-2.0 >= 2.4.7") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GTHREAD_LIBS=`$PKG_CONFIG --libs "gthread-2.0 >= 2.4.7" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GTHREAD_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "gthread-2.0 >= 2.4.7" 2>&1` else GTHREAD_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "gthread-2.0 >= 2.4.7" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GTHREAD_PKG_ERRORS" >&5 as_fn_error $? "Package requirements (gthread-2.0 >= 2.4.7) were not met: $GTHREAD_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. Alternatively, you may set the environment variables GTHREAD_CFLAGS and GTHREAD_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details." "$LINENO" 5 elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. Alternatively, you may set the environment variables GTHREAD_CFLAGS and GTHREAD_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details. To get pkg-config, see . See \`config.log' for more details" "$LINENO" 5; } else GTHREAD_CFLAGS=$pkg_cv_GTHREAD_CFLAGS GTHREAD_LIBS=$pkg_cv_GTHREAD_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi fi # check glibmm # check for giomm which became a part of glibmm as of version 2.16 if test "$enables_hed" = "yes"; then "$PKG_CONFIG" giomm-2.4 if test "$?" = '1'; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLIBMM" >&5 $as_echo_n "checking for GLIBMM... 
" >&6; } if test -n "$GLIBMM_CFLAGS"; then pkg_cv_GLIBMM_CFLAGS="$GLIBMM_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"glibmm-2.4 >= 2.4.7\""; } >&5 ($PKG_CONFIG --exists --print-errors "glibmm-2.4 >= 2.4.7") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLIBMM_CFLAGS=`$PKG_CONFIG --cflags "glibmm-2.4 >= 2.4.7" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLIBMM_LIBS"; then pkg_cv_GLIBMM_LIBS="$GLIBMM_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"glibmm-2.4 >= 2.4.7\""; } >&5 ($PKG_CONFIG --exists --print-errors "glibmm-2.4 >= 2.4.7") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLIBMM_LIBS=`$PKG_CONFIG --libs "glibmm-2.4 >= 2.4.7" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLIBMM_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "glibmm-2.4 >= 2.4.7" 2>&1` else GLIBMM_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "glibmm-2.4 >= 2.4.7" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLIBMM_PKG_ERRORS" >&5 as_fn_error $? "Package requirements (glibmm-2.4 >= 2.4.7) were not met: $GLIBMM_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. Alternatively, you may set the environment variables GLIBMM_CFLAGS and GLIBMM_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details." "$LINENO" 5 elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. Alternatively, you may set the environment variables GLIBMM_CFLAGS and GLIBMM_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details. To get pkg-config, see . See \`config.log' for more details" "$LINENO" 5; } else GLIBMM_CFLAGS=$pkg_cv_GLIBMM_CFLAGS GLIBMM_LIBS=$pkg_cv_GLIBMM_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi else pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLIBMM" >&5 $as_echo_n "checking for GLIBMM... " >&6; } if test -n "$GLIBMM_CFLAGS"; then pkg_cv_GLIBMM_CFLAGS="$GLIBMM_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"giomm-2.4\""; } >&5 ($PKG_CONFIG --exists --print-errors "giomm-2.4") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLIBMM_CFLAGS=`$PKG_CONFIG --cflags "giomm-2.4" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLIBMM_LIBS"; then pkg_cv_GLIBMM_LIBS="$GLIBMM_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"giomm-2.4\""; } >&5 ($PKG_CONFIG --exists --print-errors "giomm-2.4") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLIBMM_LIBS=`$PKG_CONFIG --libs "giomm-2.4" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLIBMM_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "giomm-2.4" 2>&1` else GLIBMM_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "giomm-2.4" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLIBMM_PKG_ERRORS" >&5 as_fn_error $? "Package requirements (giomm-2.4) were not met: $GLIBMM_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. Alternatively, you may set the environment variables GLIBMM_CFLAGS and GLIBMM_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details." "$LINENO" 5 elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. Alternatively, you may set the environment variables GLIBMM_CFLAGS and GLIBMM_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details. To get pkg-config, see . See \`config.log' for more details" "$LINENO" 5; } else GLIBMM_CFLAGS=$pkg_cv_GLIBMM_CFLAGS GLIBMM_LIBS=$pkg_cv_GLIBMM_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi $as_echo "#define HAVE_GIOMM 1" >>confdefs.h fi SAVE_CPPFLAGS=$CPPFLAGS ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu CPPFLAGS="$CPPFLAGS $GLIBMM_CFLAGS" ac_fn_cxx_check_header_mongrel "$LINENO" "glibmm/optioncontext.h" "ac_cv_header_glibmm_optioncontext_h" "$ac_includes_default" if test "x$ac_cv_header_glibmm_optioncontext_h" = xyes; then : cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include int main () { Glib::OptionContext ctx; ctx.set_summary("summary") ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_GLIBMM_OPTIONCONTEXT_SET_SUMMARY 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: using glibmm command line parsing" >&5 $as_echo "$as_me: using glibmm command line parsing" >&6;} else { $as_echo "$as_me:${as_lineno-$LINENO}: using getopt_long command line parsing" >&5 $as_echo "$as_me: using getopt_long command line parsing" >&6;} fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { Glib::OptionContext ctx; ctx.get_help(); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_GLIBMM_OPTIONCONTEXT_GET_HELP 1" >>confdefs.h fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { Glib::ModuleFlags flags = Glib::MODULE_BIND_LOCAL; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : glibmm_bind_local=yes else glibmm_bind_local=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test "$glibmm_bind_local" = yes; then $as_echo "#define HAVE_GLIBMM_BIND_LOCAL 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: glibmm has no way to limit scope of symbols of shared libraries. Make sure external libraries used by plugins have no conflicting symbols. HINT: use Globus compiled against system OpenSSL library." >&5 $as_echo "$as_me: WARNING: glibmm has no way to limit scope of symbols of shared libraries. Make sure external libraries used by plugins have no conflicting symbols. HINT: use Globus compiled against system OpenSSL library." >&6;} fi cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { Glib::getenv(""); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : glibmm_getenv=yes else glibmm_getenv=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test "$glibmm_getenv" = yes; then $as_echo "#define HAVE_GLIBMM_GETENV 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: glibmm has no support for getenv. Usage of libc getenv is unsafe in multi-threaded applications." >&5 $as_echo "$as_me: WARNING: glibmm has no support for getenv. Usage of libc getenv is unsafe in multi-threaded applications." >&6;} fi cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { Glib::setenv("", ""); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : glibmm_setenv=yes else glibmm_setenv=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test "$glibmm_setenv" = yes; then $as_echo "#define HAVE_GLIBMM_SETENV 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: glibmm has no support for setenv. Usage of libc setenv may be unsafe in multi-threaded applications." >&5 $as_echo "$as_me: WARNING: glibmm has no support for setenv. Usage of libc setenv may be unsafe in multi-threaded applications." >&6;} fi cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include int main () { Glib::unsetenv(""); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : glibmm_unsetenv=yes else glibmm_unsetenv=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test "$glibmm_unsetenv" = yes; then $as_echo "#define HAVE_GLIBMM_UNSETENV 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: glibmm has no support for unsetenv. Usage of libc unsetenv may be unsafe in multi-threaded applications." >&5 $as_echo "$as_me: WARNING: glibmm has no support for unsetenv. Usage of libc unsetenv may be unsafe in multi-threaded applications." >&6;} fi cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { Glib::listenv(); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : glibmm_listenv=yes else glibmm_listenv=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test "$glibmm_listenv" = yes; then $as_echo "#define HAVE_GLIBMM_LISTENV 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: glibmm has no support for listenv. Usage of libc environ is unsafe in multi-threaded applications." >&5 $as_echo "$as_me: WARNING: glibmm has no support for listenv. Usage of libc environ is unsafe in multi-threaded applications." >&6;} fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu CPPFLAGS=$SAVE_CPPFLAGS fi # check libxml if test "$enables_hed" = "yes"; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for LIBXML2" >&5 $as_echo_n "checking for LIBXML2... " >&6; } if test -n "$LIBXML2_CFLAGS"; then pkg_cv_LIBXML2_CFLAGS="$LIBXML2_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libxml-2.0 >= 2.4.0\""; } >&5 ($PKG_CONFIG --exists --print-errors "libxml-2.0 >= 2.4.0") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_LIBXML2_CFLAGS=`$PKG_CONFIG --cflags "libxml-2.0 >= 2.4.0" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$LIBXML2_LIBS"; then pkg_cv_LIBXML2_LIBS="$LIBXML2_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libxml-2.0 >= 2.4.0\""; } >&5 ($PKG_CONFIG --exists --print-errors "libxml-2.0 >= 2.4.0") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_LIBXML2_LIBS=`$PKG_CONFIG --libs "libxml-2.0 >= 2.4.0" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then LIBXML2_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libxml-2.0 >= 2.4.0" 2>&1` else LIBXML2_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libxml-2.0 >= 2.4.0" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$LIBXML2_PKG_ERRORS" >&5 as_fn_error $? 
"Package requirements (libxml-2.0 >= 2.4.0) were not met: $LIBXML2_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. Alternatively, you may set the environment variables LIBXML2_CFLAGS and LIBXML2_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details." "$LINENO" 5 elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. Alternatively, you may set the environment variables LIBXML2_CFLAGS and LIBXML2_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details. To get pkg-config, see . See \`config.log' for more details" "$LINENO" 5; } else LIBXML2_CFLAGS=$pkg_cv_LIBXML2_CFLAGS LIBXML2_LIBS=$pkg_cv_LIBXML2_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi fi # check openssl if test "$enables_hed" = "yes"; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for OPENSSL" >&5 $as_echo_n "checking for OPENSSL... " >&6; } if test -n "$OPENSSL_CFLAGS"; then pkg_cv_OPENSSL_CFLAGS="$OPENSSL_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"openssl >= 1.0.0\""; } >&5 ($PKG_CONFIG --exists --print-errors "openssl >= 1.0.0") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_OPENSSL_CFLAGS=`$PKG_CONFIG --cflags "openssl >= 1.0.0" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$OPENSSL_LIBS"; then pkg_cv_OPENSSL_LIBS="$OPENSSL_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"openssl >= 1.0.0\""; } >&5 ($PKG_CONFIG --exists --print-errors "openssl >= 1.0.0") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_OPENSSL_LIBS=`$PKG_CONFIG --libs "openssl >= 1.0.0" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then OPENSSL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "openssl >= 1.0.0" 2>&1` else OPENSSL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "openssl >= 1.0.0" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$OPENSSL_PKG_ERRORS" >&5 as_fn_error $? "Package requirements (openssl >= 1.0.0) were not met: $OPENSSL_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. Alternatively, you may set the environment variables OPENSSL_CFLAGS and OPENSSL_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details." 
"$LINENO" 5 elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. Alternatively, you may set the environment variables OPENSSL_CFLAGS and OPENSSL_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details. To get pkg-config, see . See \`config.log' for more details" "$LINENO" 5; } else OPENSSL_CFLAGS=$pkg_cv_OPENSSL_CFLAGS OPENSSL_LIBS=$pkg_cv_OPENSSL_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for OPENSSL_1_1" >&5 $as_echo_n "checking for OPENSSL_1_1... " >&6; } if test -n "$OPENSSL_1_1_CFLAGS"; then pkg_cv_OPENSSL_1_1_CFLAGS="$OPENSSL_1_1_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"openssl >= 1.1.0\""; } >&5 ($PKG_CONFIG --exists --print-errors "openssl >= 1.1.0") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_OPENSSL_1_1_CFLAGS=`$PKG_CONFIG --cflags "openssl >= 1.1.0" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$OPENSSL_1_1_LIBS"; then pkg_cv_OPENSSL_1_1_LIBS="$OPENSSL_1_1_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"openssl >= 1.1.0\""; } >&5 ($PKG_CONFIG --exists --print-errors "openssl >= 1.1.0") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_OPENSSL_1_1_LIBS=`$PKG_CONFIG --libs "openssl >= 1.1.0" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then OPENSSL_1_1_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "openssl >= 1.1.0" 2>&1` else OPENSSL_1_1_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "openssl >= 1.1.0" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$OPENSSL_1_1_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: OpenSSL is pre-1.1" >&5 $as_echo "$as_me: OpenSSL is pre-1.1" >&6;} elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: OpenSSL is pre-1.1" >&5 $as_echo "$as_me: OpenSSL is pre-1.1" >&6;} else OPENSSL_1_1_CFLAGS=$pkg_cv_OPENSSL_1_1_CFLAGS OPENSSL_1_1_LIBS=$pkg_cv_OPENSSL_1_1_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } OPENSSL_CFLAGS="$OPENSSL_CFLAGS -DOPENSSL_API_COMPAT=0x10100000L" { $as_echo "$as_me:${as_lineno-$LINENO}: Forcing off deprecated functions for OpenSSL >= 1.1" >&5 $as_echo "$as_me: Forcing off deprecated functions for OpenSSL >= 1.1" >&6;} fi fi # Check for available *_method functions in OpenSSL SAVE_CPPFLAGS=$CPPFLAGS SAVE_LIBS=$LIBS CPPFLAGS="$CPPFLAGS $OPENSSL_CFLAGS" LIBS="$LIBS $OPENSSL_LIBS" ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include void _test(void) { (void)SSLv3_method(); } int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_SSLV3_METHOD 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: No SSLv3_method function avialable" >&5 $as_echo "$as_me: No SSLv3_method function avialable" >&6;} fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include void _test(void) { (void)TLSv1_method(); } int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_TLSV1_METHOD 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: No TLSv1_method function avialable" >&5 $as_echo "$as_me: No TLSv1_method function avialable" >&6;} fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include void _test(void) { (void)TLSv1_1_method(); } int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_TLSV1_1_METHOD 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: No TLSv1_1_method function avialable" >&5 $as_echo "$as_me: No TLSv1_1_method function avialable" >&6;} fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include void _test(void) { (void)TLSv1_2_method(); } int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_TLSV1_2_METHOD 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: No TLSv1_2_method function avialable" >&5 $as_echo "$as_me: No TLSv1_2_method function avialable" >&6;} fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include void _test(void) { (void)TLS_method(); } int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_TLS_METHOD 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: No TLS_method function avialable" >&5 $as_echo "$as_me: No TLS_method function avialable" >&6;} fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include void _test(void) { (void)DTLSv1_method(); } int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_DTLSV1_METHOD 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: No DTLSv1_method function avialable" >&5 $as_echo "$as_me: No DTLSv1_method function avialable" >&6;} fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include void _test(void) { (void)DTLSv1_2_method(); } int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_DTLSV1_2_METHOD 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: No DTLSv1_2_method function avialable" >&5 $as_echo "$as_me: No DTLSv1_2_method function avialable" >&6;} fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include void _test(void) { (void)DTLS_method(); } int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_DTLS_METHOD 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: No DTLS_method function avialable" >&5 $as_echo "$as_me: No DTLS_method function avialable" >&6;} fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu CPPFLAGS=$SAVE_CPPFLAGS LIBS=$SAVE_LIBS #check mozilla nss enables_nss=yes NSS_INSTALLED=no # Check whether --enable-nss was given. if test "${enable_nss+set}" = set; then : enableval=$enable_nss; enables_nss="$enableval" fi if test "$enables_nss" = "yes"; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for NSS" >&5 $as_echo_n "checking for NSS... " >&6; } if test -n "$NSS_CFLAGS"; then pkg_cv_NSS_CFLAGS="$NSS_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"nss >= 3.10\""; } >&5 ($PKG_CONFIG --exists --print-errors "nss >= 3.10") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_NSS_CFLAGS=`$PKG_CONFIG --cflags "nss >= 3.10" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$NSS_LIBS"; then pkg_cv_NSS_LIBS="$NSS_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"nss >= 3.10\""; } >&5 ($PKG_CONFIG --exists --print-errors "nss >= 3.10") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_NSS_LIBS=`$PKG_CONFIG --libs "nss >= 3.10" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then NSS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "nss >= 3.10" 2>&1` else NSS_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "nss >= 3.10" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$NSS_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot locate nss lib" >&5 $as_echo "$as_me: WARNING: Cannot locate nss lib" >&2;} NSS_INSTALLED=no enables_nss=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot locate nss lib" >&5 $as_echo "$as_me: WARNING: Cannot locate nss lib" >&2;} NSS_INSTALLED=no enables_nss=no else NSS_CFLAGS=$pkg_cv_NSS_CFLAGS NSS_LIBS=$pkg_cv_NSS_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } NSS_INSTALLED=yes fi if test "x$NSS_INSTALLED" = "xyes" ; then $as_echo "#define HAVE_NSS 1" >>confdefs.h fi fi if test x$NSS_INSTALLED = xyes; then NSS_ENABLED_TRUE= NSS_ENABLED_FALSE='#' else NSS_ENABLED_TRUE='#' NSS_ENABLED_FALSE= fi #check SQLite SQLITE_INSTALLED=no pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for SQLITE" >&5 $as_echo_n "checking for SQLITE... " >&6; } if test -n "$SQLITE_CFLAGS"; then pkg_cv_SQLITE_CFLAGS="$SQLITE_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"sqlite3 >= 3.6\""; } >&5 ($PKG_CONFIG --exists --print-errors "sqlite3 >= 3.6") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_SQLITE_CFLAGS=`$PKG_CONFIG --cflags "sqlite3 >= 3.6" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$SQLITE_LIBS"; then pkg_cv_SQLITE_LIBS="$SQLITE_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"sqlite3 >= 3.6\""; } >&5 ($PKG_CONFIG --exists --print-errors "sqlite3 >= 3.6") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_SQLITE_LIBS=`$PKG_CONFIG --libs "sqlite3 >= 3.6" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then SQLITE_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "sqlite3 >= 3.6" 2>&1` else SQLITE_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "sqlite3 >= 3.6" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$SQLITE_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot locate SQLite newer than 3.6" >&5 $as_echo "$as_me: WARNING: Cannot locate SQLite newer than 3.6" >&2;} SQLITE_INSTALLED=no enables_sqlite=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot locate SQLite newer than 3.6" >&5 $as_echo "$as_me: WARNING: Cannot locate SQLite newer than 3.6" >&2;} SQLITE_INSTALLED=no enables_sqlite=no else SQLITE_CFLAGS=$pkg_cv_SQLITE_CFLAGS SQLITE_LIBS=$pkg_cv_SQLITE_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } SQLITE_INSTALLED=yes fi if test "x$SQLITE_INSTALLED" = "xyes" ; then $as_echo "#define HAVE_SQLITE 1" >>confdefs.h # Check for function available since 3.8 SAVE_CFLAGS=$CFLAGS SAVE_LIBS=$LIBS CFLAGS="$CFLAGS $SQLITE_CFLAGS" LIBS="$LIBS $SQLITE_LIBS" for ac_func in sqlite3_errstr do : ac_fn_c_check_func "$LINENO" "sqlite3_errstr" "ac_cv_func_sqlite3_errstr" if test "x$ac_cv_func_sqlite3_errstr" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_SQLITE3_ERRSTR 1 _ACEOF fi done CFLAGS=$SAVE_CFLAGS LIBS=$SAVE_LIBS fi if test x$SQLITE_INSTALLED = xyes; then SQLITE_ENABLED_TRUE= SQLITE_ENABLED_FALSE='#' else SQLITE_ENABLED_TRUE='#' SQLITE_ENABLED_FALSE= fi # check cppunit if test "$enables_hed" = "yes"; then # Check whether --enable-cppunit was given. if test "${enable_cppunit+set}" = set; then : enableval=$enable_cppunit; enables_cppunit=$enableval fi if test "$enables_cppunit" = "yes"; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CPPUNIT" >&5 $as_echo_n "checking for CPPUNIT... " >&6; } if test -n "$CPPUNIT_CFLAGS"; then pkg_cv_CPPUNIT_CFLAGS="$CPPUNIT_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"cppunit\""; } >&5 ($PKG_CONFIG --exists --print-errors "cppunit") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_CPPUNIT_CFLAGS=`$PKG_CONFIG --cflags "cppunit" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$CPPUNIT_LIBS"; then pkg_cv_CPPUNIT_LIBS="$CPPUNIT_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"cppunit\""; } >&5 ($PKG_CONFIG --exists --print-errors "cppunit") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_CPPUNIT_LIBS=`$PKG_CONFIG --libs "cppunit" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then CPPUNIT_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "cppunit" 2>&1` else CPPUNIT_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "cppunit" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$CPPUNIT_PKG_ERRORS" >&5 # Extract the first word of "cppunit-config", so it can be a program name with args. set dummy cppunit-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_CPPUNIT_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $CPPUNIT_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_CPPUNIT_CONFIG="$CPPUNIT_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_CPPUNIT_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path_CPPUNIT_CONFIG" && ac_cv_path_CPPUNIT_CONFIG="no" ;; esac fi CPPUNIT_CONFIG=$ac_cv_path_CPPUNIT_CONFIG if test -n "$CPPUNIT_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPPUNIT_CONFIG" >&5 $as_echo "$CPPUNIT_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$CPPUNIT_CONFIG" = "xno"; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cppunit-config not found - no UNIT testing will be performed" >&5 $as_echo "$as_me: WARNING: cppunit-config not found - no UNIT testing will be performed" >&2;} CPPUNIT_CFLAGS= CPPUNIT_LIBS= enables_cppunit="no" else CPPUNIT_CFLAGS="`$CPPUNIT_CONFIG --cflags`" CPPUNIT_LIBS="`$CPPUNIT_CONFIG --libs`" fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } # Extract the first word of "cppunit-config", so it can be a program name with args. set dummy cppunit-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_CPPUNIT_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $CPPUNIT_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_CPPUNIT_CONFIG="$CPPUNIT_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_CPPUNIT_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path_CPPUNIT_CONFIG" && ac_cv_path_CPPUNIT_CONFIG="no" ;; esac fi CPPUNIT_CONFIG=$ac_cv_path_CPPUNIT_CONFIG if test -n "$CPPUNIT_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPPUNIT_CONFIG" >&5 $as_echo "$CPPUNIT_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$CPPUNIT_CONFIG" = "xno"; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cppunit-config not found - no UNIT testing will be performed" >&5 $as_echo "$as_me: WARNING: cppunit-config not found - no UNIT testing will be performed" >&2;} CPPUNIT_CFLAGS= CPPUNIT_LIBS= enables_cppunit="no" else CPPUNIT_CFLAGS="`$CPPUNIT_CONFIG --cflags`" CPPUNIT_LIBS="`$CPPUNIT_CONFIG --libs`" fi else CPPUNIT_CFLAGS=$pkg_cv_CPPUNIT_CFLAGS CPPUNIT_LIBS=$pkg_cv_CPPUNIT_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi if test "x$CPPUNIT_CONFIG" != "xno" || test "x$CPPUNIT_PKG_ERRORS" != "x" then TEST_DIR=test else enables_cppunit=no TEST_DIR= fi fi else enables_cppunit="no" fi # check ldns library if test "$enables_compute_client" = "yes"; then # Check whether --enable-ldns was given. if test "${enable_ldns+set}" = set; then : enableval=$enable_ldns; enables_ldns=$enableval fi if test "$enables_ldns" = "yes"; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for LDNS" >&5 $as_echo_n "checking for LDNS... " >&6; } if test -n "$LDNS_CFLAGS"; then pkg_cv_LDNS_CFLAGS="$LDNS_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"ldns\""; } >&5 ($PKG_CONFIG --exists --print-errors "ldns") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_LDNS_CFLAGS=`$PKG_CONFIG --cflags "ldns" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$LDNS_LIBS"; then pkg_cv_LDNS_LIBS="$LDNS_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"ldns\""; } >&5 ($PKG_CONFIG --exists --print-errors "ldns") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_LDNS_LIBS=`$PKG_CONFIG --libs "ldns" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then LDNS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "ldns" 2>&1` else LDNS_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "ldns" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$LDNS_PKG_ERRORS" >&5 # Extract the first word of "ldns-config", so it can be a program name with args. set dummy ldns-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_path_LDNS_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $LDNS_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_LDNS_CONFIG="$LDNS_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_LDNS_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path_LDNS_CONFIG" && ac_cv_path_LDNS_CONFIG="no" ;; esac fi LDNS_CONFIG=$ac_cv_path_LDNS_CONFIG if test -n "$LDNS_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LDNS_CONFIG" >&5 $as_echo "$LDNS_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$LDNS_CONFIG" = "xno"; then ac_fn_c_check_header_mongrel "$LINENO" "ldns/ldns.h" "ac_cv_header_ldns_ldns_h" "$ac_includes_default" if test "x$ac_cv_header_ldns_ldns_h" = xyes; then : { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ldns_dname_new_frm_str in -lldns" >&5 $as_echo_n "checking for ldns_dname_new_frm_str in -lldns... " >&6; } if ${ac_cv_lib_ldns_ldns_dname_new_frm_str+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lldns $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char ldns_dname_new_frm_str (); int main () { return ldns_dname_new_frm_str (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_ldns_ldns_dname_new_frm_str=yes else ac_cv_lib_ldns_ldns_dname_new_frm_str=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ldns_ldns_dname_new_frm_str" >&5 $as_echo "$ac_cv_lib_ldns_ldns_dname_new_frm_str" >&6; } if test "x$ac_cv_lib_ldns_ldns_dname_new_frm_str" = xyes; then : LDNS_CFLAGS="$LDNS_CFLAGS" LDNS_LIBS="$LDNS_LIBS -lldns" else enables_ldns="no" fi else enables_ldns="no" fi else LDNS_CFLAGS="`$LDNS_CONFIG --cflags`" LDNS_LIBS="`$LDNS_CONFIG --libs`" fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } # Extract the first word of "ldns-config", so it can be a program name with args. set dummy ldns-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_LDNS_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $LDNS_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_LDNS_CONFIG="$LDNS_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_LDNS_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path_LDNS_CONFIG" && ac_cv_path_LDNS_CONFIG="no" ;; esac fi LDNS_CONFIG=$ac_cv_path_LDNS_CONFIG if test -n "$LDNS_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LDNS_CONFIG" >&5 $as_echo "$LDNS_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$LDNS_CONFIG" = "xno"; then ac_fn_c_check_header_mongrel "$LINENO" "ldns/ldns.h" "ac_cv_header_ldns_ldns_h" "$ac_includes_default" if test "x$ac_cv_header_ldns_ldns_h" = xyes; then : { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ldns_dname_new_frm_str in -lldns" >&5 $as_echo_n "checking for ldns_dname_new_frm_str in -lldns... " >&6; } if ${ac_cv_lib_ldns_ldns_dname_new_frm_str+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lldns $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char ldns_dname_new_frm_str (); int main () { return ldns_dname_new_frm_str (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_ldns_ldns_dname_new_frm_str=yes else ac_cv_lib_ldns_ldns_dname_new_frm_str=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ldns_ldns_dname_new_frm_str" >&5 $as_echo "$ac_cv_lib_ldns_ldns_dname_new_frm_str" >&6; } if test "x$ac_cv_lib_ldns_ldns_dname_new_frm_str" = xyes; then : LDNS_CFLAGS="$LDNS_CFLAGS" LDNS_LIBS="$LDNS_LIBS -lldns" else enables_ldns="no" fi else enables_ldns="no" fi else LDNS_CFLAGS="`$LDNS_CONFIG --cflags`" LDNS_LIBS="`$LDNS_CONFIG --libs`" fi else LDNS_CFLAGS=$pkg_cv_LDNS_CFLAGS LDNS_LIBS=$pkg_cv_LDNS_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi if test "$enables_ldns" = "no"; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: ldns library was not found. Compute clients will be built without ARCHERY support." >&5 $as_echo "$as_me: WARNING: ldns library was not found. Compute clients will be built without ARCHERY support." >&2;} fi fi else enables_ldns="no" fi if test "x$enables_ldns" = "xyes" ; then $as_echo "#define HAVE_LDNS 1" >>confdefs.h else LDNS_CFLAGS= LDNS_LIBS= fi if test "x$enables_ldns" = "xyes"; then LDNS_ENABLED_TRUE= LDNS_ENABLED_FALSE='#' else LDNS_ENABLED_TRUE='#' LDNS_ENABLED_FALSE= fi ############################## # # Check xmlsec1 # ############################# MACOSX="" case "${host}" in *darwin*) MACOSX="yes" ;; esac if test "x$MACOSX" = "xyes"; then $as_echo "#define _MACOSX 1" >>confdefs.h fi if test "x$MACOSX" = "xyes"; then MACOSX_TRUE= MACOSX_FALSE='#' else MACOSX_TRUE='#' MACOSX_FALSE= fi if test "$enables_hed" = "yes"; then XMLSEC_MIN_VERSION="1.2.4" XMLSEC_OPENSSL_MIN_VERSION="1.2.4" XMLSEC_CONFIG="${XMLSEC1_CONFIG:-xmlsec1-config}" XMLSEC_CFLAGS="" XMLSEC_LIBS="" XMLSEC_INSTALLED=no # Check whether --enable-xmlsec1 was given. 
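# --- Editor's illustrative note (not produced by autoconf; a minimal sketch) ---
# The xmlsec1 probe below works in three stages: pkg-config for
# "xmlsec1 >= $XMLSEC_MIN_VERSION" together with "xmlsec1-openssl", a fallback
# to the xmlsec1-config script on non-macOS hosts, and a pkg-config-only path on
# macOS (whose xmlsec1-config cannot be used because it relies on "ldd").
# A manual spot-check with pkg-config, assuming it is on PATH, might be:
#
#   pkg-config --exists 'xmlsec1 >= 1.2.4' \
#     && pkg-config --cflags --libs xmlsec1-openssl \
#     || echo "xmlsec1 >= 1.2.4 not found"
# -------------------------------------------------------------------------------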
if test "${enable_xmlsec1+set}" = set; then : enableval=$enable_xmlsec1; enables_xmlsec1=$enableval fi if test "x$enables_xmlsec1" = "xyes"; then # Check whether --with-xmlsec1 was given. if test "${with_xmlsec1+set}" = set; then : withval=$with_xmlsec1; fi if test "x$with_xmlsec1" = "x" ; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for XMLSEC" >&5 $as_echo_n "checking for XMLSEC... " >&6; } if test -n "$XMLSEC_CFLAGS"; then pkg_cv_XMLSEC_CFLAGS="$XMLSEC_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"xmlsec1 >= \$XMLSEC_MIN_VERSION\""; } >&5 ($PKG_CONFIG --exists --print-errors "xmlsec1 >= $XMLSEC_MIN_VERSION") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_XMLSEC_CFLAGS=`$PKG_CONFIG --cflags "xmlsec1 >= $XMLSEC_MIN_VERSION" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$XMLSEC_LIBS"; then pkg_cv_XMLSEC_LIBS="$XMLSEC_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"xmlsec1 >= \$XMLSEC_MIN_VERSION\""; } >&5 ($PKG_CONFIG --exists --print-errors "xmlsec1 >= $XMLSEC_MIN_VERSION") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_XMLSEC_LIBS=`$PKG_CONFIG --libs "xmlsec1 >= $XMLSEC_MIN_VERSION" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then XMLSEC_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "xmlsec1 >= $XMLSEC_MIN_VERSION" 2>&1` else XMLSEC_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "xmlsec1 >= $XMLSEC_MIN_VERSION" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$XMLSEC_PKG_ERRORS" >&5 XMLSEC_INSTALLED=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } XMLSEC_INSTALLED=no else XMLSEC_CFLAGS=$pkg_cv_XMLSEC_CFLAGS XMLSEC_LIBS=$pkg_cv_XMLSEC_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } XMLSEC_INSTALLED=yes fi if test "x$XMLSEC_INSTALLED" = "xyes" ; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for XMLSEC_OPENSSL" >&5 $as_echo_n "checking for XMLSEC_OPENSSL... " >&6; } if test -n "$XMLSEC_OPENSSL_CFLAGS"; then pkg_cv_XMLSEC_OPENSSL_CFLAGS="$XMLSEC_OPENSSL_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"xmlsec1-openssl >= \$XMLSEC_OPENSSL_MIN_VERSION\""; } >&5 ($PKG_CONFIG --exists --print-errors "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_XMLSEC_OPENSSL_CFLAGS=`$PKG_CONFIG --cflags "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$XMLSEC_OPENSSL_LIBS"; then pkg_cv_XMLSEC_OPENSSL_LIBS="$XMLSEC_OPENSSL_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"xmlsec1-openssl >= \$XMLSEC_OPENSSL_MIN_VERSION\""; } >&5 ($PKG_CONFIG --exists --print-errors "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_XMLSEC_OPENSSL_LIBS=`$PKG_CONFIG --libs "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then XMLSEC_OPENSSL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>&1` else XMLSEC_OPENSSL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$XMLSEC_OPENSSL_PKG_ERRORS" >&5 XMLSEC_INSTALLED=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } XMLSEC_INSTALLED=no else XMLSEC_OPENSSL_CFLAGS=$pkg_cv_XMLSEC_OPENSSL_CFLAGS XMLSEC_OPENSSL_LIBS=$pkg_cv_XMLSEC_OPENSSL_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } XMLSEC_INSTALLED=yes fi fi # Find number of backslashes in XMLSEC_CFLAGS n=$(echo $XMLSEC_CFLAGS|sed 's/.*-DXMLSEC_CRYPTO=\([^ ]*\).*/\1/'|tr -d '[A-Za-z0-1\n"]'| wc -c) # Fixes due to bugs in pkg-config and/or xmlsec1 # # 0: Indicates a bug in pkg-config which removes the escaping of the quotes # 2: Correct value with escaped quotes # 6: Old xmlsec1 version which used 3 back-slashes to escape quotes # See eg. https://bugzilla.redhat.com/show_bug.cgi?id=675334 # Make sure that the quotes are escaped with single backslash if test $n = 0 -o $n = 6; then { $as_echo "$as_me:${as_lineno-$LINENO}: Working around bad combination of pkgconfig and xmlsec1 with $n back-slashes" >&5 $as_echo "$as_me: Working around bad combination of pkgconfig and xmlsec1 with $n back-slashes" >&6;} XMLSEC_CFLAGS=$(echo $XMLSEC_CFLAGS|sed 's/\(.*-DXMLSEC_CRYPTO=\)\\*"\([^ \\"]*\)\\*" \(.*\)/\1\\"\2\\" \3/') XMLSEC_OPENSSL_CFLAGS=$(echo $XMLSEC_OPENSSL_CFLAGS|sed 's/\(.*-DXMLSEC_CRYPTO=\)\\*"\([^ \\"]*\)\\*" \(.*\)/\1\\"\2\\" \3/') fi fi if test "x$XMLSEC_INSTALLED" = "xno" -a "x$MACOSX" != "xyes"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for xmlsec1 libraries >= $XMLSEC_MIN_VERSION" >&5 $as_echo_n "checking for xmlsec1 libraries >= $XMLSEC_MIN_VERSION... " >&6; } if test "x$with_xmlsec1" != "x" ; then XMLSEC_CONFIG=$with_xmlsec1/bin/$XMLSEC_CONFIG fi "$XMLSEC_CONFIG" --version 2>/dev/null 1>/dev/null if test "$?" != '0' ; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Could not find xmlsec1 anywhere; The xml security related functionality will not be compiled" >&5 $as_echo "$as_me: WARNING: Could not find xmlsec1 anywhere; The xml security related functionality will not be compiled" >&2;} else vers=`$XMLSEC_CONFIG --version 2>/dev/null | awk -F. 
'{ printf "%d", ($1 * 1000 + $2) * 1000 + $3;}'` minvers=`echo $XMLSEC_MIN_VERSION | awk -F. '{ printf "%d", ($1 * 1000 + $2) * 1000 + $3;}'` if test "$vers" -ge "$minvers" ; then XMLSEC_LIBS="`$XMLSEC_CONFIG --libs`" XMLSEC_CFLAGS="`$XMLSEC_CONFIG --cflags`" #check the xmlsec1-openssl here if test "x$PKG_CONFIG_PATH" != "x"; then PKG_CONFIG_PATH="$with_xmlsec1/lib/pkgconfig:$PKG_CONFIG_PATH" else PKG_CONFIG_PATH="$with_xmlsec1/lib/pkgconfig" fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for XMLSEC_OPENSSL" >&5 $as_echo_n "checking for XMLSEC_OPENSSL... " >&6; } if test -n "$XMLSEC_OPENSSL_CFLAGS"; then pkg_cv_XMLSEC_OPENSSL_CFLAGS="$XMLSEC_OPENSSL_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"xmlsec1-openssl >= \$XMLSEC_OPENSSL_MIN_VERSION\""; } >&5 ($PKG_CONFIG --exists --print-errors "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_XMLSEC_OPENSSL_CFLAGS=`$PKG_CONFIG --cflags "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$XMLSEC_OPENSSL_LIBS"; then pkg_cv_XMLSEC_OPENSSL_LIBS="$XMLSEC_OPENSSL_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"xmlsec1-openssl >= \$XMLSEC_OPENSSL_MIN_VERSION\""; } >&5 ($PKG_CONFIG --exists --print-errors "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_XMLSEC_OPENSSL_LIBS=`$PKG_CONFIG --libs "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then XMLSEC_OPENSSL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>&1` else XMLSEC_OPENSSL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$XMLSEC_OPENSSL_PKG_ERRORS" >&5 XMLSEC_INSTALLED=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } XMLSEC_INSTALLED=no else XMLSEC_OPENSSL_CFLAGS=$pkg_cv_XMLSEC_OPENSSL_CFLAGS XMLSEC_OPENSSL_LIBS=$pkg_cv_XMLSEC_OPENSSL_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } XMLSEC_INSTALLED=yes fi else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: You need at least xmlsec1 $XMLSEC_MIN_VERSION for this version of arc" >&5 $as_echo "$as_me: WARNING: You need at least xmlsec1 $XMLSEC_MIN_VERSION for this version of arc" >&2;} fi fi elif test "x$XMLSEC_INSTALLED" = "xno" -a "x$MACOSX" = "xyes"; then #MACOSX has no "ldd" which is needed by xmlsec1-config, so here simply we use PKG_CHECK_MODULES if test "x$PKG_CONFIG_PATH" != "x"; then PKG_CONFIG_PATH="$with_xmlsec1/lib/pkgconfig:$PKG_CONFIG_PATH" else PKG_CONFIG_PATH="$with_xmlsec1/lib/pkgconfig" fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for XMLSEC" >&5 $as_echo_n "checking for XMLSEC... " >&6; } if test -n "$XMLSEC_CFLAGS"; then pkg_cv_XMLSEC_CFLAGS="$XMLSEC_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"xmlsec1 >= \$XMLSEC_MIN_VERSION\""; } >&5 ($PKG_CONFIG --exists --print-errors "xmlsec1 >= $XMLSEC_MIN_VERSION") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_XMLSEC_CFLAGS=`$PKG_CONFIG --cflags "xmlsec1 >= $XMLSEC_MIN_VERSION" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$XMLSEC_LIBS"; then pkg_cv_XMLSEC_LIBS="$XMLSEC_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"xmlsec1 >= \$XMLSEC_MIN_VERSION\""; } >&5 ($PKG_CONFIG --exists --print-errors "xmlsec1 >= $XMLSEC_MIN_VERSION") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_XMLSEC_LIBS=`$PKG_CONFIG --libs "xmlsec1 >= $XMLSEC_MIN_VERSION" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then XMLSEC_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "xmlsec1 >= $XMLSEC_MIN_VERSION" 2>&1` else XMLSEC_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "xmlsec1 >= $XMLSEC_MIN_VERSION" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$XMLSEC_PKG_ERRORS" >&5 XMLSEC_INSTALLED=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } XMLSEC_INSTALLED=no else XMLSEC_CFLAGS=$pkg_cv_XMLSEC_CFLAGS XMLSEC_LIBS=$pkg_cv_XMLSEC_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } XMLSEC_INSTALLED=yes fi if test "x$XMLSEC_INSTALLED" = "xyes" ; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for XMLSEC_OPENSSL" >&5 $as_echo_n "checking for XMLSEC_OPENSSL... " >&6; } if test -n "$XMLSEC_OPENSSL_CFLAGS"; then pkg_cv_XMLSEC_OPENSSL_CFLAGS="$XMLSEC_OPENSSL_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"xmlsec1-openssl >= \$XMLSEC_OPENSSL_MIN_VERSION\""; } >&5 ($PKG_CONFIG --exists --print-errors "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_XMLSEC_OPENSSL_CFLAGS=`$PKG_CONFIG --cflags "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$XMLSEC_OPENSSL_LIBS"; then pkg_cv_XMLSEC_OPENSSL_LIBS="$XMLSEC_OPENSSL_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"xmlsec1-openssl >= \$XMLSEC_OPENSSL_MIN_VERSION\""; } >&5 ($PKG_CONFIG --exists --print-errors "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_XMLSEC_OPENSSL_LIBS=`$PKG_CONFIG --libs "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then XMLSEC_OPENSSL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>&1` else XMLSEC_OPENSSL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$XMLSEC_OPENSSL_PKG_ERRORS" >&5 XMLSEC_INSTALLED=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } XMLSEC_INSTALLED=no else XMLSEC_OPENSSL_CFLAGS=$pkg_cv_XMLSEC_OPENSSL_CFLAGS XMLSEC_OPENSSL_LIBS=$pkg_cv_XMLSEC_OPENSSL_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } XMLSEC_INSTALLED=yes fi fi fi #AC_SUBST(XMLSEC_CONFIG) #AC_SUBST(XMLSEC_MIN_VERSION) enables_xmlsec1="$XMLSEC_INSTALLED" fi else enables_xmlsec1="no" fi ######################### # # Check libmysqlclient # ######################### MYSQL_INSTALLED=no if test "$enables_hed" = "yes"; then MYSQL_CONFIG="mysql_config" MYSQL_CFLAGS="" MYSQL_LIBS="" # Check whether --enable-mysql was given. if test "${enable_mysql+set}" = set; then : enableval=$enable_mysql; enables_mysql="$enableval" fi # Ask user for path to libmysqlclient if test "x$enables_mysql" = "xyes"; then # Check whether --with-mysql was given. if test "${with_mysql+set}" = set; then : withval=$with_mysql; fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for mysql client library" >&5 $as_echo_n "checking for mysql client library... " >&6; } if test "x$with_mysql" != "x" ; then MYSQL_CONFIG=$with_mysql/bin/$MYSQL_CONFIG fi if ! $MYSQL_CONFIG --version > /dev/null 2>&1 ; then as_fn_error $? "Could not find mysql C library anywhere (see config.log for details)." "$LINENO" 5 fi MYSQL_LIBS="`$MYSQL_CONFIG --libs`" MYSQL_CFLAGS="`$MYSQL_CONFIG --cflags`" MYSQL_INSTALLED="yes" enables_mysql=$MYSQL_INSTALLED fi { $as_echo "$as_me:${as_lineno-$LINENO}: MySQL client library enabled: $MYSQL_INSTALLED" >&5 $as_echo "$as_me: MySQL client library enabled: $MYSQL_INSTALLED" >&6;} fi if test "x$MYSQL_INSTALLED" = "xyes"; then MYSQL_LIBRARY_ENABLED_TRUE= MYSQL_LIBRARY_ENABLED_FALSE='#' else MYSQL_LIBRARY_ENABLED_TRUE='#' MYSQL_LIBRARY_ENABLED_FALSE= fi # Check monitor # Check whether --enable-monitor was given. if test "${enable_monitor+set}" = set; then : enableval=$enable_monitor; enables_monitor="$enableval" fi if test "x$enables_monitor" = "xyes"; then # Check whether --with-monitor was given. if test "${with_monitor+set}" = set; then : withval=$with_monitor; fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for monitor installation path" >&5 $as_echo_n "checking for monitor installation path... " >&6; } if test "x$with_monitor" != "x" ; then monitor_prefix=$with_monitor else monitor_prefix=${datadir}/arc/monitor fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $monitor_prefix" >&5 $as_echo "$monitor_prefix" >&6; } fi # check zlib ZLIB_CFLAGS= ZLIB_LDFLAGS= ZLIB_LIBS= if test "$enables_hed" = "yes"; then SAVE_CPPFLAGS=$CPPFLAGS SAVE_LDFLAGS=$LDFLAGS # Check whether --with-zlib was given. 
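# --- Editor's illustrative note (not produced by autoconf; a minimal sketch) ---
# The zlib check below honours --with-zlib=PREFIX by adding PREFIX/include and
# PREFIX/lib to the compiler and linker search paths, then requires zlib.h and a
# successful test link of deflateInit2_ from -lz; either failure aborts
# configure.  A rough manual equivalent, assuming a C compiler named cc, is:
#
#   printf 'extern char deflateInit2_(); int main() { return deflateInit2_(); }\n' > conftest_z.c
#   cc conftest_z.c -lz -o conftest_z && echo "zlib is linkable" || echo "zlib missing"
#   rm -f conftest_z.c conftest_z
# -------------------------------------------------------------------------------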
if test "${with_zlib+set}" = set; then : withval=$with_zlib; if test -d "$withval"; then ZLIB_CFLAGS="${CPPFLAGS} -I$withval/include" ZLIB_LDFLAGS="${LDFLAGS} -L$withval/lib" fi fi CPPFLAGS="$CPPFLAGS $ZLIB_CFLAGS" LDFLAGS="$LDFLAGS $ZLIB_LDFLAGS" ac_fn_c_check_header_mongrel "$LINENO" "zlib.h" "ac_cv_header_zlib_h" "$ac_includes_default" if test "x$ac_cv_header_zlib_h" = xyes; then : ZLIB_CFLAGS="$ZLIB_CFLAGS" else as_fn_error $? "unable to find zlib header files" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for deflateInit2_ in -lz" >&5 $as_echo_n "checking for deflateInit2_ in -lz... " >&6; } if ${ac_cv_lib_z_deflateInit2_+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lz $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char deflateInit2_ (); int main () { return deflateInit2_ (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_z_deflateInit2_=yes else ac_cv_lib_z_deflateInit2_=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_z_deflateInit2_" >&5 $as_echo "$ac_cv_lib_z_deflateInit2_" >&6; } if test "x$ac_cv_lib_z_deflateInit2_" = xyes; then : ZLIB_LIBS="$ZLIB_LDFLAGS -lz" else as_fn_error $? "unable to link with zlib library" "$LINENO" 5 fi CPPFLAGS=$SAVE_CPPFLAGS LDFLAGS=$SAVE_LDFLAGS fi # check ARGUS ARGUS_CFLAGS= ARGUS_LIBS= # Check whether --enable-argus was given. if test "${enable_argus+set}" = set; then : enableval=$enable_argus; enables_argus="$enableval" fi if test "x$enables_argus" = "xyes"; then # Check whether --with-argus was given. if test "${with_argus+set}" = set; then : withval=$with_argus; if test "x$PKG_CONFIG_PATH" != "x"; then PKG_CONFIG_PATH="$withval/lib/pkgconfig:$PKG_CONFIG_PATH" else PKG_CONFIG_PATH="$withval/lib/pkgconfig" fi fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ARGUS" >&5 $as_echo_n "checking for ARGUS... " >&6; } if test -n "$ARGUS_CFLAGS"; then pkg_cv_ARGUS_CFLAGS="$ARGUS_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libargus-pep >= 2.0.0\""; } >&5 ($PKG_CONFIG --exists --print-errors "libargus-pep >= 2.0.0") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ARGUS_CFLAGS=`$PKG_CONFIG --cflags "libargus-pep >= 2.0.0" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$ARGUS_LIBS"; then pkg_cv_ARGUS_LIBS="$ARGUS_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libargus-pep >= 2.0.0\""; } >&5 ($PKG_CONFIG --exists --print-errors "libargus-pep >= 2.0.0") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_ARGUS_LIBS=`$PKG_CONFIG --libs "libargus-pep >= 2.0.0" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then ARGUS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libargus-pep >= 2.0.0" 2>&1` else ARGUS_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libargus-pep >= 2.0.0" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$ARGUS_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find Argus PEP libraries with version >= 2" >&5 $as_echo "$as_me: Failed to find Argus PEP libraries with version >= 2" >&6;} enables_argus=no elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to find Argus PEP libraries with version >= 2" >&5 $as_echo "$as_me: Failed to find Argus PEP libraries with version >= 2" >&6;} enables_argus=no else ARGUS_CFLAGS=$pkg_cv_ARGUS_CFLAGS ARGUS_LIBS=$pkg_cv_ARGUS_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi fi if test "x$enables_argus" = "xyes"; then ARGUS_ENABLED_TRUE= ARGUS_ENABLED_FALSE='#' else ARGUS_ENABLED_TRUE='#' ARGUS_ENABLED_FALSE= fi ############################################### # # Check for Berkeley DB C++ # ############################################### DBCXX_LIBS="" DBCXX_CPPFLAGS= if test "$enables_hed" = "yes"; then # # Allow the user to specify db_cxx.h location (we will still check though) # dbcxx_include_paths= # Check whether --with-dbcxx-include was given. if test "${with_dbcxx_include+set}" = set; then : withval=$with_dbcxx_include; if test "x$withval" = "xyes" ; then as_fn_error $? "--with-dbcxx-include requires PATH argument" "$LINENO" 5 fi if test "x$withval" != "xno" ; then dbcxx_include_paths=$withval fi fi # # Allow the user to specify DB4 library location (we will still check though) # db4_library_path= # Check whether --with-db4-library-path was given. if test "${with_db4_library_path+set}" = set; then : withval=$with_db4_library_path; if test "x$withval" = "xyes" ; then as_fn_error $? 
"--with-db4-library-path requires PATH argument" "$LINENO" 5 fi if test "x$withval" != "xno" ; then db4_library_path=$withval fi fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu # # If user did not specify location we start by searching at the standard locations # if test "x$dbcxx_include_paths" = "x" then { $as_echo "$as_me:${as_lineno-$LINENO}: Looking for db_cxx.h in standard locations" >&5 $as_echo "$as_me: Looking for db_cxx.h in standard locations" >&6;} for ac_header in db_cxx.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "db_cxx.h" "ac_cv_header_db_cxx_h" "$ac_includes_default" if test "x$ac_cv_header_db_cxx_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_DB_CXX_H 1 _ACEOF HAVE_DBCXX=yes else HAVE_DBCXX=no fi done # If the user did not provide a location we have some good suggestions dbcxx_include_paths="/usr/include/db4 /usr/include/db44 /usr/include/db43" else HAVE_DBCXX=no fi # # Now Look for db_cxx.h in non-standard locations # if test "$HAVE_DBCXX" = no then for dbcxx_dir in $dbcxx_include_paths do SAVE_CPPFLAGS=$CPPFLAGS DBCXX_CPPFLAGS=-I$dbcxx_dir CPPFLAGS="$CPPFLAGS $DBCXX_CPPFLAGS" # Disable Autoconf caching unset ac_cv_header_db_cxx_h { $as_echo "$as_me:${as_lineno-$LINENO}: Looking for db_cxx.h in $dbcxx_dir" >&5 $as_echo "$as_me: Looking for db_cxx.h in $dbcxx_dir" >&6;} for ac_header in db_cxx.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "db_cxx.h" "ac_cv_header_db_cxx_h" "$ac_includes_default" if test "x$ac_cv_header_db_cxx_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_DB_CXX_H 1 _ACEOF HAVE_DBCXX=yes else HAVE_DBCXX=no fi done CPPFLAGS=$SAVE_CPPFLAGS # If a db_cxx.h was found we break and keep the current value of DBCXX_CPPFLAGS if test "$HAVE_DBCXX" = yes then break fi DBCXX_CPPFLAGS= done fi if test "x$db4_library_path" != "x" then db4_library_path="-L$db4_library_path" fi if test "$HAVE_DBCXX" = no then DBCXX_LIBS="" else SAVE_LDFLAGS=$LDFLAGS SAVE_CXXFLAGS=$CXXFLAGS # pthread needed for RH9 LDFLAGS="$LDFLAGS -lpthread" LDFLAGS="$LDFLAGS $db4_library_path" for db_ver in "" -4.7 -4.3 -4.2 do as_ac_Lib=`$as_echo "ac_cv_lib_db_cxx$db_ver''_main" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for main in -ldb_cxx$db_ver" >&5 $as_echo_n "checking for main in -ldb_cxx$db_ver... " >&6; } if eval \${$as_ac_Lib+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldb_cxx$db_ver $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { return main (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else eval "$as_ac_Lib=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi eval ac_res=\$$as_ac_Lib { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : DBCXX_LIBS="$db4_library_path -ldb_cxx$db_ver" else DBCXX_LIBS="" fi if test "$DBCXX_LIBS" = "" then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: BerkeleyDB library libdb_cxx$db_ver was not found!" >&5 $as_echo "$as_me: WARNING: BerkeleyDB library libdb_cxx$db_ver was not found!" >&2;} else break fi done if test "$DBCXX_LIBS" = "" then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: No BerkeleyDB library found!" 
>&5 $as_echo "$as_me: WARNING: No BerkeleyDB library found!" >&2;} fi LDFLAGS=$SAVE_LDFLAGS CXXFLAGS=$SAVE_CXXFLAGS fi if test ! "x$DBCXX_LIBS" = "x" then $as_echo "#define HAVE_DBCXX 1" >>confdefs.h SAVE_CXXFLAGS=$CXXFLAGS CXXFLAGS="$CXXFLAGS $DBCXX_CPPFLAGS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the Berkeley DB has DbDeadlockException" >&5 $as_echo_n "checking whether the Berkeley DB has DbDeadlockException... " >&6; } if ${ac_cv_dbcxx_dbdeadlockexception+:} false; then : $as_echo_n "(cached) " >&6 else ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <db_cxx.h> int main () { try { } catch(DbDeadlockException&) { }; return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_dbcxx_dbdeadlockexception=yes else ac_cv_dbcxx_dbdeadlockexception=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_dbcxx_dbdeadlockexception" >&5 $as_echo "$ac_cv_dbcxx_dbdeadlockexception" >&6; } if test "$ac_cv_dbcxx_dbdeadlockexception" = yes; then $as_echo "#define HAVE_DBDEADLOCKEXCEPTION /**/" >>confdefs.h fi CXXFLAGS=$SAVE_CXXFLAGS fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu fi # DBJSTORE (storing jobs information in BDB) # Check whether --enable-dbjstore was given. if test "${enable_dbjstore+set}" = set; then : enableval=$enable_dbjstore; enables_dbjstore=$enableval fi if test "$enables_dbjstore" = "yes"; then if test "x$DBCXX_LIBS" = "x" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: For storing jobs in BDB C++ API is needed (dbcxx) - disabling" >&5 $as_echo "$as_me: For storing jobs in BDB C++ API is needed (dbcxx) - disabling" >&6;} enables_dbjstore="no" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: Storing jobs in BDB enabled: $enables_dbjstore" >&5 $as_echo "$as_me: Storing jobs in BDB enabled: $enables_dbjstore" >&6;} if test "x$enables_dbjstore" = "xyes"; then DBJSTORE_ENABLED_TRUE= DBJSTORE_ENABLED_FALSE='#' else DBJSTORE_ENABLED_TRUE='#' DBJSTORE_ENABLED_FALSE= fi if test "x$enables_dbjstore" = "xyes"; then $as_echo "#define DBJSTORE_ENABLED 1" >>confdefs.h fi
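# --- Editor's illustrative note (not produced by autoconf; a minimal sketch) ---
# Two optional job-record backends are wired up around this point:
# --enable-dbjstore needs the Berkeley DB C++ API located above (DBCXX_LIBS),
# while --enable-sqlitejstore, handled next, needs the SQLite >= 3.6 detected
# earlier (SQLITE_INSTALLED).  Whenever the corresponding library is missing the
# option is downgraded to "no" with a notice.  A hypothetical invocation that
# keeps only the SQLite backend could look like:
#
#   ./configure --disable-dbjstore --enable-sqlitejstore
# -------------------------------------------------------------------------------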
# SQLITEJSTORE (storing jobs information in SQLite) # Check whether --enable-sqlitejstore was given. if test "${enable_sqlitejstore+set}" = set; then : enableval=$enable_sqlitejstore; enables_sqlitejstore=$enableval fi if test "$enables_sqlitejstore" = "yes"; then if test "x$SQLITE_INSTALLED" != "xyes" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: For storing jobs in SQLite install SQLite 3.6 or newer - disabling" >&5 $as_echo "$as_me: For storing jobs in SQLite install SQLite 3.6 or newer - disabling" >&6;} enables_sqlitejstore="no" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: Storing jobs in SQLite enabled: $enables_sqlitejstore" >&5 $as_echo "$as_me: Storing jobs in SQLite enabled: $enables_sqlitejstore" >&6;} if test "x$enables_sqlitejstore" = "xyes"; then SQLITEJSTORE_ENABLED_TRUE= SQLITEJSTORE_ENABLED_FALSE='#' else SQLITEJSTORE_ENABLED_TRUE='#' SQLITEJSTORE_ENABLED_FALSE= fi if test "x$enables_sqlitejstore" = "xyes"; then $as_echo "#define SQLITEJSTORE_ENABLED 1" >>confdefs.h fi # globus/gpt packages if test "$enables_hed" = "yes"; then if test "x$ac_cv_env_GLOBUS_MAKEFILE_HEADER_set" != "xset"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}globus-makefile-header", so it can be a program name with args. set dummy ${ac_tool_prefix}globus-makefile-header; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_GLOBUS_MAKEFILE_HEADER+:} false; then : $as_echo_n "(cached) " >&6 else case $GLOBUS_MAKEFILE_HEADER in [\\/]* | ?:[\\/]*) ac_cv_path_GLOBUS_MAKEFILE_HEADER="$GLOBUS_MAKEFILE_HEADER" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_dummy="$PATH:/opt/globus/bin" for as_dir in $as_dummy do IFS=$as_save_IFS test -z "$as_dir" && as_dir=.
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_GLOBUS_MAKEFILE_HEADER="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi ac_pt_GLOBUS_MAKEFILE_HEADER=$ac_cv_path_ac_pt_GLOBUS_MAKEFILE_HEADER if test -n "$ac_pt_GLOBUS_MAKEFILE_HEADER"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_GLOBUS_MAKEFILE_HEADER" >&5 $as_echo "$ac_pt_GLOBUS_MAKEFILE_HEADER" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_pt_GLOBUS_MAKEFILE_HEADER" = x; then GLOBUS_MAKEFILE_HEADER="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac GLOBUS_MAKEFILE_HEADER=$ac_pt_GLOBUS_MAKEFILE_HEADER fi else GLOBUS_MAKEFILE_HEADER="$ac_cv_path_GLOBUS_MAKEFILE_HEADER" fi fi if test -f "$GLOBUS_MAKEFILE_HEADER" && test "x$GLOBUS_LOCATION" = "x"; then GLOBUS_LOCATION=`dirname $GLOBUS_MAKEFILE_HEADER` GLOBUS_LOCATION=`dirname $GLOBUS_LOCATION` export GLOBUS_LOCATION fi if test "x$ac_cv_env_GPT_FLAVOR_CONFIGURATION_set" != "xset"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gpt-flavor-configuration", so it can be a program name with args. set dummy ${ac_tool_prefix}gpt-flavor-configuration; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_GPT_FLAVOR_CONFIGURATION+:} false; then : $as_echo_n "(cached) " >&6 else case $GPT_FLAVOR_CONFIGURATION in [\\/]* | ?:[\\/]*) ac_cv_path_GPT_FLAVOR_CONFIGURATION="$GPT_FLAVOR_CONFIGURATION" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_dummy="$PATH:/usr/sbin:/opt/gpt/sbin" for as_dir in $as_dummy do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_GPT_FLAVOR_CONFIGURATION="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi GPT_FLAVOR_CONFIGURATION=$ac_cv_path_GPT_FLAVOR_CONFIGURATION if test -n "$GPT_FLAVOR_CONFIGURATION"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $GPT_FLAVOR_CONFIGURATION" >&5 $as_echo "$GPT_FLAVOR_CONFIGURATION" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_path_GPT_FLAVOR_CONFIGURATION"; then ac_pt_GPT_FLAVOR_CONFIGURATION=$GPT_FLAVOR_CONFIGURATION # Extract the first word of "gpt-flavor-configuration", so it can be a program name with args. set dummy gpt-flavor-configuration; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_ac_pt_GPT_FLAVOR_CONFIGURATION+:} false; then : $as_echo_n "(cached) " >&6 else case $ac_pt_GPT_FLAVOR_CONFIGURATION in [\\/]* | ?:[\\/]*) ac_cv_path_ac_pt_GPT_FLAVOR_CONFIGURATION="$ac_pt_GPT_FLAVOR_CONFIGURATION" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_dummy="$PATH:/usr/sbin:/opt/gpt/sbin" for as_dir in $as_dummy do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_GPT_FLAVOR_CONFIGURATION="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi ac_pt_GPT_FLAVOR_CONFIGURATION=$ac_cv_path_ac_pt_GPT_FLAVOR_CONFIGURATION if test -n "$ac_pt_GPT_FLAVOR_CONFIGURATION"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_GPT_FLAVOR_CONFIGURATION" >&5 $as_echo "$ac_pt_GPT_FLAVOR_CONFIGURATION" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_pt_GPT_FLAVOR_CONFIGURATION" = x; then GPT_FLAVOR_CONFIGURATION="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac GPT_FLAVOR_CONFIGURATION=$ac_pt_GPT_FLAVOR_CONFIGURATION fi else GPT_FLAVOR_CONFIGURATION="$ac_cv_path_GPT_FLAVOR_CONFIGURATION" fi fi if test -f "$GPT_FLAVOR_CONFIGURATION" && test "x$GPT_LOCATION" = "x"; then GPT_LOCATION=`dirname $GPT_FLAVOR_CONFIGURATION` GPT_LOCATION=`dirname $GPT_LOCATION` export GPT_LOCATION fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for gpt flavor" >&5 $as_echo_n "checking for gpt flavor... " >&6; } # Check whether --with-flavor was given. if test "${with_flavor+set}" = set; then : withval=$with_flavor; GPT_FLAVOR=$withval else if test -n "$GPT_FLAVOR_CONFIGURATION" ; then GPT_FLAVOR=`$GPT_FLAVOR_CONFIGURATION | \\ grep '^[a-zA-Z].*:$' | cut -f1 -d: | grep thr | tail -1` fi fi if test -n "$GPT_FLAVOR"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $GPT_FLAVOR" >&5 $as_echo "$GPT_FLAVOR" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: none detected, is globus_core-devel installed?" >&5 $as_echo "none detected, is globus_core-devel installed?" >&6; } fi if test "x$ac_cv_env_GPT_QUERY_set" != "xset"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gpt-query", so it can be a program name with args. set dummy ${ac_tool_prefix}gpt-query; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_GPT_QUERY+:} false; then : $as_echo_n "(cached) " >&6 else case $GPT_QUERY in [\\/]* | ?:[\\/]*) ac_cv_path_GPT_QUERY="$GPT_QUERY" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_dummy="$PATH:/usr/sbin:/opt/gpt/sbin" for as_dir in $as_dummy do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_GPT_QUERY="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi GPT_QUERY=$ac_cv_path_GPT_QUERY if test -n "$GPT_QUERY"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $GPT_QUERY" >&5 $as_echo "$GPT_QUERY" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_path_GPT_QUERY"; then ac_pt_GPT_QUERY=$GPT_QUERY # Extract the first word of "gpt-query", so it can be a program name with args. set dummy gpt-query; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_path_ac_pt_GPT_QUERY+:} false; then : $as_echo_n "(cached) " >&6 else case $ac_pt_GPT_QUERY in [\\/]* | ?:[\\/]*) ac_cv_path_ac_pt_GPT_QUERY="$ac_pt_GPT_QUERY" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_dummy="$PATH:/usr/sbin:/opt/gpt/sbin" for as_dir in $as_dummy do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_GPT_QUERY="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi ac_pt_GPT_QUERY=$ac_cv_path_ac_pt_GPT_QUERY if test -n "$ac_pt_GPT_QUERY"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_GPT_QUERY" >&5 $as_echo "$ac_pt_GPT_QUERY" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_pt_GPT_QUERY" = x; then GPT_QUERY="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac GPT_QUERY=$ac_pt_GPT_QUERY fi else GPT_QUERY="$ac_cv_path_GPT_QUERY" fi fi if test -f "$GPT_QUERY" && test "x$GPT_LOCATION" = "x"; then GPT_LOCATION=`dirname $GPT_QUERY` GPT_LOCATION=`dirname $GPT_LOCATION` export GPT_LOCATION fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLOBUS_COMMON" >&5 $as_echo_n "checking for GLOBUS_COMMON... " >&6; } if test -n "$GLOBUS_COMMON_CFLAGS"; then pkg_cv_GLOBUS_COMMON_CFLAGS="$GLOBUS_COMMON_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-common\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-common") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_COMMON_CFLAGS=`$PKG_CONFIG --cflags "globus-common" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLOBUS_COMMON_LIBS"; then pkg_cv_GLOBUS_COMMON_LIBS="$GLOBUS_COMMON_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-common\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-common") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_COMMON_LIBS=`$PKG_CONFIG --libs "globus-common" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLOBUS_COMMON_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "globus-common" 2>&1` else GLOBUS_COMMON_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "globus-common" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLOBUS_COMMON_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_common" >&5 $as_echo_n "checking for globus_common... 
" >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_common_version=`$GPT_QUERY globus_common-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_common_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_common | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_common_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_common_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_common_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_common_version" >&5 $as_echo "$gpt_cv_globus_common_version" >&6; } GLOBUS_COMMON_VERSION=$gpt_cv_globus_common_version GLOBUS_COMMON_LIBS=$gpt_cv_globus_common_libs GLOBUS_COMMON_CFLAGS=$gpt_cv_globus_common_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_common" >&5 $as_echo_n "checking for globus_common... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_common_version=`$GPT_QUERY globus_common-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_common_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_common | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_common_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_common_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_common_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_common_version" >&5 $as_echo "$gpt_cv_globus_common_version" >&6; } GLOBUS_COMMON_VERSION=$gpt_cv_globus_common_version GLOBUS_COMMON_LIBS=$gpt_cv_globus_common_libs GLOBUS_COMMON_CFLAGS=$gpt_cv_globus_common_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else GLOBUS_COMMON_CFLAGS=$pkg_cv_GLOBUS_COMMON_CFLAGS GLOBUS_COMMON_LIBS=$pkg_cv_GLOBUS_COMMON_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } GLOBUS_COMMON_VERSION=`$PKG_CONFIG --modversion globus-common` fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLOBUS_GSSAPI_GSI" >&5 $as_echo_n "checking for GLOBUS_GSSAPI_GSI... " >&6; } if test -n "$GLOBUS_GSSAPI_GSI_CFLAGS"; then pkg_cv_GLOBUS_GSSAPI_GSI_CFLAGS="$GLOBUS_GSSAPI_GSI_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-gssapi-gsi\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-gssapi-gsi") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_GSSAPI_GSI_CFLAGS=`$PKG_CONFIG --cflags "globus-gssapi-gsi" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLOBUS_GSSAPI_GSI_LIBS"; then pkg_cv_GLOBUS_GSSAPI_GSI_LIBS="$GLOBUS_GSSAPI_GSI_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-gssapi-gsi\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-gssapi-gsi") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_GSSAPI_GSI_LIBS=`$PKG_CONFIG --libs "globus-gssapi-gsi" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLOBUS_GSSAPI_GSI_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "globus-gssapi-gsi" 2>&1` else GLOBUS_GSSAPI_GSI_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "globus-gssapi-gsi" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLOBUS_GSSAPI_GSI_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_gssapi_gsi" >&5 $as_echo_n "checking for globus_gssapi_gsi... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_gssapi_gsi_version=`$GPT_QUERY globus_gssapi_gsi-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_gssapi_gsi_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_gssapi_gsi | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_gssapi_gsi_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_gssapi_gsi_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_gssapi_gsi_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_gssapi_gsi_version" >&5 $as_echo "$gpt_cv_globus_gssapi_gsi_version" >&6; } GLOBUS_GSSAPI_GSI_VERSION=$gpt_cv_globus_gssapi_gsi_version GLOBUS_GSSAPI_GSI_LIBS=$gpt_cv_globus_gssapi_gsi_libs GLOBUS_GSSAPI_GSI_CFLAGS=$gpt_cv_globus_gssapi_gsi_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_gssapi_gsi" >&5 $as_echo_n "checking for globus_gssapi_gsi... 
" >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_gssapi_gsi_version=`$GPT_QUERY globus_gssapi_gsi-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_gssapi_gsi_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_gssapi_gsi | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_gssapi_gsi_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_gssapi_gsi_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_gssapi_gsi_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_gssapi_gsi_version" >&5 $as_echo "$gpt_cv_globus_gssapi_gsi_version" >&6; } GLOBUS_GSSAPI_GSI_VERSION=$gpt_cv_globus_gssapi_gsi_version GLOBUS_GSSAPI_GSI_LIBS=$gpt_cv_globus_gssapi_gsi_libs GLOBUS_GSSAPI_GSI_CFLAGS=$gpt_cv_globus_gssapi_gsi_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else GLOBUS_GSSAPI_GSI_CFLAGS=$pkg_cv_GLOBUS_GSSAPI_GSI_CFLAGS GLOBUS_GSSAPI_GSI_LIBS=$pkg_cv_GLOBUS_GSSAPI_GSI_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } GLOBUS_GSSAPI_GSI_VERSION=`$PKG_CONFIG --modversion globus-gssapi-gsi` fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLOBUS_GSS_ASSIST" >&5 $as_echo_n "checking for GLOBUS_GSS_ASSIST... " >&6; } if test -n "$GLOBUS_GSS_ASSIST_CFLAGS"; then pkg_cv_GLOBUS_GSS_ASSIST_CFLAGS="$GLOBUS_GSS_ASSIST_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-gss-assist\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-gss-assist") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_GSS_ASSIST_CFLAGS=`$PKG_CONFIG --cflags "globus-gss-assist" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLOBUS_GSS_ASSIST_LIBS"; then pkg_cv_GLOBUS_GSS_ASSIST_LIBS="$GLOBUS_GSS_ASSIST_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-gss-assist\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-gss-assist") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_GSS_ASSIST_LIBS=`$PKG_CONFIG --libs "globus-gss-assist" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLOBUS_GSS_ASSIST_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "globus-gss-assist" 2>&1` else GLOBUS_GSS_ASSIST_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "globus-gss-assist" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLOBUS_GSS_ASSIST_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_gss_assist" >&5 $as_echo_n "checking for globus_gss_assist... 
" >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_gss_assist_version=`$GPT_QUERY globus_gss_assist-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_gss_assist_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_gss_assist | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_gss_assist_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_gss_assist_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_gss_assist_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_gss_assist_version" >&5 $as_echo "$gpt_cv_globus_gss_assist_version" >&6; } GLOBUS_GSS_ASSIST_VERSION=$gpt_cv_globus_gss_assist_version GLOBUS_GSS_ASSIST_LIBS=$gpt_cv_globus_gss_assist_libs GLOBUS_GSS_ASSIST_CFLAGS=$gpt_cv_globus_gss_assist_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_gss_assist" >&5 $as_echo_n "checking for globus_gss_assist... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_gss_assist_version=`$GPT_QUERY globus_gss_assist-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_gss_assist_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_gss_assist | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_gss_assist_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_gss_assist_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_gss_assist_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_gss_assist_version" >&5 $as_echo "$gpt_cv_globus_gss_assist_version" >&6; } GLOBUS_GSS_ASSIST_VERSION=$gpt_cv_globus_gss_assist_version GLOBUS_GSS_ASSIST_LIBS=$gpt_cv_globus_gss_assist_libs GLOBUS_GSS_ASSIST_CFLAGS=$gpt_cv_globus_gss_assist_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else GLOBUS_GSS_ASSIST_CFLAGS=$pkg_cv_GLOBUS_GSS_ASSIST_CFLAGS GLOBUS_GSS_ASSIST_LIBS=$pkg_cv_GLOBUS_GSS_ASSIST_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } GLOBUS_GSS_ASSIST_VERSION=`$PKG_CONFIG --modversion globus-gss-assist` fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLOBUS_GSI_CALLBACK" >&5 $as_echo_n "checking for GLOBUS_GSI_CALLBACK... " >&6; } if test -n "$GLOBUS_GSI_CALLBACK_CFLAGS"; then pkg_cv_GLOBUS_GSI_CALLBACK_CFLAGS="$GLOBUS_GSI_CALLBACK_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-gsi-callback\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-gsi-callback") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_GSI_CALLBACK_CFLAGS=`$PKG_CONFIG --cflags "globus-gsi-callback" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLOBUS_GSI_CALLBACK_LIBS"; then pkg_cv_GLOBUS_GSI_CALLBACK_LIBS="$GLOBUS_GSI_CALLBACK_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-gsi-callback\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-gsi-callback") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_GSI_CALLBACK_LIBS=`$PKG_CONFIG --libs "globus-gsi-callback" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLOBUS_GSI_CALLBACK_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "globus-gsi-callback" 2>&1` else GLOBUS_GSI_CALLBACK_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "globus-gsi-callback" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLOBUS_GSI_CALLBACK_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_gsi_callback" >&5 $as_echo_n "checking for globus_gsi_callback... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_gsi_callback_version=`$GPT_QUERY globus_gsi_callback-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_gsi_callback_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_gsi_callback | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_gsi_callback_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_gsi_callback_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_gsi_callback_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_gsi_callback_version" >&5 $as_echo "$gpt_cv_globus_gsi_callback_version" >&6; } GLOBUS_GSI_CALLBACK_VERSION=$gpt_cv_globus_gsi_callback_version GLOBUS_GSI_CALLBACK_LIBS=$gpt_cv_globus_gsi_callback_libs GLOBUS_GSI_CALLBACK_CFLAGS=$gpt_cv_globus_gsi_callback_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_gsi_callback" >&5 $as_echo_n "checking for globus_gsi_callback... 
" >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_gsi_callback_version=`$GPT_QUERY globus_gsi_callback-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_gsi_callback_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_gsi_callback | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_gsi_callback_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_gsi_callback_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_gsi_callback_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_gsi_callback_version" >&5 $as_echo "$gpt_cv_globus_gsi_callback_version" >&6; } GLOBUS_GSI_CALLBACK_VERSION=$gpt_cv_globus_gsi_callback_version GLOBUS_GSI_CALLBACK_LIBS=$gpt_cv_globus_gsi_callback_libs GLOBUS_GSI_CALLBACK_CFLAGS=$gpt_cv_globus_gsi_callback_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else GLOBUS_GSI_CALLBACK_CFLAGS=$pkg_cv_GLOBUS_GSI_CALLBACK_CFLAGS GLOBUS_GSI_CALLBACK_LIBS=$pkg_cv_GLOBUS_GSI_CALLBACK_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } GLOBUS_GSI_CALLBACK_VERSION=`$PKG_CONFIG --modversion globus-gsi-callback` fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLOBUS_FTP_CLIENT" >&5 $as_echo_n "checking for GLOBUS_FTP_CLIENT... " >&6; } if test -n "$GLOBUS_FTP_CLIENT_CFLAGS"; then pkg_cv_GLOBUS_FTP_CLIENT_CFLAGS="$GLOBUS_FTP_CLIENT_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-ftp-client\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-ftp-client") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_FTP_CLIENT_CFLAGS=`$PKG_CONFIG --cflags "globus-ftp-client" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLOBUS_FTP_CLIENT_LIBS"; then pkg_cv_GLOBUS_FTP_CLIENT_LIBS="$GLOBUS_FTP_CLIENT_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-ftp-client\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-ftp-client") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_FTP_CLIENT_LIBS=`$PKG_CONFIG --libs "globus-ftp-client" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLOBUS_FTP_CLIENT_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "globus-ftp-client" 2>&1` else GLOBUS_FTP_CLIENT_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "globus-ftp-client" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLOBUS_FTP_CLIENT_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_ftp_client" >&5 $as_echo_n "checking for globus_ftp_client... 
" >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_ftp_client_version=`$GPT_QUERY globus_ftp_client-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_ftp_client_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_ftp_client | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_ftp_client_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_ftp_client_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_ftp_client_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_ftp_client_version" >&5 $as_echo "$gpt_cv_globus_ftp_client_version" >&6; } GLOBUS_FTP_CLIENT_VERSION=$gpt_cv_globus_ftp_client_version GLOBUS_FTP_CLIENT_LIBS=$gpt_cv_globus_ftp_client_libs GLOBUS_FTP_CLIENT_CFLAGS=$gpt_cv_globus_ftp_client_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_ftp_client" >&5 $as_echo_n "checking for globus_ftp_client... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_ftp_client_version=`$GPT_QUERY globus_ftp_client-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_ftp_client_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_ftp_client | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_ftp_client_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_ftp_client_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_ftp_client_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_ftp_client_version" >&5 $as_echo "$gpt_cv_globus_ftp_client_version" >&6; } GLOBUS_FTP_CLIENT_VERSION=$gpt_cv_globus_ftp_client_version GLOBUS_FTP_CLIENT_LIBS=$gpt_cv_globus_ftp_client_libs GLOBUS_FTP_CLIENT_CFLAGS=$gpt_cv_globus_ftp_client_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else GLOBUS_FTP_CLIENT_CFLAGS=$pkg_cv_GLOBUS_FTP_CLIENT_CFLAGS GLOBUS_FTP_CLIENT_LIBS=$pkg_cv_GLOBUS_FTP_CLIENT_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } GLOBUS_FTP_CLIENT_VERSION=`$PKG_CONFIG --modversion globus-ftp-client` fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLOBUS_FTP_CONTROL" >&5 $as_echo_n "checking for GLOBUS_FTP_CONTROL... " >&6; } if test -n "$GLOBUS_FTP_CONTROL_CFLAGS"; then pkg_cv_GLOBUS_FTP_CONTROL_CFLAGS="$GLOBUS_FTP_CONTROL_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-ftp-control\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-ftp-control") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_FTP_CONTROL_CFLAGS=`$PKG_CONFIG --cflags "globus-ftp-control" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLOBUS_FTP_CONTROL_LIBS"; then pkg_cv_GLOBUS_FTP_CONTROL_LIBS="$GLOBUS_FTP_CONTROL_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-ftp-control\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-ftp-control") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_FTP_CONTROL_LIBS=`$PKG_CONFIG --libs "globus-ftp-control" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLOBUS_FTP_CONTROL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "globus-ftp-control" 2>&1` else GLOBUS_FTP_CONTROL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "globus-ftp-control" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLOBUS_FTP_CONTROL_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_ftp_control" >&5 $as_echo_n "checking for globus_ftp_control... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_ftp_control_version=`$GPT_QUERY globus_ftp_control-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_ftp_control_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_ftp_control | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_ftp_control_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_ftp_control_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_ftp_control_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_ftp_control_version" >&5 $as_echo "$gpt_cv_globus_ftp_control_version" >&6; } GLOBUS_FTP_CONTROL_VERSION=$gpt_cv_globus_ftp_control_version GLOBUS_FTP_CONTROL_LIBS=$gpt_cv_globus_ftp_control_libs GLOBUS_FTP_CONTROL_CFLAGS=$gpt_cv_globus_ftp_control_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_ftp_control" >&5 $as_echo_n "checking for globus_ftp_control... 
" >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_ftp_control_version=`$GPT_QUERY globus_ftp_control-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_ftp_control_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_ftp_control | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_ftp_control_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_ftp_control_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_ftp_control_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_ftp_control_version" >&5 $as_echo "$gpt_cv_globus_ftp_control_version" >&6; } GLOBUS_FTP_CONTROL_VERSION=$gpt_cv_globus_ftp_control_version GLOBUS_FTP_CONTROL_LIBS=$gpt_cv_globus_ftp_control_libs GLOBUS_FTP_CONTROL_CFLAGS=$gpt_cv_globus_ftp_control_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else GLOBUS_FTP_CONTROL_CFLAGS=$pkg_cv_GLOBUS_FTP_CONTROL_CFLAGS GLOBUS_FTP_CONTROL_LIBS=$pkg_cv_GLOBUS_FTP_CONTROL_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } GLOBUS_FTP_CONTROL_VERSION=`$PKG_CONFIG --modversion globus-ftp-control` fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLOBUS_IO" >&5 $as_echo_n "checking for GLOBUS_IO... " >&6; } if test -n "$GLOBUS_IO_CFLAGS"; then pkg_cv_GLOBUS_IO_CFLAGS="$GLOBUS_IO_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-io\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-io") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_IO_CFLAGS=`$PKG_CONFIG --cflags "globus-io" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLOBUS_IO_LIBS"; then pkg_cv_GLOBUS_IO_LIBS="$GLOBUS_IO_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-io\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-io") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_IO_LIBS=`$PKG_CONFIG --libs "globus-io" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLOBUS_IO_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "globus-io" 2>&1` else GLOBUS_IO_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "globus-io" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLOBUS_IO_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_io" >&5 $as_echo_n "checking for globus_io... 
" >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_io_version=`$GPT_QUERY globus_io-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_io_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_io | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_io_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_io_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_io_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_io_version" >&5 $as_echo "$gpt_cv_globus_io_version" >&6; } GLOBUS_IO_VERSION=$gpt_cv_globus_io_version GLOBUS_IO_LIBS=$gpt_cv_globus_io_libs GLOBUS_IO_CFLAGS=$gpt_cv_globus_io_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_io" >&5 $as_echo_n "checking for globus_io... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_io_version=`$GPT_QUERY globus_io-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_io_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_io | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_io_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_io_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_io_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_io_version" >&5 $as_echo "$gpt_cv_globus_io_version" >&6; } GLOBUS_IO_VERSION=$gpt_cv_globus_io_version GLOBUS_IO_LIBS=$gpt_cv_globus_io_libs GLOBUS_IO_CFLAGS=$gpt_cv_globus_io_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else GLOBUS_IO_CFLAGS=$pkg_cv_GLOBUS_IO_CFLAGS GLOBUS_IO_LIBS=$pkg_cv_GLOBUS_IO_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } GLOBUS_IO_VERSION=`$PKG_CONFIG --modversion globus-io` fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLOBUS_GSI_CERT_UTILS" >&5 $as_echo_n "checking for GLOBUS_GSI_CERT_UTILS... " >&6; } if test -n "$GLOBUS_GSI_CERT_UTILS_CFLAGS"; then pkg_cv_GLOBUS_GSI_CERT_UTILS_CFLAGS="$GLOBUS_GSI_CERT_UTILS_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-gsi-cert-utils\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-gsi-cert-utils") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_GSI_CERT_UTILS_CFLAGS=`$PKG_CONFIG --cflags "globus-gsi-cert-utils" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLOBUS_GSI_CERT_UTILS_LIBS"; then pkg_cv_GLOBUS_GSI_CERT_UTILS_LIBS="$GLOBUS_GSI_CERT_UTILS_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-gsi-cert-utils\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-gsi-cert-utils") 2>&5 ac_status=$? 
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_GSI_CERT_UTILS_LIBS=`$PKG_CONFIG --libs "globus-gsi-cert-utils" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLOBUS_GSI_CERT_UTILS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "globus-gsi-cert-utils" 2>&1` else GLOBUS_GSI_CERT_UTILS_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "globus-gsi-cert-utils" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLOBUS_GSI_CERT_UTILS_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_gsi_cert_utils" >&5 $as_echo_n "checking for globus_gsi_cert_utils... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_gsi_cert_utils_version=`$GPT_QUERY globus_gsi_cert_utils-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_gsi_cert_utils_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_gsi_cert_utils | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_gsi_cert_utils_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_gsi_cert_utils_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_gsi_cert_utils_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_gsi_cert_utils_version" >&5 $as_echo "$gpt_cv_globus_gsi_cert_utils_version" >&6; } GLOBUS_GSI_CERT_UTILS_VERSION=$gpt_cv_globus_gsi_cert_utils_version GLOBUS_GSI_CERT_UTILS_LIBS=$gpt_cv_globus_gsi_cert_utils_libs GLOBUS_GSI_CERT_UTILS_CFLAGS=$gpt_cv_globus_gsi_cert_utils_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_gsi_cert_utils" >&5 $as_echo_n "checking for globus_gsi_cert_utils... 
" >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_gsi_cert_utils_version=`$GPT_QUERY globus_gsi_cert_utils-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_gsi_cert_utils_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_gsi_cert_utils | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_gsi_cert_utils_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_gsi_cert_utils_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_gsi_cert_utils_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_gsi_cert_utils_version" >&5 $as_echo "$gpt_cv_globus_gsi_cert_utils_version" >&6; } GLOBUS_GSI_CERT_UTILS_VERSION=$gpt_cv_globus_gsi_cert_utils_version GLOBUS_GSI_CERT_UTILS_LIBS=$gpt_cv_globus_gsi_cert_utils_libs GLOBUS_GSI_CERT_UTILS_CFLAGS=$gpt_cv_globus_gsi_cert_utils_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else GLOBUS_GSI_CERT_UTILS_CFLAGS=$pkg_cv_GLOBUS_GSI_CERT_UTILS_CFLAGS GLOBUS_GSI_CERT_UTILS_LIBS=$pkg_cv_GLOBUS_GSI_CERT_UTILS_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } GLOBUS_GSI_CERT_UTILS_VERSION=`$PKG_CONFIG --modversion globus-gsi-cert-utils` fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLOBUS_GSI_CREDENTIAL" >&5 $as_echo_n "checking for GLOBUS_GSI_CREDENTIAL... " >&6; } if test -n "$GLOBUS_GSI_CREDENTIAL_CFLAGS"; then pkg_cv_GLOBUS_GSI_CREDENTIAL_CFLAGS="$GLOBUS_GSI_CREDENTIAL_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-gsi-credential\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-gsi-credential") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_GSI_CREDENTIAL_CFLAGS=`$PKG_CONFIG --cflags "globus-gsi-credential" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLOBUS_GSI_CREDENTIAL_LIBS"; then pkg_cv_GLOBUS_GSI_CREDENTIAL_LIBS="$GLOBUS_GSI_CREDENTIAL_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-gsi-credential\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-gsi-credential") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_GSI_CREDENTIAL_LIBS=`$PKG_CONFIG --libs "globus-gsi-credential" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLOBUS_GSI_CREDENTIAL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "globus-gsi-credential" 2>&1` else GLOBUS_GSI_CREDENTIAL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "globus-gsi-credential" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLOBUS_GSI_CREDENTIAL_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_gsi_credential" >&5 $as_echo_n "checking for globus_gsi_credential... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_gsi_credential_version=`$GPT_QUERY globus_gsi_credential-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_gsi_credential_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_gsi_credential | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_gsi_credential_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_gsi_credential_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_gsi_credential_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_gsi_credential_version" >&5 $as_echo "$gpt_cv_globus_gsi_credential_version" >&6; } GLOBUS_GSI_CREDENTIAL_VERSION=$gpt_cv_globus_gsi_credential_version GLOBUS_GSI_CREDENTIAL_LIBS=$gpt_cv_globus_gsi_credential_libs GLOBUS_GSI_CREDENTIAL_CFLAGS=$gpt_cv_globus_gsi_credential_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_gsi_credential" >&5 $as_echo_n "checking for globus_gsi_credential... 
" >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_gsi_credential_version=`$GPT_QUERY globus_gsi_credential-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_gsi_credential_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_gsi_credential | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_gsi_credential_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_gsi_credential_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_gsi_credential_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_gsi_credential_version" >&5 $as_echo "$gpt_cv_globus_gsi_credential_version" >&6; } GLOBUS_GSI_CREDENTIAL_VERSION=$gpt_cv_globus_gsi_credential_version GLOBUS_GSI_CREDENTIAL_LIBS=$gpt_cv_globus_gsi_credential_libs GLOBUS_GSI_CREDENTIAL_CFLAGS=$gpt_cv_globus_gsi_credential_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else GLOBUS_GSI_CREDENTIAL_CFLAGS=$pkg_cv_GLOBUS_GSI_CREDENTIAL_CFLAGS GLOBUS_GSI_CREDENTIAL_LIBS=$pkg_cv_GLOBUS_GSI_CREDENTIAL_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } GLOBUS_GSI_CREDENTIAL_VERSION=`$PKG_CONFIG --modversion globus-gsi-credential` fi pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLOBUS_OPENSSL_MODULE" >&5 $as_echo_n "checking for GLOBUS_OPENSSL_MODULE... " >&6; } if test -n "$GLOBUS_OPENSSL_MODULE_CFLAGS"; then pkg_cv_GLOBUS_OPENSSL_MODULE_CFLAGS="$GLOBUS_OPENSSL_MODULE_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-openssl-module\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-openssl-module") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_OPENSSL_MODULE_CFLAGS=`$PKG_CONFIG --cflags "globus-openssl-module" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLOBUS_OPENSSL_MODULE_LIBS"; then pkg_cv_GLOBUS_OPENSSL_MODULE_LIBS="$GLOBUS_OPENSSL_MODULE_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-openssl-module\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-openssl-module") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_OPENSSL_MODULE_LIBS=`$PKG_CONFIG --libs "globus-openssl-module" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLOBUS_OPENSSL_MODULE_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "globus-openssl-module" 2>&1` else GLOBUS_OPENSSL_MODULE_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "globus-openssl-module" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLOBUS_OPENSSL_MODULE_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_openssl_module" >&5 $as_echo_n "checking for globus_openssl_module... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_openssl_module_version=`$GPT_QUERY globus_openssl_module-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_openssl_module_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_openssl_module | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_openssl_module_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_openssl_module_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_openssl_module_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_openssl_module_version" >&5 $as_echo "$gpt_cv_globus_openssl_module_version" >&6; } GLOBUS_OPENSSL_MODULE_VERSION=$gpt_cv_globus_openssl_module_version GLOBUS_OPENSSL_MODULE_LIBS=$gpt_cv_globus_openssl_module_libs GLOBUS_OPENSSL_MODULE_CFLAGS=$gpt_cv_globus_openssl_module_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_openssl_module" >&5 $as_echo_n "checking for globus_openssl_module... 
" >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_openssl_module_version=`$GPT_QUERY globus_openssl_module-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_openssl_module_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_openssl_module | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_openssl_module_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_openssl_module_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_openssl_module_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_openssl_module_version" >&5 $as_echo "$gpt_cv_globus_openssl_module_version" >&6; } GLOBUS_OPENSSL_MODULE_VERSION=$gpt_cv_globus_openssl_module_version GLOBUS_OPENSSL_MODULE_LIBS=$gpt_cv_globus_openssl_module_libs GLOBUS_OPENSSL_MODULE_CFLAGS=$gpt_cv_globus_openssl_module_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else GLOBUS_OPENSSL_MODULE_CFLAGS=$pkg_cv_GLOBUS_OPENSSL_MODULE_CFLAGS GLOBUS_OPENSSL_MODULE_LIBS=$pkg_cv_GLOBUS_OPENSSL_MODULE_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } GLOBUS_OPENSSL_MODULE_VERSION=`$PKG_CONFIG --modversion globus-openssl-module` fi # Check for new globus thread model selection SAVE_CFLAGS=$CFLAGS SAVE_LIBS=$LIBS CFLAGS="$CFLAGS $GLOBUS_COMMON_CFLAGS" LIBS="$LIBS $GLOBUS_COMMON_LIBS" for ac_func in globus_thread_set_model do : ac_fn_c_check_func "$LINENO" "globus_thread_set_model" "ac_cv_func_globus_thread_set_model" if test "x$ac_cv_func_globus_thread_set_model" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_GLOBUS_THREAD_SET_MODEL 1 _ACEOF fi done CFLAGS=$SAVE_CFLAGS LIBS=$SAVE_LIBS # Check for gridftp-v2 SAVE_CFLAGS=$CFLAGS SAVE_LIBS=$LIBS CFLAGS="$CFLAGS $GLOBUS_FTP_CLIENT_CFLAGS" LIBS="$LIBS $GLOBUS_FTP_CLIENT_LIBS" for ac_func in globus_ftp_client_handleattr_set_gridftp2 do : ac_fn_c_check_func "$LINENO" "globus_ftp_client_handleattr_set_gridftp2" "ac_cv_func_globus_ftp_client_handleattr_set_gridftp2" if test "x$ac_cv_func_globus_ftp_client_handleattr_set_gridftp2" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_GLOBUS_FTP_CLIENT_HANDLEATTR_SET_GRIDFTP2 1 _ACEOF fi done CFLAGS=$SAVE_CFLAGS LIBS=$SAVE_LIBS globus_openssl_detected= pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GLOBUS_OPENSSL" >&5 $as_echo_n "checking for GLOBUS_OPENSSL... " >&6; } if test -n "$GLOBUS_OPENSSL_CFLAGS"; then pkg_cv_GLOBUS_OPENSSL_CFLAGS="$GLOBUS_OPENSSL_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-openssl\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-openssl") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_OPENSSL_CFLAGS=`$PKG_CONFIG --cflags "globus-openssl" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GLOBUS_OPENSSL_LIBS"; then pkg_cv_GLOBUS_OPENSSL_LIBS="$GLOBUS_OPENSSL_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"globus-openssl\""; } >&5 ($PKG_CONFIG --exists --print-errors "globus-openssl") 2>&5 ac_status=$? 
$as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GLOBUS_OPENSSL_LIBS=`$PKG_CONFIG --libs "globus-openssl" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GLOBUS_OPENSSL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "globus-openssl" 2>&1` else GLOBUS_OPENSSL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "globus-openssl" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GLOBUS_OPENSSL_PKG_ERRORS" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_openssl" >&5 $as_echo_n "checking for globus_openssl... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_openssl_version=`$GPT_QUERY globus_openssl-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_openssl_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_openssl | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_openssl_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_openssl_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_openssl_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_openssl_version" >&5 $as_echo "$gpt_cv_globus_openssl_version" >&6; } GLOBUS_OPENSSL_VERSION=$gpt_cv_globus_openssl_version GLOBUS_OPENSSL_LIBS=$gpt_cv_globus_openssl_libs GLOBUS_OPENSSL_CFLAGS=$gpt_cv_globus_openssl_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for globus_openssl" >&5 $as_echo_n "checking for globus_openssl... " >&6; } if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_globus_openssl_version=`$GPT_QUERY globus_openssl-$GPT_FLAVOR-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi if test -n "$gpt_cv_globus_openssl_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR globus_openssl | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_globus_openssl_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_globus_openssl_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_globus_openssl_version"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gpt_cv_globus_openssl_version" >&5 $as_echo "$gpt_cv_globus_openssl_version" >&6; } GLOBUS_OPENSSL_VERSION=$gpt_cv_globus_openssl_version GLOBUS_OPENSSL_LIBS=$gpt_cv_globus_openssl_libs GLOBUS_OPENSSL_CFLAGS=$gpt_cv_globus_openssl_cflags else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else GLOBUS_OPENSSL_CFLAGS=$pkg_cv_GLOBUS_OPENSSL_CFLAGS GLOBUS_OPENSSL_LIBS=$pkg_cv_GLOBUS_OPENSSL_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } GLOBUS_OPENSSL_VERSION=`$PKG_CONFIG --modversion globus-openssl` fi if test ! 
"x$GLOBUS_OPENSSL_LIBS" = "x" ; then globus_openssl_detected=`echo "$GLOBUS_OPENSSL_LIBS" | grep "lssl_$GPT_FLAVOR"` if test ! "x$globus_openssl_detected" = "x" ; then globus_openssl_detected="yes" fi fi if test "x$globus_openssl_detected" = "xyes" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: Globus own OpenSSL library detected. In order to avoid runtime conflicts following components will be disabled: GridFTP DMC, SRM DMC, GSI MCC. To enable these components use Globus compiled for system OpenSSL. " >&5 $as_echo " Globus own OpenSSL library detected. In order to avoid runtime conflicts following components will be disabled: GridFTP DMC, SRM DMC, GSI MCC. To enable these components use Globus compiled for system OpenSSL. " >&6; } GLOBUS_FTP_CLIENT_VERSION= GLOBUS_FTP_CONTROL_VERSION= GLOBUS_IO_VERSION= GLOBUS_GSSAPI_GSI_VERSION= fi if test "x$GLOBUS_IO_VERSION" = "x"; then IO_VERSION_MAJOR=0 else IO_VERSION_MAJOR=`echo "$GLOBUS_IO_VERSION" | sed 's/^\([^.]*\).*/\1/'`; fi cat >>confdefs.h <<_ACEOF #define GLOBUS_IO_VERSION $IO_VERSION_MAJOR _ACEOF if test "x$GLOBUS_GSSAPI_GSI_VERSION" = "x"; then GLOBUS_GSSAPI_GSI_VERSION_MAJOR=0 GLOBUS_GSSAPI_GSI_VERSION_MINOR=0 else GLOBUS_GSSAPI_GSI_VERSION_MAJOR=`echo "$GLOBUS_GSSAPI_GSI_VERSION" | sed 's/^\([^.]*\).*/\1/'`; GLOBUS_GSSAPI_GSI_VERSION_MINOR=`echo "$GLOBUS_GSSAPI_GSI_VERSION" | sed 's/^[^.]*\.\([^.]*\).*/\1/'`; fi if test "$GLOBUS_GSSAPI_GSI_VERSION_MAJOR" -lt "12"; then GLOBUS_GSSAPI_GSI_OLD_OPENSSL=1 elif test "$GLOBUS_GSSAPI_GSI_VERSION_MAJOR" -eq "12"; then if test "$GLOBUS_GSSAPI_GSI_VERSION_MINOR" -lt "2"; then GLOBUS_GSSAPI_GSI_OLD_OPENSSL=1 else GLOBUS_GSSAPI_GSI_OLD_OPENSSL=0 fi else GLOBUS_GSSAPI_GSI_OLD_OPENSSL=0 fi cat >>confdefs.h <<_ACEOF #define GLOBUS_GSSAPI_GSI_VERSION $GSSAPI_GSI_VERSION_MAJOR _ACEOF cat >>confdefs.h <<_ACEOF #define GLOBUS_GSSAPI_GSI_OLD_OPENSSL $GLOBUS_GSSAPI_GSI_OLD_OPENSSL _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: checking for DEFAULT_GLOBUS_LOCATION" >&5 $as_echo_n "checking for DEFAULT_GLOBUS_LOCATION... " >&6; } # GLOBUS_LOCATION is set by GPT macros DEFAULT_GLOBUS_LOCATION="$GLOBUS_LOCATION" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DEFAULT_GLOBUS_LOCATION" >&5 $as_echo "$DEFAULT_GLOBUS_LOCATION" >&6; } #check lcas DEFAULT_LCAS_LOCATION=/opt/glite LCAS_LOCATION= LCAS_CFLAGS= LCAS_LIBS= # Check whether --with-lcas-location was given. if test "${with_lcas_location+set}" = set; then : withval=$with_lcas_location; LCAS_LOCATION=$with_lcas_location if test ! -d $LCAS_LOCATION; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: LCAS_LOCATION ($LCAS_LOCATION) does not exist" >&5 $as_echo "$as_me: WARNING: LCAS_LOCATION ($LCAS_LOCATION) does not exist" >&2;} LCAS_LOCATION= fi else if test "x$LCAS_LOCATION" = "x"; then LCAS_LOCATION=$DEFAULT_LCAS_LOCATION fi if test ! -d $LCAS_LOCATION; then LCAS_LOCATION= fi fi if test "x$LCAS_LOCATION" != "x"; then LCAS_CFLAGS=$LCAS_LOCATION/include/glite/security/lcas if test ! -d $LCAS_CFLAGS; then LCAS_CFLAGS=$LCAS_LOCATION/include/lcas if test ! 
-d $LCAS_CFLAGS; then LCAS_CFLAGS=$LCAS_LOCATION/include fi fi LCAS_CFLAGS=-I$LCAS_CFLAGS SAVE_CPPFLAGS=$CPPFLAGS CPPFLAGS="$LCAS_CFLAGS $GLOBUS_GSSAPI_GSI_CFLAGS" for ac_header in lcas.h do : ac_fn_c_check_header_mongrel "$LINENO" "lcas.h" "ac_cv_header_lcas_h" "$ac_includes_default" if test "x$ac_cv_header_lcas_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LCAS_H 1 _ACEOF LCAS_LDFLAGS= if test -d $LCAS_LOCATION/lib64; then LCAS_LDFLAGS="-L$LCAS_LOCATION/lib64 $GLOBUS_GSSAPI_GSI_LIBS" else LCAS_LDFLAGS="-L$LCAS_LOCATION/lib $GLOBUS_GSSAPI_GSI_LIBS" fi SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $LCAS_LDFLAGS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for lcas_init in -llcas" >&5 $as_echo_n "checking for lcas_init in -llcas... " >&6; } if ${ac_cv_lib_lcas_lcas_init+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-llcas $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char lcas_init (); int main () { return lcas_init (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_lcas_lcas_init=yes else ac_cv_lib_lcas_lcas_init=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_lcas_lcas_init" >&5 $as_echo "$ac_cv_lib_lcas_lcas_init" >&6; } if test "x$ac_cv_lib_lcas_lcas_init" = xyes; then : LCAS_LIBS="$LCAS_LDFLAGS -llcas" else LCAS_LOCATION="" fi LDFLAGS=$SAVE_LDFLAGS else LCAS_LOCATION="" fi done CPPFLAGS=$SAVE_CPPFLAGS fi if test "x$LCAS_LOCATION" != "x"; then $as_echo "#define HAVE_LCAS 1" >>confdefs.h fi #check lcmaps DEFAULT_LCMAPS_LOCATION=/opt/glite LCMAPS_LOCATION= LCMAPS_CFLAGS= LCMAPS_LIBS= # Check whether --with-lcmaps-location was given. if test "${with_lcmaps_location+set}" = set; then : withval=$with_lcmaps_location; LCMAPS_LOCATION=$with_lcmaps_location if test ! -d $LCMAPS_LOCATION; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: LCMAPS_LOCATION ($LCMAPS_LOCATION) does not exist" >&5 $as_echo "$as_me: WARNING: LCMAPS_LOCATION ($LCMAPS_LOCATION) does not exist" >&2;} LCMAPS_LOCATION= fi else if test "x$LCMAPS_LOCATION" = "x"; then LCMAPS_LOCATION=$DEFAULT_LCMAPS_LOCATION fi if test ! -d $LCMAPS_LOCATION; then LCMAPS_LOCATION= fi fi if test "x$LCMAPS_LOCATION" != "x"; then LCMAPS_CFLAGS=$LCMAPS_LOCATION/include/glite/security/lcmaps if test ! -d $LCMAPS_CFLAGS; then LCMAPS_CFLAGS=$LCMAPS_LOCATION/include/lcmaps if test ! -d $LCMAPS_CFLAGS; then LCMAPS_CFLAGS=$LCMAPS_LOCATION/include fi fi LCMAPS_CFLAGS=-I$LCMAPS_CFLAGS SAVE_CPPFLAGS=$CPPFLAGS CPPFLAGS="$LCMAPS_CFLAGS $GLOBUS_GSSAPI_GSI_CFLAGS" for ac_header in lcmaps.h do : ac_fn_c_check_header_mongrel "$LINENO" "lcmaps.h" "ac_cv_header_lcmaps_h" "$ac_includes_default" if test "x$ac_cv_header_lcmaps_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LCMAPS_H 1 _ACEOF LCMAPS_LDFLAGS= if test -d $LCMAPS_LOCATION/lib64; then LCMAPS_LDFLAGS="-L$LCMAPS_LOCATION/lib64 $GLOBUS_GSSAPI_GSI_LIBS" else LCMAPS_LDFLAGS="-L$LCMAPS_LOCATION/lib $GLOBUS_GSSAPI_GSI_LIBS" fi SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $LCMAPS_LDFLAGS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for lcmaps_init in -llcmaps" >&5 $as_echo_n "checking for lcmaps_init in -llcmaps... 
" >&6; } if ${ac_cv_lib_lcmaps_lcmaps_init+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-llcmaps $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char lcmaps_init (); int main () { return lcmaps_init (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_lcmaps_lcmaps_init=yes else ac_cv_lib_lcmaps_lcmaps_init=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_lcmaps_lcmaps_init" >&5 $as_echo "$ac_cv_lib_lcmaps_lcmaps_init" >&6; } if test "x$ac_cv_lib_lcmaps_lcmaps_init" = xyes; then : LCMAPS_LIBS="$LCMAPS_LDFLAGS -llcmaps" else LCMAPS_LOCATION="" fi LDFLAGS=$SAVE_LDFLAGS else LCMAPS_LOCATION="" fi done CPPFLAGS=$SAVE_CPPFLAGS fi if test "x$LCMAPS_LOCATION" != "x"; then $as_echo "#define HAVE_LCMAPS 1" >>confdefs.h fi # Check if mock DMC is enabled # Check whether --enable-mock-dmc was given. if test "${enable_mock_dmc+set}" = set; then : enableval=$enable_mock_dmc; enables_mock_dmc="$enableval" fi # Check for GFAL2 # Check whether --enable-gfal was given. if test "${enable_gfal+set}" = set; then : enableval=$enable_gfal; enables_gfal="$enableval" fi if test "x$enables_gfal" = "xyes"; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GFAL2" >&5 $as_echo_n "checking for GFAL2... " >&6; } if test -n "$GFAL2_CFLAGS"; then pkg_cv_GFAL2_CFLAGS="$GFAL2_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"gfal_transfer\""; } >&5 ($PKG_CONFIG --exists --print-errors "gfal_transfer") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GFAL2_CFLAGS=`$PKG_CONFIG --cflags "gfal_transfer" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$GFAL2_LIBS"; then pkg_cv_GFAL2_LIBS="$GFAL2_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"gfal_transfer\""; } >&5 ($PKG_CONFIG --exists --print-errors "gfal_transfer") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_GFAL2_LIBS=`$PKG_CONFIG --libs "gfal_transfer" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then GFAL2_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "gfal_transfer" 2>&1` else GFAL2_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "gfal_transfer" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$GFAL2_PKG_ERRORS" >&5 enables_gfal="no" elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } enables_gfal="no" else GFAL2_CFLAGS=$pkg_cv_GFAL2_CFLAGS GFAL2_LIBS=$pkg_cv_GFAL2_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi fi # Check for S3 # Check whether --enable-s3 was given. if test "${enable_s3+set}" = set; then : enableval=$enable_s3; enables_s3="$enableval" fi if test "x$enables_s3" = "xyes"; then # Check whether --with-s3 was given. if test "${with_s3+set}" = set; then : withval=$with_s3; fi if test ! "x$with_s3" = "x" ; then S3_LOCATION="$with_s3" S3_CPPFLAGS="-I$S3_LOCATION/include" if test -d $S3_LOCATION/lib64; then S3_LDFLAGS="-L$S3_LOCATION/lib64" else S3_LDFLAGS="-L$S3_LOCATION/lib" fi fi SAVE_CPPFLAGS=$CPPFLAGS CPPFLAGS="$CPPFLAGS $S3_CPPFLAGS" ac_fn_c_check_header_mongrel "$LINENO" "libs3.h" "ac_cv_header_libs3_h" "$ac_includes_default" if test "x$ac_cv_header_libs3_h" = xyes; then : else enables_s3="no" fi CPPFLAGS=$SAVE_CPPFLAGS SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $S3_LDFLAGS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for S3_initialize in -ls3" >&5 $as_echo_n "checking for S3_initialize in -ls3... " >&6; } if ${ac_cv_lib_s3_S3_initialize+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ls3 $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char S3_initialize (); int main () { return S3_initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_s3_S3_initialize=yes else ac_cv_lib_s3_S3_initialize=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_s3_S3_initialize" >&5 $as_echo "$ac_cv_lib_s3_S3_initialize" >&6; } if test "x$ac_cv_lib_s3_S3_initialize" = xyes; then : S3_LIBS="$S3_LDFLAGS -ls3" else enables_s3="no" fi LDFLAGS=$SAVE_LDFLAGS if test x$enables_s3 = xyes then if s3 help 2>&1 | grep -q -- '--timeout' ; then $as_echo "#define HAVE_S3_TIMEOUT 1" >>confdefs.h fi fi fi # Check for xrootd (c++) ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu # Check whether --enable-xrootd was given. if test "${enable_xrootd+set}" = set; then : enableval=$enable_xrootd; enables_xrootd="$enableval" fi if test "x$enables_xrootd" = "xyes"; then XROOTD_CPPFLAGS="-I/usr/include/xrootd" # Check whether --with-xrootd was given. 
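# The --with-xrootd handling below mirrors the --with-s3 check above: an
# explicit installation prefix is turned into preprocessor and linker flags,
# preferring PREFIX/lib64 over PREFIX/lib when the former exists. A minimal
# usage sketch (the prefix /opt/xrootd is hypothetical, purely for
# illustration):
#
#   ./configure --enable-xrootd --with-xrootd=/opt/xrootd
#   # which the check below expands to roughly
#   #   XROOTD_CPPFLAGS="-I/opt/xrootd/include/xrootd"
#   #   XROOTD_LDFLAGS="-L/opt/xrootd/lib64"   # or .../lib if lib64 is absent
#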
if test "${with_xrootd+set}" = set; then : withval=$with_xrootd; fi if test ! "x$with_xrootd" = "x" ; then XROOTD_LOCATION="$with_xrootd" XROOTD_CPPFLAGS="-I$XROOTD_LOCATION/include/xrootd" if test -d $XROOTD_LOCATION/lib64; then XROOTD_LDFLAGS="-L$XROOTD_LOCATION/lib64" else XROOTD_LDFLAGS="-L$XROOTD_LOCATION/lib" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for XROOTD headers" >&5 $as_echo_n "checking for XROOTD headers... " >&6; } SAVE_CPPFLAGS=$CPPFLAGS CPPFLAGS="$CPPFLAGS $XROOTD_CPPFLAGS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: $XROOTD_CPPFLAGS" >&5 $as_echo "$XROOTD_CPPFLAGS" >&6; } else XROOTD_CPPFLAGS="-std=c++0x $XROOTD_CPPFLAGS" CPPFLAGS="$SAVE_CPPFLAGS $XROOTD_CPPFLAGS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: $XROOTD_CPPFLAGS" >&5 $as_echo "$XROOTD_CPPFLAGS" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } enables_xrootd="no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext CPPFLAGS=$SAVE_CPPFLAGS SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $XROOTD_LDFLAGS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for main in -lXrdPosix" >&5 $as_echo_n "checking for main in -lXrdPosix... " >&6; } if ${ac_cv_lib_XrdPosix_main+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lXrdPosix $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { return main (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_XrdPosix_main=yes else ac_cv_lib_XrdPosix_main=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_XrdPosix_main" >&5 $as_echo "$ac_cv_lib_XrdPosix_main" >&6; } if test "x$ac_cv_lib_XrdPosix_main" = xyes; then : XROOTD_LIBS="$XROOTD_LDFLAGS -lXrdPosix -lXrdCl" else enables_xrootd="no" fi LDFLAGS=$SAVE_LDFLAGS fi fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu # Setup conditionals if test -n "$GLOBUS_COMMON_VERSION"; then GLOBUSUTILS_ENABLED_TRUE= GLOBUSUTILS_ENABLED_FALSE='#' else GLOBUSUTILS_ENABLED_TRUE='#' GLOBUSUTILS_ENABLED_FALSE= fi if test -n "$GLOBUS_FTP_CLIENT_VERSION"; then GRIDFTP_ENABLED_TRUE= GRIDFTP_ENABLED_FALSE='#' else GRIDFTP_ENABLED_TRUE='#' GRIDFTP_ENABLED_FALSE= fi if test x$enables_mock_dmc = xyes; then MOCK_DMC_ENABLED_TRUE= MOCK_DMC_ENABLED_FALSE='#' else MOCK_DMC_ENABLED_TRUE='#' MOCK_DMC_ENABLED_FALSE= fi if test x$enables_gfal = xyes; then GFAL_ENABLED_TRUE= GFAL_ENABLED_FALSE='#' else GFAL_ENABLED_TRUE='#' GFAL_ENABLED_FALSE= fi if test x$enables_s3 = xyes; then S3_DMC_ENABLED_TRUE= S3_DMC_ENABLED_FALSE='#' else S3_DMC_ENABLED_TRUE='#' S3_DMC_ENABLED_FALSE= fi if test x$enables_xrootd = xyes; then XROOTD_ENABLED_TRUE= XROOTD_ENABLED_FALSE='#' else XROOTD_ENABLED_TRUE='#' XROOTD_ENABLED_FALSE= fi if test x$XMLSEC_INSTALLED = xyes; then XMLSEC_ENABLED_TRUE= XMLSEC_ENABLED_FALSE='#' else XMLSEC_ENABLED_TRUE='#' XMLSEC_ENABLED_FALSE= fi 
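# The *_ENABLED_TRUE/*_ENABLED_FALSE pairs set above (and continued below) are
# the expanded form of Automake's AM_CONDITIONAL: exactly one variable of each
# pair is set to '#', so that after substitution the disabled branch of a
# generated Makefile.in ends up commented out. A minimal sketch of how such a
# conditional is typically consumed in a Makefile.am (the directory name is
# hypothetical, for illustration only):
#
#   if XROOTD_ENABLED
#   SUBDIRS += xrootd
#   endif
#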
if test x$enables_cppunit = xyes; then CPPUNIT_ENABLED_TRUE= CPPUNIT_ENABLED_FALSE='#' else CPPUNIT_ENABLED_TRUE='#' CPPUNIT_ENABLED_FALSE= fi enables_srm_dmc=no if test "$enables_hed" = "yes"; then enables_srm_dmc=yes fi if test "x$enables_srm_dmc" = "xyes"; then SRM_DMC_ENABLED_TRUE= SRM_DMC_ENABLED_FALSE='#' else SRM_DMC_ENABLED_TRUE='#' SRM_DMC_ENABLED_FALSE= fi # Setup defines if test -n "$GLOBUS_COMMON_VERSION"; then $as_echo "#define HAVE_GLOBUS 1" >>confdefs.h fi if test x"$XMLSEC_INSTALLED" = xyes; then $as_echo "#define HAVE_XMLSEC 1" >>confdefs.h fi # Setup messages for reporting enables_gridftp=no if test -n "$GLOBUS_FTP_CLIENT_VERSION" ; then enables_gridftp=yes; fi enables_dbcxx=no if test -n "$DBCXX_LIBS" ; then enables_dbcxx=yes; fi enables_sqlite=no if test "x$SQLITE_INSTALLED" = "xyes" ; then enables_sqlite=yes; fi # Check for LDAP if test "$enables_hed" = "yes"; then LDAP=no # Check whether --enable-ldap was given. if test "${enable_ldap+set}" = set; then : enableval=$enable_ldap; enables_ldap="$enableval" fi if test "x$enables_ldap" = "xyes"; then ac_fn_c_check_header_mongrel "$LINENO" "ldap.h" "ac_cv_header_ldap_h" "$ac_includes_default" if test "x$ac_cv_header_ldap_h" = xyes; then : LDAP=yes SAVE_LDFLAGS=$LDFLAGS LDFLAGS=-lpthread { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ldap_first_message in -lldap_r" >&5 $as_echo_n "checking for ldap_first_message in -lldap_r... " >&6; } if ${ac_cv_lib_ldap_r_ldap_first_message+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lldap_r $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char ldap_first_message (); int main () { return ldap_first_message (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_ldap_r_ldap_first_message=yes else ac_cv_lib_ldap_r_ldap_first_message=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ldap_r_ldap_first_message" >&5 $as_echo "$ac_cv_lib_ldap_r_ldap_first_message" >&6; } if test "x$ac_cv_lib_ldap_r_ldap_first_message" = xyes; then : { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ldap_initialize in -lldap_r" >&5 $as_echo_n "checking for ldap_initialize in -lldap_r... " >&6; } if ${ac_cv_lib_ldap_r_ldap_initialize+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lldap_r $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char ldap_initialize (); int main () { return ldap_initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_ldap_r_ldap_initialize=yes else ac_cv_lib_ldap_r_ldap_initialize=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ldap_r_ldap_initialize" >&5 $as_echo "$ac_cv_lib_ldap_r_ldap_initialize" >&6; } if test "x$ac_cv_lib_ldap_r_ldap_initialize" = xyes; then : $as_echo "#define HAVE_LDAP_INITIALIZE /**/" >>confdefs.h fi LDAP_LIBS=-lldap_r else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ldap_first_message in -lldap" >&5 $as_echo_n "checking for ldap_first_message in -lldap... " >&6; } if ${ac_cv_lib_ldap_ldap_first_message+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lldap $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char ldap_first_message (); int main () { return ldap_first_message (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_ldap_ldap_first_message=yes else ac_cv_lib_ldap_ldap_first_message=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ldap_ldap_first_message" >&5 $as_echo "$ac_cv_lib_ldap_ldap_first_message" >&6; } if test "x$ac_cv_lib_ldap_ldap_first_message" = xyes; then : { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ldap_initialize in -lldap" >&5 $as_echo_n "checking for ldap_initialize in -lldap... " >&6; } if ${ac_cv_lib_ldap_ldap_initialize+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lldap $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char ldap_initialize (); int main () { return ldap_initialize (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_ldap_ldap_initialize=yes else ac_cv_lib_ldap_ldap_initialize=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ldap_ldap_initialize" >&5 $as_echo "$ac_cv_lib_ldap_ldap_initialize" >&6; } if test "x$ac_cv_lib_ldap_ldap_initialize" = xyes; then : $as_echo "#define HAVE_LDAP_INITIALIZE /**/" >>confdefs.h fi LDAP_LIBS=-lldap else LDAP=no fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ber_init in -llber" >&5 $as_echo_n "checking for ber_init in -llber... " >&6; } if ${ac_cv_lib_lber_ber_init+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-llber $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char ber_init (); int main () { return ber_init (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_lber_ber_init=yes else ac_cv_lib_lber_ber_init=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_lber_ber_init" >&5 $as_echo "$ac_cv_lib_lber_ber_init" >&6; } if test "x$ac_cv_lib_lber_ber_init" = xyes; then : LDAP_LIBS="$LDAP_LIBS -llber" fi LDFLAGS=$SAVE_LDFLAGS else LDAP=no fi enables_ldap="$LDAP" fi else enables_ldap="no" fi if test x$LDAP = xyes; then LDAP_ENABLED_TRUE= LDAP_ENABLED_FALSE='#' else LDAP_ENABLED_TRUE='#' LDAP_ENABLED_FALSE= fi if test "x$LDAP" = "xyes"; then $as_echo "#define HAVE_LDAP /**/" >>confdefs.h fi # Check version of Test::More Perl module. min_perl_test_more_version_required="0.88" # Stable version of Test::More containing done_testing sub. PERL_TEST_DIR= perl_test_more_version_found=$(perl -MTest::More -e "print \"\$Test::More::VERSION\"") if test $(echo "$perl_test_more_version_found" | cut -d. -f1) -gt $(echo "$min_perl_test_more_version_required" | cut -d. -f1) || \ test $(echo "$perl_test_more_version_found" | cut -d. -f1) -eq $(echo "$min_perl_test_more_version_required" | cut -d. -f1) && \ test $(echo "$perl_test_more_version_found" | cut -d. -f2) -ge $(echo "$min_perl_test_more_version_required" | cut -d. -f2); then PERL_TEST_DIR="test" fi # Check for the Perl module Inline::Python - temporary during rewrite. PERL5LIB_INLINE_PYTHON= INLINE_PYTHON_FOUND=no # Check whether --with-inline-python was given. if test "${with_inline_python+set}" = set; then : withval=$with_inline_python; if test "$with_inline_python" = "yes" then PERL5LIB_INLINE_PYTHON= if ${PERL} -e "use Inline::Python; exit;" > /dev/null 2>&1 then INLINE_PYTHON_FOUND="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: Perl module Inline::Python found" >&5 $as_echo "$as_me: Perl module Inline::Python found" >&6;} else as_fn_error $? "Perl module Inline::Python not found" "$LINENO" 5 fi elif test "$with_inline_python" = "no" then { $as_echo "$as_me:${as_lineno-$LINENO}: Disabling PYTHON LRMS" >&5 $as_echo "$as_me: Disabling PYTHON LRMS" >&6;} else PERL5LIB_INLINE_PYTHON="$with_inline_python" if test -d $PERL5LIB_INLINE_PYTHON; then PERL5LIB_INLINE_PYTHON="-I${PERL5LIB_INLINE_PYTHON}" if `${PERL} ${PERL5LIB_INLINE_PYTHON} -e "use Inline::Python; exit;" > /dev/null 2>&1` then INLINE_PYTHON_FOUND="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: Perl module Inline::Python found" >&5 $as_echo "$as_me: Perl module Inline::Python found" >&6;} else as_fn_error $? 
"--with-inline-python given, but test failed: PERL5LIB_INLINE_PYTHON=${PERL5LIB_INLINE_PYTHON}" "$LINENO" 5 fi fi fi else if `${PERL} -e "use Inline::Python; exit;" > /dev/null 2>&1` then INLINE_PYTHON_FOUND="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: Perl module Inline::Python found" >&5 $as_echo "$as_me: Perl module Inline::Python found" >&6;} else { $as_echo "$as_me:${as_lineno-$LINENO}: Perl module Inline::Python not found" >&5 $as_echo "$as_me: Perl module Inline::Python not found" >&6;} fi fi if test "x${INLINE_PYTHON_FOUND}" = "xyes"; then PYTHON_LRMS_ENABLED_TRUE= PYTHON_LRMS_ENABLED_FALSE='#' else PYTHON_LRMS_ENABLED_TRUE='#' PYTHON_LRMS_ENABLED_FALSE= fi # Check for the uuid lib UUID_LIBS="" if test "$enables_hed" = "yes"; then ac_fn_c_check_header_mongrel "$LINENO" "uuid/uuid.h" "ac_cv_header_uuid_uuid_h" "$ac_includes_default" if test "x$ac_cv_header_uuid_uuid_h" = xyes; then : ac_fn_c_check_func "$LINENO" "uuid_generate" "ac_cv_func_uuid_generate" if test "x$ac_cv_func_uuid_generate" = xyes; then : UUID_LIBS= else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for uuid_generate in -luuid" >&5 $as_echo_n "checking for uuid_generate in -luuid... " >&6; } if ${ac_cv_lib_uuid_uuid_generate+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-luuid $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char uuid_generate (); int main () { return uuid_generate (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_uuid_uuid_generate=yes else ac_cv_lib_uuid_uuid_generate=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_uuid_uuid_generate" >&5 $as_echo "$ac_cv_lib_uuid_uuid_generate" >&6; } if test "x$ac_cv_lib_uuid_uuid_generate" = xyes; then : UUID_LIBS=-luuid else { $as_echo "$as_me:${as_lineno-$LINENO}: Can't find library containing uuid implementation" >&5 $as_echo "$as_me: Can't find library containing uuid implementation" >&6;} fi fi else { $as_echo "$as_me:${as_lineno-$LINENO}: Can't find uuid header" >&5 $as_echo "$as_me: Can't find uuid header" >&6;} fi LIBS="$LIBS $UUID_LIBS" fi # Check for dlopen DLOPEN_LIBS="" if test "$enables_hed" = "yes"; then ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" if test "x$ac_cv_func_dlopen" = xyes; then : DLOPEN_LIBS= else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 $as_echo_n "checking for dlopen in -ldl... " >&6; } if ${ac_cv_lib_dl_dlopen+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else ac_cv_lib_dl_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 $as_echo "$ac_cv_lib_dl_dlopen" >&6; } if test "x$ac_cv_lib_dl_dlopen" = xyes; then : DLOPEN_LIBS=-ldl else { $as_echo "$as_me:${as_lineno-$LINENO}: Can't find library containing dlopen implementation" >&5 $as_echo "$as_me: Can't find library containing dlopen implementation" >&6;} fi fi fi # Check for clock_gettime { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing clock_gettime" >&5 $as_echo_n "checking for library containing clock_gettime... " >&6; } if ${ac_cv_search_clock_gettime+:} false; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char clock_gettime (); int main () { return clock_gettime (); ; return 0; } _ACEOF for ac_lib in '' rt; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_clock_gettime=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if ${ac_cv_search_clock_gettime+:} false; then : break fi done if ${ac_cv_search_clock_gettime+:} false; then : else ac_cv_search_clock_gettime=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_clock_gettime" >&5 $as_echo "$ac_cv_search_clock_gettime" >&6; } ac_res=$ac_cv_search_clock_gettime if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" fi # Define bash-completion dir pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for BASH_COMPLETION" >&5 $as_echo_n "checking for BASH_COMPLETION... " >&6; } if test -n "$BASH_COMPLETION_CFLAGS"; then pkg_cv_BASH_COMPLETION_CFLAGS="$BASH_COMPLETION_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"bash-completion >= 2.0\""; } >&5 ($PKG_CONFIG --exists --print-errors "bash-completion >= 2.0") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_BASH_COMPLETION_CFLAGS=`$PKG_CONFIG --cflags "bash-completion >= 2.0" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$BASH_COMPLETION_LIBS"; then pkg_cv_BASH_COMPLETION_LIBS="$BASH_COMPLETION_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"bash-completion >= 2.0\""; } >&5 ($PKG_CONFIG --exists --print-errors "bash-completion >= 2.0") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_BASH_COMPLETION_LIBS=`$PKG_CONFIG --libs "bash-completion >= 2.0" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then BASH_COMPLETION_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "bash-completion >= 2.0" 2>&1` else BASH_COMPLETION_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "bash-completion >= 2.0" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$BASH_COMPLETION_PKG_ERRORS" >&5 bashcompdir="${sysconfdir}/bash_completion.d" elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } bashcompdir="${sysconfdir}/bash_completion.d" else BASH_COMPLETION_CFLAGS=$pkg_cv_BASH_COMPLETION_CFLAGS BASH_COMPLETION_LIBS=$pkg_cv_BASH_COMPLETION_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } bashcompdir="`pkg-config --variable=completionsdir --define-variable=prefix=${prefix} bash-completion`" fi # check for fsusage if test "$enables_hed" = "yes"; then for ac_header in sys/param.h do : ac_fn_c_check_header_mongrel "$LINENO" "sys/param.h" "ac_cv_header_sys_param_h" "$ac_includes_default" if test "x$ac_cv_header_sys_param_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_SYS_PARAM_H 1 _ACEOF fi done for ac_header in sys/vfs.h sys/fs_types.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done for ac_header in sys/mount.h do : ac_fn_c_check_header_compile "$LINENO" "sys/mount.h" "ac_cv_header_sys_mount_h" "$ac_includes_default #if HAVE_SYS_PARAM_H #include #endif " if test "x$ac_cv_header_sys_mount_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_SYS_MOUNT_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to get file system space usage" >&5 $as_echo "$as_me: checking how to get file system space usage" >&6;} ac_fsusage_space=no # Perform only the link test since it seems there are no variants of the # statvfs function. This check is more than just AC_CHECK_FUNCS(statvfs) # because that got a false positive on SCO OSR5. Adding the declaration # of a `struct statvfs' causes this test to fail (as it should) on such # systems. That system is reported to work fine with STAT_STATFS4 which # is what it gets when this test fails. if test $ac_fsusage_space = no; then # SVR4 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for statvfs function (SVR4)" >&5 $as_echo_n "checking for statvfs function (SVR4)... " >&6; } if ${fu_cv_sys_stat_statvfs+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #if defined __GLIBC__ && !defined __BEOS__ Do not use statvfs on systems with GNU libc, because that function stats all preceding entries in /proc/mounts, and that makes df hang if even one of the corresponding file systems is hard-mounted, but not available. statvfs in GNU libc on BeOS operates differently: it only makes a system call. 
#endif #ifdef __osf__ "Do not use Tru64's statvfs implementation" #endif #include int main () { struct statvfs fsd; statvfs (0, &fsd); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : fu_cv_sys_stat_statvfs=yes else fu_cv_sys_stat_statvfs=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $fu_cv_sys_stat_statvfs" >&5 $as_echo "$fu_cv_sys_stat_statvfs" >&6; } if test $fu_cv_sys_stat_statvfs = yes; then ac_fsusage_space=yes $as_echo "#define STAT_STATVFS 1" >>confdefs.h fi fi if test $ac_fsusage_space = no; then # DEC Alpha running OSF/1 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for 3-argument statfs function (DEC OSF/1)" >&5 $as_echo_n "checking for 3-argument statfs function (DEC OSF/1)... " >&6; } if ${fu_cv_sys_stat_statfs3_osf1+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : fu_cv_sys_stat_statfs3_osf1=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include int main () { struct statfs fsd; fsd.f_fsize = 0; return statfs (".", &fsd, sizeof (struct statfs)) != 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : fu_cv_sys_stat_statfs3_osf1=yes else fu_cv_sys_stat_statfs3_osf1=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $fu_cv_sys_stat_statfs3_osf1" >&5 $as_echo "$fu_cv_sys_stat_statfs3_osf1" >&6; } if test $fu_cv_sys_stat_statfs3_osf1 = yes; then ac_fsusage_space=yes $as_echo "#define STAT_STATFS3_OSF1 1" >>confdefs.h fi fi if test $ac_fsusage_space = no; then # AIX { $as_echo "$as_me:${as_lineno-$LINENO}: checking for two-argument statfs with statfs.bsize member (AIX, 4.3BSD)" >&5 $as_echo_n "checking for two-argument statfs with statfs.bsize member (AIX, 4.3BSD)... " >&6; } if ${fu_cv_sys_stat_statfs2_bsize+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : fu_cv_sys_stat_statfs2_bsize=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef HAVE_SYS_PARAM_H #include #endif #ifdef HAVE_SYS_MOUNT_H #include #endif #ifdef HAVE_SYS_VFS_H #include #endif int main () { struct statfs fsd; fsd.f_bsize = 0; return statfs (".", &fsd) != 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : fu_cv_sys_stat_statfs2_bsize=yes else fu_cv_sys_stat_statfs2_bsize=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $fu_cv_sys_stat_statfs2_bsize" >&5 $as_echo "$fu_cv_sys_stat_statfs2_bsize" >&6; } if test $fu_cv_sys_stat_statfs2_bsize = yes; then ac_fsusage_space=yes $as_echo "#define STAT_STATFS2_BSIZE 1" >>confdefs.h fi fi if test $ac_fsusage_space = no; then # SVR3 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for four-argument statfs (AIX-3.2.5, SVR3)" >&5 $as_echo_n "checking for four-argument statfs (AIX-3.2.5, SVR3)... " >&6; } if ${fu_cv_sys_stat_statfs4+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : fu_cv_sys_stat_statfs4=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #include int main () { struct statfs fsd; return statfs (".", &fsd, sizeof fsd, 0) != 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : fu_cv_sys_stat_statfs4=yes else fu_cv_sys_stat_statfs4=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $fu_cv_sys_stat_statfs4" >&5 $as_echo "$fu_cv_sys_stat_statfs4" >&6; } if test $fu_cv_sys_stat_statfs4 = yes; then ac_fsusage_space=yes $as_echo "#define STAT_STATFS4 1" >>confdefs.h fi fi if test $ac_fsusage_space = no; then # 4.4BSD and NetBSD { $as_echo "$as_me:${as_lineno-$LINENO}: checking for two-argument statfs with statfs.fsize member (4.4BSD and NetBSD)" >&5 $as_echo_n "checking for two-argument statfs with statfs.fsize member (4.4BSD and NetBSD)... " >&6; } if ${fu_cv_sys_stat_statfs2_fsize+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : fu_cv_sys_stat_statfs2_fsize=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #ifdef HAVE_SYS_PARAM_H #include #endif #ifdef HAVE_SYS_MOUNT_H #include #endif int main () { struct statfs fsd; fsd.f_fsize = 0; return statfs (".", &fsd) != 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : fu_cv_sys_stat_statfs2_fsize=yes else fu_cv_sys_stat_statfs2_fsize=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $fu_cv_sys_stat_statfs2_fsize" >&5 $as_echo "$fu_cv_sys_stat_statfs2_fsize" >&6; } if test $fu_cv_sys_stat_statfs2_fsize = yes; then ac_fsusage_space=yes $as_echo "#define STAT_STATFS2_FSIZE 1" >>confdefs.h fi fi if test $ac_fsusage_space = no; then # Ultrix { $as_echo "$as_me:${as_lineno-$LINENO}: checking for two-argument statfs with struct fs_data (Ultrix)" >&5 $as_echo_n "checking for two-argument statfs with struct fs_data (Ultrix)... " >&6; } if ${fu_cv_sys_stat_fs_data+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : fu_cv_sys_stat_fs_data=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #ifdef HAVE_SYS_PARAM_H #include #endif #ifdef HAVE_SYS_MOUNT_H #include #endif #ifdef HAVE_SYS_FS_TYPES_H #include #endif int main () { struct fs_data fsd; /* Ultrix's statfs returns 1 for success, 0 for not mounted, -1 for failure. */ return statfs (".", &fsd) != 1; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : fu_cv_sys_stat_fs_data=yes else fu_cv_sys_stat_fs_data=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $fu_cv_sys_stat_fs_data" >&5 $as_echo "$fu_cv_sys_stat_fs_data" >&6; } if test $fu_cv_sys_stat_fs_data = yes; then ac_fsusage_space=yes $as_echo "#define STAT_STATFS2_FS_DATA 1" >>confdefs.h fi fi if test $ac_fsusage_space = no; then # SVR2 cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : $as_echo "#define STAT_READ_FILSYS 1" >>confdefs.h ac_fsusage_space=yes fi rm -f conftest.err conftest.i conftest.$ac_ext fi if test $ac_fsusage_space = yes; then : gl_cv_fs_space=yes else gl_cv_fs_space=no fi if test $gl_cv_fs_space = yes; then case " $LIBOBJS " in *" fsusage.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS fsusage.$ac_objext" ;; esac for ac_header in dustat.h sys/fs/s5param.h sys/filsys.h sys/statfs.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for statfs that truncates block counts" >&5 $as_echo_n "checking for statfs that truncates block counts... " >&6; } if ${fu_cv_sys_truncating_statfs+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #if !defined(sun) && !defined(__sun) choke -- this is a workaround for a Sun-specific problem #endif #include #include int main () { struct statfs t; long c = *(t.f_spare); if (c) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : fu_cv_sys_truncating_statfs=yes else fu_cv_sys_truncating_statfs=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi if test $fu_cv_sys_truncating_statfs = yes; then $as_echo "#define STATFS_TRUNCATES_BLOCK_COUNTS 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $fu_cv_sys_truncating_statfs" >&5 $as_echo "$fu_cv_sys_truncating_statfs" >&6; } fi fi if test "$enables_hed" = "yes"; then # Checks for header files. ac_header_dirent=no for ac_hdr in dirent.h sys/ndir.h sys/dir.h ndir.h; do as_ac_Header=`$as_echo "ac_cv_header_dirent_$ac_hdr" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_hdr that defines DIR" >&5 $as_echo_n "checking for $ac_hdr that defines DIR... " >&6; } if eval \${$as_ac_Header+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include <$ac_hdr> int main () { if ((DIR *) 0) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : eval "$as_ac_Header=yes" else eval "$as_ac_Header=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$as_ac_Header { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_hdr" | $as_tr_cpp` 1 _ACEOF ac_header_dirent=$ac_hdr; break fi done # Two versions of opendir et al. are in -ldir and -lx on SCO Xenix. if test $ac_header_dirent = dirent.h; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing opendir" >&5 $as_echo_n "checking for library containing opendir... " >&6; } if ${ac_cv_search_opendir+:} false; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char opendir (); int main () { return opendir (); ; return 0; } _ACEOF for ac_lib in '' dir; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_opendir=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if ${ac_cv_search_opendir+:} false; then : break fi done if ${ac_cv_search_opendir+:} false; then : else ac_cv_search_opendir=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_opendir" >&5 $as_echo "$ac_cv_search_opendir" >&6; } ac_res=$ac_cv_search_opendir if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" fi else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing opendir" >&5 $as_echo_n "checking for library containing opendir... " >&6; } if ${ac_cv_search_opendir+:} false; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char opendir (); int main () { return opendir (); ; return 0; } _ACEOF for ac_lib in '' x; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_c_try_link "$LINENO"; then : ac_cv_search_opendir=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if ${ac_cv_search_opendir+:} false; then : break fi done if ${ac_cv_search_opendir+:} false; then : else ac_cv_search_opendir=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_opendir" >&5 $as_echo "$ac_cv_search_opendir" >&6; } ac_res=$ac_cv_search_opendir if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 $as_echo_n "checking for ANSI C header files... " >&6; } if ${ac_cv_header_stdc+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include #include int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_stdc=yes else ac_cv_header_stdc=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test $ac_cv_header_stdc = yes; then # SunOS 4.x string.h does not declare mem*, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "memchr" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "free" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. if test "$cross_compiling" = yes; then : : else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #include #if ((' ' & 0x0FF) == 0x020) # define ISLOWER(c) ('a' <= (c) && (c) <= 'z') # define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) #else # define ISLOWER(c) \ (('a' <= (c) && (c) <= 'i') \ || ('j' <= (c) && (c) <= 'r') \ || ('s' <= (c) && (c) <= 'z')) # define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) #endif #define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) int main () { int i; for (i = 0; i < 256; i++) if (XOR (islower (i), ISLOWER (i)) || toupper (i) != TOUPPER (i)) return 2; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : else ac_cv_header_stdc=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 $as_echo "$ac_cv_header_stdc" >&6; } if test $ac_cv_header_stdc = yes; then $as_echo "#define STDC_HEADERS 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sys/wait.h that is POSIX.1 compatible" >&5 $as_echo_n "checking for sys/wait.h that is POSIX.1 compatible... " >&6; } if ${ac_cv_header_sys_wait_h+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #ifndef WEXITSTATUS # define WEXITSTATUS(stat_val) ((unsigned int) (stat_val) >> 8) #endif #ifndef WIFEXITED # define WIFEXITED(stat_val) (((stat_val) & 255) == 0) #endif int main () { int s; wait (&s); s = WIFEXITED (s) ? WEXITSTATUS (s) : 1; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_sys_wait_h=yes else ac_cv_header_sys_wait_h=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_sys_wait_h" >&5 $as_echo "$ac_cv_header_sys_wait_h" >&6; } if test $ac_cv_header_sys_wait_h = yes; then $as_echo "#define HAVE_SYS_WAIT_H 1" >>confdefs.h fi for ac_header in arpa/inet.h fcntl.h float.h limits.h netdb.h netinet/in.h sasl.h sasl/sasl.h stdint.h stdlib.h string.h sys/file.h sys/socket.h sys/vfs.h unistd.h uuid/uuid.h getopt.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the compiler implements namespaces" >&5 $as_echo_n "checking whether the compiler implements namespaces... " >&6; } if ${ac_cv_cxx_namespaces+:} false; then : $as_echo_n "(cached) " >&6 else ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ namespace Outer { namespace Inner { int i = 0; }} int main () { using namespace Outer::Inner; return i; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_cxx_namespaces=yes else ac_cv_cxx_namespaces=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_namespaces" >&5 $as_echo "$ac_cv_cxx_namespaces" >&6; } if test "$ac_cv_cxx_namespaces" = yes; then $as_echo "#define HAVE_NAMESPACES /**/" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the compiler has stringstream" >&5 $as_echo_n "checking whether the compiler has stringstream... " >&6; } if ${ac_cv_cxx_have_sstream+:} false; then : $as_echo_n "(cached) " >&6 else ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #ifdef HAVE_NAMESPACES using namespace std; #endif int main () { stringstream message; message << "Hello"; return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_cxx_have_sstream=yes else ac_cv_cxx_have_sstream=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_have_sstream" >&5 $as_echo "$ac_cv_cxx_have_sstream" >&6; } if test "$ac_cv_cxx_have_sstream" = yes; then $as_echo "#define HAVE_SSTREAM /**/" >>confdefs.h fi # Checks for typedefs, structures, and compiler characteristics. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for stdbool.h that conforms to C99" >&5 $as_echo_n "checking for stdbool.h that conforms to C99... " >&6; } if ${ac_cv_header_stdbool_h+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #ifndef bool "error: bool is not defined" #endif #ifndef false "error: false is not defined" #endif #if false "error: false is not 0" #endif #ifndef true "error: true is not defined" #endif #if true != 1 "error: true is not 1" #endif #ifndef __bool_true_false_are_defined "error: __bool_true_false_are_defined is not defined" #endif struct s { _Bool s: 1; _Bool t; } s; char a[true == 1 ? 1 : -1]; char b[false == 0 ? 1 : -1]; char c[__bool_true_false_are_defined == 1 ? 1 : -1]; char d[(bool) 0.5 == true ? 1 : -1]; /* See body of main program for 'e'. */ char f[(_Bool) 0.0 == false ? 1 : -1]; char g[true]; char h[sizeof (_Bool)]; char i[sizeof s.t]; enum { j = false, k = true, l = false * true, m = true * 256 }; /* The following fails for HP aC++/ANSI C B3910B A.05.55 [Dec 04 2003]. */ _Bool n[m]; char o[sizeof n == m * sizeof n[0] ? 1 : -1]; char p[-1 - (_Bool) 0 < 0 && -1 - (bool) 0 < 0 ? 1 : -1]; /* Catch a bug in an HP-UX C compiler. 
See http://gcc.gnu.org/ml/gcc-patches/2003-12/msg02303.html http://lists.gnu.org/archive/html/bug-coreutils/2005-11/msg00161.html */ _Bool q = true; _Bool *pq = &q; int main () { bool e = &s; *pq |= q; *pq |= ! q; /* Refer to every declared value, to avoid compiler optimizations. */ return (!a + !b + !c + !d + !e + !f + !g + !h + !i + !!j + !k + !!l + !m + !n + !o + !p + !q + !pq); ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_stdbool_h=yes else ac_cv_header_stdbool_h=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdbool_h" >&5 $as_echo "$ac_cv_header_stdbool_h" >&6; } ac_fn_c_check_type "$LINENO" "_Bool" "ac_cv_type__Bool" "$ac_includes_default" if test "x$ac_cv_type__Bool" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE__BOOL 1 _ACEOF fi if test $ac_cv_header_stdbool_h = yes; then $as_echo "#define HAVE_STDBOOL_H 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 $as_echo_n "checking for an ANSI C-conforming const... " >&6; } if ${ac_cv_c_const+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __cplusplus /* Ultrix mips cc rejects this sort of thing. */ typedef int charset[2]; const charset cs = { 0, 0 }; /* SunOS 4.1.1 cc rejects this. */ char const *const *pcpcc; char **ppc; /* NEC SVR4.0.2 mips cc rejects this. */ struct point {int x, y;}; static struct point const zero = {0,0}; /* AIX XL C 1.02.0.0 rejects this. It does not let you subtract one const X* pointer from another in an arm of an if-expression whose if-part is not a constant expression */ const char *g = "string"; pcpcc = &g + (g ? g-g : 0); /* HPUX 7.0 cc rejects these. */ ++pcpcc; ppc = (char**) pcpcc; pcpcc = (char const *const *) ppc; { /* SCO 3.2v4 cc rejects this sort of thing. */ char tx; char *t = &tx; char const *s = 0 ? (char *) 0 : (char const *) 0; *t++ = 0; if (s) return 0; } { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ int x[] = {25, 17}; const int *foo = &x[0]; ++foo; } { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ typedef const int *iptr; iptr p = 0; ++p; } { /* AIX XL C 1.02.0.0 rejects this sort of thing, saying "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */ struct s { int j; const int *ap[3]; } bx; struct s *b = &bx; b->j = 5; } { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ const int foo = 10; if (!foo) return 0; } return !cs[0] && !zero.x; #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_c_const=yes else ac_cv_c_const=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 $as_echo "$ac_cv_c_const" >&6; } if test $ac_cv_c_const = no; then $as_echo "#define const /**/" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for uid_t in sys/types.h" >&5 $as_echo_n "checking for uid_t in sys/types.h... " >&6; } if ${ac_cv_type_uid_t+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "uid_t" >/dev/null 2>&1; then : ac_cv_type_uid_t=yes else ac_cv_type_uid_t=no fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_type_uid_t" >&5 $as_echo "$ac_cv_type_uid_t" >&6; } if test $ac_cv_type_uid_t = no; then $as_echo "#define uid_t int" >>confdefs.h $as_echo "#define gid_t int" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for inline" >&5 $as_echo_n "checking for inline... " >&6; } if ${ac_cv_c_inline+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_c_inline=no for ac_kw in inline __inline__ __inline; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifndef __cplusplus typedef int foo_t; static $ac_kw foo_t static_foo () {return 0; } $ac_kw foo_t foo () {return 0; } #endif _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_c_inline=$ac_kw fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext test "$ac_cv_c_inline" != no && break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_inline" >&5 $as_echo "$ac_cv_c_inline" >&6; } case $ac_cv_c_inline in inline | yes) ;; *) case $ac_cv_c_inline in no) ac_val=;; *) ac_val=$ac_cv_c_inline;; esac cat >>confdefs.h <<_ACEOF #ifndef __cplusplus #define inline $ac_val #endif _ACEOF ;; esac ac_fn_c_check_type "$LINENO" "mode_t" "ac_cv_type_mode_t" "$ac_includes_default" if test "x$ac_cv_type_mode_t" = xyes; then : else cat >>confdefs.h <<_ACEOF #define mode_t int _ACEOF fi ac_fn_c_check_type "$LINENO" "off_t" "ac_cv_type_off_t" "$ac_includes_default" if test "x$ac_cv_type_off_t" = xyes; then : else cat >>confdefs.h <<_ACEOF #define off_t long int _ACEOF fi ac_fn_c_check_type "$LINENO" "pid_t" "ac_cv_type_pid_t" "$ac_includes_default" if test "x$ac_cv_type_pid_t" = xyes; then : else cat >>confdefs.h <<_ACEOF #define pid_t int _ACEOF fi ac_fn_c_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default" if test "x$ac_cv_type_size_t" = xyes; then : else cat >>confdefs.h <<_ACEOF #define size_t unsigned int _ACEOF fi ac_fn_c_check_member "$LINENO" "struct stat" "st_blksize" "ac_cv_member_struct_stat_st_blksize" "$ac_includes_default" if test "x$ac_cv_member_struct_stat_st_blksize" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STRUCT_STAT_ST_BLKSIZE 1 _ACEOF fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether time.h and sys/time.h may both be included" >&5 $as_echo_n "checking whether time.h and sys/time.h may both be included... " >&6; } if ${ac_cv_header_time+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include int main () { if ((struct tm *) 0) return 0; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_time=yes else ac_cv_header_time=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_time" >&5 $as_echo "$ac_cv_header_time" >&6; } if test $ac_cv_header_time = yes; then $as_echo "#define TIME_WITH_SYS_TIME 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether struct tm is in sys/time.h or time.h" >&5 $as_echo_n "checking whether struct tm is in sys/time.h or time.h... " >&6; } if ${ac_cv_struct_tm+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #include int main () { struct tm tm; int *p = &tm.tm_sec; return !p; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_struct_tm=time.h else ac_cv_struct_tm=sys/time.h fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_struct_tm" >&5 $as_echo "$ac_cv_struct_tm" >&6; } if test $ac_cv_struct_tm = sys/time.h; then $as_echo "#define TM_IN_SYS_TIME 1" >>confdefs.h fi ac_fn_c_check_type "$LINENO" "ptrdiff_t" "ac_cv_type_ptrdiff_t" "$ac_includes_default" if test "x$ac_cv_type_ptrdiff_t" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_PTRDIFF_T 1 _ACEOF fi # Checks for library functions. for ac_header in unistd.h do : ac_fn_c_check_header_mongrel "$LINENO" "unistd.h" "ac_cv_header_unistd_h" "$ac_includes_default" if test "x$ac_cv_header_unistd_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_UNISTD_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working chown" >&5 $as_echo_n "checking for working chown... " >&6; } if ${ac_cv_func_chown_works+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_chown_works=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default #include int main () { char *f = "conftest.chown"; struct stat before, after; if (creat (f, 0600) < 0) return 1; if (stat (f, &before) < 0) return 1; if (chown (f, (uid_t) -1, (gid_t) -1) == -1) return 1; if (stat (f, &after) < 0) return 1; return ! (before.st_uid == after.st_uid && before.st_gid == after.st_gid); ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_chown_works=yes else ac_cv_func_chown_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi rm -f conftest.chown fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_chown_works" >&5 $as_echo "$ac_cv_func_chown_works" >&6; } if test $ac_cv_func_chown_works = yes; then $as_echo "#define HAVE_CHOWN 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether closedir returns void" >&5 $as_echo_n "checking whether closedir returns void... " >&6; } if ${ac_cv_func_closedir_void+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_closedir_void=yes else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default #include <$ac_header_dirent> #ifndef __cplusplus int closedir (); #endif int main () { return closedir (opendir (".")) != 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_closedir_void=no else ac_cv_func_closedir_void=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_closedir_void" >&5 $as_echo "$ac_cv_func_closedir_void" >&6; } if test $ac_cv_func_closedir_void = yes; then $as_echo "#define CLOSEDIR_VOID 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for error_at_line" >&5 $as_echo_n "checking for error_at_line... " >&6; } if ${ac_cv_lib_error_at_line+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include int main () { error_at_line (0, 0, "", 0, "an error occurred"); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_error_at_line=yes else ac_cv_lib_error_at_line=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_error_at_line" >&5 $as_echo "$ac_cv_lib_error_at_line" >&6; } if test $ac_cv_lib_error_at_line = no; then case " $LIBOBJS " in *" error.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS error.$ac_objext" ;; esac fi for ac_header in vfork.h do : ac_fn_c_check_header_mongrel "$LINENO" "vfork.h" "ac_cv_header_vfork_h" "$ac_includes_default" if test "x$ac_cv_header_vfork_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_VFORK_H 1 _ACEOF fi done for ac_func in fork vfork do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" if eval test \"x\$"$as_ac_var"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done if test "x$ac_cv_func_fork" = xyes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working fork" >&5 $as_echo_n "checking for working fork... " >&6; } if ${ac_cv_func_fork_works+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_fork_works=cross else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { /* By Ruediger Kuhlmann. */ return fork () < 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_fork_works=yes else ac_cv_func_fork_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_fork_works" >&5 $as_echo "$ac_cv_func_fork_works" >&6; } else ac_cv_func_fork_works=$ac_cv_func_fork fi if test "x$ac_cv_func_fork_works" = xcross; then case $host in *-*-amigaos* | *-*-msdosdjgpp*) # Override, as these systems have only a dummy fork() stub ac_cv_func_fork_works=no ;; *) ac_cv_func_fork_works=yes ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: result $ac_cv_func_fork_works guessed because of cross compilation" >&5 $as_echo "$as_me: WARNING: result $ac_cv_func_fork_works guessed because of cross compilation" >&2;} fi ac_cv_func_vfork_works=$ac_cv_func_vfork if test "x$ac_cv_func_vfork" = xyes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working vfork" >&5 $as_echo_n "checking for working vfork... " >&6; } if ${ac_cv_func_vfork_works+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_vfork_works=cross else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Thanks to Paul Eggert for this test. */ $ac_includes_default #include #ifdef HAVE_VFORK_H # include #endif /* On some sparc systems, changes by the child to local and incoming argument registers are propagated back to the parent. The compiler is told about this with #include , but some compilers (e.g. gcc -O) don't grok . Test for this by using a static variable whose address is put into a register that is clobbered by the vfork. 
*/ static void #ifdef __cplusplus sparc_address_test (int arg) # else sparc_address_test (arg) int arg; #endif { static pid_t child; if (!child) { child = vfork (); if (child < 0) { perror ("vfork"); _exit(2); } if (!child) { arg = getpid(); write(-1, "", 0); _exit (arg); } } } int main () { pid_t parent = getpid (); pid_t child; sparc_address_test (0); child = vfork (); if (child == 0) { /* Here is another test for sparc vfork register problems. This test uses lots of local variables, at least as many local variables as main has allocated so far including compiler temporaries. 4 locals are enough for gcc 1.40.3 on a Solaris 4.1.3 sparc, but we use 8 to be safe. A buggy compiler should reuse the register of parent for one of the local variables, since it will think that parent can't possibly be used any more in this routine. Assigning to the local variable will thus munge parent in the parent process. */ pid_t p = getpid(), p1 = getpid(), p2 = getpid(), p3 = getpid(), p4 = getpid(), p5 = getpid(), p6 = getpid(), p7 = getpid(); /* Convince the compiler that p..p7 are live; otherwise, it might use the same hardware register for all 8 local variables. */ if (p != p1 || p != p2 || p != p3 || p != p4 || p != p5 || p != p6 || p != p7) _exit(1); /* On some systems (e.g. IRIX 3.3), vfork doesn't separate parent from child file descriptors. If the child closes a descriptor before it execs or exits, this munges the parent's descriptor as well. Test for this by closing stdout in the child. */ _exit(close(fileno(stdout)) != 0); } else { int status; struct stat st; while (wait(&status) != child) ; return ( /* Was there some problem with vforking? */ child < 0 /* Did the child fail? (This shouldn't happen.) */ || status /* Did the vfork/compiler bug occur? */ || parent != getpid() /* Did the file descriptor bug occur? */ || fstat(fileno(stdout), &st) != 0 ); } } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_vfork_works=yes else ac_cv_func_vfork_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_vfork_works" >&5 $as_echo "$ac_cv_func_vfork_works" >&6; } fi; if test "x$ac_cv_func_fork_works" = xcross; then ac_cv_func_vfork_works=$ac_cv_func_vfork { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: result $ac_cv_func_vfork_works guessed because of cross compilation" >&5 $as_echo "$as_me: WARNING: result $ac_cv_func_vfork_works guessed because of cross compilation" >&2;} fi if test "x$ac_cv_func_vfork_works" = xyes; then $as_echo "#define HAVE_WORKING_VFORK 1" >>confdefs.h else $as_echo "#define vfork fork" >>confdefs.h fi if test "x$ac_cv_func_fork_works" = xyes; then $as_echo "#define HAVE_WORKING_FORK 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether lstat correctly handles trailing slash" >&5 $as_echo_n "checking whether lstat correctly handles trailing slash... " >&6; } if ${ac_cv_func_lstat_dereferences_slashed_symlink+:} false; then : $as_echo_n "(cached) " >&6 else rm -f conftest.sym conftest.file echo >conftest.file if test "$as_ln_s" = "ln -s" && ln -s conftest.file conftest.sym; then if test "$cross_compiling" = yes; then : ac_cv_func_lstat_dereferences_slashed_symlink=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; /* Linux will dereference the symlink and fail, as required by POSIX. 
That is better in the sense that it means we will not have to compile and use the lstat wrapper. */ return lstat ("conftest.sym/", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_lstat_dereferences_slashed_symlink=yes else ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi else # If the `ln -s' command failed, then we probably don't even # have an lstat function. ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f conftest.sym conftest.file fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_lstat_dereferences_slashed_symlink" >&5 $as_echo "$ac_cv_func_lstat_dereferences_slashed_symlink" >&6; } test $ac_cv_func_lstat_dereferences_slashed_symlink = yes && cat >>confdefs.h <<_ACEOF #define LSTAT_FOLLOWS_SLASHED_SYMLINK 1 _ACEOF if test "x$ac_cv_func_lstat_dereferences_slashed_symlink" = xno; then case " $LIBOBJS " in *" lstat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS lstat.$ac_objext" ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether lstat accepts an empty string" >&5 $as_echo_n "checking whether lstat accepts an empty string... " >&6; } if ${ac_cv_func_lstat_empty_string_bug+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_lstat_empty_string_bug=yes else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; return lstat ("", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_lstat_empty_string_bug=no else ac_cv_func_lstat_empty_string_bug=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_lstat_empty_string_bug" >&5 $as_echo "$ac_cv_func_lstat_empty_string_bug" >&6; } if test $ac_cv_func_lstat_empty_string_bug = yes; then case " $LIBOBJS " in *" lstat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS lstat.$ac_objext" ;; esac cat >>confdefs.h <<_ACEOF #define HAVE_LSTAT_EMPTY_STRING_BUG 1 _ACEOF fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether lstat correctly handles trailing slash" >&5 $as_echo_n "checking whether lstat correctly handles trailing slash... " >&6; } if ${ac_cv_func_lstat_dereferences_slashed_symlink+:} false; then : $as_echo_n "(cached) " >&6 else rm -f conftest.sym conftest.file echo >conftest.file if test "$as_ln_s" = "ln -s" && ln -s conftest.file conftest.sym; then if test "$cross_compiling" = yes; then : ac_cv_func_lstat_dereferences_slashed_symlink=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; /* Linux will dereference the symlink and fail, as required by POSIX. That is better in the sense that it means we will not have to compile and use the lstat wrapper. */ return lstat ("conftest.sym/", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_lstat_dereferences_slashed_symlink=yes else ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi else # If the `ln -s' command failed, then we probably don't even # have an lstat function. 
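  # As in the identical trailing-slash check above, the conservative "no"
  # recorded just below makes the replacement lstat object get added to
  # LIBOBJS further down.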
ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f conftest.sym conftest.file fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_lstat_dereferences_slashed_symlink" >&5 $as_echo "$ac_cv_func_lstat_dereferences_slashed_symlink" >&6; } test $ac_cv_func_lstat_dereferences_slashed_symlink = yes && cat >>confdefs.h <<_ACEOF #define LSTAT_FOLLOWS_SLASHED_SYMLINK 1 _ACEOF if test "x$ac_cv_func_lstat_dereferences_slashed_symlink" = xno; then case " $LIBOBJS " in *" lstat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS lstat.$ac_objext" ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working memcmp" >&5 $as_echo_n "checking for working memcmp... " >&6; } if ${ac_cv_func_memcmp_working+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_memcmp_working=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { /* Some versions of memcmp are not 8-bit clean. */ char c0 = '\100', c1 = '\200', c2 = '\201'; if (memcmp(&c0, &c2, 1) >= 0 || memcmp(&c1, &c2, 1) >= 0) return 1; /* The Next x86 OpenStep bug shows up only when comparing 16 bytes or more and with at least one buffer not starting on a 4-byte boundary. William Lewis provided this test program. */ { char foo[21]; char bar[21]; int i; for (i = 0; i < 4; i++) { char *a = foo + i; char *b = bar + i; strcpy (a, "--------01111111"); strcpy (b, "--------10000000"); if (memcmp (a, b, 16) >= 0) return 1; } return 0; } ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_memcmp_working=yes else ac_cv_func_memcmp_working=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_memcmp_working" >&5 $as_echo "$ac_cv_func_memcmp_working" >&6; } test $ac_cv_func_memcmp_working = no && case " $LIBOBJS " in *" memcmp.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS memcmp.$ac_objext" ;; esac for ac_header in $ac_header_list do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default " if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done for ac_func in $ac_func_list do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" if eval test \"x\$"$as_ac_var"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working mktime" >&5 $as_echo_n "checking for working mktime... " >&6; } if ${ac_cv_func_working_mktime+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_working_mktime=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Test program from Paul Eggert and Tony Leneis. */ #ifdef TIME_WITH_SYS_TIME # include # include #else # ifdef HAVE_SYS_TIME_H # include # else # include # endif #endif #include #include #ifdef HAVE_UNISTD_H # include #endif #ifndef HAVE_ALARM # define alarm(X) /* empty */ #endif /* Work around redefinition to rpl_putenv by other config tests. */ #undef putenv static time_t time_t_max; static time_t time_t_min; /* Values we'll use to set the TZ environment variable. 
*/ static const char *tz_strings[] = { (const char *) 0, "TZ=GMT0", "TZ=JST-9", "TZ=EST+3EDT+2,M10.1.0/00:00:00,M2.3.0/00:00:00" }; #define N_STRINGS (sizeof (tz_strings) / sizeof (tz_strings[0])) /* Return 0 if mktime fails to convert a date in the spring-forward gap. Based on a problem report from Andreas Jaeger. */ static int spring_forward_gap () { /* glibc (up to about 1998-10-07) failed this test. */ struct tm tm; /* Use the portable POSIX.1 specification "TZ=PST8PDT,M4.1.0,M10.5.0" instead of "TZ=America/Vancouver" in order to detect the bug even on systems that don't support the Olson extension, or don't have the full zoneinfo tables installed. */ putenv ((char*) "TZ=PST8PDT,M4.1.0,M10.5.0"); tm.tm_year = 98; tm.tm_mon = 3; tm.tm_mday = 5; tm.tm_hour = 2; tm.tm_min = 0; tm.tm_sec = 0; tm.tm_isdst = -1; return mktime (&tm) != (time_t) -1; } static int mktime_test1 (time_t now) { struct tm *lt; return ! (lt = localtime (&now)) || mktime (lt) == now; } static int mktime_test (time_t now) { return (mktime_test1 (now) && mktime_test1 ((time_t) (time_t_max - now)) && mktime_test1 ((time_t) (time_t_min + now))); } static int irix_6_4_bug () { /* Based on code from Ariel Faigon. */ struct tm tm; tm.tm_year = 96; tm.tm_mon = 3; tm.tm_mday = 0; tm.tm_hour = 0; tm.tm_min = 0; tm.tm_sec = 0; tm.tm_isdst = -1; mktime (&tm); return tm.tm_mon == 2 && tm.tm_mday == 31; } static int bigtime_test (int j) { struct tm tm; time_t now; tm.tm_year = tm.tm_mon = tm.tm_mday = tm.tm_hour = tm.tm_min = tm.tm_sec = j; now = mktime (&tm); if (now != (time_t) -1) { struct tm *lt = localtime (&now); if (! (lt && lt->tm_year == tm.tm_year && lt->tm_mon == tm.tm_mon && lt->tm_mday == tm.tm_mday && lt->tm_hour == tm.tm_hour && lt->tm_min == tm.tm_min && lt->tm_sec == tm.tm_sec && lt->tm_yday == tm.tm_yday && lt->tm_wday == tm.tm_wday && ((lt->tm_isdst < 0 ? -1 : 0 < lt->tm_isdst) == (tm.tm_isdst < 0 ? -1 : 0 < tm.tm_isdst)))) return 0; } return 1; } static int year_2050_test () { /* The correct answer for 2050-02-01 00:00:00 in Pacific time, ignoring leap seconds. */ unsigned long int answer = 2527315200UL; struct tm tm; time_t t; tm.tm_year = 2050 - 1900; tm.tm_mon = 2 - 1; tm.tm_mday = 1; tm.tm_hour = tm.tm_min = tm.tm_sec = 0; tm.tm_isdst = -1; /* Use the portable POSIX.1 specification "TZ=PST8PDT,M4.1.0,M10.5.0" instead of "TZ=America/Vancouver" in order to detect the bug even on systems that don't support the Olson extension, or don't have the full zoneinfo tables installed. */ putenv ((char*) "TZ=PST8PDT,M4.1.0,M10.5.0"); t = mktime (&tm); /* Check that the result is either a failure, or close enough to the correct answer that we can assume the discrepancy is due to leap seconds. */ return (t == (time_t) -1 || (0 < t && answer - 120 <= t && t <= answer + 120)); } int main () { time_t t, delta; int i, j; /* This test makes some buggy mktime implementations loop. Give up after 60 seconds; a mktime slower than that isn't worth using anyway. */ alarm (60); for (;;) { t = (time_t_max << 1) + 1; if (t <= time_t_max) break; time_t_max = t; } time_t_min = - ((time_t) ~ (time_t) 0 == (time_t) -1) - time_t_max; delta = time_t_max / 997; /* a suitable prime number */ for (i = 0; i < N_STRINGS; i++) { if (tz_strings[i]) putenv ((char*) tz_strings[i]); for (t = 0; t <= time_t_max - delta; t += delta) if (! mktime_test (t)) return 1; if (! (mktime_test ((time_t) 1) && mktime_test ((time_t) (60 * 60)) && mktime_test ((time_t) (60 * 60 * 24)))) return 1; for (j = 1; ; j <<= 1) if (! 
bigtime_test (j)) return 1; else if (INT_MAX / 2 < j) break; if (! bigtime_test (INT_MAX)) return 1; } return ! (irix_6_4_bug () && spring_forward_gap () && year_2050_test ()); } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_working_mktime=yes else ac_cv_func_working_mktime=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_working_mktime" >&5 $as_echo "$ac_cv_func_working_mktime" >&6; } if test $ac_cv_func_working_mktime = no; then case " $LIBOBJS " in *" mktime.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS mktime.$ac_objext" ;; esac fi for ac_header in stdlib.h do : ac_fn_c_check_header_mongrel "$LINENO" "stdlib.h" "ac_cv_header_stdlib_h" "$ac_includes_default" if test "x$ac_cv_header_stdlib_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STDLIB_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU libc compatible malloc" >&5 $as_echo_n "checking for GNU libc compatible malloc... " >&6; } if ${ac_cv_func_malloc_0_nonnull+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_malloc_0_nonnull=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #if defined STDC_HEADERS || defined HAVE_STDLIB_H # include #else char *malloc (); #endif int main () { return ! malloc (0); ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_malloc_0_nonnull=yes else ac_cv_func_malloc_0_nonnull=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_malloc_0_nonnull" >&5 $as_echo "$ac_cv_func_malloc_0_nonnull" >&6; } if test $ac_cv_func_malloc_0_nonnull = yes; then : $as_echo "#define HAVE_MALLOC 1" >>confdefs.h else $as_echo "#define HAVE_MALLOC 0" >>confdefs.h case " $LIBOBJS " in *" malloc.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS malloc.$ac_objext" ;; esac $as_echo "#define malloc rpl_malloc" >>confdefs.h fi for ac_header in stdlib.h do : ac_fn_c_check_header_mongrel "$LINENO" "stdlib.h" "ac_cv_header_stdlib_h" "$ac_includes_default" if test "x$ac_cv_header_stdlib_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STDLIB_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU libc compatible realloc" >&5 $as_echo_n "checking for GNU libc compatible realloc... " >&6; } if ${ac_cv_func_realloc_0_nonnull+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_realloc_0_nonnull=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #if defined STDC_HEADERS || defined HAVE_STDLIB_H # include #else char *realloc (); #endif int main () { return ! 
realloc (0, 0); ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_realloc_0_nonnull=yes else ac_cv_func_realloc_0_nonnull=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_realloc_0_nonnull" >&5 $as_echo "$ac_cv_func_realloc_0_nonnull" >&6; } if test $ac_cv_func_realloc_0_nonnull = yes; then : $as_echo "#define HAVE_REALLOC 1" >>confdefs.h else $as_echo "#define HAVE_REALLOC 0" >>confdefs.h case " $LIBOBJS " in *" realloc.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS realloc.$ac_objext" ;; esac $as_echo "#define realloc rpl_realloc" >>confdefs.h fi for ac_header in sys/select.h sys/socket.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking types of arguments for select" >&5 $as_echo_n "checking types of arguments for select... " >&6; } if ${ac_cv_func_select_args+:} false; then : $as_echo_n "(cached) " >&6 else for ac_arg234 in 'fd_set *' 'int *' 'void *'; do for ac_arg1 in 'int' 'size_t' 'unsigned long int' 'unsigned int'; do for ac_arg5 in 'struct timeval *' 'const struct timeval *'; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default #ifdef HAVE_SYS_SELECT_H # include #endif #ifdef HAVE_SYS_SOCKET_H # include #endif int main () { extern int select ($ac_arg1, $ac_arg234, $ac_arg234, $ac_arg234, $ac_arg5); ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_func_select_args="$ac_arg1,$ac_arg234,$ac_arg5"; break 3 fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext done done done # Provide a safe default value. : "${ac_cv_func_select_args=int,int *,struct timeval *}" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_select_args" >&5 $as_echo "$ac_cv_func_select_args" >&6; } ac_save_IFS=$IFS; IFS=',' set dummy `echo "$ac_cv_func_select_args" | sed 's/\*/\*/g'` IFS=$ac_save_IFS shift cat >>confdefs.h <<_ACEOF #define SELECT_TYPE_ARG1 $1 _ACEOF cat >>confdefs.h <<_ACEOF #define SELECT_TYPE_ARG234 ($2) _ACEOF cat >>confdefs.h <<_ACEOF #define SELECT_TYPE_ARG5 ($3) _ACEOF rm -f conftest* { $as_echo "$as_me:${as_lineno-$LINENO}: checking return type of signal handlers" >&5 $as_echo_n "checking return type of signal handlers... " >&6; } if ${ac_cv_type_signal+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #include int main () { return *(signal (0, 0)) (0) == 1; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_type_signal=int else ac_cv_type_signal=void fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_type_signal" >&5 $as_echo "$ac_cv_type_signal" >&6; } cat >>confdefs.h <<_ACEOF #define RETSIGTYPE $ac_cv_type_signal _ACEOF ac_fn_c_check_decl "$LINENO" "strerror_r" "ac_cv_have_decl_strerror_r" "$ac_includes_default" if test "x$ac_cv_have_decl_strerror_r" = xyes; then : ac_have_decl=1 else ac_have_decl=0 fi cat >>confdefs.h <<_ACEOF #define HAVE_DECL_STRERROR_R $ac_have_decl _ACEOF for ac_func in strerror_r do : ac_fn_c_check_func "$LINENO" "strerror_r" "ac_cv_func_strerror_r" if test "x$ac_cv_func_strerror_r" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STRERROR_R 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether strerror_r returns char *" >&5 $as_echo_n "checking whether strerror_r returns char *... " >&6; } if ${ac_cv_func_strerror_r_char_p+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_func_strerror_r_char_p=no if test $ac_cv_have_decl_strerror_r = yes; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { char buf[100]; char x = *strerror_r (0, buf, sizeof buf); char *p = strerror_r (0, buf, sizeof buf); return !p || x; ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_func_strerror_r_char_p=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext else # strerror_r is not declared. Choose between # systems that have relatively inaccessible declarations for the # function. BeOS and DEC UNIX 4.0 fall in this category, but the # former has a strerror_r that returns char*, while the latter # has a strerror_r that returns `int'. # This test should segfault on the DEC system. if test "$cross_compiling" = yes; then : : else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default extern char *strerror_r (); int main () { char buf[100]; char x = *strerror_r (0, buf, sizeof buf); return ! isalpha (x); ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_strerror_r_char_p=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_strerror_r_char_p" >&5 $as_echo "$ac_cv_func_strerror_r_char_p" >&6; } if test $ac_cv_func_strerror_r_char_p = yes; then $as_echo "#define STRERROR_R_CHAR_P 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stat accepts an empty string" >&5 $as_echo_n "checking whether stat accepts an empty string... " >&6; } if ${ac_cv_func_stat_empty_string_bug+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_stat_empty_string_bug=yes else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ $ac_includes_default int main () { struct stat sbuf; return stat ("", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : ac_cv_func_stat_empty_string_bug=no else ac_cv_func_stat_empty_string_bug=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_stat_empty_string_bug" >&5 $as_echo "$ac_cv_func_stat_empty_string_bug" >&6; } if test $ac_cv_func_stat_empty_string_bug = yes; then case " $LIBOBJS " in *" stat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS stat.$ac_objext" ;; esac cat >>confdefs.h <<_ACEOF #define HAVE_STAT_EMPTY_STRING_BUG 1 _ACEOF fi for ac_func in acl dup2 floor ftruncate gethostname getdomainname getpid gmtime_r lchown localtime_r memchr memmove memset mkdir mkfifo regcomp rmdir select setenv socket strcasecmp strchr strcspn strdup strerror strncasecmp strstr strtol strtoul strtoull timegm tzset unsetenv getopt_long_only getgrouplist mkdtemp posix_fallocate readdir_r mkstemp mktemp do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" if eval test \"x\$"$as_ac_var"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for res_query in -lresolv" >&5 $as_echo_n "checking for res_query in -lresolv... " >&6; } if ${ac_cv_lib_resolv_res_query+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lresolv $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char res_query (); int main () { return res_query (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_resolv_res_query=yes else ac_cv_lib_resolv_res_query=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_resolv_res_query" >&5 $as_echo "$ac_cv_lib_resolv_res_query" >&6; } if test "x$ac_cv_lib_resolv_res_query" = xyes; then : LIBRESOLV=-lresolv else LIBRESOLV= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for __dn_skipname in -lresolv" >&5 $as_echo_n "checking for __dn_skipname in -lresolv... " >&6; } if ${ac_cv_lib_resolv___dn_skipname+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lresolv $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char __dn_skipname (); int main () { return __dn_skipname (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_resolv___dn_skipname=yes else ac_cv_lib_resolv___dn_skipname=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_resolv___dn_skipname" >&5 $as_echo "$ac_cv_lib_resolv___dn_skipname" >&6; } if test "x$ac_cv_lib_resolv___dn_skipname" = xyes; then : LIBRESOLV=-lresolv else LIBRESOLV= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for gethostbyname in -lnsl" >&5 $as_echo_n "checking for gethostbyname in -lnsl... " >&6; } if ${ac_cv_lib_nsl_gethostbyname+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lnsl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char gethostbyname (); int main () { return gethostbyname (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_nsl_gethostbyname=yes else ac_cv_lib_nsl_gethostbyname=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_nsl_gethostbyname" >&5 $as_echo "$ac_cv_lib_nsl_gethostbyname" >&6; } if test "x$ac_cv_lib_nsl_gethostbyname" = xyes; then : LIBRESOLV="$LIBRESOLV -lnsl" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getdomainname in -lnsl" >&5 $as_echo_n "checking for getdomainname in -lnsl... " >&6; } if ${ac_cv_lib_nsl_getdomainname+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lnsl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char getdomainname (); int main () { return getdomainname (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_nsl_getdomainname=yes else ac_cv_lib_nsl_getdomainname=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_nsl_getdomainname" >&5 $as_echo "$ac_cv_lib_nsl_getdomainname" >&6; } if test "x$ac_cv_lib_nsl_getdomainname" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBNSL 1 _ACEOF LIBS="-lnsl $LIBS" fi fi # check for platfom specific flags case " $LDFLAGS " in " -Wl,--no-undefined ") ;; " -Wl,-no-undefined ") ;; " -Wl,-z -Wl,defs ") ;; " -Wl,-z,defs ") ;; *) case "${host}" in *darwin*);; *) LDFLAGS="$LDFLAGS -Wl,--no-undefined" ;; esac ;; esac for ac_prog in pdflatex do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_PDFLATEX+:} false; then : $as_echo_n "(cached) " >&6 else case $PDFLATEX in [\\/]* | ?:[\\/]*) ac_cv_path_PDFLATEX="$PDFLATEX" # Let the user override the test with a path. 
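  # An absolute PDFLATEX in the environment is accepted verbatim by this
  # branch and the $PATH walk in the next one is skipped.  Illustrative
  # invocation only; the path is an assumption, not shipped by this package:
  #   PDFLATEX=/usr/bin/pdflatex ./configure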
;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PDFLATEX="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi PDFLATEX=$ac_cv_path_PDFLATEX if test -n "$PDFLATEX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PDFLATEX" >&5 $as_echo "$PDFLATEX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$PDFLATEX" && break done for ac_prog in doxygen do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_DOXYGEN+:} false; then : $as_echo_n "(cached) " >&6 else case $DOXYGEN in [\\/]* | ?:[\\/]*) ac_cv_path_DOXYGEN="$DOXYGEN" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_DOXYGEN="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi DOXYGEN=$ac_cv_path_DOXYGEN if test -n "$DOXYGEN"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DOXYGEN" >&5 $as_echo "$DOXYGEN" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$DOXYGEN" && break done for ac_prog in dot do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_DOT+:} false; then : $as_echo_n "(cached) " >&6 else case $DOT in [\\/]* | ?:[\\/]*) ac_cv_path_DOT="$DOT" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_DOT="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi DOT=$ac_cv_path_DOT if test -n "$DOT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DOT" >&5 $as_echo "$DOT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$DOT" && break done # Check if user asks to skip documentation build # Check whether --enable-doc was given. 
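# Only this switch controls the documentation build; the tool checks that
# would have disabled it are commented out below because pre-built PDFs ship
# in the tarball.  Illustrative invocation, relying on the standard autoconf
# --enable/--disable mapping:
#   ./configure --disable-doc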
if test "${enable_doc+set}" = set; then : enableval=$enable_doc; enables_doc=$enableval fi #if test "x$enables_doc" = "xyes"; then # There is no point disabling docs due to missing tools since the pdf # files are both in svn and in the dist tarball # if test "x$PDFLATEX" = "x"; then # enables_doc="no" # AC_MSG_NOTICE([WARNING: Missing pdflatex - documentation won't be built]) # elif test "x$DOXYGEN" = "x"; then # enables_doc="no" # AC_MSG_NOTICE([WARNING: Missing doxygen - documentation won't be built]) # elif test "x$DOT" = "x"; then # enables_doc="no" # AC_MSG_NOTICE([WARNING: Missing dot - documentation won't be built]) # fi #fi { $as_echo "$as_me:${as_lineno-$LINENO}: Documentation enabled: $enables_doc" >&5 $as_echo "$as_me: Documentation enabled: $enables_doc" >&6;} if test "x$enables_doc" = "xyes"; then DOC_ENABLED_TRUE= DOC_ENABLED_FALSE='#' else DOC_ENABLED_TRUE='#' DOC_ENABLED_FALSE= fi if test -f python/python/arc/index.xml -o "x$DOXYGEN" != "x"; then PYDOXYGEN_TRUE= PYDOXYGEN_FALSE='#' else PYDOXYGEN_TRUE='#' PYDOXYGEN_FALSE= fi if test -f python/altpython/arc/index.xml -o "x$DOXYGEN" != "x"; then ALTPYDOXYGEN_TRUE= ALTPYDOXYGEN_FALSE='#' else ALTPYDOXYGEN_TRUE='#' ALTPYDOXYGEN_FALSE= fi # Check for explicitly and implicitely disabled services # A-Rex # Check whether --enable-a_rex_service was given. if test "${enable_a_rex_service+set}" = set; then : enableval=$enable_a_rex_service; enables_a_rex_service=$enableval fi if test "$enables_a_rex_service" = "yes"; then if test "x$SQLITE_INSTALLED" != "xyes" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: A-Rex can't be built without SQLite - disabling" >&5 $as_echo "$as_me: A-Rex can't be built without SQLite - disabling" >&6;} enables_a_rex_service="no" elif test "x$DBCXX_LIBS" = "x" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: A-Rex can't be built without C++ API for DB4.x - disabling" >&5 $as_echo "$as_me: A-Rex can't be built without C++ API for DB4.x - disabling" >&6;} enables_a_rex_service="no" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: A-Rex service enabled: $enables_a_rex_service" >&5 $as_echo "$as_me: A-Rex service enabled: $enables_a_rex_service" >&6;} if test "x$enables_a_rex_service" = "xyes"; then A_REX_SERVICE_ENABLED_TRUE= A_REX_SERVICE_ENABLED_FALSE='#' else A_REX_SERVICE_ENABLED_TRUE='#' A_REX_SERVICE_ENABLED_FALSE= fi # Internal job plugin # Check whether --enable-internal was given. if test "${enable_internal+set}" = set; then : enableval=$enable_internal; enables_internal=$enableval fi if test "$enables_internal" = "yes"; then if test "x$enables_a_rex_service" != "xyes" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: Internal job plugin can't be built without A-Rex - disabling" >&5 $as_echo "$as_me: Internal job plugin can't be built without A-Rex - disabling" >&6;} enables_internal="no" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: Internal plugin enabled: $enables_internal" >&5 $as_echo "$as_me: Internal plugin enabled: $enables_internal" >&6;} if test "x$enables_internal" = "xyes"; then INTERNAL_ENABLED_TRUE= INTERNAL_ENABLED_FALSE='#' else INTERNAL_ENABLED_TRUE='#' INTERNAL_ENABLED_FALSE= fi # Gridftpd # Check whether --enable-gridftpd_service was given. 
if test "${enable_gridftpd_service+set}" = set; then : enableval=$enable_gridftpd_service; enables_gridftpd_service=$enableval fi if test "$enables_gridftpd_service" = "yes"; then gridftpd_service_globus_pkgs="globus-common globus-io globus-gsi-credential globus-openssl-module globus-ftp-control" gridftpd_service_globus_pkgs_missing="" for pkg in $gridftpd_service_globus_pkgs do var=`echo '$'$pkg|tr '\-a-z' '_A-Z'|sed 's/$/_VERSION/'` if test -z "`eval echo $var`" then gridftpd_service_globus_pkgs_missing="$gridftpd_service_globus_pkgs_missing $pkg" fi done if test -n "$gridftpd_service_globus_pkgs_missing" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: GridFTP service can not be built (missing development packages for$gridftpd_service_globus_pkgs_missing) - disabling" >&5 $as_echo "$as_me: GridFTP service can not be built (missing development packages for$gridftpd_service_globus_pkgs_missing) - disabling" >&6;} enables_gridftpd_service="no" fi #check for struct statfs for ac_func in fstatfs do : ac_fn_c_check_func "$LINENO" "fstatfs" "ac_cv_func_fstatfs" if test "x$ac_cv_func_fstatfs" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_FSTATFS 1 _ACEOF fi done for ac_header in sys/param.h sys/statfs.h sys/mount.h sys/vfs.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done ac_fn_c_check_member "$LINENO" "struct statfs" "f_type" "ac_cv_member_struct_statfs_f_type" "$ac_includes_default #if HAVE_SYS_STATFS_H #include #endif #if HAVE_SYS_MOUNT_H #include #endif #if HAVE_SYS_VFS_H #include #endif " if test "x$ac_cv_member_struct_statfs_f_type" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STRUCT_STATFS_F_TYPE 1 _ACEOF fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: Gridftpd service enabled: $enables_gridftpd_service" >&5 $as_echo "$as_me: Gridftpd service enabled: $enables_gridftpd_service" >&6;} if test "x$enables_gridftpd_service" = "xyes"; then GRIDFTPD_SERVICE_ENABLED_TRUE= GRIDFTPD_SERVICE_ENABLED_FALSE='#' else GRIDFTPD_SERVICE_ENABLED_TRUE='#' GRIDFTPD_SERVICE_ENABLED_FALSE= fi # LDAP service # Check whether --enable-ldap_service was given. if test "${enable_ldap_service+set}" = set; then : enableval=$enable_ldap_service; enables_ldap_service=$enableval fi { $as_echo "$as_me:${as_lineno-$LINENO}: LDAP Infosystem service enabled: $enables_ldap_service" >&5 $as_echo "$as_me: LDAP Infosystem service enabled: $enables_ldap_service" >&6;} if test "x$enables_ldap_service" = "xyes"; then LDAP_SERVICE_ENABLED_TRUE= LDAP_SERVICE_ENABLED_FALSE='#' else LDAP_SERVICE_ENABLED_TRUE='#' LDAP_SERVICE_ENABLED_FALSE= fi # LDAP monitor # Check whether --enable-monitor was given. if test "${enable_monitor+set}" = set; then : enableval=$enable_monitor; enables_monitor=$enableval fi { $as_echo "$as_me:${as_lineno-$LINENO}: LDAP Monitor enabled: $enables_monitor" >&5 $as_echo "$as_me: LDAP Monitor enabled: $enables_monitor" >&6;} if test "x$enables_monitor" = "xyes"; then MONITOR_ENABLED_TRUE= MONITOR_ENABLED_FALSE='#' else MONITOR_ENABLED_TRUE='#' MONITOR_ENABLED_FALSE= fi # Cache service # Check whether --enable-candypond was given. if test "${enable_candypond+set}" = set; then : enableval=$enable_candypond; enables_candypond=$enableval fi if test "$enables_candypond" = "yes"; then if test ! 
"x$enables_a_rex_service" = "xyes" ; then enables_candypond="no" { $as_echo "$as_me:${as_lineno-$LINENO}: CandyPond can't be built without A-REX - disabling" >&5 $as_echo "$as_me: CandyPond can't be built without A-REX - disabling" >&6;} fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: CandyPond enabled: $enables_candypond" >&5 $as_echo "$as_me: CandyPond enabled: $enables_candypond" >&6;} if test "x$enables_candypond" = "xyes"; then CANDYPOND_ENABLED_TRUE= CANDYPOND_ENABLED_FALSE='#' else CANDYPOND_ENABLED_TRUE='#' CANDYPOND_ENABLED_FALSE= fi # DataDelivery service # Check whether --enable-datadelivery_service was given. if test "${enable_datadelivery_service+set}" = set; then : enableval=$enable_datadelivery_service; enables_datadelivery_service=$enableval fi { $as_echo "$as_me:${as_lineno-$LINENO}: DataDelivery service enabled: $enables_datadelivery_service" >&5 $as_echo "$as_me: DataDelivery service enabled: $enables_datadelivery_service" >&6;} if test "x$enables_datadelivery_service" = "xyes"; then DATADELIVERY_SERVICE_ENABLED_TRUE= DATADELIVERY_SERVICE_ENABLED_FALSE='#' else DATADELIVERY_SERVICE_ENABLED_TRUE='#' DATADELIVERY_SERVICE_ENABLED_FALSE= fi # ACIX service # Check whether --enable-acix was given. if test "${enable_acix+set}" = set; then : enableval=$enable_acix; enables_acix=$enableval fi for ac_prog in twistd-${PYTHON_MAJOR} twistd${PYTHON_MAJOR} twistd do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_TWISTD+:} false; then : $as_echo_n "(cached) " >&6 else case $TWISTD in [\\/]* | ?:[\\/]*) ac_cv_path_TWISTD="$TWISTD" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_TWISTD="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi TWISTD=$ac_cv_path_TWISTD if test -n "$TWISTD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $TWISTD" >&5 $as_echo "$TWISTD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$TWISTD" && break done if test "x$TWISTD" = "x"; then { $as_echo "$as_me:${as_lineno-$LINENO}: twistd not found - ACIX service disabled" >&5 $as_echo "$as_me: twistd not found - ACIX service disabled" >&6;} enables_acix="no" fi { $as_echo "$as_me:${as_lineno-$LINENO}: ACIX enabled: $enables_acix" >&5 $as_echo "$as_me: ACIX enabled: $enables_acix" >&6;} if test "x$enables_acix" = "xyes"; then ACIX_ENABLED_TRUE= ACIX_ENABLED_FALSE='#' else ACIX_ENABLED_TRUE='#' ACIX_ENABLED_FALSE= fi # trial command (from python-twisted-core) is used for acix unittests for ac_prog in trial-${PYTHON_MAJOR} trial${PYTHON_MAJOR} trial do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_TRIAL+:} false; then : $as_echo_n "(cached) " >&6 else case $TRIAL in [\\/]* | ?:[\\/]*) ac_cv_path_TRIAL="$TRIAL" # Let the user override the test with a path. 
;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_TRIAL="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi TRIAL=$ac_cv_path_TRIAL if test -n "$TRIAL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $TRIAL" >&5 $as_echo "$TRIAL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$TRIAL" && break done if test "x$TRIAL" = "x"; then { $as_echo "$as_me:${as_lineno-$LINENO}: trial not found - ACIX unit tests will be skipped" >&5 $as_echo "$as_me: trial not found - ACIX unit tests will be skipped" >&6;} fi # unit tests also require python >=2.6 if test "x$TRIAL" != "x" && test "x$PYTHON_VERSION" != "x2.4" && test "x$PYTHON_VERSION" != "x2.5"; then ACIX_TESTS_ENABLED_TRUE= ACIX_TESTS_ENABLED_FALSE='#' else ACIX_TESTS_ENABLED_TRUE='#' ACIX_TESTS_ENABLED_FALSE= fi # Check for explicitly and implicitely disabled clients # Check whether --enable-compute_client was given. if test "${enable_compute_client+set}" = set; then : enableval=$enable_compute_client; enables_compute_client=$enableval fi { $as_echo "$as_me:${as_lineno-$LINENO}: Compute client tools enabled: $enables_compute_client" >&5 $as_echo "$as_me: Compute client tools enabled: $enables_compute_client" >&6;} if test "x$enables_compute_client" = "xyes"; then COMPUTE_CLIENT_ENABLED_TRUE= COMPUTE_CLIENT_ENABLED_FALSE='#' else COMPUTE_CLIENT_ENABLED_TRUE='#' COMPUTE_CLIENT_ENABLED_FALSE= fi # Check whether --enable-credentials_client was given. if test "${enable_credentials_client+set}" = set; then : enableval=$enable_credentials_client; enables_credentials_client=$enableval fi { $as_echo "$as_me:${as_lineno-$LINENO}: Credentials client tools enabled: $enables_credentials_client" >&5 $as_echo "$as_me: Credentials client tools enabled: $enables_credentials_client" >&6;} if test "x$enables_credentials_client" = "xyes"; then CREDENTIALS_CLIENT_ENABLED_TRUE= CREDENTIALS_CLIENT_ENABLED_FALSE='#' else CREDENTIALS_CLIENT_ENABLED_TRUE='#' CREDENTIALS_CLIENT_ENABLED_FALSE= fi # Check whether --enable-data_client was given. if test "${enable_data_client+set}" = set; then : enableval=$enable_data_client; enables_data_client=$enableval fi { $as_echo "$as_me:${as_lineno-$LINENO}: Data client tools enabled: $enables_data_client" >&5 $as_echo "$as_me: Data client tools enabled: $enables_data_client" >&6;} if test "x$enables_data_client" = "xyes"; then DATA_CLIENT_ENABLED_TRUE= DATA_CLIENT_ENABLED_FALSE='#' else DATA_CLIENT_ENABLED_TRUE='#' DATA_CLIENT_ENABLED_FALSE= fi # Check whether --enable-emies_client was given. if test "${enable_emies_client+set}" = set; then : enableval=$enable_emies_client; enables_emies_client=$enableval fi { $as_echo "$as_me:${as_lineno-$LINENO}: EMI ES plugin(s) enabled: $enables_emies_client" >&5 $as_echo "$as_me: EMI ES plugin(s) enabled: $enables_emies_client" >&6;} if test "x$enables_emies_client" = "xyes"; then EMIES_ENABLED_TRUE= EMIES_ENABLED_FALSE='#' else EMIES_ENABLED_TRUE='#' EMIES_ENABLED_FALSE= fi # Check whether --enable-arcrest_client was given. 
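# Each client group above and the ARC REST plugin below follow one pattern:
# an optional --enable-*/--disable-* switch fills the corresponding
# enables_* variable, which then drives an Automake conditional.
# Illustrative combination (assumes the defaults set elsewhere in this
# script are "yes"):
#   ./configure --disable-compute-client --disable-data-client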
if test "${enable_arcrest_client+set}" = set; then : enableval=$enable_arcrest_client; enables_arcrest_client=$enableval fi { $as_echo "$as_me:${as_lineno-$LINENO}: ARC REST plugin(s) enabled: $enables_arcrest_client" >&5 $as_echo "$as_me: ARC REST plugin(s) enabled: $enables_arcrest_client" >&6;} if test "x$enables_arcrest_client" = "xyes"; then ARCREST_ENABLED_TRUE= ARCREST_ENABLED_FALSE='#' else ARCREST_ENABLED_TRUE='#' ARCREST_ENABLED_FALSE= fi # Check for consistency among disabled components if test "$enables_hed" = "no"; then if test "$enables_a_rex_service" = "yes" -o \ "$enables_candypond" = "yes" -o \ "$enables_datadelivery_service" = "yes" -o \ "$enables_compute_client" = "yes" -o \ "$enables_credentials_client" = "yes" -o \ "$enables_data_client" = "yes" -o \ ; then as_fn_error $? "HED is needed for building any of the client or service tools. Please enable HED by using --enable-hed." "$LINENO" 5 fi fi if test "x$enables_hed" = "xyes"; then HED_ENABLED_TRUE= HED_ENABLED_FALSE='#' else HED_ENABLED_TRUE='#' HED_ENABLED_FALSE= fi # A-Rex specific hack for backend scripts tmp_dir=/tmp gnu_time=/usr/bin/time case "${host}" in *darwin*) # hostname -f does not work on OS X nodename="hostname" ;; *) nodename="/bin/hostname -f" ;; esac arc_location=$prefix # Shell for the job control scripts posix_shell='/bin/sh' DATE=`date +%Y-%m-%d ${SOURCE_DATE_EPOCH:+-u -d @$SOURCE_DATE_EPOCH}` #DATER=`date -R` DATER=`date +'%a, %d %b %Y %H:%M:%S %z'` SPECDATE=`LANG=C date +"%a %b %d %Y"` ac_config_files="$ac_config_files Makefile include/arc/ArcVersion.h src/Makefile src/external/Makefile src/external/cJSON/Makefile src/hed/Makefile src/hed/libs/compute/Makefile src/hed/libs/compute/test/Makefile src/hed/libs/compute/examples/Makefile src/hed/libs/common/ArcVersion.h src/hed/libs/common/Makefile src/hed/libs/common/test/Makefile src/hed/libs/communication/Makefile src/hed/libs/credential/Makefile src/hed/libs/credential/test/Makefile src/hed/libs/credentialmod/Makefile src/hed/libs/crypto/Makefile src/hed/libs/cryptomod/Makefile src/hed/libs/data/Makefile src/hed/libs/data/cache-clean.1 src/hed/libs/data/cache-list.1 src/hed/libs/data/test/Makefile src/hed/libs/data/examples/Makefile src/hed/libs/Makefile src/hed/libs/loader/Makefile src/hed/libs/loader/schema/Makefile src/hed/libs/loader/test/Makefile src/hed/libs/message/Makefile src/hed/libs/message/test/Makefile src/hed/libs/security/Makefile src/hed/libs/security/ArcPDP/Makefile src/hed/libs/security/ArcPDP/attr/Makefile src/hed/libs/security/ArcPDP/policy/Makefile src/hed/libs/security/ArcPDP/alg/Makefile src/hed/libs/security/ArcPDP/fn/Makefile src/hed/libs/credentialstore/Makefile src/hed/libs/ws-addressing/Makefile src/hed/libs/ws-security/Makefile src/hed/libs/ws-security/test/Makefile src/hed/libs/infosys/Makefile src/hed/libs/infosys/schema/Makefile src/hed/libs/infosys/test/Makefile src/hed/libs/delegation/Makefile src/hed/libs/delegation/test/Makefile src/hed/libs/xmlsec/Makefile src/hed/libs/globusutils/Makefile src/hed/libs/otokens/Makefile src/hed/daemon/Makefile src/hed/daemon/scripts/Makefile src/hed/daemon/schema/Makefile src/hed/daemon/unix/Makefile src/hed/mcc/Makefile src/hed/mcc/soap/Makefile src/hed/mcc/tcp/Makefile src/hed/mcc/tcp/schema/Makefile src/hed/mcc/http/Makefile src/hed/mcc/http/schema/Makefile src/hed/mcc/tls/Makefile src/hed/mcc/tls/schema/Makefile src/hed/mcc/msgvalidator/Makefile src/hed/mcc/msgvalidator/schema/Makefile src/hed/acc/Makefile src/hed/acc/GRIDFTPJOB/Makefile src/hed/acc/ARCREST/Makefile 
src/hed/acc/EMIES/Makefile src/hed/acc/EMIES/arcemiestest.1 src/hed/acc/EMIES/schema/Makefile src/hed/acc/Broker/Makefile src/hed/acc/Broker/test/Makefile src/hed/acc/PythonBroker/Makefile src/hed/acc/JobDescriptionParser/Makefile src/hed/acc/JobDescriptionParser/test/Makefile src/hed/acc/ARCHERY/Makefile src/hed/acc/LDAP/Makefile src/hed/acc/TEST/Makefile src/hed/dmc/Makefile src/hed/dmc/file/Makefile src/hed/dmc/gridftp/Makefile src/hed/dmc/http/Makefile src/hed/dmc/ldap/Makefile src/hed/dmc/srm/Makefile src/hed/dmc/srm/srmclient/Makefile src/hed/dmc/gfal/Makefile src/hed/dmc/xrootd/Makefile src/hed/dmc/mock/Makefile src/hed/dmc/acix/Makefile src/hed/dmc/rucio/Makefile src/hed/dmc/s3/Makefile src/hed/profiles/general/general.xml src/hed/shc/Makefile src/hed/shc/arcpdp/Makefile src/hed/shc/arcpdp/schema/Makefile src/hed/shc/xacmlpdp/Makefile src/hed/shc/xacmlpdp/schema/Makefile src/hed/shc/delegationpdp/Makefile src/hed/shc/delegationpdp/schema/Makefile src/hed/shc/gaclpdp/Makefile src/hed/shc/pdpserviceinvoker/Makefile src/hed/shc/pdpserviceinvoker/schema/Makefile src/hed/shc/allowpdp/Makefile src/hed/shc/denypdp/Makefile src/hed/shc/simplelistpdp/Makefile src/hed/shc/simplelistpdp/schema/Makefile src/hed/shc/arcauthzsh/Makefile src/hed/shc/arcauthzsh/schema/Makefile src/hed/shc/usernametokensh/Makefile src/hed/shc/usernametokensh/schema/Makefile src/hed/shc/x509tokensh/Makefile src/hed/shc/x509tokensh/schema/Makefile src/hed/shc/samltokensh/Makefile src/hed/shc/samltokensh/schema/Makefile src/hed/shc/saml2sso_assertionconsumersh/Makefile src/hed/shc/delegationsh/Makefile src/hed/shc/delegationsh/schema/Makefile src/hed/shc/legacy/Makefile src/hed/shc/legacy/schema/Makefile src/hed/shc/otokens/Makefile src/hed/identitymap/Makefile src/hed/identitymap/schema/Makefile src/libs/Makefile src/libs/data-staging/Makefile src/libs/data-staging/test/Makefile src/libs/data-staging/examples/Makefile src/services/Makefile src/services/a-rex/Makefile src/services/a-rex/arc-arex src/services/a-rex/arc-arex.service src/services/a-rex/arc-arex-start src/services/a-rex/arc-arex-ws src/services/a-rex/arc-arex-ws.service src/services/a-rex/arc-arex-ws-start src/services/a-rex/a-rex-backtrace-collect src/services/a-rex/a-rex-backtrace-collect.8 src/services/a-rex/perferator src/services/a-rex/grid-manager/arc-blahp-logger.8 src/services/a-rex/grid-manager/gm-jobs.8 src/services/a-rex/grid-manager/gm-delegations-converter.8 src/services/a-rex/rest/Makefile src/services/a-rex/delegation/Makefile src/services/a-rex/grid-manager/Makefile src/services/a-rex/grid-manager/accounting/Makefile src/services/a-rex/grid-manager/conf/Makefile src/services/a-rex/grid-manager/files/Makefile src/services/a-rex/grid-manager/jobs/Makefile src/services/a-rex/grid-manager/jobplugin/Makefile src/services/a-rex/grid-manager/log/Makefile src/services/a-rex/grid-manager/mail/Makefile src/services/a-rex/grid-manager/misc/Makefile src/services/a-rex/grid-manager/run/Makefile src/services/a-rex/internaljobplugin/Makefile src/services/a-rex/grid-manager/arc-config-check.1 src/services/a-rex/infoproviders/Makefile src/services/a-rex/infoproviders/CEinfo.pl src/services/a-rex/infoproviders/ConfigCentral.pm src/services/a-rex/infoproviders/PerfData.pl src/services/a-rex/infoproviders/test/Makefile src/services/a-rex/lrms/Makefile src/services/a-rex/lrms/test/Makefile src/services/a-rex/lrms/arc/Makefile src/services/a-rex/lrms/arc/lrms/Makefile src/services/a-rex/lrms/arc/lrms/common/Makefile src/services/a-rex/lrms/lrms_common.sh 
src/services/a-rex/lrms/condor/Makefile src/services/a-rex/lrms/condor/scan-condor-job src/services/a-rex/lrms/condor/cancel-condor-job src/services/a-rex/lrms/condor/submit-condor-job src/services/a-rex/lrms/fork/Makefile src/services/a-rex/lrms/fork/scan-fork-job src/services/a-rex/lrms/fork/submit-fork-job src/services/a-rex/lrms/fork/cancel-fork-job src/services/a-rex/lrms/ll/Makefile src/services/a-rex/lrms/ll/submit-ll-job src/services/a-rex/lrms/ll/cancel-ll-job src/services/a-rex/lrms/ll/scan-ll-job src/services/a-rex/lrms/lsf/Makefile src/services/a-rex/lrms/lsf/submit-lsf-job src/services/a-rex/lrms/lsf/cancel-lsf-job src/services/a-rex/lrms/lsf/scan-lsf-job src/services/a-rex/lrms/pbs/Makefile src/services/a-rex/lrms/pbs/submit-pbs-job src/services/a-rex/lrms/pbs/cancel-pbs-job src/services/a-rex/lrms/pbs/scan-pbs-job src/services/a-rex/lrms/pbspro/Makefile src/services/a-rex/lrms/pbspro/submit-pbspro-job src/services/a-rex/lrms/pbspro/cancel-pbspro-job src/services/a-rex/lrms/pbspro/scan-pbspro-job src/services/a-rex/lrms/sge/Makefile src/services/a-rex/lrms/sge/submit-sge-job src/services/a-rex/lrms/sge/scan-sge-job src/services/a-rex/lrms/sge/cancel-sge-job src/services/a-rex/lrms/slurm/Makefile src/services/a-rex/lrms/slurm/submit-SLURM-job src/services/a-rex/lrms/slurm/scan-SLURM-job src/services/a-rex/lrms/slurm/cancel-SLURM-job src/services/a-rex/lrms/slurm/test/Makefile src/services/a-rex/lrms/slurm/test/scan/Makefile src/services/a-rex/lrms/slurm/test/submit/Makefile src/services/a-rex/lrms/boinc/Makefile src/services/a-rex/lrms/boinc/submit-boinc-job src/services/a-rex/lrms/boinc/scan-boinc-job src/services/a-rex/lrms/boinc/cancel-boinc-job src/services/a-rex/lrms/slurmpy/Makefile src/services/a-rex/lrms/slurmpy/submit-SLURMPY-job src/services/a-rex/lrms/slurmpy/scan-SLURMPY-job src/services/a-rex/lrms/slurmpy/cancel-SLURMPY-job src/services/a-rex/lrms/slurmpy/test/Makefile src/services/a-rex/lrms/slurmpy/test/submit/Makefile src/services/a-rex/lrms/slurmpy/test/scan/Makefile src/services/a-rex/rte/Makefile src/services/a-rex/rte/ENV/PROXY src/services/a-rex/rte/ENV/CANDYPOND src/services/a-rex/schema/Makefile src/services/acix/Makefile src/services/acix/scanner/Makefile src/services/acix/scanner/arc-acix-scanner src/services/acix/scanner/arc-acix-scanner-start src/services/acix/scanner/arc-acix-scanner.service src/services/acix/scanner/test/Makefile src/services/acix/core/Makefile src/services/acix/core/test/Makefile src/services/acix/indexserver/Makefile src/services/acix/indexserver/arc-acix-index src/services/acix/indexserver/arc-acix-index-start src/services/acix/indexserver/arc-acix-index.service src/services/acix/indexserver/test/Makefile src/services/candypond/Makefile src/services/data-staging/Makefile src/services/data-staging/arc-datadelivery-service src/services/data-staging/arc-datadelivery-service.service src/services/data-staging/arc-datadelivery-service-start src/services/gridftpd/Makefile src/services/gridftpd/arc-gridftpd src/services/gridftpd/arc-gridftpd.service src/services/gridftpd/arc-gridftpd-start src/services/gridftpd/gridftpd.8 src/services/gridftpd/auth/Makefile src/services/gridftpd/conf/Makefile src/services/gridftpd/misc/Makefile src/services/gridftpd/run/Makefile src/services/gridftpd/fileplugin/Makefile src/services/ldap-infosys/Makefile src/services/ldap-infosys/create-bdii-config src/services/ldap-infosys/create-slapd-config src/services/ldap-infosys/arc-infosys-ldap src/services/ldap-infosys/arc-infosys-ldap.service 
src/services/ldap-infosys/arc-infosys-ldap-slapd.service src/services/monitor/Makefile src/services/monitor/monitor src/services/monitor/README src/services/monitor/man/Makefile src/services/monitor/man/monitor.7 src/services/monitor/includes/Makefile src/services/monitor/mon-icons/Makefile src/services/monitor/lang/Makefile src/services/examples/Makefile src/services/examples/echo_python/Makefile src/services/wrappers/Makefile src/services/wrappers/python/Makefile src/services/wrappers/python/schema/Makefile src/clients/Makefile src/clients/data/Makefile src/clients/data/arccp.1 src/clients/data/arcls.1 src/clients/data/arcrm.1 src/clients/data/arcmkdir.1 src/clients/data/arcrename.1 src/clients/credentials/Makefile src/clients/credentials/arcproxy.1 src/clients/compute/Makefile src/clients/compute/arcstat.1 src/clients/compute/arcinfo.1 src/clients/compute/arcsub.1 src/clients/compute/arcclean.1 src/clients/compute/arckill.1 src/clients/compute/arcget.1 src/clients/compute/arccat.1 src/clients/compute/arcresub.1 src/clients/compute/arcsync.1 src/clients/compute/arcrenew.1 src/clients/compute/arcresume.1 src/clients/compute/arctest.1 src/tests/Makefile src/tests/echo/Makefile src/tests/echo/perftest.1 src/tests/echo/echo_service.xml.example src/tests/echo/schema/Makefile src/tests/policy-delegation/Makefile src/tests/delegation/Makefile src/tests/translator/Makefile src/tests/xpath/Makefile src/tests/arcpolicy/Makefile src/tests/perf/Makefile src/tests/perf/arcperftest.1 src/tests/client/Makefile src/tests/lrms/Makefile src/utils/archery/Makefile src/utils/archery/archery-manage src/utils/python/Makefile src/utils/python/arccandypond src/utils/python/arcctl src/utils/python/arcctl.1 src/utils/python/jura-ng src/utils/python/arc/Makefile src/utils/python/arc/gen_paths_dist.sh src/utils/python/arc/utils/Makefile src/utils/python/arc/control/Makefile src/utils/hed/wsdl2hed.1 src/utils/hed/arcplugin.1 src/utils/hed/Makefile src/utils/gridmap/nordugridmap.cron src/utils/gridmap/nordugridmap.8 src/utils/gridmap/Makefile src/utils/Makefile src/wn/Makefile src/doc/Makefile src/doc/arc.conf.5 swig/Makefile python/Makefile python/Doxyfile.api python/python/Makefile python/python/arc/Makefile python/altpython/Makefile python/altpython/arc/Makefile python/test/Makefile python/test/python/Makefile python/test/altpython/Makefile python/examples/Makefile po/Makefile.in include/Makefile debian/Makefile debian/changelog.deb nordugrid-arc.spec src/hed/daemon/arched.8 src/hed/daemon/scripts/arched src/hed/daemon/scripts/arched.service src/hed/daemon/scripts/arched-start src/doxygen/Makefile" ac_config_files="$ac_config_files src/utils/python/arcconfig-parser" cat >confcache <<\_ACEOF # This file is a shell script that caches the results of configure # tests run on this system so they can be shared between configure # scripts and configure runs, see configure's option --config-cache. # It is not useful on other systems. If it contains results you don't # want to keep, you may remove or edit it. # # config.status only pays attention to the cache file if you give it # the --recheck option to rerun configure. # # `ac_cv_env_foo' variables (set or unset) will be overridden when # loading this file, other *unset* `ac_cv_foo' will be assigned the # following values. _ACEOF # The following way of writing the cache mishandles newlines in values, # but we know of no workaround that is simple, portable, and efficient. # So, we kill variables containing newlines. 
# Ultrix sh set writes to stderr and can't be redirected directly, # and sets the high bit in the cache file unless we assign to the vars. ( for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space=' '; set) 2>&1` in #( *${as_nl}ac_space=\ *) # `set' does not quote correctly, so add quotes: double-quote # substitution turns \\\\ into \\, and sed turns \\ into \. sed -n \ "s/'/'\\\\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" ;; #( *) # `set' quotes correctly as required by POSIX, so do not add quotes. sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) | sed ' /^ac_cv_env_/b end t clear :clear s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ t end s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ :end' >>confcache if diff "$cache_file" confcache >/dev/null 2>&1; then :; else if test -w "$cache_file"; then if test "x$cache_file" != "x/dev/null"; then { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 $as_echo "$as_me: updating cache $cache_file" >&6;} if test ! -f "$cache_file" || test -h "$cache_file"; then cat confcache >"$cache_file" else case $cache_file in #( */* | ?:*) mv -f confcache "$cache_file"$$ && mv -f "$cache_file"$$ "$cache_file" ;; #( *) mv -f confcache "$cache_file" ;; esac fi fi else { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 $as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} fi fi rm -f confcache test "x$prefix" = xNONE && prefix=$ac_default_prefix # Let make expand exec_prefix. test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' DEFS=-DHAVE_CONFIG_H ac_libobjs= ac_ltlibobjs= U= for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue # 1. Remove the extension, and $U if already installed. ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' ac_i=`$as_echo "$ac_i" | sed "$ac_script"` # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR # will be set to the directory where LIBOBJS objects are built. as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' done LIBOBJS=$ac_libobjs LTLIBOBJS=$ac_ltlibobjs { $as_echo "$as_me:${as_lineno-$LINENO}: checking that generated files are newer than configure" >&5 $as_echo_n "checking that generated files are newer than configure... " >&6; } if test -n "$am_sleep_pid"; then # Hide warnings about reused PIDs. wait $am_sleep_pid 2>/dev/null fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: done" >&5 $as_echo "done" >&6; } if test -n "$EXEEXT"; then am__EXEEXT_TRUE= am__EXEEXT_FALSE='#' else am__EXEEXT_TRUE='#' am__EXEEXT_FALSE= fi if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then as_fn_error $? "conditional \"AMDEP\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCXX\" was never defined. Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 fi if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${SYSTEMD_UNITS_ENABLED_TRUE}" && test -z "${SYSTEMD_UNITS_ENABLED_FALSE}"; then as_fn_error $? "conditional \"SYSTEMD_UNITS_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${SYSV_SCRIPTS_ENABLED_TRUE}" && test -z "${SYSV_SCRIPTS_ENABLED_FALSE}"; then as_fn_error $? "conditional \"SYSV_SCRIPTS_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${PEDANTIC_COMPILE_TRUE}" && test -z "${PEDANTIC_COMPILE_FALSE}"; then as_fn_error $? "conditional \"PEDANTIC_COMPILE\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${SWIG_ENABLED_TRUE}" && test -z "${SWIG_ENABLED_FALSE}"; then as_fn_error $? "conditional \"SWIG_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${PYTHON_ENABLED_TRUE}" && test -z "${PYTHON_ENABLED_FALSE}"; then as_fn_error $? "conditional \"PYTHON_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${PYTHON3_TRUE}" && test -z "${PYTHON3_FALSE}"; then as_fn_error $? "conditional \"PYTHON3\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${PYTHON_SWIG_ENABLED_TRUE}" && test -z "${PYTHON_SWIG_ENABLED_FALSE}"; then as_fn_error $? "conditional \"PYTHON_SWIG_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${PYTHON_SERVICE_TRUE}" && test -z "${PYTHON_SERVICE_FALSE}"; then as_fn_error $? "conditional \"PYTHON_SERVICE\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${ALTPYTHON_ENABLED_TRUE}" && test -z "${ALTPYTHON_ENABLED_FALSE}"; then as_fn_error $? "conditional \"ALTPYTHON_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${ALTPYTHON3_TRUE}" && test -z "${ALTPYTHON3_FALSE}"; then as_fn_error $? "conditional \"ALTPYTHON3\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${PYLINT_ENABLED_TRUE}" && test -z "${PYLINT_ENABLED_FALSE}"; then as_fn_error $? "conditional \"PYLINT_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${NSS_ENABLED_TRUE}" && test -z "${NSS_ENABLED_FALSE}"; then as_fn_error $? "conditional \"NSS_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${SQLITE_ENABLED_TRUE}" && test -z "${SQLITE_ENABLED_FALSE}"; then as_fn_error $? "conditional \"SQLITE_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${LDNS_ENABLED_TRUE}" && test -z "${LDNS_ENABLED_FALSE}"; then as_fn_error $? "conditional \"LDNS_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${MACOSX_TRUE}" && test -z "${MACOSX_FALSE}"; then as_fn_error $? "conditional \"MACOSX\" was never defined. Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 fi if test -z "${MYSQL_LIBRARY_ENABLED_TRUE}" && test -z "${MYSQL_LIBRARY_ENABLED_FALSE}"; then as_fn_error $? "conditional \"MYSQL_LIBRARY_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${ARGUS_ENABLED_TRUE}" && test -z "${ARGUS_ENABLED_FALSE}"; then as_fn_error $? "conditional \"ARGUS_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${DBJSTORE_ENABLED_TRUE}" && test -z "${DBJSTORE_ENABLED_FALSE}"; then as_fn_error $? "conditional \"DBJSTORE_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${SQLITEJSTORE_ENABLED_TRUE}" && test -z "${SQLITEJSTORE_ENABLED_FALSE}"; then as_fn_error $? "conditional \"SQLITEJSTORE_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${GLOBUSUTILS_ENABLED_TRUE}" && test -z "${GLOBUSUTILS_ENABLED_FALSE}"; then as_fn_error $? "conditional \"GLOBUSUTILS_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${GRIDFTP_ENABLED_TRUE}" && test -z "${GRIDFTP_ENABLED_FALSE}"; then as_fn_error $? "conditional \"GRIDFTP_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${MOCK_DMC_ENABLED_TRUE}" && test -z "${MOCK_DMC_ENABLED_FALSE}"; then as_fn_error $? "conditional \"MOCK_DMC_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${GFAL_ENABLED_TRUE}" && test -z "${GFAL_ENABLED_FALSE}"; then as_fn_error $? "conditional \"GFAL_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${S3_DMC_ENABLED_TRUE}" && test -z "${S3_DMC_ENABLED_FALSE}"; then as_fn_error $? "conditional \"S3_DMC_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${XROOTD_ENABLED_TRUE}" && test -z "${XROOTD_ENABLED_FALSE}"; then as_fn_error $? "conditional \"XROOTD_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${XMLSEC_ENABLED_TRUE}" && test -z "${XMLSEC_ENABLED_FALSE}"; then as_fn_error $? "conditional \"XMLSEC_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${CPPUNIT_ENABLED_TRUE}" && test -z "${CPPUNIT_ENABLED_FALSE}"; then as_fn_error $? "conditional \"CPPUNIT_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${SRM_DMC_ENABLED_TRUE}" && test -z "${SRM_DMC_ENABLED_FALSE}"; then as_fn_error $? "conditional \"SRM_DMC_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${LDAP_ENABLED_TRUE}" && test -z "${LDAP_ENABLED_FALSE}"; then as_fn_error $? "conditional \"LDAP_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${PYTHON_LRMS_ENABLED_TRUE}" && test -z "${PYTHON_LRMS_ENABLED_FALSE}"; then as_fn_error $? "conditional \"PYTHON_LRMS_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${DOC_ENABLED_TRUE}" && test -z "${DOC_ENABLED_FALSE}"; then as_fn_error $? 
"conditional \"DOC_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${PYDOXYGEN_TRUE}" && test -z "${PYDOXYGEN_FALSE}"; then as_fn_error $? "conditional \"PYDOXYGEN\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${ALTPYDOXYGEN_TRUE}" && test -z "${ALTPYDOXYGEN_FALSE}"; then as_fn_error $? "conditional \"ALTPYDOXYGEN\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${A_REX_SERVICE_ENABLED_TRUE}" && test -z "${A_REX_SERVICE_ENABLED_FALSE}"; then as_fn_error $? "conditional \"A_REX_SERVICE_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${INTERNAL_ENABLED_TRUE}" && test -z "${INTERNAL_ENABLED_FALSE}"; then as_fn_error $? "conditional \"INTERNAL_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${GRIDFTPD_SERVICE_ENABLED_TRUE}" && test -z "${GRIDFTPD_SERVICE_ENABLED_FALSE}"; then as_fn_error $? "conditional \"GRIDFTPD_SERVICE_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${LDAP_SERVICE_ENABLED_TRUE}" && test -z "${LDAP_SERVICE_ENABLED_FALSE}"; then as_fn_error $? "conditional \"LDAP_SERVICE_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${MONITOR_ENABLED_TRUE}" && test -z "${MONITOR_ENABLED_FALSE}"; then as_fn_error $? "conditional \"MONITOR_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${CANDYPOND_ENABLED_TRUE}" && test -z "${CANDYPOND_ENABLED_FALSE}"; then as_fn_error $? "conditional \"CANDYPOND_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${DATADELIVERY_SERVICE_ENABLED_TRUE}" && test -z "${DATADELIVERY_SERVICE_ENABLED_FALSE}"; then as_fn_error $? "conditional \"DATADELIVERY_SERVICE_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${ACIX_ENABLED_TRUE}" && test -z "${ACIX_ENABLED_FALSE}"; then as_fn_error $? "conditional \"ACIX_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${ACIX_TESTS_ENABLED_TRUE}" && test -z "${ACIX_TESTS_ENABLED_FALSE}"; then as_fn_error $? "conditional \"ACIX_TESTS_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${COMPUTE_CLIENT_ENABLED_TRUE}" && test -z "${COMPUTE_CLIENT_ENABLED_FALSE}"; then as_fn_error $? "conditional \"COMPUTE_CLIENT_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${CREDENTIALS_CLIENT_ENABLED_TRUE}" && test -z "${CREDENTIALS_CLIENT_ENABLED_FALSE}"; then as_fn_error $? "conditional \"CREDENTIALS_CLIENT_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${DATA_CLIENT_ENABLED_TRUE}" && test -z "${DATA_CLIENT_ENABLED_FALSE}"; then as_fn_error $? "conditional \"DATA_CLIENT_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${EMIES_ENABLED_TRUE}" && test -z "${EMIES_ENABLED_FALSE}"; then as_fn_error $? 
"conditional \"EMIES_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${ARCREST_ENABLED_TRUE}" && test -z "${ARCREST_ENABLED_FALSE}"; then as_fn_error $? "conditional \"ARCREST_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${HED_ENABLED_TRUE}" && test -z "${HED_ENABLED_FALSE}"; then as_fn_error $? "conditional \"HED_ENABLED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi : "${CONFIG_STATUS=./config.status}" ac_write_fail=0 ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files $CONFIG_STATUS" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 $as_echo "$as_me: creating $CONFIG_STATUS" >&6;} as_write_fail=0 cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 #! $SHELL # Generated by $as_me. # Run this file to recreate the current configuration. # Compiler output produced by configure, useful for debugging # configure, is in config.log if it exists. debug=false ac_cs_recheck=false ac_cs_silent=false SHELL=\${CONFIG_SHELL-$SHELL} export SHELL _ASEOF cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. 
as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 
2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" exec 6>&1 ## ----------------------------------- ## ## Main body of $CONFIG_STATUS script. ## ## ----------------------------------- ## _ASEOF test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Save the log message, to keep $0 and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" This file was extended by nordugrid-arc $as_me 6.14.0, which was generated by GNU Autoconf 2.69. 
Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS CONFIG_LINKS = $CONFIG_LINKS CONFIG_COMMANDS = $CONFIG_COMMANDS $ $0 $@ on `(hostname || uname -n) 2>/dev/null | sed 1q` " _ACEOF case $ac_config_files in *" "*) set x $ac_config_files; shift; ac_config_files=$*;; esac case $ac_config_headers in *" "*) set x $ac_config_headers; shift; ac_config_headers=$*;; esac cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # Files that config.status was made for. config_files="$ac_config_files" config_headers="$ac_config_headers" config_commands="$ac_config_commands" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ac_cs_usage="\ \`$as_me' instantiates files and other configuration actions from templates according to the current configuration. Unless the files and actions are specified as TAGs, all are instantiated by default. Usage: $0 [OPTION]... [TAG]... -h, --help print this help, then exit -V, --version print version number and configuration settings, then exit --config print configuration, then exit -q, --quiet, --silent do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions --file=FILE[:TEMPLATE] instantiate the configuration file FILE --header=FILE[:TEMPLATE] instantiate the configuration header FILE Configuration files: $config_files Configuration headers: $config_headers Configuration commands: $config_commands Report bugs to ." _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ nordugrid-arc config.status 6.14.0 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" Copyright (C) 2012 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." ac_pwd='$ac_pwd' srcdir='$srcdir' INSTALL='$INSTALL' MKDIR_P='$MKDIR_P' AWK='$AWK' test -n "\$AWK" || AWK=awk _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # The default lists apply if the user does not specify any file. ac_need_defaults=: while test $# != 0 do case $1 in --*=?*) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` ac_shift=: ;; --*=) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg= ac_shift=: ;; *) ac_option=$1 ac_optarg=$2 ac_shift=shift ;; esac case $ac_option in # Handling of the options. -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) $as_echo "$ac_cs_version"; exit ;; --config | --confi | --conf | --con | --co | --c ) $as_echo "$ac_cs_config"; exit ;; --debug | --debu | --deb | --de | --d | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; '') as_fn_error $? "missing file argument" ;; esac as_fn_append CONFIG_FILES " '$ac_optarg'" ac_need_defaults=false;; --header | --heade | --head | --hea ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; esac as_fn_append CONFIG_HEADERS " '$ac_optarg'" ac_need_defaults=false;; --he | --h) # Conflict between --help and --header as_fn_error $? 
"ambiguous option: \`$1' Try \`$0 --help' for more information.";; --help | --hel | -h ) $as_echo "$ac_cs_usage"; exit ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. -*) as_fn_error $? "unrecognized option: \`$1' Try \`$0 --help' for more information." ;; *) as_fn_append ac_config_targets " $1" ac_need_defaults=false ;; esac shift done ac_configure_extra_args= if $ac_cs_silent; then exec 6>/dev/null ac_configure_extra_args="$ac_configure_extra_args --silent" fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' export CONFIG_SHELL exec "\$@" fi _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. ## _ASBOX $as_echo "$ac_log" } >&5 _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # # INIT-COMMANDS # AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir" # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH sed_quote_subst='$sed_quote_subst' double_quote_subst='$double_quote_subst' delay_variable_subst='$delay_variable_subst' enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`' macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`' macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`' enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`' pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`' enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`' SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`' ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`' PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`' host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`' host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`' host_os='`$ECHO "$host_os" | $SED "$delay_single_quote_subst"`' build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`' build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`' build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`' SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`' Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`' GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`' EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`' FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`' LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`' NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`' LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`' max_cmd_len='`$ECHO "$max_cmd_len" | $SED "$delay_single_quote_subst"`' ac_objext='`$ECHO "$ac_objext" | $SED "$delay_single_quote_subst"`' exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' reload_flag='`$ECHO 
"$reload_flag" | $SED "$delay_single_quote_subst"`' reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`' old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`' lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`' CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`' CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`' compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`' GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`' OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`' libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`' shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`' extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED 
"$delay_single_quote_subst"`' archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`' enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`' export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`' whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`' compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`' old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`' old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`' archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`' archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED "$delay_single_quote_subst"`' module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`' module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`' with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`' allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED "$delay_single_quote_subst"`' no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`' hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`' hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`' hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`' hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`' hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`' hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`' hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`' version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`' runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`' shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`' shlibpath_overrides_runpath='`$ECHO "$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`' libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`' library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`' soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`' install_override_mode='`$ECHO "$install_override_mode" | $SED 
"$delay_single_quote_subst"`' postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`' postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED "$delay_single_quote_subst"`' finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`' finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`' hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`' sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`' sys_lib_dlsearch_path_spec='`$ECHO "$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`' hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`' enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`' enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`' enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`' old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`' striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`' compiler_lib_search_dirs='`$ECHO "$compiler_lib_search_dirs" | $SED "$delay_single_quote_subst"`' predep_objects='`$ECHO "$predep_objects" | $SED "$delay_single_quote_subst"`' postdep_objects='`$ECHO "$postdep_objects" | $SED "$delay_single_quote_subst"`' predeps='`$ECHO "$predeps" | $SED "$delay_single_quote_subst"`' postdeps='`$ECHO "$postdeps" | $SED "$delay_single_quote_subst"`' compiler_lib_search_path='`$ECHO "$compiler_lib_search_path" | $SED "$delay_single_quote_subst"`' LD_CXX='`$ECHO "$LD_CXX" | $SED "$delay_single_quote_subst"`' reload_flag_CXX='`$ECHO "$reload_flag_CXX" | $SED "$delay_single_quote_subst"`' reload_cmds_CXX='`$ECHO "$reload_cmds_CXX" | $SED "$delay_single_quote_subst"`' old_archive_cmds_CXX='`$ECHO "$old_archive_cmds_CXX" | $SED "$delay_single_quote_subst"`' compiler_CXX='`$ECHO "$compiler_CXX" | $SED "$delay_single_quote_subst"`' GCC_CXX='`$ECHO "$GCC_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "$lt_prog_compiler_no_builtin_flag_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_pic_CXX='`$ECHO "$lt_prog_compiler_pic_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_static_CXX='`$ECHO "$lt_prog_compiler_static_CXX" | $SED "$delay_single_quote_subst"`' lt_cv_prog_compiler_c_o_CXX='`$ECHO "$lt_cv_prog_compiler_c_o_CXX" | $SED "$delay_single_quote_subst"`' archive_cmds_need_lc_CXX='`$ECHO "$archive_cmds_need_lc_CXX" | $SED "$delay_single_quote_subst"`' enable_shared_with_static_runtimes_CXX='`$ECHO "$enable_shared_with_static_runtimes_CXX" | $SED "$delay_single_quote_subst"`' export_dynamic_flag_spec_CXX='`$ECHO "$export_dynamic_flag_spec_CXX" | $SED "$delay_single_quote_subst"`' whole_archive_flag_spec_CXX='`$ECHO "$whole_archive_flag_spec_CXX" | $SED "$delay_single_quote_subst"`' compiler_needs_object_CXX='`$ECHO "$compiler_needs_object_CXX" | $SED "$delay_single_quote_subst"`' old_archive_from_new_cmds_CXX='`$ECHO "$old_archive_from_new_cmds_CXX" | $SED "$delay_single_quote_subst"`' old_archive_from_expsyms_cmds_CXX='`$ECHO "$old_archive_from_expsyms_cmds_CXX" | $SED "$delay_single_quote_subst"`' archive_cmds_CXX='`$ECHO "$archive_cmds_CXX" | $SED "$delay_single_quote_subst"`' archive_expsym_cmds_CXX='`$ECHO "$archive_expsym_cmds_CXX" | $SED "$delay_single_quote_subst"`' module_cmds_CXX='`$ECHO "$module_cmds_CXX" | $SED 
"$delay_single_quote_subst"`' module_expsym_cmds_CXX='`$ECHO "$module_expsym_cmds_CXX" | $SED "$delay_single_quote_subst"`' with_gnu_ld_CXX='`$ECHO "$with_gnu_ld_CXX" | $SED "$delay_single_quote_subst"`' allow_undefined_flag_CXX='`$ECHO "$allow_undefined_flag_CXX" | $SED "$delay_single_quote_subst"`' no_undefined_flag_CXX='`$ECHO "$no_undefined_flag_CXX" | $SED "$delay_single_quote_subst"`' hardcode_libdir_flag_spec_CXX='`$ECHO "$hardcode_libdir_flag_spec_CXX" | $SED "$delay_single_quote_subst"`' hardcode_libdir_separator_CXX='`$ECHO "$hardcode_libdir_separator_CXX" | $SED "$delay_single_quote_subst"`' hardcode_direct_CXX='`$ECHO "$hardcode_direct_CXX" | $SED "$delay_single_quote_subst"`' hardcode_direct_absolute_CXX='`$ECHO "$hardcode_direct_absolute_CXX" | $SED "$delay_single_quote_subst"`' hardcode_minus_L_CXX='`$ECHO "$hardcode_minus_L_CXX" | $SED "$delay_single_quote_subst"`' hardcode_shlibpath_var_CXX='`$ECHO "$hardcode_shlibpath_var_CXX" | $SED "$delay_single_quote_subst"`' hardcode_automatic_CXX='`$ECHO "$hardcode_automatic_CXX" | $SED "$delay_single_quote_subst"`' inherit_rpath_CXX='`$ECHO "$inherit_rpath_CXX" | $SED "$delay_single_quote_subst"`' link_all_deplibs_CXX='`$ECHO "$link_all_deplibs_CXX" | $SED "$delay_single_quote_subst"`' always_export_symbols_CXX='`$ECHO "$always_export_symbols_CXX" | $SED "$delay_single_quote_subst"`' export_symbols_cmds_CXX='`$ECHO "$export_symbols_cmds_CXX" | $SED "$delay_single_quote_subst"`' exclude_expsyms_CXX='`$ECHO "$exclude_expsyms_CXX" | $SED "$delay_single_quote_subst"`' include_expsyms_CXX='`$ECHO "$include_expsyms_CXX" | $SED "$delay_single_quote_subst"`' prelink_cmds_CXX='`$ECHO "$prelink_cmds_CXX" | $SED "$delay_single_quote_subst"`' postlink_cmds_CXX='`$ECHO "$postlink_cmds_CXX" | $SED "$delay_single_quote_subst"`' file_list_spec_CXX='`$ECHO "$file_list_spec_CXX" | $SED "$delay_single_quote_subst"`' hardcode_action_CXX='`$ECHO "$hardcode_action_CXX" | $SED "$delay_single_quote_subst"`' compiler_lib_search_dirs_CXX='`$ECHO "$compiler_lib_search_dirs_CXX" | $SED "$delay_single_quote_subst"`' predep_objects_CXX='`$ECHO "$predep_objects_CXX" | $SED "$delay_single_quote_subst"`' postdep_objects_CXX='`$ECHO "$postdep_objects_CXX" | $SED "$delay_single_quote_subst"`' predeps_CXX='`$ECHO "$predeps_CXX" | $SED "$delay_single_quote_subst"`' postdeps_CXX='`$ECHO "$postdeps_CXX" | $SED "$delay_single_quote_subst"`' compiler_lib_search_path_CXX='`$ECHO "$compiler_lib_search_path_CXX" | $SED "$delay_single_quote_subst"`' LTCC='$LTCC' LTCFLAGS='$LTCFLAGS' compiler='$compiler_DEFAULT' # A function that is used when there is no print builtin or printf. func_fallback_echo () { eval 'cat <<_LTECHO_EOF \$1 _LTECHO_EOF' } # Quote evaled strings. 
for var in SHELL \ ECHO \ PATH_SEPARATOR \ SED \ GREP \ EGREP \ FGREP \ LD \ NM \ LN_S \ lt_SP2NL \ lt_NL2SP \ reload_flag \ OBJDUMP \ deplibs_check_method \ file_magic_cmd \ file_magic_glob \ want_nocaseglob \ DLLTOOL \ sharedlib_from_linklib_cmd \ AR \ AR_FLAGS \ archiver_list_spec \ STRIP \ RANLIB \ CC \ CFLAGS \ compiler \ lt_cv_sys_global_symbol_pipe \ lt_cv_sys_global_symbol_to_cdecl \ lt_cv_sys_global_symbol_to_c_name_address \ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ nm_file_list_spec \ lt_prog_compiler_no_builtin_flag \ lt_prog_compiler_pic \ lt_prog_compiler_wl \ lt_prog_compiler_static \ lt_cv_prog_compiler_c_o \ need_locks \ MANIFEST_TOOL \ DSYMUTIL \ NMEDIT \ LIPO \ OTOOL \ OTOOL64 \ shrext_cmds \ export_dynamic_flag_spec \ whole_archive_flag_spec \ compiler_needs_object \ with_gnu_ld \ allow_undefined_flag \ no_undefined_flag \ hardcode_libdir_flag_spec \ hardcode_libdir_separator \ exclude_expsyms \ include_expsyms \ file_list_spec \ variables_saved_for_relink \ libname_spec \ library_names_spec \ soname_spec \ install_override_mode \ finish_eval \ old_striplib \ striplib \ compiler_lib_search_dirs \ predep_objects \ postdep_objects \ predeps \ postdeps \ compiler_lib_search_path \ LD_CXX \ reload_flag_CXX \ compiler_CXX \ lt_prog_compiler_no_builtin_flag_CXX \ lt_prog_compiler_pic_CXX \ lt_prog_compiler_wl_CXX \ lt_prog_compiler_static_CXX \ lt_cv_prog_compiler_c_o_CXX \ export_dynamic_flag_spec_CXX \ whole_archive_flag_spec_CXX \ compiler_needs_object_CXX \ with_gnu_ld_CXX \ allow_undefined_flag_CXX \ no_undefined_flag_CXX \ hardcode_libdir_flag_spec_CXX \ hardcode_libdir_separator_CXX \ exclude_expsyms_CXX \ include_expsyms_CXX \ file_list_spec_CXX \ compiler_lib_search_dirs_CXX \ predep_objects_CXX \ postdep_objects_CXX \ predeps_CXX \ postdeps_CXX \ compiler_lib_search_path_CXX; do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[\\\\\\\`\\"\\\$]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done # Double-quote double-evaled strings. for var in reload_cmds \ old_postinstall_cmds \ old_postuninstall_cmds \ old_archive_cmds \ extract_expsyms_cmds \ old_archive_from_new_cmds \ old_archive_from_expsyms_cmds \ archive_cmds \ archive_expsym_cmds \ module_cmds \ module_expsym_cmds \ export_symbols_cmds \ prelink_cmds \ postlink_cmds \ postinstall_cmds \ postuninstall_cmds \ finish_cmds \ sys_lib_search_path_spec \ sys_lib_dlsearch_path_spec \ reload_cmds_CXX \ old_archive_cmds_CXX \ old_archive_from_new_cmds_CXX \ old_archive_from_expsyms_cmds_CXX \ archive_cmds_CXX \ archive_expsym_cmds_CXX \ module_cmds_CXX \ module_expsym_cmds_CXX \ export_symbols_cmds_CXX \ prelink_cmds_CXX \ postlink_cmds_CXX; do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[\\\\\\\`\\"\\\$]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done ac_aux_dir='$ac_aux_dir' xsi_shell='$xsi_shell' lt_shell_append='$lt_shell_append' # See if we are running on zsh, and set the options which allow our # commands through without removal of \ escapes INIT. 
if test -n "\${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi PACKAGE='$PACKAGE' VERSION='$VERSION' TIMESTAMP='$TIMESTAMP' RM='$RM' ofile='$ofile' # Capture the value of obsolete ALL_LINGUAS because we need it to compute # POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES, CATALOGS. But hide it # from automake < 1.5. eval 'OBSOLETE_ALL_LINGUAS''="$ALL_LINGUAS"' # Capture the value of LINGUAS because we need it to compute CATALOGS. LINGUAS="${LINGUAS-%UNSET%}" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Handling of arguments. for ac_config_target in $ac_config_targets do case $ac_config_target in "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;; "po-directories") CONFIG_COMMANDS="$CONFIG_COMMANDS po-directories" ;; "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; "include/arc/ArcVersion.h") CONFIG_FILES="$CONFIG_FILES include/arc/ArcVersion.h" ;; "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;; "src/external/Makefile") CONFIG_FILES="$CONFIG_FILES src/external/Makefile" ;; "src/external/cJSON/Makefile") CONFIG_FILES="$CONFIG_FILES src/external/cJSON/Makefile" ;; "src/hed/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/Makefile" ;; "src/hed/libs/compute/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/compute/Makefile" ;; "src/hed/libs/compute/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/compute/test/Makefile" ;; "src/hed/libs/compute/examples/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/compute/examples/Makefile" ;; "src/hed/libs/common/ArcVersion.h") CONFIG_FILES="$CONFIG_FILES src/hed/libs/common/ArcVersion.h" ;; "src/hed/libs/common/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/common/Makefile" ;; "src/hed/libs/common/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/common/test/Makefile" ;; "src/hed/libs/communication/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/communication/Makefile" ;; "src/hed/libs/credential/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/credential/Makefile" ;; "src/hed/libs/credential/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/credential/test/Makefile" ;; "src/hed/libs/credentialmod/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/credentialmod/Makefile" ;; "src/hed/libs/crypto/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/crypto/Makefile" ;; "src/hed/libs/cryptomod/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/cryptomod/Makefile" ;; "src/hed/libs/data/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/data/Makefile" ;; "src/hed/libs/data/cache-clean.1") CONFIG_FILES="$CONFIG_FILES src/hed/libs/data/cache-clean.1" ;; "src/hed/libs/data/cache-list.1") CONFIG_FILES="$CONFIG_FILES src/hed/libs/data/cache-list.1" ;; "src/hed/libs/data/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/data/test/Makefile" ;; "src/hed/libs/data/examples/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/data/examples/Makefile" ;; "src/hed/libs/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/Makefile" ;; "src/hed/libs/loader/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/loader/Makefile" ;; "src/hed/libs/loader/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/loader/schema/Makefile" ;; "src/hed/libs/loader/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/loader/test/Makefile" ;; "src/hed/libs/message/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/message/Makefile" ;; "src/hed/libs/message/test/Makefile") 
CONFIG_FILES="$CONFIG_FILES src/hed/libs/message/test/Makefile" ;; "src/hed/libs/security/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/security/Makefile" ;; "src/hed/libs/security/ArcPDP/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/security/ArcPDP/Makefile" ;; "src/hed/libs/security/ArcPDP/attr/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/security/ArcPDP/attr/Makefile" ;; "src/hed/libs/security/ArcPDP/policy/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/security/ArcPDP/policy/Makefile" ;; "src/hed/libs/security/ArcPDP/alg/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/security/ArcPDP/alg/Makefile" ;; "src/hed/libs/security/ArcPDP/fn/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/security/ArcPDP/fn/Makefile" ;; "src/hed/libs/credentialstore/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/credentialstore/Makefile" ;; "src/hed/libs/ws-addressing/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/ws-addressing/Makefile" ;; "src/hed/libs/ws-security/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/ws-security/Makefile" ;; "src/hed/libs/ws-security/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/ws-security/test/Makefile" ;; "src/hed/libs/infosys/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/infosys/Makefile" ;; "src/hed/libs/infosys/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/infosys/schema/Makefile" ;; "src/hed/libs/infosys/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/infosys/test/Makefile" ;; "src/hed/libs/delegation/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/delegation/Makefile" ;; "src/hed/libs/delegation/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/delegation/test/Makefile" ;; "src/hed/libs/xmlsec/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/xmlsec/Makefile" ;; "src/hed/libs/globusutils/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/globusutils/Makefile" ;; "src/hed/libs/otokens/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/libs/otokens/Makefile" ;; "src/hed/daemon/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/daemon/Makefile" ;; "src/hed/daemon/scripts/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/daemon/scripts/Makefile" ;; "src/hed/daemon/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/daemon/schema/Makefile" ;; "src/hed/daemon/unix/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/daemon/unix/Makefile" ;; "src/hed/mcc/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/mcc/Makefile" ;; "src/hed/mcc/soap/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/mcc/soap/Makefile" ;; "src/hed/mcc/tcp/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/mcc/tcp/Makefile" ;; "src/hed/mcc/tcp/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/mcc/tcp/schema/Makefile" ;; "src/hed/mcc/http/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/mcc/http/Makefile" ;; "src/hed/mcc/http/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/mcc/http/schema/Makefile" ;; "src/hed/mcc/tls/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/mcc/tls/Makefile" ;; "src/hed/mcc/tls/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/mcc/tls/schema/Makefile" ;; "src/hed/mcc/msgvalidator/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/mcc/msgvalidator/Makefile" ;; "src/hed/mcc/msgvalidator/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/mcc/msgvalidator/schema/Makefile" ;; "src/hed/acc/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/Makefile" ;; "src/hed/acc/GRIDFTPJOB/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/GRIDFTPJOB/Makefile" ;; "src/hed/acc/ARCREST/Makefile") CONFIG_FILES="$CONFIG_FILES 
src/hed/acc/ARCREST/Makefile" ;; "src/hed/acc/EMIES/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/EMIES/Makefile" ;; "src/hed/acc/EMIES/arcemiestest.1") CONFIG_FILES="$CONFIG_FILES src/hed/acc/EMIES/arcemiestest.1" ;; "src/hed/acc/EMIES/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/EMIES/schema/Makefile" ;; "src/hed/acc/Broker/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/Broker/Makefile" ;; "src/hed/acc/Broker/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/Broker/test/Makefile" ;; "src/hed/acc/PythonBroker/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/PythonBroker/Makefile" ;; "src/hed/acc/JobDescriptionParser/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/JobDescriptionParser/Makefile" ;; "src/hed/acc/JobDescriptionParser/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/JobDescriptionParser/test/Makefile" ;; "src/hed/acc/ARCHERY/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/ARCHERY/Makefile" ;; "src/hed/acc/LDAP/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/LDAP/Makefile" ;; "src/hed/acc/TEST/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/acc/TEST/Makefile" ;; "src/hed/dmc/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/Makefile" ;; "src/hed/dmc/file/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/file/Makefile" ;; "src/hed/dmc/gridftp/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/gridftp/Makefile" ;; "src/hed/dmc/http/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/http/Makefile" ;; "src/hed/dmc/ldap/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/ldap/Makefile" ;; "src/hed/dmc/srm/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/srm/Makefile" ;; "src/hed/dmc/srm/srmclient/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/srm/srmclient/Makefile" ;; "src/hed/dmc/gfal/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/gfal/Makefile" ;; "src/hed/dmc/xrootd/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/xrootd/Makefile" ;; "src/hed/dmc/mock/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/mock/Makefile" ;; "src/hed/dmc/acix/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/acix/Makefile" ;; "src/hed/dmc/rucio/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/rucio/Makefile" ;; "src/hed/dmc/s3/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/dmc/s3/Makefile" ;; "src/hed/profiles/general/general.xml") CONFIG_FILES="$CONFIG_FILES src/hed/profiles/general/general.xml" ;; "src/hed/shc/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/Makefile" ;; "src/hed/shc/arcpdp/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/arcpdp/Makefile" ;; "src/hed/shc/arcpdp/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/arcpdp/schema/Makefile" ;; "src/hed/shc/xacmlpdp/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/xacmlpdp/Makefile" ;; "src/hed/shc/xacmlpdp/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/xacmlpdp/schema/Makefile" ;; "src/hed/shc/delegationpdp/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/delegationpdp/Makefile" ;; "src/hed/shc/delegationpdp/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/delegationpdp/schema/Makefile" ;; "src/hed/shc/gaclpdp/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/gaclpdp/Makefile" ;; "src/hed/shc/pdpserviceinvoker/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/pdpserviceinvoker/Makefile" ;; "src/hed/shc/pdpserviceinvoker/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/pdpserviceinvoker/schema/Makefile" ;; "src/hed/shc/allowpdp/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/allowpdp/Makefile" ;; "src/hed/shc/denypdp/Makefile") 
CONFIG_FILES="$CONFIG_FILES src/hed/shc/denypdp/Makefile" ;; "src/hed/shc/simplelistpdp/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/simplelistpdp/Makefile" ;; "src/hed/shc/simplelistpdp/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/simplelistpdp/schema/Makefile" ;; "src/hed/shc/arcauthzsh/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/arcauthzsh/Makefile" ;; "src/hed/shc/arcauthzsh/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/arcauthzsh/schema/Makefile" ;; "src/hed/shc/usernametokensh/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/usernametokensh/Makefile" ;; "src/hed/shc/usernametokensh/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/usernametokensh/schema/Makefile" ;; "src/hed/shc/x509tokensh/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/x509tokensh/Makefile" ;; "src/hed/shc/x509tokensh/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/x509tokensh/schema/Makefile" ;; "src/hed/shc/samltokensh/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/samltokensh/Makefile" ;; "src/hed/shc/samltokensh/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/samltokensh/schema/Makefile" ;; "src/hed/shc/saml2sso_assertionconsumersh/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/saml2sso_assertionconsumersh/Makefile" ;; "src/hed/shc/delegationsh/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/delegationsh/Makefile" ;; "src/hed/shc/delegationsh/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/delegationsh/schema/Makefile" ;; "src/hed/shc/legacy/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/legacy/Makefile" ;; "src/hed/shc/legacy/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/legacy/schema/Makefile" ;; "src/hed/shc/otokens/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/shc/otokens/Makefile" ;; "src/hed/identitymap/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/identitymap/Makefile" ;; "src/hed/identitymap/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/hed/identitymap/schema/Makefile" ;; "src/libs/Makefile") CONFIG_FILES="$CONFIG_FILES src/libs/Makefile" ;; "src/libs/data-staging/Makefile") CONFIG_FILES="$CONFIG_FILES src/libs/data-staging/Makefile" ;; "src/libs/data-staging/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/libs/data-staging/test/Makefile" ;; "src/libs/data-staging/examples/Makefile") CONFIG_FILES="$CONFIG_FILES src/libs/data-staging/examples/Makefile" ;; "src/services/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/Makefile" ;; "src/services/a-rex/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/Makefile" ;; "src/services/a-rex/arc-arex") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/arc-arex" ;; "src/services/a-rex/arc-arex.service") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/arc-arex.service" ;; "src/services/a-rex/arc-arex-start") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/arc-arex-start" ;; "src/services/a-rex/arc-arex-ws") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/arc-arex-ws" ;; "src/services/a-rex/arc-arex-ws.service") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/arc-arex-ws.service" ;; "src/services/a-rex/arc-arex-ws-start") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/arc-arex-ws-start" ;; "src/services/a-rex/a-rex-backtrace-collect") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/a-rex-backtrace-collect" ;; "src/services/a-rex/a-rex-backtrace-collect.8") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/a-rex-backtrace-collect.8" ;; "src/services/a-rex/perferator") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/perferator" ;; 
"src/services/a-rex/grid-manager/arc-blahp-logger.8") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/arc-blahp-logger.8" ;; "src/services/a-rex/grid-manager/gm-jobs.8") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/gm-jobs.8" ;; "src/services/a-rex/grid-manager/gm-delegations-converter.8") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/gm-delegations-converter.8" ;; "src/services/a-rex/rest/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/rest/Makefile" ;; "src/services/a-rex/delegation/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/delegation/Makefile" ;; "src/services/a-rex/grid-manager/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/Makefile" ;; "src/services/a-rex/grid-manager/accounting/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/accounting/Makefile" ;; "src/services/a-rex/grid-manager/conf/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/conf/Makefile" ;; "src/services/a-rex/grid-manager/files/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/files/Makefile" ;; "src/services/a-rex/grid-manager/jobs/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/jobs/Makefile" ;; "src/services/a-rex/grid-manager/jobplugin/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/jobplugin/Makefile" ;; "src/services/a-rex/grid-manager/log/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/log/Makefile" ;; "src/services/a-rex/grid-manager/mail/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/mail/Makefile" ;; "src/services/a-rex/grid-manager/misc/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/misc/Makefile" ;; "src/services/a-rex/grid-manager/run/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/run/Makefile" ;; "src/services/a-rex/internaljobplugin/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/internaljobplugin/Makefile" ;; "src/services/a-rex/grid-manager/arc-config-check.1") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/grid-manager/arc-config-check.1" ;; "src/services/a-rex/infoproviders/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/infoproviders/Makefile" ;; "src/services/a-rex/infoproviders/CEinfo.pl") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/infoproviders/CEinfo.pl" ;; "src/services/a-rex/infoproviders/ConfigCentral.pm") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/infoproviders/ConfigCentral.pm" ;; "src/services/a-rex/infoproviders/PerfData.pl") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/infoproviders/PerfData.pl" ;; "src/services/a-rex/infoproviders/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/infoproviders/test/Makefile" ;; "src/services/a-rex/lrms/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/Makefile" ;; "src/services/a-rex/lrms/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/test/Makefile" ;; "src/services/a-rex/lrms/arc/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/arc/Makefile" ;; "src/services/a-rex/lrms/arc/lrms/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/arc/lrms/Makefile" ;; "src/services/a-rex/lrms/arc/lrms/common/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/arc/lrms/common/Makefile" ;; "src/services/a-rex/lrms/lrms_common.sh") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/lrms_common.sh" ;; "src/services/a-rex/lrms/condor/Makefile") CONFIG_FILES="$CONFIG_FILES 
src/services/a-rex/lrms/condor/Makefile" ;; "src/services/a-rex/lrms/condor/scan-condor-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/condor/scan-condor-job" ;; "src/services/a-rex/lrms/condor/cancel-condor-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/condor/cancel-condor-job" ;; "src/services/a-rex/lrms/condor/submit-condor-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/condor/submit-condor-job" ;; "src/services/a-rex/lrms/fork/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/fork/Makefile" ;; "src/services/a-rex/lrms/fork/scan-fork-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/fork/scan-fork-job" ;; "src/services/a-rex/lrms/fork/submit-fork-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/fork/submit-fork-job" ;; "src/services/a-rex/lrms/fork/cancel-fork-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/fork/cancel-fork-job" ;; "src/services/a-rex/lrms/ll/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/ll/Makefile" ;; "src/services/a-rex/lrms/ll/submit-ll-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/ll/submit-ll-job" ;; "src/services/a-rex/lrms/ll/cancel-ll-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/ll/cancel-ll-job" ;; "src/services/a-rex/lrms/ll/scan-ll-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/ll/scan-ll-job" ;; "src/services/a-rex/lrms/lsf/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/lsf/Makefile" ;; "src/services/a-rex/lrms/lsf/submit-lsf-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/lsf/submit-lsf-job" ;; "src/services/a-rex/lrms/lsf/cancel-lsf-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/lsf/cancel-lsf-job" ;; "src/services/a-rex/lrms/lsf/scan-lsf-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/lsf/scan-lsf-job" ;; "src/services/a-rex/lrms/pbs/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/pbs/Makefile" ;; "src/services/a-rex/lrms/pbs/submit-pbs-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/pbs/submit-pbs-job" ;; "src/services/a-rex/lrms/pbs/cancel-pbs-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/pbs/cancel-pbs-job" ;; "src/services/a-rex/lrms/pbs/scan-pbs-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/pbs/scan-pbs-job" ;; "src/services/a-rex/lrms/pbspro/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/pbspro/Makefile" ;; "src/services/a-rex/lrms/pbspro/submit-pbspro-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/pbspro/submit-pbspro-job" ;; "src/services/a-rex/lrms/pbspro/cancel-pbspro-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/pbspro/cancel-pbspro-job" ;; "src/services/a-rex/lrms/pbspro/scan-pbspro-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/pbspro/scan-pbspro-job" ;; "src/services/a-rex/lrms/sge/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/sge/Makefile" ;; "src/services/a-rex/lrms/sge/submit-sge-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/sge/submit-sge-job" ;; "src/services/a-rex/lrms/sge/scan-sge-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/sge/scan-sge-job" ;; "src/services/a-rex/lrms/sge/cancel-sge-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/sge/cancel-sge-job" ;; "src/services/a-rex/lrms/slurm/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurm/Makefile" ;; "src/services/a-rex/lrms/slurm/submit-SLURM-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurm/submit-SLURM-job" ;; 
"src/services/a-rex/lrms/slurm/scan-SLURM-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurm/scan-SLURM-job" ;; "src/services/a-rex/lrms/slurm/cancel-SLURM-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurm/cancel-SLURM-job" ;; "src/services/a-rex/lrms/slurm/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurm/test/Makefile" ;; "src/services/a-rex/lrms/slurm/test/scan/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurm/test/scan/Makefile" ;; "src/services/a-rex/lrms/slurm/test/submit/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurm/test/submit/Makefile" ;; "src/services/a-rex/lrms/boinc/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/boinc/Makefile" ;; "src/services/a-rex/lrms/boinc/submit-boinc-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/boinc/submit-boinc-job" ;; "src/services/a-rex/lrms/boinc/scan-boinc-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/boinc/scan-boinc-job" ;; "src/services/a-rex/lrms/boinc/cancel-boinc-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/boinc/cancel-boinc-job" ;; "src/services/a-rex/lrms/slurmpy/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurmpy/Makefile" ;; "src/services/a-rex/lrms/slurmpy/submit-SLURMPY-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurmpy/submit-SLURMPY-job" ;; "src/services/a-rex/lrms/slurmpy/scan-SLURMPY-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurmpy/scan-SLURMPY-job" ;; "src/services/a-rex/lrms/slurmpy/cancel-SLURMPY-job") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurmpy/cancel-SLURMPY-job" ;; "src/services/a-rex/lrms/slurmpy/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurmpy/test/Makefile" ;; "src/services/a-rex/lrms/slurmpy/test/submit/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurmpy/test/submit/Makefile" ;; "src/services/a-rex/lrms/slurmpy/test/scan/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/lrms/slurmpy/test/scan/Makefile" ;; "src/services/a-rex/rte/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/rte/Makefile" ;; "src/services/a-rex/rte/ENV/PROXY") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/rte/ENV/PROXY" ;; "src/services/a-rex/rte/ENV/CANDYPOND") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/rte/ENV/CANDYPOND" ;; "src/services/a-rex/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/a-rex/schema/Makefile" ;; "src/services/acix/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/acix/Makefile" ;; "src/services/acix/scanner/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/acix/scanner/Makefile" ;; "src/services/acix/scanner/arc-acix-scanner") CONFIG_FILES="$CONFIG_FILES src/services/acix/scanner/arc-acix-scanner" ;; "src/services/acix/scanner/arc-acix-scanner-start") CONFIG_FILES="$CONFIG_FILES src/services/acix/scanner/arc-acix-scanner-start" ;; "src/services/acix/scanner/arc-acix-scanner.service") CONFIG_FILES="$CONFIG_FILES src/services/acix/scanner/arc-acix-scanner.service" ;; "src/services/acix/scanner/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/acix/scanner/test/Makefile" ;; "src/services/acix/core/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/acix/core/Makefile" ;; "src/services/acix/core/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/acix/core/test/Makefile" ;; "src/services/acix/indexserver/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/acix/indexserver/Makefile" ;; "src/services/acix/indexserver/arc-acix-index") 
CONFIG_FILES="$CONFIG_FILES src/services/acix/indexserver/arc-acix-index" ;; "src/services/acix/indexserver/arc-acix-index-start") CONFIG_FILES="$CONFIG_FILES src/services/acix/indexserver/arc-acix-index-start" ;; "src/services/acix/indexserver/arc-acix-index.service") CONFIG_FILES="$CONFIG_FILES src/services/acix/indexserver/arc-acix-index.service" ;; "src/services/acix/indexserver/test/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/acix/indexserver/test/Makefile" ;; "src/services/candypond/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/candypond/Makefile" ;; "src/services/data-staging/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/data-staging/Makefile" ;; "src/services/data-staging/arc-datadelivery-service") CONFIG_FILES="$CONFIG_FILES src/services/data-staging/arc-datadelivery-service" ;; "src/services/data-staging/arc-datadelivery-service.service") CONFIG_FILES="$CONFIG_FILES src/services/data-staging/arc-datadelivery-service.service" ;; "src/services/data-staging/arc-datadelivery-service-start") CONFIG_FILES="$CONFIG_FILES src/services/data-staging/arc-datadelivery-service-start" ;; "src/services/gridftpd/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/gridftpd/Makefile" ;; "src/services/gridftpd/arc-gridftpd") CONFIG_FILES="$CONFIG_FILES src/services/gridftpd/arc-gridftpd" ;; "src/services/gridftpd/arc-gridftpd.service") CONFIG_FILES="$CONFIG_FILES src/services/gridftpd/arc-gridftpd.service" ;; "src/services/gridftpd/arc-gridftpd-start") CONFIG_FILES="$CONFIG_FILES src/services/gridftpd/arc-gridftpd-start" ;; "src/services/gridftpd/gridftpd.8") CONFIG_FILES="$CONFIG_FILES src/services/gridftpd/gridftpd.8" ;; "src/services/gridftpd/auth/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/gridftpd/auth/Makefile" ;; "src/services/gridftpd/conf/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/gridftpd/conf/Makefile" ;; "src/services/gridftpd/misc/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/gridftpd/misc/Makefile" ;; "src/services/gridftpd/run/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/gridftpd/run/Makefile" ;; "src/services/gridftpd/fileplugin/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/gridftpd/fileplugin/Makefile" ;; "src/services/ldap-infosys/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/ldap-infosys/Makefile" ;; "src/services/ldap-infosys/create-bdii-config") CONFIG_FILES="$CONFIG_FILES src/services/ldap-infosys/create-bdii-config" ;; "src/services/ldap-infosys/create-slapd-config") CONFIG_FILES="$CONFIG_FILES src/services/ldap-infosys/create-slapd-config" ;; "src/services/ldap-infosys/arc-infosys-ldap") CONFIG_FILES="$CONFIG_FILES src/services/ldap-infosys/arc-infosys-ldap" ;; "src/services/ldap-infosys/arc-infosys-ldap.service") CONFIG_FILES="$CONFIG_FILES src/services/ldap-infosys/arc-infosys-ldap.service" ;; "src/services/ldap-infosys/arc-infosys-ldap-slapd.service") CONFIG_FILES="$CONFIG_FILES src/services/ldap-infosys/arc-infosys-ldap-slapd.service" ;; "src/services/monitor/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/monitor/Makefile" ;; "src/services/monitor/monitor") CONFIG_FILES="$CONFIG_FILES src/services/monitor/monitor" ;; "src/services/monitor/README") CONFIG_FILES="$CONFIG_FILES src/services/monitor/README" ;; "src/services/monitor/man/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/monitor/man/Makefile" ;; "src/services/monitor/man/monitor.7") CONFIG_FILES="$CONFIG_FILES src/services/monitor/man/monitor.7" ;; "src/services/monitor/includes/Makefile") CONFIG_FILES="$CONFIG_FILES 
src/services/monitor/includes/Makefile" ;; "src/services/monitor/mon-icons/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/monitor/mon-icons/Makefile" ;; "src/services/monitor/lang/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/monitor/lang/Makefile" ;; "src/services/examples/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/examples/Makefile" ;; "src/services/examples/echo_python/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/examples/echo_python/Makefile" ;; "src/services/wrappers/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/wrappers/Makefile" ;; "src/services/wrappers/python/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/wrappers/python/Makefile" ;; "src/services/wrappers/python/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/services/wrappers/python/schema/Makefile" ;; "src/clients/Makefile") CONFIG_FILES="$CONFIG_FILES src/clients/Makefile" ;; "src/clients/data/Makefile") CONFIG_FILES="$CONFIG_FILES src/clients/data/Makefile" ;; "src/clients/data/arccp.1") CONFIG_FILES="$CONFIG_FILES src/clients/data/arccp.1" ;; "src/clients/data/arcls.1") CONFIG_FILES="$CONFIG_FILES src/clients/data/arcls.1" ;; "src/clients/data/arcrm.1") CONFIG_FILES="$CONFIG_FILES src/clients/data/arcrm.1" ;; "src/clients/data/arcmkdir.1") CONFIG_FILES="$CONFIG_FILES src/clients/data/arcmkdir.1" ;; "src/clients/data/arcrename.1") CONFIG_FILES="$CONFIG_FILES src/clients/data/arcrename.1" ;; "src/clients/credentials/Makefile") CONFIG_FILES="$CONFIG_FILES src/clients/credentials/Makefile" ;; "src/clients/credentials/arcproxy.1") CONFIG_FILES="$CONFIG_FILES src/clients/credentials/arcproxy.1" ;; "src/clients/compute/Makefile") CONFIG_FILES="$CONFIG_FILES src/clients/compute/Makefile" ;; "src/clients/compute/arcstat.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arcstat.1" ;; "src/clients/compute/arcinfo.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arcinfo.1" ;; "src/clients/compute/arcsub.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arcsub.1" ;; "src/clients/compute/arcclean.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arcclean.1" ;; "src/clients/compute/arckill.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arckill.1" ;; "src/clients/compute/arcget.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arcget.1" ;; "src/clients/compute/arccat.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arccat.1" ;; "src/clients/compute/arcresub.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arcresub.1" ;; "src/clients/compute/arcsync.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arcsync.1" ;; "src/clients/compute/arcrenew.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arcrenew.1" ;; "src/clients/compute/arcresume.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arcresume.1" ;; "src/clients/compute/arctest.1") CONFIG_FILES="$CONFIG_FILES src/clients/compute/arctest.1" ;; "src/tests/Makefile") CONFIG_FILES="$CONFIG_FILES src/tests/Makefile" ;; "src/tests/echo/Makefile") CONFIG_FILES="$CONFIG_FILES src/tests/echo/Makefile" ;; "src/tests/echo/perftest.1") CONFIG_FILES="$CONFIG_FILES src/tests/echo/perftest.1" ;; "src/tests/echo/echo_service.xml.example") CONFIG_FILES="$CONFIG_FILES src/tests/echo/echo_service.xml.example" ;; "src/tests/echo/schema/Makefile") CONFIG_FILES="$CONFIG_FILES src/tests/echo/schema/Makefile" ;; "src/tests/policy-delegation/Makefile") CONFIG_FILES="$CONFIG_FILES src/tests/policy-delegation/Makefile" ;; "src/tests/delegation/Makefile") CONFIG_FILES="$CONFIG_FILES src/tests/delegation/Makefile" ;; "src/tests/translator/Makefile") 
CONFIG_FILES="$CONFIG_FILES src/tests/translator/Makefile" ;; "src/tests/xpath/Makefile") CONFIG_FILES="$CONFIG_FILES src/tests/xpath/Makefile" ;; "src/tests/arcpolicy/Makefile") CONFIG_FILES="$CONFIG_FILES src/tests/arcpolicy/Makefile" ;; "src/tests/perf/Makefile") CONFIG_FILES="$CONFIG_FILES src/tests/perf/Makefile" ;; "src/tests/perf/arcperftest.1") CONFIG_FILES="$CONFIG_FILES src/tests/perf/arcperftest.1" ;; "src/tests/client/Makefile") CONFIG_FILES="$CONFIG_FILES src/tests/client/Makefile" ;; "src/tests/lrms/Makefile") CONFIG_FILES="$CONFIG_FILES src/tests/lrms/Makefile" ;; "src/utils/archery/Makefile") CONFIG_FILES="$CONFIG_FILES src/utils/archery/Makefile" ;; "src/utils/archery/archery-manage") CONFIG_FILES="$CONFIG_FILES src/utils/archery/archery-manage" ;; "src/utils/python/Makefile") CONFIG_FILES="$CONFIG_FILES src/utils/python/Makefile" ;; "src/utils/python/arccandypond") CONFIG_FILES="$CONFIG_FILES src/utils/python/arccandypond" ;; "src/utils/python/arcctl") CONFIG_FILES="$CONFIG_FILES src/utils/python/arcctl" ;; "src/utils/python/arcctl.1") CONFIG_FILES="$CONFIG_FILES src/utils/python/arcctl.1" ;; "src/utils/python/jura-ng") CONFIG_FILES="$CONFIG_FILES src/utils/python/jura-ng" ;; "src/utils/python/arc/Makefile") CONFIG_FILES="$CONFIG_FILES src/utils/python/arc/Makefile" ;; "src/utils/python/arc/gen_paths_dist.sh") CONFIG_FILES="$CONFIG_FILES src/utils/python/arc/gen_paths_dist.sh" ;; "src/utils/python/arc/utils/Makefile") CONFIG_FILES="$CONFIG_FILES src/utils/python/arc/utils/Makefile" ;; "src/utils/python/arc/control/Makefile") CONFIG_FILES="$CONFIG_FILES src/utils/python/arc/control/Makefile" ;; "src/utils/hed/wsdl2hed.1") CONFIG_FILES="$CONFIG_FILES src/utils/hed/wsdl2hed.1" ;; "src/utils/hed/arcplugin.1") CONFIG_FILES="$CONFIG_FILES src/utils/hed/arcplugin.1" ;; "src/utils/hed/Makefile") CONFIG_FILES="$CONFIG_FILES src/utils/hed/Makefile" ;; "src/utils/gridmap/nordugridmap.cron") CONFIG_FILES="$CONFIG_FILES src/utils/gridmap/nordugridmap.cron" ;; "src/utils/gridmap/nordugridmap.8") CONFIG_FILES="$CONFIG_FILES src/utils/gridmap/nordugridmap.8" ;; "src/utils/gridmap/Makefile") CONFIG_FILES="$CONFIG_FILES src/utils/gridmap/Makefile" ;; "src/utils/Makefile") CONFIG_FILES="$CONFIG_FILES src/utils/Makefile" ;; "src/wn/Makefile") CONFIG_FILES="$CONFIG_FILES src/wn/Makefile" ;; "src/doc/Makefile") CONFIG_FILES="$CONFIG_FILES src/doc/Makefile" ;; "src/doc/arc.conf.5") CONFIG_FILES="$CONFIG_FILES src/doc/arc.conf.5" ;; "swig/Makefile") CONFIG_FILES="$CONFIG_FILES swig/Makefile" ;; "python/Makefile") CONFIG_FILES="$CONFIG_FILES python/Makefile" ;; "python/Doxyfile.api") CONFIG_FILES="$CONFIG_FILES python/Doxyfile.api" ;; "python/python/Makefile") CONFIG_FILES="$CONFIG_FILES python/python/Makefile" ;; "python/python/arc/Makefile") CONFIG_FILES="$CONFIG_FILES python/python/arc/Makefile" ;; "python/altpython/Makefile") CONFIG_FILES="$CONFIG_FILES python/altpython/Makefile" ;; "python/altpython/arc/Makefile") CONFIG_FILES="$CONFIG_FILES python/altpython/arc/Makefile" ;; "python/test/Makefile") CONFIG_FILES="$CONFIG_FILES python/test/Makefile" ;; "python/test/python/Makefile") CONFIG_FILES="$CONFIG_FILES python/test/python/Makefile" ;; "python/test/altpython/Makefile") CONFIG_FILES="$CONFIG_FILES python/test/altpython/Makefile" ;; "python/examples/Makefile") CONFIG_FILES="$CONFIG_FILES python/examples/Makefile" ;; "po/Makefile.in") CONFIG_FILES="$CONFIG_FILES po/Makefile.in" ;; "include/Makefile") CONFIG_FILES="$CONFIG_FILES include/Makefile" ;; "debian/Makefile") 
CONFIG_FILES="$CONFIG_FILES debian/Makefile" ;; "debian/changelog.deb") CONFIG_FILES="$CONFIG_FILES debian/changelog.deb" ;; "nordugrid-arc.spec") CONFIG_FILES="$CONFIG_FILES nordugrid-arc.spec" ;; "src/hed/daemon/arched.8") CONFIG_FILES="$CONFIG_FILES src/hed/daemon/arched.8" ;; "src/hed/daemon/scripts/arched") CONFIG_FILES="$CONFIG_FILES src/hed/daemon/scripts/arched" ;; "src/hed/daemon/scripts/arched.service") CONFIG_FILES="$CONFIG_FILES src/hed/daemon/scripts/arched.service" ;; "src/hed/daemon/scripts/arched-start") CONFIG_FILES="$CONFIG_FILES src/hed/daemon/scripts/arched-start" ;; "src/doxygen/Makefile") CONFIG_FILES="$CONFIG_FILES src/doxygen/Makefile" ;; "src/utils/python/arcconfig-parser") CONFIG_FILES="$CONFIG_FILES src/utils/python/arcconfig-parser" ;; *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; esac done # If the user did not use the arguments to specify the items to instantiate, # then the envvar interface is used. Set only those that are not. # We use the long form for the default assignment because of an extremely # bizarre bug on SunOS 4.1.3. if $ac_need_defaults; then test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands fi # Have a temporary directory for convenience. Make it in the build tree # simply because there is no reason against having it here, and in addition, # creating and moving files from /tmp can sometimes cause problems. # Hook for its removal unless debugging. # Note that there is a small window in which the directory will not be cleaned: # after its creation but before its name has been assigned to `$tmp'. $debug || { tmp= ac_tmp= trap 'exit_status=$? : "${ac_tmp:=$tmp}" { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status ' 0 trap 'as_fn_exit 1' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. { tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && test -d "$tmp" } || { tmp=./conf$$-$RANDOM (umask 077 && mkdir "$tmp") } || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 ac_tmp=$tmp # Set up the scripts for CONFIG_FILES section. # No need to generate them if there are no CONFIG_FILES. # This happens for instance with `./config.status config.h'. if test -n "$CONFIG_FILES"; then ac_cr=`echo X | tr X '\015'` # On cygwin, bash can eat \r inside `` if the user requested igncr. # But we know of no other shell where ac_cr would be empty at this # point, so we can use a bashism as a fallback. if test "x$ac_cr" = x; then eval ac_cr=\$\'\\r\' fi ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then ac_cs_awk_cr='\\r' else ac_cs_awk_cr=$ac_cr fi echo 'BEGIN {' >"$ac_tmp/subs1.awk" && _ACEOF { echo "cat >conf$$subs.awk <<_ACEOF" && echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && echo "_ACEOF" } >conf$$subs.sh || as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` ac_delim='%!_!# ' for ac_last_try in false false false false false :; do . ./conf$$subs.sh || as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` if test $ac_delim_n = $ac_delim_num; then break elif $ac_last_try; then as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" fi done rm -f conf$$subs.sh cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && _ACEOF sed -n ' h s/^/S["/; s/!.*/"]=/ p g s/^[^!]*!// :repl t repl s/'"$ac_delim"'$// t delim :nl h s/\(.\{148\}\)..*/\1/ t more1 s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ p n b repl :more1 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t nl :delim h s/\(.\{148\}\)..*/\1/ t more2 s/["\\]/\\&/g; s/^/"/; s/$/"/ p b :more2 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t delim ' >$CONFIG_STATUS || ac_write_fail=1 rm -f conf$$subs.awk cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 _ACAWK cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && for (key in S) S_is_set[key] = 1 FS = "" } { line = $ 0 nfields = split(line, field, "@") substed = 0 len = length(field[1]) for (i = 2; i < nfields; i++) { key = field[i] keylen = length(key) if (S_is_set[key]) { value = S[key] line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) len += length(value) + length(field[++i]) substed = 1 } else len += 1 + keylen } print line } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" else cat fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 _ACEOF # VPATH may cause trouble with some makes, so we remove sole $(srcdir), # ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and # trailing colons and then remove the whole line if VPATH becomes empty # (actually we leave an empty line to preserve line numbers). if test "x$srcdir" = x.; then ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ h s/// s/^/:/ s/[ ]*$/:/ s/:\$(srcdir):/:/g s/:\${srcdir}:/:/g s/:@srcdir@:/:/g s/^:*// s/:*$// x s/\(=[ ]*\).*/\1/ G s/\n// s/^[^=]*=[ ]*$// }' fi cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 fi # test -n "$CONFIG_FILES" # Set up the scripts for CONFIG_HEADERS section. # No need to generate them if there are no CONFIG_HEADERS. # This happens for instance with `./config.status Makefile'. if test -n "$CONFIG_HEADERS"; then cat >"$ac_tmp/defines.awk" <<\_ACAWK || BEGIN { _ACEOF # Transform confdefs.h into an awk script `defines.awk', embedded as # here-document in config.status, that substitutes the proper values into # config.h.in to produce config.h. # Create a delimiter string that does not exist in confdefs.h, to ease # handling of long lines. ac_delim='%!_!# ' for ac_last_try in false false :; do ac_tt=`sed -n "/$ac_delim/p" confdefs.h` if test -z "$ac_tt"; then break elif $ac_last_try; then as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done # For the awk script, D is an array of macro values keyed by name, # likewise P contains macro parameters if any. Preserve backslash # newline sequences. 
ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* sed -n ' s/.\{148\}/&'"$ac_delim"'/g t rset :rset s/^[ ]*#[ ]*define[ ][ ]*/ / t def d :def s/\\$// t bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3"/p s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p d :bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3\\\\\\n"\\/p t cont s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p t cont d :cont n s/.\{148\}/&'"$ac_delim"'/g t clear :clear s/\\$// t bsnlc s/["\\]/\\&/g; s/^/"/; s/$/"/p d :bsnlc s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p b cont ' >$CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 for (key in D) D_is_set[key] = 1 FS = "" } /^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { line = \$ 0 split(line, arg, " ") if (arg[1] == "#") { defundef = arg[2] mac1 = arg[3] } else { defundef = substr(arg[1], 2) mac1 = arg[2] } split(mac1, mac2, "(") #) macro = mac2[1] prefix = substr(line, 1, index(line, defundef) - 1) if (D_is_set[macro]) { # Preserve the white space surrounding the "#". print prefix "define", macro P[macro] D[macro] next } else { # Replace #undef with comments. This is necessary, for example, # in the case of _POSIX_SOURCE, which is predefined and required # on some systems where configure will not decide to define it. if (defundef == "undef") { print "/*", prefix defundef, macro, "*/" next } } } { print } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 fi # test -n "$CONFIG_HEADERS" eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" shift for ac_tag do case $ac_tag in :[FHLC]) ac_mode=$ac_tag; continue;; esac case $ac_mode$ac_tag in :[FHL]*:*);; :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; :[FH]-) ac_tag=-:-;; :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; esac ac_save_IFS=$IFS IFS=: set x $ac_tag IFS=$ac_save_IFS shift ac_file=$1 shift case $ac_mode in :L) ac_source=$1;; :[FH]) ac_file_inputs= for ac_f do case $ac_f in -) ac_f="$ac_tmp/stdin";; *) # Look for the file first in the build tree, then in the source tree # (if the path is not absolute). The absolute path cannot be DOS-style, # because $ac_f cannot contain `:'. test -f "$ac_f" || case $ac_f in [\\/$]*) false;; *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; esac || as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; esac case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac as_fn_append ac_file_inputs " '$ac_f'" done # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ configure_input='Generated from '` $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' `' by configure.' if test x"$ac_file" != x-; then configure_input="$ac_file. $configure_input" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 $as_echo "$as_me: creating $ac_file" >&6;} fi # Neutralize special characters interpreted by sed in replacement strings. case $configure_input in #( *\&* | *\|* | *\\* ) ac_sed_conf_input=`$as_echo "$configure_input" | sed 's/[\\\\&|]/\\\\&/g'`;; #( *) ac_sed_conf_input=$configure_input;; esac case $ac_tag in *:-:* | *:-) cat >"$ac_tmp/stdin" \ || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 ;; esac ;; esac ac_dir=`$as_dirname -- "$ac_file" || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir="$ac_dir"; as_fn_mkdir_p ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix case $ac_mode in :F) # # CONFIG_FILE # case $INSTALL in [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; esac ac_MKDIR_P=$MKDIR_P case $MKDIR_P in [\\/$]* | ?:[\\/]* ) ;; */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; esac _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # If the template does not know about datarootdir, expand it. # FIXME: This hack should be removed a few years after 2.60. ac_datarootdir_hack=; ac_datarootdir_seen= ac_sed_dataroot=' /datarootdir/ { p q } /@datadir@/p /@docdir@/p /@infodir@/p /@localedir@/p /@mandir@/p' case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in *datarootdir*) ac_datarootdir_seen=yes;; *@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 $as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_datarootdir_hack=' s&@datadir@&$datadir&g s&@docdir@&$docdir&g s&@infodir@&$infodir&g s&@localedir@&$localedir&g s&@mandir@&$mandir&g s&\\\${datarootdir}&$datarootdir&g' ;; esac _ACEOF # Neutralize VPATH when `$srcdir' = `.'. # Shell code in configure.ac might set extrasub. # FIXME: do we really want to maintain this feature? 
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_sed_extra="$ac_vpsub $extrasub _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b s|@configure_input@|$ac_sed_conf_input|;t t s&@top_builddir@&$ac_top_builddir_sub&;t t s&@top_build_prefix@&$ac_top_build_prefix&;t t s&@srcdir@&$ac_srcdir&;t t s&@abs_srcdir@&$ac_abs_srcdir&;t t s&@top_srcdir@&$ac_top_srcdir&;t t s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t s&@builddir@&$ac_builddir&;t t s&@abs_builddir@&$ac_abs_builddir&;t t s&@abs_top_builddir@&$ac_abs_top_builddir&;t t s&@INSTALL@&$ac_INSTALL&;t t s&@MKDIR_P@&$ac_MKDIR_P&;t t $ac_datarootdir_hack " eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ "$ac_tmp/out"`; test -z "$ac_out"; } && { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&5 $as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&2;} rm -f "$ac_tmp/stdin" case $ac_file in -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; esac \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; :H) # # CONFIG_HEADER # if test x"$ac_file" != x-; then { $as_echo "/* $configure_input */" \ && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" } >"$ac_tmp/config.h" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 $as_echo "$as_me: $ac_file is unchanged" >&6;} else rm -f "$ac_file" mv "$ac_tmp/config.h" "$ac_file" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 fi else $as_echo "/* $configure_input */" \ && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ || as_fn_error $? "could not create -" "$LINENO" 5 fi # Compute "$ac_file"'s index in $config_headers. _am_arg="$ac_file" _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $_am_arg | $_am_arg:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || $as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$_am_arg" : 'X\(//\)[^/]' \| \ X"$_am_arg" : 'X\(//\)$' \| \ X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$_am_arg" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'`/stamp-h$_am_stamp_count ;; :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 $as_echo "$as_me: executing $ac_file commands" >&6;} ;; esac case $ac_file$ac_mode in "depfiles":C) test x"$AMDEP_TRUE" != x"" || { # Older Autoconf quotes --file arguments for eval, but not when files # are listed without --file. Let's play safe and only enable the eval # if we detect the quoting. case $CONFIG_FILES in *\'*) eval set x "$CONFIG_FILES" ;; *) set x $CONFIG_FILES ;; esac shift for mf do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. 
# We used to match only the files named 'Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # Grep'ing the whole file is not good either: AIX grep has a line # limit of 2048, but all sed's we know have understand at least 4000. if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then dirpart=`$as_dirname -- "$mf" || $as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$mf" : 'X\(//\)[^/]' \| \ X"$mf" : 'X\(//\)$' \| \ X"$mf" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$mf" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running 'make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "$am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`$as_dirname -- "$file" || $as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$file" : 'X\(//\)[^/]' \| \ X"$file" : 'X\(//\)$' \| \ X"$file" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir=$dirpart/$fdir; as_fn_mkdir_p # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done } ;; "libtool":C) # See if we are running on zsh, and set the options which allow our # commands through without removal of \ escapes. if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi cfgfile="${ofile}T" trap "$RM \"$cfgfile\"; exit 1" 1 2 15 $RM "$cfgfile" cat <<_LT_EOF >> "$cfgfile" #! $SHELL # `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. # Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION # Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: # NOTE: Changes made to this file will be lost: look at ltmain.sh. # # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, # 2006, 2007, 2008, 2009, 2010, 2011 Free Software # Foundation, Inc. # Written by Gordon Matzigkeit, 1996 # # This file is part of GNU Libtool. # # GNU Libtool is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of # the License, or (at your option) any later version. # # As a special exception to the GNU General Public License, # if you distribute this file as part of a program or library that # is built using GNU Libtool, you may include this file under the # same distribution terms that you use for the rest of that program. 
# # GNU Libtool is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Libtool; see the file COPYING. If not, a copy # can be downloaded from http://www.gnu.org/licenses/gpl.html, or # obtained by writing to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # The names of the tagged configurations supported by this script. available_tags="CXX " # ### BEGIN LIBTOOL CONFIG # Whether or not to build static libraries. build_old_libs=$enable_static # Which release of libtool.m4 was used? macro_version=$macro_version macro_revision=$macro_revision # Whether or not to build shared libraries. build_libtool_libs=$enable_shared # What type of objects to build. pic_mode=$pic_mode # Whether or not to optimize for fast installation. fast_install=$enable_fast_install # Shell to use when invoking shell scripts. SHELL=$lt_SHELL # An echo program that protects backslashes. ECHO=$lt_ECHO # The PATH separator for the build system. PATH_SEPARATOR=$lt_PATH_SEPARATOR # The host system. host_alias=$host_alias host=$host host_os=$host_os # The build system. build_alias=$build_alias build=$build build_os=$build_os # A sed program that does not truncate output. SED=$lt_SED # Sed that helps us avoid accidentally triggering echo(1) options like -n. Xsed="\$SED -e 1s/^X//" # A grep program that handles long lines. GREP=$lt_GREP # An ERE matcher. EGREP=$lt_EGREP # A literal string matcher. FGREP=$lt_FGREP # A BSD- or MS-compatible name lister. NM=$lt_NM # Whether we need soft or hard links. LN_S=$lt_LN_S # What is the maximum length of a command? max_cmd_len=$max_cmd_len # Object file suffix (normally "o"). objext=$ac_objext # Executable file suffix (normally ""). exeext=$exeext # whether the shell understands "unset". lt_unset=$lt_unset # turn spaces into newlines. SP2NL=$lt_lt_SP2NL # turn newlines into spaces. NL2SP=$lt_lt_NL2SP # convert \$build file names to \$host format. to_host_file_cmd=$lt_cv_to_host_file_cmd # convert \$build files to toolchain format. to_tool_file_cmd=$lt_cv_to_tool_file_cmd # An object symbol dumper. OBJDUMP=$lt_OBJDUMP # Method to check whether dependent libraries are shared objects. deplibs_check_method=$lt_deplibs_check_method # Command to use when deplibs_check_method = "file_magic". file_magic_cmd=$lt_file_magic_cmd # How to find potential files when deplibs_check_method = "file_magic". file_magic_glob=$lt_file_magic_glob # Find potential files using nocaseglob when deplibs_check_method = "file_magic". want_nocaseglob=$lt_want_nocaseglob # DLL creation program. DLLTOOL=$lt_DLLTOOL # Command to associate shared and link libraries. sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd # The archiver. AR=$lt_AR # Flags to create an archive. AR_FLAGS=$lt_AR_FLAGS # How to feed a file listing to the archiver. archiver_list_spec=$lt_archiver_list_spec # A symbol stripping program. STRIP=$lt_STRIP # Commands used to install an old-style archive. RANLIB=$lt_RANLIB old_postinstall_cmds=$lt_old_postinstall_cmds old_postuninstall_cmds=$lt_old_postuninstall_cmds # Whether to use a lock for old archive extraction. lock_old_archive_extraction=$lock_old_archive_extraction # A C compiler. LTCC=$lt_CC # LTCC compiler flags. 
LTCFLAGS=$lt_CFLAGS # Take the output of nm and produce a listing of raw symbols and C names. global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe # Transform the output of nm in a proper C declaration. global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl # Transform the output of nm in a C name address pair. global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address # Transform the output of nm in a C name address pair when lib prefix is needed. global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix # Specify filename containing input files for \$NM. nm_file_list_spec=$lt_nm_file_list_spec # The root where to search for dependent libraries,and in which our libraries should be installed. lt_sysroot=$lt_sysroot # The name of the directory that contains temporary libtool files. objdir=$objdir # Used to examine libraries when file_magic_cmd begins with "file". MAGIC_CMD=$MAGIC_CMD # Must we lock files when doing compilation? need_locks=$lt_need_locks # Manifest tool. MANIFEST_TOOL=$lt_MANIFEST_TOOL # Tool to manipulate archived DWARF debug symbol files on Mac OS X. DSYMUTIL=$lt_DSYMUTIL # Tool to change global to local symbols on Mac OS X. NMEDIT=$lt_NMEDIT # Tool to manipulate fat objects and archives on Mac OS X. LIPO=$lt_LIPO # ldd/readelf like tool for Mach-O binaries on Mac OS X. OTOOL=$lt_OTOOL # ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4. OTOOL64=$lt_OTOOL64 # Old archive suffix (normally "a"). libext=$libext # Shared library suffix (normally ".so"). shrext_cmds=$lt_shrext_cmds # The commands to extract the exported symbol list from a shared archive. extract_expsyms_cmds=$lt_extract_expsyms_cmds # Variables whose values should be saved in libtool wrapper scripts and # restored at link time. variables_saved_for_relink=$lt_variables_saved_for_relink # Do we need the "lib" prefix for modules? need_lib_prefix=$need_lib_prefix # Do we need a version for libraries? need_version=$need_version # Library versioning type. version_type=$version_type # Shared library runtime path variable. runpath_var=$runpath_var # Shared library path variable. shlibpath_var=$shlibpath_var # Is shlibpath searched before the hard-coded library search path? shlibpath_overrides_runpath=$shlibpath_overrides_runpath # Format of library name prefix. libname_spec=$lt_libname_spec # List of archive names. First name is the real one, the rest are links. # The last name is the one that the linker finds with -lNAME library_names_spec=$lt_library_names_spec # The coded name of the library, if different from the real name. soname_spec=$lt_soname_spec # Permission mode override for installation of shared libraries. install_override_mode=$lt_install_override_mode # Command to use after installation of a shared archive. postinstall_cmds=$lt_postinstall_cmds # Command to use after uninstallation of a shared archive. postuninstall_cmds=$lt_postuninstall_cmds # Commands used to finish a libtool library installation in a directory. finish_cmds=$lt_finish_cmds # As "finish_cmds", except a single script fragment to be evaled but # not shown. finish_eval=$lt_finish_eval # Whether we should hardcode library paths into libraries. hardcode_into_libs=$hardcode_into_libs # Compile-time system search path for libraries. sys_lib_search_path_spec=$lt_sys_lib_search_path_spec # Run-time system search path for libraries. sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec # Whether dlopen is supported. 
dlopen_support=$enable_dlopen # Whether dlopen of programs is supported. dlopen_self=$enable_dlopen_self # Whether dlopen of statically linked programs is supported. dlopen_self_static=$enable_dlopen_self_static # Commands to strip libraries. old_striplib=$lt_old_striplib striplib=$lt_striplib # The linker used to build libraries. LD=$lt_LD # How to create reloadable object files. reload_flag=$lt_reload_flag reload_cmds=$lt_reload_cmds # Commands used to build an old-style archive. old_archive_cmds=$lt_old_archive_cmds # A language specific compiler. CC=$lt_compiler # Is the compiler the GNU compiler? with_gcc=$GCC # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc # Whether or not to disallow shared libs when runtime libs are static. allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec # Compiler flag to generate shared objects directly from archives. whole_archive_flag_spec=$lt_whole_archive_flag_spec # Whether the compiler copes with passing no objects directly. compiler_needs_object=$lt_compiler_needs_object # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds # Create a temporary old-style archive to link instead of a shared archive. old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds # Commands used to build a shared archive. archive_cmds=$lt_archive_cmds archive_expsym_cmds=$lt_archive_expsym_cmds # Commands used to build a loadable module if different from building # a shared archive. module_cmds=$lt_module_cmds module_expsym_cmds=$lt_module_expsym_cmds # Whether we are building with GNU ld or not. with_gnu_ld=$lt_with_gnu_ld # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag # Flag that enforces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec # Whether we need a single "-rpath" flag with a separated argument. hardcode_libdir_separator=$lt_hardcode_libdir_separator # Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes # DIR into the resulting binary. hardcode_direct=$hardcode_direct # Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes # DIR into the resulting binary and the resulting library dependency is # "absolute",i.e impossible to change by setting \${shlibpath_var} if the # library is relocated. hardcode_direct_absolute=$hardcode_direct_absolute # Set to "yes" if using the -LDIR flag during linking hardcodes DIR # into the resulting binary. hardcode_minus_L=$hardcode_minus_L # Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR # into the resulting binary. 
hardcode_shlibpath_var=$hardcode_shlibpath_var # Set to "yes" if building a shared library automatically hardcodes DIR # into the library and all subsequent libraries and executables linked # against it. hardcode_automatic=$hardcode_automatic # Set to yes if linker adds runtime paths of dependent libraries # to runtime path list. inherit_rpath=$inherit_rpath # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs # Set to "yes" if exported symbols are required. always_export_symbols=$always_export_symbols # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms # Symbols that must always be exported. include_expsyms=$lt_include_expsyms # Commands necessary for linking programs (against libraries) with templates. prelink_cmds=$lt_prelink_cmds # Commands necessary for finishing linking programs. postlink_cmds=$lt_postlink_cmds # Specify filename containing input files. file_list_spec=$lt_file_list_spec # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action # The directories searched by this compiler when creating a shared library. compiler_lib_search_dirs=$lt_compiler_lib_search_dirs # Dependencies to place before and after the objects being linked to # create a shared library. predep_objects=$lt_predep_objects postdep_objects=$lt_postdep_objects predeps=$lt_predeps postdeps=$lt_postdeps # The library search path used internally by the compiler when linking # a shared library. compiler_lib_search_path=$lt_compiler_lib_search_path # ### END LIBTOOL CONFIG _LT_EOF case $host_os in aix3*) cat <<\_LT_EOF >> "$cfgfile" # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test "X${COLLECT_NAMES+set}" != Xset; then COLLECT_NAMES= export COLLECT_NAMES fi _LT_EOF ;; esac ltmain="$ac_aux_dir/ltmain.sh" # We use sed instead of cat because bash on DJGPP gets confused if # if finds mixed CR/LF and LF-only lines. Since sed operates in # text mode, it properly converts lines to CR/LF. This bash problem # is reportedly fixed, but why not run on old versions too? sed '$q' "$ltmain" >> "$cfgfile" \ || (rm -f "$cfgfile"; exit 1) if test x"$xsi_shell" = xyes; then sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ func_dirname ()\ {\ \ case ${1} in\ \ */*) func_dirname_result="${1%/*}${2}" ;;\ \ * ) func_dirname_result="${3}" ;;\ \ esac\ } # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: sed -e '/^func_basename ()$/,/^} # func_basename /c\ func_basename ()\ {\ \ func_basename_result="${1##*/}"\ } # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? 
|| _lt_function_replace_fail=: sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ func_dirname_and_basename ()\ {\ \ case ${1} in\ \ */*) func_dirname_result="${1%/*}${2}" ;;\ \ * ) func_dirname_result="${3}" ;;\ \ esac\ \ func_basename_result="${1##*/}"\ } # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ func_stripname ()\ {\ \ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ \ # positional parameters, so assign one to ordinary parameter first.\ \ func_stripname_result=${3}\ \ func_stripname_result=${func_stripname_result#"${1}"}\ \ func_stripname_result=${func_stripname_result%"${2}"}\ } # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ func_split_long_opt ()\ {\ \ func_split_long_opt_name=${1%%=*}\ \ func_split_long_opt_arg=${1#*=}\ } # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ func_split_short_opt ()\ {\ \ func_split_short_opt_arg=${1#??}\ \ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ } # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ func_lo2o ()\ {\ \ case ${1} in\ \ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ \ *) func_lo2o_result=${1} ;;\ \ esac\ } # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: sed -e '/^func_xform ()$/,/^} # func_xform /c\ func_xform ()\ {\ func_xform_result=${1%.*}.lo\ } # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: sed -e '/^func_arith ()$/,/^} # func_arith /c\ func_arith ()\ {\ func_arith_result=$(( $* ))\ } # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: sed -e '/^func_len ()$/,/^} # func_len /c\ func_len ()\ {\ func_len_result=${#1}\ } # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? 
|| _lt_function_replace_fail=: fi if test x"$lt_shell_append" = xyes; then sed -e '/^func_append ()$/,/^} # func_append /c\ func_append ()\ {\ eval "${1}+=\\${2}"\ } # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ func_append_quoted ()\ {\ \ func_quote_for_eval "${2}"\ \ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ } # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: # Save a `func_append' function call where possible by direct use of '+=' sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: else # Save a `func_append' function call even when '+=' is not available sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: fi if test x"$_lt_function_replace_fail" = x":"; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 $as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} fi mv -f "$cfgfile" "$ofile" || (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") chmod +x "$ofile" cat <<_LT_EOF >> "$ofile" # ### BEGIN LIBTOOL TAG CONFIG: CXX # The linker used to build libraries. LD=$lt_LD_CXX # How to create reloadable object files. reload_flag=$lt_reload_flag_CXX reload_cmds=$lt_reload_cmds_CXX # Commands used to build an old-style archive. old_archive_cmds=$lt_old_archive_cmds_CXX # A language specific compiler. CC=$lt_compiler_CXX # Is the compiler the GNU compiler? with_gcc=$GCC_CXX # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic_CXX # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl_CXX # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static_CXX # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o_CXX # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc_CXX # Whether or not to disallow shared libs when runtime libs are static. allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_CXX # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_CXX # Compiler flag to generate shared objects directly from archives. whole_archive_flag_spec=$lt_whole_archive_flag_spec_CXX # Whether the compiler copes with passing no objects directly. compiler_needs_object=$lt_compiler_needs_object_CXX # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_CXX # Create a temporary old-style archive to link instead of a shared archive. 
old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_CXX # Commands used to build a shared archive. archive_cmds=$lt_archive_cmds_CXX archive_expsym_cmds=$lt_archive_expsym_cmds_CXX # Commands used to build a loadable module if different from building # a shared archive. module_cmds=$lt_module_cmds_CXX module_expsym_cmds=$lt_module_expsym_cmds_CXX # Whether we are building with GNU ld or not. with_gnu_ld=$lt_with_gnu_ld_CXX # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag_CXX # Flag that enforces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag_CXX # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_CXX # Whether we need a single "-rpath" flag with a separated argument. hardcode_libdir_separator=$lt_hardcode_libdir_separator_CXX # Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes # DIR into the resulting binary. hardcode_direct=$hardcode_direct_CXX # Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes # DIR into the resulting binary and the resulting library dependency is # "absolute",i.e impossible to change by setting \${shlibpath_var} if the # library is relocated. hardcode_direct_absolute=$hardcode_direct_absolute_CXX # Set to "yes" if using the -LDIR flag during linking hardcodes DIR # into the resulting binary. hardcode_minus_L=$hardcode_minus_L_CXX # Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR # into the resulting binary. hardcode_shlibpath_var=$hardcode_shlibpath_var_CXX # Set to "yes" if building a shared library automatically hardcodes DIR # into the library and all subsequent libraries and executables linked # against it. hardcode_automatic=$hardcode_automatic_CXX # Set to yes if linker adds runtime paths of dependent libraries # to runtime path list. inherit_rpath=$inherit_rpath_CXX # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs_CXX # Set to "yes" if exported symbols are required. always_export_symbols=$always_export_symbols_CXX # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds_CXX # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms_CXX # Symbols that must always be exported. include_expsyms=$lt_include_expsyms_CXX # Commands necessary for linking programs (against libraries) with templates. prelink_cmds=$lt_prelink_cmds_CXX # Commands necessary for finishing linking programs. postlink_cmds=$lt_postlink_cmds_CXX # Specify filename containing input files. file_list_spec=$lt_file_list_spec_CXX # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action_CXX # The directories searched by this compiler when creating a shared library. compiler_lib_search_dirs=$lt_compiler_lib_search_dirs_CXX # Dependencies to place before and after the objects being linked to # create a shared library. predep_objects=$lt_predep_objects_CXX postdep_objects=$lt_postdep_objects_CXX predeps=$lt_predeps_CXX postdeps=$lt_postdeps_CXX # The library search path used internally by the compiler when linking # a shared library. 
compiler_lib_search_path=$lt_compiler_lib_search_path_CXX # ### END LIBTOOL TAG CONFIG: CXX _LT_EOF ;; "po-directories":C) for ac_file in $CONFIG_FILES; do # Support "outfile[:infile[:infile...]]" case "$ac_file" in *:*) ac_file=`echo "$ac_file"|sed 's%:.*%%'` ;; esac # PO directories have a Makefile.in generated from Makefile.in.in. case "$ac_file" in */Makefile.in) # Adjust a relative srcdir. ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'` ac_dir_suffix="/`echo "$ac_dir"|sed 's%^\./%%'`" ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'` # In autoconf-2.13 it is called $ac_given_srcdir. # In autoconf-2.50 it is called $srcdir. test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir" case "$ac_given_srcdir" in .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;; /*) top_srcdir="$ac_given_srcdir" ;; *) top_srcdir="$ac_dots$ac_given_srcdir" ;; esac # Treat a directory as a PO directory if and only if it has a # POTFILES.in file. This allows packages to have multiple PO # directories under different names or in different locations. if test -f "$ac_given_srcdir/$ac_dir/POTFILES.in"; then rm -f "$ac_dir/POTFILES" test -n "$as_me" && echo "$as_me: creating $ac_dir/POTFILES" || echo "creating $ac_dir/POTFILES" cat "$ac_given_srcdir/$ac_dir/POTFILES.in" | sed -e "/^#/d" -e "/^[ ]*\$/d" -e "s,.*, $top_srcdir/& \\\\," | sed -e "\$s/\(.*\) \\\\/\1/" > "$ac_dir/POTFILES" POMAKEFILEDEPS="POTFILES.in" # ALL_LINGUAS, POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES depend # on $ac_dir but don't depend on user-specified configuration # parameters. if test -f "$ac_given_srcdir/$ac_dir/LINGUAS"; then # The LINGUAS file contains the set of available languages. if test -n "$OBSOLETE_ALL_LINGUAS"; then test -n "$as_me" && echo "$as_me: setting ALL_LINGUAS in configure.in is obsolete" || echo "setting ALL_LINGUAS in configure.in is obsolete" fi ALL_LINGUAS_=`sed -e "/^#/d" -e "s/#.*//" "$ac_given_srcdir/$ac_dir/LINGUAS"` # Hide the ALL_LINGUAS assigment from automake < 1.5. eval 'ALL_LINGUAS''=$ALL_LINGUAS_' POMAKEFILEDEPS="$POMAKEFILEDEPS LINGUAS" else # The set of available languages was given in configure.in. # Hide the ALL_LINGUAS assigment from automake < 1.5. eval 'ALL_LINGUAS''=$OBSOLETE_ALL_LINGUAS' fi # Compute POFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).po) # Compute UPDATEPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).po-update) # Compute DUMMYPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).nop) # Compute GMOFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).gmo) case "$ac_given_srcdir" in .) srcdirpre= ;; *) srcdirpre='$(srcdir)/' ;; esac POFILES= UPDATEPOFILES= DUMMYPOFILES= GMOFILES= for lang in $ALL_LINGUAS; do POFILES="$POFILES $srcdirpre$lang.po" UPDATEPOFILES="$UPDATEPOFILES $lang.po-update" DUMMYPOFILES="$DUMMYPOFILES $lang.nop" GMOFILES="$GMOFILES $srcdirpre$lang.gmo" done # CATALOGS depends on both $ac_dir and the user's LINGUAS # environment variable. INST_LINGUAS= if test -n "$ALL_LINGUAS"; then for presentlang in $ALL_LINGUAS; do useit=no if test "%UNSET%" != "$LINGUAS"; then desiredlanguages="$LINGUAS" else desiredlanguages="$ALL_LINGUAS" fi for desiredlang in $desiredlanguages; do # Use the presentlang catalog if desiredlang is # a. equal to presentlang, or # b. a variant of presentlang (because in this case, # presentlang can be used as a fallback for messages # which are not translated in the desiredlang catalog). 
case "$desiredlang" in "$presentlang"*) useit=yes;; esac done if test $useit = yes; then INST_LINGUAS="$INST_LINGUAS $presentlang" fi done fi CATALOGS= if test -n "$INST_LINGUAS"; then for lang in $INST_LINGUAS; do CATALOGS="$CATALOGS $lang.gmo" done fi test -n "$as_me" && echo "$as_me: creating $ac_dir/Makefile" || echo "creating $ac_dir/Makefile" sed -e "/^POTFILES =/r $ac_dir/POTFILES" -e "/^# Makevars/r $ac_given_srcdir/$ac_dir/Makevars" -e "s|@POFILES@|$POFILES|g" -e "s|@UPDATEPOFILES@|$UPDATEPOFILES|g" -e "s|@DUMMYPOFILES@|$DUMMYPOFILES|g" -e "s|@GMOFILES@|$GMOFILES|g" -e "s|@CATALOGS@|$CATALOGS|g" -e "s|@POMAKEFILEDEPS@|$POMAKEFILEDEPS|g" "$ac_dir/Makefile.in" > "$ac_dir/Makefile" for f in "$ac_given_srcdir/$ac_dir"/Rules-*; do if test -f "$f"; then case "$f" in *.orig | *.bak | *~) ;; *) cat "$f" >> "$ac_dir/Makefile" ;; esac fi done fi ;; esac done ;; "src/utils/python/arcconfig-parser":F) chmod +x src/utils/python/arcconfig-parser ;; esac done # for ac_tag as_fn_exit 0 _ACEOF ac_clean_files=$ac_clean_files_save test $ac_write_fail = 0 || as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 # configure is writing to config.log, and then calls config.status. # config.status does its own redirection, appending to config.log. # Unfortunately, on DOS this fails, as config.log is still kept open # by configure, so config.status won't be able to write to it; its # output is simply discarded. So we exec the FD to /dev/null, # effectively closing config.log, so it can be properly (re)opened and # appended to by config.status. When coming back to configure, we # need to make the FD available again. if test "$no_create" != yes; then ac_cs_success=: ac_config_status_args= test "$silent" = yes && ac_config_status_args="$ac_config_status_args --quiet" exec 5>/dev/null $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false exec 5>>config.log # Use ||, not &&, to avoid exiting from the if with $? = 1, which # would make configure fail if this is the last instruction. 
$ac_cs_success || as_fn_exit 1 fi if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: Unit testing: ${enables_cppunit} Python binding: ${enables_swig_python} ($PYTHON_VERSION) Alt.Python binding: ${enables_altpython} ($ALTPYTHON_VERSION) Available third-party features: GridFTP: ${enables_gridftp} GFAL: ${enables_gfal} S3: ${enables_s3} Xrootd: ${enables_xrootd} MYSQL CLIENT LIB: ${enables_mysql} LDAP: ${enables_ldap} xmlsec1: ${enables_xmlsec1} ARGUS: ${enables_argus} NSS: ${enables_nss} BDB C++: ${enables_dbcxx} SQLite: ${enables_sqlite} LDNS: ${enables_ldns} Enabled features: Local jobs info in BDB: ${enables_dbjstore} Local jobs info in SQLite: ${enables_sqlitejstore} Systemd Integration: ${enables_systemd} Python LRMS back-end dependency: Inline::Python Perl module: ${INLINE_PYTHON_FOUND} Included components: HED: ${enables_hed} A-REX service: ${enables_a_rex_service} Internal plugin: ${enables_internal} GRIDFTPD service: ${enables_gridftpd_service} LDAP Info service: ${enables_ldap_service} CANDYPOND service: ${enables_candypond} DATADELIVERY service: ${enables_datadelivery_service} ACIX service: ${enables_acix} COMPUTE clients: ${enables_compute_client} DATA clients: ${enables_data_client} CREDENTIAL clients: ${enables_credentials_client} EMI ES client (ACC): ${enables_emies_client} ARC REST client (ACC): ${enables_arcrest_client} SRM client (DMC): ${enables_srm_dmc} Documentation: ${enables_doc} Monitoring: LDAP Monitor ${enables_monitor} " >&5 $as_echo " Unit testing: ${enables_cppunit} Python binding: ${enables_swig_python} ($PYTHON_VERSION) Alt.Python binding: ${enables_altpython} ($ALTPYTHON_VERSION) Available third-party features: GridFTP: ${enables_gridftp} GFAL: ${enables_gfal} S3: ${enables_s3} Xrootd: ${enables_xrootd} MYSQL CLIENT LIB: ${enables_mysql} LDAP: ${enables_ldap} xmlsec1: ${enables_xmlsec1} ARGUS: ${enables_argus} NSS: ${enables_nss} BDB C++: ${enables_dbcxx} SQLite: ${enables_sqlite} LDNS: ${enables_ldns} Enabled features: Local jobs info in BDB: ${enables_dbjstore} Local jobs info in SQLite: ${enables_sqlitejstore} Systemd Integration: ${enables_systemd} Python LRMS back-end dependency: Inline::Python Perl module: ${INLINE_PYTHON_FOUND} Included components: HED: ${enables_hed} A-REX service: ${enables_a_rex_service} Internal plugin: ${enables_internal} GRIDFTPD service: ${enables_gridftpd_service} LDAP Info service: ${enables_ldap_service} CANDYPOND service: ${enables_candypond} DATADELIVERY service: ${enables_datadelivery_service} ACIX service: ${enables_acix} COMPUTE clients: ${enables_compute_client} DATA clients: ${enables_data_client} CREDENTIAL clients: ${enables_credentials_client} EMI ES client (ACC): ${enables_emies_client} ARC REST client (ACC): ${enables_arcrest_client} SRM client (DMC): ${enables_srm_dmc} Documentation: ${enables_doc} Monitoring: LDAP Monitor ${enables_monitor} " >&6; } nordugrid-arc-6.14.0/PaxHeaders.30264/nordugrid-arc.spec.in0000644000000000000000000000013214152153376021365 xustar000000000000000030 mtime=1638455038.281644111 30 atime=1638455038.462646831 30 ctime=1638455095.727507261 nordugrid-arc-6.14.0/nordugrid-arc.spec.in0000644000175000002070000015404314152153376021361 0ustar00mockbuildmock00000000000000%{!?_pkgdocdir: %global _pkgdocdir %{_docdir}/%{name}-%{version}} # 
# Build dependency descrepancies across platforms # %if %{?suse_version:1}%{!?suse_version:0} %global glibmm2_devel glibmm2-devel %global openldap_devel openldap2-devel %global nss_devel mozilla-nss-devel %else %global glibmm2_devel glibmm24-devel %global openldap_devel openldap-devel %global nss_devel nss-devel %endif # # xROOTd # %if %{?fedora}%{!?fedora:0} >= 24 || %{?rhel}%{!?rhel:0} %global with_xrootd %{!?_without_xrootd:1}%{?_without_xrootd:0} %else %global with_xrootd 0 %endif # # Python # %if %{?fedora}%{!?fedora:0} >= 32 || %{?rhel}%{!?rhel:0} >= 8 %global with_python2 0 %else %global with_python2 1 %endif %if %{?fedora}%{!?fedora:0} >= 13 || %{?rhel}%{!?rhel:0} >= 7 %global with_python3 1 %else %global with_python3 0 %endif %if %{with_python2} %{!?__python2: %global __python2 /usr/bin/python2} %{!?python2_sitearch: %global python2_sitearch %(%{__python2} -Esc "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")} %{!?python2_sitelib: %global python2_sitelib %(%{__python2} -Esc "from distutils.sysconfig import get_python_lib; print(get_python_lib())")} %endif %if %{with_python3} %{!?python3_pkgversion: %global python3_pkgversion 3} %endif %if %{?rhel}%{!?rhel:0} == 6 %filter_provides_in %{python2_sitearch}/.*\.so$ %filter_setup %endif %if %{?fedora}%{!?fedora:0} || %{?rhel}%{!?rhel:0} >= 7 || %{?suse_version:1}%{!?suse_version:0} %global with_pylint %{!?_without_pylint:1}%{?_without_pylint:0} %else %global with_pylint 0 %endif %if %{?fedora}%{!?fedora:0} >= 29 || %{?rhel}%{!?rhel:0} >= 8 %global py3default 1 %else %global py3default 0 %endif %if %{?fedora}%{!?fedora:0} >= 7 || %{?rhel}%{!?rhel:0} >= 5 || %{?suse_version}%{!?suse_version:0} >= 1110 %global with_acix 1 %else %global with_acix 0 %endif %if %{?fedora}%{!?fedora:0} >= 21 || %{?rhel}%{!?rhel:0} >= 5 %global with_s3 1 %else %global with_s3 0 %endif %if %{?fedora}%{!?fedora:0} >= 21 || %{?rhel}%{!?rhel:0} >= 5 %global with_gfal 1 %else %global with_gfal 0 %endif %if %{?fedora}%{!?fedora:0} || %{?rhel}%{!?rhel:0} %global with_xmlsec1 %{!?_without_xmlsec1:1}%{?_without_xmlsec1:0} %else %global with_xmlsec1 0 %endif %if %{?fedora}%{!?fedora:0} >= 21 || %{?rhel}%{!?rhel:0} >= 6 %global with_pythonlrms 1 %else %global with_pythonlrms 0 %endif # LDNS %if %{?fedora}%{!?fedora:0} >= 13 || %{?rhel}%{!?rhel:0} >= 5 %global with_ldns 1 %else %global with_ldns 0 %endif %if %{?fedora}%{!?fedora:0} >= 25 || %{?rhel}%{!?rhel:0} >= 7 %global use_systemd 1 %else %global use_systemd 0 %endif %global with_ldap_service 1 %global pkgdir arc # bash-completion %global _bashcompdir %(pkg-config --variable=completionsdir bash-completion 2>/dev/null || echo %{_sysconfdir}/bash_completion.d) # # Macros for scripts # # Stop and disable service on package removal %if %{use_systemd} %define stop_on_removal() %{expand:%%systemd_preun %(sed 's/[^ ]*/&.service/g' <<< '%{?*}')} %else %if %{?stop_on_removal:0}%{!?stop_on_removal:1} %global stop_on_removal() if [ $1 -eq 0 ]; then for s in %*; do service $s stop > /dev/null 2>&1 || : ; done; for s in %*; do /sbin/chkconfig --del $s; done; fi %endif %endif # Enable a service %if %{use_systemd} %define enable_service() %{expand:%%systemd_post %(sed 's/[^ ]*/&.service/g' <<< '%{?*}')} %else %if %{?suse_version:1}%{!?suse_version:0} %define enable_service() %{expand:%%fillup_and_insserv -f %{?*}} %else %define enable_service() for s in %{?*}; do /sbin/chkconfig --add $s ; done %endif %endif # Conditionally restart service on package update %if %{use_systemd} %define condrestart_on_update() 
%{expand:%%systemd_postun_with_restart %(sed 's/[^ ]*/&.service/g' <<< '%{?*}')} %else %if %{?suse_version:1}%{!?suse_version:0} %define condrestart_on_update() %{expand:%%restart_on_update %{?*}} %{expand:%%insserv_cleanup} %else %define condrestart_on_update() if [ $1 -ge 1 ]; then for s in %{?*}; do service $s condrestart > /dev/null 2>&1 || : ; done; fi %endif %endif # Standard service requirements %if %{use_systemd} %define service_post_requires systemd-units %define service_preun_requires systemd-units %define service_postun_requires systemd-units %else %if %{?suse_version:1}%{!?suse_version:0} %define service_post_requires %{insserv_prereq} %define service_preun_requires %{insserv_prereq} %define service_postun_requires %{insserv_prereq} %else %define service_post_requires chkconfig %define service_preun_requires chkconfig, initscripts %define service_postun_requires initscripts %endif %endif Name: @PACKAGE@ Version: @baseversion@ Release: @fedorarelease@%{?dist} Summary: Advanced Resource Connector Middleware Group: System Environment/Daemons License: ASL 2.0 URL: http://www.nordugrid.org/ Source: http://download.nordugrid.org/packages/%{name}/releases/%{version}@preversion@/src/%{name}-%{version}@preversion@.tar.gz BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) # Packages dropped without replacements Obsoletes: %{name}-chelonia < 2.0.0 Obsoletes: %{name}-hopi < 2.0.0 Obsoletes: %{name}-isis < 2.0.0 Obsoletes: %{name}-janitor < 2.0.0 Obsoletes: %{name}-doxygen < 4.0.0 Obsoletes: %{name}-arcproxyalt < 6.0.0 Obsoletes: %{name}-java < 6.0.0 Obsoletes: %{name}-egiis < 6.0.0 %if ! %{with_python2} Obsoletes: python2-%{name} < %{version}-%{release} Obsoletes: %{name}-python < 5.3.3 %endif %if ! %{with_ldap_service} Obsoletes: %{name}-infosys-ldap < %{version}-%{release} Obsoletes: %{name}-ldap-infosys < 6.0.0 Obsoletes: %{name}-aris < 6.0.0 %endif BuildRequires: gcc-c++ BuildRequires: cppunit-devel BuildRequires: pkgconfig %if %{use_systemd} BuildRequires: systemd-devel %endif %if %{?fedora}%{!?fedora:0} >= 12 || %{?rhel}%{!?rhel:0} >= 6 || %{?suse_version:1}%{!?suse_version:0} BuildRequires: libuuid-devel %else BuildRequires: e2fsprogs-devel %endif BuildRequires: gettext %if %{with_python2} BuildRequires: python2-devel %endif %if %{with_python3} BuildRequires: python%{python3_pkgversion}-devel %endif %if %{with_pylint} BuildRequires: pylint %endif BuildRequires: %{glibmm2_devel} BuildRequires: glib2-devel BuildRequires: libxml2-devel BuildRequires: openssl BuildRequires: openssl-devel %if %{with_xmlsec1} BuildRequires: xmlsec1-devel >= 1.2.4 BuildRequires: xmlsec1-openssl-devel >= 1.2.4 %endif BuildRequires: %{nss_devel} BuildRequires: %{openldap_devel} BuildRequires: globus-common-devel BuildRequires: globus-ftp-client-devel BuildRequires: globus-ftp-control-devel %if %{?fedora}%{!?fedora:0} >= 23 || %{?rhel}%{!?rhel:0} >= 5 BuildRequires: globus-gssapi-gsi-devel >= 12.2 %else BuildRequires: globus-gssapi-gsi-devel < 12.2 %endif %if %{with_xrootd} BuildRequires: xrootd-client-devel >= 1:4.5.0 %endif %if %{with_gfal} BuildRequires: gfal2-devel %endif %if %{with_s3} BuildRequires: libs3-devel %endif %if %{?suse_version}%{!?suse_version:0} == 1110 BuildRequires: db43-devel %else %if %{?fedora}%{!?fedora:0} >= 15 || %{?rhel}%{!?rhel:0} >= 7 BuildRequires: libdb-cxx-devel %else %if %{?fedora}%{!?fedora:0} == 14 BuildRequires: libdb-devel %else BuildRequires: db4-devel %endif %endif %endif %if %{?fedora}%{!?fedora:0} >= 21 || %{?rhel}%{!?rhel:0} BuildRequires: 
perl-generators %endif # Needed for Boinc backend testing during make check BuildRequires: perl(DBI) # Needed for infoprovider testing during make check BuildRequires: perl(English) BuildRequires: perl(JSON::XS) BuildRequires: perl(Sys::Hostname) BuildRequires: perl(XML::Simple) # Needed for LRMS testing during make check BuildRequires: perl(Test::Harness) BuildRequires: perl(Test::Simple) # Needed to run ACIX unit tests %if %{with_acix} %if %{py3default} BuildRequires: python3-twisted BuildRequires: python3-pyOpenSSL %else %if %{?fedora}%{!?fedora:0} || %{?rhel}%{!?rhel:0} >= 8 || %{?suse_version:1}%{!?suse_version:0} BuildRequires: python-twisted %else BuildRequires: python-twisted-core BuildRequires: python-twisted-web %endif %if %{?suse_version:1}%{!?suse_version:0} BuildRequires: python-openssl %else BuildRequires: pyOpenSSL %endif %endif %endif BuildRequires: swig %if %{?fedora}%{!?fedora:0} >= 4 || %{?rhel}%{!?rhel:0} >= 5 BuildRequires: libtool-ltdl-devel %else BuildRequires: libtool %endif %if %{with_pythonlrms} BuildRequires: perl(Inline) BuildRequires: perl(Inline::Python) %endif BuildRequires: sqlite-devel >= 3.6 %if %{with_ldns} BuildRequires: ldns-devel >= 1.6.8 %endif %if %{?fedora}%{!?fedora:0} >= 17 || %{?rhel}%{!?rhel:0} >= 7 || %{?suse_version:1}%{!?suse_version:0} BuildRequires: pkgconfig(bash-completion) %endif %if %{?fedora}%{!?fedora:0} <= 13 && %{?rhel}%{!?rhel:0} <= 6 BuildRequires: python-argparse Requires: python-argparse %endif %if %{?fedora}%{!?fedora:0} >= 13 || %{?rhel}%{!?rhel:0} >= 7 || %{?suse_version:1}%{!?suse_version:0} Requires: hostname %else Requires: net-tools %endif Requires: openssl %description NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). The ARC middleware is a software solution that uses distributed computing technologies to enable sharing and federation of computing resources across different administrative and application domains. ARC is used to create distributed infrastructures of various scope and complexity, from campus to national and global deployments. %package client Summary: ARC command line clients Group: Applications/Internet Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-needed = %{version}-%{release} %description client NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This client package contains all the CLI tools that are needed to operate with x509 proxies, submit and manage jobs and handle data transfers. %package hed Summary: ARC Hosting Environment Daemon Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description hed NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). The ARC Hosting Environment Daemon (HED) is a Web Service container for ARC services. 
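# As a rough illustration: once installed, HED runs as an ordinary system service, so on systemd-based platforms its state can be checked with, e.g.: # systemctl status arched # (the service name arched is the one enabled by the hed scriptlets further below).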
%package gridftpd Summary: ARC gridftp server Group: System Environment/Daemons Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-gridftp = %{version}-%{release} Requires: %{name}-arcctl-service = %{version}-%{release} Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description gridftpd NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the ARC gridftp server which can be used as a custom job submission interface in front of an ARC enabled computing cluster or as a low-level dedicated gridftp file server. %package datadelivery-service Summary: ARC data delivery service Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-hed = %{version}-%{release} Requires: %{name}-plugins-needed = %{version}-%{release} Requires: %{name}-arcctl-service = %{version}-%{release} Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description datadelivery-service NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the ARC data delivery service. %if %{with_ldap_service} %package infosys-ldap Summary: ARC LDAP-based information services Group: System Environment/Libraries %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif %if %{?rhel}%{!?rhel:0} == 8 # slapd package not available in EPEL 8 Recommends: openldap-servers Recommends: bdii %else Requires: openldap-servers Requires: bdii %endif Requires: glue-schema >= 2.0.10 Requires: %{name}-arcctl-service = %{version}-%{release} Provides: %{name}-ldap-infosys = %{version}-%{release} Obsoletes: %{name}-ldap-infosys < 6.0.0 Provides: %{name}-aris = %{version}-%{release} Obsoletes: %{name}-aris < 6.0.0 Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %if %{?fedora}%{!?fedora:0} >= 23 || %{?rhel}%{!?rhel:0} >= 8 Requires(post): policycoreutils-python-utils Requires(postun): policycoreutils-python-utils %else %if %{?fedora}%{!?fedora:0} >= 11 || %{?rhel}%{!?rhel:0} >= 6 Requires(post): policycoreutils-python Requires(postun): policycoreutils-python %else %if %{?fedora}%{!?fedora:0} >= 5 || %{?rhel}%{!?rhel:0} >= 5 Requires(post): policycoreutils Requires(postun): policycoreutils %endif %endif %endif %description infosys-ldap NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the ARC information services relying on BDII and LDAP technologies to publish ARC CE information according to various LDAP schemas. Please note that the information collectors are part of another package, the nordugrid-arc-arex. 
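# As a rough illustration: the slapd instance managed by this service listens on TCP port 2135 (the port registered with SELinux in the scriptlets further below), so it can be queried with any standard LDAP client, e.g.: # ldapsearch -x -H ldap://ce.example.org:2135 -b 'o=glue' # (ce.example.org is a placeholder host name; o=glue is the GLUE2 LDAP base that the required glue-schema package provides).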
%endif %package monitor Summary: ARC LDAP monitor web application Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: php Requires: php-gd Requires: php-ldap %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif Obsoletes: %{name}-ldap-monitor < 6.0.0 Obsoletes: %{name}-ws-monitor < 6.0.0 %description monitor NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the PHP web application that is used to set up a web-based monitor which pulls information from the LDAP information system and visualizes it. %package arcctl Summary: ARC Control Tool Group: Applications/Internet Requires: %{name} = %{version}-%{release} %description arcctl NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the ARC Control Tool with a basic set of control modules suitable for both server and client side. %package arcctl-service Summary: ARC Control Tool - service control modules Group: Applications/Internet Requires: %{name} = %{version}-%{release} Requires: %{name}-arcctl = %{version}-%{release} %description arcctl-service NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the service control modules for the ARC Control Tool that allow working with the server-side config and managing ARC services. %package arex Summary: ARC Resource-coupled EXecution service Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-hed = %{version}-%{release} Requires: %{name}-arcctl = %{version}-%{release} Requires: %{name}-arcctl-service = %{version}-%{release} Requires: %{name}-plugins-needed = %{version}-%{release} %if %{py3default} Requires: python3-isodate Requires: python3-ldap %else Requires: python-isodate Requires: python-ldap %endif Provides: %{name}-cache-service = %{version}-%{release} Obsoletes: %{name}-cache-service < 6.0.0 Provides: %{name}-candypond = %{version}-%{release} Obsoletes: %{name}-candypond < 6.0.0 Requires(post): %{name}-arcctl = %{version}-%{release} Requires(preun): %{name}-arcctl = %{version}-%{release} %if %{?fedora}%{!?fedora:0} >= 13 || %{?rhel}%{!?rhel:0} >= 7 || %{?suse_version:1}%{!?suse_version:0} Requires(post): hostname %else Requires(post): net-tools %endif Requires(post): openssl Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description arex NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). The ARC Resource-coupled EXecution service (AREX) is the Computing Element of the ARC middleware. AREX offers a full-featured middle layer to manage computational tasks, including interfacing to local batch systems, taking care of complex environments such as data staging, data caching, software environment provisioning, information collection and exposure, accounting information gathering and publishing. 
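# As a rough illustration: on systemd-based builds the A-REX units enabled by the post scriptlet further below are started in the usual way, e.g.: # systemctl enable --now arc-arex arc-arex-ws # (unit names arc-arex and arc-arex-ws as used in that scriptlet).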
%if %{with_pythonlrms} %package arex-python-lrms Summary: ARC Resource-coupled EXecution service - Python LRMS backends Group: System Environment/Libraries Requires: %{name}-arex = %{version}-%{release} %if %{py3default} Requires: python%{python3_pkgversion}-%{name} = %{version}-%{release} %else Requires: python2-%{name} = %{version}-%{release} %endif Requires: perl(Inline) Requires: perl(Inline::Python) %description arex-python-lrms NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). The Python LRMS backends are a new implementation of the AREX LRMS backend scripts written in Python. Currently only the SLURM LRMS is supported. It is released as a technology preview. %endif %package community-rtes Summary: Community-defined RTEs support Group: System Environment/Libraries Requires: %{name}-arex = %{version}-%{release} Requires: %{name}-arcctl = %{version}-%{release} Requires: gnupg2 %if %{py3default} Requires: python3-dns %else Requires: python-dns %endif %description community-rtes NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Community RTEs is the framework that allows deploying software packages (tarballs, containers, etc) provided by trusted communities to ARC CE using simple arcctl commands. It is released as a technology preview. %package plugins-needed Summary: ARC base plugins Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-needed NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC base plugins. This includes the Message Chain Components (MCCs) and Data Manager Components (DMCs). %package plugins-globus Summary: ARC Globus plugins (compat) Group: System Environment/Libraries Requires: %{name}-plugins-gridftp = %{version}-%{release} Requires: %{name}-plugins-gridftpjob = %{version}-%{release} Requires: %{name}-plugins-lcas-lcmaps = %{version}-%{release} %description plugins-globus NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC Globus plugins. This compat metapackage brings all Globus dependent plugins at once, including: Data Manager Components (DMCs), Client plugin and LCAS/LCMAPS tools. This package is meant to allow smooth transition and will be removed from the upcoming releases. %package plugins-globus-common Summary: ARC Globus plugins common libraries Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-globus-common NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC Globus plugins common libraries package includes the bundle of necessary Globus libraries needed for all other globus-dependent ARC components. %package plugins-gridftp Summary: ARC Globus dependent DMCs Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-globus-common = %{version}-%{release} %description plugins-gridftp NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC Globus GridFTP plugins. These allow access to data through the gridftp protocol. 
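# As a rough illustration: with this DMC installed, the arccp client from the client package can copy data over GridFTP URLs, e.g.: # arccp gsiftp://gridftp.example.org/data/input.dat /tmp/input.dat # (placeholder host and paths).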
%package plugins-lcas-lcmaps Summary: ARC LCAS/LCMAPS plugins Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-globus-common = %{version}-%{release} %if %{?fedora}%{!?fedora:0} >= 23 || %{?rhel}%{!?rhel:0} >= 5 Requires: globus-gssapi-gsi >= 12.2 %else Requires: globus-gssapi-gsi < 12.2 %endif %description plugins-lcas-lcmaps NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC LCAS/LCMAPS tools allow configuring ARC CE to use LCAS/LCMAPS services for authorization and mapping. %package plugins-gridftpjob Summary: ARC GRIDFTPJOB client plugin Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-globus-common = %{version}-%{release} Requires: %{name}-plugins-gridftp = %{version}-%{release} %description plugins-gridftpjob NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC GRIDFTPJOB plugin allows submitting jobs via the gridftpd interface. %if %{with_xrootd} %package plugins-xrootd Summary: ARC xrootd plugins Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-xrootd NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC xrootd plugins. These allow access to data through the xrootd protocol. %endif %if %{with_gfal} %package plugins-gfal Summary: ARC GFAL2 plugins Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-gfal NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC plugins for GFAL2. This allows third-party transfer and adds support for several extra transfer protocols (rfio, dcap, gsidcap). Support for specific protocols is provided by separate 3rd-party GFAL2 plugin packages. %endif %if %{with_s3} %package plugins-s3 Summary: ARC S3 plugins Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-s3 NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC plugins for S3. These allow access to data through the S3 protocol. %endif %package plugins-internal Summary: ARC internal plugin Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-arex = %{version}-%{release} %description plugins-internal NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). The ARC internal plugin. A special interface aimed for restrictive HPC sites, to be used with a local installation of the ARC Control Tower. %package plugins-arcrest Summary: ARC REST plugin Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-arcrest NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC plugin for ARC REST interface technology preview. 
%package plugins-python Summary: ARC Python dependent plugin Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %if %{py3default} Requires: python%{python3_pkgversion}-%{name} = %{version}-%{release} %else Requires: python2-%{name} = %{version}-%{release} %endif %description plugins-python NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC plugins dependent on Python. %if %{with_acix} %package acix-core Summary: ARC cache index - core Group: System Environment/Libraries %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif %if %{py3default} Requires: python3-twisted Requires: python3-pyOpenSSL %else %if %{?fedora}%{!?fedora:0} || %{?rhel}%{!?rhel:0} >= 8 || %{?suse_version:1}%{!?suse_version:0} Requires: python-twisted %else Requires: python-twisted-core Requires: python-twisted-web %endif %if %{?suse_version:1}%{!?suse_version:0} Requires: python-openssl %else Requires: pyOpenSSL %endif %endif %description acix-core NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Core components of the ARC Cache Index (ACIX). %package acix-scanner Summary: ARC cache index - scanner server Group: System Environment/Libraries %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif Requires: %{name} = %{version}-%{release} Requires: %{name}-acix-core = %{version}-%{release} Requires: %{name}-arcctl-service = %{version}-%{release} Obsoletes: %{name}-acix-cache < 6.0.0 Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description acix-scanner NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Cache scanner component of the ARC Cache Index (ACIX), usually installed alongside A-REX. This component collects information on the content of an A-REX cache. %package acix-index Summary: ARC cache index - index server Group: System Environment/Libraries %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif Requires: %{name} = %{version}-%{release} Requires: %{name}-arcctl-service = %{version}-%{release} Requires: %{name}-acix-core = %{version}-%{release} Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description acix-index NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Index server component of the ARC Cache Index (ACIX), usually installed independently of any A-REX installation. This component pulls cache content from ACIX cache scanner servers and can be queried by clients for the location of cached files. %endif %package devel Summary: ARC development files Group: Development/Libraries Requires: %{name} = %{version}-%{release} Requires: %{glibmm2_devel} Requires: glib2-devel Requires: libxml2-devel Requires: openssl-devel %description devel NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Header files and libraries needed to develop applications using ARC. 
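# As a rough illustration: a standalone client application can be linked against the installed shared libraries along the lines of # g++ myapp.cpp -o myapp -larccompute -larccommon # (myapp.cpp is a placeholder; exact compiler and include flags depend on the installation; libarccompute and libarccommon are among the runtime libraries listed in the files section of the main package below).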
%if %{with_python2} %package -n python2-%{name} Summary: ARC Python 2 wrapper Group: Development/Libraries %{?python_provide:%python_provide python2-%{name}} Provides: %{name}-python = %{version}-%{release} Obsoletes: %{name}-python < 5.3.3 Requires: %{name} = %{version}-%{release} %description -n python2-%{name} NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Python 2 bindings for ARC. %endif %if %{with_python3} %package -n python%{python3_pkgversion}-%{name} Summary: ARC Python 3 wrapper Group: Development/Libraries %{?python_provide:%python_provide python%{python3_pkgversion}-%{name}} Provides: %{name}-python%{python3_pkgversion} = %{version}-%{release} Obsoletes: %{name}-python%{python3_pkgversion} < 5.3.3 Requires: %{name} = %{version}-%{release} %description -n python%{python3_pkgversion}-%{name} NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Python 3 bindings for ARC. %endif %package nordugridmap Summary: ARC's nordugridmap tool Group: Applications/Internet Requires: crontabs Obsoletes: %{name}-gridmap-utils < 6.0.0 %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif %description nordugridmap NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). A simple tool to fetch list of users and eventually generate gridmap files. %package test-utils Summary: ARC test tools Group: Applications/Internet Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-needed = %{version}-%{release} Obsoletes: %{name}-misc-utils < 6.0.0 %description test-utils NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains a few utilities useful to test various ARC subsystems. The package is not required by users or sysadmins and it is mainly for developers. %package archery-manage Summary: ARCHERY administration tool Group: Applications/Internet %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif %if %{?fedora}%{!?fedora:0} <= 13 && %{?rhel}%{!?rhel:0} <= 6 Requires: python-argparse %endif %if %{py3default} Requires: python3-dns Requires: python3-ldap %else Requires: python-dns Requires: python-ldap %endif %description archery-manage NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the archery-manage utility for administration of an ARCHERY DNS-embedded service endpoint registry. %package wn Summary: ARC optional worker nodes components Group: Applications/Internet %description wn NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the optional components that provide new job management features on the worker nodes (WN). %prep %setup @fedorasetupopts@ %build if pkg-config --atleast-version 2.6 sigc++-2.0 ; then if [ `echo __GNUC__ | gcc -E - | tail -1` -lt 6 ] ; then # Workaround for too new libsigc++/glibmm, too old gcc combination export CXXFLAGS="%{optflags} -std=c++11" fi fi %configure --disable-static \ %if ! 
%{with_acix} --disable-acix \ %endif %if %{with_gfal} --enable-gfal \ %endif %if %{with_s3} --enable-s3 \ %endif %if %{py3default} --with-python=python3 \ %if %{with_python2} --with-altpython=python2 \ %endif %else --with-python=python2 \ %if %{with_python3} --with-altpython=python3 \ %endif %endif %if ! %{with_xrootd} --disable-xrootd \ %endif %if %{with_pythonlrms} --with-inline-python \ %endif %if ! %{with_ldns} --disable-ldns \ %endif --enable-internal \ %if %{use_systemd} --enable-systemd \ --with-systemd-units-location=%{_unitdir} \ %endif %if ! %{with_ldap_service} --disable-ldap-service \ %endif --disable-doc \ --docdir=%{_pkgdocdir} make %{?_smp_mflags} %check make %{?_smp_mflags} check %install rm -rf $RPM_BUILD_ROOT make install DESTDIR=$RPM_BUILD_ROOT # Install Logrotate. mkdir -p $RPM_BUILD_ROOT%{_sysconfdir}/logrotate.d install -p -m 644 debian/%{name}-arex.logrotate \ $RPM_BUILD_ROOT%{_sysconfdir}/logrotate.d/%{name}-arex install -p -m 644 debian/%{name}-gridftpd.logrotate \ $RPM_BUILD_ROOT%{_sysconfdir}/logrotate.d/%{name}-gridftpd %if %{with_ldap_service} install -p -m 644 debian/%{name}-infosys-ldap.logrotate \ $RPM_BUILD_ROOT%{_sysconfdir}/logrotate.d/%{name}-infosys-ldap %endif install -p -m 644 debian/%{name}-datadelivery-service.logrotate \ $RPM_BUILD_ROOT%{_sysconfdir}/logrotate.d/%{name}-datadelivery-service find $RPM_BUILD_ROOT -type f -name \*.la -exec rm -fv '{}' ';' # The py-compile script in the source tarball is old (RHEL 6) # It does the wrong thing for python 3 - remove and let rpmbuild do it right find $RPM_BUILD_ROOT -type f -name \*.pyc -exec rm -fv '{}' ';' find $RPM_BUILD_ROOT -type f -name \*.pyo -exec rm -fv '{}' ';' # libarcglobusutils is not part of the ARC api. find $RPM_BUILD_ROOT -name libarcglobusutils.so -exec rm -fv '{}' ';' # Create log directory mkdir -p $RPM_BUILD_ROOT%{_localstatedir}/log/arc # Create spool directories for Jura mkdir -p $RPM_BUILD_ROOT%{_localstatedir}/spool/arc mkdir -p $RPM_BUILD_ROOT%{_localstatedir}/spool/arc/ssm mkdir -p $RPM_BUILD_ROOT%{_localstatedir}/spool/arc/urs %find_lang %{name} # Remove examples and let RPM package them under /usr/share/doc using the doc macro rm -rf $RPM_BUILD_ROOT%{_datadir}/%{pkgdir}/examples make -C src/libs/data-staging/examples DESTDIR=$PWD/docdir/devel pkgdatadir= install-exampleDATA make -C src/hed/libs/compute/examples DESTDIR=$PWD/docdir/devel pkgdatadir= install-exampleDATA make -C src/hed/libs/data/examples DESTDIR=$PWD/docdir/devel pkgdatadir= install-exampleDATA make -C src/hed/acc/PythonBroker DESTDIR=$PWD/docdir/python pkgdatadir= install-exampleDATA make -C python/examples DESTDIR=$PWD/docdir/devel pkgdatadir= install-exampleDATA make -C src/tests/echo DESTDIR=$PWD/docdir/hed pkgdatadir= install-exampleDATA make -C src/hed DESTDIR=$PWD/docdir/hed pkgdatadir= install-profileDATA # client.conf needs special handling make -C src/clients DESTDIR=$RPM_BUILD_ROOT install-exampleDATA # Link to client.conf from doc ln -s %{_datadir}/%{pkgdir}/examples/client.conf $PWD/docdir/client.conf %clean rm -rf $RPM_BUILD_ROOT %post -p /sbin/ldconfig %postun -p /sbin/ldconfig %post plugins-globus-common -p /sbin/ldconfig %postun plugins-globus-common -p /sbin/ldconfig %post hed %enable_service arched %preun hed %stop_on_removal arched %postun hed %condrestart_on_update arched %post arex %enable_service arc-arex %enable_service arc-arex-ws # out-of-package testing host certificate if [ $1 -eq 1 ]; then arcctl test-ca init arcctl test-ca hostcert fi %preun arex %stop_on_removal arc-arex 
%stop_on_removal arc-arex-ws if [ $1 -eq 0 ]; then arcctl test-ca cleanup fi %postun arex %condrestart_on_update arc-arex %condrestart_on_update arc-arex-ws %post gridftpd %enable_service arc-gridftpd %preun gridftpd %stop_on_removal arc-gridftpd %postun gridftpd %condrestart_on_update arc-gridftpd %post datadelivery-service %enable_service arc-datadelivery-service %preun datadelivery-service %stop_on_removal arc-datadelivery-service %postun datadelivery-service %condrestart_on_update arc-datadelivery-service %if %{with_ldap_service} %post infosys-ldap %enable_service arc-infosys-ldap %if %{?fedora}%{!?fedora:0} >= 5 || %{?rhel}%{!?rhel:0} >= 5 semanage port -a -t ldap_port_t -p tcp 2135 2>/dev/null || : semanage fcontext -a -t slapd_etc_t "/var/run/arc/infosys/bdii-slapd\.conf" 2>/dev/null || : semanage fcontext -a -t slapd_db_t "/var/lib/arc/bdii/db(/.*)?" 2>/dev/null || : semanage fcontext -a -t slapd_var_run_t "/var/run/arc/bdii/db(/.*)?" 2>/dev/null || : %endif %preun infosys-ldap %stop_on_removal arc-infosys-ldap %postun infosys-ldap %condrestart_on_update arc-infosys-ldap %if %{?fedora}%{!?fedora:0} >= 5 || %{?rhel}%{!?rhel:0} >= 5 if [ $1 -eq 0 ]; then semanage port -d -t ldap_port_t -p tcp 2135 2>/dev/null || : semanage fcontext -d -t slapd_etc_t "/var/run/arc/infosys/bdii-slapd\.conf" 2>/dev/null || : semanage fcontext -d -t slapd_db_t "/var/lib/arc/bdii/db(/.*)?" 2>/dev/null || : semanage fcontext -d -t slapd_var_run_t "/var/run/arc/bdii/db(/.*)?" 2>/dev/null || : fi %endif %triggerpostun infosys-ldap -- %{name}-ldap-infosys # Uninstalling the old %{name}-ldap-infosys will remove some selinux config # for %{name}-infosys-ldap - put them back in this triggerpostun script semanage port -a -t ldap_port_t -p tcp 2135 2>/dev/null || : semanage fcontext -a -t slapd_etc_t "/var/run/arc/infosys/bdii-slapd\.conf" 2>/dev/null || : %triggerpostun infosys-ldap -- %{name}-aris # Uninstalling the old %{name}-aris will remove some selinux config # for %{name}-infosys-ldap - put them back in this triggerpostun script semanage fcontext -a -t slapd_db_t "/var/lib/arc/bdii/db(/.*)?" 2>/dev/null || : semanage fcontext -a -t slapd_var_run_t "/var/run/arc/bdii/db(/.*)?" 
2>/dev/null || : %triggerun infosys-ldap -- bdii %if %{?suse_version:1}%{!?suse_version:0} FIRST_ARG=1 %restart_on_update arc-infosys-ldap %else service arc-infosys-ldap condrestart > /dev/null 2>&1 || : %endif %endif %if %{with_acix} %post acix-scanner %enable_service arc-acix-scanner %preun acix-scanner %stop_on_removal arc-acix-scanner %postun acix-scanner %condrestart_on_update arc-acix-scanner %post acix-index %enable_service arc-acix-index %preun acix-index %stop_on_removal arc-acix-index %postun acix-index %condrestart_on_update arc-acix-index %endif %files -f %{name}.lang %defattr(-,root,root,-) %doc src/doc/arc.conf.reference src/doc/arc.conf.DELETED src/doc/arc.conf.DELETED-6.8.0 %doc README AUTHORS LICENSE NOTICE %{_libdir}/libarccompute.so.* %{_libdir}/libarccommunication.so.* %{_libdir}/libarccommon.so.* %{_libdir}/libarccredential.so.* %{_libdir}/libarccredentialstore.so.* %{_libdir}/libarccrypto.so.* %{_libdir}/libarcdata.so.* %{_libdir}/libarcdatastaging.so.* %{_libdir}/libarcloader.so.* %{_libdir}/libarcmessage.so.* %{_libdir}/libarcsecurity.so.* %{_libdir}/libarcotokens.so.* %{_libdir}/libarcinfosys.so.* %{_libdir}/libarcwsaddressing.so.* %{_libdir}/libarcwssecurity.so.* %if %{with_xmlsec1} %{_libdir}/libarcxmlsec.so.* %endif %dir %{_libdir}/%{pkgdir} # We need to have libmodcrypto.so close to libarccrypto %{_libdir}/%{pkgdir}/libmodcrypto.so %{_libdir}/%{pkgdir}/libmodcrypto.apd # We need to have libmodcredential.so close to libarccredential %{_libdir}/%{pkgdir}/libmodcredential.so %{_libdir}/%{pkgdir}/libmodcredential.apd %{_libdir}/%{pkgdir}/arc-file-access %{_libdir}/%{pkgdir}/arc-hostname-resolver %{_libdir}/%{pkgdir}/DataStagingDelivery %{_libdir}/%{pkgdir}/arc-dmc %dir %{_libexecdir}/%{pkgdir} %{_libexecdir}/%{pkgdir}/arcconfig-parser %if %{py3default} %dir %{python3_sitearch}/%{pkgdir} %{python3_sitearch}/%{pkgdir}/utils %{python3_sitearch}/%{pkgdir}/__init__.py %{python3_sitearch}/%{pkgdir}/paths.py %{python3_sitearch}/%{pkgdir}/paths_dist.py %dir %{python3_sitearch}/%{pkgdir}/__pycache__ %{python3_sitearch}/%{pkgdir}/__pycache__/__init__.* %{python3_sitearch}/%{pkgdir}/__pycache__/paths.* %{python3_sitearch}/%{pkgdir}/__pycache__/paths_dist.* %else %dir %{python2_sitearch}/%{pkgdir} %{python2_sitearch}/%{pkgdir}/utils %{python2_sitearch}/%{pkgdir}/__init__.py* %{python2_sitearch}/%{pkgdir}/paths.py* %{python2_sitearch}/%{pkgdir}/paths_dist.py* %endif %dir %{_datadir}/%{pkgdir} %{_datadir}/%{pkgdir}/arc.parser.defaults %dir %{_datadir}/%{pkgdir}/test-jobs %{_datadir}/%{pkgdir}/test-jobs/test-job-* %{_datadir}/%{pkgdir}/schema %files client %defattr(-,root,root,-) %doc docdir/client.conf %{_bindir}/arccat %{_bindir}/arcclean %{_bindir}/arccp %{_bindir}/arcget %{_bindir}/arcinfo %{_bindir}/arckill %{_bindir}/arcls %{_bindir}/arcmkdir %{_bindir}/arcrename %{_bindir}/arcproxy %{_bindir}/arcrenew %{_bindir}/arcresub %{_bindir}/arcresume %{_bindir}/arcrm %{_bindir}/arcstat %{_bindir}/arcsub %{_bindir}/arcsync %{_bindir}/arctest %dir %{_datadir}/%{pkgdir}/examples %{_datadir}/%{pkgdir}/examples/client.conf %dir %{_sysconfdir}/%{pkgdir} %config(noreplace) %{_sysconfdir}/%{pkgdir}/client.conf %doc %{_mandir}/man1/arccat.1* %doc %{_mandir}/man1/arcclean.1* %doc %{_mandir}/man1/arccp.1* %doc %{_mandir}/man1/arcget.1* %doc %{_mandir}/man1/arcinfo.1* %doc %{_mandir}/man1/arckill.1* %doc %{_mandir}/man1/arcls.1* %doc %{_mandir}/man1/arcmkdir.1* %doc %{_mandir}/man1/arcrename.1* %doc %{_mandir}/man1/arcproxy.1* %doc %{_mandir}/man1/arcrenew.1* %doc %{_mandir}/man1/arcresub.1* 
%doc %{_mandir}/man1/arcresume.1* %doc %{_mandir}/man1/arcrm.1* %doc %{_mandir}/man1/arcstat.1* %doc %{_mandir}/man1/arcsub.1* %doc %{_mandir}/man1/arcsync.1* %doc %{_mandir}/man1/arctest.1* %dir %{_bashcompdir} %{_bashcompdir}/arc-client-tools %files hed %defattr(-,root,root,-) %doc docdir/hed/* %if %{use_systemd} %{_unitdir}/arched.service %else %{_initrddir}/arched %endif %{_sbindir}/arched %{_libdir}/%{pkgdir}/libecho.so %{_libdir}/%{pkgdir}/libecho.apd %{_datadir}/%{pkgdir}/arched-start %{_datadir}/%{pkgdir}/profiles %doc %{_mandir}/man8/arched.8* %doc %{_mandir}/man5/arc.conf.5* %files gridftpd %defattr(-,root,root,-) %if %{use_systemd} %{_unitdir}/arc-gridftpd.service %else %{_initrddir}/arc-gridftpd %endif %{_sbindir}/gridftpd %{_libdir}/%{pkgdir}/jobsplugin.* %{_libdir}/%{pkgdir}/filedirplugin.* %{_datadir}/%{pkgdir}/arc-gridftpd-start %config(noreplace) %{_sysconfdir}/logrotate.d/%{name}-gridftpd %doc %{_mandir}/man8/gridftpd.8* %files datadelivery-service %defattr(-,root,root,-) %if %{use_systemd} %{_unitdir}/arc-datadelivery-service.service %else %{_initrddir}/arc-datadelivery-service %endif %{_libdir}/%{pkgdir}/libdatadeliveryservice.so %{_libdir}/%{pkgdir}/libdatadeliveryservice.apd %{_datadir}/%{pkgdir}/arc-datadelivery-service-start %config(noreplace) %{_sysconfdir}/logrotate.d/%{name}-datadelivery-service %if %{with_ldap_service} %files infosys-ldap %defattr(-,root,root,-) %if %{use_systemd} %{_unitdir}/arc-infosys-ldap.service %{_unitdir}/arc-infosys-ldap-slapd.service %else %{_initrddir}/arc-infosys-ldap %endif %{_datadir}/%{pkgdir}/create-bdii-config %{_datadir}/%{pkgdir}/create-slapd-config %{_datadir}/%{pkgdir}/glue-generator.pl %{_datadir}/%{pkgdir}/glite-info-provider-ldap %{_datadir}/%{pkgdir}/ldap-schema %config(noreplace) %{_sysconfdir}/logrotate.d/%{name}-infosys-ldap %endif %files monitor %defattr(-,root,root,-) %{_datadir}/%{pkgdir}/monitor %doc %{_mandir}/man7/monitor.7* %files arcctl %{_sbindir}/arcctl %if %{py3default} %dir %{python3_sitearch}/%{pkgdir}/control %{python3_sitearch}/%{pkgdir}/control/__init__.py %{python3_sitearch}/%{pkgdir}/control/CertificateGenerator.py %{python3_sitearch}/%{pkgdir}/control/ControlCommon.py %{python3_sitearch}/%{pkgdir}/control/OSPackage.py %{python3_sitearch}/%{pkgdir}/control/TestCA.py %{python3_sitearch}/%{pkgdir}/control/ThirdPartyDeployment.py %dir %{python3_sitearch}/%{pkgdir}/control/__pycache__ %{python3_sitearch}/%{pkgdir}/control/__pycache__/__init__.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/CertificateGenerator.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/ControlCommon.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/OSPackage.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/TestCA.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/ThirdPartyDeployment.* %else %dir %{python2_sitearch}/%{pkgdir}/control %{python2_sitearch}/%{pkgdir}/control/__init__.py* %{python2_sitearch}/%{pkgdir}/control/CertificateGenerator.py* %{python2_sitearch}/%{pkgdir}/control/ControlCommon.py* %{python2_sitearch}/%{pkgdir}/control/OSPackage.py* %{python2_sitearch}/%{pkgdir}/control/TestCA.py* %{python2_sitearch}/%{pkgdir}/control/ThirdPartyDeployment.py* %endif %doc %{_mandir}/man1/arcctl.1* %files arcctl-service %if %{py3default} %{python3_sitearch}/%{pkgdir}/control/Config.py %{python3_sitearch}/%{pkgdir}/control/ServiceCommon.py %{python3_sitearch}/%{pkgdir}/control/Services.py %{python3_sitearch}/%{pkgdir}/control/OSService.py %{python3_sitearch}/%{pkgdir}/control/Validator.py 
%{python3_sitearch}/%{pkgdir}/control/__pycache__/Config.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/ServiceCommon.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/Services.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/OSService.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/Validator.* %else %{python2_sitearch}/%{pkgdir}/control/Config.py* %{python2_sitearch}/%{pkgdir}/control/ServiceCommon.py* %{python2_sitearch}/%{pkgdir}/control/Services.py* %{python2_sitearch}/%{pkgdir}/control/OSService.py* %{python2_sitearch}/%{pkgdir}/control/Validator.py* %endif %files arex %defattr(-,root,root,-) %if %{use_systemd} %{_unitdir}/arc-arex.service %{_unitdir}/arc-arex-ws.service %else %{_initrddir}/arc-arex %{_initrddir}/arc-arex-ws %endif %{_libexecdir}/%{pkgdir}/cache-clean %{_libexecdir}/%{pkgdir}/cache-list %{_libexecdir}/%{pkgdir}/jura-ng %{_libexecdir}/%{pkgdir}/gm-delegations-converter %{_libexecdir}/%{pkgdir}/gm-jobs %{_libexecdir}/%{pkgdir}/gm-kick %{_libexecdir}/%{pkgdir}/smtp-send %{_libexecdir}/%{pkgdir}/smtp-send.sh %{_libexecdir}/%{pkgdir}/inputcheck %{_libexecdir}/%{pkgdir}/arc-config-check %{_libexecdir}/%{pkgdir}/arc-blahp-logger %{_datadir}/%{pkgdir}/cancel-*-job %{_datadir}/%{pkgdir}/scan-*-job %{_datadir}/%{pkgdir}/submit-*-job %{_libdir}/%{pkgdir}/libarex.so %{_libdir}/%{pkgdir}/libarex.apd %{_libdir}/%{pkgdir}/libcandypond.so %{_libdir}/%{pkgdir}/libcandypond.apd %{_datadir}/%{pkgdir}/CEinfo.pl %{_datadir}/%{pkgdir}/ARC0mod.pm %{_datadir}/%{pkgdir}/FORKmod.pm %{_datadir}/%{pkgdir}/Fork.pm %{_datadir}/%{pkgdir}/SGEmod.pm %{_datadir}/%{pkgdir}/SGE.pm %{_datadir}/%{pkgdir}/LL.pm %{_datadir}/%{pkgdir}/LSF.pm %{_datadir}/%{pkgdir}/PBS.pm %{_datadir}/%{pkgdir}/PBSPRO.pm %{_datadir}/%{pkgdir}/Condor.pm %{_datadir}/%{pkgdir}/SLURMmod.pm %{_datadir}/%{pkgdir}/SLURM.pm %{_datadir}/%{pkgdir}/Boinc.pm %{_datadir}/%{pkgdir}/XmlPrinter.pm %{_datadir}/%{pkgdir}/InfosysHelper.pm %{_datadir}/%{pkgdir}/LdifPrinter.pm %{_datadir}/%{pkgdir}/GLUE2xmlPrinter.pm %{_datadir}/%{pkgdir}/GLUE2ldifPrinter.pm %{_datadir}/%{pkgdir}/NGldifPrinter.pm %{_datadir}/%{pkgdir}/ARC0ClusterInfo.pm %{_datadir}/%{pkgdir}/ARC1ClusterInfo.pm %{_datadir}/%{pkgdir}/ConfigCentral.pm %{_datadir}/%{pkgdir}/GMJobsInfo.pm %{_datadir}/%{pkgdir}/HostInfo.pm %{_datadir}/%{pkgdir}/RTEInfo.pm %{_datadir}/%{pkgdir}/InfoChecker.pm %{_datadir}/%{pkgdir}/IniParser.pm %{_datadir}/%{pkgdir}/LRMSInfo.pm %{_datadir}/%{pkgdir}/Sysinfo.pm %{_datadir}/%{pkgdir}/LogUtils.pm %{_datadir}/%{pkgdir}/condor_env.pm %{_datadir}/%{pkgdir}/cancel_common.sh %{_datadir}/%{pkgdir}/configure-*-env.sh %{_datadir}/%{pkgdir}/submit_common.sh %{_datadir}/%{pkgdir}/scan_common.sh %{_datadir}/%{pkgdir}/lrms_common.sh %{_datadir}/%{pkgdir}/perferator %{_datadir}/%{pkgdir}/PerfData.pl %{_datadir}/%{pkgdir}/arc-arex-start %{_datadir}/%{pkgdir}/arc-arex-ws-start %{_datadir}/%{pkgdir}/sql-schema/arex_accounting_db_schema_v1.sql %doc %{_mandir}/man1/arc-config-check.1* %doc %{_mandir}/man1/cache-clean.1* %doc %{_mandir}/man1/cache-list.1* %doc %{_mandir}/man8/gm-delegations-converter.8* %doc %{_mandir}/man8/gm-jobs.8* %doc %{_mandir}/man8/arc-blahp-logger.8* %doc %{_mandir}/man8/a-rex-backtrace-collect.8* %config(noreplace) %{_sysconfdir}/logrotate.d/%{name}-arex %dir %{_localstatedir}/log/arc %dir %{_localstatedir}/spool/arc %dir %{_localstatedir}/spool/arc/ssm %dir %{_localstatedir}/spool/arc/urs %if %{py3default} %{python3_sitearch}/%{pkgdir}/control/AccountingDB.py %{python3_sitearch}/%{pkgdir}/control/AccountingPublishing.py 
%{python3_sitearch}/%{pkgdir}/control/Accounting.py %{python3_sitearch}/%{pkgdir}/control/Cache.py %{python3_sitearch}/%{pkgdir}/control/DataStaging.py %{python3_sitearch}/%{pkgdir}/control/Jobs.py %{python3_sitearch}/%{pkgdir}/control/RunTimeEnvironment.py %{python3_sitearch}/%{pkgdir}/control/__pycache__/AccountingDB.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/AccountingPublishing.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/Accounting.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/Cache.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/DataStaging.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/Jobs.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/RunTimeEnvironment.* %else %{python2_sitearch}/%{pkgdir}/control/AccountingDB.py* %{python2_sitearch}/%{pkgdir}/control/AccountingPublishing.py* %{python2_sitearch}/%{pkgdir}/control/Accounting.py* %{python2_sitearch}/%{pkgdir}/control/Cache.py* %{python2_sitearch}/%{pkgdir}/control/DataStaging.py* %{python2_sitearch}/%{pkgdir}/control/Jobs.py* %{python2_sitearch}/%{pkgdir}/control/RunTimeEnvironment.py* %endif %{_libexecdir}/%{pkgdir}/arccandypond %dir %{_datadir}/%{pkgdir}/rte %dir %{_datadir}/%{pkgdir}/rte/ENV %{_datadir}/%{pkgdir}/rte/ENV/LRMS-SCRATCH %{_datadir}/%{pkgdir}/rte/ENV/PROXY %{_datadir}/%{pkgdir}/rte/ENV/RTE %{_datadir}/%{pkgdir}/rte/ENV/CANDYPOND %{_datadir}/%{pkgdir}/rte/ENV/SINGULARITY %dir %{_datadir}/%{pkgdir}/rte/ENV/CONDOR %{_datadir}/%{pkgdir}/rte/ENV/CONDOR/DOCKER %{_sbindir}/a-rex-backtrace-collect %config(noreplace) %{_sysconfdir}/arc.conf %if %{with_pythonlrms} %files arex-python-lrms %defattr(-,root,root,-) %{_libexecdir}/%{pkgdir}/arc-sshfs-mount %if %{py3default} %{python3_sitearch}/%{pkgdir}/lrms %else %{python2_sitearch}/%{pkgdir}/lrms %endif %{_datadir}/%{pkgdir}/SLURMPYmod.pm %{_datadir}/%{pkgdir}/job_script.stubs %endif %files community-rtes %defattr(-,root,root,-) %{_datadir}/%{pkgdir}/community_rtes.sh %if %{py3default} %{python3_sitearch}/%{pkgdir}/control/CommunityRTE.py %{python3_sitearch}/%{pkgdir}/control/__pycache__/CommunityRTE.* %else %{python2_sitearch}/%{pkgdir}/control/CommunityRTE.py* %endif %files plugins-needed %defattr(-,root,root,-) %dir %{_libdir}/%{pkgdir}/test %{_libdir}/%{pkgdir}/test/libaccTEST.so %{_libdir}/%{pkgdir}/libaccBroker.so %{_libdir}/%{pkgdir}/libaccEMIES.so %{_libdir}/%{pkgdir}/libaccJobDescriptionParser.so %if %{with_ldns} %{_libdir}/%{pkgdir}/libaccARCHERY.so %endif %{_libdir}/%{pkgdir}/libaccLDAP.so %{_libdir}/%{pkgdir}/libarcshc.so %{_libdir}/%{pkgdir}/libarcshclegacy.so %{_libdir}/%{pkgdir}/libarcshcotokens.so %{_libdir}/%{pkgdir}/libdmcfile.so %{_libdir}/%{pkgdir}/libdmchttp.so %{_libdir}/%{pkgdir}/libdmcldap.so %{_libdir}/%{pkgdir}/libdmcsrm.so %{_libdir}/%{pkgdir}/libdmcrucio.so %{_libdir}/%{pkgdir}/libdmcacix.so %{_libdir}/%{pkgdir}/libidentitymap.so %{_libdir}/%{pkgdir}/libarguspdpclient.so %{_libdir}/%{pkgdir}/libmcchttp.so %{_libdir}/%{pkgdir}/libmccmsgvalidator.so %{_libdir}/%{pkgdir}/libmccsoap.so %{_libdir}/%{pkgdir}/libmcctcp.so %{_libdir}/%{pkgdir}/libmcctls.so %{_libdir}/%{pkgdir}/test/libaccTEST.apd %{_libdir}/%{pkgdir}/libaccBroker.apd %{_libdir}/%{pkgdir}/libaccEMIES.apd %{_libdir}/%{pkgdir}/libaccJobDescriptionParser.apd %if %{with_ldns} %{_libdir}/%{pkgdir}/libaccARCHERY.apd %endif %{_libdir}/%{pkgdir}/libaccLDAP.apd %{_libdir}/%{pkgdir}/libarcshc.apd %{_libdir}/%{pkgdir}/libarcshclegacy.apd %{_libdir}/%{pkgdir}/libarcshcotokens.apd %{_libdir}/%{pkgdir}/libdmcfile.apd %{_libdir}/%{pkgdir}/libdmchttp.apd 
%{_libdir}/%{pkgdir}/libdmcldap.apd %{_libdir}/%{pkgdir}/libdmcsrm.apd %{_libdir}/%{pkgdir}/libdmcrucio.apd %{_libdir}/%{pkgdir}/libdmcacix.apd %{_libdir}/%{pkgdir}/libidentitymap.apd %{_libdir}/%{pkgdir}/libarguspdpclient.apd %{_libdir}/%{pkgdir}/libmcchttp.apd %{_libdir}/%{pkgdir}/libmccmsgvalidator.apd %{_libdir}/%{pkgdir}/libmccsoap.apd %{_libdir}/%{pkgdir}/libmcctcp.apd %{_libdir}/%{pkgdir}/libmcctls.apd %files plugins-globus %defattr(-,root,root,-) %files plugins-globus-common %defattr(-,root,root,-) %{_libdir}/libarcglobusutils.so.* %files plugins-gridftp %defattr(-,root,root,-) %{_libdir}/%{pkgdir}/arc-dmcgridftp %{_libdir}/%{pkgdir}/libdmcgridftpdeleg.so %{_libdir}/%{pkgdir}/libdmcgridftpdeleg.apd %files plugins-lcas-lcmaps %defattr(-,root,root,-) %{_libexecdir}/%{pkgdir}/arc-lcas %{_libexecdir}/%{pkgdir}/arc-lcmaps %files plugins-gridftpjob %defattr(-,root,root,-) %{_libdir}/%{pkgdir}/libaccGRIDFTPJOB.so %{_libdir}/%{pkgdir}/libaccGRIDFTPJOB.apd %if %{with_xrootd} %files plugins-xrootd %defattr(-,root,root,-) %dir %{_libdir}/%{pkgdir}/external %{_libdir}/%{pkgdir}/external/libdmcxrootd.so %{_libdir}/%{pkgdir}/external/libdmcxrootd.apd %{_libdir}/%{pkgdir}/libdmcxrootddeleg.so %{_libdir}/%{pkgdir}/libdmcxrootddeleg.apd %endif %if %{with_gfal} %files plugins-gfal %defattr(-,root,root,-) %dir %{_libdir}/%{pkgdir}/external %{_libdir}/%{pkgdir}/external/libdmcgfal.so %{_libdir}/%{pkgdir}/external/libdmcgfal.apd %{_libdir}/%{pkgdir}/libdmcgfaldeleg.so %{_libdir}/%{pkgdir}/libdmcgfaldeleg.apd %endif %if %{with_s3} %files plugins-s3 %defattr(-,root,root,-) %{_libdir}/%{pkgdir}/libdmcs3.so %{_libdir}/%{pkgdir}/libdmcs3.apd %endif %files plugins-internal %defattr(-,root,root,-) %{_libdir}/%{pkgdir}/libaccINTERNAL.so %{_libdir}/%{pkgdir}/libaccINTERNAL.apd %files plugins-arcrest %defattr(-,root,root,-) %{_libdir}/%{pkgdir}/libaccARCREST.so %{_libdir}/%{pkgdir}/libaccARCREST.apd %files plugins-python %defattr(-,root,root,-) %doc docdir/python/* %{_libdir}/%{pkgdir}/libaccPythonBroker.so %{_libdir}/%{pkgdir}/libaccPythonBroker.apd %{_libdir}/%{pkgdir}/libpythonservice.so %{_libdir}/%{pkgdir}/libpythonservice.apd %if %{with_acix} %files acix-core %defattr(-,root,root,-) %if %{py3default} %dir %{python3_sitelib}/acix %{python3_sitelib}/acix/__init__.py %dir %{python3_sitelib}/acix/__pycache__ %{python3_sitelib}/acix/__pycache__/__init__.* %{python3_sitelib}/acix/core %else %dir %{python2_sitelib}/acix %{python2_sitelib}/acix/__init__.py* %{python2_sitelib}/acix/core %endif %files acix-scanner %defattr(-,root,root,-) %if %{py3default} %{python3_sitelib}/acix/scanner %else %{python2_sitelib}/acix/scanner %endif %if %{use_systemd} %{_unitdir}/arc-acix-scanner.service %else %{_initrddir}/arc-acix-scanner %endif %{_datadir}/%{pkgdir}/arc-acix-scanner-start %files acix-index %defattr(-,root,root,-) %if %{py3default} %{python3_sitelib}/acix/indexserver %else %{python2_sitelib}/acix/indexserver %endif %if %{use_systemd} %{_unitdir}/arc-acix-index.service %else %{_initrddir}/arc-acix-index %endif %{_datadir}/%{pkgdir}/arc-acix-index-start %endif %files devel %defattr(-,root,root,-) %doc docdir/devel/* src/hed/shc/arcpdp/*.xsd %{_includedir}/%{pkgdir} %{_libdir}/lib*.so %{_bindir}/wsdl2hed %doc %{_mandir}/man1/wsdl2hed.1* %{_bindir}/arcplugin %doc %{_mandir}/man1/arcplugin.1* %if %{with_python2} %files -n python2-%{name} %defattr(-,root,root,-) %{python2_sitearch}/_arc.*so %if %{py3default} %dir %{python2_sitearch}/%{pkgdir} %{python2_sitearch}/%{pkgdir}/__init__.py* %endif 
%{python2_sitearch}/%{pkgdir}/[^_p]*.py* %endif %if %{with_python3} %files -n python%{python3_pkgversion}-%{name} %defattr(-,root,root,-) %{python3_sitearch}/_arc.*so %if %{?fedora}%{!?fedora:0} >= 15 || %{?rhel}%{!?rhel:0} >= 7 # Python >= 3.2 uses __pycache__ %if ! %{py3default} %dir %{python3_sitearch}/%{pkgdir} %{python3_sitearch}/%{pkgdir}/__init__.py %dir %{python3_sitearch}/%{pkgdir}/__pycache__ %{python3_sitearch}/%{pkgdir}/__pycache__/__init__.* %endif %{python3_sitearch}/%{pkgdir}/[^_p]*.py %{python3_sitearch}/%{pkgdir}/__pycache__/[^_p]*.* %else # Python 3.1 doesn't use __pycache__ %dir %{python3_sitearch}/%{pkgdir} %{python3_sitearch}/%{pkgdir}/__init__.py* %{python3_sitearch}/%{pkgdir}/[^_p]*.py* %endif %endif %files nordugridmap %defattr(-,root,root,-) %{_sbindir}/nordugridmap %config(noreplace) %{_sysconfdir}/cron.d/nordugridmap %doc %{_mandir}/man8/nordugridmap.8* %files test-utils %defattr(-,root,root,-) %{_bindir}/arcemiestest %{_bindir}/arcperftest %doc %{_mandir}/man1/arcemiestest.1* %doc %{_mandir}/man1/arcperftest.1* %files archery-manage %defattr(-,root,root,-) %{_sbindir}/archery-manage %files wn %defattr(-,root,root,-) %attr(4755,root,root) %{_bindir}/arc-job-cgroup nordugrid-arc-6.14.0/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153420017377 xustar000000000000000030 mtime=1638455056.952924655 30 atime=1638455087.976390798 30 ctime=1638455095.697506811 nordugrid-arc-6.14.0/Makefile.in0000644000175000002070000010067014152153420017370 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ # /opt/local is the location for macports on MacOS X VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = . DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/configure $(am__configure_deps) \ $(srcdir)/config.h.in \ $(top_srcdir)/include/arc/ArcVersion.h.in \ $(top_srcdir)/src/hed/profiles/general/general.xml.in \ $(top_srcdir)/src/services/a-rex/rte/ENV/PROXY.in \ $(top_srcdir)/src/services/a-rex/rte/ENV/CANDYPOND.in \ $(srcdir)/nordugrid-arc.spec.in ABOUT-NLS AUTHORS README \ config.guess config.rpath config.sub install-sh missing \ ltmain.sh ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ configure.lineno config.status.lineno mkinstalldirs = $(install_sh) -d CONFIG_HEADER = config.h CONFIG_CLEAN_FILES = include/arc/ArcVersion.h \ src/hed/profiles/general/general.xml \ src/services/a-rex/rte/ENV/PROXY \ src/services/a-rex/rte/ENV/CANDYPOND nordugrid-arc.spec CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = 
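# Note on the AM_V_* / am__v_* variables above: they implement Automake's
# "silent rules". A recipe written as "$(AM_V_GEN)<command>" expands through
# $(am__v_GEN_@AM_V@) either to a terse "  GEN  <target>" line (the quiet
# default) or to the fully echoed command when make is invoked as "make V=1".
# Illustrative recipe shape only, not an actual rule of this Makefile:
#   target: prerequisite
#   	$(AM_V_GEN)generate-command $< > $@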
SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ cscope distdir dist dist-all distcheck am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) \ $(LISP)config.h.in # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags CSCOPE = cscope DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) distdir = $(PACKAGE)-$(VERSION) top_distdir = $(distdir) am__remove_distdir = \ if test -d "$(distdir)"; then \ find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \ && rm -rf "$(distdir)" \ || { sleep 5 && rm -rf "$(distdir)"; }; \ else :; fi am__post_remove_distdir = $(am__remove_distdir) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" DIST_ARCHIVES = $(distdir).tar.gz GZIP_ENV = --best DIST_TARGETS = dist-gzip distuninstallcheck_listfiles = find . -type f -print am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \ | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$' distcleancheck_listfiles = find . 
-type f -print pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ 
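# The @NAME@ tokens in these assignments are placeholders that config.status
# substitutes when it generates Makefile from this Makefile.in, so each
# *_CFLAGS / *_LIBS pair ends up holding whatever ./configure detected for
# that dependency. Purely illustrative example of the mechanism, not a value
# taken from this build:
#   GLOBUS_GSS_ASSIST_LIBS = -lglobus_gss_assist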
GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = 
@XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ ACLOCAL_AMFLAGS = -I m4 `test -d /opt/local/share/aclocal && echo -I /opt/local/share/aclocal` @SWIG_ENABLED_TRUE@SWIG_SD = swig SUBDIRS = src include $(SWIG_SD) python $(POSUB) debian DIST_SUBDIRS = src include swig python po debian EXTRA_DIST = nordugrid-arc.spec autogen.sh LICENSE NOTICE all: config.h $(MAKE) $(AM_MAKEFLAGS) all-recursive .SUFFIXES: am--refresh: Makefile @: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \ $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ echo ' $(SHELL) ./config.status'; \ $(SHELL) ./config.status;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) $(SHELL) ./config.status --recheck $(top_srcdir)/configure: $(am__configure_deps) $(am__cd) $(srcdir) && $(AUTOCONF) $(ACLOCAL_M4): $(am__aclocal_m4_deps) $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) $(am__aclocal_m4_deps): config.h: stamp-h1 @if test ! -f $@; then rm -f stamp-h1; else :; fi @if test ! -f $@; then $(MAKE) $(AM_MAKEFLAGS) stamp-h1; else :; fi stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status @rm -f stamp-h1 cd $(top_builddir) && $(SHELL) ./config.status config.h $(srcdir)/config.h.in: $(am__configure_deps) ($(am__cd) $(top_srcdir) && $(AUTOHEADER)) rm -f stamp-h1 touch $@ distclean-hdr: -rm -f config.h stamp-h1 include/arc/ArcVersion.h: $(top_builddir)/config.status $(top_srcdir)/include/arc/ArcVersion.h.in cd $(top_builddir) && $(SHELL) ./config.status $@ src/hed/profiles/general/general.xml: $(top_builddir)/config.status $(top_srcdir)/src/hed/profiles/general/general.xml.in cd $(top_builddir) && $(SHELL) ./config.status $@ src/services/a-rex/rte/ENV/PROXY: $(top_builddir)/config.status $(top_srcdir)/src/services/a-rex/rte/ENV/PROXY.in cd $(top_builddir) && $(SHELL) ./config.status $@ src/services/a-rex/rte/ENV/CANDYPOND: $(top_builddir)/config.status $(top_srcdir)/src/services/a-rex/rte/ENV/CANDYPOND.in cd $(top_builddir) && $(SHELL) ./config.status $@ nordugrid-arc.spec: $(top_builddir)/config.status $(srcdir)/nordugrid-arc.spec.in cd $(top_builddir) && $(SHELL) ./config.status $@ mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs distclean-libtool: -rm -f libtool config.lt # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscope: cscope.files test ! -s cscope.files \ || $(CSCOPE) -b -q $(AM_CSCOPEFLAGS) $(CSCOPEFLAGS) -i cscope.files $(CSCOPE_ARGS) clean-cscope: -rm -f cscope.files cscope.files: clean-cscope cscopelist cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags -rm -f cscope.out cscope.in.out cscope.po.out cscope.files distdir: $(DISTFILES) $(am__remove_distdir) test -d "$(distdir)" || mkdir "$(distdir)" @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done -test -n "$(am__skip_mode_fix)" \ || find "$(distdir)" -type d ! -perm -755 \ -exec chmod u+rwx,go+rx {} \; -o \ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ ! 
-type d ! -perm -400 -exec chmod a+r {} \; -o \ ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ || chmod -R a+r "$(distdir)" dist-gzip: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__post_remove_distdir) dist-bzip2: distdir tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2 $(am__post_remove_distdir) dist-lzip: distdir tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz $(am__post_remove_distdir) dist-xz: distdir tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz $(am__post_remove_distdir) dist-tarZ: distdir tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__post_remove_distdir) dist-shar: distdir shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz $(am__post_remove_distdir) dist-zip: distdir -rm -f $(distdir).zip zip -rq $(distdir).zip $(distdir) $(am__post_remove_distdir) dist dist-all: $(MAKE) $(AM_MAKEFLAGS) $(DIST_TARGETS) am__post_remove_distdir='@:' $(am__post_remove_distdir) # This target untars the dist file and tries a VPATH configuration. Then # it guarantees that the distribution is self-contained by making another # tarfile. distcheck: dist case '$(DIST_ARCHIVES)' in \ *.tar.gz*) \ GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ *.tar.lz*) \ lzip -dc $(distdir).tar.lz | $(am__untar) ;;\ *.tar.xz*) \ xz -dc $(distdir).tar.xz | $(am__untar) ;;\ *.tar.Z*) \ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ *.shar.gz*) \ GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\ *.zip*) \ unzip $(distdir).zip ;;\ esac chmod -R a-w $(distdir) chmod u+w $(distdir) mkdir $(distdir)/_build $(distdir)/_inst chmod a-w $(distdir) test -d $(distdir)/_build || exit 0; \ dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && am__cwd=`pwd` \ && $(am__cd) $(distdir)/_build \ && ../configure --srcdir=.. --prefix="$$dc_install_base" \ $(AM_DISTCHECK_CONFIGURE_FLAGS) \ $(DISTCHECK_CONFIGURE_FLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ && $(MAKE) $(AM_MAKEFLAGS) install \ && $(MAKE) $(AM_MAKEFLAGS) installcheck \ && $(MAKE) $(AM_MAKEFLAGS) uninstall \ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ distuninstallcheck \ && chmod -R a-w "$$dc_install_base" \ && ({ \ (cd ../.. 
&& umask 077 && mkdir "$$dc_destdir") \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ } || { rm -rf "$$dc_destdir"; exit 1; }) \ && rm -rf "$$dc_destdir" \ && $(MAKE) $(AM_MAKEFLAGS) dist \ && rm -rf $(DIST_ARCHIVES) \ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ && cd "$$am__cwd" \ || exit 1 $(am__post_remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' distuninstallcheck: @test -n '$(distuninstallcheck_dir)' || { \ echo 'ERROR: trying to run $@ with an empty' \ '$$(distuninstallcheck_dir)' >&2; \ exit 1; \ }; \ $(am__cd) '$(distuninstallcheck_dir)' || { \ echo 'ERROR: cannot chdir into $(distuninstallcheck_dir)' >&2; \ exit 1; \ }; \ test `$(am__distuninstallcheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left after uninstall:" ; \ if test -n "$(DESTDIR)"; then \ echo " (check DESTDIR support)"; \ fi ; \ $(distuninstallcheck_listfiles) ; \ exit 1; } >&2 distcleancheck: distclean @if test '$(srcdir)' = . ; then \ echo "ERROR: distcleancheck can only run from a VPATH build" ; \ exit 1 ; \ fi @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left in build directory after distclean:" ; \ $(distcleancheck_listfiles) ; \ exit 1; } >&2 check-am: all-am check: check-recursive all-am: Makefile config.h installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
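# The targets below follow the usual GNU/Automake cleaning hierarchy, each
# level recursing into $(SUBDIRS): mostlyclean removes the least, clean also
# removes built objects and libtool output, distclean additionally removes
# configure's products (config.h, stamp-h1, libtool, Makefile), and
# maintainer-clean further deletes files that need maintainer tools to
# regenerate, such as $(top_srcdir)/autom4te.cache.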
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -f Makefile distclean-am: clean-am distclean-generic distclean-hdr \ distclean-libtool distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -rf $(top_srcdir)/autom4te.cache -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(am__recursive_targets) all install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am \ am--refresh check check-am clean clean-cscope clean-generic \ clean-libtool cscope cscopelist-am ctags ctags-am dist \ dist-all dist-bzip2 dist-gzip dist-lzip dist-shar dist-tarZ \ dist-xz dist-zip distcheck distclean distclean-generic \ distclean-hdr distclean-libtool distclean-tags distcleancheck \ distdir distuninstallcheck dvi dvi-am html html-am info \ info-am install install-am install-data install-data-am \ install-dvi install-dvi-am install-exec install-exec-am \ install-html install-html-am install-info install-info-am \ install-man install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/PaxHeaders.30264/config.h.in0000644000000000000000000000013114152153417017362 xustar000000000000000030 mtime=1638455055.592904221 29 atime=1638455092.14145338 30 ctime=1638455095.722507187 nordugrid-arc-6.14.0/config.h.in0000644000175000002070000004137514152153417017362 0ustar00mockbuildmock00000000000000/* config.h.in. Generated from configure.ac by autoheader. */ /* Define to 1 if the `closedir' function returns void instead of `int'. */ #undef CLOSEDIR_VOID /* define if to build job information in BDB storage */ #undef DBJSTORE_ENABLED /* Define to 1 if translation of program messages to the user's native language is requested. */ #undef ENABLE_NLS /* Globus GSSAPI GSI is for OpenSSL post-1.1 */ #undef GLOBUS_GSSAPI_GSI_OLD_OPENSSL /* Globus GSSAPI GSI version */ #undef GLOBUS_GSSAPI_GSI_VERSION /* Globus IO version */ #undef GLOBUS_IO_VERSION /* Define to 1 if you have the `acl' function. */ #undef HAVE_ACL /* Define to 1 if you have the `alarm' function. */ #undef HAVE_ALARM /* Define to 1 if you have the header file. */ #undef HAVE_ARPA_INET_H /* Define to 1 if you have the MacOS X function CFLocaleCopyCurrent in the CoreFoundation framework. */ #undef HAVE_CFLOCALECOPYCURRENT /* Define to 1 if you have the MacOS X function CFPreferencesCopyAppValue in the CoreFoundation framework. */ #undef HAVE_CFPREFERENCESCOPYAPPVALUE /* Define to 1 if your system has a working `chown' function. 
*/ #undef HAVE_CHOWN /* define if Berkeley DB C++ binding is available */ #undef HAVE_DBCXX /* define if the Berkeley DB has DbDeadLockException */ #undef HAVE_DBDEADLOCKEXCEPTION /* Define to 1 if you have the header file. */ #undef HAVE_DB_CXX_H /* Define if the GNU dcgettext() function is already present or preinstalled. */ #undef HAVE_DCGETTEXT /* Define to 1 if you have the declaration of `strerror_r', and to 0 if you don't. */ #undef HAVE_DECL_STRERROR_R /* Define to 1 if you have the header file, and it defines `DIR'. */ #undef HAVE_DIRENT_H /* Define to 1 if you have the header file. */ #undef HAVE_DLFCN_H /* define if DTLSv1_2_method is available */ #undef HAVE_DTLSV1_2_METHOD /* define if DTLSv1_method is available */ #undef HAVE_DTLSV1_METHOD /* define if DTLS_method is available */ #undef HAVE_DTLS_METHOD /* Define to 1 if you have the `dup2' function. */ #undef HAVE_DUP2 /* Define to 1 if you have the header file. */ #undef HAVE_DUSTAT_H /* Define to 1 if you have the header file. */ #undef HAVE_FCNTL_H /* Define to 1 if you have the header file. */ #undef HAVE_FLOAT_H /* Define to 1 if you have the `floor' function. */ #undef HAVE_FLOOR /* Define to 1 if you have the `fork' function. */ #undef HAVE_FORK /* Define to 1 if you have the `fstatfs' function. */ #undef HAVE_FSTATFS /* Define to 1 if you have the `ftruncate' function. */ #undef HAVE_FTRUNCATE /* Define to 1 if you have the `getdomainname' function. */ #undef HAVE_GETDOMAINNAME /* Define to 1 if you have the `getgrouplist' function. */ #undef HAVE_GETGROUPLIST /* Define to 1 if you have the `gethostname' function. */ #undef HAVE_GETHOSTNAME /* Define to 1 if you have the header file. */ #undef HAVE_GETOPT_H /* Define to 1 if you have the `getopt_long_only' function. */ #undef HAVE_GETOPT_LONG_ONLY /* Define to 1 if you have the `getpid' function. */ #undef HAVE_GETPID /* Define if the GNU gettext() function is already present or preinstalled. */ #undef HAVE_GETTEXT /* define if giomm is supported in glibmm */ #undef HAVE_GIOMM /* define if glibmm have support local symbol resolution in shared libraries */ #undef HAVE_GLIBMM_BIND_LOCAL /* define if glibmm have getenv operations */ #undef HAVE_GLIBMM_GETENV /* define if glibmm have listenv operations */ #undef HAVE_GLIBMM_LISTENV /* define if glibmm has Glib::OptionContext::get_help() */ #undef HAVE_GLIBMM_OPTIONCONTEXT_GET_HELP /* define if glibmm has Glib::OptionContext::set_summary() */ #undef HAVE_GLIBMM_OPTIONCONTEXT_SET_SUMMARY /* define if glibmm have setenv operations */ #undef HAVE_GLIBMM_SETENV /* define if glibmm have unsetenv operations */ #undef HAVE_GLIBMM_UNSETENV /* define if GLOBUS is available */ #undef HAVE_GLOBUS /* Define to 1 if you have the `globus_ftp_client_handleattr_set_gridftp2' function. */ #undef HAVE_GLOBUS_FTP_CLIENT_HANDLEATTR_SET_GRIDFTP2 /* Define to 1 if you have the `globus_thread_set_model' function. */ #undef HAVE_GLOBUS_THREAD_SET_MODEL /* Define to 1 if you have the `gmtime_r' function. */ #undef HAVE_GMTIME_R /* Define if you have the iconv() function and it works. */ #undef HAVE_ICONV /* Define to 1 if you have the header file. */ #undef HAVE_INTTYPES_H /* define if lcas is available */ #undef HAVE_LCAS /* Define to 1 if you have the header file. */ #undef HAVE_LCAS_H /* Define to 1 if you have the `lchown' function. */ #undef HAVE_LCHOWN /* define if lcmaps is available */ #undef HAVE_LCMAPS /* Define to 1 if you have the header file. 
*/ #undef HAVE_LCMAPS_H /* Define if OpenLDAP is available */ #undef HAVE_LDAP /* Define if you have ldap_initialize function */ #undef HAVE_LDAP_INITIALIZE /* define if LDNS is enabled and available */ #undef HAVE_LDNS /* Define to 1 if you have the `nsl' library (-lnsl). */ #undef HAVE_LIBNSL /* Define to 1 if you have the header file. */ #undef HAVE_LIMITS_H /* Define to 1 if you have the `localtime_r' function. */ #undef HAVE_LOCALTIME_R /* Define to 1 if `lstat' has the bug that it succeeds when given the zero-length file name argument. */ #undef HAVE_LSTAT_EMPTY_STRING_BUG /* Define to 1 if your system has a GNU libc compatible `malloc' function, and to 0 otherwise. */ #undef HAVE_MALLOC /* Define to 1 if you have the `memchr' function. */ #undef HAVE_MEMCHR /* Define to 1 if you have the `memmove' function. */ #undef HAVE_MEMMOVE /* Define to 1 if you have the header file. */ #undef HAVE_MEMORY_H /* Define to 1 if you have the `memset' function. */ #undef HAVE_MEMSET /* Define to 1 if you have the `mkdir' function. */ #undef HAVE_MKDIR /* Define to 1 if you have the `mkdtemp' function. */ #undef HAVE_MKDTEMP /* Define to 1 if you have the `mkfifo' function. */ #undef HAVE_MKFIFO /* Define to 1 if you have the `[mkstemp]' function. */ #undef HAVE_MKSTEMP /* Define to 1 if you have the `mktemp' function. */ #undef HAVE_MKTEMP /* define if the compiler implements namespaces */ #undef HAVE_NAMESPACES /* Define to 1 if you have the header file, and it defines `DIR'. */ #undef HAVE_NDIR_H /* Define to 1 if you have the header file. */ #undef HAVE_NETDB_H /* Define to 1 if you have the header file. */ #undef HAVE_NETINET_IN_H /* define if NSS is enabled and available */ #undef HAVE_NSS /* Define to 1 if you have the `posix_fallocate' function. */ #undef HAVE_POSIX_FALLOCATE /* Define to 1 if the system has the type `ptrdiff_t'. */ #undef HAVE_PTRDIFF_T /* Define if you have Py_InitializeEx function */ #undef HAVE_PYTHON_INITIALIZE_EX /* Define to 1 if you have the `readdir_r' function. */ #undef HAVE_READDIR_R /* Define to 1 if your system has a GNU libc compatible `realloc' function, and to 0 otherwise. */ #undef HAVE_REALLOC /* Define to 1 if you have the `regcomp' function. */ #undef HAVE_REGCOMP /* Define to 1 if you have the `rmdir' function. */ #undef HAVE_RMDIR /* Define if S3 API has timeouts */ #undef HAVE_S3_TIMEOUT /* Define to 1 if you have the header file. */ #undef HAVE_SASL_H /* Define to 1 if you have the header file. */ #undef HAVE_SASL_SASL_H /* Define to 1 if you have the `select' function. */ #undef HAVE_SELECT /* Define to 1 if you have the `setenv' function. */ #undef HAVE_SETENV /* Define to 1 if you have the `socket' function. */ #undef HAVE_SOCKET /* define if SQLite is available */ #undef HAVE_SQLITE /* Define to 1 if you have the `sqlite3_errstr' function. */ #undef HAVE_SQLITE3_ERRSTR /* define if SSLv3_method is available */ #undef HAVE_SSLV3_METHOD /* define if the compiler has stringstream */ #undef HAVE_SSTREAM /* Define to 1 if `stat' has the bug that it succeeds when given the zero-length file name argument. */ #undef HAVE_STAT_EMPTY_STRING_BUG /* Define to 1 if stdbool.h conforms to C99. */ #undef HAVE_STDBOOL_H /* Define to 1 if you have the header file. */ #undef HAVE_STDINT_H /* Define to 1 if you have the header file. */ #undef HAVE_STDLIB_H /* Define to 1 if you have the `strcasecmp' function. */ #undef HAVE_STRCASECMP /* Define to 1 if you have the `strchr' function. */ #undef HAVE_STRCHR /* Define to 1 if you have the `strcspn' function. 
*/ #undef HAVE_STRCSPN /* Define to 1 if you have the `strdup' function. */ #undef HAVE_STRDUP /* Define to 1 if you have the `strerror' function. */ #undef HAVE_STRERROR /* Define to 1 if you have the `strerror_r' function. */ #undef HAVE_STRERROR_R /* Define to 1 if you have the header file. */ #undef HAVE_STRINGS_H /* Define to 1 if you have the header file. */ #undef HAVE_STRING_H /* Define to 1 if you have the `strncasecmp' function. */ #undef HAVE_STRNCASECMP /* Define to 1 if you have the `strstr' function. */ #undef HAVE_STRSTR /* Define to 1 if you have the `strtol' function. */ #undef HAVE_STRTOL /* Define to 1 if you have the `strtoul' function. */ #undef HAVE_STRTOUL /* Define to 1 if you have the `strtoull' function. */ #undef HAVE_STRTOULL /* Define to 1 if `f_type' is a member of `struct statfs'. */ #undef HAVE_STRUCT_STATFS_F_TYPE /* Define to 1 if `st_blksize' is a member of `struct stat'. */ #undef HAVE_STRUCT_STAT_ST_BLKSIZE /* Define if you have systemd daemon */ #undef HAVE_SYSTEMD_DAEMON /* Define to 1 if you have the header file. */ #undef HAVE_SYSTEMD_SD_DAEMON_H /* Define to 1 if you have the header file, and it defines `DIR'. */ #undef HAVE_SYS_DIR_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_FILE_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_FILSYS_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_FS_S5PARAM_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_FS_TYPES_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_MOUNT_H /* Define to 1 if you have the header file, and it defines `DIR'. */ #undef HAVE_SYS_NDIR_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_PARAM_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_SELECT_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_SOCKET_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_STATFS_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_STAT_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_TIME_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_TYPES_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_VFS_H /* Define to 1 if you have that is POSIX.1 compatible. */ #undef HAVE_SYS_WAIT_H /* Define to 1 if you have the `timegm' function. */ #undef HAVE_TIMEGM /* define if TLSv1_1_method is available */ #undef HAVE_TLSV1_1_METHOD /* define if TLSv1_2_method is available */ #undef HAVE_TLSV1_2_METHOD /* define if TLSv1_method is available */ #undef HAVE_TLSV1_METHOD /* define if TLS_method is available */ #undef HAVE_TLS_METHOD /* Define to 1 if you have the `tzset' function. */ #undef HAVE_TZSET /* Define to 1 if you have the header file. */ #undef HAVE_UNISTD_H /* Define to 1 if you have the `unsetenv' function. */ #undef HAVE_UNSETENV /* Define to 1 if you have the header file. */ #undef HAVE_UUID_UUID_H /* Define to 1 if you have the `vfork' function. */ #undef HAVE_VFORK /* Define to 1 if you have the header file. */ #undef HAVE_VFORK_H /* Define to 1 if `fork' works. */ #undef HAVE_WORKING_FORK /* Define to 1 if `vfork' works. */ #undef HAVE_WORKING_VFORK /* define if XMLSEC package is available */ #undef HAVE_XMLSEC /* Define to 1 if the system has the type `_Bool'. */ #undef HAVE__BOOL /* installation prefix */ #undef INSTPREFIX /* library installation subdirectory */ #undef LIBSUBDIR /* Define to 1 if `lstat' dereferences a symlink specified with a trailing slash. 
*/ #undef LSTAT_FOLLOWS_SLASHED_SYMLINK /* Define to the sub-directory in which libtool stores uninstalled libraries. */ #undef LT_OBJDIR /* Name of package */ #undef PACKAGE /* Define to the address where bug reports for this package should be sent. */ #undef PACKAGE_BUGREPORT /* Define to the full name of this package. */ #undef PACKAGE_NAME /* Define to the full name and version of this package. */ #undef PACKAGE_STRING /* Define to the one symbol short name of this package. */ #undef PACKAGE_TARNAME /* Define to the home page for this package. */ #undef PACKAGE_URL /* Define to the version of this package. */ #undef PACKAGE_VERSION /* package data subdirectory */ #undef PKGDATASUBDIR /* helper programs installation subdirectory */ #undef PKGLIBEXECSUBDIR /* plugin installation subdirectory */ #undef PKGLIBSUBDIR /* Define as the return type of signal handlers (`int' or `void'). */ #undef RETSIGTYPE /* Define to the type of arg 1 for `select'. */ #undef SELECT_TYPE_ARG1 /* Define to the type of args 2, 3 and 4 for `select'. */ #undef SELECT_TYPE_ARG234 /* Define to the type of arg 5 for `select'. */ #undef SELECT_TYPE_ARG5 /* define if to build job information in SQLite storage */ #undef SQLITEJSTORE_ENABLED /* Define if the block counts reported by statfs may be truncated to 2GB and the correct values may be stored in the f_spare array. (SunOS 4.1.2, 4.1.3, and 4.1.3_U1 are reported to have this problem. SunOS 4.1.1 seems not to be affected.) */ #undef STATFS_TRUNCATES_BLOCK_COUNTS /* Define if there is no specific function for reading file systems usage information and you have the header file. (SVR2) */ #undef STAT_READ_FILSYS /* Define if statfs takes 2 args and struct statfs has a field named f_bsize. (4.3BSD, SunOS 4, HP-UX, AIX PS/2) */ #undef STAT_STATFS2_BSIZE /* Define if statfs takes 2 args and struct statfs has a field named f_fsize. (4.4BSD, NetBSD) */ #undef STAT_STATFS2_FSIZE /* Define if statfs takes 2 args and the second argument has type struct fs_data. (Ultrix) */ #undef STAT_STATFS2_FS_DATA /* Define if statfs takes 3 args. (DEC Alpha running OSF/1) */ #undef STAT_STATFS3_OSF1 /* Define if statfs takes 4 args. (SVR3, Dynix, Irix, Dolphin) */ #undef STAT_STATFS4 /* Define if there is a function named statvfs. (SVR4) */ #undef STAT_STATVFS /* Define to 1 if you have the ANSI C header files. */ #undef STDC_HEADERS /* Define to 1 if strerror_r returns char *. */ #undef STRERROR_R_CHAR_P /* Define to 1 if you can safely include both and . */ #undef TIME_WITH_SYS_TIME /* Define to 1 if your declares `struct tm'. */ #undef TM_IN_SYS_TIME /* Enable extensions on AIX 3, Interix. */ #ifndef _ALL_SOURCE # undef _ALL_SOURCE #endif /* Enable GNU extensions on systems that have them. */ #ifndef _GNU_SOURCE # undef _GNU_SOURCE #endif /* Enable threading extensions on Solaris. */ #ifndef _POSIX_PTHREAD_SEMANTICS # undef _POSIX_PTHREAD_SEMANTICS #endif /* Enable extensions on HP NonStop. */ #ifndef _TANDEM_SOURCE # undef _TANDEM_SOURCE #endif /* Enable general extensions on Solaris. */ #ifndef __EXTENSIONS__ # undef __EXTENSIONS__ #endif /* Version number of package */ #undef VERSION /* Enable large inode numbers on Mac OS X 10.5. */ #ifndef _DARWIN_USE_64_BIT_INODE # define _DARWIN_USE_64_BIT_INODE 1 #endif /* Number of bits in a file offset, on hosts where this is settable. */ #undef _FILE_OFFSET_BITS /* Define for large files, on AIX-style hosts. */ #undef _LARGE_FILES /* Define if compiling for MacOSX */ #undef _MACOSX /* Define to 1 if on MINIX. 
*/ #undef _MINIX /* Define to 2 if the system does not provide POSIX.1 features except with this defined. */ #undef _POSIX_1_SOURCE /* Define to 1 if you need to in order for `stat' and other things to work. */ #undef _POSIX_SOURCE /* Define to empty if `const' does not conform to ANSI C. */ #undef const /* Define to `int' if <sys/types.h> doesn't define. */ #undef gid_t /* Define to `__inline__' or `__inline' if that's what the C compiler calls it, or to nothing if 'inline' is not supported under any name. */ #ifndef __cplusplus #undef inline #endif /* Define to rpl_malloc if the replacement function should be used. */ #undef malloc /* Define to `int' if <sys/types.h> does not define. */ #undef mode_t /* Define to `long int' if <sys/types.h> does not define. */ #undef off_t /* Define to `int' if <sys/types.h> does not define. */ #undef pid_t /* Define to rpl_realloc if the replacement function should be used. */ #undef realloc /* Define to `unsigned int' if <sys/types.h> does not define. */ #undef size_t /* Define to `int' if <sys/types.h> doesn't define. */ #undef uid_t /* Define as `fork' if `vfork' does not work. */ #undef vfork nordugrid-arc-6.14.0/PaxHeaders.30264/nordugrid-arc.spec0000644000000000000000000000013214152153464020756 xustar000000000000000030 mtime=1638455092.063452208 30 atime=1638455095.505503926 30 ctime=1638455095.736507397 nordugrid-arc-6.14.0/nordugrid-arc.spec0000644000175000002070000015375314152153464020755 0ustar00mockbuildmock00000000000000%{!?_pkgdocdir: %global _pkgdocdir %{_docdir}/%{name}-%{version}} # # Build dependency discrepancies across platforms # %if %{?suse_version:1}%{!?suse_version:0} %global glibmm2_devel glibmm2-devel %global openldap_devel openldap2-devel %global nss_devel mozilla-nss-devel %else %global glibmm2_devel glibmm24-devel %global openldap_devel openldap-devel %global nss_devel nss-devel %endif # # xROOTd # %if %{?fedora}%{!?fedora:0} >= 24 || %{?rhel}%{!?rhel:0} %global with_xrootd %{!?_without_xrootd:1}%{?_without_xrootd:0} %else %global with_xrootd 0 %endif # # Python # %if %{?fedora}%{!?fedora:0} >= 32 || %{?rhel}%{!?rhel:0} >= 8 %global with_python2 0 %else %global with_python2 1 %endif %if %{?fedora}%{!?fedora:0} >= 13 || %{?rhel}%{!?rhel:0} >= 7 %global with_python3 1 %else %global with_python3 0 %endif %if %{with_python2} %{!?__python2: %global __python2 /usr/bin/python2} %{!?python2_sitearch: %global python2_sitearch %(%{__python2} -Esc "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")} %{!?python2_sitelib: %global python2_sitelib %(%{__python2} -Esc "from distutils.sysconfig import get_python_lib; print(get_python_lib())")} %endif %if %{with_python3} %{!?python3_pkgversion: %global python3_pkgversion 3} %endif %if %{?rhel}%{!?rhel:0} == 6 %filter_provides_in %{python2_sitearch}/.*\.so$ %filter_setup %endif %if %{?fedora}%{!?fedora:0} || %{?rhel}%{!?rhel:0} >= 7 || %{?suse_version:1}%{!?suse_version:0} %global with_pylint %{!?_without_pylint:1}%{?_without_pylint:0} %else %global with_pylint 0 %endif %if %{?fedora}%{!?fedora:0} >= 29 || %{?rhel}%{!?rhel:0} >= 8 %global py3default 1 %else %global py3default 0 %endif %if %{?fedora}%{!?fedora:0} >= 7 || %{?rhel}%{!?rhel:0} >= 5 || %{?suse_version}%{!?suse_version:0} >= 1110 %global with_acix 1 %else %global with_acix 0 %endif %if %{?fedora}%{!?fedora:0} >= 21 || %{?rhel}%{!?rhel:0} >= 5 %global with_s3 1 %else %global with_s3 0 %endif %if %{?fedora}%{!?fedora:0} >= 21 || %{?rhel}%{!?rhel:0} >= 5 %global with_gfal 1 %else %global with_gfal 0 %endif %if %{?fedora}%{!?fedora:0} || %{?rhel}%{!?rhel:0} %global with_xmlsec1
%{!?_without_xmlsec1:1}%{?_without_xmlsec1:0} %else %global with_xmlsec1 0 %endif %if %{?fedora}%{!?fedora:0} >= 21 || %{?rhel}%{!?rhel:0} >= 6 %global with_pythonlrms 1 %else %global with_pythonlrms 0 %endif # LDNS %if %{?fedora}%{!?fedora:0} >= 13 || %{?rhel}%{!?rhel:0} >= 5 %global with_ldns 1 %else %global with_ldns 0 %endif %if %{?fedora}%{!?fedora:0} >= 25 || %{?rhel}%{!?rhel:0} >= 7 %global use_systemd 1 %else %global use_systemd 0 %endif %global with_ldap_service 1 %global pkgdir arc # bash-completion %global _bashcompdir %(pkg-config --variable=completionsdir bash-completion 2>/dev/null || echo %{_sysconfdir}/bash_completion.d) # # Macros for scripts # # Stop and disable service on package removal %if %{use_systemd} %define stop_on_removal() %{expand:%%systemd_preun %(sed 's/[^ ]*/&.service/g' <<< '%{?*}')} %else %if %{?stop_on_removal:0}%{!?stop_on_removal:1} %global stop_on_removal() if [ $1 -eq 0 ]; then for s in %*; do service $s stop > /dev/null 2>&1 || : ; done; for s in %*; do /sbin/chkconfig --del $s; done; fi %endif %endif # Enable a service %if %{use_systemd} %define enable_service() %{expand:%%systemd_post %(sed 's/[^ ]*/&.service/g' <<< '%{?*}')} %else %if %{?suse_version:1}%{!?suse_version:0} %define enable_service() %{expand:%%fillup_and_insserv -f %{?*}} %else %define enable_service() for s in %{?*}; do /sbin/chkconfig --add $s ; done %endif %endif # Conditionally restart service on package update %if %{use_systemd} %define condrestart_on_update() %{expand:%%systemd_postun_with_restart %(sed 's/[^ ]*/&.service/g' <<< '%{?*}')} %else %if %{?suse_version:1}%{!?suse_version:0} %define condrestart_on_update() %{expand:%%restart_on_update %{?*}} %{expand:%%insserv_cleanup} %else %define condrestart_on_update() if [ $1 -ge 1 ]; then for s in %{?*}; do service $s condrestart > /dev/null 2>&1 || : ; done; fi %endif %endif # Standard service requirements %if %{use_systemd} %define service_post_requires systemd-units %define service_preun_requires systemd-units %define service_postun_requires systemd-units %else %if %{?suse_version:1}%{!?suse_version:0} %define service_post_requires %{insserv_prereq} %define service_preun_requires %{insserv_prereq} %define service_postun_requires %{insserv_prereq} %else %define service_post_requires chkconfig %define service_preun_requires chkconfig, initscripts %define service_postun_requires initscripts %endif %endif Name: nordugrid-arc Version: 6.14.0 Release: 1%{?dist} Summary: Advanced Resource Connector Middleware Group: System Environment/Daemons License: ASL 2.0 URL: http://www.nordugrid.org/ Source: http://download.nordugrid.org/packages/%{name}/releases/%{version}/src/%{name}-%{version}.tar.gz BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) # Packages dropped without replacements Obsoletes: %{name}-chelonia < 2.0.0 Obsoletes: %{name}-hopi < 2.0.0 Obsoletes: %{name}-isis < 2.0.0 Obsoletes: %{name}-janitor < 2.0.0 Obsoletes: %{name}-doxygen < 4.0.0 Obsoletes: %{name}-arcproxyalt < 6.0.0 Obsoletes: %{name}-java < 6.0.0 Obsoletes: %{name}-egiis < 6.0.0 %if ! %{with_python2} Obsoletes: python2-%{name} < %{version}-%{release} Obsoletes: %{name}-python < 5.3.3 %endif %if ! 
%{with_ldap_service} Obsoletes: %{name}-infosys-ldap < %{version}-%{release} Obsoletes: %{name}-ldap-infosys < 6.0.0 Obsoletes: %{name}-aris < 6.0.0 %endif BuildRequires: gcc-c++ BuildRequires: cppunit-devel BuildRequires: pkgconfig %if %{use_systemd} BuildRequires: systemd-devel %endif %if %{?fedora}%{!?fedora:0} >= 12 || %{?rhel}%{!?rhel:0} >= 6 || %{?suse_version:1}%{!?suse_version:0} BuildRequires: libuuid-devel %else BuildRequires: e2fsprogs-devel %endif BuildRequires: gettext %if %{with_python2} BuildRequires: python2-devel %endif %if %{with_python3} BuildRequires: python%{python3_pkgversion}-devel %endif %if %{with_pylint} BuildRequires: pylint %endif BuildRequires: %{glibmm2_devel} BuildRequires: glib2-devel BuildRequires: libxml2-devel BuildRequires: openssl BuildRequires: openssl-devel %if %{with_xmlsec1} BuildRequires: xmlsec1-devel >= 1.2.4 BuildRequires: xmlsec1-openssl-devel >= 1.2.4 %endif BuildRequires: %{nss_devel} BuildRequires: %{openldap_devel} BuildRequires: globus-common-devel BuildRequires: globus-ftp-client-devel BuildRequires: globus-ftp-control-devel %if %{?fedora}%{!?fedora:0} >= 23 || %{?rhel}%{!?rhel:0} >= 5 BuildRequires: globus-gssapi-gsi-devel >= 12.2 %else BuildRequires: globus-gssapi-gsi-devel < 12.2 %endif %if %{with_xrootd} BuildRequires: xrootd-client-devel >= 1:4.5.0 %endif %if %{with_gfal} BuildRequires: gfal2-devel %endif %if %{with_s3} BuildRequires: libs3-devel %endif %if %{?suse_version}%{!?suse_version:0} == 1110 BuildRequires: db43-devel %else %if %{?fedora}%{!?fedora:0} >= 15 || %{?rhel}%{!?rhel:0} >= 7 BuildRequires: libdb-cxx-devel %else %if %{?fedora}%{!?fedora:0} == 14 BuildRequires: libdb-devel %else BuildRequires: db4-devel %endif %endif %endif %if %{?fedora}%{!?fedora:0} >= 21 || %{?rhel}%{!?rhel:0} BuildRequires: perl-generators %endif # Needed for Boinc backend testing during make check BuildRequires: perl(DBI) # Needed for infoprovider testing during make check BuildRequires: perl(English) BuildRequires: perl(JSON::XS) BuildRequires: perl(Sys::Hostname) BuildRequires: perl(XML::Simple) # Needed for LRMS testing during make check BuildRequires: perl(Test::Harness) BuildRequires: perl(Test::Simple) # Needed to run ACIX unit tests %if %{with_acix} %if %{py3default} BuildRequires: python3-twisted BuildRequires: python3-pyOpenSSL %else %if %{?fedora}%{!?fedora:0} || %{?rhel}%{!?rhel:0} >= 8 || %{?suse_version:1}%{!?suse_version:0} BuildRequires: python-twisted %else BuildRequires: python-twisted-core BuildRequires: python-twisted-web %endif %if %{?suse_version:1}%{!?suse_version:0} BuildRequires: python-openssl %else BuildRequires: pyOpenSSL %endif %endif %endif BuildRequires: swig %if %{?fedora}%{!?fedora:0} >= 4 || %{?rhel}%{!?rhel:0} >= 5 BuildRequires: libtool-ltdl-devel %else BuildRequires: libtool %endif %if %{with_pythonlrms} BuildRequires: perl(Inline) BuildRequires: perl(Inline::Python) %endif BuildRequires: sqlite-devel >= 3.6 %if %{with_ldns} BuildRequires: ldns-devel >= 1.6.8 %endif %if %{?fedora}%{!?fedora:0} >= 17 || %{?rhel}%{!?rhel:0} >= 7 || %{?suse_version:1}%{!?suse_version:0} BuildRequires: pkgconfig(bash-completion) %endif %if %{?fedora}%{!?fedora:0} <= 13 && %{?rhel}%{!?rhel:0} <= 6 BuildRequires: python-argparse Requires: python-argparse %endif %if %{?fedora}%{!?fedora:0} >= 13 || %{?rhel}%{!?rhel:0} >= 7 || %{?suse_version:1}%{!?suse_version:0} Requires: hostname %else Requires: net-tools %endif Requires: openssl %description NorduGrid is a collaboration aiming at development, maintenance and support of the 
middleware, known as the Advanced Resource Connector (ARC). The ARC middleware is a software solution that uses distributed computing technologies to enable sharing and federation of computing resources across different administrative and application domains. ARC is used to create distributed infrastructures of various scope and complexity, from campus to national and global deployments. %package client Summary: ARC command line clients Group: Applications/Internet Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-needed = %{version}-%{release} %description client NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This client package contains all the CLI tools that are needed to operate with x509 proxies, submit and manage jobs and handle data transfers. %package hed Summary: ARC Hosting Environment Daemon Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description hed NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). The ARC Hosting Environment Daemon (HED) is a Web Service container for ARC services. %package gridftpd Summary: ARC gridftp server Group: System Environment/Daemons Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-gridftp = %{version}-%{release} Requires: %{name}-arcctl-service = %{version}-%{release} Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description gridftpd NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the ARC gridftp server which can be used as a custom job submission interface in front of an ARC enabled computing cluster or as a low-level dedicated gridftp file server. %package datadelivery-service Summary: ARC data delivery service Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-hed = %{version}-%{release} Requires: %{name}-plugins-needed = %{version}-%{release} Requires: %{name}-arcctl-service = %{version}-%{release} Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description datadelivery-service NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the ARC data delivery service. 
%if %{with_ldap_service} %package infosys-ldap Summary: ARC LDAP-based information services Group: System Environment/Libraries %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif %if %{?rhel}%{!?rhel:0} == 8 # slapd package not available in EPEL 8 Recommends: openldap-servers Recommends: bdii %else Requires: openldap-servers Requires: bdii %endif Requires: glue-schema >= 2.0.10 Requires: %{name}-arcctl-service = %{version}-%{release} Provides: %{name}-ldap-infosys = %{version}-%{release} Obsoletes: %{name}-ldap-infosys < 6.0.0 Provides: %{name}-aris = %{version}-%{release} Obsoletes: %{name}-aris < 6.0.0 Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %if %{?fedora}%{!?fedora:0} >= 23 || %{?rhel}%{!?rhel:0} >= 8 Requires(post): policycoreutils-python-utils Requires(postun): policycoreutils-python-utils %else %if %{?fedora}%{!?fedora:0} >= 11 || %{?rhel}%{!?rhel:0} >= 6 Requires(post): policycoreutils-python Requires(postun): policycoreutils-python %else %if %{?fedora}%{!?fedora:0} >= 5 || %{?rhel}%{!?rhel:0} >= 5 Requires(post): policycoreutils Requires(postun): policycoreutils %endif %endif %endif %description infosys-ldap NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the ARC information services relying on BDII and LDAP technologies to publish ARC CE information according to various LDAP schemas. Please note that the information collectors are part of another package, the nordugrid-arc-arex. %endif %package monitor Summary: ARC LDAP monitor web application Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: php Requires: php-gd Requires: php-ldap %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif Obsoletes: %{name}-ldap-monitor < 6.0.0 Obsoletes: %{name}-ws-monitor < 6.0.0 %description monitor NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the PHP web application that is used to set up a web-based monitor which pulls information from the LDAP information system and visualizes it. %package arcctl Summary: ARC Control Tool Group: Applications/Internet Requires: %{name} = %{version}-%{release} %description arcctl NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the ARC Control Tool with a basic set of control modules suitable for both server and client side. %package arcctl-service Summary: ARC Control Tool - service control modules Group: Applications/Internet Requires: %{name} = %{version}-%{release} Requires: %{name}-arcctl = %{version}-%{release} %description arcctl-service NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the service control modules for the ARC Control Tool that allow working with the server-side configuration and managing ARC services.
%package arex Summary: ARC Resource-coupled EXecution service Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-hed = %{version}-%{release} Requires: %{name}-arcctl = %{version}-%{release} Requires: %{name}-arcctl-service = %{version}-%{release} Requires: %{name}-plugins-needed = %{version}-%{release} %if %{py3default} Requires: python3-isodate Requires: python3-ldap %else Requires: python-isodate Requires: python-ldap %endif Provides: %{name}-cache-service = %{version}-%{release} Obsoletes: %{name}-cache-service < 6.0.0 Provides: %{name}-candypond = %{version}-%{release} Obsoletes: %{name}-candypond < 6.0.0 Requires(post): %{name}-arcctl = %{version}-%{release} Requires(preun): %{name}-arcctl = %{version}-%{release} %if %{?fedora}%{!?fedora:0} >= 13 || %{?rhel}%{!?rhel:0} >= 7 || %{?suse_version:1}%{!?suse_version:0} Requires(post): hostname %else Requires(post): net-tools %endif Requires(post): openssl Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description arex NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). The ARC Resource-coupled EXecution service (AREX) is the Computing Element of the ARC middleware. AREX offers a full-featured middle layer to manage computational tasks including interfacing to local batch systems, taking care of complex environments such as data staging, data caching, software environment provisioning, information collection and exposure, accounting information gathering and publishing. %if %{with_pythonlrms} %package arex-python-lrms Summary: ARC Resource-coupled EXecution service - Python LRMS backends Group: System Environment/Libraries Requires: %{name}-arex = %{version}-%{release} %if %{py3default} Requires: python%{python3_pkgversion}-%{name} = %{version}-%{release} %else Requires: python2-%{name} = %{version}-%{release} %endif Requires: perl(Inline) Requires: perl(Inline::Python) %description arex-python-lrms NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). The Python LRMS backends are a new implementation of the AREX LRMS backend scripts written in Python. Currently only the SLURM LRMS is supported. It is released as a technology preview. %endif %package community-rtes Summary: Community-defined RTEs support Group: System Environment/Libraries Requires: %{name}-arex = %{version}-%{release} Requires: %{name}-arcctl = %{version}-%{release} Requires: gnupg2 %if %{py3default} Requires: python3-dns %else Requires: python-dns %endif %description community-rtes NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Community RTEs is the framework that allows deploying software packages (tarballs, containers, etc) provided by trusted communities to ARC CE using simple arcctl commands. It is released as a technology preview. %package plugins-needed Summary: ARC base plugins Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-needed NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC base plugins. This includes the Message Chain Components (MCCs) and Data Manager Components (DMCs). 
%package plugins-globus Summary: ARC Globus plugins (compat) Group: System Environment/Libraries Requires: %{name}-plugins-gridftp = %{version}-%{release} Requires: %{name}-plugins-gridftpjob = %{version}-%{release} Requires: %{name}-plugins-lcas-lcmaps = %{version}-%{release} %description plugins-globus NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC Globus plugins. This compat metapackage brings all Globus dependent plugins at once, including: Data Manager Components (DMCs), Client plugin and LCAS/LCMAPS tools. This package is meant to allow smooth transition and will be removed from the upcoming releases. %package plugins-globus-common Summary: ARC Globus plugins common libraries Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-globus-common NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC Globus plugins common libraries package includes the bundle of necessary Globus libraries needed for all other globus-dependent ARC components. %package plugins-gridftp Summary: ARC Globus dependent DMCs Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-globus-common = %{version}-%{release} %description plugins-gridftp NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC Globus GridFTP plugins. These allow access to data through the gridftp protocol. %package plugins-lcas-lcmaps Summary: ARC LCAS/LCMAPS plugins Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-globus-common = %{version}-%{release} %if %{?fedora}%{!?fedora:0} >= 23 || %{?rhel}%{!?rhel:0} >= 5 Requires: globus-gssapi-gsi >= 12.2 %else Requires: globus-gssapi-gsi < 12.2 %endif %description plugins-lcas-lcmaps NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC LCAS/LCMAPS tools allow configuring ARC CE to use LCAS/LCMAPS services for authorization and mapping. %package plugins-gridftpjob Summary: ARC GRIDFTPJOB client plugin Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-globus-common = %{version}-%{release} Requires: %{name}-plugins-gridftp = %{version}-%{release} %description plugins-gridftpjob NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC GRIDFTPJOB plugin allows submitting jobs via the gridftpd interface. %if %{with_xrootd} %package plugins-xrootd Summary: ARC xrootd plugins Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-xrootd NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC xrootd plugins. These allow access to data through the xrootd protocol. %endif %if %{with_gfal} %package plugins-gfal Summary: ARC GFAL2 plugins Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-gfal NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC plugins for GFAL2. 
This allows third-party transfer and adds support for several extra transfer protocols (rfio, dcap, gsidcap). Support for specific protocols is provided by separate 3rd-party GFAL2 plugin packages. %endif %if %{with_s3} %package plugins-s3 Summary: ARC S3 plugins Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-s3 NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC plugins for S3. These allow access to data through the S3 protocol. %endif %package plugins-internal Summary: ARC internal plugin Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} Requires: %{name}-arex = %{version}-%{release} %description plugins-internal NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). The ARC internal plugin. A special interface aimed for restrictive HPC sites, to be used with a local installation of the ARC Control Tower. %package plugins-arcrest Summary: ARC REST plugin Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %description plugins-arcrest NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC plugin for ARC REST interface technology preview. %package plugins-python Summary: ARC Python dependent plugin Group: System Environment/Libraries Requires: %{name} = %{version}-%{release} %if %{py3default} Requires: python%{python3_pkgversion}-%{name} = %{version}-%{release} %else Requires: python2-%{name} = %{version}-%{release} %endif %description plugins-python NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). ARC plugins dependent on Python. %if %{with_acix} %package acix-core Summary: ARC cache index - core Group: System Environment/Libraries %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif %if %{py3default} Requires: python3-twisted Requires: python3-pyOpenSSL %else %if %{?fedora}%{!?fedora:0} || %{?rhel}%{!?rhel:0} >= 8 || %{?suse_version:1}%{!?suse_version:0} Requires: python-twisted %else Requires: python-twisted-core Requires: python-twisted-web %endif %if %{?suse_version:1}%{!?suse_version:0} Requires: python-openssl %else Requires: pyOpenSSL %endif %endif %description acix-core NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Core components of the ARC Cache Index (ACIX). %package acix-scanner Summary: ARC cache index - scanner server Group: System Environment/Libraries %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif Requires: %{name} = %{version}-%{release} Requires: %{name}-acix-core = %{version}-%{release} Requires: %{name}-arcctl-service = %{version}-%{release} Obsoletes: %{name}-acix-cache < 6.0.0 Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description acix-scanner NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Cache scanner component of the ARC Cache Index (ACIX), usually installed alongside A-REX. This component collects information on the content of an A-REX cache. 
%package acix-index Summary: ARC cache index - index server Group: System Environment/Libraries %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif Requires: %{name} = %{version}-%{release} Requires: %{name}-arcctl-service = %{version}-%{release} Requires: %{name}-acix-core = %{version}-%{release} Requires(post): %{service_post_requires} Requires(preun): %{service_preun_requires} Requires(postun): %{service_postun_requires} %description acix-index NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Index server component of the ARC Cache Index (ACIX), usually installed independently of any A-REX installation. This component pulls cache content from ACIX cache scanner servers and can be queried by clients for the location of cached files. %endif %package devel Summary: ARC development files Group: Development/Libraries Requires: %{name} = %{version}-%{release} Requires: %{glibmm2_devel} Requires: glib2-devel Requires: libxml2-devel Requires: openssl-devel %description devel NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Header files and libraries needed to develop applications using ARC. %if %{with_python2} %package -n python2-%{name} Summary: ARC Python 2 wrapper Group: Development/Libraries %{?python_provide:%python_provide python2-%{name}} Provides: %{name}-python = %{version}-%{release} Obsoletes: %{name}-python < 5.3.3 Requires: %{name} = %{version}-%{release} %description -n python2-%{name} NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Python 2 bindings for ARC. %endif %if %{with_python3} %package -n python%{python3_pkgversion}-%{name} Summary: ARC Python 3 wrapper Group: Development/Libraries %{?python_provide:%python_provide python%{python3_pkgversion}-%{name}} Provides: %{name}-python%{python3_pkgversion} = %{version}-%{release} Obsoletes: %{name}-python%{python3_pkgversion} < 5.3.3 Requires: %{name} = %{version}-%{release} %description -n python%{python3_pkgversion}-%{name} NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). Python 3 bindings for ARC. %endif %package nordugridmap Summary: ARC's nordugridmap tool Group: Applications/Internet Requires: crontabs Obsoletes: %{name}-gridmap-utils < 6.0.0 %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif %description nordugridmap NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). A simple tool to fetch list of users and eventually generate gridmap files. %package test-utils Summary: ARC test tools Group: Applications/Internet Requires: %{name} = %{version}-%{release} Requires: %{name}-plugins-needed = %{version}-%{release} Obsoletes: %{name}-misc-utils < 6.0.0 %description test-utils NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains a few utilities useful to test various ARC subsystems. The package is not required by users or sysadmins and it is mainly for developers. 
%package archery-manage Summary: ARCHERY administration tool Group: Applications/Internet %if %{?fedora}%{!?fedora:0} >= 10 || %{?rhel}%{!?rhel:0} >= 6 BuildArch: noarch %endif %if %{?fedora}%{!?fedora:0} <= 13 && %{?rhel}%{!?rhel:0} <= 6 Requires: python-argparse %endif %if %{py3default} Requires: python3-dns Requires: python3-ldap %else Requires: python-dns Requires: python-ldap %endif %description archery-manage NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the archery-manage utility for administration of an ARCHERY DNS-embedded service endpoint registry. %package wn Summary: ARC optional worker nodes components Group: Applications/Internet %description wn NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). This package contains the optional components that provide new job management features on the worker nodes (WN). %prep %setup -q %build if pkg-config --atleast-version 2.6 sigc++-2.0 ; then if [ `echo __GNUC__ | gcc -E - | tail -1` -lt 6 ] ; then # Workaround for too new libsigc++/glibmm, too old gcc combination export CXXFLAGS="%{optflags} -std=c++11" fi fi %configure --disable-static \ %if ! %{with_acix} --disable-acix \ %endif %if %{with_gfal} --enable-gfal \ %endif %if %{with_s3} --enable-s3 \ %endif %if %{py3default} --with-python=python3 \ %if %{with_python2} --with-altpython=python2 \ %endif %else --with-python=python2 \ %if %{with_python3} --with-altpython=python3 \ %endif %endif %if ! %{with_xrootd} --disable-xrootd \ %endif %if %{with_pythonlrms} --with-inline-python \ %endif %if ! %{with_ldns} --disable-ldns \ %endif --enable-internal \ %if %{use_systemd} --enable-systemd \ --with-systemd-units-location=%{_unitdir} \ %endif %if ! %{with_ldap_service} --disable-ldap-service \ %endif --disable-doc \ --docdir=%{_pkgdocdir} make %{?_smp_mflags} %check make %{?_smp_mflags} check %install rm -rf $RPM_BUILD_ROOT make install DESTDIR=$RPM_BUILD_ROOT # Install Logrotate. mkdir -p $RPM_BUILD_ROOT%{_sysconfdir}/logrotate.d install -p -m 644 debian/%{name}-arex.logrotate \ $RPM_BUILD_ROOT%{_sysconfdir}/logrotate.d/%{name}-arex install -p -m 644 debian/%{name}-gridftpd.logrotate \ $RPM_BUILD_ROOT%{_sysconfdir}/logrotate.d/%{name}-gridftpd %if %{with_ldap_service} install -p -m 644 debian/%{name}-infosys-ldap.logrotate \ $RPM_BUILD_ROOT%{_sysconfdir}/logrotate.d/%{name}-infosys-ldap %endif install -p -m 644 debian/%{name}-datadelivery-service.logrotate \ $RPM_BUILD_ROOT%{_sysconfdir}/logrotate.d/%{name}-datadelivery-service find $RPM_BUILD_ROOT -type f -name \*.la -exec rm -fv '{}' ';' # The py-compile script in the source tarball is old (RHEL 6) # It does the wrong thing for python 3 - remove and let rpmbuild do it right find $RPM_BUILD_ROOT -type f -name \*.pyc -exec rm -fv '{}' ';' find $RPM_BUILD_ROOT -type f -name \*.pyo -exec rm -fv '{}' ';' # libarcglobusutils is not part of the ARC api. 
find $RPM_BUILD_ROOT -name libarcglobusutils.so -exec rm -fv '{}' ';' # Create log directory mkdir -p $RPM_BUILD_ROOT%{_localstatedir}/log/arc # Create spool directories for Jura mkdir -p $RPM_BUILD_ROOT%{_localstatedir}/spool/arc mkdir -p $RPM_BUILD_ROOT%{_localstatedir}/spool/arc/ssm mkdir -p $RPM_BUILD_ROOT%{_localstatedir}/spool/arc/urs %find_lang %{name} # Remove examples and let RPM package them under /usr/share/doc using the doc macro rm -rf $RPM_BUILD_ROOT%{_datadir}/%{pkgdir}/examples make -C src/libs/data-staging/examples DESTDIR=$PWD/docdir/devel pkgdatadir= install-exampleDATA make -C src/hed/libs/compute/examples DESTDIR=$PWD/docdir/devel pkgdatadir= install-exampleDATA make -C src/hed/libs/data/examples DESTDIR=$PWD/docdir/devel pkgdatadir= install-exampleDATA make -C src/hed/acc/PythonBroker DESTDIR=$PWD/docdir/python pkgdatadir= install-exampleDATA make -C python/examples DESTDIR=$PWD/docdir/devel pkgdatadir= install-exampleDATA make -C src/tests/echo DESTDIR=$PWD/docdir/hed pkgdatadir= install-exampleDATA make -C src/hed DESTDIR=$PWD/docdir/hed pkgdatadir= install-profileDATA # client.conf needs special handling make -C src/clients DESTDIR=$RPM_BUILD_ROOT install-exampleDATA # Link to client.conf from doc ln -s %{_datadir}/%{pkgdir}/examples/client.conf $PWD/docdir/client.conf %clean rm -rf $RPM_BUILD_ROOT %post -p /sbin/ldconfig %postun -p /sbin/ldconfig %post plugins-globus-common -p /sbin/ldconfig %postun plugins-globus-common -p /sbin/ldconfig %post hed %enable_service arched %preun hed %stop_on_removal arched %postun hed %condrestart_on_update arched %post arex %enable_service arc-arex %enable_service arc-arex-ws # out-of-package testing host certificate if [ $1 -eq 1 ]; then arcctl test-ca init arcctl test-ca hostcert fi %preun arex %stop_on_removal arc-arex %stop_on_removal arc-arex-ws if [ $1 -eq 0 ]; then arcctl test-ca cleanup fi %postun arex %condrestart_on_update arc-arex %condrestart_on_update arc-arex-ws %post gridftpd %enable_service arc-gridftpd %preun gridftpd %stop_on_removal arc-gridftpd %postun gridftpd %condrestart_on_update arc-gridftpd %post datadelivery-service %enable_service arc-datadelivery-service %preun datadelivery-service %stop_on_removal arc-datadelivery-service %postun datadelivery-service %condrestart_on_update arc-datadelivery-service %if %{with_ldap_service} %post infosys-ldap %enable_service arc-infosys-ldap %if %{?fedora}%{!?fedora:0} >= 5 || %{?rhel}%{!?rhel:0} >= 5 semanage port -a -t ldap_port_t -p tcp 2135 2>/dev/null || : semanage fcontext -a -t slapd_etc_t "/var/run/arc/infosys/bdii-slapd\.conf" 2>/dev/null || : semanage fcontext -a -t slapd_db_t "/var/lib/arc/bdii/db(/.*)?" 2>/dev/null || : semanage fcontext -a -t slapd_var_run_t "/var/run/arc/bdii/db(/.*)?" 2>/dev/null || : %endif %preun infosys-ldap %stop_on_removal arc-infosys-ldap %postun infosys-ldap %condrestart_on_update arc-infosys-ldap %if %{?fedora}%{!?fedora:0} >= 5 || %{?rhel}%{!?rhel:0} >= 5 if [ $1 -eq 0 ]; then semanage port -d -t ldap_port_t -p tcp 2135 2>/dev/null || : semanage fcontext -d -t slapd_etc_t "/var/run/arc/infosys/bdii-slapd\.conf" 2>/dev/null || : semanage fcontext -d -t slapd_db_t "/var/lib/arc/bdii/db(/.*)?" 2>/dev/null || : semanage fcontext -d -t slapd_var_run_t "/var/run/arc/bdii/db(/.*)?" 
2>/dev/null || : fi %endif %triggerpostun infosys-ldap -- %{name}-ldap-infosys # Uninstalling the old %{name}-ldap-infosys will remove some selinux config # for %{name}-infosys-ldap - put them back in this triggerpostun script semanage port -a -t ldap_port_t -p tcp 2135 2>/dev/null || : semanage fcontext -a -t slapd_etc_t "/var/run/arc/infosys/bdii-slapd\.conf" 2>/dev/null || : %triggerpostun infosys-ldap -- %{name}-aris # Uninstalling the old %{name}-aris will remove some selinux config # for %{name}-infosys-ldap - put them back in this triggerpostun script semanage fcontext -a -t slapd_db_t "/var/lib/arc/bdii/db(/.*)?" 2>/dev/null || : semanage fcontext -a -t slapd_var_run_t "/var/run/arc/bdii/db(/.*)?" 2>/dev/null || : %triggerun infosys-ldap -- bdii %if %{?suse_version:1}%{!?suse_version:0} FIRST_ARG=1 %restart_on_update arc-infosys-ldap %else service arc-infosys-ldap condrestart > /dev/null 2>&1 || : %endif %endif %if %{with_acix} %post acix-scanner %enable_service arc-acix-scanner %preun acix-scanner %stop_on_removal arc-acix-scanner %postun acix-scanner %condrestart_on_update arc-acix-scanner %post acix-index %enable_service arc-acix-index %preun acix-index %stop_on_removal arc-acix-index %postun acix-index %condrestart_on_update arc-acix-index %endif %files -f %{name}.lang %defattr(-,root,root,-) %doc src/doc/arc.conf.reference src/doc/arc.conf.DELETED src/doc/arc.conf.DELETED-6.8.0 %doc README AUTHORS LICENSE NOTICE %{_libdir}/libarccompute.so.* %{_libdir}/libarccommunication.so.* %{_libdir}/libarccommon.so.* %{_libdir}/libarccredential.so.* %{_libdir}/libarccredentialstore.so.* %{_libdir}/libarccrypto.so.* %{_libdir}/libarcdata.so.* %{_libdir}/libarcdatastaging.so.* %{_libdir}/libarcloader.so.* %{_libdir}/libarcmessage.so.* %{_libdir}/libarcsecurity.so.* %{_libdir}/libarcotokens.so.* %{_libdir}/libarcinfosys.so.* %{_libdir}/libarcwsaddressing.so.* %{_libdir}/libarcwssecurity.so.* %if %{with_xmlsec1} %{_libdir}/libarcxmlsec.so.* %endif %dir %{_libdir}/%{pkgdir} # We need to have libmodcrypto.so close to libarccrypto %{_libdir}/%{pkgdir}/libmodcrypto.so %{_libdir}/%{pkgdir}/libmodcrypto.apd # We need to have libmodcredential.so close to libarccredential %{_libdir}/%{pkgdir}/libmodcredential.so %{_libdir}/%{pkgdir}/libmodcredential.apd %{_libdir}/%{pkgdir}/arc-file-access %{_libdir}/%{pkgdir}/arc-hostname-resolver %{_libdir}/%{pkgdir}/DataStagingDelivery %{_libdir}/%{pkgdir}/arc-dmc %dir %{_libexecdir}/%{pkgdir} %{_libexecdir}/%{pkgdir}/arcconfig-parser %if %{py3default} %dir %{python3_sitearch}/%{pkgdir} %{python3_sitearch}/%{pkgdir}/utils %{python3_sitearch}/%{pkgdir}/__init__.py %{python3_sitearch}/%{pkgdir}/paths.py %{python3_sitearch}/%{pkgdir}/paths_dist.py %dir %{python3_sitearch}/%{pkgdir}/__pycache__ %{python3_sitearch}/%{pkgdir}/__pycache__/__init__.* %{python3_sitearch}/%{pkgdir}/__pycache__/paths.* %{python3_sitearch}/%{pkgdir}/__pycache__/paths_dist.* %else %dir %{python2_sitearch}/%{pkgdir} %{python2_sitearch}/%{pkgdir}/utils %{python2_sitearch}/%{pkgdir}/__init__.py* %{python2_sitearch}/%{pkgdir}/paths.py* %{python2_sitearch}/%{pkgdir}/paths_dist.py* %endif %dir %{_datadir}/%{pkgdir} %{_datadir}/%{pkgdir}/arc.parser.defaults %dir %{_datadir}/%{pkgdir}/test-jobs %{_datadir}/%{pkgdir}/test-jobs/test-job-* %{_datadir}/%{pkgdir}/schema %files client %defattr(-,root,root,-) %doc docdir/client.conf %{_bindir}/arccat %{_bindir}/arcclean %{_bindir}/arccp %{_bindir}/arcget %{_bindir}/arcinfo %{_bindir}/arckill %{_bindir}/arcls %{_bindir}/arcmkdir %{_bindir}/arcrename 
%{_bindir}/arcproxy %{_bindir}/arcrenew %{_bindir}/arcresub %{_bindir}/arcresume %{_bindir}/arcrm %{_bindir}/arcstat %{_bindir}/arcsub %{_bindir}/arcsync %{_bindir}/arctest %dir %{_datadir}/%{pkgdir}/examples %{_datadir}/%{pkgdir}/examples/client.conf %dir %{_sysconfdir}/%{pkgdir} %config(noreplace) %{_sysconfdir}/%{pkgdir}/client.conf %doc %{_mandir}/man1/arccat.1* %doc %{_mandir}/man1/arcclean.1* %doc %{_mandir}/man1/arccp.1* %doc %{_mandir}/man1/arcget.1* %doc %{_mandir}/man1/arcinfo.1* %doc %{_mandir}/man1/arckill.1* %doc %{_mandir}/man1/arcls.1* %doc %{_mandir}/man1/arcmkdir.1* %doc %{_mandir}/man1/arcrename.1* %doc %{_mandir}/man1/arcproxy.1* %doc %{_mandir}/man1/arcrenew.1* %doc %{_mandir}/man1/arcresub.1* %doc %{_mandir}/man1/arcresume.1* %doc %{_mandir}/man1/arcrm.1* %doc %{_mandir}/man1/arcstat.1* %doc %{_mandir}/man1/arcsub.1* %doc %{_mandir}/man1/arcsync.1* %doc %{_mandir}/man1/arctest.1* %dir %{_bashcompdir} %{_bashcompdir}/arc-client-tools %files hed %defattr(-,root,root,-) %doc docdir/hed/* %if %{use_systemd} %{_unitdir}/arched.service %else %{_initrddir}/arched %endif %{_sbindir}/arched %{_libdir}/%{pkgdir}/libecho.so %{_libdir}/%{pkgdir}/libecho.apd %{_datadir}/%{pkgdir}/arched-start %{_datadir}/%{pkgdir}/profiles %doc %{_mandir}/man8/arched.8* %doc %{_mandir}/man5/arc.conf.5* %files gridftpd %defattr(-,root,root,-) %if %{use_systemd} %{_unitdir}/arc-gridftpd.service %else %{_initrddir}/arc-gridftpd %endif %{_sbindir}/gridftpd %{_libdir}/%{pkgdir}/jobsplugin.* %{_libdir}/%{pkgdir}/filedirplugin.* %{_datadir}/%{pkgdir}/arc-gridftpd-start %config(noreplace) %{_sysconfdir}/logrotate.d/%{name}-gridftpd %doc %{_mandir}/man8/gridftpd.8* %files datadelivery-service %defattr(-,root,root,-) %if %{use_systemd} %{_unitdir}/arc-datadelivery-service.service %else %{_initrddir}/arc-datadelivery-service %endif %{_libdir}/%{pkgdir}/libdatadeliveryservice.so %{_libdir}/%{pkgdir}/libdatadeliveryservice.apd %{_datadir}/%{pkgdir}/arc-datadelivery-service-start %config(noreplace) %{_sysconfdir}/logrotate.d/%{name}-datadelivery-service %if %{with_ldap_service} %files infosys-ldap %defattr(-,root,root,-) %if %{use_systemd} %{_unitdir}/arc-infosys-ldap.service %{_unitdir}/arc-infosys-ldap-slapd.service %else %{_initrddir}/arc-infosys-ldap %endif %{_datadir}/%{pkgdir}/create-bdii-config %{_datadir}/%{pkgdir}/create-slapd-config %{_datadir}/%{pkgdir}/glue-generator.pl %{_datadir}/%{pkgdir}/glite-info-provider-ldap %{_datadir}/%{pkgdir}/ldap-schema %config(noreplace) %{_sysconfdir}/logrotate.d/%{name}-infosys-ldap %endif %files monitor %defattr(-,root,root,-) %{_datadir}/%{pkgdir}/monitor %doc %{_mandir}/man7/monitor.7* %files arcctl %{_sbindir}/arcctl %if %{py3default} %dir %{python3_sitearch}/%{pkgdir}/control %{python3_sitearch}/%{pkgdir}/control/__init__.py %{python3_sitearch}/%{pkgdir}/control/CertificateGenerator.py %{python3_sitearch}/%{pkgdir}/control/ControlCommon.py %{python3_sitearch}/%{pkgdir}/control/OSPackage.py %{python3_sitearch}/%{pkgdir}/control/TestCA.py %{python3_sitearch}/%{pkgdir}/control/ThirdPartyDeployment.py %dir %{python3_sitearch}/%{pkgdir}/control/__pycache__ %{python3_sitearch}/%{pkgdir}/control/__pycache__/__init__.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/CertificateGenerator.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/ControlCommon.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/OSPackage.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/TestCA.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/ThirdPartyDeployment.* %else %dir 
%{python2_sitearch}/%{pkgdir}/control %{python2_sitearch}/%{pkgdir}/control/__init__.py* %{python2_sitearch}/%{pkgdir}/control/CertificateGenerator.py* %{python2_sitearch}/%{pkgdir}/control/ControlCommon.py* %{python2_sitearch}/%{pkgdir}/control/OSPackage.py* %{python2_sitearch}/%{pkgdir}/control/TestCA.py* %{python2_sitearch}/%{pkgdir}/control/ThirdPartyDeployment.py* %endif %doc %{_mandir}/man1/arcctl.1* %files arcctl-service %if %{py3default} %{python3_sitearch}/%{pkgdir}/control/Config.py %{python3_sitearch}/%{pkgdir}/control/ServiceCommon.py %{python3_sitearch}/%{pkgdir}/control/Services.py %{python3_sitearch}/%{pkgdir}/control/OSService.py %{python3_sitearch}/%{pkgdir}/control/Validator.py %{python3_sitearch}/%{pkgdir}/control/__pycache__/Config.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/ServiceCommon.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/Services.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/OSService.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/Validator.* %else %{python2_sitearch}/%{pkgdir}/control/Config.py* %{python2_sitearch}/%{pkgdir}/control/ServiceCommon.py* %{python2_sitearch}/%{pkgdir}/control/Services.py* %{python2_sitearch}/%{pkgdir}/control/OSService.py* %{python2_sitearch}/%{pkgdir}/control/Validator.py* %endif %files arex %defattr(-,root,root,-) %if %{use_systemd} %{_unitdir}/arc-arex.service %{_unitdir}/arc-arex-ws.service %else %{_initrddir}/arc-arex %{_initrddir}/arc-arex-ws %endif %{_libexecdir}/%{pkgdir}/cache-clean %{_libexecdir}/%{pkgdir}/cache-list %{_libexecdir}/%{pkgdir}/jura-ng %{_libexecdir}/%{pkgdir}/gm-delegations-converter %{_libexecdir}/%{pkgdir}/gm-jobs %{_libexecdir}/%{pkgdir}/gm-kick %{_libexecdir}/%{pkgdir}/smtp-send %{_libexecdir}/%{pkgdir}/smtp-send.sh %{_libexecdir}/%{pkgdir}/inputcheck %{_libexecdir}/%{pkgdir}/arc-config-check %{_libexecdir}/%{pkgdir}/arc-blahp-logger %{_datadir}/%{pkgdir}/cancel-*-job %{_datadir}/%{pkgdir}/scan-*-job %{_datadir}/%{pkgdir}/submit-*-job %{_libdir}/%{pkgdir}/libarex.so %{_libdir}/%{pkgdir}/libarex.apd %{_libdir}/%{pkgdir}/libcandypond.so %{_libdir}/%{pkgdir}/libcandypond.apd %{_datadir}/%{pkgdir}/CEinfo.pl %{_datadir}/%{pkgdir}/ARC0mod.pm %{_datadir}/%{pkgdir}/FORKmod.pm %{_datadir}/%{pkgdir}/Fork.pm %{_datadir}/%{pkgdir}/SGEmod.pm %{_datadir}/%{pkgdir}/SGE.pm %{_datadir}/%{pkgdir}/LL.pm %{_datadir}/%{pkgdir}/LSF.pm %{_datadir}/%{pkgdir}/PBS.pm %{_datadir}/%{pkgdir}/PBSPRO.pm %{_datadir}/%{pkgdir}/Condor.pm %{_datadir}/%{pkgdir}/SLURMmod.pm %{_datadir}/%{pkgdir}/SLURM.pm %{_datadir}/%{pkgdir}/Boinc.pm %{_datadir}/%{pkgdir}/XmlPrinter.pm %{_datadir}/%{pkgdir}/InfosysHelper.pm %{_datadir}/%{pkgdir}/LdifPrinter.pm %{_datadir}/%{pkgdir}/GLUE2xmlPrinter.pm %{_datadir}/%{pkgdir}/GLUE2ldifPrinter.pm %{_datadir}/%{pkgdir}/NGldifPrinter.pm %{_datadir}/%{pkgdir}/ARC0ClusterInfo.pm %{_datadir}/%{pkgdir}/ARC1ClusterInfo.pm %{_datadir}/%{pkgdir}/ConfigCentral.pm %{_datadir}/%{pkgdir}/GMJobsInfo.pm %{_datadir}/%{pkgdir}/HostInfo.pm %{_datadir}/%{pkgdir}/RTEInfo.pm %{_datadir}/%{pkgdir}/InfoChecker.pm %{_datadir}/%{pkgdir}/IniParser.pm %{_datadir}/%{pkgdir}/LRMSInfo.pm %{_datadir}/%{pkgdir}/Sysinfo.pm %{_datadir}/%{pkgdir}/LogUtils.pm %{_datadir}/%{pkgdir}/condor_env.pm %{_datadir}/%{pkgdir}/cancel_common.sh %{_datadir}/%{pkgdir}/configure-*-env.sh %{_datadir}/%{pkgdir}/submit_common.sh %{_datadir}/%{pkgdir}/scan_common.sh %{_datadir}/%{pkgdir}/lrms_common.sh %{_datadir}/%{pkgdir}/perferator %{_datadir}/%{pkgdir}/PerfData.pl %{_datadir}/%{pkgdir}/arc-arex-start 
%{_datadir}/%{pkgdir}/arc-arex-ws-start %{_datadir}/%{pkgdir}/sql-schema/arex_accounting_db_schema_v1.sql %doc %{_mandir}/man1/arc-config-check.1* %doc %{_mandir}/man1/cache-clean.1* %doc %{_mandir}/man1/cache-list.1* %doc %{_mandir}/man8/gm-delegations-converter.8* %doc %{_mandir}/man8/gm-jobs.8* %doc %{_mandir}/man8/arc-blahp-logger.8* %doc %{_mandir}/man8/a-rex-backtrace-collect.8* %config(noreplace) %{_sysconfdir}/logrotate.d/%{name}-arex %dir %{_localstatedir}/log/arc %dir %{_localstatedir}/spool/arc %dir %{_localstatedir}/spool/arc/ssm %dir %{_localstatedir}/spool/arc/urs %if %{py3default} %{python3_sitearch}/%{pkgdir}/control/AccountingDB.py %{python3_sitearch}/%{pkgdir}/control/AccountingPublishing.py %{python3_sitearch}/%{pkgdir}/control/Accounting.py %{python3_sitearch}/%{pkgdir}/control/Cache.py %{python3_sitearch}/%{pkgdir}/control/DataStaging.py %{python3_sitearch}/%{pkgdir}/control/Jobs.py %{python3_sitearch}/%{pkgdir}/control/RunTimeEnvironment.py %{python3_sitearch}/%{pkgdir}/control/__pycache__/AccountingDB.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/AccountingPublishing.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/Accounting.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/Cache.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/DataStaging.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/Jobs.* %{python3_sitearch}/%{pkgdir}/control/__pycache__/RunTimeEnvironment.* %else %{python2_sitearch}/%{pkgdir}/control/AccountingDB.py* %{python2_sitearch}/%{pkgdir}/control/AccountingPublishing.py* %{python2_sitearch}/%{pkgdir}/control/Accounting.py* %{python2_sitearch}/%{pkgdir}/control/Cache.py* %{python2_sitearch}/%{pkgdir}/control/DataStaging.py* %{python2_sitearch}/%{pkgdir}/control/Jobs.py* %{python2_sitearch}/%{pkgdir}/control/RunTimeEnvironment.py* %endif %{_libexecdir}/%{pkgdir}/arccandypond %dir %{_datadir}/%{pkgdir}/rte %dir %{_datadir}/%{pkgdir}/rte/ENV %{_datadir}/%{pkgdir}/rte/ENV/LRMS-SCRATCH %{_datadir}/%{pkgdir}/rte/ENV/PROXY %{_datadir}/%{pkgdir}/rte/ENV/RTE %{_datadir}/%{pkgdir}/rte/ENV/CANDYPOND %{_datadir}/%{pkgdir}/rte/ENV/SINGULARITY %dir %{_datadir}/%{pkgdir}/rte/ENV/CONDOR %{_datadir}/%{pkgdir}/rte/ENV/CONDOR/DOCKER %{_sbindir}/a-rex-backtrace-collect %config(noreplace) %{_sysconfdir}/arc.conf %if %{with_pythonlrms} %files arex-python-lrms %defattr(-,root,root,-) %{_libexecdir}/%{pkgdir}/arc-sshfs-mount %if %{py3default} %{python3_sitearch}/%{pkgdir}/lrms %else %{python2_sitearch}/%{pkgdir}/lrms %endif %{_datadir}/%{pkgdir}/SLURMPYmod.pm %{_datadir}/%{pkgdir}/job_script.stubs %endif %files community-rtes %defattr(-,root,root,-) %{_datadir}/%{pkgdir}/community_rtes.sh %if %{py3default} %{python3_sitearch}/%{pkgdir}/control/CommunityRTE.py %{python3_sitearch}/%{pkgdir}/control/__pycache__/CommunityRTE.* %else %{python2_sitearch}/%{pkgdir}/control/CommunityRTE.py* %endif %files plugins-needed %defattr(-,root,root,-) %dir %{_libdir}/%{pkgdir}/test %{_libdir}/%{pkgdir}/test/libaccTEST.so %{_libdir}/%{pkgdir}/libaccBroker.so %{_libdir}/%{pkgdir}/libaccEMIES.so %{_libdir}/%{pkgdir}/libaccJobDescriptionParser.so %if %{with_ldns} %{_libdir}/%{pkgdir}/libaccARCHERY.so %endif %{_libdir}/%{pkgdir}/libaccLDAP.so %{_libdir}/%{pkgdir}/libarcshc.so %{_libdir}/%{pkgdir}/libarcshclegacy.so %{_libdir}/%{pkgdir}/libarcshcotokens.so %{_libdir}/%{pkgdir}/libdmcfile.so %{_libdir}/%{pkgdir}/libdmchttp.so %{_libdir}/%{pkgdir}/libdmcldap.so %{_libdir}/%{pkgdir}/libdmcsrm.so %{_libdir}/%{pkgdir}/libdmcrucio.so %{_libdir}/%{pkgdir}/libdmcacix.so 
%{_libdir}/%{pkgdir}/libidentitymap.so %{_libdir}/%{pkgdir}/libarguspdpclient.so %{_libdir}/%{pkgdir}/libmcchttp.so %{_libdir}/%{pkgdir}/libmccmsgvalidator.so %{_libdir}/%{pkgdir}/libmccsoap.so %{_libdir}/%{pkgdir}/libmcctcp.so %{_libdir}/%{pkgdir}/libmcctls.so %{_libdir}/%{pkgdir}/test/libaccTEST.apd %{_libdir}/%{pkgdir}/libaccBroker.apd %{_libdir}/%{pkgdir}/libaccEMIES.apd %{_libdir}/%{pkgdir}/libaccJobDescriptionParser.apd %if %{with_ldns} %{_libdir}/%{pkgdir}/libaccARCHERY.apd %endif %{_libdir}/%{pkgdir}/libaccLDAP.apd %{_libdir}/%{pkgdir}/libarcshc.apd %{_libdir}/%{pkgdir}/libarcshclegacy.apd %{_libdir}/%{pkgdir}/libarcshcotokens.apd %{_libdir}/%{pkgdir}/libdmcfile.apd %{_libdir}/%{pkgdir}/libdmchttp.apd %{_libdir}/%{pkgdir}/libdmcldap.apd %{_libdir}/%{pkgdir}/libdmcsrm.apd %{_libdir}/%{pkgdir}/libdmcrucio.apd %{_libdir}/%{pkgdir}/libdmcacix.apd %{_libdir}/%{pkgdir}/libidentitymap.apd %{_libdir}/%{pkgdir}/libarguspdpclient.apd %{_libdir}/%{pkgdir}/libmcchttp.apd %{_libdir}/%{pkgdir}/libmccmsgvalidator.apd %{_libdir}/%{pkgdir}/libmccsoap.apd %{_libdir}/%{pkgdir}/libmcctcp.apd %{_libdir}/%{pkgdir}/libmcctls.apd %files plugins-globus %defattr(-,root,root,-) %files plugins-globus-common %defattr(-,root,root,-) %{_libdir}/libarcglobusutils.so.* %files plugins-gridftp %defattr(-,root,root,-) %{_libdir}/%{pkgdir}/arc-dmcgridftp %{_libdir}/%{pkgdir}/libdmcgridftpdeleg.so %{_libdir}/%{pkgdir}/libdmcgridftpdeleg.apd %files plugins-lcas-lcmaps %defattr(-,root,root,-) %{_libexecdir}/%{pkgdir}/arc-lcas %{_libexecdir}/%{pkgdir}/arc-lcmaps %files plugins-gridftpjob %defattr(-,root,root,-) %{_libdir}/%{pkgdir}/libaccGRIDFTPJOB.so %{_libdir}/%{pkgdir}/libaccGRIDFTPJOB.apd %if %{with_xrootd} %files plugins-xrootd %defattr(-,root,root,-) %dir %{_libdir}/%{pkgdir}/external %{_libdir}/%{pkgdir}/external/libdmcxrootd.so %{_libdir}/%{pkgdir}/external/libdmcxrootd.apd %{_libdir}/%{pkgdir}/libdmcxrootddeleg.so %{_libdir}/%{pkgdir}/libdmcxrootddeleg.apd %endif %if %{with_gfal} %files plugins-gfal %defattr(-,root,root,-) %dir %{_libdir}/%{pkgdir}/external %{_libdir}/%{pkgdir}/external/libdmcgfal.so %{_libdir}/%{pkgdir}/external/libdmcgfal.apd %{_libdir}/%{pkgdir}/libdmcgfaldeleg.so %{_libdir}/%{pkgdir}/libdmcgfaldeleg.apd %endif %if %{with_s3} %files plugins-s3 %defattr(-,root,root,-) %{_libdir}/%{pkgdir}/libdmcs3.so %{_libdir}/%{pkgdir}/libdmcs3.apd %endif %files plugins-internal %defattr(-,root,root,-) %{_libdir}/%{pkgdir}/libaccINTERNAL.so %{_libdir}/%{pkgdir}/libaccINTERNAL.apd %files plugins-arcrest %defattr(-,root,root,-) %{_libdir}/%{pkgdir}/libaccARCREST.so %{_libdir}/%{pkgdir}/libaccARCREST.apd %files plugins-python %defattr(-,root,root,-) %doc docdir/python/* %{_libdir}/%{pkgdir}/libaccPythonBroker.so %{_libdir}/%{pkgdir}/libaccPythonBroker.apd %{_libdir}/%{pkgdir}/libpythonservice.so %{_libdir}/%{pkgdir}/libpythonservice.apd %if %{with_acix} %files acix-core %defattr(-,root,root,-) %if %{py3default} %dir %{python3_sitelib}/acix %{python3_sitelib}/acix/__init__.py %dir %{python3_sitelib}/acix/__pycache__ %{python3_sitelib}/acix/__pycache__/__init__.* %{python3_sitelib}/acix/core %else %dir %{python2_sitelib}/acix %{python2_sitelib}/acix/__init__.py* %{python2_sitelib}/acix/core %endif %files acix-scanner %defattr(-,root,root,-) %if %{py3default} %{python3_sitelib}/acix/scanner %else %{python2_sitelib}/acix/scanner %endif %if %{use_systemd} %{_unitdir}/arc-acix-scanner.service %else %{_initrddir}/arc-acix-scanner %endif %{_datadir}/%{pkgdir}/arc-acix-scanner-start %files acix-index 
%defattr(-,root,root,-) %if %{py3default} %{python3_sitelib}/acix/indexserver %else %{python2_sitelib}/acix/indexserver %endif %if %{use_systemd} %{_unitdir}/arc-acix-index.service %else %{_initrddir}/arc-acix-index %endif %{_datadir}/%{pkgdir}/arc-acix-index-start %endif %files devel %defattr(-,root,root,-) %doc docdir/devel/* src/hed/shc/arcpdp/*.xsd %{_includedir}/%{pkgdir} %{_libdir}/lib*.so %{_bindir}/wsdl2hed %doc %{_mandir}/man1/wsdl2hed.1* %{_bindir}/arcplugin %doc %{_mandir}/man1/arcplugin.1* %if %{with_python2} %files -n python2-%{name} %defattr(-,root,root,-) %{python2_sitearch}/_arc.*so %if %{py3default} %dir %{python2_sitearch}/%{pkgdir} %{python2_sitearch}/%{pkgdir}/__init__.py* %endif %{python2_sitearch}/%{pkgdir}/[^_p]*.py* %endif %if %{with_python3} %files -n python%{python3_pkgversion}-%{name} %defattr(-,root,root,-) %{python3_sitearch}/_arc.*so %if %{?fedora}%{!?fedora:0} >= 15 || %{?rhel}%{!?rhel:0} >= 7 # Python >= 3.2 uses __pycache__ %if ! %{py3default} %dir %{python3_sitearch}/%{pkgdir} %{python3_sitearch}/%{pkgdir}/__init__.py %dir %{python3_sitearch}/%{pkgdir}/__pycache__ %{python3_sitearch}/%{pkgdir}/__pycache__/__init__.* %endif %{python3_sitearch}/%{pkgdir}/[^_p]*.py %{python3_sitearch}/%{pkgdir}/__pycache__/[^_p]*.* %else # Python 3.1 doesn't use __pycache__ %dir %{python3_sitearch}/%{pkgdir} %{python3_sitearch}/%{pkgdir}/__init__.py* %{python3_sitearch}/%{pkgdir}/[^_p]*.py* %endif %endif %files nordugridmap %defattr(-,root,root,-) %{_sbindir}/nordugridmap %config(noreplace) %{_sysconfdir}/cron.d/nordugridmap %doc %{_mandir}/man8/nordugridmap.8* %files test-utils %defattr(-,root,root,-) %{_bindir}/arcemiestest %{_bindir}/arcperftest %doc %{_mandir}/man1/arcemiestest.1* %doc %{_mandir}/man1/arcperftest.1* %files archery-manage %defattr(-,root,root,-) %{_sbindir}/archery-manage %files wn %defattr(-,root,root,-) %attr(4755,root,root) %{_bindir}/arc-job-cgroup nordugrid-arc-6.14.0/PaxHeaders.30264/config.rpath0000644000000000000000000000013214152153400017635 xustar000000000000000030 mtime=1638455040.946684154 30 atime=1638455074.113182496 30 ctime=1638455095.731507322 nordugrid-arc-6.14.0/config.rpath0000755000175000002070000004364714152153400017643 0ustar00mockbuildmock00000000000000#! /bin/sh # Output a system dependent set of variables, describing how to set the # run time search path of shared libraries in an executable. # # Copyright 1996-2007 Free Software Foundation, Inc. # Taken from GNU libtool, 2001 # Originally by Gordon Matzigkeit , 1996 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # # The first argument passed to this file is the canonical host specification, # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM # or # CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM # The environment variables CC, GCC, LDFLAGS, LD, with_gnu_ld # should be set by the caller. # # The set of defined variables is at the end of this script. # Known limitations: # - On IRIX 6.5 with CC="cc", the run time search patch must not be longer # than 256 bytes, otherwise the compiler driver will dump core. The only # known workaround is to choose shorter directory names for the build # directory and/or the installation directory. # All known linkers require a `.a' archive for static linking (except MSVC, # which needs '.lib'). 
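# Illustrative invocation (a sketch only; the compiler, flags and host
# triplet below are example assumptions, not values required by this
# script).  As the header above says, the caller -- typically a configure
# script -- exports CC, GCC, LDFLAGS, LD and with_gnu_ld and passes the
# canonical host specification as the first argument:
#
#   CC=gcc GCC=yes LDFLAGS='' LD=ld with_gnu_ld=yes \
#     sh ./config.rpath x86_64-pc-linux-gnu
#
# The caller then reads the system-dependent variable settings (wl, libext,
# shlibext, hardcode_libdir_flag_spec, ...) that this script writes out at
# its end.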
libext=a shrext=.so host="$1" host_cpu=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'` host_vendor=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'` host_os=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'` # Code taken from libtool.m4's _LT_CC_BASENAME. for cc_temp in $CC""; do case $cc_temp in compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; \-*) ;; *) break;; esac done cc_basename=`echo "$cc_temp" | sed -e 's%^.*/%%'` # Code taken from libtool.m4's AC_LIBTOOL_PROG_COMPILER_PIC. wl= if test "$GCC" = yes; then wl='-Wl,' else case "$host_os" in aix*) wl='-Wl,' ;; darwin*) case $cc_basename in xlc*) wl='-Wl,' ;; esac ;; mingw* | cygwin* | pw32* | os2*) ;; hpux9* | hpux10* | hpux11*) wl='-Wl,' ;; irix5* | irix6* | nonstopux*) wl='-Wl,' ;; newsos6) ;; linux* | k*bsd*-gnu) case $cc_basename in icc* | ecc*) wl='-Wl,' ;; pgcc | pgf77 | pgf90) wl='-Wl,' ;; ccc*) wl='-Wl,' ;; como) wl='-lopt=' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) wl='-Wl,' ;; esac ;; esac ;; osf3* | osf4* | osf5*) wl='-Wl,' ;; rdos*) ;; solaris*) wl='-Wl,' ;; sunos4*) wl='-Qoption ld ' ;; sysv4 | sysv4.2uw2* | sysv4.3*) wl='-Wl,' ;; sysv4*MP*) ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) wl='-Wl,' ;; unicos*) wl='-Wl,' ;; uts4*) ;; esac fi # Code taken from libtool.m4's AC_LIBTOOL_PROG_LD_SHLIBS. hardcode_libdir_flag_spec= hardcode_libdir_separator= hardcode_direct=no hardcode_minus_L=no case "$host_os" in cygwin* | mingw* | pw32*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. if test "$GCC" != yes; then with_gnu_ld=no fi ;; interix*) # we just hope/assume this is gcc and not c89 (= MSVC++) with_gnu_ld=yes ;; openbsd*) with_gnu_ld=no ;; esac ld_shlibs=yes if test "$with_gnu_ld" = yes; then # Set some defaults for GNU ld with shared library support. These # are reset later if shared libraries are not supported. Putting them # here allows them to be overridden if necessary. # Unlike libtool, we use -rpath here, not --rpath, since the documented # option of GNU ld is called -rpath, not --rpath. hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' case "$host_os" in aix3* | aix4* | aix5*) # On AIX/PPC, the GNU linker is very broken if test "$host_cpu" != ia64; then ld_shlibs=no fi ;; amigaos*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes # Samuel A. Falvo II reports # that the semantics of dynamic libraries on AmigaOS, at least up # to version 4, is to share data among multiple programs linked # with the same dynamic library. Since this doesn't match the # behavior of shared libraries on other platforms, we cannot use # them. ld_shlibs=no ;; beos*) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; cygwin* | mingw* | pw32*) # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. 
hardcode_libdir_flag_spec='-L$libdir' if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then : else ld_shlibs=no fi ;; interix[3-9]*) hardcode_direct=no hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ;; gnu* | linux* | k*bsd*-gnu) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; netbsd*) ;; solaris*) if $LD -v 2>&1 | grep 'BFD 2\.8' > /dev/null; then ld_shlibs=no elif $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) ld_shlibs=no ;; *) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-rpath,$libdir`' else ld_shlibs=no fi ;; esac ;; sunos4*) hardcode_direct=yes ;; *) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; esac if test "$ld_shlibs" = no; then hardcode_libdir_flag_spec= fi else case "$host_os" in aix3*) # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. hardcode_minus_L=yes if test "$GCC" = yes; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. hardcode_direct=unsupported fi ;; aix4* | aix5*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix5*) for ld_flag in $LDFLAGS; do if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then aix_use_runtimelinking=yes break fi done ;; esac fi hardcode_direct=yes hardcode_libdir_separator=':' if test "$GCC" = yes; then case $host_os in aix4.[012]|aix4.[012].*) collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && \ strings "$collect2name" | grep resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct=unsupported hardcode_minus_L=yes hardcode_libdir_flag_spec='-L$libdir' hardcode_libdir_separator= fi ;; esac fi # Begin _LT_AC_SYS_LIBPATH_AIX. echo 'int main () { return 0; }' > conftest.c ${CC} ${LDFLAGS} conftest.c -o conftest aix_libpath=`dump -H conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` fi if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib" fi rm -f conftest.c conftest # End _LT_AC_SYS_LIBPATH_AIX. if test "$aix_use_runtimelinking" = yes; then hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' else hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" fi fi ;; amigaos*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes # see comment about different semantics on the GNU ld section ld_shlibs=no ;; bsdi[45]*) ;; cygwin* | mingw* | pw32*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. 
hardcode_libdir_flag_spec=' ' libext=lib ;; darwin* | rhapsody*) hardcode_direct=no if test "$GCC" = yes ; then : else case $cc_basename in xlc*) ;; *) ld_shlibs=no ;; esac fi ;; dgux*) hardcode_libdir_flag_spec='-L$libdir' ;; freebsd1*) ld_shlibs=no ;; freebsd2.2*) hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes ;; freebsd2*) hardcode_direct=yes hardcode_minus_L=yes ;; freebsd* | dragonfly*) hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes ;; hpux9*) hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes ;; hpux10*) if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes fi ;; hpux11*) if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: case $host_cpu in hppa*64*|ia64*) hardcode_direct=no ;; *) hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; netbsd*) hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes ;; newsos6) hardcode_direct=yes hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; openbsd*) if test -f /usr/libexec/ld.so; then hardcode_direct=yes if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then hardcode_libdir_flag_spec='${wl}-rpath,$libdir' else case "$host_os" in openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) hardcode_libdir_flag_spec='-R$libdir' ;; *) hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ;; esac fi else ld_shlibs=no fi ;; os2*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; osf3*) hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; osf4* | osf5*) if test "$GCC" = yes; then hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' else # Both cc and cxx compiler support -rpath directly hardcode_libdir_flag_spec='-rpath $libdir' fi hardcode_libdir_separator=: ;; solaris*) hardcode_libdir_flag_spec='-R$libdir' ;; sunos4*) hardcode_libdir_flag_spec='-L$libdir' hardcode_direct=yes hardcode_minus_L=yes ;; sysv4) case $host_vendor in sni) hardcode_direct=yes # is this really true??? ;; siemens) hardcode_direct=no ;; motorola) hardcode_direct=no #Motorola manual says yes, but my tests say they lie ;; esac ;; sysv4.3*) ;; sysv4*MP*) if test -d /usr/nec; then ld_shlibs=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) ;; sysv5* | sco3.2v5* | sco5v6*) hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-R,$libdir`' hardcode_libdir_separator=':' ;; uts4*) hardcode_libdir_flag_spec='-L$libdir' ;; *) ld_shlibs=no ;; esac fi # Check dynamic linker characteristics # Code taken from libtool.m4's AC_LIBTOOL_SYS_DYNAMIC_LINKER. # Unlike libtool.m4, here we don't care about _all_ names of the library, but # only about the one the linker finds when passed -lNAME. This is the last # element of library_names_spec in libtool.m4, or possibly two of them if the # linker has special search rules. 
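# Illustrative sketch of the result (the values are an example only, taken
# from the "linux*" branch below): with libname_spec='lib$name' and the
# default shrext of '.so' set near the top of this script, a library named
# "foo" resolves roughly as
#
#   libname_spec       -> libfoo
#   library_names_spec -> libfoo.so
#
# i.e. the one file name the linker finds for "-lfoo", which, as noted in
# the comment above, is the only name this script cares about.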
library_names_spec= # the last element of library_names_spec in libtool.m4 libname_spec='lib$name' case "$host_os" in aix3*) library_names_spec='$libname.a' ;; aix4* | aix5*) library_names_spec='$libname$shrext' ;; amigaos*) library_names_spec='$libname.a' ;; beos*) library_names_spec='$libname$shrext' ;; bsdi[45]*) library_names_spec='$libname$shrext' ;; cygwin* | mingw* | pw32*) shrext=.dll library_names_spec='$libname.dll.a $libname.lib' ;; darwin* | rhapsody*) shrext=.dylib library_names_spec='$libname$shrext' ;; dgux*) library_names_spec='$libname$shrext' ;; freebsd1*) ;; freebsd* | dragonfly*) case "$host_os" in freebsd[123]*) library_names_spec='$libname$shrext$versuffix' ;; *) library_names_spec='$libname$shrext' ;; esac ;; gnu*) library_names_spec='$libname$shrext' ;; hpux9* | hpux10* | hpux11*) case $host_cpu in ia64*) shrext=.so ;; hppa*64*) shrext=.sl ;; *) shrext=.sl ;; esac library_names_spec='$libname$shrext' ;; interix[3-9]*) library_names_spec='$libname$shrext' ;; irix5* | irix6* | nonstopux*) library_names_spec='$libname$shrext' case "$host_os" in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= ;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 ;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 ;; *) libsuff= shlibsuff= ;; esac ;; esac ;; linux*oldld* | linux*aout* | linux*coff*) ;; linux* | k*bsd*-gnu) library_names_spec='$libname$shrext' ;; knetbsd*-gnu) library_names_spec='$libname$shrext' ;; netbsd*) library_names_spec='$libname$shrext' ;; newsos6) library_names_spec='$libname$shrext' ;; nto-qnx*) library_names_spec='$libname$shrext' ;; openbsd*) library_names_spec='$libname$shrext$versuffix' ;; os2*) libname_spec='$name' shrext=.dll library_names_spec='$libname.a' ;; osf3* | osf4* | osf5*) library_names_spec='$libname$shrext' ;; rdos*) ;; solaris*) library_names_spec='$libname$shrext' ;; sunos4*) library_names_spec='$libname$shrext$versuffix' ;; sysv4 | sysv4.3*) library_names_spec='$libname$shrext' ;; sysv4*MP*) library_names_spec='$libname$shrext' ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) library_names_spec='$libname$shrext' ;; uts4*) library_names_spec='$libname$shrext' ;; esac sed_quote_subst='s/\(["`$\\]\)/\\\1/g' escaped_wl=`echo "X$wl" | sed -e 's/^X//' -e "$sed_quote_subst"` shlibext=`echo "$shrext" | sed -e 's,^\.,,'` escaped_libname_spec=`echo "X$libname_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` escaped_library_names_spec=`echo "X$library_names_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` escaped_hardcode_libdir_flag_spec=`echo "X$hardcode_libdir_flag_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` LC_ALL=C sed -e 's/^\([a-zA-Z0-9_]*\)=/acl_cv_\1=/' < POTFILES.in.2 ; \ if diff $(srcdir)/POTFILES.in POTFILES.in.2 >/dev/null 2>&1 ; then \ rm -f POTFILES.in.2 ; \ else \ mv POTFILES.in.2 $(srcdir)/POTFILES.in ; \ fi DISTFILES.extra1 = Rules-POTFILES nordugrid-arc-6.14.0/po/PaxHeaders.30264/hu.po0000644000000000000000000000013214152153477016740 xustar000000000000000030 mtime=1638455103.535624582 30 atime=1638455103.796628504 30 ctime=1638455103.894629976 nordugrid-arc-6.14.0/po/hu.po0000644000175000002070000214546414152153477016745 0ustar00mockbuildmock00000000000000# Translation of Arc.po to Hungarian # Gábor RÅ‘czei , 2010. 
# Translation file for the Advanced Resource Connector (Arc) msgid "" msgstr "" "Project-Id-Version: Arc\n" "Report-Msgid-Bugs-To: support@nordugrid.org\n" "POT-Creation-Date: 2021-12-02 15:25+0100\n" "PO-Revision-Date: 2010-07-05 12:25+0100\n" "Last-Translator: Gábor RÅ‘czei \n" "Language-Team: Hungarian\n" "Language: \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" "X-Poedit-Language: Hungarian\n" "X-Poedit-Country: HUNGARY\n" "X-Poedit-SourceCharset: utf-8\n" #: src/clients/compute/arccat.cpp:35 src/clients/compute/arcclean.cpp:34 #: src/clients/compute/arcget.cpp:35 src/clients/compute/arckill.cpp:33 #: src/clients/compute/arcrenew.cpp:32 src/clients/compute/arcresub.cpp:36 #: src/clients/compute/arcresume.cpp:32 src/clients/compute/arcstat.cpp:34 msgid "[job ...]" msgstr "" #: src/clients/compute/arccat.cpp:36 msgid "" "The arccat command performs the cat command on the stdout, stderr or grid\n" "manager's error log of the job." msgstr "" #: src/clients/compute/arccat.cpp:43 src/clients/compute/arcclean.cpp:41 #: src/clients/compute/arcget.cpp:42 src/clients/compute/arcinfo.cpp:45 #: src/clients/compute/arckill.cpp:40 src/clients/compute/arcrenew.cpp:37 #: src/clients/compute/arcresub.cpp:41 src/clients/compute/arcresume.cpp:37 #: src/clients/compute/arcstat.cpp:42 src/clients/compute/arcsub.cpp:53 #: src/clients/compute/arcsync.cpp:147 src/clients/compute/arctest.cpp:64 #: src/clients/credentials/arcproxy.cpp:457 src/clients/data/arccp.cpp:641 #: src/clients/data/arcls.cpp:347 src/clients/data/arcmkdir.cpp:125 #: src/clients/data/arcrename.cpp:136 src/clients/data/arcrm.cpp:151 #: src/hed/daemon/unix/main_unix.cpp:341 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1263 #: src/hed/libs/data/DataExternalHelper.cpp:358 #, c-format msgid "%s version %s" msgstr "%s verzió %s" #: src/clients/compute/arccat.cpp:52 src/clients/compute/arcclean.cpp:50 #: src/clients/compute/arcget.cpp:51 src/clients/compute/arcinfo.cpp:53 #: src/clients/compute/arckill.cpp:49 src/clients/compute/arcrenew.cpp:46 #: src/clients/compute/arcresub.cpp:50 src/clients/compute/arcresume.cpp:46 #: src/clients/compute/arcstat.cpp:51 src/clients/compute/arcsub.cpp:62 #: src/clients/compute/arcsync.cpp:156 src/clients/compute/arctest.cpp:86 #: src/clients/credentials/arcproxy.cpp:465 src/clients/data/arccp.cpp:648 #: src/clients/data/arcls.cpp:355 src/clients/data/arcmkdir.cpp:133 #: src/clients/data/arcrename.cpp:144 src/clients/data/arcrm.cpp:160 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:174 #: src/services/a-rex/grid-manager/GridManager.cpp:110 #: src/services/a-rex/grid-manager/log/JobLog.cpp:139 #, c-format msgid "Running command: %s" msgstr "" #: src/clients/compute/arccat.cpp:63 src/clients/compute/arcclean.cpp:61 #: src/clients/compute/arcget.cpp:62 src/clients/compute/arcinfo.cpp:65 #: src/clients/compute/arckill.cpp:60 src/clients/compute/arcrenew.cpp:57 #: src/clients/compute/arcresub.cpp:54 src/clients/compute/arcresume.cpp:50 #: src/clients/compute/arcstat.cpp:62 src/clients/compute/arcsub.cpp:66 #: src/clients/compute/arcsync.cpp:167 src/clients/compute/arctest.cpp:90 #: src/clients/data/arccp.cpp:671 src/clients/data/arcls.cpp:377 #: src/clients/data/arcmkdir.cpp:155 src/clients/data/arcrename.cpp:166 #: src/clients/data/arcrm.cpp:182 msgid "Failed configuration initialization" msgstr "Nem sikerült betölteni a konfigurációt" #: src/clients/compute/arccat.cpp:78 
src/clients/compute/arcclean.cpp:76 #: src/clients/compute/arcget.cpp:90 src/clients/compute/arckill.cpp:75 #: src/clients/compute/arcrenew.cpp:70 src/clients/compute/arcresub.cpp:85 #: src/clients/compute/arcresume.cpp:72 src/clients/compute/arcstat.cpp:71 #, fuzzy, c-format msgid "Cannot read specified jobid file: %s" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/clients/compute/arccat.cpp:89 src/clients/compute/arcclean.cpp:87 #: src/clients/compute/arcget.cpp:101 src/clients/compute/arckill.cpp:86 #: src/clients/compute/arcrenew.cpp:81 src/clients/compute/arcresub.cpp:99 #: src/clients/compute/arcresume.cpp:83 src/clients/compute/arcstat.cpp:105 msgid "No jobs given" msgstr "Nem adott meg feladatot" #: src/clients/compute/arccat.cpp:102 src/clients/compute/arcclean.cpp:100 #: src/clients/compute/arcget.cpp:114 src/clients/compute/arckill.cpp:99 #: src/clients/compute/arcrenew.cpp:94 src/clients/compute/arcresub.cpp:109 #: src/clients/compute/arcresume.cpp:96 src/clients/compute/arcstat.cpp:117 #, fuzzy, c-format msgid "Job list file (%s) doesn't exist" msgstr "Az XML konfigurációs fájl: %s nem létezik" #: src/clients/compute/arccat.cpp:109 src/clients/compute/arcclean.cpp:107 #: src/clients/compute/arcget.cpp:121 src/clients/compute/arckill.cpp:106 #: src/clients/compute/arcrenew.cpp:101 src/clients/compute/arcresub.cpp:116 #: src/clients/compute/arcresume.cpp:103 src/clients/compute/arcstat.cpp:124 #: src/clients/compute/arctest.cpp:335 #, fuzzy, c-format msgid "Unable to read job information from file (%s)" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/clients/compute/arccat.cpp:118 src/clients/compute/arcclean.cpp:115 #: src/clients/compute/arcget.cpp:129 src/clients/compute/arckill.cpp:114 #: src/clients/compute/arcrenew.cpp:110 src/clients/compute/arcresub.cpp:124 #: src/clients/compute/arcresume.cpp:112 src/clients/compute/arcstat.cpp:133 #, c-format msgid "Warning: Job not found in job list: %s" msgstr "" #: src/clients/compute/arccat.cpp:131 src/clients/compute/arcclean.cpp:170 #: src/clients/compute/arcget.cpp:142 src/clients/compute/arckill.cpp:126 #: src/clients/compute/arcrenew.cpp:122 src/clients/compute/arcresub.cpp:136 #: src/clients/compute/arcresume.cpp:124 #, fuzzy msgid "No jobs" msgstr "Nem adott meg feladatot" #: src/clients/compute/arccat.cpp:146 #, c-format msgid "Could not create temporary file \"%s\"" msgstr "" #: src/clients/compute/arccat.cpp:147 src/clients/compute/arccat.cpp:153 #, c-format msgid "Cannot create output of %s for any jobs" msgstr "" #: src/clients/compute/arccat.cpp:154 #, fuzzy, c-format msgid "Invalid destination URL %s" msgstr "Érvénytelen URL: %s" #: src/clients/compute/arccat.cpp:172 #, c-format msgid "Job deleted: %s" msgstr "" #: src/clients/compute/arccat.cpp:182 #, c-format msgid "Job has not started yet: %s" msgstr "" #: src/clients/compute/arccat.cpp:223 #, c-format msgid "Cannot determine the %s location: %s" msgstr "" #: src/clients/compute/arccat.cpp:228 #, c-format msgid "Cannot create output of %s for job (%s): Invalid source %s" msgstr "" #: src/clients/compute/arccat.cpp:241 #, c-format msgid "Catting %s for job %s" msgstr "" #: src/clients/compute/arcclean.cpp:35 #, fuzzy msgid "The arcclean command removes a job from the computing resource." msgstr "Az arcclean parancs eltávolít egy feladatot a távoli klaszterröl" #: src/clients/compute/arcclean.cpp:139 msgid "" "You are about to remove jobs from the job list for which no information " "could be\n" "found. 
NOTE: Recently submitted jobs might not have appeared in the " "information\n" "system, and this action will also remove such jobs." msgstr "" #: src/clients/compute/arcclean.cpp:142 msgid "Are you sure you want to clean jobs missing information?" msgstr "" #: src/clients/compute/arcclean.cpp:143 src/clients/compute/arcsync.cpp:221 msgid "y" msgstr "y" #: src/clients/compute/arcclean.cpp:143 src/clients/compute/arcsync.cpp:221 msgid "n" msgstr "n" #: src/clients/compute/arcclean.cpp:148 msgid "Jobs missing information will not be cleaned!" msgstr "" #: src/clients/compute/arcclean.cpp:164 src/clients/compute/arcresub.cpp:177 #: src/clients/compute/arctest.cpp:339 #, fuzzy, c-format msgid "Warning: Failed to write job information to file (%s)" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/clients/compute/arcclean.cpp:165 msgid "" " Run 'arcclean -s Undefined' to remove cleaned jobs from job list" msgstr "" #: src/clients/compute/arcclean.cpp:174 #, c-format msgid "Jobs processed: %d, deleted: %d" msgstr "" #: src/clients/compute/arcget.cpp:36 msgid "The arcget command is used for retrieving the results from a job." msgstr "" "Az arcget parancsot arra lehet használni, hogy a feladat eredményeit " "megjelenítse" #: src/clients/compute/arcget.cpp:78 #, fuzzy, c-format msgid "Job download directory from user configuration file: %s" msgstr "voms szerver fájljának az elérési útvonala" #: src/clients/compute/arcget.cpp:81 #, fuzzy msgid "Job download directory will be created in present working directory." msgstr "" "könyvtár letöltése (a feladat könyvtára ebben a könyvtárban fog létrejönni)" #: src/clients/compute/arcget.cpp:85 #, fuzzy, c-format msgid "Job download directory: %s" msgstr "voms szerver fájljának az elérési útvonala" #: src/clients/compute/arcget.cpp:152 #, c-format msgid "Unable to create directory for storing results (%s) - %s" msgstr "" #: src/clients/compute/arcget.cpp:162 #, c-format msgid "Results stored at: %s" msgstr "" #: src/clients/compute/arcget.cpp:174 src/clients/compute/arckill.cpp:142 msgid "Warning: Some jobs were not removed from server" msgstr "" #: src/clients/compute/arcget.cpp:175 src/clients/compute/arcget.cpp:182 #: src/clients/compute/arckill.cpp:143 msgid " Use arcclean to remove retrieved jobs from job list" msgstr "" #: src/clients/compute/arcget.cpp:181 src/clients/compute/arckill.cpp:149 #: src/clients/compute/arcresub.cpp:207 #, c-format msgid "Warning: Failed removing jobs from file (%s)" msgstr "" #: src/clients/compute/arcget.cpp:186 #, c-format msgid "" "Jobs processed: %d, successfully retrieved: %d, successfully cleaned: %d" msgstr "" #: src/clients/compute/arcget.cpp:190 #, c-format msgid "Jobs processed: %d, successfully retrieved: %d" msgstr "" #: src/clients/compute/arcinfo.cpp:34 msgid "[resource ...]" msgstr "" #: src/clients/compute/arcinfo.cpp:35 #, fuzzy msgid "" "The arcinfo command is used for obtaining the status of computing resources " "on the Grid." msgstr "" "Az arcinfo paranccsal lehet lekérdezni az egyes klaszterek állapotát a grid-" "ben." 
#: src/clients/compute/arcinfo.cpp:142 msgid "Information endpoint" msgstr "" #: src/clients/compute/arcinfo.cpp:153 msgid "Submission endpoint" msgstr "" #: src/clients/compute/arcinfo.cpp:155 msgid "status" msgstr "" #: src/clients/compute/arcinfo.cpp:157 #, fuzzy msgid "interface" msgstr "Felhasználó oldali hiba" #: src/clients/compute/arcinfo.cpp:176 msgid "ERROR: Failed to retrieve information from the following endpoints:" msgstr "" #: src/clients/compute/arcinfo.cpp:189 msgid "ERROR: Failed to retrieve information" msgstr "" #: src/clients/compute/arcinfo.cpp:191 msgid "from the following endpoints:" msgstr "" #: src/clients/compute/arckill.cpp:34 msgid "The arckill command is used to kill running jobs." msgstr "Az arckill paranccsal lehet megölni egy futó feladatot" #: src/clients/compute/arckill.cpp:150 msgid "" " Run 'arcclean -s Undefined' to remove killed jobs from job list" msgstr "" #: src/clients/compute/arckill.cpp:153 #, c-format msgid "Jobs processed: %d, successfully killed: %d, successfully cleaned: %d" msgstr "" #: src/clients/compute/arckill.cpp:155 #, c-format msgid "Jobs processed: %d, successfully killed: %d" msgstr "" #: src/clients/compute/arcrenew.cpp:128 #, c-format msgid "Jobs processed: %d, renewed: %d" msgstr "" #: src/clients/compute/arcresub.cpp:79 msgid "--same and --not-same cannot be specified together." msgstr "" #: src/clients/compute/arcresub.cpp:153 msgid "" "It is not possible to resubmit jobs without new target information discovery" msgstr "" #: src/clients/compute/arcresub.cpp:166 #, fuzzy msgid "No jobs to resubmit with the specified status" msgstr "Nincs megadva feladat leírás bemeneti adatként" #: src/clients/compute/arcresub.cpp:173 src/clients/compute/submit.cpp:34 #, c-format msgid "Job submitted with jobid: %s" msgstr "Feladat elküldve ezzel az azonítóval: %s" #: src/clients/compute/arcresub.cpp:178 msgid " To recover missing jobs, run arcsync" msgstr "" #: src/clients/compute/arcresub.cpp:183 #, fuzzy, c-format msgid "Cannot write jobids to file (%s)" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/clients/compute/arcresub.cpp:194 #, c-format msgid "" "Resubmission of job (%s) succeeded, but killing the job failed - it will " "still appear in the job list" msgstr "" #: src/clients/compute/arcresub.cpp:203 #, c-format msgid "" "Resubmission of job (%s) succeeded, but cleaning the job failed - it will " "still appear in the job list" msgstr "" #: src/clients/compute/arcresub.cpp:208 msgid " Use arcclean to remove non-existing jobs" msgstr "" #: src/clients/compute/arcresub.cpp:215 #, fuzzy msgid "Job resubmission summary:" msgstr "Job küldési összefoglaló" #: src/clients/compute/arcresub.cpp:217 #, fuzzy, c-format msgid "%d of %d jobs were resubmitted" msgstr "%d %d feladatból elküldve" #: src/clients/compute/arcresub.cpp:219 #, fuzzy, c-format msgid "The following %d were not resubmitted" msgstr "%d nem lett elküldve" #: src/clients/compute/arcresume.cpp:130 #, c-format msgid "Jobs processed: %d, resumed: %d" msgstr "" #: src/clients/compute/arcstat.cpp:35 #, fuzzy msgid "" "The arcstat command is used for obtaining the status of jobs that have\n" "been submitted to Grid enabled resources." msgstr "" "Az arcstat paranccsal lehet lekérdezni azon feladatok állapotát,\n" "amelyek el lettek küldve a grid-ben lévÅ‘ klaszterre." #: src/clients/compute/arcstat.cpp:79 msgid "The 'sort' and 'rsort' flags cannot be specified at the same time." 
msgstr "A 'sort' vagy az 'rsort' kapcsolókat nem lehet egyszerre használni" #: src/clients/compute/arcstat.cpp:149 msgid "No jobs found, try later" msgstr "" #: src/clients/compute/arcstat.cpp:193 #, c-format msgid "Status of %d jobs was queried, %d jobs returned information" msgstr "" #: src/clients/compute/arcsub.cpp:45 msgid "[filename ...]" msgstr "[fájlnév ...]" #: src/clients/compute/arcsub.cpp:46 #, fuzzy msgid "" "The arcsub command is used for submitting jobs to Grid enabled computing\n" "resources." msgstr "Az arcsub paranccsal lehet feladatot küldeni a grid-be." #: src/clients/compute/arcsub.cpp:94 msgid "No job description input specified" msgstr "Nincs megadva feladat leírás bemeneti adatként" #: src/clients/compute/arcsub.cpp:107 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:582 #, c-format msgid "Can not open job description file: %s" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/clients/compute/arcsub.cpp:135 src/clients/compute/arcsub.cpp:163 msgid "Invalid JobDescription:" msgstr "Érvénytelen feladat leírás:" #: src/clients/compute/arcsub.cpp:198 src/clients/compute/arctest.cpp:229 msgid "" "Cannot adapt job description to the submission target when information " "discovery is turned off" msgstr "" #: src/clients/compute/arcsync.cpp:66 src/clients/compute/arcsync.cpp:174 #, c-format msgid "Warning: Unable to open job list file (%s), unknown format" msgstr "" #: src/clients/compute/arcsync.cpp:76 msgid "Found the following jobs:" msgstr "" #: src/clients/compute/arcsync.cpp:86 msgid "Total number of jobs found: " msgstr "" #: src/clients/compute/arcsync.cpp:98 msgid "Found the following new jobs:" msgstr "" #: src/clients/compute/arcsync.cpp:108 msgid "Total number of new jobs found: " msgstr "" #: src/clients/compute/arcsync.cpp:113 #, c-format msgid "ERROR: Failed to write job information to file (%s)" msgstr "" #: src/clients/compute/arcsync.cpp:140 msgid "" "The arcsync command synchronizes your local job list with the information " "at\n" "the given resources or index servers." msgstr "" #: src/clients/compute/arcsync.cpp:180 #, fuzzy, c-format msgid "Warning: Unable to read local list of jobs from file (%s)" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/clients/compute/arcsync.cpp:185 #, fuzzy, c-format msgid "Warning: Unable to truncate local list of jobs in file (%s)" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/clients/compute/arcsync.cpp:191 #, c-format msgid "Warning: Unable to create job list file (%s), jobs list is destroyed" msgstr "" #: src/clients/compute/arcsync.cpp:195 #, fuzzy, c-format msgid "" "Warning: Failed to write local list of jobs into file (%s), jobs list is " "destroyed" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/clients/compute/arcsync.cpp:215 msgid "" "Synchronizing the local list of active jobs with the information in the\n" "information system can result in some inconsistencies. Very recently " "submitted\n" "jobs might not yet be present, whereas jobs very recently scheduled for\n" "deletion can still be present." msgstr "" #: src/clients/compute/arcsync.cpp:220 msgid "Are you sure you want to synchronize your local job list?" msgstr "" #: src/clients/compute/arcsync.cpp:225 msgid "Cancelling synchronization request" msgstr "" #: src/clients/compute/arcsync.cpp:243 msgid "" "No services specified. 
Please configure default services in the client " "configuration, or specify a cluster or index (-c or -g options, see arcsync -" "h)." msgstr "" #: src/clients/compute/arctest.cpp:57 msgid " " msgstr "" #: src/clients/compute/arctest.cpp:58 #, fuzzy msgid "The arctest command is used for testing clusters as resources." msgstr "" "Az arcget parancsot arra lehet használni, hogy a feladat eredményeit " "megjelenítse" #: src/clients/compute/arctest.cpp:70 msgid "" "Nothing to do:\n" "you have to either specify a test job id with -J (--job)\n" "or query information about the certificates with -E (--certificate)\n" msgstr "" #: src/clients/compute/arctest.cpp:77 msgid "" "For the 1st test job you also have to specify a runtime value with -r (--" "runtime) option." msgstr "" #: src/clients/compute/arctest.cpp:111 #, fuzzy msgid "Certificate information:" msgstr "verzió információ kiírása" #: src/clients/compute/arctest.cpp:115 #, fuzzy msgid "No user-certificate found" msgstr "publikus kulcs elérési útvonala" #: src/clients/compute/arctest.cpp:118 #, fuzzy, c-format msgid "Certificate: %s" msgstr "Célállomás: %s" #: src/clients/compute/arctest.cpp:120 #, fuzzy, c-format msgid "Subject name: %s" msgstr "Tárgy: %s" #: src/clients/compute/arctest.cpp:121 #, fuzzy, c-format msgid "Valid until: %s" msgstr "A proxy eddig érvényes: %s" #: src/clients/compute/arctest.cpp:125 msgid "Unable to determine certificate information" msgstr "" #: src/clients/compute/arctest.cpp:129 #, fuzzy msgid "Proxy certificate information:" msgstr "verzió információ kiírása" #: src/clients/compute/arctest.cpp:131 msgid "No proxy found" msgstr "" #: src/clients/compute/arctest.cpp:134 #, fuzzy, c-format msgid "Proxy: %s" msgstr "Proxy elérési útvonal: %s" #: src/clients/compute/arctest.cpp:135 #, fuzzy, c-format msgid "Proxy-subject: %s" msgstr "Tárgy: %s" #: src/clients/compute/arctest.cpp:137 #, fuzzy msgid "Valid for: Proxy expired" msgstr "Nem használható tovább a proxy: Lejárt a proxy" #: src/clients/compute/arctest.cpp:139 #, fuzzy msgid "Valid for: Proxy not valid" msgstr "Nem használható tovább a proxy: Nem érvényes a proxy" #: src/clients/compute/arctest.cpp:141 #, fuzzy, c-format msgid "Valid for: %s" msgstr "Érvénytelen URL: %s" #: src/clients/compute/arctest.cpp:146 #, c-format msgid "Certificate issuer: %s" msgstr "" #: src/clients/compute/arctest.cpp:150 #, fuzzy msgid "CA-certificates installed:" msgstr "publikus kulcs elérési útvonala" #: src/clients/compute/arctest.cpp:172 msgid "Unable to detect if issuer certificate is installed." msgstr "" #: src/clients/compute/arctest.cpp:175 msgid "Your issuer's certificate is not installed" msgstr "" #: src/clients/compute/arctest.cpp:189 #, c-format msgid "No test-job, with ID \"%d\"" msgstr "" #: src/clients/compute/arctest.cpp:245 #, c-format msgid "Unable to load broker %s" msgstr "Nem sikerült betölteni a %s bróker modult" #: src/clients/compute/arctest.cpp:248 #: src/hed/libs/compute/BrokerPlugin.cpp:106 #, c-format msgid "Broker %s loaded" msgstr "%s bróker betöltve" #: src/clients/compute/arctest.cpp:270 #, fuzzy msgid "Test aborted because no resource returned any information" msgstr "" "Feladat küldés sikertelen, mert egyetlen klaszter sem adott vissza " "információt magáról" #: src/clients/compute/arctest.cpp:272 src/clients/compute/submit.cpp:170 msgid "" "Unable to adapt job description to any resource, no resource information " "could be obtained." 
msgstr "" #: src/clients/compute/arctest.cpp:273 src/clients/compute/submit.cpp:171 msgid "Original job description is listed below:" msgstr "" #: src/clients/compute/arctest.cpp:283 msgid "" "ERROR: Test aborted because no suitable resources were found for the test-job" msgstr "" #: src/clients/compute/arctest.cpp:285 msgid "" "ERROR: Dumping job description aborted because no suitable resources were " "found for the test-job" msgstr "" #: src/clients/compute/arctest.cpp:294 #, c-format msgid "Submitting test-job %d:" msgstr "" #: src/clients/compute/arctest.cpp:298 #, c-format msgid "Client version: nordugrid-arc-%s" msgstr "" #: src/clients/compute/arctest.cpp:306 #, fuzzy, c-format msgid "Cannot write jobid (%s) to file (%s)" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/clients/compute/arctest.cpp:307 #, fuzzy, c-format msgid "Test submitted with jobid: %s" msgstr "Feladat elküldve ezzel az azonítóval: %s" #: src/clients/compute/arctest.cpp:322 #, c-format msgid "Computing service: %s" msgstr "" #: src/clients/compute/arctest.cpp:328 #, fuzzy msgid "Test failed, no more possible targets" msgstr "A feladat küldés meghiusúlt, mert nincs több szabad várakozó sor." #: src/clients/compute/arctest.cpp:341 src/clients/compute/submit.cpp:49 msgid "To recover missing jobs, run arcsync" msgstr "" #: src/clients/compute/arctest.cpp:354 src/clients/compute/submit.cpp:200 #, fuzzy, c-format msgid "" "Unable to prepare job description according to needs of the target resource " "(%s)." msgstr "Nem tudom kiíratni a feladat leírást: Nem található várakozó sor." #: src/clients/compute/arctest.cpp:364 src/clients/compute/submit.cpp:216 #, fuzzy, c-format msgid "" "An error occurred during the generation of job description to be sent to %s" msgstr "Egy hiba lépett fel a feladat leírás elkészítése közben." #: src/clients/compute/arctest.cpp:368 src/clients/compute/submit.cpp:220 #, c-format msgid "Job description to be sent to %s:" msgstr "Feladat leírás elküldve ide: %s" #: src/clients/compute/submit.cpp:40 #, fuzzy, c-format msgid "Cannot write job IDs to file (%s)" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/clients/compute/submit.cpp:45 #, c-format msgid "Unable to open job list file (%s), unknown format" msgstr "" #: src/clients/compute/submit.cpp:47 #, fuzzy, c-format msgid "Failed to write job information to database (%s)" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/clients/compute/submit.cpp:51 #, c-format msgid "Record about new job successfully added to the database (%s)" msgstr "" #: src/clients/compute/submit.cpp:57 msgid "Job submission summary:" msgstr "Job küldési összefoglaló" #: src/clients/compute/submit.cpp:59 #, c-format msgid "%d of %d jobs were submitted" msgstr "%d %d feladatból elküldve" #: src/clients/compute/submit.cpp:61 #, fuzzy msgid "The following jobs were not submitted:" msgstr "%d nem lett elküldve" #: src/clients/compute/submit.cpp:65 msgid "Job nr." msgstr "" #: src/clients/compute/submit.cpp:75 #, fuzzy, c-format msgid "ERROR: Unable to load broker %s" msgstr "Nem sikerült betölteni a %s bróker modult" #: src/clients/compute/submit.cpp:79 #, fuzzy msgid "" "ERROR: Job submission aborted because no resource returned any information" msgstr "" "Feladat küldés sikertelen, mert egyetlen klaszter sem adott vissza " "információt magáról" #: src/clients/compute/submit.cpp:83 msgid "ERROR: One or multiple job descriptions was not submitted." 
msgstr "" #: src/clients/compute/submit.cpp:100 #, c-format msgid "" "A computing resource using the GridFTP interface was requested, but\n" "%sthe corresponding plugin could not be loaded. Is the plugin installed?\n" "%sIf not, please install the package 'nordugrid-arc-plugins-globus'.\n" "%sDepending on your type of installation the package name might differ." msgstr "" #: src/clients/compute/submit.cpp:125 #, c-format msgid "Removing endpoint %s: It has an unrequested interface (%s)." msgstr "" #: src/clients/compute/submit.cpp:183 #, c-format msgid "Dumping job description aborted: Unable to load broker %s" msgstr "" #: src/clients/compute/submit.cpp:238 #, fuzzy msgid "" "Unable to prepare job description according to needs of the target resource." msgstr "Nem tudom kiíratni a feladat leírást: Nem található várakozó sor." #: src/clients/compute/submit.cpp:322 src/clients/compute/submit.cpp:352 #, c-format msgid "Service endpoint %s (type %s) added to the list for resource discovery" msgstr "" #: src/clients/compute/submit.cpp:332 msgid "" "There are no endpoints in registry that match requested info endpoint type" msgstr "" #: src/clients/compute/submit.cpp:373 #, c-format msgid "Service endpoint %s (type %s) added to the list for direct submission" msgstr "" #: src/clients/compute/submit.cpp:381 msgid "" "There are no endpoints in registry that match requested submission endpoint " "type" msgstr "" #: src/clients/compute/utils.cpp:109 #, c-format msgid "Types of execution services that %s is able to submit jobs to:" msgstr "" #: src/clients/compute/utils.cpp:112 #, c-format msgid "Types of registry services that %s is able to collect information from:" msgstr "" #: src/clients/compute/utils.cpp:115 #, c-format msgid "" "Types of local information services that %s is able to collect information " "from:" msgstr "" #: src/clients/compute/utils.cpp:118 #, c-format msgid "" "Types of local information services that %s is able to collect job " "information from:" msgstr "" #: src/clients/compute/utils.cpp:121 #, c-format msgid "Types of services that %s is able to manage jobs at:" msgstr "" #: src/clients/compute/utils.cpp:124 #, fuzzy, c-format msgid "Job description languages supported by %s:" msgstr "Feladat leírás elküldve ide: %s" #: src/clients/compute/utils.cpp:127 #, c-format msgid "Brokers available to %s:" msgstr "" #: src/clients/compute/utils.cpp:150 #, c-format msgid "" "Default broker (%s) is not available. When using %s a broker should be " "specified explicitly (-b option)." msgstr "" #: src/clients/compute/utils.cpp:160 msgid "Proxy expired. Job submission aborted. Please run 'arcproxy'!" msgstr "" #: src/clients/compute/utils.cpp:165 msgid "" "Cannot find any proxy. This application currently cannot run without a " "proxy.\n" " If you have the proxy file in a non-default location,\n" " please make sure the path is specified in the client configuration file.\n" " If you don't have a proxy yet, please run 'arcproxy'!" msgstr "" #: src/clients/compute/utils.cpp:277 msgid "" "It is impossible to mix ARC6 target selection options with legacy options. " "All legacy options will be ignored!" msgstr "" #: src/clients/compute/utils.cpp:345 #, fuzzy, c-format msgid "Unsupported submission endpoint type: %s" msgstr "Nem támogatott url: %s" #: src/clients/compute/utils.cpp:383 #, c-format msgid "" "Requested to skip resource discovery. 
Will try direct submission to %s and " "%s submission endpoint types" msgstr "" #: src/clients/compute/utils.cpp:389 #, fuzzy, c-format msgid "Unsupported information endpoint type: %s" msgstr "Nem támogatott url: %s" #: src/clients/compute/utils.cpp:434 msgid "Other actions" msgstr "" #: src/clients/compute/utils.cpp:435 #, fuzzy msgid "Brokering and filtering" msgstr "szöveg" #: src/clients/compute/utils.cpp:436 msgid "Output format modifiers" msgstr "" #: src/clients/compute/utils.cpp:437 msgid "Behaviour tuning" msgstr "" #: src/clients/compute/utils.cpp:438 msgid "ARC6 submission endpoint selection" msgstr "" #: src/clients/compute/utils.cpp:439 msgid "Legacy options set for defining targets" msgstr "" #: src/clients/compute/utils.cpp:443 msgid "specify computing element hostname or a complete endpoint URL" msgstr "" #: src/clients/compute/utils.cpp:444 msgid "ce" msgstr "" #: src/clients/compute/utils.cpp:448 msgid "registry service URL with optional specification of protocol" msgstr "" #: src/clients/compute/utils.cpp:449 msgid "registry" msgstr "" #: src/clients/compute/utils.cpp:455 msgid "" "require the specified endpoint type for job submission.\n" "\tAllowed values are: arcrest, emies, gridftp or gridftpjob and internal." msgstr "" #: src/clients/compute/utils.cpp:457 src/clients/compute/utils.cpp:464 msgid "type" msgstr "" #: src/clients/compute/utils.cpp:461 msgid "" "require information query using the specified information endpoint type.\n" "\tSpecial value 'NONE' will disable all resource information queries and the " "following brokering.\n" "\tAllowed values are: ldap.nordugrid, ldap.glue2, emies, arcrest and " "internal." msgstr "" #: src/clients/compute/utils.cpp:470 msgid "" "select one or more computing elements: name can be an alias for a single CE, " "a group of CEs or a URL" msgstr "" #: src/clients/compute/utils.cpp:472 src/clients/compute/utils.cpp:477 #: src/clients/compute/utils.cpp:494 src/clients/compute/utils.cpp:614 msgid "name" msgstr "" #: src/clients/compute/utils.cpp:476 #, fuzzy msgid "only select jobs that were submitted to this resource" msgstr "újraküldés ugyanarra a klaszterre" #: src/clients/compute/utils.cpp:483 msgid "" "the computing element specified by URL at the command line should be queried " "using this information interface.\n" "\tAllowed values are: org.nordugrid.ldapng, org.nordugrid.ldapglue2 and org." 
"ogf.glue.emies.resourceinfo" msgstr "" #: src/clients/compute/utils.cpp:486 #, fuzzy msgid "interfacename" msgstr "Felhasználó oldali hiba" #: src/clients/compute/utils.cpp:492 msgid "" "selecting a computing element for the new jobs with a URL or an alias, or " "selecting a group of computing elements with the name of the group" msgstr "" #: src/clients/compute/utils.cpp:500 msgid "force migration, ignore kill failure" msgstr "migráció kikényszerítése, megölési hiba figyelmen kívül hagyása" #: src/clients/compute/utils.cpp:506 msgid "keep the files on the server (do not clean)" msgstr "fájlok megÅ‘rzése a szerveren (nincs törlés)" #: src/clients/compute/utils.cpp:512 msgid "do not ask for verification" msgstr "ne kérjen ellenÅ‘rzést" #: src/clients/compute/utils.cpp:516 #, fuzzy msgid "truncate the joblist before synchronizing" msgstr "feladat lista megcsonkult a szinkronizáció elÅ‘tt" #: src/clients/compute/utils.cpp:520 msgid "do not collect information, only convert jobs storage format" msgstr "" #: src/clients/compute/utils.cpp:526 src/clients/data/arcls.cpp:288 msgid "long format (more information)" msgstr "részletes formátum (több információ)" #: src/clients/compute/utils.cpp:532 msgid "print a list of services configured in the client.conf" msgstr "" #: src/clients/compute/utils.cpp:538 msgid "show the stdout of the job (default)" msgstr "" #: src/clients/compute/utils.cpp:542 msgid "show the stderr of the job" msgstr "" #: src/clients/compute/utils.cpp:546 msgid "show the CE's error log of the job" msgstr "" #: src/clients/compute/utils.cpp:550 msgid "show the specified file from job's session directory" msgstr "" #: src/clients/compute/utils.cpp:551 #, fuzzy msgid "filepath" msgstr "elérési útvonal" #: src/clients/compute/utils.cpp:557 msgid "" "download directory (the job directory will be created in this directory)" msgstr "" "könyvtár letöltése (a feladat könyvtára ebben a könyvtárban fog létrejönni)" #: src/clients/compute/utils.cpp:559 msgid "dirname" msgstr "könyvtárnév" #: src/clients/compute/utils.cpp:563 msgid "use the jobname instead of the short ID as the job directory name" msgstr "" #: src/clients/compute/utils.cpp:568 msgid "force download (overwrite existing job directory)" msgstr "" #: src/clients/compute/utils.cpp:574 msgid "instead of the status only the IDs of the selected jobs will be printed" msgstr "" #: src/clients/compute/utils.cpp:578 msgid "sort jobs according to jobid, submissiontime or jobname" msgstr "" "feladatok rendezése az azonosítójuk, az elküldés ideje vagy a neve alapján" #: src/clients/compute/utils.cpp:579 src/clients/compute/utils.cpp:582 msgid "order" msgstr "sorrend" #: src/clients/compute/utils.cpp:581 msgid "reverse sorting of jobs according to jobid, submissiontime or jobname" msgstr "" "feladatok rendezésének megfordítása az azonosítójuk, az elküldés ideje vagy " "a neve alapján" #: src/clients/compute/utils.cpp:585 msgid "show jobs where status information is unavailable" msgstr "" #: src/clients/compute/utils.cpp:589 msgid "show status information in JSON format" msgstr "" #: src/clients/compute/utils.cpp:595 #, fuzzy msgid "resubmit to the same resource" msgstr "újraküldés ugyanarra a klaszterre" #: src/clients/compute/utils.cpp:599 #, fuzzy msgid "do not resubmit to the same resource" msgstr "újraküldés ugyanarra a klaszterre" #: src/clients/compute/utils.cpp:605 msgid "" "remove the job from the local list of jobs even if the job is not found in " "the infosys" msgstr "" "feladat eltávolítása a helyi listából ha az nem található 
az információs " "rendszerben" #: src/clients/compute/utils.cpp:612 msgid "" "select one or more registries: name can be an alias for a single registry, a " "group of registries or a URL" msgstr "" #: src/clients/compute/utils.cpp:620 msgid "submit test job given by the number" msgstr "" #: src/clients/compute/utils.cpp:621 src/clients/compute/utils.cpp:625 msgid "int" msgstr "" #: src/clients/compute/utils.cpp:624 msgid "test job runtime specified by the number" msgstr "" #: src/clients/compute/utils.cpp:631 msgid "only select jobs whose status is statusstr" msgstr "" #: src/clients/compute/utils.cpp:632 msgid "statusstr" msgstr "" #: src/clients/compute/utils.cpp:638 msgid "all jobs" msgstr "" #: src/clients/compute/utils.cpp:644 msgid "jobdescription string describing the job to be submitted" msgstr "a feladat leíró szöveg tartalmazza magát az elküldendő feladatot" #: src/clients/compute/utils.cpp:646 src/clients/compute/utils.cpp:652 #: src/clients/credentials/arcproxy.cpp:345 #: src/clients/credentials/arcproxy.cpp:352 #: src/clients/credentials/arcproxy.cpp:371 #: src/clients/credentials/arcproxy.cpp:378 #: src/clients/credentials/arcproxy.cpp:396 #: src/clients/credentials/arcproxy.cpp:400 #: src/clients/credentials/arcproxy.cpp:415 #: src/clients/credentials/arcproxy.cpp:425 #: src/clients/credentials/arcproxy.cpp:429 msgid "string" msgstr "szöveg" #: src/clients/compute/utils.cpp:650 msgid "jobdescription file describing the job to be submitted" msgstr "a feladat leíró fájl tartalmazza magát az elküldendő feladatot" #: src/clients/compute/utils.cpp:658 msgid "select broker method (list available brokers with --listplugins flag)" msgstr "" #: src/clients/compute/utils.cpp:659 msgid "broker" msgstr "bróker" #: src/clients/compute/utils.cpp:662 msgid "the IDs of the submitted jobs will be appended to this file" msgstr "" #: src/clients/compute/utils.cpp:663 src/clients/compute/utils.cpp:685 #: src/clients/compute/utils.cpp:722 src/clients/compute/utils.cpp:730 #: src/clients/credentials/arcproxy.cpp:438 src/clients/data/arccp.cpp:627 #: src/clients/data/arcls.cpp:333 src/clients/data/arcmkdir.cpp:111 #: src/clients/data/arcrename.cpp:122 src/clients/data/arcrm.cpp:137 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:52 msgid "filename" msgstr "fájlnév" #: src/clients/compute/utils.cpp:667 msgid "" "only use this interface for submitting.\n" "\tAllowed values are: org.nordugrid.gridftpjob or org.nordugrid.gridftp, org." 
"ogf.glue.emies.activitycreation and org.nordugrid.internal" msgstr "" #: src/clients/compute/utils.cpp:669 src/clients/compute/utils.cpp:711 msgid "InterfaceName" msgstr "" #: src/clients/compute/utils.cpp:676 msgid "skip the service with the given URL during service discovery" msgstr "" #: src/clients/compute/utils.cpp:677 src/clients/compute/utils.cpp:690 #: src/clients/data/arccp.cpp:607 msgid "URL" msgstr "" #: src/clients/compute/utils.cpp:684 msgid "a file containing a list of jobIDs" msgstr "" #: src/clients/compute/utils.cpp:689 msgid "skip jobs that are on a computing element with a given URL" msgstr "" #: src/clients/compute/utils.cpp:695 msgid "submit jobs as dry run (no submission to batch system)" msgstr "" #: src/clients/compute/utils.cpp:698 msgid "submit directly - no resource discovery or matchmaking" msgstr "" #: src/clients/compute/utils.cpp:702 msgid "" "do not submit - dump job description in the language accepted by the target" msgstr "" "nincs küldés - azon feladat leíró formátumban megjelenítése, amit a távoli " "klaszter elfogad" #: src/clients/compute/utils.cpp:709 msgid "" "only get information about executon targets that support this job submission " "interface.\n" "\tAllowed values are org.nordugrid.gridftpjob or org.nordugrid.gridftp, org." "ogf.glue.emies.activitycreation and org.nordugrid.internal" msgstr "" #: src/clients/compute/utils.cpp:716 msgid "prints info about installed user- and CA-certificates" msgstr "" #: src/clients/compute/utils.cpp:721 #, c-format msgid "the file storing information about active jobs (default %s)" msgstr "" #: src/clients/compute/utils.cpp:729 src/clients/credentials/arcproxy.cpp:437 #: src/clients/data/arccp.cpp:626 src/clients/data/arcls.cpp:332 #: src/clients/data/arcmkdir.cpp:110 src/clients/data/arcrename.cpp:121 #: src/clients/data/arcrm.cpp:136 msgid "configuration file (default ~/.arc/client.conf)" msgstr "konfigurációs fájl (alapbeállítás ~/.arc/client.conf)" #: src/clients/compute/utils.cpp:732 src/clients/credentials/arcproxy.cpp:432 #: src/clients/data/arccp.cpp:621 src/clients/data/arcls.cpp:327 #: src/clients/data/arcmkdir.cpp:105 src/clients/data/arcrename.cpp:116 #: src/clients/data/arcrm.cpp:131 msgid "timeout in seconds (default 20)" msgstr "idÅ‘korlát másodpercben (alapbeállítás 20)" #: src/clients/compute/utils.cpp:733 src/clients/credentials/arcproxy.cpp:433 #: src/clients/data/arccp.cpp:622 src/clients/data/arcls.cpp:328 #: src/clients/data/arcmkdir.cpp:106 src/clients/data/arcrename.cpp:117 #: src/clients/data/arcrm.cpp:132 msgid "seconds" msgstr "másodpercek" #: src/clients/compute/utils.cpp:736 msgid "list the available plugins" msgstr "" #: src/clients/compute/utils.cpp:740 src/clients/credentials/arcproxy.cpp:442 #: src/clients/data/arccp.cpp:631 src/clients/data/arcls.cpp:337 #: src/clients/data/arcmkdir.cpp:115 src/clients/data/arcrename.cpp:126 #: src/clients/data/arcrm.cpp:141 #: src/hed/libs/compute/test_jobdescription.cpp:38 #: src/services/a-rex/grid-manager/gm_jobs.cpp:190 #: src/services/a-rex/grid-manager/inputcheck.cpp:81 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:66 msgid "FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG" msgstr "FATAL, ERROR, WARNING, INFO, VERBOSE vagy DEBUG" #: src/clients/compute/utils.cpp:741 src/clients/credentials/arcproxy.cpp:443 #: src/clients/data/arccp.cpp:632 src/clients/data/arcls.cpp:338 #: src/clients/data/arcmkdir.cpp:116 src/clients/data/arcrename.cpp:127 #: src/clients/data/arcrm.cpp:142 #: src/hed/libs/compute/test_jobdescription.cpp:39 
#: src/services/a-rex/grid-manager/gm_jobs.cpp:191 #: src/services/a-rex/grid-manager/inputcheck.cpp:82 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:67 msgid "debuglevel" msgstr "logolási szint" #: src/clients/compute/utils.cpp:743 src/clients/credentials/arcproxy.cpp:446 #: src/clients/data/arccp.cpp:635 src/clients/data/arcls.cpp:341 #: src/clients/data/arcmkdir.cpp:119 src/clients/data/arcrename.cpp:130 #: src/clients/data/arcrm.cpp:145 msgid "print version information" msgstr "verzió információ kiírása" #: src/clients/credentials/arcproxy.cpp:146 #: src/hed/libs/credential/ARCProxyUtil.cpp:1216 #, fuzzy, c-format msgid "There are %d user certificates existing in the NSS database" msgstr "%d darab publikus tanúsítvány van a válasz üzenetben" #: src/clients/credentials/arcproxy.cpp:162 #: src/hed/libs/credential/ARCProxyUtil.cpp:1232 #, c-format msgid "Number %d is with nickname: %s%s" msgstr "" #: src/clients/credentials/arcproxy.cpp:171 #: src/hed/libs/credential/ARCProxyUtil.cpp:1241 #, c-format msgid " expiration time: %s " msgstr "" #: src/clients/credentials/arcproxy.cpp:175 #: src/hed/libs/credential/ARCProxyUtil.cpp:1245 #, c-format msgid " certificate dn: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:176 #: src/hed/libs/credential/ARCProxyUtil.cpp:1246 #, c-format msgid " issuer dn: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:177 #: src/hed/libs/credential/ARCProxyUtil.cpp:1247 #, c-format msgid " serial number: %d" msgstr "" #: src/clients/credentials/arcproxy.cpp:181 #: src/hed/libs/credential/ARCProxyUtil.cpp:1251 #, c-format msgid "Please choose the one you would use (1-%d): " msgstr "" #: src/clients/credentials/arcproxy.cpp:246 #, fuzzy msgid "" "The arcproxy command creates a proxy from a key/certificate pair which can\n" "then be used to access grid resources." msgstr "" "Az arcproxy parancs proxy-t készít a publikus/privát kulcsból,\n" "hogy tudja használni az ARC köztesréteget" #: src/clients/credentials/arcproxy.cpp:248 msgid "" "Supported constraints are:\n" " validityStart=time (e.g. 2008-05-29T10:20:30Z; if not specified, start " "from now)\n" " validityEnd=time\n" " validityPeriod=time (e.g. 43200 or 12h or 12H; if both validityPeriod and " "validityEnd\n" " not specified, the default is 12 hours for local proxy, and 168 hours " "for delegated\n" " proxy on myproxy server)\n" " vomsACvalidityPeriod=time (e.g. 43200 or 12h or 12H; if not specified, the " "default\n" " is the minimum value of 12 hours and validityPeriod)\n" " myproxyvalidityPeriod=time (lifetime of proxies delegated by myproxy " "server,\n" " e.g. 43200 or 12h or 12H; if not specified, the default is the minimum " "value of\n" " 12 hours and validityPeriod (which is lifetime of the delegated proxy on " "myproxy server))\n" " proxyPolicy=policy content\n" " proxyPolicyFile=policy file\n" " keybits=number - length of the key to generate. Default is 2048 bits.\n" " Special value 'inherit' is to use key length of signing certificate.\n" " signingAlgorithm=name - signing algorithm to use for signing public key of " "proxy.\n" " Possible values are sha1, sha2 (alias for sha256), sha224, sha256, " "sha384, sha512\n" " and inherit (use algorithm of signing certificate). 
Default is inherit.\n" " With old systems, only sha1 is acceptable.\n" "\n" "Supported information item names are:\n" " subject - subject name of proxy certificate.\n" " identity - identity subject name of proxy certificate.\n" " issuer - issuer subject name of proxy certificate.\n" " ca - subject name of CA which issued initial certificate.\n" " path - file system path to file containing proxy.\n" " type - type of proxy certificate.\n" " validityStart - timestamp when proxy validity starts.\n" " validityEnd - timestamp when proxy validity ends.\n" " validityPeriod - duration of proxy validity in seconds.\n" " validityLeft - duration of proxy validity left in seconds.\n" " vomsVO - VO name represented by VOMS attribute\n" " vomsSubject - subject of certificate for which VOMS attribute is issued\n" " vomsIssuer - subject of service which issued VOMS certificate\n" " vomsACvalidityStart - timestamp when VOMS attribute validity starts.\n" " vomsACvalidityEnd - timestamp when VOMS attribute validity ends.\n" " vomsACvalidityPeriod - duration of VOMS attribute validity in seconds.\n" " vomsACvalidityLeft - duration of VOMS attribute validity left in seconds.\n" " proxyPolicy\n" " keybits - size of proxy certificate key in bits.\n" " signingAlgorithm - algorithm used to sign proxy certificate.\n" "Items are printed in requested order and are separated by newline.\n" "If item has multiple values they are printed in same line separated by |.\n" "\n" "Supported password destinations are:\n" " key - for reading private key\n" " myproxy - for accessing credentials at MyProxy service\n" " myproxynew - for creating credentials at MyProxy service\n" " all - for any purspose.\n" "\n" "Supported password sources are:\n" " quoted string (\"password\") - explicitly specified password\n" " int - interactively request password from console\n" " stdin - read password from standard input delimited by newline\n" " file:filename - read password from file named filename\n" " stream:# - read password from input stream number #.\n" " Currently only 0 (standard input) is supported.\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:308 #, fuzzy msgid "path to the proxy file" msgstr "proxy fájl elérési útvonala" #: src/clients/credentials/arcproxy.cpp:309 #: src/clients/credentials/arcproxy.cpp:313 #: src/clients/credentials/arcproxy.cpp:317 #: src/clients/credentials/arcproxy.cpp:321 #: src/clients/credentials/arcproxy.cpp:325 #: src/clients/credentials/arcproxy.cpp:329 src/clients/data/arccp.cpp:584 msgid "path" msgstr "elérési útvonal" #: src/clients/credentials/arcproxy.cpp:312 msgid "" "path to the certificate file, it can be either PEM, DER, or PKCS12 formatted" msgstr "" #: src/clients/credentials/arcproxy.cpp:316 msgid "" "path to the private key file, if the certificate is in PKCS12 format, then " "no need to give private key" msgstr "" #: src/clients/credentials/arcproxy.cpp:320 #, fuzzy msgid "" "path to the trusted certificate directory, only needed for the VOMS client " "functionality" msgstr "megbízható tanúsítványok könyvtára, csak a voms kliensek használják" #: src/clients/credentials/arcproxy.cpp:324 #, fuzzy msgid "" "path to the top directory of VOMS *.lsc files, only needed for the VOMS " "client functionality" msgstr "megbízható tanúsítványok könyvtára, csak a voms kliensek használják" #: src/clients/credentials/arcproxy.cpp:328 #, fuzzy msgid "path to the VOMS server configuration file" msgstr "voms szerver fájljának az elérési útvonala" #: src/clients/credentials/arcproxy.cpp:332 msgid "" 
"voms<:command>. Specify VOMS server (More than one VOMS server \n" " can be specified like this: --voms VOa:command1 --voms VOb:" "command2). \n" " :command is optional, and is used to ask for specific " "attributes(e.g: roles)\n" " command options are:\n" " all --- put all of this DN's attributes into AC; \n" " list ---list all of the DN's attribute, will not create AC " "extension; \n" " /Role=yourRole --- specify the role, if this DN \n" " has such a role, the role will be put into " "AC; \n" " /voname/groupname/Role=yourRole --- specify the VO, group and " "role; if this DN \n" " has such a role, the role will be put into " "AC. \n" " If this option is not specified values from configuration " "files are used.\n" " To avoid anything to be used specify -S with empty value.\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:348 msgid "" "group<:role>. Specify ordering of attributes \n" " Example: --order /knowarc.eu/coredev:Developer,/knowarc.eu/" "testers:Tester \n" " or: --order /knowarc.eu/coredev:Developer --order /knowarc.eu/" "testers:Tester \n" " Note that it does not make sense to specify the order if you have two or " "more different VOMS servers specified" msgstr "" #: src/clients/credentials/arcproxy.cpp:355 msgid "use GSI communication protocol for contacting VOMS services" msgstr "GSI kommunikációs protokoll használata a VOMS szolgáltatás eléréséhez" #: src/clients/credentials/arcproxy.cpp:358 msgid "" "use HTTP communication protocol for contacting VOMS services that provide " "RESTful access \n" " Note for RESTful access, 'list' command and multiple VOMS " "server are not supported\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:362 #, fuzzy msgid "" "use old communication protocol for contacting VOMS services instead of " "RESTful access\n" msgstr "GSI kommunikációs protokoll használata a VOMS szolgáltatás eléréséhez" #: src/clients/credentials/arcproxy.cpp:365 msgid "" "this option is not functional (old GSI proxies are not supported anymore)" msgstr "" #: src/clients/credentials/arcproxy.cpp:368 msgid "print all information about this proxy." msgstr "" #: src/clients/credentials/arcproxy.cpp:371 msgid "print selected information about this proxy." msgstr "" #: src/clients/credentials/arcproxy.cpp:374 msgid "remove proxy" msgstr "" #: src/clients/credentials/arcproxy.cpp:377 msgid "" "username to MyProxy server (if missing subject of user certificate is used)" msgstr "" #: src/clients/credentials/arcproxy.cpp:382 msgid "" "don't prompt for a credential passphrase, when retrieve a \n" " credential from on MyProxy server. \n" " The precondition of this choice is the credential is PUT onto\n" " the MyProxy server without a passphrase by using -R (--" "retrievable_by_cert) \n" " option when being PUTing onto Myproxy server. \n" " This option is specific for the GET command when contacting " "Myproxy server." msgstr "" #: src/clients/credentials/arcproxy.cpp:393 msgid "" "Allow specified entity to retrieve credential without passphrase.\n" " This option is specific for the PUT command when contacting " "Myproxy server." msgstr "" #: src/clients/credentials/arcproxy.cpp:399 #, fuzzy msgid "hostname[:port] of MyProxy server" msgstr "szervernév[:port] myproxy szerveré" #: src/clients/credentials/arcproxy.cpp:404 msgid "" "command to MyProxy server. 
The command can be PUT, GET, INFO, NEWPASS or " "DESTROY.\n" " PUT -- put a delegated credentials to the MyProxy server; \n" " GET -- get a delegated credentials from the MyProxy server; \n" " INFO -- get and present information about credentials stored " "at the MyProxy server; \n" " NEWPASS -- change password protecting credentials stored at " "the MyProxy server; \n" " DESTROY -- wipe off credentials stored at the MyProxy " "server; \n" " Local credentials (certificate and key) are not necessary " "except in case of PUT. \n" " MyProxy functionality can be used together with VOMS " "functionality.\n" " --voms and --vomses can be used for Get command if VOMS " "attributes\n" " is required to be included in the proxy.\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:419 msgid "" "use NSS credential database in default Mozilla profiles, \n" " including Firefox, Seamonkey and Thunderbird.\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:424 msgid "proxy constraints" msgstr "" #: src/clients/credentials/arcproxy.cpp:428 msgid "password destination=password source" msgstr "" #: src/clients/credentials/arcproxy.cpp:452 msgid "" "RESTful and old VOMS communication protocols can't be requested " "simultaneously." msgstr "" #: src/clients/credentials/arcproxy.cpp:482 #: src/clients/credentials/arcproxy.cpp:1187 #, fuzzy msgid "Failed configuration initialization." msgstr "Nem sikerült betölteni a konfigurációt" #: src/clients/credentials/arcproxy.cpp:511 msgid "" "Failed to find certificate and/or private key or files have improper " "permissions or ownership." msgstr "" #: src/clients/credentials/arcproxy.cpp:512 #: src/clients/credentials/arcproxy.cpp:524 msgid "You may try to increase verbosity to get more information." msgstr "" #: src/clients/credentials/arcproxy.cpp:520 #, fuzzy msgid "Failed to find CA certificates" msgstr "Nem sikerült listázni a meta adatokat" #: src/clients/credentials/arcproxy.cpp:521 msgid "" "Cannot find the CA certificates directory path, please set environment " "variable X509_CERT_DIR, or cacertificatesdirectory in a configuration file." msgstr "" #: src/clients/credentials/arcproxy.cpp:525 msgid "" "The CA certificates directory is required for contacting VOMS and MyProxy " "servers." msgstr "" #: src/clients/credentials/arcproxy.cpp:537 msgid "" "$X509_VOMS_FILE, and $X509_VOMSES are not set;\n" "User has not specified the location for vomses information;\n" "There is also not vomses location information in user's configuration file;\n" "Can not find vomses in default locations: ~/.arc/vomses, ~/.voms/vomses,\n" "$ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/" "vomses,\n" "/etc/vomses, /etc/grid-security/vomses, and the location at the " "corresponding sub-directory" msgstr "" #: src/clients/credentials/arcproxy.cpp:582 msgid "Wrong number of arguments!" msgstr "" #: src/clients/credentials/arcproxy.cpp:590 #: src/clients/credentials/arcproxy.cpp:614 #: src/clients/credentials/arcproxy.cpp:747 msgid "" "Cannot find the path of the proxy file, please setup environment " "X509_USER_PROXY, or proxypath in a configuration file" msgstr "" #: src/clients/credentials/arcproxy.cpp:597 #, fuzzy, c-format msgid "Cannot remove proxy file at %s" msgstr "proxy fájl elérési útvonala" #: src/clients/credentials/arcproxy.cpp:599 #, c-format msgid "Cannot remove proxy file at %s, because it's not there" msgstr "" #: src/clients/credentials/arcproxy.cpp:608 msgid "Bearer token is available. It is preferred for job submission." 
msgstr "" #: src/clients/credentials/arcproxy.cpp:620 #: src/clients/credentials/arcproxy.cpp:753 #, c-format msgid "" "Cannot find file at %s for getting the proxy. Please make sure this file " "exists." msgstr "" #: src/clients/credentials/arcproxy.cpp:626 #: src/clients/credentials/arcproxy.cpp:759 #, fuzzy, c-format msgid "Cannot process proxy file at %s." msgstr "proxy fájl elérési útvonala" #: src/clients/credentials/arcproxy.cpp:629 #, c-format msgid "Subject: %s" msgstr "Tárgy: %s" #: src/clients/credentials/arcproxy.cpp:630 #, fuzzy, c-format msgid "Issuer: %s" msgstr "Válasz: %s" #: src/clients/credentials/arcproxy.cpp:631 #, c-format msgid "Identity: %s" msgstr "Azonosító: %s" #: src/clients/credentials/arcproxy.cpp:633 msgid "Time left for proxy: Proxy expired" msgstr "Nem használható tovább a proxy: Lejárt a proxy" #: src/clients/credentials/arcproxy.cpp:635 #, fuzzy msgid "Time left for proxy: Proxy not valid yet" msgstr "Nem használható tovább a proxy: Nem érvényes a proxy" #: src/clients/credentials/arcproxy.cpp:637 #, c-format msgid "Time left for proxy: %s" msgstr "Ennyi ideig érvényes még a proxy: %s" #: src/clients/credentials/arcproxy.cpp:638 #, c-format msgid "Proxy path: %s" msgstr "Proxy elérési útvonal: %s" #: src/clients/credentials/arcproxy.cpp:639 #, c-format msgid "Proxy type: %s" msgstr "Proxy típusa: %s" #: src/clients/credentials/arcproxy.cpp:640 #, fuzzy, c-format msgid "Proxy key length: %i" msgstr "Proxy elérési útvonal: %s" #: src/clients/credentials/arcproxy.cpp:641 #, fuzzy, c-format msgid "Proxy signature: %s" msgstr "Proxy elérési útvonal: %s" #: src/clients/credentials/arcproxy.cpp:650 #, fuzzy msgid "AC extension information for VO " msgstr "verzió információ kiírása" #: src/clients/credentials/arcproxy.cpp:653 msgid "Error detected while parsing this AC" msgstr "" #: src/clients/credentials/arcproxy.cpp:666 msgid "AC is invalid: " msgstr "" #: src/clients/credentials/arcproxy.cpp:696 #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:222 #, c-format msgid "Malformed VOMS AC attribute %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:727 #, fuzzy msgid "Time left for AC: AC is not valid yet" msgstr "Nem használható tovább a proxy: Nem érvényes a proxy" #: src/clients/credentials/arcproxy.cpp:729 #, fuzzy msgid "Time left for AC: AC has expired" msgstr "Nem használható tovább a proxy: Lejárt a proxy" #: src/clients/credentials/arcproxy.cpp:731 #, fuzzy, c-format msgid "Time left for AC: %s" msgstr "Ennyi ideig érvényes még a proxy: %s" #: src/clients/credentials/arcproxy.cpp:838 #, c-format msgid "Information item '%s' is not known" msgstr "" #: src/clients/credentials/arcproxy.cpp:850 msgid "" "Cannot find the user certificate path, please setup environment " "X509_USER_CERT, or certificatepath in a configuration file" msgstr "" #: src/clients/credentials/arcproxy.cpp:854 msgid "" "Cannot find the user private key path, please setup environment " "X509_USER_KEY, or keypath in a configuration file" msgstr "" #: src/clients/credentials/arcproxy.cpp:878 #, c-format msgid "" "Cannot parse password source expression %s it must be of type=source format" msgstr "" #: src/clients/credentials/arcproxy.cpp:895 #, c-format msgid "" "Cannot parse password type %s. Currently supported values are " "'key','myproxy','myproxynew' and 'all'." msgstr "" #: src/clients/credentials/arcproxy.cpp:910 #, c-format msgid "" "Cannot parse password source %s it must be of source_type or source_type:" "data format. Supported source types are int,stdin,stream,file." 
msgstr "" #: src/clients/credentials/arcproxy.cpp:924 msgid "Only standard input is currently supported for password source." msgstr "" #: src/clients/credentials/arcproxy.cpp:929 #, c-format msgid "" "Cannot parse password source type %s. Supported source types are int,stdin," "stream,file." msgstr "" #: src/clients/credentials/arcproxy.cpp:968 msgid "The start, end and period can't be set simultaneously" msgstr "" #: src/clients/credentials/arcproxy.cpp:974 #, c-format msgid "The start time that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:981 #, c-format msgid "The period that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:988 #, c-format msgid "The end time that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:997 #, c-format msgid "The end time that you set: %s is before start time: %s." msgstr "" #: src/clients/credentials/arcproxy.cpp:1008 #, c-format msgid "WARNING: The start time that you set: %s is before current time: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1011 #, c-format msgid "WARNING: The end time that you set: %s is before current time: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1021 #, c-format msgid "The VOMS AC period that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:1039 #, c-format msgid "The MyProxy period that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:1054 #, c-format msgid "The keybits constraint is wrong: %s." msgstr "" #: src/clients/credentials/arcproxy.cpp:1068 #: src/hed/libs/credential/ARCProxyUtil.cpp:1271 msgid "The NSS database can not be detected in the Firefox profile" msgstr "" #: src/clients/credentials/arcproxy.cpp:1077 #: src/hed/libs/credential/ARCProxyUtil.cpp:1279 #, c-format msgid "" "There are %d NSS base directories where the certificate, key, and module " "databases live" msgstr "" #: src/clients/credentials/arcproxy.cpp:1079 #: src/hed/libs/credential/ARCProxyUtil.cpp:1283 #, c-format msgid "Number %d is: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1081 #: src/hed/libs/credential/ARCProxyUtil.cpp:1285 #, c-format msgid "Please choose the NSS database you would like to use (1-%d): " msgstr "" #: src/clients/credentials/arcproxy.cpp:1097 #: src/hed/libs/credential/ARCProxyUtil.cpp:1297 #, c-format msgid "NSS database to be accessed: %s\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:1168 #: src/hed/libs/credential/ARCProxyUtil.cpp:1471 #, c-format msgid "Certificate to use is: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1216 #: src/clients/credentials/arcproxy.cpp:1330 #: src/hed/libs/credential/ARCProxyUtil.cpp:1528 msgid "Proxy generation succeeded" msgstr "Proxy készítés sikeres" #: src/clients/credentials/arcproxy.cpp:1217 #: src/clients/credentials/arcproxy.cpp:1331 #: src/hed/libs/credential/ARCProxyUtil.cpp:1529 #, c-format msgid "Your proxy is valid until: %s" msgstr "A proxy eddig érvényes: %s" #: src/clients/credentials/arcproxy.cpp:1236 msgid "" "The old GSI proxies are not supported anymore. Please do not use -O/--old " "option." 
msgstr "" #: src/clients/credentials/arcproxy.cpp:1255 src/hed/mcc/tls/MCCTLS.cpp:163 #: src/hed/mcc/tls/MCCTLS.cpp:196 src/hed/mcc/tls/MCCTLS.cpp:222 msgid "VOMS attribute parsing failed" msgstr "VOMS attribútumok értelmezése sikertelen" #: src/clients/credentials/arcproxy.cpp:1257 msgid "Myproxy server did not return proxy with VOMS AC included" msgstr "" #: src/clients/credentials/arcproxy.cpp:1278 #: src/hed/libs/credential/ARCProxyUtil.cpp:337 #, fuzzy msgid "Proxy generation failed: No valid certificate found." msgstr "" "Proxy készítés sikertelen: Nem sikerült leellenÅ‘rizni a publikus kulcsot" #: src/clients/credentials/arcproxy.cpp:1283 #: src/hed/libs/credential/ARCProxyUtil.cpp:343 #, fuzzy msgid "Proxy generation failed: No valid private key found." msgstr "" "Proxy készítés sikertelen: Nem sikerült leellenÅ‘rizni a publikus kulcsot" #: src/clients/credentials/arcproxy.cpp:1287 #: src/hed/libs/credential/ARCProxyUtil.cpp:169 #, c-format msgid "Your identity: %s" msgstr "Azonosítód: %s" #: src/clients/credentials/arcproxy.cpp:1289 #: src/hed/libs/credential/ARCProxyUtil.cpp:350 msgid "Proxy generation failed: Certificate has expired." msgstr "Proxy készítés sikertelen: A publikus kulcs érvényessége lejárt." #: src/clients/credentials/arcproxy.cpp:1293 #: src/hed/libs/credential/ARCProxyUtil.cpp:355 msgid "Proxy generation failed: Certificate is not valid yet." msgstr "Proxy készítés sikertelen: A publikus kulcs érvénytelen." #: src/clients/credentials/arcproxy.cpp:1304 #, fuzzy msgid "Proxy generation failed: Failed to create temporary file." msgstr "" "Proxy készítés sikertelen: Nem sikerült leellenÅ‘rizni a publikus kulcsot" #: src/clients/credentials/arcproxy.cpp:1312 #, fuzzy msgid "Proxy generation failed: Failed to retrieve VOMS information." msgstr "" "Proxy készítés sikertelen: Nem sikerült leellenÅ‘rizni a publikus kulcsot" #: src/clients/credentials/arcproxy_myproxy.cpp:100 #: src/hed/libs/credential/ARCProxyUtil.cpp:838 msgid "Succeeded to get info from MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:144 #: src/hed/libs/credential/ARCProxyUtil.cpp:894 msgid "Succeeded to change password on MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:185 #: src/hed/libs/credential/ARCProxyUtil.cpp:943 msgid "Succeeded to destroy credential on MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:265 #: src/hed/libs/credential/ARCProxyUtil.cpp:1032 #, c-format msgid "Succeeded to get a proxy in %s from MyProxy server %s" msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:318 #: src/hed/libs/credential/ARCProxyUtil.cpp:1091 msgid "Succeeded to put a proxy onto MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_proxy.cpp:93 #: src/hed/libs/credential/ARCProxyUtil.cpp:397 #: src/hed/libs/credential/ARCProxyUtil.cpp:1378 msgid "Failed to add VOMS AC extension. Your proxy may be incomplete." msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:63 msgid "" "Failed to process VOMS configuration or no suitable configuration lines " "found." 
msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:75 #, fuzzy, c-format msgid "Failed to parse requested VOMS lifetime: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/clients/credentials/arcproxy_voms.cpp:93 #: src/hed/libs/credential/ARCProxyUtil.cpp:634 #, fuzzy, c-format msgid "Cannot get VOMS server address information from vomses line: \"%s\"" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/clients/credentials/arcproxy_voms.cpp:97 #: src/clients/credentials/arcproxy_voms.cpp:99 #: src/hed/libs/credential/ARCProxyUtil.cpp:644 #: src/hed/libs/credential/ARCProxyUtil.cpp:646 #, c-format msgid "Contacting VOMS server (named %s): %s on port: %s" msgstr "VOMS szerver elérése (neve: %s): %s ezen a porton: %s" #: src/clients/credentials/arcproxy_voms.cpp:105 #, c-format msgid "Failed to parse requested VOMS server port number: %s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:122 msgid "List functionality is not supported for RESTful VOMS interface" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:132 #: src/clients/credentials/arcproxy_voms.cpp:188 #, c-format msgid "" "The VOMS server with the information:\n" "\t%s\n" "can not be reached, please make sure it is available." msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:133 #: src/clients/credentials/arcproxy_voms.cpp:138 #: src/clients/credentials/arcproxy_voms.cpp:189 #: src/clients/credentials/arcproxy_voms.cpp:194 #, c-format msgid "" "Collected error is:\n" "\t%s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:137 #: src/clients/credentials/arcproxy_voms.cpp:193 #, fuzzy, c-format msgid "No valid response from VOMS server: %s" msgstr "Nincs válasz a voms szervertÅ‘l" #: src/clients/credentials/arcproxy_voms.cpp:155 msgid "List functionality is not supported for legacy VOMS interface" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:167 #, c-format msgid "Failed to parse VOMS command: %s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:204 #, c-format msgid "" "There are %d servers with the same name: %s in your vomses file, but none of " "them can be reached, or can return a valid message." 
msgstr "" #: src/clients/data/arccp.cpp:77 src/clients/data/arccp.cpp:330 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:509 #, c-format msgid "Current transfer FAILED: %s" msgstr "Az aktuális átvitel MEGSZAKADT: %s" #: src/clients/data/arccp.cpp:79 src/clients/data/arccp.cpp:135 #: src/clients/data/arccp.cpp:332 src/clients/data/arcls.cpp:225 #: src/clients/data/arcmkdir.cpp:73 src/clients/data/arcrename.cpp:89 #: src/clients/data/arcrm.cpp:95 msgid "This seems like a temporary error, please try again later" msgstr "" #: src/clients/data/arccp.cpp:87 src/clients/data/arccp.cpp:96 #, fuzzy, c-format msgid "Unable to copy %s" msgstr "Nem sikerült betölteni a %s bróker modult" #: src/clients/data/arccp.cpp:88 src/clients/data/arccp.cpp:97 #: src/clients/data/arcls.cpp:150 src/clients/data/arcls.cpp:159 #: src/clients/data/arcmkdir.cpp:55 src/clients/data/arcmkdir.cpp:64 #: src/clients/data/arcrename.cpp:67 src/clients/data/arcrename.cpp:76 #: src/clients/data/arcrm.cpp:68 src/clients/data/arcrm.cpp:80 msgid "Invalid credentials, please check proxy and/or CA certificates" msgstr "" #: src/clients/data/arccp.cpp:94 src/clients/data/arcls.cpp:156 #: src/clients/data/arcmkdir.cpp:61 src/clients/data/arcrename.cpp:73 #: src/clients/data/arcrm.cpp:77 msgid "Proxy expired" msgstr "" #: src/clients/data/arccp.cpp:112 src/clients/data/arccp.cpp:116 #: src/clients/data/arccp.cpp:149 src/clients/data/arccp.cpp:153 #: src/clients/data/arccp.cpp:358 src/clients/data/arccp.cpp:363 #: src/clients/data/arcls.cpp:123 src/clients/data/arcmkdir.cpp:28 #: src/clients/data/arcrename.cpp:29 src/clients/data/arcrename.cpp:33 #: src/clients/data/arcrm.cpp:36 #, c-format msgid "Invalid URL: %s" msgstr "Érvénytelen URL: %s" #: src/clients/data/arccp.cpp:128 msgid "Third party transfer is not supported for these endpoints" msgstr "" #: src/clients/data/arccp.cpp:130 msgid "" "Protocol(s) not supported - please check that the relevant gfal2\n" " plugins are installed (gfal2-plugin-* packages)" msgstr "" #: src/clients/data/arccp.cpp:133 #, c-format msgid "Transfer FAILED: %s" msgstr "" #: src/clients/data/arccp.cpp:161 src/clients/data/arccp.cpp:187 #: src/clients/data/arccp.cpp:374 src/clients/data/arccp.cpp:402 #, c-format msgid "Can't read list of sources from file %s" msgstr "Nem tudom olvasni a forrásokat a fájlból: %s" #: src/clients/data/arccp.cpp:166 src/clients/data/arccp.cpp:202 #: src/clients/data/arccp.cpp:379 src/clients/data/arccp.cpp:418 #, c-format msgid "Can't read list of destinations from file %s" msgstr "Nem tudom olvasni a célállomásokat a fájlból: %s" #: src/clients/data/arccp.cpp:171 src/clients/data/arccp.cpp:385 msgid "Numbers of sources and destinations do not match" msgstr "A forrás és céállomások száma nem egyezik meg" #: src/clients/data/arccp.cpp:216 msgid "Fileset registration is not supported yet" msgstr "A fileset regisztcáció nem támogatott még" #: src/clients/data/arccp.cpp:222 src/clients/data/arccp.cpp:295 #: src/clients/data/arccp.cpp:456 #, c-format msgid "Unsupported source url: %s" msgstr "Nem támogatott url: %s" #: src/clients/data/arccp.cpp:226 src/clients/data/arccp.cpp:299 #, c-format msgid "Unsupported destination url: %s" msgstr "Nem támogatott url: %s" #: src/clients/data/arccp.cpp:233 msgid "" "For registration source must be ordinary URL and destination must be " "indexing service" msgstr "" #: src/clients/data/arccp.cpp:243 #, c-format msgid "Could not obtain information about source: %s" msgstr "" #: src/clients/data/arccp.cpp:250 msgid "" "Metadata of 
source does not match existing destination. Use the --force " "option to override this." msgstr "" #: src/clients/data/arccp.cpp:262 msgid "Failed to accept new file/destination" msgstr "" #: src/clients/data/arccp.cpp:268 src/clients/data/arccp.cpp:274 #, c-format msgid "Failed to register new file/destination: %s" msgstr "" #: src/clients/data/arccp.cpp:436 msgid "Fileset copy to single object is not supported yet" msgstr "" #: src/clients/data/arccp.cpp:446 msgid "Can't extract object's name from source url" msgstr "" #: src/clients/data/arccp.cpp:465 #, c-format msgid "%s. Cannot copy fileset" msgstr "" #: src/clients/data/arccp.cpp:475 src/hed/libs/compute/ExecutionTarget.cpp:256 #: src/hed/libs/compute/ExecutionTarget.cpp:328 #, c-format msgid "Name: %s" msgstr "Név: %s" #: src/clients/data/arccp.cpp:478 #, c-format msgid "Source: %s" msgstr "Forrás: %s" #: src/clients/data/arccp.cpp:479 #, c-format msgid "Destination: %s" msgstr "Célállomás: %s" #: src/clients/data/arccp.cpp:485 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:516 msgid "Current transfer complete" msgstr "Az aktuális átvitel sikeres" #: src/clients/data/arccp.cpp:488 msgid "Some transfers failed" msgstr "" #: src/clients/data/arccp.cpp:498 #, c-format msgid "Directory: %s" msgstr "" #: src/clients/data/arccp.cpp:518 msgid "Transfer complete" msgstr "" #: src/clients/data/arccp.cpp:537 msgid "source destination" msgstr "" #: src/clients/data/arccp.cpp:538 msgid "" "The arccp command copies files to, from and between grid storage elements." msgstr "" #: src/clients/data/arccp.cpp:543 msgid "" "use passive transfer (off by default if secure is on, on by default if " "secure is not requested)" msgstr "" #: src/clients/data/arccp.cpp:549 msgid "do not try to force passive transfer" msgstr "" #: src/clients/data/arccp.cpp:554 msgid "" "if the destination is an indexing service and not the same as the source and " "the destination is already registered, then the copy is normally not done. " "However, if this option is specified the source is assumed to be a replica " "of the destination created in an uncontrolled way and the copy is done like " "in case of replication. Using this option also skips validation of completed " "transfers." msgstr "" #: src/clients/data/arccp.cpp:567 msgid "show progress indicator" msgstr "" #: src/clients/data/arccp.cpp:572 msgid "" "do not transfer, but register source into destination. destination must be a " "meta-url." msgstr "" #: src/clients/data/arccp.cpp:578 msgid "use secure transfer (insecure by default)" msgstr "" #: src/clients/data/arccp.cpp:583 msgid "path to local cache (use to put file into cache)" msgstr "" #: src/clients/data/arccp.cpp:588 src/clients/data/arcls.cpp:301 msgid "operate recursively" msgstr "" #: src/clients/data/arccp.cpp:593 src/clients/data/arcls.cpp:306 msgid "operate recursively up to specified level" msgstr "" #: src/clients/data/arccp.cpp:594 src/clients/data/arcls.cpp:307 msgid "level" msgstr "" #: src/clients/data/arccp.cpp:598 msgid "number of retries before failing file transfer" msgstr "" #: src/clients/data/arccp.cpp:599 msgid "number" msgstr "szám" #: src/clients/data/arccp.cpp:603 msgid "" "physical location to write to when destination is an indexing service. Must " "be specified for indexing services which do not automatically generate " "physical locations. Can be specified multiple times - locations will be " "tried in order until one succeeds." 
msgstr "" #: src/clients/data/arccp.cpp:611 msgid "" "perform third party transfer, where the destination pulls from the source " "(only available with GFAL plugin)" msgstr "" #: src/clients/data/arccp.cpp:617 src/clients/data/arcls.cpp:323 #: src/clients/data/arcmkdir.cpp:101 src/clients/data/arcrename.cpp:112 #: src/clients/data/arcrm.cpp:127 msgid "list the available plugins (protocols supported)" msgstr "" #: src/clients/data/arccp.cpp:656 src/clients/data/arcls.cpp:363 #: src/clients/data/arcmkdir.cpp:141 src/clients/data/arcrename.cpp:152 #: src/clients/data/arcrm.cpp:168 msgid "Protocol plugins available:" msgstr "" #: src/clients/data/arccp.cpp:681 src/clients/data/arcls.cpp:388 #: src/clients/data/arcmkdir.cpp:165 src/clients/data/arcrename.cpp:175 #: src/clients/data/arcrm.cpp:193 msgid "Wrong number of parameters specified" msgstr "" #: src/clients/data/arccp.cpp:686 msgid "Options 'p' and 'n' can't be used simultaneously" msgstr "" #: src/clients/data/arcls.cpp:129 src/clients/data/arcmkdir.cpp:34 #: src/clients/data/arcrm.cpp:43 #, c-format msgid "Can't read list of locations from file %s" msgstr "" #: src/clients/data/arcls.cpp:144 src/clients/data/arcmkdir.cpp:49 #: src/clients/data/arcrename.cpp:61 msgid "Unsupported URL given" msgstr "" #: src/clients/data/arcls.cpp:149 src/clients/data/arcls.cpp:158 #, fuzzy, c-format msgid "Unable to list content of %s" msgstr "Nem sikerült betölteni a %s bróker modult" #: src/clients/data/arcls.cpp:228 msgid "Warning: Failed listing files but some information is obtained" msgstr "" #: src/clients/data/arcls.cpp:282 src/clients/data/arcmkdir.cpp:90 msgid "url" msgstr "url" #: src/clients/data/arcls.cpp:283 msgid "" "The arcls command is used for listing files in grid storage elements and " "file\n" "index catalogues." msgstr "" #: src/clients/data/arcls.cpp:292 msgid "show URLs of file locations" msgstr "" #: src/clients/data/arcls.cpp:296 msgid "display all available metadata" msgstr "" #: src/clients/data/arcls.cpp:310 msgid "" "show only description of requested object, do not list content of directories" msgstr "" #: src/clients/data/arcls.cpp:314 msgid "treat requested object as directory and always try to list content" msgstr "" #: src/clients/data/arcls.cpp:318 msgid "check readability of object, does not show any information about object" msgstr "" #: src/clients/data/arcls.cpp:393 msgid "Incompatible options --nolist and --forcelist requested" msgstr "" #: src/clients/data/arcls.cpp:398 msgid "Requesting recursion and --nolist has no sense" msgstr "" #: src/clients/data/arcmkdir.cpp:54 src/clients/data/arcmkdir.cpp:63 #, fuzzy, c-format msgid "Unable to create directory %s" msgstr "Nem sikerült elküldeni a kérést" #: src/clients/data/arcmkdir.cpp:91 msgid "" "The arcmkdir command creates directories on grid storage elements and " "catalogs." 
msgstr "" #: src/clients/data/arcmkdir.cpp:96 msgid "make parent directories as needed" msgstr "" #: src/clients/data/arcrename.cpp:41 msgid "Both URLs must have the same protocol, host and port" msgstr "" #: src/clients/data/arcrename.cpp:51 msgid "Cannot rename to or from root directory" msgstr "" #: src/clients/data/arcrename.cpp:55 msgid "Cannot rename to the same URL" msgstr "" #: src/clients/data/arcrename.cpp:66 src/clients/data/arcrename.cpp:75 #, fuzzy, c-format msgid "Unable to rename %s" msgstr "Nem sikerült elküldeni a kérést" #: src/clients/data/arcrename.cpp:106 msgid "old_url new_url" msgstr "" #: src/clients/data/arcrename.cpp:107 #, fuzzy msgid "The arcrename command renames files on grid storage elements." msgstr "Az arcclean parancs eltávolít egy feladatot a távoli klaszterröl" #: src/clients/data/arcrm.cpp:58 #, fuzzy, c-format msgid "Unsupported URL given: %s" msgstr "Nem támogatott url: %s" #: src/clients/data/arcrm.cpp:67 src/clients/data/arcrm.cpp:79 #, fuzzy, c-format msgid "Unable to remove file %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/clients/data/arcrm.cpp:115 #, fuzzy msgid "url [url ...]" msgstr "[klaszter ...]" #: src/clients/data/arcrm.cpp:116 #, fuzzy msgid "The arcrm command deletes files on grid storage elements." msgstr "Az arcclean parancs eltávolít egy feladatot a távoli klaszterröl" #: src/clients/data/arcrm.cpp:121 msgid "" "remove logical file name registration even if not all physical instances " "were removed" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:53 msgid "Cannot initialize ARCHERY domain name for query" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:60 msgid "Cannot create resolver from /etc/resolv.conf" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:68 msgid "Cannot query service endpoint TXT records from DNS" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:79 msgid "Cannot parse service endpoint TXT records." msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:124 #, c-format msgid "Wrong service record field \"%s\" found in the \"%s\"" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:129 #, c-format msgid "Malformed ARCHERY record found (endpoint url is not defined): %s" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:134 #, c-format msgid "Malformed ARCHERY record found (endpoint type is not defined): %s" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:138 #, c-format msgid "Found service endpoint %s (type %s)" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:157 #, c-format msgid "" "Status for service endpoint \"%s\" is set to inactive in ARCHERY. Skipping." msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:229 #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:161 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:149 #, c-format msgid "Job %s has no delegation associated. Can't renew such job." msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:241 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:161 #, fuzzy, c-format msgid "Job %s failed to renew delegation %s." 
msgstr "Nem támogatott url: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:313 #, fuzzy, c-format msgid "Failed to process jobs - wrong response: %u" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:314 #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:323 #, fuzzy, c-format msgid "Content: %s" msgstr "Forrás: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:317 #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:329 #, fuzzy, c-format msgid "Failed to process job: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:327 #, fuzzy msgid "Failed to process jobs - failed to parse response" msgstr "Nem tudom kiíratni a feladat leírást: Nem található várakozó sor." #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:340 #, fuzzy, c-format msgid "No response returned: %s" msgstr "Nincs válasz a voms szervertÅ‘l" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:364 #, fuzzy, c-format msgid "Failed to process job: %s - %s %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:431 #, c-format msgid "Failed retrieving job description for job: %s" msgstr "" #: src/hed/acc/ARCREST/JobListRetrieverPluginREST.cpp:29 msgid "Collecting Job (A-REX REST jobs) information." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:50 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:84 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:124 msgid "Failed to communicate to delegation endpoint." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:55 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:89 #, c-format msgid "Unexpected response code from delegation endpoint - %u" msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:57 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:91 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:242 #: src/hed/dmc/gridftp/Lister.cpp:223 src/hed/dmc/gridftp/Lister.cpp:243 #: src/hed/dmc/gridftp/Lister.cpp:468 src/hed/dmc/gridftp/Lister.cpp:475 #: src/hed/dmc/gridftp/Lister.cpp:497 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:163 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:196 #, c-format msgid "Response: %s" msgstr "Válasz: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:62 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:96 msgid "Missing response from delegation endpoint." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:71 #, c-format msgid "Unexpected delegation location from delegation endpoint - %s." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:128 #, c-format msgid "Unexpected response code from delegation endpoint: %u, %s." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:177 #, fuzzy msgid "Unable to submit jobs. Failed to delegate credentials." msgstr "Nem sikerült elküldeni a kérést" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:192 #, fuzzy msgid "Failed to prepare job description" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:201 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:87 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:401 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:116 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:262 #, fuzzy, c-format msgid "Unable to submit job. Job description is not valid in the %s format: %s" msgstr "Nem tudom kiíratni a feladat leírást: Nem található várakozó sor." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:208 msgid "Unable to submit job. 
Failed to assign delegation to job description." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:231 #, fuzzy msgid "Failed to submit all jobs." msgstr "Feladat újraküldésének megkísérlése ide: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:241 #, fuzzy, c-format msgid "Failed to submit all jobs: %u %s" msgstr "Feladat újraküldésének megkísérlése ide: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:254 #, fuzzy, c-format msgid "Failed to submit all jobs: %s" msgstr "Feladat újraküldésének megkísérlése ide: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:274 #, fuzzy, c-format msgid "Failed to submit all jobs: %s %s" msgstr "Feladat újraküldésének megkísérlése ide: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:289 msgid "Failed uploading local input files" msgstr "" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:27 msgid "Querying WSRF GLUE2 computing REST endpoint." msgstr "" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:60 #, c-format msgid "CONTENT %u: %s" msgstr "" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:64 #, fuzzy msgid "Response is not XML" msgstr "Válasz: %s" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:69 #, c-format msgid "Parsed domains: %u" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:14 msgid "Sorting according to free slots in queue" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:15 msgid "Random sorting" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:16 msgid "Sorting according to specified benchmark (default \"specint2000\")" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:17 msgid "Sorting according to input data availability at target" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:18 msgid "Performs neither sorting nor matching" msgstr "" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:24 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of waiting " "jobs" msgstr "" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:27 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of total slots" msgstr "" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:30 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of free slots" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:81 msgid "Creating an EMI ES client" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:85 msgid "Unable to create SOAP client used by EMIESClient." msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:133 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:505 msgid "Initiating delegation procedure" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:163 msgid "Re-creating an EMI ES client" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:180 #, c-format msgid "Processing a %s request" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:191 #, c-format msgid "%s request failed" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:200 #, c-format msgid "No response from %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:209 #, c-format msgid "%s request to %s failed with response: %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:224 #, c-format msgid "XML response: %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:234 #, c-format msgid "%s request to %s failed. Unexpected response: %s." 
msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:248 src/hed/acc/EMIES/EMIESClient.cpp:355 #, fuzzy, c-format msgid "Creating and sending job submit request to %s" msgstr "SOAP kérés készítése és küldése" #: src/hed/acc/EMIES/EMIESClient.cpp:313 src/hed/acc/EMIES/EMIESClient.cpp:416 #, c-format msgid "Job description to be sent: %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:426 src/hed/acc/EMIES/EMIESClient.cpp:609 #: src/hed/acc/EMIES/EMIESClient.cpp:1098 #, c-format msgid "New limit for vector queries returned by EMI ES service: %d" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:434 src/hed/acc/EMIES/EMIESClient.cpp:617 #: src/hed/acc/EMIES/EMIESClient.cpp:1106 #, c-format msgid "" "Error: Service returned a limit higher or equal to current limit (current: " "%d; returned: %d)" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:502 src/hed/acc/EMIES/EMIESClient.cpp:536 #: src/hed/acc/EMIES/EMIESClient.cpp:592 #, c-format msgid "Creating and sending job information query request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:775 #, fuzzy, c-format msgid "Creating and sending service information request to %s" msgstr "SOAP kérés készítése és küldése" #: src/hed/acc/EMIES/EMIESClient.cpp:832 #, c-format msgid "Creating and sending service information query request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:880 src/hed/acc/EMIES/EMIESClient.cpp:901 #, fuzzy, c-format msgid "Creating and sending job clean request to %s" msgstr "SOAP kérés készítése és küldése" #: src/hed/acc/EMIES/EMIESClient.cpp:922 #, fuzzy, c-format msgid "Creating and sending job suspend request to %s" msgstr "SOAP kérés készítése és küldése" #: src/hed/acc/EMIES/EMIESClient.cpp:943 #, c-format msgid "Creating and sending job resume request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:964 #, fuzzy, c-format msgid "Creating and sending job restart request to %s" msgstr "SOAP kérés készítése és küldése" #: src/hed/acc/EMIES/EMIESClient.cpp:1021 #, fuzzy, c-format msgid "Creating and sending job notify request to %s" msgstr "SOAP kérés készítése és küldése" #: src/hed/acc/EMIES/EMIESClient.cpp:1076 #, fuzzy, c-format msgid "Creating and sending notify request to %s" msgstr "SOAP kérés készítése és küldése" #: src/hed/acc/EMIES/EMIESClient.cpp:1166 #, fuzzy, c-format msgid "Creating and sending job list request to %s" msgstr "SOAP kérés készítése és küldése" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:175 #, c-format msgid "Job %s failed to renew delegation %s - %s." 
msgstr "" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:197 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:464 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:191 #, c-format msgid "Job %s does not report a resumable state" msgstr "" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:202 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:196 #, c-format msgid "Resuming job: %s at state: %s (%s)" msgstr "" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:215 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:520 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:205 msgid "Job resuming successful" msgstr "" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:248 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:251 #, c-format msgid "Failed retrieving information for job: %s" msgstr "" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:330 msgid "Retrieving job description of EMI ES jobs is not supported" msgstr "" #: src/hed/acc/EMIES/JobListRetrieverPluginEMIES.cpp:37 #, c-format msgid "Listing jobs succeeded, %d jobs found" msgstr "" #: src/hed/acc/EMIES/JobListRetrieverPluginEMIES.cpp:53 #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.cpp:111 #: src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp:83 #, c-format msgid "" "Skipping retrieved job (%s) because it was submitted via another interface " "(%s)." msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:47 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:41 msgid "" "Failed to delegate credentials to server - no delegation interface found" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:54 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:48 #, c-format msgid "Failed to delegate credentials to server - %s" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:77 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:87 #, fuzzy msgid "Failed preparing job description" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:95 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:406 #, fuzzy msgid "Unable to submit job. Job description is not valid XML" msgstr "Nem tudom kiíratni a feladat leírást: Nem található várakozó sor." 
#: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:154 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:482 msgid "No valid job identifier returned by EMI ES" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:180 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:499 msgid "Job failed on service side" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:190 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:509 msgid "Failed to obtain state of job" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:205 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:521 msgid "Failed to wait for job to allow stage in" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:228 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:541 msgid "Failed to obtain valid stagein URL for input files" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:248 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:558 #, c-format msgid "Failed uploading local input files to %s" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:269 #, c-format msgid "Failed to submit job description: EMIESFault(%s , %s)" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:278 #, c-format msgid "Failed to submit job description: UnexpectedError(%s)" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:315 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:574 #, fuzzy msgid "Failed to notify service" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:367 #, fuzzy msgid "Failed preparing job description to target resources" msgstr "Nem tudom kiíratni a feladat leírást: Nem található várakozó sor." #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:475 #, fuzzy, c-format msgid "Failed to submit job description: %s" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:30 msgid "Collecting EMI-ES GLUE2 computing info endpoint information." 
msgstr "" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:50 msgid "Generating EMIES targets" msgstr "" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:59 #, c-format msgid "Generated EMIES target: %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:75 #: src/hed/acc/EMIES/TestEMIESClient.cpp:79 #, c-format msgid "Query returned unexpected element: %s:%s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:85 #, c-format msgid "Element validation according to GLUE2 schema failed: %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:114 #, fuzzy msgid "Resource query failed" msgstr "Státusz lekérdezés sikertelen" #: src/hed/acc/EMIES/TestEMIESClient.cpp:132 msgid "Submission failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:143 #, fuzzy msgid "Obtaining status failed" msgstr "Státusz lekérdezés sikertelen" #: src/hed/acc/EMIES/TestEMIESClient.cpp:153 #, fuzzy msgid "Obtaining information failed" msgstr "verzió információ kiírása" #: src/hed/acc/EMIES/TestEMIESClient.cpp:170 msgid "Cleaning failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:177 msgid "Notify failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:184 msgid "Kill failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:190 msgid "List failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:201 #, c-format msgid "Fetching resource description from %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:204 #: src/hed/acc/EMIES/TestEMIESClient.cpp:272 #: src/hed/acc/EMIES/TestEMIESClient.cpp:282 #: src/hed/acc/EMIES/TestEMIESClient.cpp:293 #, fuzzy, c-format msgid "Failed to obtain resource description: %s" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:213 #: src/hed/acc/EMIES/TestEMIESClient.cpp:217 #, c-format msgid "Resource description contains unexpected element: %s:%s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:223 msgid "Resource description validation according to GLUE2 schema failed: " msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:228 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:560 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:819 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:517 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:706 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:129 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:169 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1214 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1248 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1430 #: src/hed/identitymap/ArgusPDPClient.cpp:444 #: src/hed/identitymap/ArgusPEPClient.cpp:98 #: src/hed/identitymap/ArgusPEPClient.cpp:345 #: src/hed/libs/common/Thread.cpp:242 src/hed/libs/common/Thread.cpp:245 #: src/hed/libs/common/Thread.cpp:248 #: src/hed/libs/credential/Credential.cpp:1048 #: src/hed/libs/data/DataPointDelegate.cpp:628 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:68 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:84 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:100 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:119 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:129 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:137 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:146 #: src/hed/mcc/tls/PayloadTLSMCC.cpp:82 src/hed/shc/arcpdp/ArcPDP.cpp:235 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:293 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:247 #: src/libs/data-staging/Scheduler.cpp:117 #: src/services/a-rex/delegation/DelegationStore.cpp:40 #: src/services/a-rex/delegation/DelegationStore.cpp:45 #: src/services/a-rex/delegation/DelegationStore.cpp:50 #: 
src/services/a-rex/delegation/DelegationStore.cpp:82 #: src/services/a-rex/delegation/DelegationStore.cpp:88 #: src/services/a-rex/grid-manager/inputcheck.cpp:33 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:480 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:551 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:576 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:587 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:598 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:609 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:617 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:623 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:628 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:633 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:643 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:652 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:660 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:671 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:678 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:736 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:743 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:783 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:787 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:790 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:859 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:872 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:889 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:901 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1174 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1179 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1208 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1221 #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:379 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:386 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:426 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:478 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:593 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:627 #, c-format msgid "%s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:248 #, fuzzy msgid "Resource description is empty" msgstr "Feladat leírás: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:255 #, c-format msgid "Resource description provides URL for interface %s: %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:260 msgid "Resource description provides no URLs for interfaces" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:263 msgid "Resource description validation passed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:265 #, c-format msgid "Requesting ComputingService elements of resource description at %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:270 msgid "Performing /Services/ComputingService query" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:274 #: src/hed/acc/EMIES/TestEMIESClient.cpp:284 #: src/hed/acc/EMIES/TestEMIESClient.cpp:295 msgid "Query returned no elements." 
msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:280 msgid "Performing /ComputingService query" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:291 msgid "Performing /* query" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:301 #, fuzzy msgid "All queries failed" msgstr "Státusz lekérdezés sikertelen" #: src/hed/acc/EMIES/TestEMIESClient.cpp:331 #, c-format msgid "" "Number of ComputingService elements obtained from full document and XPath " "query do not match: %d != %d" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:334 msgid "Resource description query validation passed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:336 #, fuzzy, c-format msgid "Unsupported command: %s" msgstr "Nem támogatott url: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:142 #, c-format msgid "Connect: Failed to init handle: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:148 #, c-format msgid "Failed to enable IPv6: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:158 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:172 #, c-format msgid "Connect: Failed to connect: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:165 #, c-format msgid "Connect: Connecting timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:185 #, c-format msgid "Connect: Failed to init auth info handle: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:196 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:210 #, c-format msgid "Connect: Failed authentication: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:203 #, c-format msgid "Connect: Authentication timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:224 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:256 #, c-format msgid "SendCommand: Command: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:229 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:240 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:260 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:271 #, c-format msgid "SendCommand: Failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:235 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:266 #, c-format msgid "SendCommand: Timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:243 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:276 #, fuzzy, c-format msgid "SendCommand: Response: %s" msgstr "Válasz: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:293 msgid "FTP Job Control: Failed sending EPSV and PASV commands" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:298 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:304 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:320 #, c-format msgid "FTP Job Control: Server PASV response parsing failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:330 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:336 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:343 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:350 #, c-format msgid "FTP Job Control: Server EPSV response parsing failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:357 #, c-format msgid "FTP Job Control: Server EPSV response port parsing failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:366 #, c-format msgid "FTP Job Control: Failed to apply local address to data connection: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:372 #, c-format msgid "" "FTP Job Control: Can't parse host and/or port in response to EPSV/PASV: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:377 #, c-format msgid "FTP Job Control: Data channel: %d.%d.%d.%d:%d" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:393 #, c-format msgid "FTP Job Control: Data 
channel: [%s]:%d" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:398 #, c-format msgid "FTP Job Control: Local port failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:422 msgid "FTP Job Control: Failed sending DCAU command" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:427 msgid "FTP Job Control: Failed sending TYPE command" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:436 #, c-format msgid "FTP Job Control: Local type failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:446 #, c-format msgid "FTP Job Control: Failed sending STOR command: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:454 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:475 #, c-format msgid "FTP Job Control: Data connect write failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:461 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:469 #, c-format msgid "FTP Job Control: Data connect write timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:487 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:507 #, c-format msgid "FTP Job Control: Data write failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:493 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:501 #, c-format msgid "FTP Job Control: Data write timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:527 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:538 #, c-format msgid "Disconnect: Failed aborting - ignoring: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:530 #, c-format msgid "Disconnect: Data close timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:541 #, c-format msgid "Disconnect: Abort timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:549 #, c-format msgid "Disconnect: Failed quitting - ignoring: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:552 #, c-format msgid "Disconnect: Quitting timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:561 #, c-format msgid "Disconnect: Failed closing - ignoring: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:567 #, c-format msgid "Disconnect: Closing timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:582 msgid "Disconnect: waiting for globus handle to settle" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:596 msgid "Disconnect: globus handle is stuck." msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:604 #, c-format msgid "Disconnect: Failed destroying handle: %s. Can't handle such situation." msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:607 msgid "Disconnect: handle destroyed." msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:43 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:44 msgid "" "Missing reference to factory and/or module. It is unsafe to use Globus in " "non-persistent mode - SubmitterPlugin for GRIDFTPJOB is disabled. Report to " "developers." msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:58 #, c-format msgid "Unable to query job information (%s), invalid URL provided (%s)" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:70 #, c-format msgid "Jobs left to query: %d" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:79 #, c-format msgid "Querying batch with %d jobs" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:97 msgid "Can't create information handle - is the ARC LDAP DMC plugin available?" 
msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:130 #, c-format msgid "Job information not found in the information system: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:132 msgid "" "This job was very recently submitted and might not yet have reached the " "information system" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:319 #, c-format msgid "Cleaning job: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:323 msgid "Failed to connect for job cleaning" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:335 msgid "Failed sending CWD command for job cleaning" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:342 msgid "Failed sending RMD command for job cleaning" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:349 msgid "Failed to disconnect after job cleaning" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:356 msgid "Job cleaning successful" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:367 #, c-format msgid "Cancelling job: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:371 msgid "Failed to connect for job cancelling" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:383 msgid "Failed sending CWD command for job cancelling" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:390 msgid "Failed sending DELE command for job cancelling" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:397 msgid "Failed to disconnect after job cancelling" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:405 msgid "Job cancelling successful" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:416 #, c-format msgid "Renewing credentials for job: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:420 msgid "Failed to connect for credential renewal" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:432 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:439 msgid "Failed sending CWD command for credentials renewal" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:445 msgid "Failed to disconnect after credentials renewal" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:452 msgid "Renewal of credentials was successful" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:474 #, c-format msgid "Illegal jobID specified (%s)" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:481 #, c-format msgid "HER: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:487 #, c-format msgid "Could not create temporary file: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:556 #, c-format msgid "Trying to retrieve job description of %s from computing resource" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:561 #, fuzzy, c-format msgid "invalid jobID: %s" msgstr "Érvénytelen URL: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:603 msgid "clientxrsl found" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:606 msgid "could not find start of clientxrsl" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:611 msgid "could not find end of clientxrsl" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:624 #, c-format msgid "Job description: %s" 
msgstr "Feladat leírás: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:627 msgid "clientxrsl not found" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:633 #, fuzzy, c-format msgid "Invalid JobDescription: %s" msgstr "Érvénytelen feladat leírás:" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:636 msgid "Valid JobDescription found" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:60 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:206 msgid "Submit: Failed to connect" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:68 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:214 msgid "Submit: Failed sending CWD command" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:79 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:225 msgid "Submit: Failed sending CWD new command" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:106 #, fuzzy msgid "Failed to prepare job description." msgstr "Nem tudom kiíratni a feladat leírást: Nem található várakozó sor." #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:123 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:269 msgid "Submit: Failed sending job description" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:138 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:284 msgid "Submit: Failed uploading local input files" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:194 msgid "" "Submit: service has no suitable information interface - need org.nordugrid." "ldapng" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:252 #, fuzzy msgid "Failed to prepare job description to target resources." msgstr "Nem tudom kiíratni a feladat leírást: Nem található várakozó sor." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:75 #, c-format msgid "[ADLParser] Unsupported EMI ES state %s." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:95 #, c-format msgid "[ADLParser] Unsupported internal state %s." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:105 #, c-format msgid "[ADLParser] Optional for %s elements are not supported yet." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:114 #, c-format msgid "[ADLParser] %s element must be boolean." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:126 #, c-format msgid "[ADLParser] Code in FailIfExitCodeNotEqualTo in %s is not valid number." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:364 msgid "[ADLParser] Root element is not ActivityDescription " msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:411 msgid "[ADLParser] priority is too large - using max value 100" msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:454 #, c-format msgid "[ADLParser] Unsupported URL %s for RemoteLogging." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:473 #, c-format msgid "[ADLParser] Wrong time %s in ExpirationTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:503 msgid "[ADLParser] AccessControl isn't valid XML." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:514 msgid "[ADLParser] CredentialService must contain valid URL." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:543 #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:546 msgid "[ADLParser] Only email Prorocol for Notification is supported yet." 
msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:604 msgid "[ADLParser] Missing or wrong value in ProcessesPerSlot." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:609 msgid "[ADLParser] Missing or wrong value in ThreadsPerProcess." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:615 msgid "" "[ADLParser] Missing Name element or value in ParallelEnvironment/Option " "element." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:632 #, fuzzy msgid "[ADLParser] NetworkInfo is not supported yet." msgstr "A fileset regisztráció nem támogatott még" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:646 #, c-format msgid "[ADLParser] NodeAccess value %s is not supported yet." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:654 msgid "[ADLParser] Missing or wrong value in NumberOfSlots." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:661 msgid "" "[ADLParser] The NumberOfSlots element should be specified, when the value of " "useNumberOfSlots attribute of SlotsPerHost element is \"true\"." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:668 msgid "[ADLParser] Missing or wrong value in SlotsPerHost." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:697 msgid "[ADLParser] Missing or wrong value in IndividualPhysicalMemory." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:707 msgid "[ADLParser] Missing or wrong value in IndividualVirtualMemory." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:717 msgid "[ADLParser] Missing or wrong value in DiskSpaceRequirement." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:731 #, fuzzy msgid "[ADLParser] Benchmark is not supported yet." msgstr "A fileset regisztráció nem támogatott még" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:739 msgid "[ADLParser] Missing or wrong value in IndividualCPUTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:747 msgid "[ADLParser] Missing or wrong value in TotalCPUTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:756 msgid "[ADLParser] Missing or wrong value in WallTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:776 msgid "[ADLParser] Missing or empty Name in InputFile." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:787 #, c-format msgid "[ADLParser] Wrong URI specified in Source - %s." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:809 msgid "[ADLParser] Missing or empty Name in OutputFile." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:815 #, c-format msgid "[ADLParser] Wrong URI specified in Target - %s." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:828 #, c-format msgid "Location URI for file %s is invalid" msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:853 #, c-format msgid "[ADLParser] CreationFlag value %s is not supported."
msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:42 msgid "Left operand for RSL concatenation does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:50 msgid "Right operand for RSL concatenation does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:161 msgid "Multi-request operator only allowed at top level" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:186 msgid "RSL substitution is not a sequence" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:192 msgid "RSL substitution sequence is not of length 2" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:211 msgid "RSL substitution variable name does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:220 msgid "RSL substitution variable value does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:313 msgid "End of comment not found" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:324 msgid "Junk at end of RSL" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:424 msgid "End of single quoted string not found" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:441 msgid "End of double quoted string not found" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:460 #, c-format msgid "End of user delimiter (%s) quoted string not found" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:518 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:546 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:625 msgid "')' expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:528 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:609 msgid "'(' expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:536 msgid "Variable name expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:541 #, c-format msgid "Variable name (%s) contains invalid character (%s)" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:557 #, fuzzy msgid "Broken string" msgstr "szöveg" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:570 msgid "No left operand for concatenation operator" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:574 msgid "No right operand for concatenation operator" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:638 msgid "Attribute name expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:643 #, c-format msgid "Attribute name (%s) contains invalid character (%s)" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:649 msgid "Relation operator expected" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:86 msgid "Error parsing the internally set executables attribute." 
msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:102 #, c-format msgid "" "File '%s' in the 'executables' attribute is not present in the 'inputfiles' " "attribute" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:120 msgid "The value of the ftpthreads attribute must be a number from 1 to 10" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:177 msgid "'stdout' attribute must be specified when 'join' attribute is specified" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:181 msgid "" "Attribute 'join' cannot be specified when both 'stdout' and 'stderr' " "attributes is specified" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:200 #, fuzzy msgid "Attributes 'gridtime' and 'cputime' cannot be specified together" msgstr "A 'sort' vagy az 'rsort' kapcsolókat nem lehet egyszerre használni" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:204 msgid "Attributes 'gridtime' and 'walltime' cannot be specified together" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:226 msgid "" "When specifying 'countpernode' attribute, 'count' attribute must also be " "specified" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:229 msgid "Value of 'countpernode' attribute must be an integer" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:287 #, fuzzy msgid "No RSL content in job description found" msgstr "Nem tudom kiíratni a feladat leírást: Nem található várakozó sor." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:295 msgid "'action' attribute not allowed in user-side job description" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:304 #, c-format msgid "String successfully parsed as %s." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:313 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:331 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:349 #, c-format msgid "Attribute '%s' multiply defined" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:317 #, c-format msgid "Value of attribute '%s' expected to be single value" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:322 #, c-format msgid "Value of attribute '%s' expected to be a string" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:338 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:368 #, c-format msgid "Value of attribute '%s' is not a string" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:356 #, c-format msgid "Value of attribute '%s' is not sequence" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:360 #, c-format msgid "" "Value of attribute '%s' has wrong sequence length: Expected %d, found %d" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:492 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1375 msgid "Unexpected RSL type" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:557 msgid "At least two values are needed for the 'inputfiles' attribute" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:562 msgid "First value of 'inputfiles' attribute (filename) cannot be empty" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:587 #, fuzzy, c-format msgid "Invalid URL '%s' for input file '%s'" msgstr "Érvénytelen URL: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:596 #, fuzzy, c-format msgid "Invalid URL option syntax in option '%s' for input file '%s'" msgstr "Érvénytelen URL: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:606 #, fuzzy, c-format msgid "Invalid URL: '%s' in input file '%s'" msgstr 
"Érvénytelen URL: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:677 msgid "At least two values are needed for the 'outputfiles' attribute" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:682 msgid "First value of 'outputfiles' attribute (filename) cannot be empty" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:694 #, fuzzy, c-format msgid "Invalid URL '%s' for output file '%s'" msgstr "Érvénytelen URL: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:704 #, fuzzy, c-format msgid "Invalid URL option syntax in option '%s' for output file '%s'" msgstr "Érvénytelen URL: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:714 #, fuzzy, c-format msgid "Invalid URL: '%s' in output file '%s'" msgstr "Érvénytelen URL: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:746 #, c-format msgid "" "Invalid comparison operator '%s' used at 'delegationid' attribute, only \"=" "\" is allowed." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:764 #, c-format msgid "" "Invalid comparison operator '%s' used at 'queue' attribute in 'GRIDMANAGER' " "dialect, only \"=\" is allowed" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:770 #, c-format msgid "" "Invalid comparison operator '%s' used at 'queue' attribute, only \"!=\" or " "\"=\" are allowed." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:927 #, c-format msgid "Value of attribute '%s' expected not to be empty" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1036 msgid "The value of the acl XRSL attribute isn't valid XML." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1050 msgid "The cluster XRSL attribute is currently unsupported." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1066 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it must contain an email " "address" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1074 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it must only contain email " "addresses after state flag(s)" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1077 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it contains unknown state " "flags" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1125 msgid "priority is too large - using max value 100" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1158 #, c-format msgid "Invalid nodeaccess value: %s" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1201 msgid "Value of 'count' attribute must be an integer" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1231 msgid "Value of 'exclusiveexecution' attribute must either be 'yes' or 'no'" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1277 #, c-format msgid "Invalid action value %s" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1367 #, c-format msgid "The specified Globus attribute (%s) is not supported. %s ignored." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1371 #, c-format msgid "Unknown XRSL attribute: %s - Ignoring it." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1385 #, c-format msgid "Wrong language requested: %s" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1722 msgid "" "Cannot output XRSL representation: The Resources.SlotRequirement." "NumberOfSlots attribute must be specified when the Resources.SlotRequirement." "SlotsPerHost attribute is specified." 
msgstr "" #: src/hed/acc/LDAP/Extractor.h:22 #, c-format msgid "Extractor[%s] (%s): %s = %s" msgstr "" #: src/hed/acc/LDAP/Extractor.h:113 src/hed/acc/LDAP/Extractor.h:130 #, c-format msgid "Extractor[%s] (%s): %s contains %s" msgstr "" #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPGLUE2.cpp:54 #, c-format msgid "Adding endpoint '%s' with interface name %s" msgstr "" #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.cpp:72 #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:46 #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:47 #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:57 msgid "Can't create information handle - is the ARC ldap DMC plugin available?" msgstr "" #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:79 #, c-format msgid "Unknown entry in EGIIS (%s)" msgstr "" #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:87 msgid "" "Entry in EGIIS is missing one or more of the attributes 'Mds-Service-type', " "'Mds-Service-hn', 'Mds-Service-port' and/or 'Mds-Service-Ldap-suffix'" msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:219 msgid "" "The \"FreeSlotsWithDuration\" attribute is wrongly formatted. Ignoring it." msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:220 #: src/hed/libs/compute/GLUE2.cpp:248 #, c-format msgid "Wrong format of the \"FreeSlotsWithDuration\" = \"%s\" (\"%s\")" msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:389 #, c-format msgid "Unable to parse the %s.%s value from execution service (%s)." msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:390 #, c-format msgid "Value of %s.%s is \"%s\"" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:65 #: src/services/wrappers/python/pythonwrapper.cpp:92 msgid "Failed to initialize main Python thread" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:71 #: src/services/wrappers/python/pythonwrapper.cpp:97 msgid "Main Python thread was not initialized" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:81 #, fuzzy, c-format msgid "Loading Python broker (%i)" msgstr "PythonBroker betöltése" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:104 #: src/services/wrappers/python/pythonwrapper.cpp:134 msgid "Main Python thread is not initialized" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:108 msgid "PythonBroker init" msgstr "PythonBroker betöltése" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:116 msgid "" "Invalid class name. The broker argument for the PythonBroker should be\n" " Filename.Class.args (args is optional), for example SampleBroker." 
"MyBroker" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:122 #, fuzzy, c-format msgid "Class name: %s" msgstr "osztály neve: %s" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:123 #, fuzzy, c-format msgid "Module name: %s" msgstr "modul neve: %s" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:132 #: src/services/wrappers/python/pythonwrapper.cpp:178 msgid "Cannot convert ARC module name to Python string" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:140 #: src/services/wrappers/python/pythonwrapper.cpp:186 #, fuzzy msgid "Cannot import ARC module" msgstr "Nem tudom importálni az arc modult" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:149 #: src/services/wrappers/python/pythonwrapper.cpp:196 #: src/services/wrappers/python/pythonwrapper.cpp:429 #, fuzzy msgid "Cannot get dictionary of ARC module" msgstr "Nem tudom importálni az arc modult" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:158 msgid "Cannot find ARC UserConfig class" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:166 msgid "UserConfig class is not an object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:173 #, fuzzy msgid "Cannot find ARC JobDescription class" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:181 msgid "JobDescription class is not an object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:188 msgid "Cannot find ARC ExecutionTarget class" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:196 msgid "ExecutionTarget class is not an object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:207 #: src/services/wrappers/python/pythonwrapper.cpp:157 msgid "Cannot convert module name to Python string" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:215 #: src/services/wrappers/python/pythonwrapper.cpp:164 msgid "Cannot import module" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:224 msgid "Cannot get dictionary of custom broker module" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:233 msgid "Cannot find custom broker class" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:241 #, c-format msgid "%s class is not an object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:247 msgid "Cannot create UserConfig argument" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:255 msgid "Cannot convert UserConfig to Python object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:263 #: src/services/wrappers/python/pythonwrapper.cpp:253 msgid "Cannot create argument of the constructor" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:272 #: src/services/wrappers/python/pythonwrapper.cpp:261 msgid "Cannot create instance of Python class" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:278 #, c-format msgid "Python broker constructor called (%d)" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:302 #, c-format msgid "Python broker destructor called (%d)" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:311 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:328 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:361 msgid "Cannot create ExecutionTarget argument" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:319 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:336 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:369 #, c-format msgid "Cannot convert ExecutionTarget (%s) to python object" msgstr "" #: 
src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:393 msgid "Cannot create JobDescription argument" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:401 msgid "Cannot convert JobDescription to python object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:422 msgid "Do sorting using user created python broker" msgstr "" #: src/hed/daemon/unix/daemon.cpp:84 #, c-format msgid "Daemonization fork failed: %s" msgstr "" #: src/hed/daemon/unix/daemon.cpp:95 msgid "Watchdog (re)starting application" msgstr "" #: src/hed/daemon/unix/daemon.cpp:100 #, c-format msgid "Watchdog fork failed: %s" msgstr "" #: src/hed/daemon/unix/daemon.cpp:110 msgid "Watchdog starting monitoring" msgstr "" #: src/hed/daemon/unix/daemon.cpp:136 #, c-format msgid "Watchdog detected application exit due to signal %u" msgstr "" #: src/hed/daemon/unix/daemon.cpp:138 #, c-format msgid "Watchdog detected application exited with code %u" msgstr "" #: src/hed/daemon/unix/daemon.cpp:140 msgid "Watchdog detected application exit" msgstr "" #: src/hed/daemon/unix/daemon.cpp:149 msgid "" "Watchdog exiting because application was purposely killed or exited itself" msgstr "" #: src/hed/daemon/unix/daemon.cpp:156 msgid "Watchdog detected application timeout or error - killing process" msgstr "" #: src/hed/daemon/unix/daemon.cpp:167 msgid "Watchdog failed to wait till application exited - sending KILL" msgstr "" #: src/hed/daemon/unix/daemon.cpp:179 msgid "Watchdog failed to kill application - giving up and exiting" msgstr "" #: src/hed/daemon/unix/daemon.cpp:200 msgid "Shutdown daemon" msgstr "Démon leállítása" #: src/hed/daemon/unix/main_unix.cpp:43 msgid "shutdown" msgstr "leállítás" #: src/hed/daemon/unix/main_unix.cpp:46 msgid "exit" msgstr "kilép" #: src/hed/daemon/unix/main_unix.cpp:84 msgid "No server config part of config file" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:159 #, c-format msgid "Unknown log level %s" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:169 #, c-format msgid "Failed to open log file: %s" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:201 msgid "Start foreground" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:250 #, c-format msgid "XML config file %s does not exist" msgstr "Az XML konfigurációs fájl: %s nem létezik" #: src/hed/daemon/unix/main_unix.cpp:254 src/hed/daemon/unix/main_unix.cpp:269 #, c-format msgid "Failed to load service configuration from file %s" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:260 #, c-format msgid "INI config file %s does not exist" msgstr "Az INI konfigurációs fájl: %s nem létezik" #: src/hed/daemon/unix/main_unix.cpp:265 src/hed/daemon/unix/main_unix.cpp:287 msgid "Error evaluating profile" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:281 msgid "Error loading generated configuration" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:292 msgid "Failed to load service configuration from any default config file" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:353 msgid "Schema validation error" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:368 msgid "Configuration root element is not " msgstr "" #: src/hed/daemon/unix/main_unix.cpp:384 #, c-format msgid "Cannot switch to group (%s)" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:394 #, c-format msgid "Cannot switch to primary group for user (%s)" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:399 #, c-format msgid "Cannot switch to user (%s)" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:417 msgid "Failed to load service side MCCs" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:419 
src/tests/count/test_service.cpp:29 #: src/tests/echo/test.cpp:30 src/tests/echo/test_service.cpp:29 msgid "Service side MCCs are loaded" msgstr "A szolgáltatás oldali MCC-k betöltődtek" #: src/hed/daemon/unix/main_unix.cpp:426 msgid "Unexpected arguments supplied" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:93 src/hed/dmc/acix/DataPointACIX.cpp:342 #: src/hed/dmc/rucio/DataPointRucio.cpp:220 #: src/hed/dmc/rucio/DataPointRucio.cpp:462 #, c-format msgid "No locations found for %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:121 #, c-format msgid "Found none or multiple URLs (%s) in ACIX URL: %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:131 #, c-format msgid "Cannot handle URL %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:138 #, c-format msgid "Could not resolve original source of %s: out of time" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:144 #, c-format msgid "Could not resolve original source of %s: %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:160 #, c-format msgid "Querying ACIX server at %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:161 #, c-format msgid "Calling acix with query %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:167 #, fuzzy, c-format msgid "Failed to query ACIX: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/acix/DataPointACIX.cpp:171 #: src/hed/dmc/acix/DataPointACIX.cpp:308 #, fuzzy, c-format msgid "Failed to parse ACIX response: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/acix/DataPointACIX.cpp:298 #, c-format msgid "ACIX returned %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:319 #, c-format msgid "No locations for %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:325 #, c-format msgid "%s: ACIX Location: %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:327 #, c-format msgid "%s: Location %s not accessible remotely, skipping" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:86 #, c-format msgid "Unknown channel %s for stdio protocol" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:93 #, fuzzy, c-format msgid "Failed to open stdio channel %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/file/DataPointFile.cpp:94 #, c-format msgid "Failed to open stdio channel %d" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:334 #, c-format msgid "fsync of file %s failed: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:338 #: src/hed/dmc/file/DataPointFile.cpp:345 #, c-format msgid "closing file %s failed: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:364 #, c-format msgid "File is not accessible: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:370 #: src/hed/dmc/file/DataPointFile.cpp:455 #, c-format msgid "Can't stat file: %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:416 #: src/hed/dmc/file/DataPointFile.cpp:422 #, c-format msgid "Can't stat stdio channel %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:470 #, fuzzy, c-format msgid "%s is not a directory" msgstr "könyvtár" #: src/hed/dmc/file/DataPointFile.cpp:485 src/hed/dmc/s3/DataPointS3.cpp:440 #: src/hed/dmc/s3/DataPointS3.cpp:550 #, c-format msgid "Failed to read object %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:498 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:517 #, c-format msgid "File is not accessible %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:504 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:523 #, c-format msgid "Can't delete directory %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:511 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:530 #, c-format msgid "Can't delete file %s:
%s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:521 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:335 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:313 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1466 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:545 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:565 #, c-format msgid "Creating directory %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:529 src/hed/dmc/srm/DataPointSRM.cpp:171 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:579 #, fuzzy, c-format msgid "Renaming %s to %s" msgstr "Indok : %s" #: src/hed/dmc/file/DataPointFile.cpp:531 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:588 #, c-format msgid "Can't rename file %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:559 #, c-format msgid "Failed to open %s for reading: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:574 #: src/hed/dmc/file/DataPointFile.cpp:709 #, c-format msgid "Failed to switch user id to %d/%d" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:580 #, c-format msgid "Failed to create/open file %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:596 #, fuzzy msgid "Failed to create thread" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/file/DataPointFile.cpp:676 #, c-format msgid "Invalid url: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:685 src/hed/libs/data/FileCache.cpp:482 #, c-format msgid "Failed to create directory %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:698 #: src/hed/dmc/file/DataPointFile.cpp:717 #, fuzzy, c-format msgid "Failed to create file %s: %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/hed/dmc/file/DataPointFile.cpp:729 #, c-format msgid "setting file %s to size %llu" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:749 #, c-format msgid "Failed to preallocate space for %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:790 src/hed/libs/data/FileCache.cpp:856 #, fuzzy, c-format msgid "Failed to clean up file %s: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/file/DataPointFile.cpp:799 #, c-format msgid "Error during file validation. 
Can't stat file %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:803 #, c-format msgid "" "Error during file validation: Local file size %llu does not match source " "file size %llu for file %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:53 #, c-format msgid "Using proxy %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:54 #, c-format msgid "Using key %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:55 #, c-format msgid "Using cert %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:113 msgid "Locations are missing in destination LFC URL" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:119 #, c-format msgid "Duplicate replica found in LFC: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:121 #, c-format msgid "Adding location: %s - %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:129 #: src/hed/libs/data/DataPointIndex.cpp:161 #, c-format msgid "Add location: url: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:130 #: src/hed/libs/data/DataPointIndex.cpp:162 #, c-format msgid "Add location: metadata: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:150 #: src/hed/dmc/gfal/DataPointGFAL.cpp:310 #, fuzzy, c-format msgid "gfal_open failed: %s" msgstr "Fájl feltöltve %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:163 #: src/hed/dmc/gfal/DataPointGFAL.cpp:223 #: src/hed/dmc/gfal/DataPointGFAL.cpp:249 #: src/hed/dmc/gfal/DataPointGFAL.cpp:324 #: src/hed/dmc/gfal/DataPointGFAL.cpp:403 #: src/hed/dmc/gfal/DataPointGFAL.cpp:430 #, c-format msgid "gfal_close failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:195 #, fuzzy, c-format msgid "gfal_read failed: %s" msgstr "Fájl feltöltve %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:237 msgid "StopReading starts waiting for transfer_condition." msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:239 msgid "StopReading finished waiting for transfer_condition." msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:271 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:68 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:73 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:42 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:47 #, c-format msgid "No locations defined for %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:278 #, fuzzy, c-format msgid "Failed to set LFC replicas: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/gfal/DataPointGFAL.cpp:304 #, c-format msgid "gfal_mkdir failed (%s), trying to write anyway" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:359 #, c-format msgid "DataPointGFAL::write_file got position %d and offset %d, has to seek" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:388 #, c-format msgid "gfal_write failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:418 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:388 msgid "StopWriting starts waiting for transfer_condition." msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:420 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:390 msgid "StopWriting finished waiting for transfer_condition." 
msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:451 #, c-format msgid "gfal_stat failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:496 #, c-format msgid "gfal_listxattr failed, no replica information can be obtained: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:537 #, fuzzy, c-format msgid "gfal_opendir failed: %s" msgstr "Fájl feltöltve %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:549 #, c-format msgid "List will stat the URL %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:556 #, fuzzy, c-format msgid "gfal_closedir failed: %s" msgstr "Fájl feltöltve %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:584 #, c-format msgid "gfal_rmdir failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:587 #, c-format msgid "gfal_unlink failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:604 #, c-format msgid "gfal_mkdir failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:619 #, c-format msgid "gfal_rename failed: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:19 #, c-format msgid "Failed to obtain bytes transferred: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:42 #, c-format msgid "Failed to get initiate GFAL2 parameter handle: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:49 #, c-format msgid "Failed to get initiate new GFAL2 context: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:56 #, c-format msgid "Failed to set GFAL2 monitor callback: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:64 #, c-format msgid "Failed to set overwrite option in GFAL2: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:72 #, c-format msgid "Failed to set GFAL2 transfer timeout, will use default: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:84 msgid "Transfer failed" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:92 #, fuzzy msgid "Transfer succeeded" msgstr "Proxy készítés sikeres" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:38 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:54 msgid "ftp_complete_callback: success" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:44 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:60 #, c-format msgid "ftp_complete_callback: error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:60 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:76 msgid "ftp_check_callback" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:62 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:90 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:116 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:135 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:305 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:340 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:678 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:847 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:879 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:913 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1052 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1104 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1114 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1122 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1130 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1138 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1144 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:78 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:106 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:283 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:319 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:729 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:762 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:799 #: 
src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:930 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:994 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1004 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1012 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1020 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1028 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1034 #: src/services/gridftpd/commands.cpp:1226 #: src/services/gridftpd/dataread.cpp:76 src/services/gridftpd/dataread.cpp:173 #: src/services/gridftpd/datawrite.cpp:59 #: src/services/gridftpd/datawrite.cpp:146 #, c-format msgid "Globus error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:73 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:89 msgid "Excessive data received while checking file access" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:89 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:105 msgid "Registration of Globus FTP buffer failed - cancel check" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:115 msgid "check_ftp: globus_ftp_client_size failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:119 msgid "check_ftp: timeout waiting for size" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:124 msgid "check_ftp: failed to get file's size" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:127 #, c-format msgid "check_ftp: obtained size: %lli" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:134 msgid "check_ftp: globus_ftp_client_modification_time failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:138 msgid "check_ftp: timeout waiting for modification_time" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:143 msgid "check_ftp: failed to get file's modification time" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:148 #, c-format msgid "check_ftp: obtained modification date: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:167 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:145 msgid "check_ftp: globus_ftp_client_get failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:174 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:152 msgid "check_ftp: globus_ftp_client_register_read" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:185 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:164 msgid "check_ftp: timeout waiting for partial get" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:215 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:191 #, c-format msgid "File delete failed, attempting directory delete for %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:225 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:202 msgid "delete_ftp: globus_ftp_client_delete failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:231 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:252 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:208 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:230 msgid "delete_ftp: timeout waiting for delete" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:246 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:224 msgid "delete_ftp: globus_ftp_client_rmdir failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:301 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:278 #, c-format msgid "mkdir_ftp: making %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:309 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:287 msgid "mkdir_ftp: timeout waiting for mkdir" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:344 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:323 msgid 
"Timeout waiting for mkdir" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:370 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:346 msgid "start_reading_ftp" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:374 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:350 msgid "start_reading_ftp: globus_ftp_client_get" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:388 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:364 msgid "start_reading_ftp: globus_ftp_client_get failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:399 msgid "start_reading_ftp: globus_thread_create failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:418 msgid "stop_reading_ftp: aborting connection" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:425 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:647 #, c-format msgid "Failed to abort transfer of ftp file: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:426 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:648 msgid "Assuming transfer is already aborted or failed." msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:433 msgid "stop_reading_ftp: waiting for transfer to finish" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:435 #, c-format msgid "stop_reading_ftp: exiting: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:449 msgid "ftp_read_thread: get and register buffers" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:455 #, c-format msgid "ftp_read_thread: for_read failed - aborting: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:465 #, c-format msgid "ftp_read_thread: data callback failed - aborting: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:477 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:380 #, c-format msgid "ftp_read_thread: Globus error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:490 #, c-format msgid "ftp_read_thread: too many registration failures - abort: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:495 #, c-format msgid "ftp_read_thread: failed to register Globus buffer - will try later: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:508 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:396 msgid "ftp_read_thread: waiting for eof" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:512 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:400 msgid "ftp_read_thread: waiting for buffers released" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:516 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:408 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:662 msgid "ftp_read_thread: failed to release buffers - leaking" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:521 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:415 msgid "ftp_read_thread: exiting" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:539 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:436 #, c-format msgid "ftp_read_callback: failure: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:542 msgid "ftp_read_callback: success" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:558 msgid "Failed to get ftp file" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:594 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:543 msgid "start_writing_ftp: mkdir" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:597 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:545 msgid "start_writing_ftp: mkdir failed - still trying to write" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:599 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:547 
msgid "start_writing_ftp: put" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:613 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:561 msgid "start_writing_ftp: put failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:623 msgid "start_writing_ftp: globus_thread_create failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:640 #: src/hed/libs/data/DataPointDelegate.cpp:307 msgid "StopWriting: aborting connection" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:664 #: src/hed/libs/data/DataPointDelegate.cpp:321 #, c-format msgid "StopWriting: Calculated checksum %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:668 #: src/hed/libs/data/DataPointDelegate.cpp:325 #, c-format msgid "StopWriting: looking for checksum of %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:677 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:912 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:798 msgid "list_files_ftp: globus_ftp_client_cksm failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:681 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:916 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:802 msgid "list_files_ftp: timeout waiting for cksum" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:688 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:923 msgid "list_files_ftp: no checksum information possible" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:691 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:926 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:815 #, c-format msgid "list_files_ftp: checksum %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:694 #: src/hed/libs/data/DataPointDelegate.cpp:332 msgid "" "Checksum type returned by server is different to requested type, cannot " "compare" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:696 #: src/hed/libs/data/DataPointDelegate.cpp:334 #, c-format msgid "Calculated checksum %s matches checksum reported by server" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:699 #: src/hed/libs/data/DataPointDelegate.cpp:337 #, c-format msgid "" "Checksum mismatch between calculated checksum %s and checksum reported by " "server %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:721 msgid "ftp_write_thread: get and register buffers" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:725 msgid "ftp_write_thread: for_write failed - aborting" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:743 msgid "ftp_write_thread: data callback failed - aborting" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:759 msgid "ftp_write_thread: waiting for eof" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:763 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:660 msgid "ftp_write_thread: waiting for buffers released" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:771 msgid "ftp_write_thread: failed to release buffers - leaking" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:776 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:667 msgid "ftp_write_thread: exiting" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:799 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:686 #, c-format msgid "ftp_write_callback: failure: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:802 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:688 #, c-format msgid "ftp_write_callback: success %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:817 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:704 msgid "Failed to store ftp file" msgstr "" #: 
src/hed/dmc/gridftp/DataPointGridFTP.cpp:825 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:709 msgid "ftp_put_complete_callback: success" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:841 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:723 #, c-format msgid "list_files_ftp: looking for size of %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:845 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:727 msgid "list_files_ftp: globus_ftp_client_size failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:851 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:852 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:733 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:734 msgid "list_files_ftp: timeout waiting for size" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:858 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:740 msgid "list_files_ftp: failed to get file's size" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:870 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:753 #, c-format msgid "list_files_ftp: looking for modification time of %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:876 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:759 msgid "list_files_ftp: globus_ftp_client_modification_time failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:883 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:766 msgid "list_files_ftp: timeout waiting for modification_time" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:891 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:774 msgid "list_files_ftp: failed to get file's modification time" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:903 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:788 #, c-format msgid "list_files_ftp: looking for checksum of %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:942 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:828 #, c-format msgid "Failed to obtain stat from FTP: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:948 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:833 msgid "No results returned from stat" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:954 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:839 #, c-format msgid "Wrong number of objects (%i) for stat from ftp: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:968 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:852 #, c-format msgid "Unexpected path %s returned from server" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1007 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:885 #, c-format msgid "Failed to obtain listing from FTP: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1050 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:928 msgid "Rename: globus_ftp_client_move failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1056 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:934 msgid "Rename: timeout waiting for operation to complete" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1103 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:993 msgid "init_handle: globus_ftp_client_handleattr_init failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1112 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1002 msgid "init_handle: globus_ftp_client_handleattr_set_gridftp2 failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1121 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1011 msgid "init_handle: globus_ftp_client_handle_init failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1128 
#: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1018 msgid "init_handle: globus_ftp_client_operationattr_init failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1136 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1026 msgid "init_handle: globus_ftp_client_operationattr_set_allow_ipv6 failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1142 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1032 msgid "init_handle: globus_ftp_client_operationattr_set_delayed_pasv failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1190 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1218 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1084 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1113 #, c-format msgid "globus_ftp_client_operationattr_set_authorization: error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1217 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1112 msgid "Failed to set credentials for GridFTP transfer" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1223 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1118 msgid "Using secure data transfer" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1228 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1123 msgid "Using insecure data transfer" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1255 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1150 msgid "~DataPoint: destroy ftp_handle" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1258 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1153 msgid "~DataPoint: destroy ftp_handle failed - retrying" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1276 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1171 msgid "~DataPoint: failed to destroy ftp_handle - leaking" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1296 msgid "" "Missing reference to factory and/or module. It is unsafe to use Globus in " "non-persistent mode - (Grid)FTP code is disabled. Report to developers." 
msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:388 msgid "ftp_read_thread: failed to register buffers" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:403 msgid "ftp_read_thread: failed to release buffers" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:441 #, c-format msgid "ftp_read_callback: success - offset=%u, length=%u, eof=%u, allow oof=%u" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:455 #, c-format msgid "ftp_read_callback: delayed data chunk: %llu %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:462 #, c-format msgid "ftp_read_callback: unexpected data out of order: %llu != %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:469 msgid "ftp_read_callback: too many unexpected out of order chunks" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:490 #, c-format msgid "ftp_read_callback: Globus error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:515 msgid "ftp_get_complete_callback: Failed to get ftp file" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:520 msgid "ftp_get_complete_callback: success" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:575 msgid "start_writing_ftp: waiting for data tag" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:578 msgid "start_writing_ftp: failed to read data tag" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:583 msgid "start_writing_ftp: waiting for data chunk" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:585 msgid "start_writing_ftp: failed to read data chunk" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:596 #, c-format msgid "ftp_write_thread: data out of order in stream mode: %llu != %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:603 msgid "ftp_write_thread: too many out of order chunks in stream mode" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:608 #, c-format msgid "start_writing_ftp: data chunk: %llu %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:614 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:640 #, c-format msgid "ftp_write_thread: Globus error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:633 #, c-format msgid "start_writing_ftp: delayed data chunk: %llu %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:652 msgid "start_writing_ftp: waiting for some buffers sent" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:658 #, fuzzy msgid "ftp_write_thread: waiting for transfer complete" msgstr "Az aktuális átvitel sikeres" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:809 msgid "list_files_ftp: no checksum information supported" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:811 msgid "list_files_ftp: no checksum information returned" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:906 msgid "Too many failures to obtain checksum - giving up" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1266 msgid "Expecting Command and URL provided" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1273 #: src/hed/libs/data/DataExternalHelper.cpp:376 msgid "Expecting Command among arguments" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1277 #: src/hed/libs/data/DataExternalHelper.cpp:380 msgid "Expecting URL among arguments" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:221 src/hed/dmc/gridftp/Lister.cpp:289 #: src/hed/dmc/gridftp/Lister.cpp:384 src/hed/dmc/gridftp/Lister.cpp:767 #: 
src/hed/dmc/gridftp/Lister.cpp:812 #, c-format msgid "Failure: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:288 msgid "Error getting list of files (in list)" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:290 msgid "Assuming - file not found" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:307 #, c-format msgid "list record: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:362 msgid "Failed reading list of files" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:398 msgid "Failed reading data" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:426 #, c-format msgid "Command: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:430 src/hed/dmc/gridftp/Lister.cpp:471 #: src/hed/mcc/http/PayloadHTTP.cpp:990 msgid "Memory allocation error" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:438 #, c-format msgid "%s failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:442 msgid "Command is being sent" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:447 msgid "Waiting for response" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:452 msgid "Callback got failure" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:538 msgid "Failed in globus_cond_init" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:542 msgid "Failed in globus_mutex_init" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:549 msgid "Failed allocating memory for handle" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:554 msgid "Failed in globus_ftp_control_handle_init" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:562 #, fuzzy msgid "Failed to enable IPv6" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/gridftp/Lister.cpp:573 src/services/gridftpd/commands.cpp:984 msgid "Closing connection" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:580 src/hed/dmc/gridftp/Lister.cpp:595 msgid "Timeout waiting for Globus callback - leaking connection" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:605 msgid "Closed successfully" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:607 msgid "Closing may have failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:634 msgid "Waiting for globus handle to settle" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:639 #, c-format msgid "Handle is not in proper state %u/%u" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:645 msgid "Globus handle is stuck" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:661 #, c-format msgid "Failed destroying handle: %s. Can't handle such situation." 
msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:684 #, c-format msgid "EPSV failed: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:688 msgid "EPSV failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:695 #, c-format msgid "PASV failed: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:699 msgid "PASV failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:765 msgid "Failed to apply local address to data connection" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:783 msgid "Can't parse host and/or port in response to EPSV/PASV" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:788 #, c-format msgid "Data channel: %d.%d.%d.%d:%d" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:806 #, c-format msgid "Data channel: [%s]:%d" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:810 msgid "Obtained host and address are not acceptable" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:820 msgid "Failed to open data channel" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:838 #, c-format msgid "Unsupported protocol in url %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:850 msgid "Reusing connection" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:874 #, c-format msgid "Failed connecting to server %s:%d" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:880 #, c-format msgid "Failed to connect to server %s:%d" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:896 #, fuzzy msgid "Missing authentication information" msgstr "verzió információ kiírása" #: src/hed/dmc/gridftp/Lister.cpp:905 src/hed/dmc/gridftp/Lister.cpp:919 #, c-format msgid "Bad authentication information: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:928 src/hed/dmc/gridftp/Lister.cpp:943 #, fuzzy, c-format msgid "Failed authenticating: %s" msgstr "Nem sikerült listázni a fájlokat" #: src/hed/dmc/gridftp/Lister.cpp:935 msgid "Failed authenticating" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:970 src/hed/dmc/gridftp/Lister.cpp:1126 #, c-format msgid "DCAU failed: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:974 src/hed/dmc/gridftp/Lister.cpp:1131 msgid "DCAU failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:994 msgid "MLST is not supported - trying LIST" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1010 #, c-format msgid "Immediate completion expected: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1014 msgid "Immediate completion expected" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1027 #, c-format msgid "Missing information in reply: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1061 #, c-format msgid "Missing final reply: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1085 #, c-format msgid "Unexpected immediate completion: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1097 #, c-format msgid "LIST/MLST failed: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1102 msgid "LIST/MLST failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1152 msgid "MLSD is not supported - trying NLST" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1166 #, c-format msgid "Immediate completion: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1174 #, c-format msgid "NLST/MLSD failed: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1180 msgid "NLST/MLSD failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1201 #, c-format msgid "Data transfer aborted: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1206 msgid "Data transfer aborted" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1218 msgid "Failed to transfer data" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:391 #: src/hed/dmc/http/DataPointHTTP.cpp:520 #: src/hed/dmc/http/DataPointHTTP.cpp:601 #: src/hed/dmc/http/DataPointHTTP.cpp:1003 #: 
src/hed/dmc/http/DataPointHTTP.cpp:1147 #: src/hed/dmc/http/DataPointHTTP.cpp:1292 #, c-format msgid "Redirecting to %s" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:673 #, c-format msgid "Stat: obtained size %llu" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:677 #, c-format msgid "Stat: obtained modification time %s" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:906 #, c-format msgid "Check: obtained size %llu" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:908 #, c-format msgid "Check: obtained modification time %s" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:1020 #: src/hed/dmc/http/DataPointHTTP.cpp:1167 #, c-format msgid "HTTP failure %u - %s" msgstr "" #: src/hed/dmc/ldap/DataPointLDAP.cpp:36 msgid "" "Missing reference to factory and/or module. Currently safe unloading of LDAP " "DMC is not supported. Report to developers." msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:151 msgid "SASL Interaction" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:199 #, c-format msgid "Challenge: %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:203 #, c-format msgid "Default: %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:279 #, c-format msgid "LDAPQuery: Initializing connection to %s:%d" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:283 #, c-format msgid "LDAP connection already open to %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:297 #, c-format msgid "Could not open LDAP connection to %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:318 #, c-format msgid "Failed to create ldap bind thread (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:325 #, c-format msgid "Ldap bind timeout (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:332 #, c-format msgid "Failed to bind to ldap server (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:353 #, c-format msgid "Could not set LDAP network timeout (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:361 #, c-format msgid "Could not set LDAP timelimit (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:368 #, c-format msgid "Could not set LDAP protocol version (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:436 #, c-format msgid "LDAPQuery: Querying %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:438 #, c-format msgid " base dn: %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:440 #, c-format msgid " filter: %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:442 msgid " attributes:" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:445 #: src/services/gridftpd/misc/ldapquery.cpp:399 #, c-format msgid " %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:482 src/hed/dmc/ldap/LDAPQuery.cpp:548 #, c-format msgid "%s (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:506 #, c-format msgid "LDAPQuery: Getting results from %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:509 #, c-format msgid "Error: no LDAP query started to %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:543 #, c-format msgid "LDAP query timed out: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:23 #, c-format msgid "Replacing existing token for %s in Rucio token cache" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:36 #, c-format msgid "Found existing token for %s in Rucio token cache with expiry time %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:39 #, c-format msgid "Rucio token for %s has expired or is about to expire" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:101 #, c-format msgid "Extracted nickname %s from credentials to use for RUCIO_ACCOUNT" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:104 msgid "Failed to extract VOMS nickname from proxy" msgstr "" #: 
src/hed/dmc/rucio/DataPointRucio.cpp:106 #, c-format msgid "Using Rucio account %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:146 #, c-format msgid "" "Bad path for %s: Rucio supports read/write at /objectstores and read-only " "at /replicas" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:162 #: src/services/candypond/CandyPond.cpp:140 #: src/services/candypond/CandyPond.cpp:347 #, c-format msgid "Can't handle URL %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:310 #, c-format msgid "Acquired auth token for %s: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:367 #, c-format msgid "Rucio returned %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:393 #, fuzzy, c-format msgid "Failed to parse Rucio response: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/rucio/DataPointRucio.cpp:399 #, c-format msgid "Filename not returned in Rucio response: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:405 #, c-format msgid "Unexpected name returned in Rucio response: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:411 #, fuzzy, c-format msgid "No pfns returned in Rucio response: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/rucio/DataPointRucio.cpp:422 #, c-format msgid "Cannot determine replica type for %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:424 #, c-format msgid "%s: replica type %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:427 #, c-format msgid "Skipping %s replica %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:447 #, c-format msgid "No filesize information returned in Rucio response for %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:450 #, c-format msgid "%s: size %llu" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:454 #, c-format msgid "No checksum information returned in Rucio response for %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:457 #, c-format msgid "%s: checksum %s" msgstr "" #: src/hed/dmc/s3/DataPointS3.cpp:621 #, fuzzy, c-format msgid "Failed to write object %s: %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/hed/dmc/srm/DataPointSRM.cpp:59 #, c-format msgid "TURL %s cannot be handled" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:86 #, c-format msgid "Check: looking for metadata: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:97 #, c-format msgid "Check: obtained size: %lli" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:103 #, c-format msgid "Check: obtained checksum: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:107 #, c-format msgid "Check: obtained modification date: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:111 msgid "Check: obtained access latency: low (ONLINE)" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:115 msgid "Check: obtained access latency: high (NEARLINE)" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:134 #, c-format msgid "Remove: deleting: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:152 #, c-format msgid "Creating directory: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:200 src/hed/dmc/srm/DataPointSRM.cpp:249 msgid "Calling PrepareReading when request was already prepared!" 
msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:220 #, c-format msgid "File %s is NEARLINE, will make request to bring online" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:229 #, c-format msgid "Bring online request %s is still in queue, should wait" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:234 #, c-format msgid "Bring online request %s finished successfully, file is now ONLINE" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:240 #, c-format msgid "" "Bad logic for %s - bringOnline returned ok but SRM request is not finished " "successfully or on going" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:268 src/hed/dmc/srm/DataPointSRM.cpp:411 msgid "None of the requested transfer protocols are supported" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:281 #, c-format msgid "Get request %s is still in queue, should wait %i seconds" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:289 src/hed/dmc/srm/DataPointSRM.cpp:468 #, c-format msgid "Checking URL returned by SRM: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:304 src/hed/dmc/srm/DataPointSRM.cpp:483 #, c-format msgid "SRM returned no useful Transfer URLs: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:311 #, c-format msgid "" "Bad logic for %s - getTURLs returned ok but SRM request is not finished " "successfully or on going" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:319 msgid "StartReading" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:321 msgid "StartReading: File was not prepared properly" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:331 src/hed/dmc/srm/DataPointSRM.cpp:510 #, c-format msgid "Redirecting to new URL: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:392 msgid "Calling PrepareWriting when request was already prepared!" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:421 msgid "No space token specified" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:427 msgid "Warning: Using SRM protocol v1 which does not support space tokens" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:430 #, c-format msgid "Using space token description %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:436 #, c-format msgid "Error looking up space tokens matching description %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:440 #, c-format msgid "No space tokens found matching description %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:445 #, c-format msgid "Using space token %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:460 #, c-format msgid "Put request %s is still in queue, should wait %i seconds" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:490 #, c-format msgid "" "Bad logic for %s - putTURLs returned ok but SRM request is not finished " "successfully or on going" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:498 msgid "StartWriting" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:500 msgid "StartWriting: File was not prepared properly" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:559 #, c-format msgid "FinishWriting: looking for metadata: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:574 #, c-format msgid "FinishWriting: obtained checksum: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:577 #, c-format msgid "" "Calculated/supplied transfer checksum %s matches checksum reported by SRM " "destination %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:580 #, c-format msgid "" "Checksum mismatch between calculated/supplied checksum (%s) and checksum " "reported by SRM destination (%s)" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:583 #, c-format msgid "" "Checksum type of SRM (%s) and calculated/supplied checksum (%s) differ, " 
"cannot compare" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:584 src/hed/dmc/srm/DataPointSRM.cpp:585 msgid "No checksum information from server" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:586 src/hed/dmc/srm/DataPointSRM.cpp:587 msgid "No checksum verification possible" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:593 #, fuzzy msgid "Failed to release completed request" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/srm/DataPointSRM.cpp:636 src/hed/dmc/srm/DataPointSRM.cpp:703 #, c-format msgid "ListFiles: looking for metadata: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:821 #, c-format msgid "plugin for transport protocol %s is not installed" msgstr "" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:51 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:90 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:142 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:181 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:221 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:259 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:303 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:365 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:438 msgid "SRM did not return any information" msgstr "" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:316 #, c-format msgid "File could not be moved to Running state: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:372 msgid "SRM did not return any useful information" msgstr "" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:450 msgid "File could not be moved to Done state" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:88 msgid "Could not determine version of server" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:94 #, c-format msgid "Server SRM version: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:99 #, c-format msgid "Server implementation: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:136 #, c-format msgid "Adding space token %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:163 msgid "No request tokens found" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:176 #, c-format msgid "Adding request token %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:237 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:642 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:828 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1385 #, c-format msgid "%s: File request %s in SRM queue. Sleeping for %i seconds" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:275 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:327 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:698 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:764 #, c-format msgid "File is ready! TURL is %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:359 #, c-format msgid "Setting userRequestDescription to %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:414 #, c-format msgid "%s: Bring online request %s in SRM queue. Sleeping for %i seconds" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:457 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1160 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1194 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1228 msgid "No request token specified!" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:524 msgid "Request is reported as ABORTED, but all files are done" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:530 msgid "Request is reported as ABORTED, since it was cancelled" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:536 #, c-format msgid "Request is reported as ABORTED. 
Reason: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:673 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:745 #, c-format msgid "Path %s is invalid, creating required directories" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:678 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:750 #, c-format msgid "Error creating required directories for %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:851 msgid "Too many files in one request - please try again with fewer files" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:899 msgid "" "Directory size is too large to list in one call, will have to call multiple " "times" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:936 msgid "" "Failure in parsing response from server - some information may be inaccurate" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:942 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:566 #: src/services/gridftpd/misc/ldapquery.cpp:183 #: src/services/gridftpd/misc/ldapquery.cpp:186 #: src/services/gridftpd/misc/ldapquery.cpp:392 #: src/services/gridftpd/misc/ldapquery.cpp:622 #: src/services/gridftpd/misc/ldapquery.cpp:631 #, c-format msgid "%s: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:975 #, c-format msgid "" "Directory size is larger than %i files, will have to call multiple times" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1185 #, c-format msgid "Files associated with request token %s released successfully" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1219 #, c-format msgid "Files associated with request token %s put done successfully" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1254 #, c-format msgid "Files associated with request token %s aborted successfully" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1271 #, c-format msgid "" "Failed to find metadata info on %s for determining file or directory delete" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1277 msgid "Type is file, calling srmRm" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1281 msgid "Type is dir, calling srmRmDir" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1285 msgid "File type is not available, attempting file delete" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1288 msgid "File delete failed, attempting directory delete" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1313 #, c-format msgid "File %s removed successfully" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1340 #, c-format msgid "Directory %s removed successfully" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1455 #, c-format msgid "Checking for existence of %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1458 #, c-format msgid "File already exists: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1495 #, c-format msgid "Error creating directory %s: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:82 #, c-format msgid "Attempting to contact %s on port %i" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:88 #, c-format msgid "Storing port %i for %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:102 #, c-format msgid "No port succeeded for %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:112 #, c-format msgid "URL %s disagrees with stored SRM info, testing new info" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:118 #, c-format msgid "Replacing old SRM info with new for URL %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:140 #, fuzzy, c-format msgid "SOAP 
request: %s" msgstr "Kérés: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:147 #: src/hed/dmc/srm/srmclient/SRMClient.cpp:176 #, c-format msgid "SOAP fault: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:148 msgid "Reconnecting" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:158 #, c-format msgid "SRM Client status: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:164 msgid "No SOAP response" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:171 #: src/hed/identitymap/ArgusPDPClient.cpp:250 #, fuzzy, c-format msgid "SOAP response: %s" msgstr "Válasz: %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:76 #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:162 #, fuzzy, c-format msgid "Failed to acquire lock on file %s" msgstr "Nem sikerült listázni a fájlokat" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:81 #, fuzzy, c-format msgid "Error reading info from file %s:%s" msgstr "Nem tudom olvasni a forrásokat a fájlból: %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:95 #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:187 #, c-format msgid "Bad or old format detected in file %s, in line %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:100 #, c-format msgid "Cannot convert string %s to int in line %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:203 #, c-format msgid "Error writing srm info file %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:64 msgid "" "Missing reference to factory and/or module. It is unsafe to use Xrootd in " "non-persistent mode - Xrootd code is disabled. Report to developers." msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:103 #, c-format msgid "Could not handle checksum %s: skip checksum check" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:109 #, fuzzy, c-format msgid "Failed to create xrootd copy job: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:126 #, fuzzy, c-format msgid "Failed to copy %s: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:177 #, c-format msgid "Reading %u bytes from byte %llu" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:179 #, c-format msgid "Read %i bytes" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:210 #, c-format msgid "Could not open file %s for reading: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:225 #, c-format msgid "Unable to find file size of %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:289 #, c-format msgid "DataPointXrootd::write_file got position %d and offset %d, has to seek" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:312 #, c-format msgid "xrootd write failed: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:321 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:395 #, c-format msgid "xrootd close failed: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:344 #, c-format msgid "Failed to open %s, trying to create parent directories" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:357 #, c-format msgid "xrootd open failed: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:371 #, fuzzy, c-format msgid "close failed: %s" msgstr "Fájl feltöltve %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:413 #, c-format msgid "Read access not allowed for %s: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:432 #, c-format msgid "Could not stat file %s: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:437 msgid "Not getting checksum of zip constituent" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:441 #, c-format msgid "Could not get checksum of %s: %s" msgstr "" #: 
src/hed/dmc/xrootd/DataPointXrootd.cpp:445 #, c-format msgid "Checksum %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:483 #, fuzzy, c-format msgid "Failed to open directory %s: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:501 #, c-format msgid "Error while reading dir %s: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:551 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:569 #, c-format msgid "Error creating required dirs: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:158 msgid "PDPD location is missing" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:161 #, fuzzy, c-format msgid "PDPD location: %s" msgstr "Célállomás: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:166 #: src/hed/identitymap/ArgusPEPClient.cpp:129 msgid "Conversion mode is set to SUBJECT" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:169 #: src/hed/identitymap/ArgusPEPClient.cpp:132 msgid "Conversion mode is set to CREAM" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:172 #: src/hed/identitymap/ArgusPEPClient.cpp:135 msgid "Conversion mode is set to EMI" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:175 #: src/hed/identitymap/ArgusPEPClient.cpp:138 #, c-format msgid "Unknown conversion mode %s, using default" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:242 #, c-format msgid "Failed to contact PDP server: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:245 #, fuzzy, c-format msgid "There was no SOAP response return from PDP server: %s" msgstr "Nincs SOAP-os válasz" #: src/hed/identitymap/ArgusPDPClient.cpp:360 #: src/hed/identitymap/ArgusPEPClient.cpp:286 #, c-format msgid "Have %i requests to process" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:362 msgid "Creating a client to Argus PDP service" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:375 #, fuzzy, c-format msgid "XACML authorisation request: %s" msgstr "A feladat megszakítása sikeres" #: src/hed/identitymap/ArgusPDPClient.cpp:386 #, c-format msgid "XACML authorisation response: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:426 #, c-format msgid "%s is not authorized to do action %s in resource %s " msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:429 #: src/hed/identitymap/ArgusPDPClient.cpp:434 #: src/hed/identitymap/ArgusPEPClient.cpp:336 msgid "Not authorized" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:439 #: src/hed/identitymap/ArgusPEPClient.cpp:341 #: src/hed/identitymap/IdentityMap.cpp:219 src/hed/shc/legacy/LegacyMap.cpp:216 #, c-format msgid "Grid identity is mapped to local identity '%s'" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:566 #: src/hed/identitymap/ArgusPEPClient.cpp:655 msgid "Doing CREAM request" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:580 #: src/hed/identitymap/ArgusPDPClient.cpp:748 #: src/hed/identitymap/ArgusPEPClient.cpp:683 #, c-format msgid "Adding profile-id value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:592 #: src/hed/identitymap/ArgusPDPClient.cpp:759 #: src/hed/identitymap/ArgusPEPClient.cpp:694 #, c-format msgid "Adding subject-id value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:600 #: src/hed/identitymap/ArgusPDPClient.cpp:767 #: src/hed/identitymap/ArgusPEPClient.cpp:704 #, c-format msgid "Adding subject-issuer value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:607 #: src/hed/identitymap/ArgusPEPClient.cpp:713 #, c-format msgid "Adding virtual-organization value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:620 #: 
src/hed/identitymap/ArgusPEPClient.cpp:730 #, c-format msgid "Adding FQAN value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:629 #: src/hed/identitymap/ArgusPEPClient.cpp:739 #, c-format msgid "Adding FQAN/primary value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:639 #: src/hed/identitymap/ArgusPEPClient.cpp:750 #, c-format msgid "Adding cert chain value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:648 #: src/hed/identitymap/ArgusPDPClient.cpp:840 #: src/hed/identitymap/ArgusPEPClient.cpp:760 #, c-format msgid "Adding resource-id value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:662 #: src/hed/identitymap/ArgusPDPClient.cpp:863 #: src/hed/identitymap/ArgusPEPClient.cpp:775 #, c-format msgid "Adding action-id value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:666 #: src/hed/identitymap/ArgusPEPClient.cpp:786 #, c-format msgid "CREAM request generation failed: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:732 msgid "Doing EMI request" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:774 #, c-format msgid "Adding Virtual Organization value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:797 #, c-format msgid "Adding VOMS group value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:803 #, c-format msgid "Adding VOMS primary group value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:822 #, c-format msgid "Adding VOMS role value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:829 #, c-format msgid "Adding VOMS primary role value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:846 #, c-format msgid "Adding resource-owner value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:867 #, c-format msgid "EMI request generation failed: %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:119 msgid "PEPD location is missing" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:122 #, fuzzy, c-format msgid "PEPD location: %s" msgstr "Célállomás: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:126 msgid "Conversion mode is set to DIRECT" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:331 #, c-format msgid "" "Not authorized according to request:\n" "%s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:333 #, c-format msgid "%s is not authorized to do action %s in resource %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:361 msgid "Subject of request is null" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:366 #, c-format msgid "Can not create XACML SubjectAttribute: %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:375 #, fuzzy msgid "Can not create XACML Resource" msgstr "Nem tudom importálni az arc modult" #: src/hed/identitymap/ArgusPEPClient.cpp:381 #, c-format msgid "Can not create XACML ResourceAttribute: %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:390 msgid "Can not create XACML Action" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:397 #, c-format msgid "Can not create XACML ActionAttribute: %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:407 msgid "Can not create XACML request" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:539 #, c-format msgid "Converting to CREAM action - namespace: %s, operation: %s" msgstr "" #: src/hed/identitymap/IdentityMap.cpp:196 #, c-format msgid "PDP: %s can not be loaded" msgstr "" #: src/hed/libs/common/ArcLocation.cpp:128 #, c-format msgid "" "Can not determine the install location. Using %s. Please set ARC_LOCATION if " "this is not correct." 
msgstr "" #: src/hed/libs/common/DateTime.cpp:86 src/hed/libs/common/DateTime.cpp:631 #: src/hed/libs/common/StringConv.h:25 msgid "Empty string" msgstr "" #: src/hed/libs/common/DateTime.cpp:107 #, c-format msgid "Can not parse date: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:130 #, c-format msgid "Can not parse time: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:160 #, c-format msgid "Can not parse time zone offset: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:180 src/hed/libs/common/DateTime.cpp:199 #: src/hed/libs/common/DateTime.cpp:252 src/hed/libs/common/DateTime.cpp:291 #, c-format msgid "Illegal time format: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:230 src/hed/libs/common/DateTime.cpp:283 #, c-format msgid "Can not parse month: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:647 src/hed/libs/common/DateTime.cpp:688 #, c-format msgid "Invalid ISO duration format: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:752 #, c-format msgid "Invalid period string: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:874 msgid "hour" msgid_plural "hours" msgstr[0] "" msgstr[1] "" #: src/hed/libs/common/DateTime.cpp:880 msgid "minute" msgid_plural "minutes" msgstr[0] "" msgstr[1] "" #: src/hed/libs/common/DateTime.cpp:886 msgid "second" msgid_plural "seconds" msgstr[0] "" msgstr[1] "" #: src/hed/libs/common/FileLock.cpp:43 msgid "Cannot determine hostname from gethostname()" msgstr "" #: src/hed/libs/common/FileLock.cpp:92 #, c-format msgid "EACCES Error opening lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:97 #, c-format msgid "Error opening lock file %s in initial check: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:104 #, c-format msgid "Error creating temporary file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:113 #, c-format msgid "Could not create link to lock file %s as it already exists" msgstr "" #: src/hed/libs/common/FileLock.cpp:124 #, c-format msgid "Could not create lock file %s as it already exists" msgstr "" #: src/hed/libs/common/FileLock.cpp:128 #, c-format msgid "Error creating lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:133 #, c-format msgid "Error writing to lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:141 #, c-format msgid "Error linking tmp file %s to lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:150 #, c-format msgid "Error in lock file %s, even though linking did not return an error" msgstr "" #: src/hed/libs/common/FileLock.cpp:159 #, c-format msgid "%li seconds since lock file %s was created" msgstr "" #: src/hed/libs/common/FileLock.cpp:162 #, c-format msgid "Timeout has expired, will remove lock file %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:166 #, c-format msgid "Failed to remove stale lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:178 #, c-format msgid "This process already owns the lock on %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:183 #, c-format msgid "" "The process owning the lock on %s is no longer running, will remove lock" msgstr "" #: src/hed/libs/common/FileLock.cpp:185 #, fuzzy, c-format msgid "Failed to remove file %s: %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/hed/libs/common/FileLock.cpp:193 #, c-format msgid "The file %s is currently locked with a valid lock" msgstr "" #: src/hed/libs/common/FileLock.cpp:208 #, c-format msgid "Failed to unlock file with lock %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:220 #, c-format msgid "Lock file %s doesn't exist" msgstr "" #: 
src/hed/libs/common/FileLock.cpp:222 #, c-format msgid "Error listing lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:228 #, c-format msgid "Found unexpected empty lock file %s. Must go back to acquire()" msgstr "" #: src/hed/libs/common/FileLock.cpp:234 #, c-format msgid "Error reading lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:238 #, c-format msgid "Error with formatting in lock file %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:248 #, c-format msgid "Lock %s is owned by a different host (%s)" msgstr "" #: src/hed/libs/common/FileLock.cpp:257 #, c-format msgid "Badly formatted pid %s in lock file %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:260 #, c-format msgid "Another process (%s) owns the lock on file %s" msgstr "" #: src/hed/libs/common/IString.cpp:32 src/hed/libs/common/IString.cpp:41 #: src/hed/libs/common/IString.cpp:42 msgid "(empty)" msgstr "" #: src/hed/libs/common/IString.cpp:32 src/hed/libs/common/IString.cpp:41 #: src/hed/libs/common/IString.cpp:42 msgid "(null)" msgstr "" #: src/hed/libs/common/Logger.cpp:58 #, c-format msgid "Invalid log level. Using default %s." msgstr "" #: src/hed/libs/common/Logger.cpp:123 #, c-format msgid "Invalid old log level. Using default %s." msgstr "" #: src/hed/libs/common/OptionParser.cpp:106 #, c-format msgid "Cannot parse integer value '%s' for -%c" msgstr "" #: src/hed/libs/common/OptionParser.cpp:309 #: src/hed/libs/common/OptionParser.cpp:446 #, c-format msgid "Options Group %s:" msgstr "" #: src/hed/libs/common/OptionParser.cpp:311 #: src/hed/libs/common/OptionParser.cpp:449 #, c-format msgid "%s:" msgstr "" #: src/hed/libs/common/OptionParser.cpp:313 #, c-format msgid "Show %s help options" msgstr "" #: src/hed/libs/common/OptionParser.cpp:342 #, fuzzy msgid "Failed to parse command line options" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/common/OptionParser.cpp:352 msgid "Use -? to get usage description" msgstr "" #: src/hed/libs/common/OptionParser.cpp:429 msgid "Usage:" msgstr "" #: src/hed/libs/common/OptionParser.cpp:432 msgid "OPTION..." msgstr "" #: src/hed/libs/common/OptionParser.cpp:438 msgid "Help Options:" msgstr "" #: src/hed/libs/common/OptionParser.cpp:439 msgid "Show help options" msgstr "" #: src/hed/libs/common/Profile.cpp:199 src/hed/libs/common/Profile.cpp:273 #: src/hed/libs/common/Profile.cpp:404 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"inisections\" " "attribute cannot be the empty string." msgstr "" #: src/hed/libs/common/Profile.cpp:205 src/hed/libs/common/Profile.cpp:279 #: src/hed/libs/common/Profile.cpp:411 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"initag\" attribute " "cannot be the empty string." msgstr "" #: src/hed/libs/common/Profile.cpp:419 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"initype\" " "attribute cannot be the empty string." msgstr "" #: src/hed/libs/common/Profile.cpp:422 #, c-format msgid "" "Element \"%s\" in the profile ignored: the \"inidefaultvalue\" attribute " "cannot be specified when the \"inisections\" and \"initag\" attributes have " "not been specified." msgstr "" #: src/hed/libs/common/Profile.cpp:497 #, c-format msgid "" "In the configuration profile the 'initype' attribute on the \"%s\" element " "has a invalid value \"%s\"." 
msgstr "" #: src/hed/libs/common/Run_unix.cpp:226 msgid "Child monitoring signal detected" msgstr "" #: src/hed/libs/common/Run_unix.cpp:231 #, c-format msgid "Child monitoring error: %i" msgstr "" #: src/hed/libs/common/Run_unix.cpp:244 msgid "Child monitoring kick detected" msgstr "" #: src/hed/libs/common/Run_unix.cpp:247 msgid "Child monitoring internal communication error" msgstr "" #: src/hed/libs/common/Run_unix.cpp:259 msgid "Child monitoring stdout is closed" msgstr "" #: src/hed/libs/common/Run_unix.cpp:269 msgid "Child monitoring stderr is closed" msgstr "" #: src/hed/libs/common/Run_unix.cpp:279 msgid "Child monitoring stdin is closed" msgstr "" #: src/hed/libs/common/Run_unix.cpp:297 #, c-format msgid "Child monitoring child %d exited" msgstr "" #: src/hed/libs/common/Run_unix.cpp:301 #, c-format msgid "Child monitoring lost child %d (%d)" msgstr "" #: src/hed/libs/common/Run_unix.cpp:322 #, c-format msgid "Child monitoring drops abandoned child %d (%d)" msgstr "" #: src/hed/libs/common/Run_unix.cpp:483 msgid "Child was already started" msgstr "" #: src/hed/libs/common/Run_unix.cpp:487 msgid "No arguments are assigned for external process" msgstr "" #: src/hed/libs/common/Run_unix.cpp:620 src/hed/libs/common/Run_unix.cpp:625 #, c-format msgid "Excepton while trying to start external process: %s" msgstr "" #: src/hed/libs/common/StringConv.h:31 #, c-format msgid "Conversion failed: %s" msgstr "" #: src/hed/libs/common/StringConv.h:35 #, c-format msgid "Full string not used: %s" msgstr "" #: src/hed/libs/common/Thread.cpp:261 msgid "Maximum number of threads running - putting new request into queue" msgstr "" #: src/hed/libs/common/Thread.cpp:309 #, c-format msgid "Thread exited with Glib error: %s" msgstr "" #: src/hed/libs/common/Thread.cpp:311 #, c-format msgid "Thread exited with Glib exception: %s" msgstr "" #: src/hed/libs/common/Thread.cpp:313 #, c-format msgid "Thread exited with generic exception: %s" msgstr "" #: src/hed/libs/common/URL.cpp:137 #, c-format msgid "URL is not valid: %s" msgstr "" #: src/hed/libs/common/URL.cpp:188 #, c-format msgid "Illegal URL - path must be absolute: %s" msgstr "" #: src/hed/libs/common/URL.cpp:193 #, c-format msgid "Illegal URL - no hostname given: %s" msgstr "" #: src/hed/libs/common/URL.cpp:282 #, c-format msgid "Illegal URL - path must be absolute or empty: %s" msgstr "" #: src/hed/libs/common/URL.cpp:298 #, c-format msgid "Illegal URL - no closing ] for IPv6 address found: %s" msgstr "" #: src/hed/libs/common/URL.cpp:306 #, c-format msgid "" "Illegal URL - closing ] for IPv6 address is followed by illegal token: %s" msgstr "" #: src/hed/libs/common/URL.cpp:322 #, c-format msgid "Invalid port number in %s" msgstr "" #: src/hed/libs/common/URL.cpp:453 #, c-format msgid "Unknown LDAP scope %s - using base" msgstr "" #: src/hed/libs/common/URL.cpp:616 msgid "Attempt to assign relative path to URL - making it absolute" msgstr "" #: src/hed/libs/common/URL.cpp:715 #, c-format msgid "URL option %s does not have format name=value" msgstr "" #: src/hed/libs/common/URL.cpp:1180 #, c-format msgid "urllist %s contains invalid URL: %s" msgstr "" #: src/hed/libs/common/URL.cpp:1185 #, c-format msgid "URL protocol is not urllist: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:32 src/hed/libs/common/UserConfig.cpp:781 #: src/hed/libs/common/UserConfig.cpp:790 #: src/hed/libs/common/UserConfig.cpp:796 #: src/hed/libs/common/UserConfig.cpp:822 #: src/hed/libs/common/UserConfig.cpp:832 #: src/hed/libs/common/UserConfig.cpp:844 #: 
src/hed/libs/common/UserConfig.cpp:864 #, c-format msgid "Multiple %s attributes in configuration file (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:132 #, fuzzy, c-format msgid "Wrong ownership of certificate file: %s" msgstr "publikus kulcs elérési útvonala" #: src/hed/libs/common/UserConfig.cpp:134 #, fuzzy, c-format msgid "Wrong permissions of certificate file: %s" msgstr "publikus kulcs elérési útvonala" #: src/hed/libs/common/UserConfig.cpp:136 #, c-format msgid "Can not access certificate file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:143 #, c-format msgid "Wrong ownership of key file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:145 #, c-format msgid "Wrong permissions of key file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:147 #, c-format msgid "Can not access key file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:154 #, c-format msgid "Wrong ownership of proxy file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:156 #, c-format msgid "Wrong permissions of proxy file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:158 #, c-format msgid "Can not access proxy file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:169 msgid "computing" msgstr "" #: src/hed/libs/common/UserConfig.cpp:171 msgid "index" msgstr "" #: src/hed/libs/common/UserConfig.cpp:270 #: src/hed/libs/common/UserConfig.cpp:274 #: src/hed/libs/common/UserConfig.cpp:321 #: src/hed/libs/common/UserConfig.cpp:325 #, c-format msgid "System configuration file (%s) contains errors." msgstr "" #: src/hed/libs/common/UserConfig.cpp:278 #: src/hed/libs/common/UserConfig.cpp:329 #, c-format msgid "System configuration file (%s or %s) does not exist." msgstr "" #: src/hed/libs/common/UserConfig.cpp:280 #: src/hed/libs/common/UserConfig.cpp:331 #, c-format msgid "System configuration file (%s) does not exist." msgstr "" #: src/hed/libs/common/UserConfig.cpp:286 #: src/hed/libs/common/UserConfig.cpp:298 #: src/hed/libs/common/UserConfig.cpp:337 #: src/hed/libs/common/UserConfig.cpp:349 #, c-format msgid "User configuration file (%s) contains errors." msgstr "" #: src/hed/libs/common/UserConfig.cpp:291 #: src/hed/libs/common/UserConfig.cpp:342 msgid "No configuration file could be loaded." msgstr "" #: src/hed/libs/common/UserConfig.cpp:294 #: src/hed/libs/common/UserConfig.cpp:345 #, c-format msgid "User configuration file (%s) does not exist or cannot be loaded." msgstr "" #: src/hed/libs/common/UserConfig.cpp:406 #, c-format msgid "" "Unable to parse the specified verbosity (%s) to one of the allowed levels" msgstr "" #: src/hed/libs/common/UserConfig.cpp:418 #, c-format msgid "" "Unsupported job list type '%s', using 'BDB'. Supported types are: BDB, " "SQLITE, XML." msgstr "" #: src/hed/libs/common/UserConfig.cpp:463 msgid "Loading OToken failed - ignoring its presence" msgstr "" #: src/hed/libs/common/UserConfig.cpp:604 #, c-format msgid "Certificate and key ('%s' and '%s') not found in any of the paths: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:606 #, c-format msgid "" "If the proxy or certificate/key does exist, you can manually specify the " "locations via environment variables '%s'/'%s' or '%s', or the '%s'/'%s' or " "'%s' attributes in the client configuration file (e.g. '%s')" msgstr "" #: src/hed/libs/common/UserConfig.cpp:623 #: src/hed/libs/common/UserConfig.cpp:633 #, c-format msgid "" "Can not access CA certificate directory: %s. The certificates will not be " "verified." 
msgstr "" #: src/hed/libs/common/UserConfig.cpp:659 #, c-format msgid "" "Can not find CA certificates directory in default locations:\n" "~/.arc/certificates, ~/.globus/certificates,\n" "%s/etc/certificates, %s/etc/grid-security/certificates,\n" "%s/share/certificates, /etc/grid-security/certificates.\n" "The certificate will not be verified.\n" "If the CA certificates directory does exist, please manually specify the " "locations via env\n" "X509_CERT_DIR, or the cacertificatesdirectory item in client.conf\n" msgstr "" #: src/hed/libs/common/UserConfig.cpp:680 #, c-format msgid "Using proxy file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:683 #, c-format msgid "Using certificate file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:684 #, c-format msgid "Using key file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:688 #, c-format msgid "Using CA certificate directory: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:692 msgid "Using OToken" msgstr "" #: src/hed/libs/common/UserConfig.cpp:705 #: src/hed/libs/common/UserConfig.cpp:711 #, c-format msgid "Can not access VOMSES file/directory: %s." msgstr "" #: src/hed/libs/common/UserConfig.cpp:717 #, c-format msgid "Can not access VOMS file/directory: %s." msgstr "" #: src/hed/libs/common/UserConfig.cpp:731 msgid "" "Can not find voms service configuration file (vomses) in default locations: " "~/.arc/vomses, ~/.voms/vomses, $ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/" "grid-security/vomses, $PWD/vomses, /etc/vomses, /etc/grid-security/vomses" msgstr "" #: src/hed/libs/common/UserConfig.cpp:744 #, c-format msgid "Loading configuration (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:778 #, c-format msgid "" "The value of the timeout attribute in the configuration file (%s) was only " "partially parsed" msgstr "" #: src/hed/libs/common/UserConfig.cpp:803 msgid "" "The brokerarguments attribute can only be used in conjunction with the " "brokername attribute" msgstr "" #: src/hed/libs/common/UserConfig.cpp:819 #, c-format msgid "" "The value of the keysize attribute in the configuration file (%s) was only " "partially parsed" msgstr "" #: src/hed/libs/common/UserConfig.cpp:839 #, c-format msgid "" "Could not convert the slcs attribute value (%s) to an URL instance in " "configuration file (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:885 #, c-format msgid "Specified overlay file (%s) does not exist." msgstr "" #: src/hed/libs/common/UserConfig.cpp:889 #, c-format msgid "" "Unknown attribute %s in common section of configuration file (%s), ignoring " "it" msgstr "" #: src/hed/libs/common/UserConfig.cpp:930 #, c-format msgid "Unknown section %s, ignoring it" msgstr "" #: src/hed/libs/common/UserConfig.cpp:934 #, c-format msgid "Configuration (%s) loaded" msgstr "" #: src/hed/libs/common/UserConfig.cpp:937 #, c-format msgid "Could not load configuration (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1032 #, c-format msgid "UserConfiguration saved to file (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1045 #, c-format msgid "Unable to create %s directory." 
msgstr "" #: src/hed/libs/common/UserConfig.cpp:1054 #, c-format msgid "Configuration example file created (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1056 #, c-format msgid "Unable to copy example configuration from existing configuration (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1061 #, c-format msgid "Cannot copy example configuration (%s), it is not a regular file" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1066 #, c-format msgid "Example configuration (%s) not created." msgstr "" #: src/hed/libs/common/UserConfig.cpp:1071 #, c-format msgid "The default configuration file (%s) is not a regular file." msgstr "" #: src/hed/libs/common/UserConfig.cpp:1089 #, c-format msgid "%s directory created" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1091 #: src/hed/libs/common/UserConfig.cpp:1130 src/hed/libs/data/DataMover.cpp:679 #, c-format msgid "Failed to create directory %s" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:58 msgid "This VERBOSE message should not be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:62 msgid "This INFO message should be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:73 msgid "This VERBOSE message should now be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:79 msgid "This INFO message should also be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:93 msgid "This message goes to initial destination" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:108 msgid "This message goes to per-thread destination" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:80 msgid "Request failed: No response from SPService" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:84 #: src/hed/libs/communication/ClientSAML2SSO.cpp:137 msgid "Request failed: response from SPService is not as expected" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:92 #, c-format msgid "Authentication Request URL: %s" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:133 msgid "Request failed: No response from IdP" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:184 msgid "Request failed: No response from IdP when doing redirecting" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:188 msgid "" "Request failed: response from IdP is not as expected when doing redirecting" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:245 msgid "Request failed: No response from IdP when doing authentication" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:249 msgid "" "Request failed: response from IdP is not as expected when doing " "authentication" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:294 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:300 msgid "Succeeded to verify the signature under " msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:296 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:303 msgid "Failed to verify the signature under " msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:310 msgid "" "Request failed: No response from SP Service when sending SAML assertion to SP" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:314 msgid "" "Request failed: response from SP Service is not as expected when sending " "SAML assertion to SP" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:325 #, c-format msgid "IdP return some error message: %s" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:353 #: src/hed/libs/communication/ClientSAML2SSO.cpp:398 msgid "SAML2SSO process failed" msgstr "" #: 
src/hed/libs/communication/ClientX509Delegation.cpp:54 msgid "Creating delegation credential to ARC delegation service" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:64 #: src/hed/libs/communication/ClientX509Delegation.cpp:267 msgid "DelegateCredentialsInit failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:68 #: src/hed/libs/communication/ClientX509Delegation.cpp:122 #: src/hed/libs/communication/ClientX509Delegation.cpp:157 #: src/hed/libs/communication/ClientX509Delegation.cpp:212 #: src/hed/libs/communication/ClientX509Delegation.cpp:271 msgid "There is no SOAP response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:73 msgid "There is no X509 request in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:78 msgid "There is no Format request in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:86 msgid "There is no Id or X509 request value in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:99 #: src/hed/libs/communication/ClientX509Delegation.cpp:187 msgid "DelegateProxy failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:118 msgid "UpdateCredentials failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:126 msgid "There is no UpdateCredentialsResponse in response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:134 #: src/hed/libs/communication/ClientX509Delegation.cpp:162 #: src/hed/libs/communication/ClientX509Delegation.cpp:217 #: src/hed/libs/communication/ClientX509Delegation.cpp:302 msgid "There is no SOAP connection chain configured" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:140 msgid "Creating delegation to CREAM delegation service" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:153 msgid "Delegation getProxyReq request failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:173 msgid "Creating delegation to CREAM delegation service failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:208 msgid "Delegation putProxy request failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:222 msgid "Creating delegation to CREAM delegation failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:237 msgid "Getting delegation credential from ARC delegation service" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:276 msgid "There is no Delegated X509 token in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:281 msgid "There is no Format delegated token in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:289 msgid "There is no Id or X509 token value in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:298 #, c-format msgid "" "Get delegated credential from delegation service: \n" " %s" msgstr "" #: src/hed/libs/compute/Broker.cpp:62 #, c-format msgid "Performing matchmaking against target (%s)." msgstr "" #: src/hed/libs/compute/Broker.cpp:72 #, c-format msgid "Matchmaking, ExecutionTarget: %s matches job description" msgstr "" #: src/hed/libs/compute/Broker.cpp:145 #, c-format msgid "" "The CA issuer (%s) of the credentials (%s) is not trusted by the target (%s)." 
msgstr "" #: src/hed/libs/compute/Broker.cpp:153 src/hed/libs/compute/Broker.cpp:171 #, c-format msgid "ComputingShareName of ExecutionTarget (%s) is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:157 src/hed/libs/compute/Broker.cpp:162 #, c-format msgid "ComputingShare (%s) explicitly rejected" msgstr "" #: src/hed/libs/compute/Broker.cpp:175 src/hed/libs/compute/Broker.cpp:180 #, c-format msgid "ComputingShare (%s) does not match selected queue (%s)" msgstr "" #: src/hed/libs/compute/Broker.cpp:189 #, c-format msgid "" "ProcessingStartTime (%s) specified in job description is inside the targets " "downtime period [ %s - %s ]." msgstr "" #: src/hed/libs/compute/Broker.cpp:194 #, c-format msgid "The downtime of the target (%s) is not published. Keeping target." msgstr "" #: src/hed/libs/compute/Broker.cpp:200 #, c-format msgid "HealthState of ExecutionTarget (%s) is not OK (%s)" msgstr "" #: src/hed/libs/compute/Broker.cpp:205 #, c-format msgid "Matchmaking, ExecutionTarget: %s, HealthState is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:212 #, c-format msgid "" "Matchmaking, Computing endpoint requirement not satisfied. ExecutionTarget: " "%s" msgstr "" #: src/hed/libs/compute/Broker.cpp:217 #, c-format msgid "Matchmaking, ExecutionTarget: %s, ImplementationName is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:243 #, c-format msgid "" "Matchmaking, %s (%d) is %s than %s (%d) published by the ExecutionTarget." msgstr "" #: src/hed/libs/compute/Broker.cpp:272 #, c-format msgid "" "Matchmaking, The %s scaled %s (%d) is %s than the %s (%d) published by the " "ExecutionTarget." msgstr "" #: src/hed/libs/compute/Broker.cpp:284 #, c-format msgid "Matchmaking, Benchmark %s is not published by the ExecutionTarget." msgstr "" #: src/hed/libs/compute/Broker.cpp:299 #, c-format msgid "" "Matchmaking, MaxTotalCPUTime problem, ExecutionTarget: %d (MaxTotalCPUTime), " "JobDescription: %d (TotalCPUTime)" msgstr "" #: src/hed/libs/compute/Broker.cpp:306 #, c-format msgid "" "Matchmaking, MaxCPUTime problem, ExecutionTarget: %d (MaxCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" msgstr "" #: src/hed/libs/compute/Broker.cpp:311 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxTotalCPUTime or MaxCPUTime not " "defined, assuming no CPU time limit" msgstr "" #: src/hed/libs/compute/Broker.cpp:317 #, c-format msgid "" "Matchmaking, MinCPUTime problem, ExecutionTarget: %d (MinCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" msgstr "" #: src/hed/libs/compute/Broker.cpp:322 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MinCPUTime not defined, assuming no CPU " "time limit" msgstr "" #: src/hed/libs/compute/Broker.cpp:330 #, c-format msgid "" "Matchmaking, MainMemorySize problem, ExecutionTarget: %d (MainMemorySize), " "JobDescription: %d (IndividualPhysicalMemory)" msgstr "" #: src/hed/libs/compute/Broker.cpp:336 #, c-format msgid "" "Matchmaking, MaxMainMemory problem, ExecutionTarget: %d (MaxMainMemory), " "JobDescription: %d (IndividualPhysicalMemory)" msgstr "" #: src/hed/libs/compute/Broker.cpp:341 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxMainMemory and MainMemorySize are not " "defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:349 #, c-format msgid "" "Matchmaking, MaxVirtualMemory problem, ExecutionTarget: %d " "(MaxVirtualMemory), JobDescription: %d (IndividualVirtualMemory)" msgstr "" #: src/hed/libs/compute/Broker.cpp:354 #, c-format msgid "Matchmaking, ExecutionTarget: %s, MaxVirtualMemory is not 
defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:362 #, c-format msgid "" "Matchmaking, Platform problem, ExecutionTarget: %s (Platform) " "JobDescription: %s (Platform)" msgstr "" #: src/hed/libs/compute/Broker.cpp:367 #, c-format msgid "Matchmaking, ExecutionTarget: %s, Platform is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:375 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, OperatingSystem requirements not satisfied" msgstr "" #: src/hed/libs/compute/Broker.cpp:380 #, c-format msgid "Matchmaking, ExecutionTarget: %s, OperatingSystem is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:388 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, RunTimeEnvironment requirements not " "satisfied" msgstr "" #: src/hed/libs/compute/Broker.cpp:393 #, c-format msgid "Matchmaking, ExecutionTarget: %s, ApplicationEnvironments not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:402 #, c-format msgid "" "Matchmaking, NetworkInfo demand not fulfilled, ExecutionTarget do not " "support %s, specified in the JobDescription." msgstr "" #: src/hed/libs/compute/Broker.cpp:406 #, c-format msgid "Matchmaking, ExecutionTarget: %s, NetworkInfo is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:414 #, c-format msgid "" "Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (SessionDiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:421 #, c-format msgid "" "Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (SessionDiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:427 src/hed/libs/compute/Broker.cpp:448 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxDiskSpace and WorkingAreaFree are not " "defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:435 #, c-format msgid "" "Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (DiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:442 #, c-format msgid "" "Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (DiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:456 #, c-format msgid "" "Matchmaking, CacheTotal problem, ExecutionTarget: %d MB (CacheTotal); " "JobDescription: %d MB (CacheDiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:461 #, c-format msgid "Matchmaking, ExecutionTarget: %s, CacheTotal is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:469 #, c-format msgid "" "Matchmaking, TotalSlots problem, ExecutionTarget: %d (TotalSlots) " "JobDescription: %d (NumberOfProcesses)" msgstr "" #: src/hed/libs/compute/Broker.cpp:475 #, c-format msgid "" "Matchmaking, MaxSlotsPerJob problem, ExecutionTarget: %d (MaxSlotsPerJob) " "JobDescription: %d (NumberOfProcesses)" msgstr "" #: src/hed/libs/compute/Broker.cpp:481 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, TotalSlots and MaxSlotsPerJob are not " "defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:489 #, c-format msgid "" "Matchmaking, WorkingAreaLifeTime problem, ExecutionTarget: %s " "(WorkingAreaLifeTime) JobDescription: %s (SessionLifeTime)" msgstr "" #: src/hed/libs/compute/Broker.cpp:494 #, c-format msgid "Matchmaking, ExecutionTarget: %s, WorkingAreaLifeTime is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:502 #, c-format msgid "" "Matchmaking, ConnectivityIn problem, ExecutionTarget: %s (ConnectivityIn) " "JobDescription: %s (InBound)" msgstr "" #: src/hed/libs/compute/Broker.cpp:509 #, 
c-format msgid "" "Matchmaking, ConnectivityOut problem, ExecutionTarget: %s (ConnectivityOut) " "JobDescription: %s (OutBound)" msgstr "" #: src/hed/libs/compute/Broker.cpp:532 msgid "Unable to sort added jobs. The BrokerPlugin plugin has not been loaded." msgstr "" #: src/hed/libs/compute/Broker.cpp:549 msgid "Unable to match target, marking it as not matching. Broker not valid." msgstr "" #: src/hed/libs/compute/Broker.cpp:585 msgid "Unable to sort ExecutionTarget objects - Invalid Broker object." msgstr "" #: src/hed/libs/compute/Broker.cpp:609 msgid "" "Unable to register job submission. Can't get JobDescription object from " "Broker, Broker is invalid." msgstr "" #: src/hed/libs/compute/BrokerPlugin.cpp:89 #, c-format msgid "Broker plugin \"%s\" not found." msgstr "" #: src/hed/libs/compute/BrokerPlugin.cpp:96 #, fuzzy, c-format msgid "Unable to load BrokerPlugin (%s)" msgstr "Nem sikerült betölteni a %s bróker modult" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:27 #, c-format msgid "Uniq is replacing service coming from %s with service coming from %s" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:31 #, c-format msgid "Uniq is ignoring service coming from %s" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:38 #, c-format msgid "Uniq is adding service coming from %s" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:61 #, c-format msgid "Adding endpoint (%s) to TargetInformationRetriever" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:64 #, c-format msgid "Adding endpoint (%s) to ServiceEndpointRetriever" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:67 #, c-format msgid "" "Adding endpoint (%s) to both ServiceEndpointRetriever and " "TargetInformationRetriever" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:42 #, c-format msgid "The plugin %s does not support any interfaces, skipping it." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:47 #, c-format msgid "" "The first supported interface of the plugin %s is an empty string, skipping " "the plugin." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:95 #, c-format msgid "Interface on endpoint (%s) %s." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:101 #: src/hed/libs/compute/EntityRetriever.cpp:133 #: src/hed/libs/compute/EntityRetriever.cpp:425 #, c-format msgid "Ignoring endpoint (%s), it is already registered in retriever." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:110 #, c-format msgid "Service Loop: Endpoint %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:112 #, c-format msgid " This endpoint (%s) is STARTED or SUCCESSFUL" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:115 #, c-format msgid "" "Suspending querying of endpoint (%s) since the service at the endpoint is " "already being queried, or has been queried." 
msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:122 #: src/hed/libs/compute/EntityRetriever.cpp:237 #, c-format msgid " Status of endpoint (%s) is %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:126 #, c-format msgid "Setting status (STARTED) for endpoint: %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:145 #, c-format msgid "Starting thread to query the endpoint on %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:147 #: src/hed/libs/compute/EntityRetriever.cpp:289 #, c-format msgid "Failed to start querying the endpoint on %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:174 #, c-format msgid "Found a registry, will query it recursively: %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:211 #, c-format msgid "Setting status (%s) for endpoint: %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:231 msgid "Checking for suspended endpoints which should be started." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:241 #, c-format msgid "Found started or successful endpoint (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:253 #, c-format msgid "Found suspended endpoint (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:264 #, c-format msgid "Trying to start suspended endpoint (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:284 #, c-format msgid "" "Starting querying of suspended endpoint (%s) - no other endpoints for this " "service is being queried or has been queried successfully." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:351 #, c-format msgid "Calling plugin %s to query endpoint on %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:373 #, c-format msgid "" "The interface of this endpoint (%s) is unspecified, will try all possible " "plugins" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:389 #, c-format msgid "Problem loading plugin %s, skipping it." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:393 #, c-format msgid "The endpoint (%s) is not supported by this plugin (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:414 #, c-format msgid "" "New endpoint is created (%s) from the one with the unspecified interface (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:432 #, c-format msgid "Starting sub-thread to query the endpoint on %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:434 #, c-format msgid "" "Failed to start querying the endpoint on %s (unable to create sub-thread)" msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:32 #, c-format msgid "Found %s %s (it was loaded already)" msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:41 #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:49 #: src/hed/libs/compute/JobControllerPlugin.cpp:100 #: src/hed/libs/compute/JobControllerPlugin.cpp:109 #: src/hed/libs/compute/SubmitterPlugin.cpp:171 #: src/hed/libs/compute/SubmitterPlugin.cpp:181 #, c-format msgid "" "Unable to locate the \"%s\" plugin. Please refer to installation " "instructions and check if package providing support for \"%s\" plugin is " "installed" msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:42 #, c-format msgid "%s plugin \"%s\" not found." msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:50 #, fuzzy, c-format msgid "%s %s could not be created." 
msgstr "A feladatot nem sikerült megölni vagy letörölni" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:55 #, fuzzy, c-format msgid "Loaded %s %s" msgstr "Feltöltve %s" #: src/hed/libs/compute/ExecutionTarget.cpp:51 #, c-format msgid "" "Skipping ComputingEndpoint '%s', because it has '%s' interface instead of " "the requested '%s'." msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:132 #, c-format msgid "" "Computing endpoint %s (type %s) added to the list for submission brokering" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:239 #, fuzzy, c-format msgid "Address: %s" msgstr "Válasz: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:240 #, fuzzy, c-format msgid "Place: %s" msgstr "Név: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:241 #, fuzzy, c-format msgid "Country: %s" msgstr "Forrás: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:242 #, c-format msgid "Postal code: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:243 #, c-format msgid "Latitude: %f" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:244 #, c-format msgid "Longitude: %f" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:250 #, c-format msgid "Owner: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:257 #, c-format msgid "ID: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:258 #, fuzzy, c-format msgid "Type: %s" msgstr "Proxy típusa: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:263 #, fuzzy, c-format msgid "URL: %s" msgstr "Érvénytelen URL: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:264 #, fuzzy, c-format msgid "Interface: %s" msgstr "Forrás: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:266 #, fuzzy msgid "Interface versions:" msgstr "Felhasználó oldali hiba" #: src/hed/libs/compute/ExecutionTarget.cpp:271 msgid "Interface extensions:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:276 msgid "Capabilities:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:280 #, c-format msgid "Technology: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:282 msgid "Supported Profiles:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:286 #, fuzzy, c-format msgid "Implementor: %s" msgstr "modul neve: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:287 #, fuzzy, c-format msgid "Implementation name: %s" msgstr "modul neve: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:288 #, c-format msgid "Quality level: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:289 #, c-format msgid "Health state: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:290 #, fuzzy, c-format msgid "Health state info: %s" msgstr "Célállomás: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:291 #, c-format msgid "Serving state: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:292 #, c-format msgid "Issuer CA: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:294 msgid "Trusted CAs:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:298 #, c-format msgid "Downtime starts: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:299 #, c-format msgid "Downtime ends: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:300 #, fuzzy, c-format msgid "Staging: %s" msgstr "Célállomás: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:302 #, fuzzy msgid "Job descriptions:" msgstr "Feladat leírás: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:314 #, fuzzy, c-format msgid "Scheme: %s" msgstr "Forrás: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:317 #, fuzzy, c-format msgid "Rule: %s" msgstr "Kérés: %s" #: 
src/hed/libs/compute/ExecutionTarget.cpp:329 #, fuzzy, c-format msgid "Mapping queue: %s" msgstr "Kérés: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:330 #, c-format msgid "Max wall-time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:331 #, c-format msgid "Max total wall-time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:332 #, c-format msgid "Min wall-time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:333 #, c-format msgid "Default wall-time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:334 #, c-format msgid "Max CPU time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:335 #, c-format msgid "Min CPU time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:336 #, c-format msgid "Default CPU time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:337 #, c-format msgid "Max total jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:338 #, c-format msgid "Max running jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:339 #, c-format msgid "Max waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:340 #, c-format msgid "Max pre-LRMS waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:341 #, c-format msgid "Max user running jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:342 #, c-format msgid "Max slots per job: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:343 #, c-format msgid "Max stage in streams: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:344 #, c-format msgid "Max stage out streams: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:345 #, c-format msgid "Scheduling policy: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:346 #, c-format msgid "Max memory: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:347 #, c-format msgid "Max virtual memory: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:348 #, c-format msgid "Max disk space: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:349 #, c-format msgid "Default Storage Service: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:350 msgid "Supports preemption" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:351 msgid "Doesn't support preemption" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:352 #, c-format msgid "Total jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:353 #, c-format msgid "Running jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:354 #, c-format msgid "Local running jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:355 #, c-format msgid "Waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:356 #, c-format msgid "Local waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:357 #, c-format msgid "Suspended jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:358 #, c-format msgid "Local suspended jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:359 #, c-format msgid "Staging jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:360 #, c-format msgid "Pre-LRMS waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:361 #, c-format msgid "Estimated average waiting time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:362 #, c-format msgid "Estimated worst waiting time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:363 #, c-format msgid "Free slots: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:365 msgid "Free slots 
grouped according to time limits (limit: free slots):" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:368 #, c-format msgid " %s: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:369 #, c-format msgid " unspecified: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:372 #, c-format msgid "Used slots: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:373 #, fuzzy, c-format msgid "Requested slots: %i" msgstr "Kérés: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:374 #, c-format msgid "Reservation policy: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:381 #, fuzzy, c-format msgid "Resource manager: %s" msgstr "modul neve: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:383 #, c-format msgid " (%s)" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:387 #, c-format msgid "Total physical CPUs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:388 #, c-format msgid "Total logical CPUs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:389 #, c-format msgid "Total slots: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:390 msgid "Supports advance reservations" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:391 msgid "Doesn't support advance reservations" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:392 msgid "Supports bulk submission" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:393 msgid "Doesn't support bulk Submission" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:394 msgid "Homogeneous resource" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:395 msgid "Non-homogeneous resource" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:397 #, fuzzy msgid "Network information:" msgstr "verzió információ kiírása" #: src/hed/libs/compute/ExecutionTarget.cpp:402 msgid "Working area is shared among jobs" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:403 msgid "Working area is not shared among jobs" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:404 #, c-format msgid "Working area total size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:405 #, c-format msgid "Working area free size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:406 #, c-format msgid "Working area life time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:407 #, c-format msgid "Cache area total size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:408 #, c-format msgid "Cache area free size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:414 #, fuzzy, c-format msgid "Platform: %s" msgstr "Név: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:415 msgid "Execution environment supports inbound connections" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:416 msgid "Execution environment does not support inbound connections" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:417 msgid "Execution environment supports outbound connections" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:418 msgid "Execution environment does not support outbound connections" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:419 msgid "Execution environment is a virtual machine" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:420 msgid "Execution environment is a physical machine" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:421 #, c-format msgid "CPU vendor: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:422 #, c-format msgid "CPU model: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:423 #, fuzzy, c-format msgid "CPU 
version: %s" msgstr "%s verzió %s" #: src/hed/libs/compute/ExecutionTarget.cpp:424 #, c-format msgid "CPU clock speed: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:425 #, c-format msgid "Main memory size: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:426 #, c-format msgid "OS family: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:427 #, fuzzy, c-format msgid "OS name: %s" msgstr "Név: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:428 #, fuzzy, c-format msgid "OS version: %s" msgstr "%s verzió %s" #: src/hed/libs/compute/ExecutionTarget.cpp:435 msgid "Computing service:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:459 #, c-format msgid "%d Endpoints" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:464 #, fuzzy msgid "Endpoint Information:" msgstr "verzió információ kiírása" #: src/hed/libs/compute/ExecutionTarget.cpp:476 #, c-format msgid "%d Batch Systems" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:481 msgid "Batch System Information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:487 msgid "Installed application environments:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:500 #, c-format msgid "%d Shares" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:505 #, fuzzy msgid "Share Information:" msgstr "verzió információ kiírása" #: src/hed/libs/compute/ExecutionTarget.cpp:511 #, c-format msgid "%d mapping policies" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:515 msgid "Mapping policy:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:531 #, c-format msgid "Execution Target on Computing Service: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:533 #, c-format msgid " Computing endpoint URL: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:535 #, c-format msgid " Computing endpoint interface name: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:537 #: src/hed/libs/compute/Job.cpp:575 #, c-format msgid " Queue: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:540 #, c-format msgid " Mapping queue: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:543 #, c-format msgid " Health state: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:548 msgid "Service information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:553 msgid " Installed application environments:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:560 #, fuzzy msgid "Batch system information:" msgstr "verzió információ kiírása" #: src/hed/libs/compute/ExecutionTarget.cpp:563 msgid "Queue information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:570 #, fuzzy msgid " Benchmark information:" msgstr "verzió információ kiírása" #: src/hed/libs/compute/GLUE2.cpp:53 msgid "The Service doesn't advertise its Type." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:58 msgid "The ComputingService doesn't advertise its Quality Level." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:99 msgid "The ComputingEndpoint has no URL." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:104 msgid "The Service advertises no Health State." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:117 msgid "The ComputingEndpoint doesn't advertise its Quality Level." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:128 msgid "The ComputingService doesn't advertise its Interface." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:160 msgid "The ComputingEndpoint doesn't advertise its Serving State." 
msgstr "" #: src/hed/libs/compute/GLUE2.cpp:247 #, c-format msgid "" "The \"FreeSlotsWithDuration\" attribute published by \"%s\" is wrongly " "formatted. Ignoring it." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:420 #, c-format msgid "" "Couldn't parse benchmark XML:\n" "%s" msgstr "" #: src/hed/libs/compute/Job.cpp:324 msgid "Unable to detect format of job record." msgstr "" #: src/hed/libs/compute/Job.cpp:545 #, c-format msgid "Job: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:547 #, c-format msgid " Name: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:548 #, fuzzy, c-format msgid " State: %s" msgstr "Név: %s" #: src/hed/libs/compute/Job.cpp:551 #, c-format msgid " Specific state: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:555 src/hed/libs/compute/Job.cpp:579 #, c-format msgid " Waiting Position: %d" msgstr "" #: src/hed/libs/compute/Job.cpp:559 #, c-format msgid " Exit Code: %d" msgstr "" #: src/hed/libs/compute/Job.cpp:563 #, fuzzy, c-format msgid " Job Error: %s" msgstr "Feladat leírás: %s" #: src/hed/libs/compute/Job.cpp:568 #, c-format msgid " Owner: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:572 #, c-format msgid " Other Messages: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:577 #, fuzzy, c-format msgid " Requested Slots: %d" msgstr "Kérés: %s" #: src/hed/libs/compute/Job.cpp:582 #, c-format msgid " Stdin: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:584 #, c-format msgid " Stdout: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:586 #, c-format msgid " Stderr: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:588 #, c-format msgid " Computing Service Log Directory: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:591 #, c-format msgid " Submitted: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:594 #, c-format msgid " End Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:597 #, c-format msgid " Submitted from: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:600 #, c-format msgid " Submitting client: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:603 #, c-format msgid " Requested CPU Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:608 #, c-format msgid " Used CPU Time: %s (%s per slot)" msgstr "" #: src/hed/libs/compute/Job.cpp:612 #, c-format msgid " Used CPU Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:618 #, c-format msgid " Used Wall Time: %s (%s per slot)" msgstr "" #: src/hed/libs/compute/Job.cpp:622 #, c-format msgid " Used Wall Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:627 #, c-format msgid " Used Memory: %d" msgstr "" #: src/hed/libs/compute/Job.cpp:631 #, c-format msgid " Results were deleted: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:632 #, c-format msgid " Results must be retrieved before: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:636 #, c-format msgid " Proxy valid until: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:640 #, c-format msgid " Entry valid from: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:643 #, c-format msgid " Entry valid for: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:647 msgid " Old job IDs:" msgstr "" #: src/hed/libs/compute/Job.cpp:655 #, c-format msgid " ID on service: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:656 #, c-format msgid " Service information URL: %s (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:657 #, c-format msgid " Job status URL: %s (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:658 #, c-format msgid " Job management URL: %s (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:659 #, c-format msgid " Stagein directory URL: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:660 #, c-format msgid " Stageout 
directory URL: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:661 #, c-format msgid " Session directory URL: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:663 msgid " Delegation IDs:" msgstr "" #: src/hed/libs/compute/Job.cpp:845 #, c-format msgid "Unable to handle job (%s), no interface specified." msgstr "" #: src/hed/libs/compute/Job.cpp:850 #, c-format msgid "" "Unable to handle job (%s), no plugin associated with the specified interface " "(%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:872 #, c-format msgid "Invalid download destination path specified (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:877 #, c-format msgid "" "Unable to download job (%s), no JobControllerPlugin plugin was set to handle " "the job." msgstr "" #: src/hed/libs/compute/Job.cpp:881 #, c-format msgid "Downloading job: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:885 #, c-format msgid "" "Cant retrieve job files for job (%s) - unable to determine URL of stage out " "directory" msgstr "" #: src/hed/libs/compute/Job.cpp:890 #, c-format msgid "Invalid stage out path specified (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:897 #, c-format msgid "%s directory exist! Skipping job." msgstr "" #: src/hed/libs/compute/Job.cpp:903 #, c-format msgid "Unable to retrieve list of job files to download for job %s" msgstr "" #: src/hed/libs/compute/Job.cpp:908 #, c-format msgid "No files to retrieve for job %s" msgstr "" #: src/hed/libs/compute/Job.cpp:914 #, c-format msgid "Failed to create directory %s! Skipping job." msgstr "" #: src/hed/libs/compute/Job.cpp:927 #, fuzzy, c-format msgid "Failed downloading %s to %s, destination already exist" msgstr "Nem sikerült feloldani a célállomást" #: src/hed/libs/compute/Job.cpp:933 #, c-format msgid "Failed downloading %s to %s, unable to remove existing destination" msgstr "" #: src/hed/libs/compute/Job.cpp:939 #, c-format msgid "Failed downloading %s to %s" msgstr "" #: src/hed/libs/compute/Job.cpp:952 #, fuzzy, c-format msgid "Unable to initialize handler for %s" msgstr "Nem sikerült betölteni a %s bróker modult" #: src/hed/libs/compute/Job.cpp:957 #, c-format msgid "Unable to list files at %s" msgstr "" #: src/hed/libs/compute/Job.cpp:999 msgid "Now copying (from -> to)" msgstr "" #: src/hed/libs/compute/Job.cpp:1000 #, c-format msgid " %s -> %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1015 #, c-format msgid "Unable to initialise connection to source: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1026 #, c-format msgid "Unable to initialise connection to destination: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1045 #, c-format msgid "File download failed: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1084 src/hed/libs/compute/Job.cpp:1113 #: src/hed/libs/compute/Job.cpp:1145 src/hed/libs/compute/Job.cpp:1178 #, c-format msgid "Waiting for lock on file %s" msgstr "" #: src/hed/libs/compute/JobControllerPlugin.cpp:101 #, c-format msgid "JobControllerPlugin plugin \"%s\" not found." 
msgstr "" #: src/hed/libs/compute/JobControllerPlugin.cpp:110 #, fuzzy, c-format msgid "JobControllerPlugin %s could not be created" msgstr "A feladat vezérlÅ‘ modult nem sikerült betölteni" #: src/hed/libs/compute/JobControllerPlugin.cpp:115 #, fuzzy, c-format msgid "Loaded JobControllerPlugin %s" msgstr "A feladat vezérlÅ‘ modult nem sikerült betölteni" #: src/hed/libs/compute/JobDescription.cpp:22 #, c-format msgid ": %d" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:24 #, c-format msgid ": %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:138 msgid " --- DRY RUN --- " msgstr "" #: src/hed/libs/compute/JobDescription.cpp:148 #, fuzzy, c-format msgid " Annotation: %s" msgstr "Célállomás: %s" #: src/hed/libs/compute/JobDescription.cpp:154 #, c-format msgid " Old activity ID: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:160 #, c-format msgid " Argument: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:171 #, c-format msgid " RemoteLogging (optional): %s (%s)" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:174 #, c-format msgid " RemoteLogging: %s (%s)" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:182 #, c-format msgid " Environment.name: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:183 #, c-format msgid " Environment: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:196 #, c-format msgid " PreExecutable.Argument: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:199 #: src/hed/libs/compute/JobDescription.cpp:217 #, c-format msgid " Exit code for successful execution: %d" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:202 #: src/hed/libs/compute/JobDescription.cpp:220 msgid " No exit code for successful execution specified." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:214 #, c-format msgid " PostExecutable.Argument: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:230 #, c-format msgid " Access control: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:234 #, c-format msgid " Processing start time: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:237 msgid " Notify:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:251 #, c-format msgid " Credential service: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:261 msgid " Operating system requirements:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:279 msgid " Computing endpoint requirements:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:292 msgid " Node access: inbound" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:295 msgid " Node access: outbound" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:298 msgid " Node access: inbound and outbound" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:308 msgid " Job requires exclusive execution" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:311 msgid " Job does not require exclusive execution" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:316 msgid " Run time environment requirements:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:328 msgid " Inputfile element:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:329 #: src/hed/libs/compute/JobDescription.cpp:351 #, c-format msgid " Name: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:331 msgid " Is executable: true" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:335 #, fuzzy, c-format msgid " Sources: %s" msgstr "Forrás: %s" #: src/hed/libs/compute/JobDescription.cpp:337 #, c-format msgid " Sources.DelegationID: %s" msgstr "" #: 
src/hed/libs/compute/JobDescription.cpp:341 #, c-format msgid " Sources.Options: %s = %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:350 msgid " Outputfile element:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:354 #, c-format msgid " Targets: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:356 #, c-format msgid " Targets.DelegationID: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:360 #, c-format msgid " Targets.Options: %s = %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:367 #, c-format msgid " DelegationID element: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:374 #, c-format msgid " Other attributes: [%s], %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:440 msgid "Empty job description source string" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:473 #, fuzzy msgid "No job description parsers available" msgstr "Nincs megadva feladat leírás bemeneti adatként" #: src/hed/libs/compute/JobDescription.cpp:475 #, c-format msgid "" "No job description parsers suitable for handling '%s' language are available" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:483 #, c-format msgid "%s parsing error" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:499 #, fuzzy msgid "No job description parser was able to interpret job description" msgstr "Feladat leírás elküldve ide: %s" #: src/hed/libs/compute/JobDescription.cpp:509 msgid "" "Job description language is not specified, unable to output description." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:521 #, fuzzy, c-format msgid "Generating %s job description output" msgstr "Egy hiba lépett fel a feladat leírás elkészítése közben." #: src/hed/libs/compute/JobDescription.cpp:537 #, c-format msgid "Language (%s) not recognized by any job description parsers." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:550 #, c-format msgid "Two input files have identical name '%s'." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:569 #: src/hed/libs/compute/JobDescription.cpp:582 #, fuzzy, c-format msgid "Cannot stat local input file '%s'" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/hed/libs/compute/JobDescription.cpp:602 #, c-format msgid "Cannot find local input file '%s' (%s)" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:644 msgid "Unable to select runtime environment" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:651 msgid "Unable to select middleware" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:658 msgid "Unable to select operating system." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:677 #, c-format msgid "No test-job with ID %d found." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:689 #, c-format msgid "Test was defined with ID %d, but some error occurred during parsing it." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:693 #, fuzzy, c-format msgid "No jobdescription resulted at %d test" msgstr "Feladat leírás elküldve ide: %s" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:52 #, c-format msgid "JobDescriptionParserPlugin plugin \"%s\" not found." 
msgstr "" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:59 #, c-format msgid "JobDescriptionParserPlugin %s could not be created" msgstr "" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:64 #, c-format msgid "Loaded JobDescriptionParserPlugin %s" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:328 #, fuzzy msgid "Unable to create temporary directory" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:336 #, c-format msgid "Unable to create data base environment (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:346 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:350 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:354 #, c-format msgid "Unable to set duplicate flags for secondary key DB (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:360 #, c-format msgid "Unable to create job database (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:364 #, c-format msgid "Unable to create DB for secondary name keys (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:368 #, c-format msgid "Unable to create DB for secondary endpoint keys (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:372 #, c-format msgid "Unable to create DB for secondary service info keys (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:377 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:381 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:385 #, c-format msgid "Unable to associate secondary DB with primary DB (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:388 #, c-format msgid "Job database created successfully (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:430 #, c-format msgid "Error from BDB: %s: %s" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:433 #, c-format msgid "Error from BDB: %s" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:453 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:221 #: src/hed/libs/compute/JobInformationStorageXML.cpp:27 #, c-format msgid "" "Job list file cannot be created: The parent directory (%s) doesn't exist." msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:457 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:225 #: src/hed/libs/compute/JobInformationStorageXML.cpp:31 #, c-format msgid "Job list file cannot be created: %s is not a directory" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:464 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:232 #: src/hed/libs/compute/JobInformationStorageXML.cpp:38 #, c-format msgid "Job list file (%s) is not a regular file" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:502 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:561 #, c-format msgid "Unable to write key/value pair to job database (%s): Key \"%s\"" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:728 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:622 #: src/hed/libs/compute/JobInformationStorageXML.cpp:137 #, c-format msgid "Unable to truncate job database (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:759 msgid "" "ENOENT: The file or directory does not exist, Or a nonexistent re_source " "file was specified." msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:762 msgid "" "DB_OLD_VERSION: The database cannot be opened without being first upgraded." 
msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:765 msgid "EEXIST: DB_CREATE and DB_EXCL were specified and the database exists." msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:767 msgid "EINVAL" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:770 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:655 #, fuzzy, c-format msgid "Unable to determine error (%d)" msgstr "Nem sikerült betölteni a %s bróker modult" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:120 #, fuzzy, c-format msgid "Unable to create data base (%s)" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:128 #, fuzzy, c-format msgid "Unable to create jobs table in data base (%s)" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:137 #, fuzzy, c-format msgid "Unable to create jobs_new table in data base (%s)" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:143 #, fuzzy, c-format msgid "Unable to transfer from jobs to jobs_new in data base (%s)" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:149 #, fuzzy, c-format msgid "Unable to drop jobs in data base (%s)" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:155 #, fuzzy, c-format msgid "Unable to rename jobs table in data base (%s)" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:165 #, fuzzy, c-format msgid "Unable to create index for jobs table in data base (%s)" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:173 #, fuzzy, c-format msgid "Failed checking database (%s)" msgstr "Nem sikerült listázni a fájlokat" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:175 #, c-format msgid "Job database connection established successfully (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:197 #, fuzzy, c-format msgid "Error from SQLite: %s: %s" msgstr "Nem tudom olvasni a forrásokat a fájlból: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:200 #, c-format msgid "Error from SQLite: %s" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:362 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:369 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:376 #, c-format msgid "Unable to write records into job database (%s): Id \"%s\"" msgstr "" #: src/hed/libs/compute/JobInformationStorageXML.cpp:51 #: src/hed/libs/compute/JobInformationStorageXML.cpp:223 #: src/hed/libs/compute/JobInformationStorageXML.cpp:264 #, fuzzy, c-format msgid "Waiting for lock on job list file %s" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/hed/libs/compute/JobInformationStorageXML.cpp:162 #, c-format msgid "Will remove %s on service %s." 
msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:36 msgid "Ignoring job, the job ID is empty" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:41 #, c-format msgid "Ignoring job (%s), the management interface name is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:46 #, c-format msgid "Ignoring job (%s), the job management URL is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:51 #, c-format msgid "Ignoring job (%s), the status interface name is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:56 #, c-format msgid "Ignoring job (%s), the job status URL is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:65 #, c-format msgid "Ignoring job (%s), unable to load JobControllerPlugin for %s" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:72 #, c-format msgid "" "Ignoring job (%s), already tried and were unable to load JobControllerPlugin" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:385 #, fuzzy, c-format msgid "Job resubmission failed: Unable to load broker (%s)" msgstr "A feladat küldés meghiusúlt, mert nincs több szabad várakozó sor." #: src/hed/libs/compute/JobSupervisor.cpp:400 #, fuzzy msgid "Job resubmission aborted because no resource returned any information" msgstr "" "Feladat küldés sikertelen, mert egyetlen klaszter sem adott vissza " "információt magáról" #: src/hed/libs/compute/JobSupervisor.cpp:421 #, c-format msgid "Unable to resubmit job (%s), unable to parse obtained job description" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:443 #, c-format msgid "" "Unable to resubmit job (%s), target information retrieval failed for target: " "%s" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:469 #, c-format msgid "Unable to resubmit job (%s), no targets applicable for submission" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:504 #, c-format msgid "" "Unable to migrate job (%s), job description could not be retrieved remotely" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:524 #, fuzzy msgid "Job migration aborted, no resource returned any information" msgstr "" "Feladat küldés sikertelen, mert egyetlen klaszter sem adott vissza " "információt magáról" #: src/hed/libs/compute/JobSupervisor.cpp:536 #, fuzzy, c-format msgid "Job migration aborted, unable to load broker (%s)" msgstr "Nem sikerült betölteni a %s bróker modult" #: src/hed/libs/compute/JobSupervisor.cpp:552 #, c-format msgid "Unable to migrate job (%s), unable to parse obtained job description" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:573 #, c-format msgid "Unable to load submission plugin for %s interface" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:583 #, fuzzy, c-format msgid "Job migration failed for job (%s), no applicable targets" msgstr "A feladat küldés meghiusúlt, mert nincs több szabad várakozó sor." #: src/hed/libs/compute/Software.cpp:65 src/hed/libs/compute/Software.cpp:94 #: src/hed/libs/compute/Software.cpp:113 #, c-format msgid "%s > %s => false" msgstr "" #: src/hed/libs/compute/Software.cpp:70 src/hed/libs/compute/Software.cpp:83 #: src/hed/libs/compute/Software.cpp:107 #, c-format msgid "%s > %s => true" msgstr "" #: src/hed/libs/compute/Software.cpp:90 src/hed/libs/compute/Software.cpp:102 #, c-format msgid "%s > %s => false: %s contains non numbers in the version part." msgstr "" #: src/hed/libs/compute/Software.cpp:199 src/hed/libs/compute/Software.cpp:210 #, c-format msgid "Requirement \"%s %s\" NOT satisfied." 
msgstr "" #: src/hed/libs/compute/Software.cpp:205 #, c-format msgid "Requirement \"%s %s\" satisfied." msgstr "" #: src/hed/libs/compute/Software.cpp:214 #, c-format msgid "Requirement \"%s %s\" satisfied by \"%s\"." msgstr "" #: src/hed/libs/compute/Software.cpp:219 msgid "All requirements satisfied." msgstr "" #: src/hed/libs/compute/Submitter.cpp:83 #, fuzzy, c-format msgid "Trying to submit directly to endpoint (%s)" msgstr "Feladat újraküldésének megkisérlése ide: %s" #: src/hed/libs/compute/Submitter.cpp:88 #, c-format msgid "Interface (%s) specified, submitting only to that interface" msgstr "" #: src/hed/libs/compute/Submitter.cpp:106 msgid "Trying all available interfaces" msgstr "" #: src/hed/libs/compute/Submitter.cpp:112 #, c-format msgid "Trying to submit endpoint (%s) using interface (%s) with plugin (%s)." msgstr "" #: src/hed/libs/compute/Submitter.cpp:116 #, c-format msgid "" "Unable to load plugin (%s) for interface (%s) when trying to submit job " "description." msgstr "" #: src/hed/libs/compute/Submitter.cpp:130 #, c-format msgid "No more interfaces to try for endpoint %s." msgstr "" #: src/hed/libs/compute/Submitter.cpp:336 #, c-format msgid "Target %s does not match requested interface(s)." msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:64 msgid "No stagein URL is provided" msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:83 #, fuzzy, c-format msgid "Failed uploading file %s to %s: %s" msgstr "Nem sikerült listázni a fájlokat" #: src/hed/libs/compute/SubmitterPlugin.cpp:116 #, c-format msgid "Trying to migrate to %s: Migration to a %s interface is not supported." msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:172 #, c-format msgid "SubmitterPlugin plugin \"%s\" not found." msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:182 #, c-format msgid "SubmitterPlugin %s could not be created" msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:187 #, c-format msgid "Loaded SubmitterPlugin %s" msgstr "" #: src/hed/libs/compute/examples/basic_job_submission.cpp:28 #, fuzzy msgid "Invalid job description" msgstr "Érvénytelen feladat leírás:" #: src/hed/libs/compute/examples/basic_job_submission.cpp:47 #, fuzzy msgid "Failed to submit job" msgstr "Feladat újraküldésének megkisérlése ide: %s" #: src/hed/libs/compute/examples/basic_job_submission.cpp:54 #, c-format msgid "Failed to write to local job list %s" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:20 msgid "[job description ...]" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:21 msgid "" "This tiny tool can be used for testing the JobDescription's conversion " "abilities." msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:23 msgid "" "The job description also can be a file or a string in ADL or XRSL format." 
msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:27 msgid "define the requested format (nordugrid:xrsl, emies:adl)" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:28 msgid "format" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:33 msgid "show the original job description" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:43 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:71 msgid "Use --help option for detailed usage information" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:50 #, fuzzy msgid " [ JobDescription tester ] " msgstr "Feladat leírás elküldve ide: %s" #: src/hed/libs/compute/test_jobdescription.cpp:74 msgid " [ Parsing the original text ] " msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:80 #, fuzzy msgid "Unable to parse." msgstr "Nem sikerült betölteni a %s bróker modult" #: src/hed/libs/compute/test_jobdescription.cpp:89 msgid " [ emies:adl ] " msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:91 msgid " [ nordugrid:xrsl ] " msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:134 msgid "VOMS command is empty" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:254 #, fuzzy, c-format msgid "OpenSSL error -- %s" msgstr "OpenSSL hiba -- %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:255 #, c-format msgid "Library : %s" msgstr "Könyvtár : %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:256 #, c-format msgid "Function : %s" msgstr "Funkció: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:257 #, c-format msgid "Reason : %s" msgstr "Indok : %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:313 msgid "User interface error" msgstr "Felhasználó oldali hiba" #: src/hed/libs/credential/ARCProxyUtil.cpp:319 msgid "Aborted!" msgstr "Megszakítva!" #: src/hed/libs/credential/ARCProxyUtil.cpp:418 #: src/hed/libs/credential/ARCProxyUtil.cpp:1399 #, fuzzy msgid "Failed to sign proxy" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/credential/ARCProxyUtil.cpp:437 #: src/hed/libs/credential/Credential.cpp:878 #, c-format msgid "Error: can't open policy file: %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:447 #: src/hed/libs/credential/Credential.cpp:891 #, c-format msgid "Error: policy location: %s is not a regular file" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:546 #, c-format msgid "VOMS line contains wrong number of tokens (%u expected): \"%s\"" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:590 #, fuzzy, c-format msgid "Cannot get VOMS server %s information from the vomses files" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:623 #, fuzzy, c-format msgid "There are %d commands to the same VOMS server %s" msgstr "%d számú parancs van ugyanahoz a voms szerverhez: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:677 #, fuzzy, c-format msgid "Try to get attribute from VOMS server with order: %s" msgstr "Attribútumok lekérdezés a voms szervertÅ‘l ebben a sorrendben: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:680 #, c-format msgid "Message sent to VOMS server %s is: %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:699 #: src/hed/libs/credential/ARCProxyUtil.cpp:720 #, c-format msgid "" "The VOMS server with the information:\n" "\t%s\n" "can not be reached, please make sure it is available" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:703 #, fuzzy msgid "No HTTP response from VOMS server" msgstr "Nincs válasz a voms szervertÅ‘l" #: src/hed/libs/credential/ARCProxyUtil.cpp:708 #: 
src/hed/libs/credential/ARCProxyUtil.cpp:734 #, fuzzy, c-format msgid "Returned message from VOMS server: %s" msgstr "Ez a válasz érkezett a voms szervertől: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:724 #, fuzzy msgid "No stream response from VOMS server" msgstr "Nincs válasz a voms szervertől" #: src/hed/libs/credential/ARCProxyUtil.cpp:746 #, c-format msgid "" "The validity duration of VOMS AC is shortened from %s to %s, due to the " "validity constraint on voms server side.\n" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:749 #, c-format msgid "" "Cannot get any AC or attributes info from VOMS server: %s;\n" " Returned message from VOMS server: %s\n" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:754 #, fuzzy, c-format msgid "Returned message from VOMS server %s is: %s\n" msgstr "Ez a válasz érkezett a voms szervertől: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:776 #, c-format msgid "The attribute information from VOMS server: %s is list as following:" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:788 #, c-format msgid "" "There are %d servers with the same name: %s in your vomses file, but none of " "them can be reached, or can return valid message. But proxy without VOMS AC " "extension will still be generated." msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:1321 #: src/hed/libs/credential/ARCProxyUtil.cpp:1428 #, fuzzy msgid "Failed to generate X509 request with NSS" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/credential/ARCProxyUtil.cpp:1332 #: src/hed/libs/credential/ARCProxyUtil.cpp:1439 #: src/hed/libs/credential/ARCProxyUtil.cpp:1480 msgid "Failed to create X509 certificate with NSS" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:1344 #: src/hed/libs/credential/ARCProxyUtil.cpp:1451 #: src/hed/libs/credential/ARCProxyUtil.cpp:1504 msgid "Failed to export X509 certificate from NSS DB" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:1487 msgid "Failed to import X509 certificate into NSS DB" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:1496 msgid "Failed to initialize the credential configuration" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:162 #, c-format msgid "Error number in store context: %i" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:163 msgid "Self-signed certificate" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:166 #, fuzzy, c-format msgid "The certificate with subject %s is not valid" msgstr "Proxy készítés sikertelen: A publikus kulcs érvénytelen."
#: src/hed/libs/credential/CertUtil.cpp:169 #, c-format msgid "" "Can not find issuer certificate for the certificate with subject %s and " "hash: %lu" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:172 #, c-format msgid "Certificate with subject %s has expired" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:175 #, c-format msgid "" "Untrusted self-signed certificate in chain with subject %s and hash: %lu" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:177 #, c-format msgid "Certificate verification error: %s" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:189 msgid "Can not get the certificate type" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:229 msgid "Couldn't verify availability of CRL" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:242 msgid "In the available CRL the lastUpdate field is not valid" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:249 msgid "The available CRL is not yet valid" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:258 msgid "In the available CRL, the nextUpdate field is not valid" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:264 msgid "The available CRL has expired" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:287 #, c-format msgid "Certificate with serial number %s and subject \"%s\" is revoked" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:305 msgid "" "Directory of trusted CAs is not specified/found; Using current path as the " "CA direcroty" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:314 msgid "Can't allocate memory for CA policy path" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:360 #, c-format msgid "Certificate has unknown extension with numeric ID %u and SN %s" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:374 #: src/hed/libs/credential/Credential.cpp:1697 msgid "" "Can not convert DER encoded PROXY_CERT_INFO_EXTENSION extension to internal " "format" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:420 msgid "Trying to check X509 cert with check_cert_type" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:459 msgid "Can't convert DER encoded PROXYCERTINFO extension to internal format" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:463 msgid "Can't get policy from PROXYCERTINFO extension" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:467 msgid "Can't get policy language from PROXYCERTINFO extension" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:499 msgid "The subject does not match the issuer name + proxy CN entry" msgstr "" #: src/hed/libs/credential/Credential.cpp:84 #, c-format msgid "OpenSSL error string: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:205 msgid "Can't get the first byte of input to determine its format" msgstr "" #: src/hed/libs/credential/Credential.cpp:219 msgid "Can't reset the input" msgstr "" #: src/hed/libs/credential/Credential.cpp:244 #: src/hed/libs/credential/Credential.cpp:280 msgid "Can't get the first byte of input BIO to get its format" msgstr "" #: src/hed/libs/credential/Credential.cpp:256 msgid "Can not read certificate/key string" msgstr "" #: src/hed/libs/credential/Credential.cpp:460 #, c-format msgid "Can not find certificate file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:465 #, c-format msgid "Can not read certificate file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:503 msgid "Can not read certificate string" msgstr "" #: src/hed/libs/credential/Credential.cpp:523 msgid "Certificate format is PEM" msgstr "" #: src/hed/libs/credential/Credential.cpp:550 msgid "Certificate format is DER" msgstr "" #: 
src/hed/libs/credential/Credential.cpp:579 msgid "Certificate format is PKCS" msgstr "" #: src/hed/libs/credential/Credential.cpp:605 msgid "Certificate format is unknown" msgstr "" #: src/hed/libs/credential/Credential.cpp:613 #, c-format msgid "Can not find key file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:618 #, c-format msgid "Can not open key file %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:637 msgid "Can not read key string" msgstr "" #: src/hed/libs/credential/Credential.cpp:700 #: src/hed/libs/credential/VOMSUtil.cpp:244 msgid "Failed to lock arccredential library in memory" msgstr "" #: src/hed/libs/credential/Credential.cpp:712 msgid "Certificate verification succeeded" msgstr "" #: src/hed/libs/credential/Credential.cpp:716 msgid "Certificate verification failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:729 #: src/hed/libs/credential/Credential.cpp:747 #: src/hed/libs/credential/Credential.cpp:765 #: src/hed/libs/credential/Credential.cpp:996 #: src/hed/libs/credential/Credential.cpp:2368 #: src/hed/libs/credential/Credential.cpp:2397 msgid "Failed to initialize extensions member for Credential" msgstr "" #: src/hed/libs/credential/Credential.cpp:808 #, c-format msgid "Unsupported proxy policy language is requested - %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:820 #, c-format msgid "Unsupported proxy version is requested - %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:831 msgid "If you specify a policy you also need to specify a policy language" msgstr "" #: src/hed/libs/credential/Credential.cpp:1001 msgid "Certificate/Proxy path is empty" msgstr "" #: src/hed/libs/credential/Credential.cpp:1059 #: src/hed/libs/credential/Credential.cpp:2905 msgid "Failed to duplicate extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1063 #, fuzzy msgid "Failed to add extension into credential extensions" msgstr "Nem sikerült betölteni a konfigurációt" #: src/hed/libs/credential/Credential.cpp:1074 #, fuzzy msgid "Certificate information collection failed" msgstr "verzió információ kiírása" #: src/hed/libs/credential/Credential.cpp:1113 #: src/hed/libs/credential/Credential.cpp:1118 msgid "Can not convert string into ASN1_OBJECT" msgstr "" #: src/hed/libs/credential/Credential.cpp:1125 msgid "Can not create ASN1_OCTET_STRING" msgstr "" #: src/hed/libs/credential/Credential.cpp:1134 msgid "Can not allocate memory for extension for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:1144 msgid "Can not create extension for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:1180 #: src/hed/libs/credential/Credential.cpp:1348 msgid "BN_set_word failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1189 #: src/hed/libs/credential/Credential.cpp:1357 msgid "RSA_generate_key_ex failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1198 #: src/hed/libs/credential/Credential.cpp:1365 msgid "BN_new || RSA_new failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1209 msgid "Created RSA key, proceeding with request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1214 msgid "pkey and rsa_key exist!" msgstr "" #: src/hed/libs/credential/Credential.cpp:1217 msgid "Generate new X509 request!" msgstr "" #: src/hed/libs/credential/Credential.cpp:1222 msgid "Setting subject name!" 
msgstr "" #: src/hed/libs/credential/Credential.cpp:1230 #: src/hed/libs/credential/Credential.cpp:1444 msgid "PEM_write_bio_X509_REQ failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1260 #: src/hed/libs/credential/Credential.cpp:1301 #: src/hed/libs/credential/Credential.cpp:1476 #: src/hed/libs/credential/Credential.cpp:1496 msgid "Can not create BIO for request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1278 msgid "Failed to write request into string" msgstr "" #: src/hed/libs/credential/Credential.cpp:1305 #: src/hed/libs/credential/Credential.cpp:1310 #: src/hed/libs/credential/Credential.cpp:1500 msgid "Can not set writable file for request BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:1316 #: src/hed/libs/credential/Credential.cpp:1505 msgid "Wrote request into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1318 #: src/hed/libs/credential/Credential.cpp:1508 msgid "Failed to write request into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1338 msgid "The credential's private key has already been initialized" msgstr "" #: src/hed/libs/credential/Credential.cpp:1386 msgid "" "Can not duplicate the subject name for the self-signing proxy certificate " "request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1396 msgid "Can not create a new X509_NAME_ENTRY for the proxy certificate request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1414 #: src/hed/libs/credential/Credential.cpp:1421 #: src/hed/libs/credential/Credential.cpp:1999 #: src/hed/libs/credential/Credential.cpp:2007 msgid "" "Can not convert PROXY_CERT_INFO_EXTENSION struct from internal to DER " "encoded format" msgstr "" #: src/hed/libs/credential/Credential.cpp:1451 msgid "Can't convert X509 request from internal to DER encoded format" msgstr "" #: src/hed/libs/credential/Credential.cpp:1461 msgid "Can not generate X509 request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1463 msgid "Can not set private key" msgstr "" #: src/hed/libs/credential/Credential.cpp:1561 msgid "Failed to get private key" msgstr "" #: src/hed/libs/credential/Credential.cpp:1580 msgid "Failed to get public key from RSA object" msgstr "" #: src/hed/libs/credential/Credential.cpp:1588 msgid "Failed to get public key from X509 object" msgstr "" #: src/hed/libs/credential/Credential.cpp:1595 msgid "Failed to get public key" msgstr "" #: src/hed/libs/credential/Credential.cpp:1633 #, c-format msgid "Certiticate chain number %d" msgstr "" #: src/hed/libs/credential/Credential.cpp:1661 msgid "NULL BIO passed to InquireRequest" msgstr "" #: src/hed/libs/credential/Credential.cpp:1664 msgid "PEM_read_bio_X509_REQ failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1668 msgid "d2i_X509_REQ_bio failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1690 msgid "Missing data in DER encoded PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1702 msgid "Can not create PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1712 msgid "Can not get policy from PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1716 msgid "Can not get policy language from PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1732 #, c-format msgid "Cert Type: %d" msgstr "" #: src/hed/libs/credential/Credential.cpp:1745 #: src/hed/libs/credential/Credential.cpp:1764 msgid "Can not create BIO for parsing request" msgstr "" #: 
src/hed/libs/credential/Credential.cpp:1750 msgid "Read request from a string" msgstr "" #: src/hed/libs/credential/Credential.cpp:1753 msgid "Failed to read request from a string" msgstr "" #: src/hed/libs/credential/Credential.cpp:1768 msgid "Can not set readable file for request BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:1773 msgid "Read request from a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1776 msgid "Failed to read request from a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1816 msgid "Can not convert private key to DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:1980 msgid "Credential is not initialized" msgstr "" #: src/hed/libs/credential/Credential.cpp:1986 #, fuzzy msgid "Failed to duplicate X509 structure" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/credential/Credential.cpp:1991 msgid "Failed to initialize X509 structure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2014 msgid "Can not create extension for PROXY_CERT_INFO" msgstr "" #: src/hed/libs/credential/Credential.cpp:2018 #: src/hed/libs/credential/Credential.cpp:2066 msgid "Can not add X509 extension to proxy cert" msgstr "" #: src/hed/libs/credential/Credential.cpp:2034 msgid "Can not convert keyUsage struct from DER encoded format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2046 #: src/hed/libs/credential/Credential.cpp:2055 msgid "Can not convert keyUsage struct from internal to DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2062 msgid "Can not create extension for keyUsage" msgstr "" #: src/hed/libs/credential/Credential.cpp:2075 msgid "Can not get extended KeyUsage extension from issuer certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2080 msgid "Can not copy extended KeyUsage extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:2085 msgid "Can not add X509 extended KeyUsage extension to new proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2095 msgid "Can not compute digest of public key" msgstr "" #: src/hed/libs/credential/Credential.cpp:2106 msgid "Can not copy the subject name from issuer for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2112 msgid "Can not create name entry CN for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2117 msgid "Can not set CN in proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2125 msgid "Can not set issuer's subject for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2130 msgid "Can not set version number for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2138 msgid "Can not set serial number for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2144 msgid "Can not duplicate serial number for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2150 msgid "Can not set the lifetime for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2154 msgid "Can not set pubkey for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2170 #: src/hed/libs/credential/Credential.cpp:2795 msgid "The credential to be signed is NULL" msgstr "" #: src/hed/libs/credential/Credential.cpp:2174 #: src/hed/libs/credential/Credential.cpp:2799 msgid "The credential to be signed contains no request" msgstr "" #: src/hed/libs/credential/Credential.cpp:2178 #: src/hed/libs/credential/Credential.cpp:2803 msgid "The BIO for output is NULL" msgstr "" #: 
src/hed/libs/credential/Credential.cpp:2192 #: src/hed/libs/credential/Credential.cpp:2810 msgid "Error when extracting public key from request" msgstr "" #: src/hed/libs/credential/Credential.cpp:2197 #: src/hed/libs/credential/Credential.cpp:2814 msgid "Failed to verify the request" msgstr "" #: src/hed/libs/credential/Credential.cpp:2201 msgid "Failed to add issuer's extension into proxy" msgstr "" #: src/hed/libs/credential/Credential.cpp:2225 msgid "Failed to find extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:2237 msgid "Can not get the issuer's private key" msgstr "" #: src/hed/libs/credential/Credential.cpp:2244 #: src/hed/libs/credential/Credential.cpp:2846 msgid "There is no digest in issuer's private key object" msgstr "" #: src/hed/libs/credential/Credential.cpp:2249 #: src/hed/libs/credential/Credential.cpp:2850 #, c-format msgid "%s is an unsupported digest type" msgstr "" #: src/hed/libs/credential/Credential.cpp:2260 #, c-format msgid "" "The signing algorithm %s is not allowed,it should be SHA1 or SHA2 to sign " "certificate requests" msgstr "" #: src/hed/libs/credential/Credential.cpp:2266 msgid "Failed to sign the proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2268 msgid "Succeeded to sign the proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2273 msgid "Failed to verify the signed certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2275 msgid "Succeeded to verify the signed certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2280 #: src/hed/libs/credential/Credential.cpp:2289 msgid "Output the proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2283 msgid "Can not convert signed proxy cert into PEM format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2292 msgid "Can not convert signed proxy cert into DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2308 #: src/hed/libs/credential/Credential.cpp:2331 msgid "Can not create BIO for signed proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2335 msgid "Can not set writable file for signed proxy certificate BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:2340 msgid "Wrote signed proxy certificate into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:2343 msgid "Failed to write signed proxy certificate into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:2377 #: src/hed/libs/credential/Credential.cpp:2415 #, fuzzy, c-format msgid "ERROR: %s" msgstr "Érvénytelen URL: %s" #: src/hed/libs/credential/Credential.cpp:2423 #, c-format msgid "SSL error: %s, libs: %s, func: %s, reason: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2468 #, c-format msgid "unable to load number from: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2473 msgid "error converting number from bin to BIGNUM" msgstr "" #: src/hed/libs/credential/Credential.cpp:2500 msgid "file name too long" msgstr "" #: src/hed/libs/credential/Credential.cpp:2523 msgid "error converting serial to ASN.1 format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2556 #, c-format msgid "load serial from %s failure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2561 msgid "add_word failure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2566 #, c-format msgid "save serial to %s failure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2586 msgid "Error initialising X509 store" msgstr "" #: src/hed/libs/credential/Credential.cpp:2593 msgid "Out of memory when generate 
random serial" msgstr "" #: src/hed/libs/credential/Credential.cpp:2605 msgid "CA certificate and CA private key do not match" msgstr "" #: src/hed/libs/credential/Credential.cpp:2629 #, c-format msgid "Failed to load extension section: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2666 msgid "malloc error" msgstr "" #: src/hed/libs/credential/Credential.cpp:2670 msgid "Subject does not start with '/'" msgstr "" #: src/hed/libs/credential/Credential.cpp:2686 #: src/hed/libs/credential/Credential.cpp:2707 msgid "escape character at end of string" msgstr "" #: src/hed/libs/credential/Credential.cpp:2698 #, c-format msgid "" "end of string encountered while processing type of subject name element #%d" msgstr "" #: src/hed/libs/credential/Credential.cpp:2735 #, c-format msgid "Subject Attribute %s has no known NID, skipped" msgstr "" #: src/hed/libs/credential/Credential.cpp:2739 #, c-format msgid "No value provided for Subject Attribute %s skipped" msgstr "" #: src/hed/libs/credential/Credential.cpp:2780 msgid "Failed to set the pubkey for X509 object by using pubkey from X509_REQ" msgstr "" #: src/hed/libs/credential/Credential.cpp:2790 msgid "The private key for signing is not initialized" msgstr "" #: src/hed/libs/credential/Credential.cpp:2869 #, c-format msgid "Error when loading the extension config file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2873 #, c-format msgid "Error when loading the extension config file: %s on line: %d" msgstr "" #: src/hed/libs/credential/Credential.cpp:2921 msgid "Can not sign a EEC" msgstr "" #: src/hed/libs/credential/Credential.cpp:2925 msgid "Output EEC certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2928 msgid "Can not convert signed EEC cert into DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2942 #: src/hed/libs/credential/Credential.cpp:2961 msgid "Can not create BIO for signed EEC certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2965 msgid "Can not set writable file for signed EEC certificate BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:2970 msgid "Wrote signed EEC certificate into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:2973 msgid "Failed to write signed EEC certificate into a file" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:143 msgid "Error writing raw certificate" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:222 msgid "Failed to add RFC proxy OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:225 #, c-format msgid "Succeeded to add RFC proxy OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:231 msgid "Failed to add anyLanguage OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:234 #, c-format msgid "Succeeded to add anyLanguage OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:240 msgid "Failed to add inheritAll OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:243 #, c-format msgid "Succeeded to add inheritAll OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:249 msgid "Failed to add Independent OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:252 #, c-format msgid "Succeeded to add Independent OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:258 msgid "Failed to add VOMS AC sequence OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:261 #, c-format msgid "Succeeded to add VOMS AC sequence OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:290 #, c-format msgid "NSS initialization failed on 
certificate database: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:301 msgid "Succeeded to initialize NSS" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:323 #, c-format msgid "Failed to read attribute %x from private key." msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:375 msgid "Succeeded to get credential" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:376 #, fuzzy msgid "Failed to get credential" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/credential/NSSUtil.cpp:438 msgid "p12 file is empty" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:448 msgid "Unable to write to p12 file" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:464 #, fuzzy msgid "Failed to open p12 file" msgstr "Nem sikerült listázni a fájlokat" #: src/hed/libs/credential/NSSUtil.cpp:492 msgid "Failed to allocate p12 context" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1200 msgid "Failed to find issuer certificate for proxy certificate" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1351 #, c-format msgid "Failed to authenticate to PKCS11 slot %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1357 #, c-format msgid "Failed to find certificates by nickname: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1362 #, c-format msgid "No user certificate by nickname %s found" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1375 #: src/hed/libs/credential/NSSUtil.cpp:1411 msgid "Certificate does not have a slot" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1381 msgid "Failed to create export context" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1396 msgid "PKCS12 output password not provided" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1403 msgid "PKCS12 add password integrity failed" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1424 #, fuzzy msgid "Failed to create key or certificate safe" msgstr "publikus kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:1440 #, fuzzy msgid "Failed to add certificate and key" msgstr "publikus kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:1449 #, c-format msgid "Failed to initialize PKCS12 file: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1454 #, fuzzy msgid "Failed to encode PKCS12" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/credential/NSSUtil.cpp:1457 msgid "Succeeded to export PKCS12" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1485 #, c-format msgid "" "There is no certificate named %s found, the certificate could be removed " "when generating CSR" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1491 #, fuzzy msgid "Failed to delete certificate" msgstr "publikus kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:1505 msgid "The name of the private key to delete is empty" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1510 #: src/hed/libs/credential/NSSUtil.cpp:2939 #: src/hed/libs/credential/NSSUtil.cpp:2956 #, c-format msgid "Failed to authenticate to token %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1517 #, c-format msgid "No private key with nickname %s exist in NSS database" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1550 #, fuzzy msgid "Failed to delete private key and certificate" msgstr "A privát és publikus kulcs tárolására szolgáló könyvtár" #: src/hed/libs/credential/NSSUtil.cpp:1560 #, fuzzy msgid "Failed to delete private key" msgstr "privát kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:1571 #, c-format msgid "Can not find key with name: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1599 msgid "Can not read 
PEM private key: probably bad password" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1601 msgid "Can not read PEM private key: failed to decrypt" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1603 #: src/hed/libs/credential/NSSUtil.cpp:1605 msgid "Can not read PEM private key: failed to obtain password" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1606 msgid "Can not read PEM private key" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1613 msgid "Failed to convert EVP_PKEY to PKCS8" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1650 #, fuzzy msgid "Failed to load private key" msgstr "privát kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:1651 msgid "Succeeded to load PrivateKeyInfo" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1654 msgid "Failed to convert PrivateKeyInfo to EVP_PKEY" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1655 msgid "Succeeded to convert PrivateKeyInfo to EVP_PKEY" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1692 #, fuzzy msgid "Failed to import private key" msgstr "privát kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:1695 msgid "Succeeded to import private key" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1708 #: src/hed/libs/credential/NSSUtil.cpp:1750 #: src/hed/libs/credential/NSSUtil.cpp:2889 msgid "Failed to authenticate to key database" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1717 msgid "Succeeded to generate public/private key pair" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1719 msgid "Failed to generate public/private key pair" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1724 #, fuzzy msgid "Failed to export private key" msgstr "privát kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:1791 msgid "Failed to create subject name" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1807 #, fuzzy msgid "Failed to create certificate request" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/credential/NSSUtil.cpp:1820 msgid "Failed to call PORT_NewArena" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1828 msgid "Failed to encode the certificate request with DER format" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1835 msgid "Unknown key or hash type" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1841 #, fuzzy msgid "Failed to sign the certificate request" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/credential/NSSUtil.cpp:1857 msgid "Failed to output the certificate request as ASCII format" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1866 msgid "Failed to output the certificate request as DER format" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1875 #, c-format msgid "Succeeded to output the certificate request into %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1914 #: src/hed/libs/credential/NSSUtil.cpp:1951 msgid "Failed to read data from input file" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1930 msgid "Input is without trailer\n" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1941 msgid "Failed to convert ASCII to DER" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1992 msgid "Certificate request is invalid" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2212 #, c-format msgid "The policy language: %s is not supported" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2220 #: src/hed/libs/credential/NSSUtil.cpp:2245 #: src/hed/libs/credential/NSSUtil.cpp:2268 #: src/hed/libs/credential/NSSUtil.cpp:2290 #, fuzzy msgid "Failed to new arena" msgstr "Nem sikerült elküldeni a kérést" #: 
src/hed/libs/credential/NSSUtil.cpp:2229 #: src/hed/libs/credential/NSSUtil.cpp:2254 msgid "Failed to create path length" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2232 #: src/hed/libs/credential/NSSUtil.cpp:2257 #: src/hed/libs/credential/NSSUtil.cpp:2277 #: src/hed/libs/credential/NSSUtil.cpp:2299 msgid "Failed to create policy language" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2700 #, c-format msgid "Failed to parse certificate request from CSR file %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2707 #, c-format msgid "Can not find certificate with name %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2739 msgid "Can not allocate memory" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2747 #, fuzzy, c-format msgid "Proxy subject: %s" msgstr "Tárgy: %s" #: src/hed/libs/credential/NSSUtil.cpp:2764 msgid "Failed to start certificate extension" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2769 msgid "Failed to add key usage extension" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2774 msgid "Failed to add proxy certificate information extension" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2778 msgid "Failed to add voms AC extension" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2798 #, fuzzy msgid "Failed to retrieve private key for issuer" msgstr "privát kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:2805 msgid "Unknown key or hash type of issuer" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2811 msgid "Failed to set signature algorithm ID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2823 #, fuzzy msgid "Failed to encode certificate" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/credential/NSSUtil.cpp:2829 msgid "Failed to allocate item for certificate data" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2835 msgid "Failed to sign encoded certificate data" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2844 #, c-format msgid "Failed to open file %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2855 #, c-format msgid "Succeeded to output certificate to %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2896 #, fuzzy, c-format msgid "Failed to open input certificate file %s" msgstr "publikus kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:2913 #, fuzzy msgid "Failed to read input certificate file" msgstr "publikus kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:2918 #, fuzzy msgid "Failed to get certificate from certificate file" msgstr "publikus kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:2925 msgid "Failed to allocate certificate trust" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2930 #, fuzzy msgid "Failed to decode trust string" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/credential/NSSUtil.cpp:2944 #: src/hed/libs/credential/NSSUtil.cpp:2961 msgid "Failed to add certificate to token or database" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2947 #: src/hed/libs/credential/NSSUtil.cpp:2950 msgid "Succeeded to import certificate" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2964 #: src/hed/libs/credential/NSSUtil.cpp:2967 #, c-format msgid "Succeeded to change trusts to: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2994 #, fuzzy, c-format msgid "Failed to import private key from file: %s" msgstr "privát kulcs elérési útvonala" #: src/hed/libs/credential/NSSUtil.cpp:2996 #, fuzzy, c-format msgid "Failed to import certificate from file: %s" msgstr "publikus kulcs elérési útvonala" #: 
src/hed/libs/credential/VOMSConfig.cpp:142 #, c-format msgid "" "ERROR: VOMS configuration line contains too many tokens. Expecting 5 or 6. " "Line was: %s" msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:158 #, c-format msgid "" "ERROR: file tree is too deep while scanning VOMS configuration. Max allowed " "nesting is %i." msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:176 #, c-format msgid "ERROR: failed to read file %s while scanning VOMS configuration." msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:181 #, c-format msgid "" "ERROR: VOMS configuration file %s contains too many lines. Max supported " "number is %i." msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:188 #, c-format msgid "" "ERROR: VOMS configuration file %s contains too long line(s). Max supported " "length is %i characters." msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:171 #, fuzzy, c-format msgid "Failed to create OpenSSL object %s %s - %u %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/hed/libs/credential/VOMSUtil.cpp:179 #, fuzzy, c-format msgid "Failed to obtain OpenSSL identifier for %s" msgstr "publikus kulcs elérési útvonala" #: src/hed/libs/credential/VOMSUtil.cpp:332 #, c-format msgid "VOMS: create FQAN: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:370 #, c-format msgid "VOMS: create attribute: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:651 msgid "VOMS: Can not allocate memory for parsing AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:659 msgid "VOMS: Can not allocate memory for storing the order of AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:685 msgid "VOMS: Can not parse AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:715 msgid "VOMS: CA directory or CA file must be provided" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:779 msgid "VOMS: failed to verify AC signature" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:848 #, c-format msgid "VOMS: trust chain to check: %s " msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:856 #, c-format msgid "" "VOMS: the DN in certificate: %s does not match that in trusted DN list: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:862 #, c-format msgid "" "VOMS: the Issuer identity in certificate: %s does not match that in trusted " "DN list: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:897 #, fuzzy, c-format msgid "VOMS: The lsc file %s does not exist" msgstr "Az XML konfigurációs fájl: %s nem létezik" #: src/hed/libs/credential/VOMSUtil.cpp:903 #, c-format msgid "VOMS: The lsc file %s can not be open" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:951 msgid "" "VOMS: there is no constraints of trusted voms DNs, the certificates stack in " "AC will not be checked." 
msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:984 msgid "VOMS: unable to match certificate chain against VOMS trusted DNs" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1004 msgid "VOMS: AC signature verification failed" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1013 msgid "VOMS: unable to verify certificate chain" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1019 #, c-format msgid "VOMS: cannot validate AC issuer for VO %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1042 #, c-format msgid "VOMS: directory for trusted service certificates: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1068 #, c-format msgid "VOMS: Cannot find certificate of AC issuer for VO %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1090 msgid "VOMS: Can not find AC_ATTR with IETFATTR type" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1097 msgid "VOMS: case of multiple IETFATTR attributes not supported" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1107 msgid "VOMS: case of multiple policyAuthority not supported" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1123 msgid "VOMS: the format of policyAuthority is unsupported - expecting URI" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1132 msgid "" "VOMS: the format of IETFATTRVAL is not supported - expecting OCTET STRING" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1189 msgid "VOMS: the grantor attribute is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1207 msgid "VOMS: the attribute name is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1213 #, c-format msgid "VOMS: the attribute value for %s is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1218 msgid "VOMS: the attribute qualifier is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1250 msgid "" "VOMS: both idcenoRevAvail and authorityKeyIdentifier certificate extensions " "must be present" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1284 #, c-format msgid "VOMS: FQDN of this host %s does not match any target in AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1289 msgid "VOMS: the only supported critical extension of the AC is idceTargets" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1304 msgid "VOMS: failed to parse attributes from AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1348 msgid "VOMS: authorityKey is wrong" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1376 msgid "VOMS: missing AC parts" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1393 msgid "VOMS: unsupported time format in AC - expecting GENERALIZED TIME" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1399 msgid "VOMS: AC is not yet valid" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1406 msgid "VOMS: AC has expired" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1421 msgid "VOMS: AC is not complete - missing Serial or Issuer information" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1426 #, c-format msgid "VOMS: the holder serial number is: %lx" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1427 #, c-format msgid "VOMS: the serial number in AC is: %lx" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1430 #, c-format msgid "" "VOMS: the holder serial number %lx is not the same as the serial number in " "AC %lx, the holder certificate that is used to create a voms proxy could be " "a proxy certificate with a different serial number as the original EEC cert" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1439 msgid "VOMS: the holder information in AC is wrong" msgstr "" #: 
src/hed/libs/credential/VOMSUtil.cpp:1461 #, c-format msgid "VOMS: DN of holder in AC: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1462 #, c-format msgid "VOMS: DN of holder: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1463 #, c-format msgid "VOMS: DN of issuer: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1470 msgid "" "VOMS: the holder name in AC is not related to the distinguished name in " "holder certificate" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1482 #: src/hed/libs/credential/VOMSUtil.cpp:1489 msgid "VOMS: the holder issuerUID is not the same as that in AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1502 msgid "VOMS: the holder issuer name is not the same as that in AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1512 msgid "VOMS: the issuer information in AC is wrong" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1520 #, c-format msgid "VOMS: the issuer name %s is not the same as that in AC - %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1528 msgid "" "VOMS: the serial number of AC INFO is too long - expecting no more than 20 " "octets" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1558 #: src/hed/libs/credential/VOMSUtil.cpp:1566 #: src/hed/libs/credential/VOMSUtil.cpp:1574 #: src/hed/libs/credential/VOMSUtil.cpp:1582 #: src/hed/libs/credential/VOMSUtil.cpp:1605 msgid "VOMS: unable to extract VO name from AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1596 #, c-format msgid "VOMS: unable to determine hostname of AC from VO name: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1615 msgid "VOMS: can not verify the signature of the AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1621 msgid "VOMS: problems while parsing information in AC" msgstr "" #: src/hed/libs/credential/test/VOMSUtilTest.cpp:128 #, c-format msgid "Line %d.%d of the attributes returned: %s" msgstr "" #: src/hed/libs/credentialstore/ClientVOMS.cpp:149 msgid "voms" msgstr "" #: src/hed/libs/credentialstore/CredentialStore.cpp:194 #: src/hed/libs/credentialstore/CredentialStore.cpp:245 #: src/hed/libs/credentialstore/CredentialStore.cpp:273 #: src/hed/libs/credentialstore/CredentialStore.cpp:336 #: src/hed/libs/credentialstore/CredentialStore.cpp:376 #: src/hed/libs/credentialstore/CredentialStore.cpp:406 #, c-format msgid "MyProxy failure: %s" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:71 #, c-format msgid "SSL error: %d - %s:%s:%s" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:84 msgid "SSL locks not initialized" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:88 #, c-format msgid "wrong SSL lock requested: %i of %i: %i - %s" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:111 msgid "Failed to lock arccrypto library in memory" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:116 src/hed/libs/crypto/OpenSSL.cpp:130 msgid "Failed to initialize OpenSSL library" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:152 msgid "Number of OpenSSL locks changed - reinitializing" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:157 #, fuzzy msgid "failed to read data tag" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/data/DataExternalHelper.cpp:161 msgid "waiting for data chunk" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:163 #, fuzzy msgid "failed to read data chunk" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/data/DataExternalHelper.cpp:171 #, c-format msgid "data chunk: %llu %llu" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:242 #, c-format msgid "DataMove::Transfer: using supplied checksum %s" msgstr "" 
#: src/hed/libs/data/DataExternalHelper.cpp:361 msgid "Expecting Module, Command and URL provided" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:368 msgid "Expecting Command module path among arguments" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:372 msgid "Expecting Command module name among arguments" msgstr "" #: src/hed/libs/data/DataMover.cpp:115 msgid "No locations found - probably no more physical instances" msgstr "" #: src/hed/libs/data/DataMover.cpp:121 src/hed/libs/data/FileCache.cpp:552 #: src/libs/data-staging/Processor.cpp:443 #: src/libs/data-staging/Processor.cpp:457 #, c-format msgid "Removing %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:134 msgid "This instance was already deleted" msgstr "" #: src/hed/libs/data/DataMover.cpp:140 msgid "Failed to delete physical file" msgstr "" #: src/hed/libs/data/DataMover.cpp:151 #, c-format msgid "Removing metadata in %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:155 msgid "Failed to delete meta-information" msgstr "" #: src/hed/libs/data/DataMover.cpp:169 msgid "Failed to remove all physical instances" msgstr "" #: src/hed/libs/data/DataMover.cpp:173 #, c-format msgid "Removing logical file from metadata %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:176 msgid "Failed to delete logical file" msgstr "" #: src/hed/libs/data/DataMover.cpp:183 msgid "Failed to remove instance" msgstr "" #: src/hed/libs/data/DataMover.cpp:232 msgid "DataMover::Transfer : starting new thread" msgstr "" #: src/hed/libs/data/DataMover.cpp:260 #, c-format msgid "Transfer from %s to %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:262 msgid "Not valid source" msgstr "" #: src/hed/libs/data/DataMover.cpp:267 msgid "Not valid destination" msgstr "" #: src/hed/libs/data/DataMover.cpp:287 src/services/candypond/CandyPond.cpp:304 #, c-format msgid "Couldn't handle certificate: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:296 src/hed/libs/data/DataMover.cpp:590 #: src/libs/data-staging/Processor.cpp:133 #, c-format msgid "File %s is cached (%s) - checking permissions" msgstr "" #: src/hed/libs/data/DataMover.cpp:300 src/hed/libs/data/DataMover.cpp:609 #: src/hed/libs/data/DataMover.cpp:667 src/libs/data-staging/Processor.cpp:152 msgid "Permission checking passed" msgstr "" #: src/hed/libs/data/DataMover.cpp:301 src/hed/libs/data/DataMover.cpp:628 #: src/hed/libs/data/DataMover.cpp:1144 msgid "Linking/copying cached file" msgstr "" #: src/hed/libs/data/DataMover.cpp:325 #, c-format msgid "No locations for source found: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:329 #, c-format msgid "Failed to resolve source: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:341 src/hed/libs/data/DataMover.cpp:409 #, c-format msgid "No locations for destination found: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:346 src/hed/libs/data/DataMover.cpp:413 #, c-format msgid "Failed to resolve destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:361 #, c-format msgid "No locations for destination different from source found: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:382 #, c-format msgid "DataMover::Transfer: trying to destroy/overwrite destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:393 #, c-format msgid "Failed to delete %s but will still try to copy" msgstr "" #: src/hed/libs/data/DataMover.cpp:396 #, c-format msgid "Failed to delete %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:423 #, c-format msgid "Deleted but still have locations at %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:435 msgid "DataMover: cycle" 
msgstr "" #: src/hed/libs/data/DataMover.cpp:437 msgid "DataMover: no retries requested - exit" msgstr "" #: src/hed/libs/data/DataMover.cpp:442 msgid "DataMover: source out of tries - exit" msgstr "" #: src/hed/libs/data/DataMover.cpp:444 msgid "DataMover: destination out of tries - exit" msgstr "" #: src/hed/libs/data/DataMover.cpp:452 #, c-format msgid "Real transfer from %s to %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:478 #, c-format msgid "Creating buffer: %lli x %i" msgstr "" #: src/hed/libs/data/DataMover.cpp:494 #, c-format msgid "DataMove::Transfer: no checksum calculation for %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:499 #, c-format msgid "DataMove::Transfer: using supplied checksum %s:%s" msgstr "" #: src/hed/libs/data/DataMover.cpp:523 #, c-format msgid "DataMove::Transfer: will calculate %s checksum" msgstr "" #: src/hed/libs/data/DataMover.cpp:528 msgid "Buffer creation failed !" msgstr "" #: src/hed/libs/data/DataMover.cpp:551 #, c-format msgid "URL is mapped to: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:579 src/hed/libs/data/DataMover.cpp:637 #: src/libs/data-staging/Processor.cpp:88 msgid "Cached file is locked - should retry" msgstr "" #: src/hed/libs/data/DataMover.cpp:584 src/libs/data-staging/Processor.cpp:106 msgid "Failed to initiate cache" msgstr "" #: src/hed/libs/data/DataMover.cpp:601 src/services/candypond/CandyPond.cpp:379 #, c-format msgid "Permission checking failed: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:603 src/hed/libs/data/DataMover.cpp:661 #: src/hed/libs/data/DataMover.cpp:681 src/hed/libs/data/DataMover.cpp:692 msgid "source.next_location" msgstr "" #: src/hed/libs/data/DataMover.cpp:617 src/libs/data-staging/Processor.cpp:157 #, c-format msgid "Source modification date: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:618 src/libs/data-staging/Processor.cpp:158 #, c-format msgid "Cache creation date: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:624 src/libs/data-staging/Processor.cpp:163 msgid "Cached file is outdated, will re-download" msgstr "" #: src/hed/libs/data/DataMover.cpp:627 src/libs/data-staging/Processor.cpp:168 msgid "Cached copy is still valid" msgstr "" #: src/hed/libs/data/DataMover.cpp:654 msgid "URL is mapped to local access - checking permissions on original URL" msgstr "" #: src/hed/libs/data/DataMover.cpp:658 #, c-format msgid "Permission checking on original URL failed: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:669 msgid "Linking local file" msgstr "" #: src/hed/libs/data/DataMover.cpp:689 #, c-format msgid "Failed to make symbolic link %s to %s : %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:698 #, c-format msgid "Failed to change owner of symbolic link %s to %i" msgstr "" #: src/hed/libs/data/DataMover.cpp:709 #, c-format msgid "cache file: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:735 #, fuzzy, c-format msgid "Failed to stat source %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/data/DataMover.cpp:737 src/hed/libs/data/DataMover.cpp:750 #: src/hed/libs/data/DataMover.cpp:781 src/hed/libs/data/DataMover.cpp:800 #: src/hed/libs/data/DataMover.cpp:822 src/hed/libs/data/DataMover.cpp:839 #: src/hed/libs/data/DataMover.cpp:996 src/hed/libs/data/DataMover.cpp:1028 #: src/hed/libs/data/DataMover.cpp:1038 src/hed/libs/data/DataMover.cpp:1111 msgid "(Re)Trying next source" msgstr "" #: src/hed/libs/data/DataMover.cpp:748 #, c-format msgid "Meta info of source and location do not match for %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:760 #, c-format msgid "" "Replica 
%s has high latency, but no more sources exist so will use this one" msgstr "" #: src/hed/libs/data/DataMover.cpp:764 #, c-format msgid "Replica %s has high latency, trying next source" msgstr "" #: src/hed/libs/data/DataMover.cpp:776 src/hed/libs/data/DataMover.cpp:796 #: src/libs/data-staging/DataStagingDelivery.cpp:344 #: src/libs/data-staging/DataStagingDelivery.cpp:367 #, c-format msgid "Using internal transfer method of %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:788 src/hed/libs/data/DataMover.cpp:805 #: src/libs/data-staging/DataStagingDelivery.cpp:360 #: src/libs/data-staging/DataStagingDelivery.cpp:381 #, c-format msgid "Internal transfer method is not supported for %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:812 msgid "Using buffered transfer method" msgstr "" #: src/hed/libs/data/DataMover.cpp:816 #, fuzzy, c-format msgid "Failed to prepare source: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/data/DataMover.cpp:830 #, c-format msgid "Failed to start reading from source: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:849 msgid "Metadata of source and destination are different" msgstr "" #: src/hed/libs/data/DataMover.cpp:868 #, c-format msgid "Failed to preregister destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:873 src/hed/libs/data/DataMover.cpp:1135 msgid "destination.next_location" msgstr "" #: src/hed/libs/data/DataMover.cpp:884 #, fuzzy, c-format msgid "Failed to prepare destination: %s" msgstr "Nem támogatott url: %s" #: src/hed/libs/data/DataMover.cpp:891 src/hed/libs/data/DataMover.cpp:914 #: src/hed/libs/data/DataMover.cpp:1132 #, c-format msgid "" "Failed to unregister preregistered lfn. You may need to unregister it " "manually: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:895 src/hed/libs/data/DataMover.cpp:917 #: src/hed/libs/data/DataMover.cpp:1005 src/hed/libs/data/DataMover.cpp:1021 #: src/hed/libs/data/DataMover.cpp:1044 src/hed/libs/data/DataMover.cpp:1089 msgid "(Re)Trying next destination" msgstr "" #: src/hed/libs/data/DataMover.cpp:906 #, c-format msgid "Failed to start writing to destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:929 msgid "Failed to start writing to cache" msgstr "" #: src/hed/libs/data/DataMover.cpp:937 src/hed/libs/data/DataMover.cpp:983 #: src/hed/libs/data/DataMover.cpp:1156 msgid "" "Failed to unregister preregistered lfn. 
You may need to unregister it " "manually" msgstr "" #: src/hed/libs/data/DataMover.cpp:944 msgid "Waiting for buffer" msgstr "" #: src/hed/libs/data/DataMover.cpp:951 #, c-format msgid "Failed updating timestamp on cache lock file %s for file %s: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:956 #, c-format msgid "buffer: read EOF : %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:957 #, c-format msgid "buffer: write EOF: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:958 #, c-format msgid "buffer: error : %s, read: %s, write: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:959 msgid "Closing read channel" msgstr "" #: src/hed/libs/data/DataMover.cpp:966 msgid "Closing write channel" msgstr "" #: src/hed/libs/data/DataMover.cpp:974 msgid "Failed to complete writing to destination" msgstr "" #: src/hed/libs/data/DataMover.cpp:988 msgid "Transfer cancelled successfully" msgstr "" #: src/hed/libs/data/DataMover.cpp:1033 msgid "Cause of failure unclear - choosing randomly" msgstr "" #: src/hed/libs/data/DataMover.cpp:1076 #, c-format msgid "" "Checksum mismatch between checksum given as meta option (%s:%s) and " "calculated checksum (%s)" msgstr "" #: src/hed/libs/data/DataMover.cpp:1082 msgid "" "Failed to unregister preregistered lfn, You may need to unregister it " "manually" msgstr "" #: src/hed/libs/data/DataMover.cpp:1086 msgid "Failed to delete destination, retry may fail" msgstr "" #: src/hed/libs/data/DataMover.cpp:1096 msgid "Cannot compare empty checksum" msgstr "" #: src/hed/libs/data/DataMover.cpp:1103 #: src/libs/data-staging/DataStagingDelivery.cpp:538 msgid "Checksum type of source and calculated checksum differ, cannot compare" msgstr "" #: src/hed/libs/data/DataMover.cpp:1105 #, c-format msgid "Checksum mismatch between calcuated checksum %s and source checksum %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:1116 #: src/libs/data-staging/DataStagingDelivery.cpp:554 #, c-format msgid "Calculated transfer checksum %s matches source checksum" msgstr "" #: src/hed/libs/data/DataMover.cpp:1122 #: src/libs/data-staging/DataStagingDelivery.cpp:557 msgid "Checksum not computed" msgstr "" #: src/hed/libs/data/DataMover.cpp:1128 #, c-format msgid "Failed to postregister destination %s" msgstr "" #: src/hed/libs/data/DataPoint.cpp:84 #, c-format msgid "Invalid URL option: %s" msgstr "" #: src/hed/libs/data/DataPoint.cpp:263 #, c-format msgid "Skipping invalid URL option %s" msgstr "" #: src/hed/libs/data/DataPoint.cpp:278 msgid "" "Third party transfer was requested but the corresponding plugin could\n" " not be loaded. Is the GFAL plugin installed? If not, please install " "the\n" " packages 'nordugrid-arc-plugins-gfal' and 'gfal2-all'. Depending on\n" " your type of installation the package names might differ." 
msgstr "" #: src/hed/libs/data/DataPoint.cpp:296 #, fuzzy, c-format msgid "Failed to load plugin for URL %s" msgstr "Nem sikerült listázni a fájlokat" #: src/hed/libs/data/DataPointDelegate.cpp:75 #: src/hed/libs/data/DataPointDelegate.cpp:76 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2011 #, c-format msgid "Starting helper process: %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:180 msgid "start_reading" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:189 msgid "start_reading: helper start failed" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:197 msgid "start_reading: thread create failed" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:213 msgid "StopReading: aborting connection" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:218 msgid "stop_reading: waiting for transfer to finish" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:221 #, c-format msgid "stop_reading: exiting: %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:231 msgid "read_thread: get and register buffers" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:239 #, c-format msgid "read_thread: for_read failed - aborting: %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:247 #, c-format msgid "read_thread: non-data tag '%c' from external process - leaving: %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:256 #, c-format msgid "read_thread: data read error from external process - aborting: %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:264 msgid "read_thread: exiting" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:285 msgid "start_writing_ftp: helper start failed" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:293 msgid "start_writing_ftp: thread create failed" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:343 #, fuzzy msgid "No checksum information possible" msgstr "Nem jött létre új információs dokumentum" #: src/hed/libs/data/DataPointDelegate.cpp:359 msgid "write_thread: get and pass buffers" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:366 msgid "write_thread: for_write failed - aborting" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:370 msgid "write_thread: for_write eof" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:384 msgid "write_thread: out failed - aborting" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:392 msgid "write_thread: exiting" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:91 #, c-format msgid "Can't handle location %s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:183 msgid "Sorting replicas according to URL map" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:187 #, c-format msgid "Replica %s is mapped" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:195 #, c-format msgid "Sorting replicas according to preferred pattern %s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:218 #: src/hed/libs/data/DataPointIndex.cpp:236 #, c-format msgid "Excluding replica %s matching pattern !%s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:229 #, c-format msgid "Replica %s matches host pattern %s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:247 #, c-format msgid "Replica %s matches pattern %s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:263 #, c-format msgid "Replica %s doesn't match preferred pattern or URL map" msgstr "" #: src/hed/libs/data/DataStatus.cpp:12 msgid "Operation completed successfully" msgstr "" #: src/hed/libs/data/DataStatus.cpp:13 #, fuzzy msgid "Source is invalid URL" msgstr "A lekérdezés nem XML helyes" #: src/hed/libs/data/DataStatus.cpp:14 
#, fuzzy msgid "Destination is invalid URL" msgstr "Célállomás: %s" #: src/hed/libs/data/DataStatus.cpp:15 msgid "Resolving of index service for source failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:16 msgid "Resolving of index service for destination failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:17 msgid "Can't read from source" msgstr "" #: src/hed/libs/data/DataStatus.cpp:18 msgid "Can't write to destination" msgstr "" #: src/hed/libs/data/DataStatus.cpp:19 msgid "Failed while reading from source" msgstr "" #: src/hed/libs/data/DataStatus.cpp:20 msgid "Failed while writing to destination" msgstr "" #: src/hed/libs/data/DataStatus.cpp:21 msgid "Failed while transferring data" msgstr "" #: src/hed/libs/data/DataStatus.cpp:22 msgid "Failed while finishing reading from source" msgstr "" #: src/hed/libs/data/DataStatus.cpp:23 msgid "Failed while finishing writing to destination" msgstr "" #: src/hed/libs/data/DataStatus.cpp:24 msgid "First stage of registration to index service failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:25 msgid "Last stage of registration to index service failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:26 msgid "Unregistering from index service failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:27 msgid "Error in caching procedure" msgstr "" #: src/hed/libs/data/DataStatus.cpp:28 msgid "Error due to expiration of provided credentials" msgstr "" #: src/hed/libs/data/DataStatus.cpp:29 msgid "Delete error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:30 msgid "No valid location available" msgstr "" #: src/hed/libs/data/DataStatus.cpp:31 msgid "Location already exists" msgstr "" #: src/hed/libs/data/DataStatus.cpp:32 msgid "Operation not supported for this kind of URL" msgstr "" #: src/hed/libs/data/DataStatus.cpp:33 msgid "Feature is not implemented" msgstr "" #: src/hed/libs/data/DataStatus.cpp:34 msgid "Already reading from source" msgstr "" #: src/hed/libs/data/DataStatus.cpp:35 #, fuzzy msgid "Already writing to destination" msgstr "Nem sikerült feloldani a célállomást" #: src/hed/libs/data/DataStatus.cpp:36 msgid "Read access check failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:37 #, fuzzy msgid "Directory listing failed" msgstr "Nem sikerült listázni a fájlokat" #: src/hed/libs/data/DataStatus.cpp:38 msgid "Object is not suitable for listing" msgstr "" #: src/hed/libs/data/DataStatus.cpp:39 msgid "Failed to obtain information about file" msgstr "" #: src/hed/libs/data/DataStatus.cpp:40 msgid "No such file or directory" msgstr "" #: src/hed/libs/data/DataStatus.cpp:41 msgid "Object not initialized (internal error)" msgstr "" #: src/hed/libs/data/DataStatus.cpp:42 msgid "Operating System error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:43 msgid "Failed to stage file(s)" msgstr "" #: src/hed/libs/data/DataStatus.cpp:44 msgid "Inconsistent metadata" msgstr "" #: src/hed/libs/data/DataStatus.cpp:45 #, fuzzy msgid "Failed to prepare source" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/data/DataStatus.cpp:46 msgid "Should wait for source to be prepared" msgstr "" #: src/hed/libs/data/DataStatus.cpp:47 #, fuzzy msgid "Failed to prepare destination" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/data/DataStatus.cpp:48 msgid "Should wait for destination to be prepared" msgstr "" #: src/hed/libs/data/DataStatus.cpp:49 msgid "Failed to finalize reading from source" msgstr "" #: src/hed/libs/data/DataStatus.cpp:50 msgid "Failed to finalize writing to destination" msgstr "" #: src/hed/libs/data/DataStatus.cpp:51 #, 
fuzzy msgid "Failed to create directory" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/data/DataStatus.cpp:52 #, fuzzy msgid "Failed to rename URL" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/libs/data/DataStatus.cpp:53 msgid "Data was already cached" msgstr "" #: src/hed/libs/data/DataStatus.cpp:54 msgid "Operation cancelled successfully" msgstr "" #: src/hed/libs/data/DataStatus.cpp:55 #, fuzzy msgid "Generic error" msgstr "Felhasználó oldali hiba" #: src/hed/libs/data/DataStatus.cpp:56 src/hed/libs/data/DataStatus.cpp:69 msgid "Unknown error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:60 msgid "No error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:61 msgid "Transfer timed out" msgstr "" #: src/hed/libs/data/DataStatus.cpp:62 msgid "Checksum mismatch" msgstr "" #: src/hed/libs/data/DataStatus.cpp:63 msgid "Bad logic" msgstr "" #: src/hed/libs/data/DataStatus.cpp:64 msgid "All results obtained are invalid" msgstr "" #: src/hed/libs/data/DataStatus.cpp:65 #, fuzzy msgid "Temporary service error" msgstr "Felhasználó oldali hiba" #: src/hed/libs/data/DataStatus.cpp:66 #, fuzzy msgid "Permanent service error" msgstr "Felhasználó oldali hiba" #: src/hed/libs/data/DataStatus.cpp:67 msgid "Error switching uid" msgstr "" #: src/hed/libs/data/DataStatus.cpp:68 msgid "Request timed out" msgstr "" #: src/hed/libs/data/FileCache.cpp:111 msgid "No cache directory specified" msgstr "" #: src/hed/libs/data/FileCache.cpp:128 msgid "No usable caches" msgstr "" #: src/hed/libs/data/FileCache.cpp:137 msgid "No draining cache directory specified" msgstr "" #: src/hed/libs/data/FileCache.cpp:155 msgid "No read-only cache directory specified" msgstr "" #: src/hed/libs/data/FileCache.cpp:184 #, c-format msgid "Failed to create cache directory for file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:194 #, c-format msgid "Failed to create any cache directories for %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:201 #, c-format msgid "Failed to change permissions on %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:213 #, c-format msgid "Failed to delete stale cache file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:216 #, fuzzy, c-format msgid "Failed to release lock on file %s" msgstr "Nem sikerült listázni a fájlokat" #: src/hed/libs/data/FileCache.cpp:234 #, c-format msgid "Failed looking up attributes of cached file: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:240 #, c-format msgid "Failed to obtain lock on cache file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:249 src/hed/libs/data/FileCache.cpp:309 #, c-format msgid "Error removing cache file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:251 src/hed/libs/data/FileCache.cpp:262 #, c-format msgid "Failed to remove lock on %s. Some manual intervention may be required" msgstr "" #: src/hed/libs/data/FileCache.cpp:281 src/hed/libs/data/FileCache.cpp:315 #, c-format msgid "Failed to unlock file %s: %s. 
Manual intervention may be required" msgstr "" #: src/hed/libs/data/FileCache.cpp:298 #, c-format msgid "Invalid lock on file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:304 #, fuzzy, c-format msgid "Failed to remove .meta file %s: %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/hed/libs/data/FileCache.cpp:369 #, fuzzy, c-format msgid "Cache not found for file %s" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/hed/libs/data/FileCache.cpp:379 #, c-format msgid "" "Cache file %s was modified in the last second, sleeping 1 second to avoid " "race condition" msgstr "" #: src/hed/libs/data/FileCache.cpp:384 src/hed/libs/data/FileCache.cpp:689 #, c-format msgid "Cache file %s does not exist" msgstr "" #: src/hed/libs/data/FileCache.cpp:389 src/hed/libs/data/FileCache.cpp:691 #, c-format msgid "Error accessing cache file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:395 #, c-format msgid "Cannot create directory %s for per-job hard links" msgstr "" #: src/hed/libs/data/FileCache.cpp:400 #, c-format msgid "Cannot change permission of %s: %s " msgstr "" #: src/hed/libs/data/FileCache.cpp:404 #, c-format msgid "Cannot change owner of %s: %s " msgstr "" #: src/hed/libs/data/FileCache.cpp:418 #, c-format msgid "Failed to remove existing hard link at %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:422 src/hed/libs/data/FileCache.cpp:433 #, c-format msgid "Failed to create hard link from %s to %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:428 #, c-format msgid "Cache file %s not found" msgstr "" #: src/hed/libs/data/FileCache.cpp:443 #, c-format msgid "Failed to change permissions or set owner of hard link %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:451 #, c-format msgid "Failed to release lock on cache file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:462 #, c-format msgid "Cache file %s was locked during link/copy, must start again" msgstr "" #: src/hed/libs/data/FileCache.cpp:467 #, c-format msgid "Cache file %s was deleted during link/copy, must start again" msgstr "" #: src/hed/libs/data/FileCache.cpp:472 #, c-format msgid "Cache file %s was modified while linking, must start again" msgstr "" #: src/hed/libs/data/FileCache.cpp:490 #, c-format msgid "Failed to copy file %s to %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:496 #, c-format msgid "Failed to set executable bit on file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:501 #, c-format msgid "Failed to set executable bit on file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:515 #, c-format msgid "Failed to remove existing symbolic link at %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:519 src/hed/libs/data/FileCache.cpp:524 #, c-format msgid "Failed to create symbolic link from %s to %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:554 #, c-format msgid "Failed to remove cache per-job dir %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:573 src/hed/libs/data/FileCache.cpp:641 #, c-format msgid "Error reading meta file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:578 src/hed/libs/data/FileCache.cpp:646 #, c-format msgid "Error opening meta file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:583 src/hed/libs/data/FileCache.cpp:650 #, c-format msgid "meta file %s is empty" msgstr "" #: src/hed/libs/data/FileCache.cpp:593 #, c-format msgid "" "File %s is already cached at %s under a different URL: %s - will not add DN " "to cached list" msgstr "" #: src/hed/libs/data/FileCache.cpp:604 #, c-format msgid "Bad format detected in file %s, in line %s" 
msgstr "" #: src/hed/libs/data/FileCache.cpp:620 #, c-format msgid "Could not acquire lock on meta file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:624 #, c-format msgid "Error opening meta file for writing %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:660 #, c-format msgid "DN %s is cached and is valid until %s for URL %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:664 #, c-format msgid "DN %s is cached but has expired for URL %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:715 #, c-format msgid "Failed to acquire lock on cache meta file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:720 #, fuzzy, c-format msgid "Failed to create cache meta file %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/hed/libs/data/FileCache.cpp:735 #, fuzzy, c-format msgid "Failed to read cache meta file %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/hed/libs/data/FileCache.cpp:740 #, c-format msgid "Cache meta file %s is empty, will recreate" msgstr "" #: src/hed/libs/data/FileCache.cpp:745 #, c-format msgid "Cache meta file %s possibly corrupted, will recreate" msgstr "" #: src/hed/libs/data/FileCache.cpp:749 #, c-format msgid "" "File %s is already cached at %s under a different URL: %s - this file will " "not be cached" msgstr "" #: src/hed/libs/data/FileCache.cpp:759 #, c-format msgid "Error looking up attributes of cache meta file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:830 #, c-format msgid "Using cache %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:844 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:79 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:112 #, c-format msgid "Error getting info from statvfs for the path %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:850 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:118 #, c-format msgid "Cache %s: Free space %f GB" msgstr "" #: src/hed/libs/data/URLMap.cpp:33 #, c-format msgid "Can't use URL %s" msgstr "" #: src/hed/libs/data/URLMap.cpp:39 #, c-format msgid "file %s is not accessible" msgstr "" #: src/hed/libs/data/URLMap.cpp:49 #, c-format msgid "Mapping %s to %s" msgstr "" #: src/hed/libs/data/examples/simple_copy.cpp:17 msgid "Usage: copy source destination" msgstr "" #: src/hed/libs/data/examples/simple_copy.cpp:42 #, c-format msgid "Copy failed: %s" msgstr "" #: src/hed/libs/globusutils/GSSCredential.cpp:41 #, c-format msgid "Failed to read proxy file: %s" msgstr "" #: src/hed/libs/globusutils/GSSCredential.cpp:49 #, c-format msgid "Failed to read certificate file: %s" msgstr "" #: src/hed/libs/globusutils/GSSCredential.cpp:56 #, c-format msgid "Failed to read private key file: %s" msgstr "" #: src/hed/libs/globusutils/GSSCredential.cpp:82 #, c-format msgid "" "Failed to convert GSI credential to GSS credential (major: %d, minor: %d):%s:" "%s" msgstr "" #: src/hed/libs/globusutils/GSSCredential.cpp:94 #, c-format msgid "Failed to release GSS credential (major: %d, minor: %d):%s:%s" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:28 msgid "Module Manager Init" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:71 msgid "" "Busy plugins found while unloading Module Manager. Waiting for them to be " "released." 
msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:205 #, c-format msgid "Found %s in cache" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:212 #, c-format msgid "Could not locate module %s in following paths:" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:216 #, c-format msgid "\t%s" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:230 #, c-format msgid "Loaded %s" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:274 msgid "Module Manager Init by ModuleManager::setCfg" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:310 #: src/hed/libs/loader/ModuleManager.cpp:323 #, c-format msgid "%s made persistent" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:314 #, c-format msgid "Not found %s in cache" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:328 msgid "Specified module not found in cache" msgstr "" #: src/hed/libs/loader/Plugin.cpp:364 src/hed/libs/loader/Plugin.cpp:557 #, c-format msgid "Could not find loadable module descriptor by name %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:372 src/hed/libs/loader/Plugin.cpp:567 #, c-format msgid "Could not find loadable module by name %s (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:378 src/hed/libs/loader/Plugin.cpp:480 #: src/hed/libs/loader/Plugin.cpp:572 #, c-format msgid "Module %s is not an ARC plugin (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:395 src/hed/libs/loader/Plugin.cpp:490 #: src/hed/libs/loader/Plugin.cpp:598 #, c-format msgid "Module %s failed to reload (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:417 #, c-format msgid "Module %s contains no plugin %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:462 #, c-format msgid "Could not find loadable module descriptor by name %s or kind %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:467 #, c-format msgid "Loadable module %s contains no requested plugin %s of kind %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:474 #, c-format msgid "Could not find loadable module by names %s and %s (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:503 #, c-format msgid "Module %s contains no requested plugin %s of kind %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:588 #, c-format msgid "Module %s does not contain plugin(s) of specified kind(s)" msgstr "" #: src/hed/libs/message/MCC.cpp:76 src/hed/libs/message/Service.cpp:25 #, c-format msgid "No security processing/check requested for '%s'" msgstr "" #: src/hed/libs/message/MCC.cpp:85 #, c-format msgid "Security processing/check failed: %s" msgstr "" #: src/hed/libs/message/MCC.cpp:90 msgid "Security processing/check passed" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:16 msgid "Chain(s) configuration failed" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:133 msgid "SecHandler configuration is not defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:156 msgid "SecHandler has no configuration" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:162 msgid "SecHandler has no name attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:172 #, c-format msgid "Security Handler %s(%s) could not be created" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:176 #, c-format msgid "SecHandler: %s(%s)" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:188 msgid "Component has no name attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:193 msgid "Component has no ID attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:202 #, c-format msgid "Component %s(%s) could not be created" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:232 #, c-format msgid "Component's %s(%s) next has no ID 
attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:287 #, c-format msgid "Loaded MCC %s(%s)" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:305 #, c-format msgid "Plexer's (%s) next has no ID attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:315 #, c-format msgid "Loaded Plexer %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:323 msgid "Service has no Name attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:329 msgid "Service has no ID attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:338 #, c-format msgid "Service %s(%s) could not be created" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:345 #, c-format msgid "Loaded Service %s(%s)" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:387 #, c-format msgid "Linking MCC %s(%s) to MCC (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:398 #, c-format msgid "Linking MCC %s(%s) to Service (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:407 #, c-format msgid "Linking MCC %s(%s) to Plexer (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:412 #, c-format msgid "MCC %s(%s) - next %s(%s) has no target" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:431 #, c-format msgid "Linking Plexer %s to MCC (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:442 #, c-format msgid "Linking Plexer %s to Service (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:451 #, c-format msgid "Linking Plexer %s to Plexer (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:457 #, c-format msgid "Plexer (%s) - next %s(%s) has no target" msgstr "" #: src/hed/libs/message/Plexer.cpp:31 #, c-format msgid "Bad label: \"%s\"" msgstr "" #: src/hed/libs/message/Plexer.cpp:47 #, c-format msgid "Operation on path \"%s\"" msgstr "" #: src/hed/libs/message/Plexer.cpp:60 #, c-format msgid "No next MCC or Service at path \"%s\"" msgstr "" #: src/hed/libs/message/Service.cpp:35 #, c-format msgid "Security processing/check for '%s' failed: %s" msgstr "" #: src/hed/libs/message/Service.cpp:41 #, c-format msgid "Security processing/check for '%s' passed" msgstr "" #: src/hed/libs/otokens/jwse.cpp:55 #, c-format msgid "JWSE::Input: token: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:75 #, c-format msgid "JWSE::Input: header: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:101 #, c-format msgid "JWSE::Input: JWS content: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:111 msgid "JWSE::Input: JWS: token too young" msgstr "" #: src/hed/libs/otokens/jwse.cpp:120 msgid "JWSE::Input: JWS: token too old" msgstr "" #: src/hed/libs/otokens/jwse.cpp:131 #, c-format msgid "JWSE::Input: JWS: signature algorithm: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:190 msgid "JWSE::Input: JWS: signature verification failed" msgstr "" #: src/hed/libs/otokens/jwse.cpp:196 msgid "JWSE::Input: JWE: not supported yet" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:271 msgid "JWSE::ExtractPublicKey: x5c key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:279 msgid "JWSE::ExtractPublicKey: jwk key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:286 msgid "JWSE::ExtractPublicKey: external jwk key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:303 #, c-format msgid "JWSE::ExtractPublicKey: fetching jwl key from %s" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:316 msgid "JWSE::ExtractPublicKey: no supported key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:319 msgid "JWSE::ExtractPublicKey: key parsing error" msgstr "" #: src/hed/libs/otokens/openid_metadata.cpp:40 #: 
src/hed/libs/otokens/openid_metadata.cpp:45 #, c-format msgid "Input: metadata: %s" msgstr "" #: src/hed/libs/otokens/openid_metadata.cpp:414 #, c-format msgid "Fetch: response code: %u %s" msgstr "" #: src/hed/libs/otokens/openid_metadata.cpp:416 #, fuzzy, c-format msgid "Fetch: response body: %s" msgstr "Válasz: %s" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:136 #, c-format msgid "Can not load ARC evaluator object: %s" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:187 #, c-format msgid "Can not load ARC request object: %s" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:228 #, c-format msgid "Can not load policy object: %s" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:276 msgid "Can not load policy object" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:324 msgid "Can not load request object" msgstr "" #: src/hed/libs/security/ArcPDP/PolicyParser.cpp:119 msgid "Can not generate policy object" msgstr "" #: src/hed/libs/security/ArcPDP/attr/RequestAttribute.cpp:37 #, c-format msgid "Id= %s,Type= %s,Issuer= %s,Value= %s" msgstr "" #: src/hed/libs/security/ArcPDP/attr/RequestAttribute.cpp:40 #, c-format msgid "No Attribute exists, which can deal with type: %s" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:168 #, c-format msgid "HTTP Error: %d %s" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:241 msgid "Cannot create http payload" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:311 msgid "No next element in the chain" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:320 #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:253 msgid "next element of the chain returned error status" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:329 msgid "next element of the chain returned no payload" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:341 msgid "next element of the chain returned invalid/unsupported payload" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:423 msgid "Error to flush output payload" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:305 #, c-format msgid "<< %s" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:354 src/hed/mcc/http/PayloadHTTP.cpp:456 #, c-format msgid "< %s" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:575 #, fuzzy msgid "Failed to parse HTTP header" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/mcc/http/PayloadHTTP.cpp:836 msgid "Invalid HTTP object can't produce result" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:949 #, c-format msgid "> %s" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:974 msgid "Failed to write header to output stream" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:999 src/hed/mcc/http/PayloadHTTP.cpp:1005 #: src/hed/mcc/http/PayloadHTTP.cpp:1011 src/hed/mcc/http/PayloadHTTP.cpp:1021 #: src/hed/mcc/http/PayloadHTTP.cpp:1033 src/hed/mcc/http/PayloadHTTP.cpp:1038 #: src/hed/mcc/http/PayloadHTTP.cpp:1043 src/hed/mcc/http/PayloadHTTP.cpp:1051 #: src/hed/mcc/http/PayloadHTTP.cpp:1058 msgid "Failed to write body to output stream" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:31 msgid "Skipping service: no ServicePath found!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:37 msgid "Skipping service: no SchemaPath found!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:89 msgid "Parser Context creation failed!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:98 msgid "Cannot parse schema!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:110 msgid "Empty payload!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:119 msgid "Could not convert payload!" 
msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:125 #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:212 msgid "Could not create PayloadSOAP!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:196 msgid "Empty input payload!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:205 msgid "Could not convert incoming payload!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:232 msgid "Missing schema! Skipping validation..." msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:237 msgid "Could not validate message!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:245 #: src/hed/mcc/soap/MCCSOAP.cpp:222 src/hed/mcc/soap/MCCSOAP.cpp:236 #: src/hed/mcc/soap/MCCSOAP.cpp:266 msgid "empty next chain element" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:257 #: src/hed/mcc/soap/MCCSOAP.cpp:282 msgid "next element of the chain returned empty payload" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:265 msgid "next element of the chain returned invalid payload" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:207 msgid "empty input payload" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:217 #, c-format msgid "MIME is not suitable for SOAP: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:231 msgid "incoming message is not SOAP" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:258 #, c-format msgid "Security check failed in SOAP MCC for incoming message: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:274 #, c-format msgid "next element of the chain returned error status: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:293 msgid "next element of the chain returned unknown payload - passing through" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:298 src/hed/mcc/soap/MCCSOAP.cpp:314 #, c-format msgid "Security check failed in SOAP MCC for outgoing message: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:368 msgid "Security check failed in SOAP MCC for outgoing message" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:421 msgid "Security check failed in SOAP MCC for incoming message" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:82 msgid "Missing Port in Listen element" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:91 msgid "Version in Listen element can't be recognized" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:100 #, c-format msgid "Failed to obtain local address for port %s - %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:102 #, c-format msgid "Failed to obtain local address for %s:%s - %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:109 #, c-format msgid "Trying to listen on TCP port %s(%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:111 #, c-format msgid "Trying to listen on %s:%s(%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:117 #, fuzzy, c-format msgid "Failed to create socket for listening at TCP port %s(%s): %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/hed/mcc/tcp/MCCTCP.cpp:119 #, fuzzy, c-format msgid "Failed to create socket for listening at %s:%s(%s): %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/hed/mcc/tcp/MCCTCP.cpp:134 #, c-format msgid "" "Failed to limit socket to IPv6 at TCP port %s - may cause errors for IPv4 at " "same port" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:136 #, c-format msgid "" "Failed to limit socket to IPv6 at %s:%s - may cause errors for IPv4 at same " "port" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:144 #, c-format msgid "Failed to bind socket for TCP port %s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:146 #, c-format msgid "Failed to bind socket for %s:%s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:161 #, c-format msgid "Failed to 
listen at TCP port %s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:163 #, c-format msgid "Failed to listen at %s:%s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:180 #, c-format msgid "Listening on TCP port %s(%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:182 #, c-format msgid "Listening on %s:%s(%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:189 #, c-format msgid "Failed to start listening on any address for %s:%s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:191 #, c-format msgid "Failed to start listening on any address for %s:%s(IPv%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:197 msgid "No listening ports initiated" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:208 msgid "dropped" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:208 msgid "put on hold" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:208 #, c-format msgid "Setting connections limit to %i, connections over limit will be %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:212 msgid "Failed to start thread for listening" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:245 msgid "Failed to start thread for communication" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:271 msgid "Failed while waiting for connection request" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:293 msgid "Failed to accept connection request" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:302 msgid "Too many connections - dropping new one" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:309 msgid "Too many connections - waiting for old to close" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:533 msgid "next chain element called" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:548 msgid "Only Raw Buffer payload is supported for output" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:556 src/hed/mcc/tcp/MCCTCP.cpp:655 #: src/hed/mcc/tls/MCCTLS.cpp:542 msgid "Failed to send content of buffer" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:568 msgid "TCP executor is removed" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:570 #, c-format msgid "Sockets do not match on exit %i != %i" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:591 msgid "No Connect element specified" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:597 msgid "Missing Port in Connect element" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:603 msgid "Missing Host in Connect element" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:631 msgid "TCP client process called" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:67 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:83 #, c-format msgid "Failed to resolve %s (%s)" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:93 #, c-format msgid "Trying to connect %s(%s):%d" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:97 #, c-format msgid "Failed to create socket for connecting to %s(%s):%d - %s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:109 #, c-format msgid "" "Failed to get TCP socket options for connection to %s(%s):%d - timeout won't " "work - %s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:116 #, c-format msgid "Failed to connect to %s(%s):%i - %s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:126 #, c-format msgid "Timeout connecting to %s(%s):%i - %i s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:134 #, c-format msgid "Failed while waiting for connection to %s(%s):%i - %s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:144 #, c-format msgid "Failed to connect to %s(%s):%i" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:200 msgid "" "Received message out-of-band (not critical, ERROR level is just for " "debugging purposes)" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:293 #, fuzzy, c-format msgid "Using DH parameters from file: %s" msgstr "voms 
szerver fájljának az elérési útvonala" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:296 msgid "Failed to open file with DH parameters for reading" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:301 #, fuzzy msgid "Failed to read file with DH parameters" msgstr "Nem sikerült listázni a meta adatokat" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:304 #, fuzzy msgid "Failed to apply DH parameters" msgstr "Túl sok paraméter" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:306 msgid "DH parameters applied" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:321 #, c-format msgid "Using curve with NID: %u" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:324 #, fuzzy msgid "Failed to generate EC key" msgstr "privát kulcs elérési útvonala" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:327 #, fuzzy msgid "Failed to apply ECDH parameters" msgstr "Túl sok paraméter" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:329 msgid "ECDH parameters applied" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:336 #, c-format msgid "Using cipher list: %s" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:352 #, c-format msgid "Using protocol options: 0x%x" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:39 msgid "Independent proxy - no rights granted" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:43 msgid "Proxy with all rights inherited" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:51 msgid "Proxy with empty policy - fail on unrecognized policy" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:56 #, c-format msgid "Proxy with specific policy: %s" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:60 msgid "Proxy with ARC Policy" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:62 msgid "Proxy with unknown policy - fail on unrecognized policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:77 #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:128 #, c-format msgid "Was expecting %s at the beginning of \"%s\"" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:82 #, c-format msgid "We only support CAs in Globus signing policy - %s is not supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:87 #, c-format msgid "We only support X509 CAs in Globus signing policy - %s is not supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:92 msgid "Missing CA subject in Globus signing policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:102 msgid "Negative rights are not supported in Globus signing policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:106 #, c-format msgid "Unknown rights in Globus signing policy - %s" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:111 #, c-format msgid "" "Only globus rights are supported in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:116 #, c-format msgid "" "Only signing rights are supported in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:133 #, c-format msgid "" "We only support subjects conditions in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:138 #, c-format msgid "" "We only support globus conditions in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:144 msgid "Missing condition subjects in Globus signing policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:220 msgid "Unknown element in Globus signing policy" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:218 #, fuzzy msgid "Critical VOMS attribute processing failed" msgstr "VOMS attribútumok értelmezése sikertelen" 
#: src/hed/mcc/tls/MCCTLS.cpp:226 #, fuzzy msgid "VOMS attribute validation failed" msgstr "VOMS attribútumok értelmezése sikertelen" #: src/hed/mcc/tls/MCCTLS.cpp:228 msgid "VOMS attribute is ignored due to processing/validation error" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:420 src/hed/mcc/tls/MCCTLS.cpp:559 #: src/hed/mcc/tls/MCCTLS.cpp:578 #, fuzzy, c-format msgid "Failed to establish connection: %s" msgstr "Nem sikerült betölteni a konfigurációt" #: src/hed/mcc/tls/MCCTLS.cpp:439 src/hed/mcc/tls/MCCTLS.cpp:521 #, c-format msgid "Peer name: %s" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:441 src/hed/mcc/tls/MCCTLS.cpp:523 #, c-format msgid "Identity name: %s" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:443 src/hed/mcc/tls/MCCTLS.cpp:525 #, c-format msgid "CA name: %s" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:450 msgid "Failed to process security attributes in TLS MCC for incoming message" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:458 msgid "Security check failed in TLS MCC for incoming message" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:531 msgid "Security check failed for outgoing TLS message" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:563 msgid "Security check failed for incoming TLS message" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:93 #, fuzzy msgid "" "Failed to allocate memory for certificate subject while matching policy." msgstr "publikus kulcs elérési útvonala" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:98 msgid "" "Failed to retrieve link to TLS stream. Additional policy matching is skipped." msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:128 #, c-format msgid "Certificate %s already expired" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:136 #, c-format msgid "Certificate %s will expire in %s" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:157 msgid "Failed to store application data" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:185 msgid "Failed to retrieve application data from OpenSSL" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:257 src/hed/mcc/tls/PayloadTLSMCC.cpp:356 msgid "Can not create the SSL Context object" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:270 src/hed/mcc/tls/PayloadTLSMCC.cpp:376 msgid "Can't set OpenSSL verify flags" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:289 src/hed/mcc/tls/PayloadTLSMCC.cpp:390 msgid "Can not create the SSL object" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:299 #, fuzzy msgid "Faile to assign hostname extension" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:313 msgid "Failed to establish SSL connection" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:316 src/hed/mcc/tls/PayloadTLSMCC.cpp:405 #, c-format msgid "Using cipher: %s" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:402 msgid "Failed to accept SSL connection" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:455 #, c-format msgid "Failed to shut down SSL: %s" msgstr "" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:47 msgid "" "ArcAuthZ: failed to initiate all PDPs - this instance will be non-functional" msgstr "" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:70 msgid "PDP: missing name attribute" msgstr "" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:74 #, c-format msgid "PDP: %s (%s)" msgstr "" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:79 #, c-format msgid "PDP: %s (%s) can not be loaded" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluationCtx.cpp:251 #, fuzzy, c-format msgid "There are %d RequestItems" msgstr "%d darab publikus tanúsítvány van a válasz üzenetben" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:60 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:55 msgid "Can not parse 
classname for FunctionFactory from configuration" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:68 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:63 msgid "Can not parse classname for AttributeFactory from configuration" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:76 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:71 msgid "" "Can not parse classname for CombiningAlgorithmFactory from configuration" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:84 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:79 msgid "Can not parse classname for Request from configuration" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:93 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:88 msgid "Can not parse classname for Policy from configuration" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:105 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:100 msgid "Can not dynamically produce AttributeFactory" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:110 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:105 msgid "Can not dynamically produce FnFactory" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:115 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:110 msgid "Can not dynamically produce AlgFacroty" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:126 #: src/hed/shc/gaclpdp/GACLEvaluator.cpp:31 #: src/hed/shc/gaclpdp/GACLEvaluator.cpp:37 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:121 msgid "Can not create PolicyStore object" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:177 src/hed/shc/test.cpp:183 #: src/hed/shc/testinterface_arc.cpp:102 src/hed/shc/testinterface_xacml.cpp:54 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:172 msgid "Can not dynamically produce Request" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:261 #, c-format msgid "Result value (0=Permit, 1=Deny, 2=Indeterminate, 3=Not_Applicable): %d" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:110 msgid "Can not find ArcPDPContext" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:139 src/hed/shc/xacmlpdp/XACMLPDP.cpp:117 msgid "Evaluator does not support loadable Combining Algorithms" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:143 src/hed/shc/xacmlpdp/XACMLPDP.cpp:121 #, c-format msgid "Evaluator does not support specified Combining Algorithm - %s" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:155 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:84 #: src/hed/shc/gaclpdp/GACLPDP.cpp:118 src/hed/shc/test.cpp:94 #: src/hed/shc/testinterface_arc.cpp:37 src/hed/shc/testinterface_xacml.cpp:37 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:133 msgid "Can not dynamically produce Evaluator" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:158 msgid "Evaluator for ArcPDP was not loaded" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:165 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:57 #: src/hed/shc/gaclpdp/GACLPDP.cpp:128 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:87 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:143 src/tests/echo/echo.cpp:108 msgid "Missing security object in message" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:173 src/hed/shc/arcpdp/ArcPDP.cpp:181 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:137 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:143 #: src/hed/shc/gaclpdp/GACLPDP.cpp:136 src/hed/shc/gaclpdp/GACLPDP.cpp:144 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:95 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:103 #: src/tests/echo/echo.cpp:116 src/tests/echo/echo.cpp:123 msgid "Failed to convert security information to ARC request" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:189 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:150 #: 
src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:111 #, c-format msgid "ARC Auth. request: %s" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:192 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:153 #: src/hed/shc/gaclpdp/GACLPDP.cpp:155 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:114 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:170 msgid "No requested security information was collected" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:199 msgid "Not authorized by arc.pdp - failed to get response from Evaluator" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:245 msgid "Authorized by arc.pdp" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:246 msgid "" "Not authorized by arc.pdp - some of the RequestItem elements do not satisfy " "Policy" msgstr "" #: src/hed/shc/arcpdp/ArcPolicy.cpp:56 src/hed/shc/arcpdp/ArcPolicy.cpp:70 #: src/hed/shc/gaclpdp/GACLPolicy.cpp:46 src/hed/shc/gaclpdp/GACLPolicy.cpp:59 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:48 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:64 msgid "Policy is empty" msgstr "" #: src/hed/shc/arcpdp/ArcPolicy.cpp:114 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:115 #, c-format msgid "PolicyId: %s Alg inside this policy is:-- %s" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:75 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:129 msgid "No delegation policies in this context and message - passing through" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:95 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:109 msgid "Failed to convert security information to ARC policy" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:116 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:123 #, c-format msgid "ARC delegation policy: %s" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:161 msgid "No authorization response was returned" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:164 #, c-format msgid "There are %d requests, which satisfy at least one policy" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:183 msgid "Delegation authorization passed" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:185 msgid "Delegation authorization failed" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:63 msgid "" "Missing CertificatePath element or ProxyPath element, or " " is missing" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:68 msgid "" "Missing or empty KeyPath element, or is missing" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:74 msgid "Missing or empty CertificatePath or CACertificatesDir element" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:81 #, c-format msgid "Delegation role not supported: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:90 #, c-format msgid "Delegation type not supported: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:115 msgid "Failed to acquire delegation context" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:143 #: src/hed/shc/delegationsh/DelegationSH.cpp:254 msgid "Can't create delegation context" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:149 msgid "Delegation handler with delegatee role starts to process" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:152 #: src/services/a-rex/arex.cpp:592 src/services/candypond/CandyPond.cpp:526 #: src/services/data-staging/DataDeliveryService.cpp:624 msgid "process: POST" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:159 #: src/services/a-rex/arex.cpp:599 src/services/candypond/CandyPond.cpp:535 #: src/services/data-staging/DataDeliveryService.cpp:633 #: 
src/services/wrappers/python/pythonwrapper.cpp:416 msgid "input is not SOAP" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:166 #, c-format msgid "Delegation service: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:181 #: src/hed/shc/delegationsh/DelegationSH.cpp:188 #: src/tests/client/test_ClientX509Delegation_ARC.cpp:55 #, c-format msgid "Can not get the delegation credential: %s from delegation service: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:204 #: src/hed/shc/delegationsh/DelegationSH.cpp:268 #, c-format msgid "Delegated credential identity: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:205 #, c-format msgid "" "The delegated credential got from delegation service is stored into path: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:218 msgid "The endpoint of delegation service should be configured" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:228 #: src/hed/shc/delegationsh/DelegationSH.cpp:340 msgid "Delegation handler with delegatee role ends" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:260 msgid "Delegation handler with delegator role starts to process" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:269 #, c-format msgid "The delegated credential got from path: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:290 #, c-format msgid "Can not create delegation crendential to delegation service: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:328 msgid "output is not SOAP" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:339 #, c-format msgid "" "Succeeded to send DelegationService: %s and DelegationID: %s info to peer " "service" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:345 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:220 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:101 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:94 msgid "Incoming Message is not SOAP" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:352 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:341 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:123 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:108 msgid "Outgoing Message is not SOAP" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:356 msgid "Delegation handler is not configured" msgstr "" #: src/hed/shc/gaclpdp/GACLPDP.cpp:121 msgid "Evaluator for GACLPDP was not loaded" msgstr "" #: src/hed/shc/gaclpdp/GACLPDP.cpp:152 #, c-format msgid "GACL Auth. 
request: %s" msgstr "" #: src/hed/shc/gaclpdp/GACLPolicy.cpp:50 src/hed/shc/gaclpdp/GACLPolicy.cpp:63 msgid "Policy is not gacl" msgstr "" #: src/hed/shc/legacy/ConfigParser.cpp:13 #, fuzzy msgid "Configuration file not specified" msgstr "Nincs megadva feladat leírás bemeneti adatként" #: src/hed/shc/legacy/ConfigParser.cpp:18 #: src/hed/shc/legacy/ConfigParser.cpp:28 #: src/hed/shc/legacy/ConfigParser.cpp:33 msgid "Configuration file can not be read" msgstr "" #: src/hed/shc/legacy/ConfigParser.cpp:43 #, c-format msgid "Configuration file is broken - block name is too short: %s" msgstr "" #: src/hed/shc/legacy/ConfigParser.cpp:47 #, c-format msgid "Configuration file is broken - block name does not end with ]: %s" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:39 src/hed/shc/legacy/LegacyPDP.cpp:119 msgid "Configuration file not specified in ConfigBlock" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:48 src/hed/shc/legacy/LegacyPDP.cpp:128 msgid "BlockName is empty" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:108 #, fuzzy, c-format msgid "Failed processing user mapping command: %s %s" msgstr "voms szerver fájljának az elérési útvonala" #: src/hed/shc/legacy/LegacyMap.cpp:114 #: src/services/gridftpd/fileroot_config.cpp:320 #, c-format msgid "Failed to change mapping stack processing policy in: %s = %s" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:174 msgid "LegacyMap: no configurations blocks defined" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:196 src/hed/shc/legacy/LegacyPDP.cpp:239 #, c-format msgid "" "LegacyPDP: there is no %s Sec Attribute defined. Probably ARC Legacy Sec " "Handler is not configured or failed." msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:201 src/hed/shc/legacy/LegacyPDP.cpp:244 msgid "LegacyPDP: ARC Legacy Sec Attribute not recognized." 
msgstr "" #: src/hed/shc/legacy/LegacyPDP.cpp:138 #, fuzzy, c-format msgid "Failed to parse configuration file %s" msgstr "voms szerver fájljának az elérési útvonala" #: src/hed/shc/legacy/LegacyPDP.cpp:144 #, c-format msgid "Block %s not found in configuration file %s" msgstr "" #: src/hed/shc/legacy/LegacySecHandler.cpp:40 #: src/hed/shc/legacy/LegacySecHandler.cpp:118 msgid "LegacySecHandler: configuration file not specified" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:146 src/hed/shc/legacy/arc_lcmaps.cpp:161 #, c-format msgid "" "Failed to convert GSI credential to GSS credential (major: %d, minor: %d)" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:171 src/hed/shc/legacy/arc_lcmaps.cpp:186 msgid "Missing subject name" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:176 src/hed/shc/legacy/arc_lcmaps.cpp:191 #, fuzzy msgid "Missing path of credentials file" msgstr "kérési fájl elérési útvonala" #: src/hed/shc/legacy/arc_lcas.cpp:182 msgid "Missing name of LCAS library" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:199 #, c-format msgid "Can't load LCAS library %s: %s" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:209 #, c-format msgid "Can't find LCAS functions in a library %s" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:219 msgid "Failed to initialize LCAS" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:234 msgid "Failed to terminate LCAS" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:197 msgid "Missing name of LCMAPS library" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:211 msgid "Can't read policy names" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:222 #, c-format msgid "Can't load LCMAPS library %s: %s" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:234 #, c-format msgid "Can't find LCMAPS functions in a library %s" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:246 msgid "LCMAPS has lcmaps_run" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:247 msgid "LCMAPS has getCredentialData" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:251 msgid "Failed to initialize LCMAPS" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:291 #, c-format msgid "LCMAPS returned invalid GID: %u" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:294 msgid "LCMAPS did not return any GID" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:297 #, c-format msgid "LCMAPS returned UID which has no username: %u" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:300 #, c-format msgid "LCMAPS returned invalid UID: %u" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:303 msgid "LCMAPS did not return any UID" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:312 msgid "Failed to terminate LCMAPS" msgstr "" #: src/hed/shc/legacy/auth.cpp:35 src/services/gridftpd/auth/auth.cpp:35 #, c-format msgid "Unexpected argument for 'all' rule - %s" msgstr "" #: src/hed/shc/legacy/auth.cpp:337 #, c-format msgid "Credentials stored in temporary file %s" msgstr "" #: src/hed/shc/legacy/auth.cpp:346 #, c-format msgid "Assigned to authorization group %s" msgstr "" #: src/hed/shc/legacy/auth.cpp:351 #, fuzzy, c-format msgid "Assigned to userlist %s" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/shc/legacy/auth_file.cpp:22 #: src/services/gridftpd/auth/auth_file.cpp:22 #, c-format msgid "Failed to read file %s" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:30 #, fuzzy msgid "Missing subject in configuration" msgstr "verzió információ kiírása" #: src/hed/shc/legacy/auth_otokens.cpp:35 #, fuzzy msgid "Missing issuer in configuration" msgstr "verzió információ kiírása" #: src/hed/shc/legacy/auth_otokens.cpp:40 #, fuzzy msgid "Missing audience in 
configuration" msgstr "verzió információ kiírása" #: src/hed/shc/legacy/auth_otokens.cpp:45 #, fuzzy msgid "Missing scope in configuration" msgstr "voms szerver fájljának az elérési útvonala" #: src/hed/shc/legacy/auth_otokens.cpp:50 src/hed/shc/legacy/auth_voms.cpp:47 #: src/services/gridftpd/auth/auth_voms.cpp:51 msgid "Missing group in configuration" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:53 #, fuzzy, c-format msgid "Rule: subject: %s" msgstr "Tárgy: %s" #: src/hed/shc/legacy/auth_otokens.cpp:54 #, fuzzy, c-format msgid "Rule: issuer: %s" msgstr "modul neve: %s" #: src/hed/shc/legacy/auth_otokens.cpp:55 #, fuzzy, c-format msgid "Rule: audience: %s" msgstr "modul neve: %s" #: src/hed/shc/legacy/auth_otokens.cpp:56 #, fuzzy, c-format msgid "Rule: scope: %s" msgstr "modul neve: %s" #: src/hed/shc/legacy/auth_otokens.cpp:57 src/hed/shc/legacy/auth_voms.cpp:66 #: src/services/gridftpd/auth/auth_voms.cpp:68 #, c-format msgid "Rule: group: %s" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:60 #, fuzzy, c-format msgid "Match issuer: %s" msgstr "Válasz: %s" #: src/hed/shc/legacy/auth_otokens.cpp:66 #, fuzzy, c-format msgid "Matched: %s %s %s" msgstr "Feltöltve %s" #: src/hed/shc/legacy/auth_otokens.cpp:80 src/hed/shc/legacy/auth_voms.cpp:93 #: src/services/gridftpd/auth/auth_voms.cpp:98 msgid "Matched nothing" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:45 src/hed/shc/legacy/unixmap.cpp:215 #: src/services/gridftpd/auth/auth_plugin.cpp:70 #: src/services/gridftpd/auth/unixmap.cpp:214 #, c-format msgid "Plugin %s returned: %u" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:49 src/hed/shc/legacy/unixmap.cpp:219 #, c-format msgid "Plugin %s timeout after %u seconds" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:52 src/hed/shc/legacy/unixmap.cpp:222 #, c-format msgid "Plugin %s failed to start" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:54 src/hed/shc/legacy/unixmap.cpp:224 #, c-format msgid "Plugin %s printed: %s" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:55 src/hed/shc/legacy/unixmap.cpp:212 #: src/hed/shc/legacy/unixmap.cpp:225 #, c-format msgid "Plugin %s error: %s" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:42 #: src/services/gridftpd/auth/auth_voms.cpp:45 msgid "Missing VO in configuration" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:52 #: src/services/gridftpd/auth/auth_voms.cpp:57 msgid "Missing role in configuration" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:57 #: src/services/gridftpd/auth/auth_voms.cpp:63 msgid "Missing capabilities in configuration" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:62 msgid "Too many arguments in configuration" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:65 #: src/services/gridftpd/auth/auth_voms.cpp:67 #, fuzzy, c-format msgid "Rule: vo: %s" msgstr "Kérés: %s" #: src/hed/shc/legacy/auth_voms.cpp:67 #: src/services/gridftpd/auth/auth_voms.cpp:69 #, fuzzy, c-format msgid "Rule: role: %s" msgstr "modul neve: %s" #: src/hed/shc/legacy/auth_voms.cpp:68 #: src/services/gridftpd/auth/auth_voms.cpp:70 #, c-format msgid "Rule: capabilities: %s" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:71 #: src/services/gridftpd/auth/auth_voms.cpp:77 #, c-format msgid "Match vo: %s" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:78 #, c-format msgid "Matched: %s %s %s %s" msgstr "" #: src/hed/shc/legacy/simplemap.cpp:70 #: src/services/gridftpd/auth/simplemap.cpp:68 #, c-format msgid "SimpleMap: acquired new unmap time of %u seconds" msgstr "" #: src/hed/shc/legacy/simplemap.cpp:72 #: src/services/gridftpd/auth/simplemap.cpp:70 msgid 
"SimpleMap: wrong number in unmaptime command" msgstr "" #: src/hed/shc/legacy/simplemap.cpp:85 src/hed/shc/legacy/simplemap.cpp:90 #: src/services/gridftpd/auth/simplemap.cpp:83 #: src/services/gridftpd/auth/simplemap.cpp:88 #, c-format msgid "SimpleMap: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:65 src/hed/shc/legacy/unixmap.cpp:70 #: src/services/gridftpd/auth/unixmap.cpp:63 #: src/services/gridftpd/auth/unixmap.cpp:68 msgid "Mapping policy option has empty value" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:80 src/services/gridftpd/auth/unixmap.cpp:78 #, fuzzy, c-format msgid "Unsupported mapping policy action: %s" msgstr "Nem támogatott url: %s" #: src/hed/shc/legacy/unixmap.cpp:91 src/services/gridftpd/auth/unixmap.cpp:89 #, fuzzy, c-format msgid "Unsupported mapping policy option: %s" msgstr "Nem támogatott url: %s" #: src/hed/shc/legacy/unixmap.cpp:103 src/hed/shc/legacy/unixmap.cpp:108 #: src/services/gridftpd/auth/unixmap.cpp:100 #: src/services/gridftpd/auth/unixmap.cpp:105 msgid "User name mapping command is empty" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:116 #: src/services/gridftpd/auth/unixmap.cpp:113 #, c-format msgid "User name mapping has empty authgroup: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:147 #: src/services/gridftpd/auth/unixmap.cpp:147 #, c-format msgid "Unknown user name mapping rule %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:156 src/hed/shc/legacy/unixmap.cpp:161 #: src/hed/shc/legacy/unixmap.cpp:177 src/hed/shc/legacy/unixmap.cpp:183 #: src/services/gridftpd/auth/unixmap.cpp:175 #: src/services/gridftpd/auth/unixmap.cpp:180 #: src/services/gridftpd/auth/unixmap.cpp:196 msgid "Plugin (user mapping) command is empty" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:167 #: src/services/gridftpd/auth/unixmap.cpp:186 #, c-format msgid "Plugin (user mapping) timeout is not a number: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:171 #: src/services/gridftpd/auth/unixmap.cpp:190 #, c-format msgid "Plugin (user mapping) timeout is wrong number: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:203 #, c-format msgid "Plugin %s returned no username" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:208 #: src/services/gridftpd/auth/unixmap.cpp:211 #, c-format msgid "Plugin %s returned too much: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:211 #, c-format msgid "Plugin %s returned no mapping" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:234 msgid "User subject match is missing user subject." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:238 #: src/services/gridftpd/auth/unixmap.cpp:230 #, c-format msgid "Mapfile at %s can't be opened." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:262 #: src/services/gridftpd/auth/unixmap.cpp:255 msgid "User pool mapping is missing user subject." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:267 #: src/services/gridftpd/auth/unixmap.cpp:260 #, c-format msgid "User pool at %s can't be opened." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:272 #: src/services/gridftpd/auth/unixmap.cpp:265 #, c-format msgid "User pool at %s failed to perform user mapping." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:290 #: src/services/gridftpd/auth/unixmap.cpp:283 #, c-format msgid "User name direct mapping is missing user name: %s." 
msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:63 msgid "OTokens: Attr: message" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:68 #, c-format msgid "OTokens: Attr: %s = %s" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:73 #, c-format msgid "OTokens: Attr: token: %s" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:76 #, c-format msgid "OTokens: Attr: token: bearer: %s" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:146 msgid "OTokens: Handle" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:148 msgid "OTokens: Handle: message" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:151 #, fuzzy msgid "Failed to create OTokens security attributes" msgstr "Nem sikerült elküldeni a kérést" #: src/hed/shc/otokens/OTokensSH.cpp:155 #, c-format msgid "OTokens: Handle: attributes created: subject = %s" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:48 msgid "Creating a pdpservice client" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:80 msgid "Arc policy can not been carried by SAML2.0 profile of XACML" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:152 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:185 msgid "Policy Decision Service invocation failed" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:155 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:188 #: src/tests/client/test_ClientInterface.cpp:88 #: src/tests/client/test_ClientSAML2SSO.cpp:81 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:100 #: src/tests/echo/test_clientinterface.cpp:82 #: src/tests/echo/test_clientinterface.cpp:149 #: src/tests/echo/test_clientinterface.py:32 msgid "There was no SOAP response" msgstr "Nincs SOAP-os válasz" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:170 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:204 msgid "Authorized from remote pdp service" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:171 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:205 msgid "Unauthorized from remote pdp service" msgstr "" #: src/hed/shc/saml2sso_assertionconsumersh/SAML2SSO_AssertionConsumerSH.cpp:69 msgid "Can not get SAMLAssertion SecAttr from message context" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:152 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:44 msgid "Missing or empty CertificatePath element" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:157 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:49 msgid "Missing or empty KeyPath element" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:163 msgid "" "Both of CACertificatePath and CACertificatesDir elements missing or empty" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:175 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:61 msgid "" "Missing or empty CertificatePath or CACertificatesDir element; will only " "check the signature, will not do message authentication" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:179 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:65 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:65 #, c-format msgid "Processing type not supported: %s" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:199 msgid "Failed to parse SAML Token from incoming SOAP" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:209 msgid "Failed to authenticate SAML Token inside the incoming SOAP" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:212 msgid "Succeeded to authenticate SAMLToken" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:275 #, fuzzy, c-format msgid "No response from AA service %s" msgstr 
"Nincs válasz a voms szervertÅ‘l" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:279 #, c-format msgid "SOAP Request to AA service %s failed" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:287 msgid "Cannot find content under response soap message" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:291 msgid "Cannot find under response soap message:" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:308 msgid "The Response is not going to this end" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:315 msgid "The StatusCode is Success" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:321 msgid "Succeeded to verify the signature under " msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:324 msgid "Failed to verify the signature under " msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:335 msgid "Failed to generate SAML Token for outgoing SOAP" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:345 msgid "SAML Token handler is not configured" msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:29 #, c-format msgid "Access list location: %s" msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:39 msgid "" "No policy file or DNs specified for simplelist.pdp, please set location " "attribute or at least one DN element for simplelist PDP node in " "configuration." msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:42 #, fuzzy, c-format msgid "Subject to match: %s" msgstr "Tárgy: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:45 #, fuzzy, c-format msgid "Policy subject: %s" msgstr "Tárgy: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:47 #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:73 #, c-format msgid "Authorized from simplelist.pdp: %s" msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:54 msgid "" "The policy file setup for simplelist.pdp does not exist, please check " "location attribute for simplelist PDP node in service configuration" msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:61 #, fuzzy, c-format msgid "Policy line: %s" msgstr "Proxy típusa: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:79 #, c-format msgid "Not authorized from simplelist.pdp: %s" msgstr "" #: src/hed/shc/test.cpp:27 src/hed/shc/testinterface_arc.cpp:26 #: src/hed/shc/testinterface_xacml.cpp:26 msgid "Start test" msgstr "" #: src/hed/shc/test.cpp:101 msgid "Input request from a file: Request.xml" msgstr "" #: src/hed/shc/test.cpp:107 src/hed/shc/test.cpp:197 #: src/hed/shc/testinterface_arc.cpp:124 #, c-format msgid "There is %d subjects, which satisfy at least one policy" msgstr "" #: src/hed/shc/test.cpp:121 #, c-format msgid "Attribute Value (1): %s" msgstr "" #: src/hed/shc/test.cpp:132 msgid "Input request from code" msgstr "" #: src/hed/shc/test.cpp:211 #, c-format msgid "Attribute Value (2): %s" msgstr "" #: src/hed/shc/testinterface_arc.cpp:75 src/hed/shc/testinterface_xacml.cpp:46 msgid "Can not dynamically produce Policy" msgstr "" #: src/hed/shc/testinterface_arc.cpp:138 #, c-format msgid "Attribute Value inside Subject: %s" msgstr "" #: src/hed/shc/testinterface_arc.cpp:148 msgid "The request has passed the policy evaluation" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:43 msgid "Missing or empty PasswordSource element" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:54 #, c-format msgid "Password encoding type not supported: %s" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:59 msgid "Missing or empty Username element" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:79 msgid "The 
payload of incoming message is empty" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:84 msgid "Failed to cast PayloadSOAP from incoming payload" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:89 msgid "Failed to parse Username Token from incoming SOAP" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:95 msgid "Failed to authenticate Username Token inside the incoming SOAP" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:98 msgid "Succeeded to authenticate UsernameToken" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:108 msgid "The payload of outgoing message is empty" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:113 msgid "Failed to cast PayloadSOAP from outgoing payload" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:119 msgid "Failed to generate Username Token for outgoing SOAP" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:127 msgid "Username Token handler is not configured" msgstr "" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:81 msgid "Failed to parse X509 Token from incoming SOAP" msgstr "" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:85 msgid "Failed to verify X509 Token inside the incoming SOAP" msgstr "" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:89 msgid "Failed to authenticate X509 Token inside the incoming SOAP" msgstr "" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:92 msgid "Succeeded to authenticate X509Token" msgstr "" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:102 msgid "Failed to generate X509 Token for outgoing SOAP" msgstr "" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:112 msgid "X509 Token handler is not configured" msgstr "" #: src/hed/shc/xacmlpdp/XACMLApply.cpp:29 msgid "Can not create function: FunctionId does not exist" msgstr "" #: src/hed/shc/xacmlpdp/XACMLApply.cpp:33 #: src/hed/shc/xacmlpdp/XACMLTarget.cpp:40 #, c-format msgid "Can not create function %s" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:88 msgid "Can not find XACMLPDPContext" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:136 msgid "Evaluator for XACMLPDP was not loaded" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:151 src/hed/shc/xacmlpdp/XACMLPDP.cpp:159 msgid "Failed to convert security information to XACML request" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:167 #, c-format msgid "XACML request: %s" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:179 msgid "Authorized from xacml.pdp" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:180 msgid "UnAuthorized from xacml.pdp" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:55 msgid "Can not find element with proper namespace" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:132 msgid "No target available inside the policy" msgstr "" #: src/hed/shc/xacmlpdp/XACMLRequest.cpp:34 msgid "Request is empty" msgstr "" #: src/hed/shc/xacmlpdp/XACMLRequest.cpp:39 msgid "Can not find element with proper namespace" msgstr "" #: src/hed/shc/xacmlpdp/XACMLRule.cpp:35 msgid "Invalid Effect" msgstr "" #: src/hed/shc/xacmlpdp/XACMLRule.cpp:48 msgid "No target available inside the rule" msgstr "" #: src/libs/data-staging/DTR.cpp:82 src/libs/data-staging/DTR.cpp:86 #, c-format msgid "Could not handle endpoint %s" msgstr "" #: src/libs/data-staging/DTR.cpp:96 msgid "Source is the same as destination" msgstr "" #: src/libs/data-staging/DTR.cpp:175 #, fuzzy, c-format msgid "Invalid ID: %s" msgstr "Érvénytelen URL: %s" #: src/libs/data-staging/DTR.cpp:212 #, c-format msgid "%s->%s" msgstr "" #: src/libs/data-staging/DTR.cpp:320 #, c-format msgid "No callback 
for %s defined" msgstr "" #: src/libs/data-staging/DTR.cpp:335 #, c-format msgid "NULL callback for %s" msgstr "" #: src/libs/data-staging/DTR.cpp:338 #, c-format msgid "Request to push to unknown owner - %u" msgstr "" #: src/libs/data-staging/DTRList.cpp:216 #, c-format msgid "Boosting priority from %i to %i due to incoming higher priority DTR" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:48 #: src/libs/data-staging/DataDelivery.cpp:72 msgid "Received invalid DTR" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:54 #, c-format msgid "Delivery received new DTR %s with source: %s, destination: %s" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:68 msgid "Received no DTR" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:80 #, c-format msgid "Cancelling DTR %s with source: %s, destination: %s" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:91 #, c-format msgid "DTR %s requested cancel but no active transfer" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:147 #, c-format msgid "Cleaning up after failure: deleting %s" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:188 #: src/libs/data-staging/DataDelivery.cpp:263 #: src/libs/data-staging/DataDelivery.cpp:303 #: src/libs/data-staging/DataDelivery.cpp:323 msgid "Failed to delete delivery object or deletion timed out" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:254 #, c-format msgid "Transfer finished: %llu bytes transferred %s" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:329 msgid "Data delivery loop exited" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:45 #, fuzzy msgid "No source defined" msgstr "Státusz lekérdezés sikertelen" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:49 #, fuzzy msgid "No destination defined" msgstr "Nem sikerült feloldani a célállomást" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:139 #, c-format msgid "Bad checksum format %s" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:178 #, fuzzy, c-format msgid "Failed to run command: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:213 #, c-format msgid "DataDelivery: %s" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:225 #, c-format msgid "DataStagingDelivery exited with code %i" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:244 #, c-format msgid "Transfer killed after %i seconds without communication" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:67 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:315 #, c-format msgid "Connecting to Delivery service at %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:94 #, c-format msgid "Failed to set up credential delegation with %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:100 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:174 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:240 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:326 #, c-format msgid "" "Request:\n" "%s" msgstr "" "Kérés:\n" "%s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:106 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:332 #, c-format msgid "Could not connect to service %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:114 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:340 #, fuzzy, c-format msgid "No SOAP response from Delivery service %s" msgstr "Nincs válasz a voms szervertÅ‘l" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:119 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:193 #: 
src/libs/data-staging/DataDeliveryRemoteComm.cpp:267 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:346 #, c-format msgid "" "Response:\n" "%s" msgstr "" "Válasz:\n" "%s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:128 #, fuzzy, c-format msgid "Failed to start transfer request: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:135 #, c-format msgid "Bad format in XML response from service at %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:143 #, c-format msgid "Could not make new transfer request: %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:148 #, c-format msgid "Started remote Delivery at %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:181 #, fuzzy, c-format msgid "Failed to send cancel request: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:188 #, fuzzy msgid "Failed to cancel: No SOAP response" msgstr "Nincs SOAP-os válasz" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:202 #, fuzzy, c-format msgid "Failed to cancel transfer request: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:209 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:290 #, c-format msgid "Bad format in XML response: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:216 #, fuzzy, c-format msgid "Failed to cancel: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:260 #, fuzzy msgid "No SOAP response from delivery service" msgstr "Nincs válasz a voms szervertől" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:281 #, fuzzy, c-format msgid "Failed to query state: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:355 #, c-format msgid "SOAP fault from delivery service at %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:363 #, c-format msgid "Bad format in XML response from delivery service at %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:371 #, c-format msgid "Error pinging delivery service at %s: %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:379 #, c-format msgid "Dir %s allowed at service %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:473 #, c-format msgid "" "DataDelivery log tail:\n" "%s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:486 #, fuzzy msgid "Failed locating credentials" msgstr "Nem sikerült listázni a meta adatokat" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:491 #, fuzzy msgid "Failed to initiate client connection" msgstr "Nem sikerült betölteni a konfigurációt" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:497 msgid "Client connection has no entry point" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:507 msgid "Failed to initiate delegation credentials" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:97 #, c-format msgid "%5u s: %10.1f kB %8.1f kB/s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:152 msgid "Unexpected arguments" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:155 msgid "Source URL missing" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:158 #, fuzzy msgid "Destination URL missing" msgstr "Célállomás: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:162 #, fuzzy, c-format msgid "Source URL not valid: %s" msgstr "A lekérdezés nem XML helyes" #: 
src/libs/data-staging/DataStagingDelivery.cpp:166 #, fuzzy, c-format msgid "Destination URL not valid: %s" msgstr "Célállomás: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:223 #, c-format msgid "Unknown transfer option: %s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:248 #, c-format msgid "Source URL not supported: %s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:253 #: src/libs/data-staging/DataStagingDelivery.cpp:272 msgid "No credentials supplied" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:267 #, fuzzy, c-format msgid "Destination URL not supported: %s" msgstr "A fileset regisztcáció nem támogatott még" #: src/libs/data-staging/DataStagingDelivery.cpp:316 #, c-format msgid "Will calculate %s checksum" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:327 msgid "Cannot use supplied --size option" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:540 #, c-format msgid "Checksum mismatch between calculated checksum %s and source checksum %s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:550 #, fuzzy, c-format msgid "Failed cleaning up destination %s" msgstr "Nem sikerült feloldani a célállomást" #: src/libs/data-staging/Processor.cpp:59 #: src/services/candypond/CandyPond.cpp:117 msgid "Error creating cache" msgstr "" #: src/libs/data-staging/Processor.cpp:83 #, c-format msgid "Forcing re-download of file %s" msgstr "" #: src/libs/data-staging/Processor.cpp:100 #, c-format msgid "Will wait around %is" msgstr "" #: src/libs/data-staging/Processor.cpp:119 #, c-format msgid "Force-checking source of cache file %s" msgstr "" #: src/libs/data-staging/Processor.cpp:122 #, c-format msgid "Source check requested but failed: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:142 msgid "Permission checking failed, will try downloading without using cache" msgstr "" #: src/libs/data-staging/Processor.cpp:172 #, c-format msgid "Will download to cache file %s" msgstr "" #: src/libs/data-staging/Processor.cpp:193 msgid "Looking up source replicas" msgstr "" #: src/libs/data-staging/Processor.cpp:210 #: src/libs/data-staging/Processor.cpp:317 #, c-format msgid "Skipping replica on local host %s" msgstr "" #: src/libs/data-staging/Processor.cpp:218 #: src/libs/data-staging/Processor.cpp:325 #, c-format msgid "No locations left for %s" msgstr "" #: src/libs/data-staging/Processor.cpp:239 #: src/libs/data-staging/Processor.cpp:481 #, fuzzy msgid "Resolving destination replicas" msgstr "Nem sikerült feloldani a célállomást" #: src/libs/data-staging/Processor.cpp:256 msgid "No locations for destination different from source found" msgstr "" #: src/libs/data-staging/Processor.cpp:267 msgid "Pre-registering destination in index service" msgstr "" #: src/libs/data-staging/Processor.cpp:293 msgid "Resolving source replicas in bulk" msgstr "" #: src/libs/data-staging/Processor.cpp:307 #, c-format msgid "No replicas found for %s" msgstr "" #: src/libs/data-staging/Processor.cpp:348 #, c-format msgid "Checking %s" msgstr "" #: src/libs/data-staging/Processor.cpp:357 #: src/libs/data-staging/Processor.cpp:415 msgid "Metadata of replica and index service differ" msgstr "" #: src/libs/data-staging/Processor.cpp:365 #, c-format msgid "Failed checking source replica %s: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:391 msgid "Querying source replicas in bulk" msgstr "" #: src/libs/data-staging/Processor.cpp:403 #, c-format msgid "Failed checking source replica: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:409 msgid 
"Failed checking source replica" msgstr "" #: src/libs/data-staging/Processor.cpp:449 msgid "Finding existing destination replicas" msgstr "" #: src/libs/data-staging/Processor.cpp:461 #, c-format msgid "Failed to delete replica %s: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:475 #, fuzzy, c-format msgid "Unregistering %s" msgstr "Figyelmen kívül hagyás: %s" #: src/libs/data-staging/Processor.cpp:486 #, fuzzy msgid "Pre-registering destination" msgstr "Nem sikerült feloldani a célállomást" #: src/libs/data-staging/Processor.cpp:492 #, c-format msgid "Failed to pre-clean destination: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:515 msgid "Preparing to stage source" msgstr "" #: src/libs/data-staging/Processor.cpp:528 #, c-format msgid "Source is not ready, will wait %u seconds" msgstr "" #: src/libs/data-staging/Processor.cpp:534 msgid "No physical files found for source" msgstr "" #: src/libs/data-staging/Processor.cpp:552 #, fuzzy msgid "Preparing to stage destination" msgstr "Nem sikerült feloldani a célállomást" #: src/libs/data-staging/Processor.cpp:565 #, c-format msgid "Destination is not ready, will wait %u seconds" msgstr "" #: src/libs/data-staging/Processor.cpp:571 msgid "No physical files found for destination" msgstr "" #: src/libs/data-staging/Processor.cpp:597 msgid "Releasing source" msgstr "" #: src/libs/data-staging/Processor.cpp:601 #, c-format msgid "There was a problem during post-transfer source handling: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:606 #, fuzzy msgid "Releasing destination" msgstr "Nem sikerült feloldani a célállomást" #: src/libs/data-staging/Processor.cpp:610 #, c-format msgid "" "There was a problem during post-transfer destination handling after error: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:614 #, c-format msgid "Error with post-transfer destination handling: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:640 msgid "Removing pre-registered destination in index service" msgstr "" #: src/libs/data-staging/Processor.cpp:643 #, c-format msgid "" "Failed to unregister pre-registered destination %s: %s. You may need to " "unregister it manually" msgstr "" #: src/libs/data-staging/Processor.cpp:649 msgid "Registering destination replica" msgstr "" #: src/libs/data-staging/Processor.cpp:652 #, fuzzy, c-format msgid "Failed to register destination replica: %s" msgstr "Nem támogatott url: %s" #: src/libs/data-staging/Processor.cpp:655 #, c-format msgid "" "Failed to unregister pre-registered destination %s. You may need to " "unregister it manually" msgstr "" #: src/libs/data-staging/Processor.cpp:685 msgid "Error creating cache. Stale locks may remain." msgstr "" #: src/libs/data-staging/Processor.cpp:718 #, c-format msgid "Linking/copying cached file to %s" msgstr "" #: src/libs/data-staging/Processor.cpp:739 #, fuzzy, c-format msgid "Failed linking cache file to %s" msgstr "Nem sikerült listázni a fájlokat" #: src/libs/data-staging/Processor.cpp:743 #, c-format msgid "Error linking cache file to %s." 
msgstr "" #: src/libs/data-staging/Processor.cpp:764 #: src/libs/data-staging/Processor.cpp:771 msgid "Adding to bulk request" msgstr "" #: src/libs/data-staging/Scheduler.cpp:174 #: src/libs/data-staging/Scheduler.cpp:181 #, fuzzy msgid "source" msgstr "Forrás: %s" #: src/libs/data-staging/Scheduler.cpp:174 #: src/libs/data-staging/Scheduler.cpp:181 #, fuzzy msgid "destination" msgstr "Célállomás: %s" #: src/libs/data-staging/Scheduler.cpp:174 #, c-format msgid "Using next %s replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:181 #, c-format msgid "No more %s replicas" msgstr "" #: src/libs/data-staging/Scheduler.cpp:183 msgid "Will clean up pre-registered destination" msgstr "" #: src/libs/data-staging/Scheduler.cpp:187 msgid "Will release cache locks" msgstr "" #: src/libs/data-staging/Scheduler.cpp:190 msgid "Moving to end of data staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:199 #, c-format msgid "Source is mapped to %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:203 msgid "Cannot link to source which can be modified, will copy instead" msgstr "" #: src/libs/data-staging/Scheduler.cpp:212 msgid "Cannot link to a remote destination. Will not use mapped URL" msgstr "" #: src/libs/data-staging/Scheduler.cpp:215 msgid "Linking mapped file" msgstr "" #: src/libs/data-staging/Scheduler.cpp:222 #, c-format msgid "Failed to create link: %s. Will not use mapped URL" msgstr "" #: src/libs/data-staging/Scheduler.cpp:247 #, c-format msgid "" "Scheduler received new DTR %s with source: %s, destination: %s, assigned to " "transfer share %s with priority %d" msgstr "" #: src/libs/data-staging/Scheduler.cpp:255 msgid "" "File is not cacheable, was requested not to be cached or no cache available, " "skipping cache check" msgstr "" #: src/libs/data-staging/Scheduler.cpp:261 msgid "File is cacheable, will check cache" msgstr "" #: src/libs/data-staging/Scheduler.cpp:264 #: src/libs/data-staging/Scheduler.cpp:289 #, c-format msgid "File is currently being cached, will wait %is" msgstr "" #: src/libs/data-staging/Scheduler.cpp:283 msgid "Timed out while waiting for cache lock" msgstr "" #: src/libs/data-staging/Scheduler.cpp:293 msgid "Checking cache again" msgstr "" #: src/libs/data-staging/Scheduler.cpp:313 #, fuzzy msgid "Destination file is in cache" msgstr "Célállomás: %s" #: src/libs/data-staging/Scheduler.cpp:317 msgid "Source and/or destination is index service, will resolve replicas" msgstr "" #: src/libs/data-staging/Scheduler.cpp:320 msgid "" "Neither source nor destination are index services, will skip resolving " "replicas" msgstr "" #: src/libs/data-staging/Scheduler.cpp:331 msgid "Problem with index service, will release cache lock" msgstr "" #: src/libs/data-staging/Scheduler.cpp:335 msgid "Problem with index service, will proceed to end of data staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:345 msgid "Checking source file is present" msgstr "" #: src/libs/data-staging/Scheduler.cpp:353 msgid "Error with source file, moving to next replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:375 #, c-format msgid "Replica %s has long latency, trying next replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:377 #, c-format msgid "No more replicas, will use %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:380 #, c-format msgid "Checking replica %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:390 msgid "Overwrite requested - will pre-clean destination" msgstr "" #: src/libs/data-staging/Scheduler.cpp:393 msgid "No overwrite requested or allowed, 
skipping pre-cleaning" msgstr "" #: src/libs/data-staging/Scheduler.cpp:401 msgid "Pre-clean failed, will still try to copy" msgstr "" #: src/libs/data-staging/Scheduler.cpp:408 #, fuzzy msgid "Source or destination requires staging" msgstr "A feladat megszakítása sikertelen" #: src/libs/data-staging/Scheduler.cpp:412 msgid "No need to stage source or destination, skipping staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:442 msgid "Staging request timed out, will release request" msgstr "" #: src/libs/data-staging/Scheduler.cpp:446 msgid "Querying status of staging request" msgstr "" #: src/libs/data-staging/Scheduler.cpp:455 #, fuzzy msgid "Releasing requests" msgstr "Nem sikerült elküldeni a kérést" #: src/libs/data-staging/Scheduler.cpp:472 msgid "DTR is ready for transfer, moving to delivery queue" msgstr "" #: src/libs/data-staging/Scheduler.cpp:487 #, c-format msgid "Transfer failed: %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:497 msgid "Releasing request(s) made during staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:500 msgid "Neither source nor destination were staged, skipping releasing requests" msgstr "" #: src/libs/data-staging/Scheduler.cpp:512 msgid "Trying next replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:517 #, fuzzy msgid "unregister" msgstr "Figyelmen kívül hagyás: %s" #: src/libs/data-staging/Scheduler.cpp:517 #, fuzzy msgid "register" msgstr "Figyelmen kívül hagyás: %s" #: src/libs/data-staging/Scheduler.cpp:516 #, c-format msgid "Will %s in destination index service" msgstr "" #: src/libs/data-staging/Scheduler.cpp:520 msgid "Destination is not index service, skipping replica registration" msgstr "" #: src/libs/data-staging/Scheduler.cpp:533 msgid "Error registering replica, moving to end of data staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:542 msgid "Will process cache" msgstr "" #: src/libs/data-staging/Scheduler.cpp:546 msgid "File is not cacheable, skipping cache processing" msgstr "" #: src/libs/data-staging/Scheduler.cpp:560 msgid "Cancellation complete" msgstr "" #: src/libs/data-staging/Scheduler.cpp:574 msgid "Will wait 10s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:580 msgid "Error in cache processing, will retry without caching" msgstr "" #: src/libs/data-staging/Scheduler.cpp:589 msgid "Will retry without caching" msgstr "" #: src/libs/data-staging/Scheduler.cpp:607 msgid "Proxy has expired" msgstr "" #: src/libs/data-staging/Scheduler.cpp:618 #, c-format msgid "%i retries left, will wait until %s before next attempt" msgstr "" #: src/libs/data-staging/Scheduler.cpp:634 msgid "Out of retries" msgstr "" #: src/libs/data-staging/Scheduler.cpp:636 msgid "Permanent failure" msgstr "" #: src/libs/data-staging/Scheduler.cpp:642 msgid "Finished successfully" msgstr "" #: src/libs/data-staging/Scheduler.cpp:652 msgid "Returning to generator" msgstr "" #: src/libs/data-staging/Scheduler.cpp:818 #, c-format msgid "File is smaller than %llu bytes, will use local delivery" msgstr "" #: src/libs/data-staging/Scheduler.cpp:872 #, c-format msgid "Delivery service at %s can copy to %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:880 #, c-format msgid "Delivery service at %s can copy from %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:893 msgid "Could not find any useable delivery service, forcing local transfer" msgstr "" #: src/libs/data-staging/Scheduler.cpp:909 #, c-format msgid "Not using delivery service at %s because it is full" msgstr "" #: src/libs/data-staging/Scheduler.cpp:936 #, c-format 
msgid "Not using delivery service %s due to previous failure" msgstr "" #: src/libs/data-staging/Scheduler.cpp:946 msgid "No remote delivery services are useable, forcing local delivery" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1150 msgid "Cancelling active transfer" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1160 msgid "Processing thread timed out. Restarting DTR" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1228 #, fuzzy msgid "Will use bulk request" msgstr "Nem sikerült elküldeni a kérést" #: src/libs/data-staging/Scheduler.cpp:1250 msgid "No delivery endpoints available, will try later" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1269 msgid "Scheduler received NULL DTR" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1279 msgid "Scheduler received invalid DTR" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1368 msgid "Scheduler starting up" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1369 msgid "Scheduler configuration:" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1370 #, c-format msgid " Pre-processor slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1371 #, c-format msgid " Delivery slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1372 #, c-format msgid " Post-processor slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1373 #, c-format msgid " Emergency slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1374 #, c-format msgid " Prepared slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1375 #, c-format msgid "" " Shares configuration:\n" "%s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1378 msgid " Delivery service: LOCAL" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1379 #, c-format msgid " Delivery service: %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1384 msgid "Failed to create DTR dump thread" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1401 #: src/services/data-staging/DataDeliveryService.cpp:507 #, c-format msgid "DTR %s cancelled" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:15 msgid "Shutting down scheduler" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:17 msgid "Scheduler stopped, exiting" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:23 #, c-format msgid "Received DTR %s back from scheduler in state %s" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:30 #, fuzzy msgid "Generator started" msgstr "A feltöltÅ‘ elindult" #: src/libs/data-staging/examples/Generator.cpp:31 msgid "Starting DTR threads" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:44 #, fuzzy msgid "No valid credentials found, exiting" msgstr "Nem tudom másolni a %s fájlt: Nem érvényes tanúsítvány" #: src/libs/data-staging/examples/Generator.cpp:55 #, fuzzy, c-format msgid "Problem creating dtr (source %s, destination %s)" msgstr "Nem sikerült feloldani a célállomást" #: src/services/a-rex/arex.cpp:452 src/services/candypond/CandyPond.cpp:569 #: src/services/data-staging/DataDeliveryService.cpp:681 #, c-format msgid "SOAP operation is not supported: %s" msgstr "" #: src/services/a-rex/arex.cpp:471 src/services/a-rex/arex.cpp:517 #, fuzzy, c-format msgid "Security Handlers processing failed: %s" msgstr "VOMS attribútumok értelmezése sikertelen" #: src/services/a-rex/arex.cpp:485 msgid "Can't obtain configuration. Public information is disabled." msgstr "" #: src/services/a-rex/arex.cpp:495 msgid "" "Can't obtain configuration. Public information is disallowed for this user." 
msgstr "" #: src/services/a-rex/arex.cpp:502 msgid "Can't obtain configuration. Only public information is provided." msgstr "" #: src/services/a-rex/arex.cpp:530 src/services/a-rex/rest/rest.cpp:674 #, fuzzy, c-format msgid "Connection from %s: %s" msgstr "Funkció: %s" #: src/services/a-rex/arex.cpp:533 src/services/a-rex/rest/rest.cpp:678 #, c-format msgid "process: method: %s" msgstr "" #: src/services/a-rex/arex.cpp:534 src/services/a-rex/rest/rest.cpp:679 #, c-format msgid "process: endpoint: %s" msgstr "" #: src/services/a-rex/arex.cpp:559 #, c-format msgid "process: id: %s" msgstr "" #: src/services/a-rex/arex.cpp:560 #, c-format msgid "process: subop: %s" msgstr "" #: src/services/a-rex/arex.cpp:567 #, c-format msgid "process: subpath: %s" msgstr "" #: src/services/a-rex/arex.cpp:605 src/services/candypond/CandyPond.cpp:543 #: src/services/data-staging/DataDeliveryService.cpp:641 #: src/tests/echo/echo.cpp:98 #, c-format msgid "process: request=%s" msgstr "" #: src/services/a-rex/arex.cpp:610 src/services/candypond/CandyPond.cpp:548 #: src/services/data-staging/DataDeliveryService.cpp:646 #: src/tests/count/count.cpp:69 msgid "input does not define operation" msgstr "" #: src/services/a-rex/arex.cpp:613 src/services/candypond/CandyPond.cpp:551 #: src/services/data-staging/DataDeliveryService.cpp:649 #: src/tests/count/count.cpp:72 #, c-format msgid "process: operation: %s" msgstr "" #: src/services/a-rex/arex.cpp:640 #, fuzzy msgid "POST request on special path is not supported" msgstr "A fileset regisztcáció nem támogatott még" #: src/services/a-rex/arex.cpp:645 msgid "process: factory endpoint" msgstr "" #: src/services/a-rex/arex.cpp:788 src/services/candypond/CandyPond.cpp:580 #: src/services/data-staging/DataDeliveryService.cpp:692 #: src/tests/echo/echo.cpp:158 #, c-format msgid "process: response=%s" msgstr "" #: src/services/a-rex/arex.cpp:794 msgid "Per-job POST/SOAP requests are not supported" msgstr "" #: src/services/a-rex/arex.cpp:803 msgid "process: GET" msgstr "" #: src/services/a-rex/arex.cpp:804 #, c-format msgid "GET: id %s path %s" msgstr "" #: src/services/a-rex/arex.cpp:837 msgid "process: HEAD" msgstr "" #: src/services/a-rex/arex.cpp:838 #, c-format msgid "HEAD: id %s path %s" msgstr "" #: src/services/a-rex/arex.cpp:871 msgid "process: PUT" msgstr "" #: src/services/a-rex/arex.cpp:904 msgid "process: DELETE" msgstr "" #: src/services/a-rex/arex.cpp:937 #, c-format msgid "process: method %s is not supported" msgstr "" #: src/services/a-rex/arex.cpp:940 msgid "process: method is not defined" msgstr "" #: src/services/a-rex/arex.cpp:1050 msgid "Failed to run Grid Manager thread" msgstr "" #: src/services/a-rex/arex.cpp:1109 #, c-format msgid "Failed to process configuration in %s" msgstr "" #: src/services/a-rex/arex.cpp:1114 msgid "No control directory set in configuration" msgstr "" #: src/services/a-rex/arex.cpp:1118 msgid "No session directory set in configuration" msgstr "" #: src/services/a-rex/arex.cpp:1122 msgid "No LRMS set in configuration" msgstr "" #: src/services/a-rex/arex.cpp:1127 #, c-format msgid "Failed to create control directory %s" msgstr "" #: src/services/a-rex/cachecheck.cpp:37 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:658 #, c-format msgid "Error with cache configuration: %s" msgstr "" #: src/services/a-rex/cachecheck.cpp:53 #: src/services/candypond/CandyPond.cpp:318 msgid "Error with cache configuration" msgstr "" #: src/services/a-rex/cachecheck.cpp:78 #: src/services/candypond/CandyPond.cpp:146 #: 
src/services/candypond/CandyPond.cpp:343 #, c-format msgid "Looking up URL %s" msgstr "" #: src/services/a-rex/cachecheck.cpp:80 #: src/services/candypond/CandyPond.cpp:155 #, fuzzy, c-format msgid "Cache file is %s" msgstr "Fájl feltöltve %s" #: src/services/a-rex/change_activity_status.cpp:55 #: src/services/a-rex/change_activity_status.cpp:59 #, c-format msgid "EMIES:PauseActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:104 #: src/services/a-rex/change_activity_status.cpp:108 #, c-format msgid "EMIES:ResumeActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:153 #: src/services/a-rex/change_activity_status.cpp:158 #, c-format msgid "EMIES:CancelActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:166 #, c-format msgid "job %s cancelled successfully" msgstr "" #: src/services/a-rex/change_activity_status.cpp:212 #: src/services/a-rex/change_activity_status.cpp:227 #, c-format msgid "EMIES:WipeActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:231 #, c-format msgid "job %s (will be) cleaned successfully" msgstr "" #: src/services/a-rex/change_activity_status.cpp:277 #: src/services/a-rex/change_activity_status.cpp:282 #, c-format msgid "EMIES:RestartActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:286 #, c-format msgid "job %s restarted successfully" msgstr "" #: src/services/a-rex/change_activity_status.cpp:301 #: src/services/a-rex/put.cpp:163 src/services/a-rex/put.cpp:204 #, c-format msgid "%s: there is no such job: %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:309 #, c-format msgid "%s: put log %s: there is no payload" msgstr "" #: src/services/a-rex/change_activity_status.cpp:315 #, c-format msgid "%s: put log %s: unrecognized payload" msgstr "" #: src/services/a-rex/change_activity_status.cpp:354 #, fuzzy msgid "A-REX REST: Failed to resume job" msgstr "Feladat újraküldésének megkisérlése ide: %s" #: src/services/a-rex/change_activity_status.cpp:358 #, c-format msgid "A-REX REST: State change not allowed: from %s to %s" msgstr "" #: src/services/a-rex/create_activity.cpp:52 #, c-format msgid "" "EMIES:CreateActivity: request = \n" "%s" msgstr "" #: src/services/a-rex/create_activity.cpp:58 msgid "EMIES:CreateActivity: too many activity descriptions" msgstr "" #: src/services/a-rex/create_activity.cpp:68 msgid "EMIES:CreateActivity: no job description found" msgstr "" #: src/services/a-rex/create_activity.cpp:75 msgid "EMIES:CreateActivity: max jobs total limit reached" msgstr "" #: src/services/a-rex/create_activity.cpp:101 #, c-format msgid "ES:CreateActivity: Failed to create new job: %s" msgstr "" #: src/services/a-rex/create_activity.cpp:117 msgid "EMIES:CreateActivity finished successfully" msgstr "" #: src/services/a-rex/create_activity.cpp:118 #, fuzzy, c-format msgid "New job accepted with id %s" msgstr "Feladat elküldve ezzel az azonítóval: %s" #: src/services/a-rex/create_activity.cpp:122 #, c-format msgid "" "EMIES:CreateActivity: response = \n" "%s" msgstr "" #: src/services/a-rex/create_activity.cpp:137 msgid "NEW: put new job: there is no payload" msgstr "" #: src/services/a-rex/create_activity.cpp:141 msgid "NEW: put new job: max jobs total limit reached" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:51 msgid "Wiping and re-creating whole storage" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:214 #: src/services/a-rex/delegation/DelegationStore.cpp:316 #, c-format 
msgid "DelegationStore: TouchConsumer failed to create file %s" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:276 msgid "DelegationStore: PeriodicCheckConsumers failed to resume iterator" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:296 #, c-format msgid "" "DelegationStore: PeriodicCheckConsumers failed to remove old delegation %s - " "%s" msgstr "" #: src/services/a-rex/get.cpp:174 src/services/a-rex/get.cpp:229 #: src/services/a-rex/get.cpp:313 #, c-format msgid "Get: there is no job %s - %s" msgstr "" #: src/services/a-rex/get.cpp:380 #, c-format msgid "Head: there is no job %s - %s" msgstr "" #: src/services/a-rex/get.cpp:436 #, fuzzy msgid "Failed to extract credential information" msgstr "Nem sikerült betölteni a konfigurációt" #: src/services/a-rex/get.cpp:439 #, c-format msgid "Checking cache permissions: DN: %s" msgstr "" #: src/services/a-rex/get.cpp:440 #, c-format msgid "Checking cache permissions: VO: %s" msgstr "" #: src/services/a-rex/get.cpp:442 #, c-format msgid "Checking cache permissions: VOMS attr: %s" msgstr "" #: src/services/a-rex/get.cpp:452 #, c-format msgid "Cache access allowed to %s by DN %s" msgstr "" #: src/services/a-rex/get.cpp:455 #, c-format msgid "DN %s doesn't match %s" msgstr "" #: src/services/a-rex/get.cpp:458 #, c-format msgid "Cache access allowed to %s by VO %s" msgstr "" #: src/services/a-rex/get.cpp:461 #, c-format msgid "VO %s doesn't match %s" msgstr "" #: src/services/a-rex/get.cpp:467 src/services/a-rex/get.cpp:486 #, c-format msgid "Bad credential value %s in cache access rules" msgstr "" #: src/services/a-rex/get.cpp:475 src/services/a-rex/get.cpp:494 #, c-format msgid "VOMS attr %s matches %s" msgstr "" #: src/services/a-rex/get.cpp:476 #, c-format msgid "Cache access allowed to %s by VO %s and role %s" msgstr "" #: src/services/a-rex/get.cpp:479 src/services/a-rex/get.cpp:498 #, c-format msgid "VOMS attr %s doesn't match %s" msgstr "" #: src/services/a-rex/get.cpp:495 #, c-format msgid "Cache access allowed to %s by VO %s and group %s" msgstr "" #: src/services/a-rex/get.cpp:501 #, c-format msgid "Unknown credential type %s for URL pattern %s" msgstr "" #: src/services/a-rex/get.cpp:507 #, c-format msgid "No match found in cache access rules for %s" msgstr "" #: src/services/a-rex/get.cpp:517 #, c-format msgid "Get from cache: Looking in cache for %s" msgstr "" #: src/services/a-rex/get.cpp:520 #, fuzzy, c-format msgid "Get from cache: Invalid URL %s" msgstr "Érvénytelen URL: %s" #: src/services/a-rex/get.cpp:537 msgid "Get from cache: Error in cache configuration" msgstr "" #: src/services/a-rex/get.cpp:546 msgid "Get from cache: File not in cache" msgstr "" #: src/services/a-rex/get.cpp:549 #, c-format msgid "Get from cache: could not access cached file: %s" msgstr "" #: src/services/a-rex/get.cpp:559 msgid "Get from cache: Cached file is locked" msgstr "" #: src/services/a-rex/get_activity_statuses.cpp:214 #: src/services/a-rex/get_activity_statuses.cpp:320 #, c-format msgid "EMIES:GetActivityStatus: job %s - %s" msgstr "" #: src/services/a-rex/get_activity_statuses.cpp:455 #, c-format msgid "EMIES:GetActivityInfo: job %s - failed to retrieve GLUE2 information" msgstr "" #: src/services/a-rex/get_activity_statuses.cpp:507 #: src/services/a-rex/get_activity_statuses.cpp:514 #, c-format msgid "EMIES:NotifyService: job %s - %s" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:98 #, c-format msgid "" "Cannot create directories for log file %s. 
Messages will be logged to this " "log" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:104 #, c-format msgid "" "Cannot open cache log file %s: %s. Cache cleaning messages will be logged to " "this log" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:114 msgid "Failed to start cache clean script" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:115 msgid "Cache cleaning script failed" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:183 #, c-format msgid "External request for attention %s" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:201 #, fuzzy, c-format msgid "Failed to open heartbeat file %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/grid-manager/GridManager.cpp:223 msgid "Starting jobs processing thread" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:224 #, c-format msgid "Used configuration file %s" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:232 #, c-format msgid "" "Error initiating delegation database in %s. Maybe permissions are not " "suitable. Returned error is: %s." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:244 msgid "Failed to start new thread: cache won't be cleaned" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:251 msgid "Failed to activate Jobs Processing object, exiting Grid Manager thread" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:260 #, c-format msgid "" "Error adding communication interface in %s. Maybe another instance of A-REX " "is already running." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:263 #, c-format msgid "" "Error adding communication interface in %s. Maybe permissions are not " "suitable." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:270 #, fuzzy msgid "Failed to start new thread for monitoring job requests" msgstr "Nem sikerült elküldeni a kérést" #: src/services/a-rex/grid-manager/GridManager.cpp:276 msgid "Picking up left jobs" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:279 msgid "Starting data staging threads" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:283 msgid "Starting jobs' monitoring" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:291 #, c-format msgid "" "SSHFS mount point of session directory (%s) is broken - waiting for " "reconnect ..." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:295 #, c-format msgid "" "SSHFS mount point of runtime directory (%s) is broken - waiting for " "reconnect ..." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:300 #, c-format msgid "" "SSHFS mount point of cache directory (%s) is broken - waiting for " "reconnect ..." 
msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:349 #, c-format msgid "Orphan delegation lock detected (%s) - cleaning" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:354 msgid "Failed to obtain delegation locks for cleaning orphaned locks" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:368 msgid "Waking up" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:371 msgid "Stopping jobs processing thread" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:373 msgid "Exiting jobs processing thread" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:391 msgid "Requesting to stop job processing" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:399 msgid "Waiting for main job processing thread to exit" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:401 msgid "Stopped job processing" msgstr "" #: src/services/a-rex/grid-manager/accounting/AAR.cpp:73 msgid "Cannot find information abouto job submission endpoint" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:53 #, fuzzy, c-format msgid "Failed to read database schema file at %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:63 msgid "Accounting database initialized succesfully" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:65 msgid "Accounting database connection has been established" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:75 #, c-format msgid "%s. SQLite database error: %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:77 #, c-format msgid "SQLite database error: %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:105 #, c-format msgid "Directory %s to store accounting database has been created." msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:107 #, c-format msgid "" "Accounting database cannot be created. Faile to create parent directory %s." 
msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:111 #, c-format msgid "Accounting database cannot be created: %s is not a directory" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:118 #, fuzzy msgid "Failed to initialize accounting database" msgstr "Nem sikerült betölteni a konfigurációt" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:125 #, c-format msgid "Accounting database file (%s) is not a regular file" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:131 msgid "Error opening accounting database" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:149 msgid "Closing connection to SQLite accounting database" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:238 #, c-format msgid "Failed to fetch data from %s accounting database table" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:255 #, c-format msgid "Failed to add '%s' into the accounting database %s table" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:314 msgid "Failed to fetch data from accounting database Endpoints table" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:331 #, c-format msgid "" "Failed to add '%s' URL (interface type %s) into the accounting database " "Endpoints table" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:357 #, fuzzy, c-format msgid "Failed to query AAR database ID for job %s" msgstr "Nem sikerült elküldeni a kérést" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:412 #, c-format msgid "Failed to insert AAR into the database for job %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:413 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:460 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:491 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:507 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:523 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:544 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:560 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:575 #, c-format msgid "SQL statement used: %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:418 #, fuzzy, c-format msgid "Failed to write authtoken attributes for job %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:422 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:477 #, fuzzy, c-format msgid "Failed to write event records for job %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:433 #, c-format msgid "" "Cannot to update AAR. Cannot find registered AAR for job %s in accounting " "database." 
msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:459 #, fuzzy, c-format msgid "Failed to update AAR in the database for job %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:465 #, fuzzy, c-format msgid "Failed to write RTEs information for the job %s" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:469 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:473 #, fuzzy, c-format msgid "Failed to write data transfers information for the job %s" msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:569 #, fuzzy, c-format msgid "Unable to add event: cannot find AAR for job %s in accounting database." msgstr "Nem sikerült elérnem a voms szervert %s, ezen fájl alapján: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:73 #, fuzzy, c-format msgid "Unknown option %s" msgstr "Célállomás: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:80 msgid "Job ID argument is required." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:86 msgid "Path to user's proxy file should be specified." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:92 msgid "User name should be specified." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:98 msgid "Path to .local job status file is required." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:106 msgid "Generating ceID prefix from hostname automatically" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:109 msgid "" "Cannot determine hostname from gethostname() to generate ceID automatically." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:118 #, c-format msgid "ceID prefix is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:126 #, c-format msgid "Getting currect timestamp for BLAH parser log: %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:135 msgid "Parsing .local file to obtain job-specific identifiers and info" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:145 #, c-format msgid "globalid is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:148 #, c-format msgid "headnode is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:151 #, c-format msgid "interface is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:155 msgid "There is no local LRMS ID. Message will not be written to BLAH log." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:158 #, c-format msgid "localid is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:161 #, c-format msgid "queue name is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:164 #, c-format msgid "owner subject is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:166 msgid "" "Job did not finished successfully. Message will not be written to BLAH log." 
msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:174 #, c-format msgid "Job timestamp successfully parsed as %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:178 msgid "Can not read information from the local job status file" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:194 #, c-format msgid "" "Unsupported submission interface %s. Seems arc-blahp-logger need to be " "updated accordingly. Please submit the bug to bugzilla." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:204 msgid "Parsing VOMS AC to get FQANs information" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:217 #, c-format msgid "Found VOMS AC attribute: %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:230 #, fuzzy msgid "VOMS AC attribute is a tag" msgstr "VOMS attribútumok értelmezése sikertelen" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:237 msgid "Skipping policyAuthority VOMS AC attribute" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:241 #, fuzzy msgid "VOMS AC attribute is the FQAN" msgstr "VOMS attribútumok értelmezése sikertelen" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:249 msgid "No FQAN found. Using None as userFQAN value" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:263 #, c-format msgid "Assembling BLAH parser log entry: %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:268 #, c-format msgid "Writing the info to the BLAH parser log: %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:276 #, fuzzy, c-format msgid "Cannot open BLAH log file '%s'" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:34 #, c-format msgid "Missing cancel-%s-job - job cancellation may not work" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:38 #, c-format msgid "Missing submit-%s-job - job submission to LRMS may not work" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:42 #, c-format msgid "Missing scan-%s-job - may miss when job finished executing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:56 #, c-format msgid "Wrong option in %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:67 #, fuzzy, c-format msgid "Can't read configuration file at %s" msgstr "Nem tudom olvasni a célállomásokat a fájlból: %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:77 #, fuzzy, c-format msgid "Can't recognize type of configuration file at %s" msgstr "Nem tudom olvasni a célállomásokat a fájlból: %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:80 msgid "Could not determine configuration type or configuration is empty" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:161 #, fuzzy msgid "lrms is empty" msgstr "Túl sok paraméter" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:194 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:203 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:212 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:221 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:230 msgid "Missing number in maxjobs" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:197 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:206 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:215 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:224 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:233 #, c-format msgid "Wrong 
number in maxjobs: %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:243 #, c-format msgid "Wrong number in wakeupperiod: %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:249 #, fuzzy msgid "mail parameter is empty" msgstr "Túl sok paraméter" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:255 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:259 msgid "Wrong number in defaultttl command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:265 msgid "Wrong number in maxrerun command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:272 msgid "State name for plugin is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:276 msgid "Options for plugin are missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:279 #, c-format msgid "Failed to register plugin for state %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:285 msgid "Session root directory is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:288 msgid "Junk in sessiondir command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:300 msgid "Missing directory in controldir command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:305 msgid "" "'control' configuration option is no longer supported, please use " "'controldir' instead" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:310 msgid "User for helper program is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:313 msgid "Only user '.' for helper program is supported" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:316 msgid "Helper program is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:337 msgid "Wrong option in fixdirectories" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:364 msgid "Wrong option in delegationdb" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:370 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:556 #, fuzzy msgid "forcedefaultvoms parameter is empty" msgstr "Túl sok paraméter" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:445 msgid "Wrong number in maxjobdesc command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:495 msgid "Missing file name in [arex/jura] logfile" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:506 #, c-format msgid "Wrong number in urdelivery_frequency: %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:549 msgid "No queue name given in queue block name" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:565 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:600 #, fuzzy msgid "advertisedvo parameter is empty" msgstr "Túl sok paraméter" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:117 #, c-format msgid "\tSession root dir : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:118 #, c-format msgid "\tControl dir : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:119 #, c-format msgid "\tdefault LRMS : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:120 #, c-format msgid "\tdefault queue : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:121 #, c-format msgid "\tdefault ttl : %u" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:126 msgid "No valid caches found in configuration, caching is disabled" msgstr "" #: 
src/services/a-rex/grid-manager/conf/GMConfig.cpp:131 #, c-format msgid "\tCache : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:133 #, c-format msgid "\tCache link dir : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:136 #, c-format msgid "\tCache (read-only): %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:138 msgid "\tCache cleaning enabled" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:139 msgid "\tCache cleaning disabled" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:327 msgid "" "Globus location variable substitution is not supported anymore. Please " "specify path directly." msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:35 msgid "Can't read configuration file" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:41 #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:29 msgid "Can't recognize type of configuration file" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:47 msgid "Configuration error" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:77 msgid "Bad number in maxdelivery" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:83 msgid "Bad number in maxemergency" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:89 msgid "Bad number in maxprocessor" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:95 msgid "Bad number in maxprepared" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:101 msgid "Bad number in maxtransfertries" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:112 msgid "Bad number in speedcontrol" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:123 #, c-format msgid "Bad number in definedshare %s" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:132 #, c-format msgid "Bad URL in deliveryservice: %s" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:143 msgid "Bad number in remotesizelimit" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:168 msgid "Bad value for loglevel" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:182 msgid "Bad URL in acix_endpoint" msgstr "" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:24 msgid "Can't open configuration file" msgstr "" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:45 msgid "Not enough parameters in copyurl" msgstr "" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:54 msgid "Not enough parameters in linkurl" msgstr "" #: src/services/a-rex/grid-manager/files/ControlFileContent.cpp:179 #, c-format msgid "Wrong directory in %s" msgstr "" #: src/services/a-rex/grid-manager/files/ControlFileHandling.cpp:102 #, c-format msgid "Failed setting file owner: %s" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:33 msgid "gm-delegations-converter changes format of delegation database." 
msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:38 #: src/services/a-rex/grid-manager/gm_jobs.cpp:110 #: src/services/a-rex/grid-manager/gm_kick.cpp:24 msgid "use specified configuration file" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:39 #: src/services/a-rex/grid-manager/gm_jobs.cpp:111 #: src/services/a-rex/grid-manager/gm_kick.cpp:25 msgid "file" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:43 #: src/services/a-rex/grid-manager/gm_jobs.cpp:115 msgid "read information from specified control directory" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:44 #: src/services/a-rex/grid-manager/gm_jobs.cpp:116 msgid "dir" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:48 msgid "convert from specified input database format [bdb|sqlite]" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:49 #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:54 msgid "database format" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:53 msgid "convert into specified output database format [bdb|sqlite]" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:36 #, fuzzy, c-format msgid "Could not read data staging configuration from %s" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/services/a-rex/grid-manager/gm_jobs.cpp:44 #, c-format msgid "Can't read transfer states from %s. Perhaps A-REX is not running?" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:100 msgid "gm-jobs displays information on current jobs in the system." msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:105 msgid "display more information on each job" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:120 msgid "print summary of jobs in each transfer share" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:125 msgid "do not print list of jobs" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:130 msgid "do not print number of jobs in each state" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:135 msgid "print state of the service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:140 msgid "show only jobs of user(s) with specified subject name(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:141 #: src/services/a-rex/grid-manager/gm_jobs.cpp:151 #: src/services/a-rex/grid-manager/gm_jobs.cpp:161 #, fuzzy msgid "dn" msgstr "n" #: src/services/a-rex/grid-manager/gm_jobs.cpp:145 msgid "request to cancel job(s) with specified ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:146 #: src/services/a-rex/grid-manager/gm_jobs.cpp:156 #: src/services/a-rex/grid-manager/gm_jobs.cpp:166 #: src/services/a-rex/grid-manager/gm_jobs.cpp:176 #: src/services/a-rex/grid-manager/gm_kick.cpp:30 msgid "id" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:150 msgid "" "request to cancel jobs belonging to user(s) with specified subject name(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:155 msgid "request to clean job(s) with specified ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:160 msgid "" "request to clean jobs belonging to user(s) with specified subject name(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:165 msgid "show only jobs with specified ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:170 msgid "print list of available delegation IDs" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:175 
msgid "print delegation token of specified ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:180 msgid "print main delegation token of specified Job ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:181 msgid "job id" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:185 msgid "" "output requested elements (jobs list, delegation ids and tokens) to file" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:186 #, fuzzy msgid "file name" msgstr "fájlnév" #: src/services/a-rex/grid-manager/gm_jobs.cpp:209 #, fuzzy, c-format msgid "Using configuration at %s" msgstr "Nem tudom olvasni a célállomásokat a fájlból: %s" #: src/services/a-rex/grid-manager/gm_jobs.cpp:232 #, fuzzy, c-format msgid "Failed to open output file '%s'" msgstr "Nem sikerült listázni a fájlokat" #: src/services/a-rex/grid-manager/gm_jobs.cpp:241 msgid "Looking for current jobs" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:278 #, c-format msgid "Job: %s : ERROR : Unrecognizable state" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:287 #, c-format msgid "Job: %s : ERROR : No local information." msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:461 #, c-format msgid "Job: %s : ERROR : Failed to put cancel mark" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:465 #, c-format msgid "Job: %s : Cancel request put but failed to communicate to service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:467 #, c-format msgid "Job: %s : Cancel request put and communicated to service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:478 #, c-format msgid "Job: %s : ERROR : Failed to put clean mark" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:482 #, c-format msgid "Job: %s : Clean request put but failed to communicate to service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:484 #, c-format msgid "Job: %s : Clean request put and communicated to service" msgstr "" #: src/services/a-rex/grid-manager/gm_kick.cpp:18 msgid "" "gm-kick wakes up the A-REX corresponding to the given control file. If no " "file is given it uses the control directory found in the configuration file." msgstr "" #: src/services/a-rex/grid-manager/gm_kick.cpp:29 msgid "inform about changes in particular job (can be used multiple times)" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:39 #, c-format msgid "Failed to acquire source: %s" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:44 #, c-format msgid "Failed to resolve %s" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:61 #, c-format msgid "Failed to check %s" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:75 msgid "job_description_file [proxy_file]" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:76 msgid "" "inputcheck checks that input files specified in the job description are " "available and accessible using the credentials in the given proxy file." 
msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:88 msgid "Wrong number of arguments given" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:143 #, fuzzy, c-format msgid "Unsupported value for allownew: %s" msgstr "Nem támogatott url: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:154 msgid "Wrong number in maxjobdesc" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:162 #: src/services/gridftpd/fileplugin/fileplugin.cpp:186 #, c-format msgid "Unsupported configuration command: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:171 #, c-format msgid "Mapped user:group (%s:%s) not found" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:174 #, fuzzy msgid "Job submission user can't be root" msgstr "Job küldési összefoglaló" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:177 #, fuzzy msgid "Failed processing A-REX configuration" msgstr "voms szerver fájljának az elérési útvonala" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:201 msgid "This user is denied to submit new jobs." msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:206 msgid "No control or session directories defined in configuration" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:210 #, c-format msgid "Job submission user: %s (%i:%i)" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:213 msgid "Job plugin was not initialised" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:231 msgid "No delegated credentials were passed" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:305 #, c-format msgid "Cancelling job %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:360 #, c-format msgid "Cleaning job %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:400 msgid "Request to open file with storing in progress" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:434 #: src/services/gridftpd/fileplugin/fileplugin.cpp:344 #, c-format msgid "Retrieving file %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:484 #, c-format msgid "Accepting submission of new job or modification request: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:506 #: src/services/gridftpd/fileplugin/fileplugin.cpp:384 #: src/services/gridftpd/fileplugin/fileplugin.cpp:421 #, c-format msgid "Storing file %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:527 #, c-format msgid "Unknown open mode %i" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:653 #, c-format msgid "action(%s) != request" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:704 #, fuzzy msgid "Failed writing job description" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:920 msgid "Failed writing local description" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:930 msgid "Failed writing ACL" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:946 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:953 #: src/services/a-rex/job.cpp:819 #, c-format msgid "Failed to run external plugin: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:957 #: src/services/a-rex/job.cpp:823 #, c-format msgid "Plugin response: %s" msgstr "" #: 
src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:959 msgid "Failed to run external plugin" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:969 #, c-format msgid "Failed to create session directory %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:979 msgid "Failed writing status" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:993 #, fuzzy, c-format msgid "Failed to lock delegated credentials: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1235 #, c-format msgid "Renewing proxy for job %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1251 #, c-format msgid "New proxy expires at %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1254 msgid "Failed to write 'local' information" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1257 msgid "Failed to renew proxy" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1260 msgid "New proxy expiry time is not later than old proxy, not renewing proxy" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1302 #, c-format msgid "Checking file %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1349 msgid "ID contains forbidden characters" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1383 #: src/services/a-rex/job.cpp:1023 #, c-format msgid "Failed to create file in %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1393 msgid "Out of tries while allocating new job ID" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1473 #, c-format msgid "Failed to read job's local description for job %s from %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1562 #, fuzzy msgid "No non-draining session directories available" msgstr "Nincs megadva feladat leírás bemeneti adatként" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1568 #, c-format msgid "Using control directory %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1569 #, c-format msgid "Using session directory %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:26 #, c-format msgid "Failed to read job's ACL for job %s from %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:70 #, c-format msgid "Failed to parse user policy for job %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:75 #, c-format msgid "Failed to load policy evaluator for policy of job %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:129 #, c-format msgid "Unknown ACL policy %s for job %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:73 #, c-format msgid "" "DTR Generator waiting to process: %d jobs to cancel, %d DTRs, %d new jobs" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:87 #, c-format msgid "%s: Job cancel request from DTR generator to scheduler" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:92 #, c-format msgid "%s: Returning canceled job from DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:133 #, c-format msgid "%s: Re-requesting attention from DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:143 #, c-format msgid "DTR Generator processed: %d jobs to cancel, %d DTRs, %d new jobs" msgstr "" #: 
src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:162 msgid "Exiting Generator thread" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:234 msgid "Shutting down data staging threads" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:244 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:257 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:285 msgid "DTRGenerator is not running!" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:247 #, c-format msgid "Received DTR %s during Generator shutdown - may not be processed" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:261 msgid "DTRGenerator was sent null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:270 #, c-format msgid "%s: Received job in DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:273 #, c-format msgid "%s: Failed to receive job in DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:280 msgid "DTRGenerator got request to cancel null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:295 msgid "DTRGenerator is queried about null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:325 msgid "DTRGenerator is asked about null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:353 msgid "DTRGenerator is requested to remove null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:360 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:368 #, c-format msgid "%s: Trying to remove job from data staging which is still active" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:376 #, c-format msgid "%s: Trying remove job from data staging which does not exist" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:387 #, fuzzy, c-format msgid "%s: Invalid DTR" msgstr "Érvénytelen URL: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:404 #, c-format msgid "%s: Received DTR %s to copy file %s in state %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:408 #, c-format msgid "%s: Received DTR belongs to inactive job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:425 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1067 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:459 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:517 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:631 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:841 #, c-format msgid "%s: Failed reading local information" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:434 #, c-format msgid "%s: DTR %s to copy file %s failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:440 #, c-format msgid "%s: Cancelling other DTRs" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:450 #, c-format msgid "%s: DTR %s to copy to %s failed but is not mandatory" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:460 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:722 #, c-format msgid "%s: Failed to read list of output files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:474 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:615 #, c-format msgid "%s: Failed to read dynamic output files in %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:476 #, c-format msgid "%s: Going through files in list %s" msgstr "" #: 
src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:480 #, c-format msgid "%s: Removing %s from dynamic output file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:484 #, c-format msgid "%s: Failed to write back dynamic output files in %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:500 #, c-format msgid "%s: Failed to write list of output files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:504 #, c-format msgid "%s: Failed to write list of output status files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:516 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:734 #, c-format msgid "%s: Failed to read list of input files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:535 #, fuzzy, c-format msgid "%s: Failed to write list of input files" msgstr "Nem sikerült listázni a fájlokat" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:547 #, c-format msgid "%s: Received DTR with two remote endpoints!" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:559 #: src/services/candypond/CandyPondGenerator.cpp:105 #, fuzzy, c-format msgid "No active job id %s" msgstr "Feladat migrálásra került ezzel az azonosítóval: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:603 #, c-format msgid "%s: Failed to read list of output files, can't clean up session dir" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:629 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:648 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:772 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:895 #, c-format msgid "%s: Failed to clean up session dir" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:639 #, c-format msgid "%s: Failed to read list of input files, can't clean up session dir" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:661 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:665 #, fuzzy msgid "uploads" msgstr "Feltöltve %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:661 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:665 msgid "downloads" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:662 msgid "cancelled" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:662 msgid "finished" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:660 #, c-format msgid "%s: All %s %s successfully" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:664 #, c-format msgid "%s: Some %s failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:668 #, c-format msgid "%s: Requesting attention from DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:679 msgid "DTRGenerator is requested to process null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:685 msgid "download" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:685 #, fuzzy msgid "upload" msgstr "Feltöltve %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:684 #, c-format msgid "%s: Received data staging request to %s files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:743 #, c-format msgid "%s: Duplicate file in list of input files: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:792 #, c-format msgid "%s: Reading output files from user generated list in %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:794 #, 
c-format msgid "%s: Error reading user generated output file list in %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:821 #, c-format msgid "%s: Failed to list output directory %s: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:839 #, c-format msgid "%s: Adding new output file %s: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:861 #, c-format msgid "%s: Two identical output destinations: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:874 #, c-format msgid "%s: Cannot upload two different files %s and %s to same LFN: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:906 #, c-format msgid "%s: Received job in a bad state: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:914 #, c-format msgid "%s: Session directory processing takes too long - %u.%06u seconds" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:981 #, c-format msgid "" "%s: Destination file %s was possibly left unfinished from previous A-REX " "run, will overwrite" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1073 #, c-format msgid "%s: Failed writing local information" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1091 #, c-format msgid "%s: Cancelling active DTRs" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1098 msgid "DTRGenerator is asked to check files for null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1118 #, fuzzy, c-format msgid "%s: Can't read list of input files" msgstr "Nem tudom olvasni a célállomásokat a fájlból: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1133 #, c-format msgid "%s: Checking user uploadable file: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1138 #, fuzzy, c-format msgid "%s: User has uploaded file %s" msgstr "Fájl feltöltve %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1145 #, fuzzy, c-format msgid "%s: Failed writing changed input file." msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1149 #, c-format msgid "%s: Critical error for uploadable file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1155 #, fuzzy, c-format msgid "%s: User has NOT uploaded file %s" msgstr "Fájl feltöltve %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1167 #, c-format msgid "%s: Uploadable files timed out" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1223 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1249 #, c-format msgid "%s: Can't convert checksum %s to int for %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1230 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1244 #, c-format msgid "%s: Can't convert filesize %s to int for %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1239 #, c-format msgid "%s: Invalid size/checksum information (%s) for %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1261 #, c-format msgid "%s: Invalid file: %s is too big." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1277 #, c-format msgid "%s: Failed to switch user ID to %d/%d to read file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1283 #, c-format msgid "%s: Failed to open file %s for reading" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1291 #, c-format msgid "%s: Error accessing file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1303 #, c-format msgid "%s: Error reading file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1318 #, c-format msgid "%s: File %s has wrong checksum: %llu. Expected %lli" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1324 #, c-format msgid "%s: Checksum %llu verified for %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1336 msgid "" "Found unfinished DTR transfers. It is possible the previous A-REX process " "did not shut down normally" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1343 #, c-format msgid "Found DTR %s for file %s left in transferring state from previous run" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1352 msgid "DTRGenerator is requested to clean links for null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1368 #, c-format msgid "%s: Cache cleaning takes too long - %u.%06u seconds" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:108 #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:190 #, c-format msgid "%s: Job monitoring counter is broken" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:115 #, c-format msgid "%s: Job monitoring is unintentionally lost" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:124 #, c-format msgid "%s: Job monitoring stop success" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:129 #, c-format msgid "" "%s: Job monitoring stop requested with %u active references and %s queue " "associated" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:131 #, c-format msgid "%s: Job monitoring stop requested with %u active references" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:195 #, c-format msgid "%s: Job monitoring is lost due to removal from queue" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:278 #, c-format msgid "%s: PushSorted failed to find job where expected" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:132 #, c-format msgid "Replacing queue '%s' with '%s'" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:226 #, c-format msgid "Bad name for stdout: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:234 #, c-format msgid "Bad name for stderr: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:297 #, c-format msgid "Bad name for runtime environment: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:342 msgid "Job description file could not be read." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:393 #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:407 #, c-format msgid "Bad name for executable: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:89 #, fuzzy msgid "Failed to start data staging threads" msgstr "Nem sikerült elküldeni a kérést" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:190 #, c-format msgid "" "%s: Failed reading .local and changing state, job and A-REX may be left in " "an inconsistent state" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:195 #, c-format msgid "%s: unexpected failed job add request: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:206 #, c-format msgid "%s: unexpected job add request: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:259 #, c-format msgid "%s: job for attention" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:269 msgid "all for attention" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:286 #, c-format msgid "%s: job found while scanning" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:314 #, c-format msgid "%s: job will wait for external process" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:331 #, c-format msgid "%s: job assigned for slow polling" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:349 #, c-format msgid "%s: job being processed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:384 #, c-format msgid "Current jobs in system (PREPARING to FINISHING) per-DN (%i entries)" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:386 #, c-format msgid "%s: %i" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:398 #, fuzzy, c-format msgid "%s: Failed storing failure reason: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:404 #, fuzzy, c-format msgid "%s: Failed reading job description: %s" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:416 #, fuzzy, c-format msgid "%s: Failed parsing job request." msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:451 #, c-format msgid "%s: Failed writing list of output files: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:477 #, c-format msgid "%s: Failed obtaining lrms id" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:491 #, c-format msgid "%s: Failed writing local information: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:523 #, c-format msgid "%s: Failed creating grami file" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:527 #, c-format msgid "%s: Failed setting executable permissions" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:535 #, c-format msgid "%s: state SUBMIT: starting child: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:542 #, c-format msgid "%s: Failed running submission process" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:547 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:654 #, c-format msgid "%s: LRMS scripts limit of %u is reached - suspending submit/cancel" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:563 #, c-format msgid "" "%s: Job submission to LRMS takes too long, but ID is already obtained. " "Pretending submission is done." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:570 #, c-format msgid "%s: Job submission to LRMS takes too long. Failing." msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:579 #, c-format msgid "%s: state SUBMIT: child exited with code %i" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:584 #, c-format msgid "%s: Job submission to LRMS failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:605 #, c-format msgid "%s: state CANCELING: timeout waiting for cancellation" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:611 #, c-format msgid "%s: state CANCELING: job diagnostics collected" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:639 #, c-format msgid "%s: state CANCELING: starting child: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:641 #, c-format msgid "%s: Job has completed already. No action taken to cancel" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:649 #, c-format msgid "%s: Failed running cancellation process" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:668 #, c-format msgid "" "%s: Job cancellation takes too long, but diagnostic collection seems to be " "done. Pretending cancellation succeeded." msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:674 #, c-format msgid "%s: Job cancellation takes too long. Failing." msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:684 #, c-format msgid "%s: state CANCELING: child exited with code %i" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:690 #, c-format msgid "%s: Failed to cancel running job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:709 #, c-format msgid "%s: State: %s: data staging finished" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:744 #, c-format msgid "%s: State: %s: still in data staging" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:757 #, c-format msgid "%s: Job is not allowed to be rerun anymore" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:767 #, c-format msgid "%s: Job failed in unknown state. Won't rerun." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:788 #, fuzzy, c-format msgid "%s: Reprocessing job description failed" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:795 #, c-format msgid "%s: Failed to read reprocessed list of output files" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:799 #, c-format msgid "%s: Failed to read reprocessed list of input files" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:883 #, c-format msgid "%s: Reading status of new job failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:896 #, c-format msgid "%s: State: ACCEPTED: parsing job description" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:898 #, c-format msgid "%s: Processing job description failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:936 #, fuzzy, c-format msgid "%s: new job is accepted" msgstr "Feladat elküldve ezzel az azonítóval: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:948 #, c-format msgid "%s: %s: New job belongs to %i/%i" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:953 #, c-format msgid "%s: old job is accepted" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:964 #, c-format msgid "%s: State: ACCEPTED" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:970 #, c-format msgid "%s: State: ACCEPTED: dryrun" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:993 #, c-format msgid "%s: State: ACCEPTED: has process time %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:999 #, c-format msgid "%s: State: ACCEPTED: moving to PREPARING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1015 #, c-format msgid "%s: State: PREPARING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1022 #, c-format msgid "%s: Failed obtaining local job information." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1075 #, c-format msgid "%s: State: SUBMIT" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1095 #, c-format msgid "%s: State: CANCELING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1115 #, c-format msgid "%s: State: INLRMS" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1120 #, c-format msgid "%s: State: INLRMS - checking for pending(%u) and mark" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1122 #, c-format msgid "%s: State: INLRMS - checking for not pending" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1124 #, c-format msgid "%s: Job finished" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1128 #, c-format msgid "%s: State: INLRMS: exit message is %i %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1141 #, c-format msgid "%s: State: INLRMS - no mark found" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1153 #, c-format msgid "%s: State: FINISHING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1174 #, c-format msgid "%s: Job is requested to clean - deleting" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1191 #, c-format msgid "%s: restarted PREPARING job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1207 #, c-format msgid "%s: restarted INLRMS job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1216 #, c-format msgid "%s: restarted FINISHING job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1221 #, c-format msgid "%s: Can't rerun on request" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1223 #, c-format msgid "%s: Can't rerun on request - not a suitable state" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1234 #, c-format msgid "%s: Job is too old - deleting" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1279 #, c-format msgid "%s: Job is ancient - delete rest of information" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1297 #, c-format msgid "%s: Canceling job because of user request" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1311 #, c-format msgid "%s: Failed to turn job into failed during cancel processing." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1343 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1351 #, c-format msgid "%s: Plugin at state %s : %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1357 #, c-format msgid "%s: Plugin execution failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1464 #, c-format msgid "%s: State: %s from %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1515 #, c-format msgid "Failed to get DN information from .local file for job %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1542 #, c-format msgid "%s: Delete request due to internal problems" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1577 #, c-format msgid "%s: Job failure detected" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1630 #, fuzzy, c-format msgid "Failed to move file %s to %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1638 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1748 #, c-format msgid "Failed reading control directory: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1708 #, c-format msgid "Failed reading control directory: %s: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2022 #, c-format msgid "Helper process start failed: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2029 #, c-format msgid "Stopping helper process %s" msgstr "" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:61 #, fuzzy, c-format msgid "Error with hearbeatfile: %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:73 #: src/services/a-rex/grid-manager/log/JobsMetrics.cpp:139 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:136 #, c-format msgid ": Metrics tool returned error code %i: %s" msgstr "" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:107 #: src/services/a-rex/grid-manager/log/JobsMetrics.cpp:186 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:178 msgid "" "gmetric_bin_path empty in arc.conf (should never happen the default value " "should be used)" msgstr "" #: src/services/a-rex/grid-manager/log/JobLog.cpp:114 #, fuzzy msgid ": Accounting records reporter tool is not specified" msgstr "Nincs megadva feladat leírás bemeneti adatként" #: src/services/a-rex/grid-manager/log/JobLog.cpp:130 msgid ": Failure creating slot for accounting reporter child process" msgstr "" #: src/services/a-rex/grid-manager/log/JobLog.cpp:143 msgid ": Failure starting accounting reporter child process" msgstr "" #: src/services/a-rex/grid-manager/log/JobLog.cpp:176 msgid ": Failure creating accounting database connection" msgstr "" #: src/services/a-rex/grid-manager/log/JobLog.cpp:202 #, c-format msgid ": writing accounting record took %llu ms" msgstr "" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:74 #, c-format msgid "Session dir '%s' contains user specific substitutions - skipping it" msgstr "" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:86 #, c-format msgid "Sessiondir %s: Free space %f GB" msgstr "" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:94 #, fuzzy msgid "No session directories found in configuration." msgstr "voms szerver fájljának az elérési útvonala" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:125 msgid "No cachedirs found/configured for calculation of free space." 
msgstr "" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:29 msgid "Failed reading local information" msgstr "" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:79 #, c-format msgid "Running mailer command (%s)" msgstr "" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:81 msgid "Failed running mailer" msgstr "" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:33 #, c-format msgid "%s: Job's helper exited" msgstr "" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:70 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:24 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:56 #, c-format msgid "%s: Failure creating slot for child process" msgstr "" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:119 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:41 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:73 #, c-format msgid "%s: Failure starting child process" msgstr "" #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:30 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:62 #, c-format msgid "%s: Failure creating data storage for child process" msgstr "" #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:46 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:78 #, c-format msgid "%s: Failure waiting for child process to finish" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:46 #, fuzzy msgid "[job description input]" msgstr "Nincs megadva feladat leírás bemeneti adatként" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:47 msgid "" "Tool for writing the grami file representation of a job description file." msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:51 msgid "Name of grami file" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:56 #, fuzzy msgid "Configuration file to load" msgstr "Nincs megadva feladat leírás bemeneti adatként" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:57 msgid "arc.conf" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:61 msgid "Session directory to use" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:62 #, fuzzy msgid "directory" msgstr "könyvtár" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:78 #, fuzzy msgid "No job description file name provided." msgstr "Nincs megadva feladat leírás bemeneti adatként" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:84 #, fuzzy, c-format msgid "Unable to parse job description input: %s" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:90 #, fuzzy msgid "Unable to load ARC configuration file." 
msgstr "voms szerver fájljának az elérési útvonala" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:107 #, fuzzy, c-format msgid "Unable to write grami file: %s" msgstr "voms szerver fájljának az elérési útvonala" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:112 #, fuzzy, c-format msgid "Unable to write 'output' file: %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/information_collector.cpp:53 #, c-format msgid "Resource information provider: %s" msgstr "" #: src/services/a-rex/information_collector.cpp:56 #, fuzzy msgid "Resource information provider failed to start" msgstr "Státusz lekérdezés sikertelen" #: src/services/a-rex/information_collector.cpp:59 #, fuzzy msgid "Resource information provider failed to run" msgstr "Státusz lekérdezés sikertelen" #: src/services/a-rex/information_collector.cpp:63 #, c-format msgid "" "Resource information provider failed with exit status: %i\n" "%s" msgstr "" #: src/services/a-rex/information_collector.cpp:65 #, c-format msgid "" "Resource information provider log:\n" "%s" msgstr "" #: src/services/a-rex/information_collector.cpp:71 msgid "No new informational document assigned" msgstr "Nem jött létre új információs dokumentum" #: src/services/a-rex/information_collector.cpp:73 #, c-format msgid "Obtained XML: %s" msgstr "" #: src/services/a-rex/information_collector.cpp:87 msgid "Informational document is empty" msgstr "" #: src/services/a-rex/information_collector.cpp:212 msgid "OptimizedInformationContainer failed to create temporary file" msgstr "" #: src/services/a-rex/information_collector.cpp:215 #, c-format msgid "OptimizedInformationContainer created temporary file: %s" msgstr "" #: src/services/a-rex/information_collector.cpp:221 msgid "" "OptimizedInformationContainer failed to store XML document to temporary file" msgstr "" #: src/services/a-rex/information_collector.cpp:230 msgid "OptimizedInformationContainer failed to parse XML" msgstr "" #: src/services/a-rex/information_collector.cpp:242 #, fuzzy msgid "OptimizedInformationContainer failed to rename temprary file" msgstr "" "Proxy készítés sikertelen: Nem sikerült leellenÅ‘rizni a publikus kulcsot" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:36 msgid "Default INTERNAL client contructor" msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:39 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:59 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:81 #, fuzzy msgid "Failed to load grid-manager configfile" msgstr "privát kulcs elérési útvonala" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:44 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:64 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:86 #, fuzzy msgid "Failed to set INTERNAL endpoint" msgstr "Nem sikerült elküldeni a kérést" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:129 #, fuzzy msgid "Failed to identify grid-manager config file" msgstr "voms szerver fájljának az elérési útvonala" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:148 #, fuzzy, c-format msgid "Failed to run configuration parser at %s." msgstr "voms szerver fájljának az elérési útvonala" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:152 #, c-format msgid "Parser failed with error code %i." msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:158 #, c-format msgid "No pid file is found at '%s'. Probably A-REX is not running." 
msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:173 #, fuzzy, c-format msgid "Failed to load grid-manager config file from %s" msgstr "voms szerver fájljának az elérési útvonala" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:257 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:363 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:396 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:442 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:496 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:548 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:566 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:616 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:646 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:664 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:682 msgid "INTERNALClient is not initialized" msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:447 msgid "Submitting job " msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:508 #, fuzzy, c-format msgid "Failed to copy input file: %s to path: %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:514 #, fuzzy, c-format msgid "Failed to set permissions on: %s" msgstr "Nem sikerült elküldeni a kérést" #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:51 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:92 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:119 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:145 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:184 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:246 #, fuzzy msgid "Failed to load grid-manager config file" msgstr "voms szerver fájljának az elérési útvonala" #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:324 msgid "Retrieving job description of INTERNAL jobs is not supported" msgstr "" #: src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp:67 #, c-format msgid "Listing localjobs succeeded, %d localjobs found" msgstr "" #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:130 #, fuzzy msgid "Failed submitting job description" msgstr "Nem tudom megnyitni a feladat leíró fájlt: %s" #: src/services/a-rex/job.cpp:66 #, c-format msgid "Using cached local account '%s'" msgstr "" #: src/services/a-rex/job.cpp:77 msgid "Will not map to 'root' account by default" msgstr "" #: src/services/a-rex/job.cpp:90 msgid "No local account name specified" msgstr "" #: src/services/a-rex/job.cpp:93 #, c-format msgid "Using local account '%s'" msgstr "" #: src/services/a-rex/job.cpp:97 msgid "TLS provides no identity, going for OTokens" msgstr "" #: src/services/a-rex/job.cpp:155 #, fuzzy msgid "Failed to acquire A-REX's configuration" msgstr "voms szerver fájljának az elérési útvonala" #: src/services/a-rex/job.cpp:227 #, c-format msgid "Cannot handle local user %s" msgstr "" #: src/services/a-rex/job.cpp:275 #, c-format msgid "%s: Failed to parse user policy" msgstr "" #: src/services/a-rex/job.cpp:280 #, c-format msgid "%s: Failed to load evaluator for user policy " msgstr "" #: src/services/a-rex/job.cpp:385 #, c-format msgid "%s: Unknown user policy '%s'" msgstr "" #: src/services/a-rex/job.cpp:707 src/services/a-rex/job.cpp:731 #, c-format msgid "Credential expires at %s" msgstr "" #: 
src/services/a-rex/job.cpp:709 src/services/a-rex/job.cpp:733 #, c-format msgid "Credential handling exception: %s" msgstr "" #: src/services/a-rex/job.cpp:1031 #, c-format msgid "Out of tries while allocating new job ID in %s" msgstr "" #: src/services/a-rex/job.cpp:1270 msgid "No non-draining session dirs available" msgstr "" #: src/services/a-rex/put.cpp:150 #, c-format msgid "%s: put file %s: there is no payload" msgstr "" #: src/services/a-rex/put.cpp:156 #, c-format msgid "%s: put file %s: unrecognized payload" msgstr "" #: src/services/a-rex/put.cpp:172 src/services/a-rex/rest/rest.cpp:1595 #, fuzzy, c-format msgid "%s: put file %s: failed to create file: %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/put.cpp:188 #, c-format msgid "%s: put file %s: %s" msgstr "" #: src/services/a-rex/put.cpp:210 #, fuzzy, c-format msgid "%s: delete file %s: failed to obtain file path: %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/put.cpp:221 #, fuzzy, c-format msgid "%s: delete file %s: failed to open file/dir: %s" msgstr "Nem sikerült listázni a meta adatokat" #: src/services/a-rex/rest/rest.cpp:683 #, c-format msgid "REST: process %s at %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:726 src/services/a-rex/rest/rest.cpp:742 #: src/services/a-rex/rest/rest.cpp:797 src/services/a-rex/rest/rest.cpp:876 #: src/services/a-rex/rest/rest.cpp:1104 src/services/a-rex/rest/rest.cpp:1696 #, c-format msgid "process: method %s is not supported for subpath %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:748 #, c-format msgid "process: schema %s is not supported for subpath %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:873 src/services/a-rex/rest/rest.cpp:1101 #, fuzzy, c-format msgid "process: action %s is not supported for subpath %s" msgstr "A fileset regisztráció nem támogatott még" #: src/services/a-rex/rest/rest.cpp:1113 src/services/a-rex/rest/rest.cpp:1182 #: src/services/a-rex/rest/rest.cpp:1542 src/services/a-rex/rest/rest.cpp:1685 #, c-format msgid "REST:GET job %s - %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1229 src/services/a-rex/rest/rest.cpp:1237 #, c-format msgid "REST:KILL job %s - %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1254 src/services/a-rex/rest/rest.cpp:1262 #, c-format msgid "REST:CLEAN job %s - %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1279 src/services/a-rex/rest/rest.cpp:1287 #: src/services/a-rex/rest/rest.cpp:1304 #, c-format msgid "REST:RESTART job %s - %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1588 #, c-format msgid "REST:PUT job %s: file %s: there is no payload" msgstr "" #: src/services/a-rex/rest/rest.cpp:1608 #, c-format msgid "HTTP:PUT %s: put file %s: %s" msgstr "" #: src/services/a-rex/test_cache_check.cpp:24 #: src/tests/count/test_client.cpp:20 #: src/tests/echo/echo_test4axis2c/test_client.cpp:20 #: src/tests/echo/test_client.cpp:21 msgid "Creating client side chain" msgstr "" #: src/services/a-rex/update_credentials.cpp:29 #, c-format msgid "" "UpdateCredentials: request = \n" "%s" msgstr "" #: src/services/a-rex/update_credentials.cpp:35 msgid "UpdateCredentials: missing Reference" msgstr "" #: src/services/a-rex/update_credentials.cpp:43 msgid "UpdateCredentials: wrong number of Reference" msgstr "" #: src/services/a-rex/update_credentials.cpp:51 msgid "UpdateCredentials: wrong number of elements inside Reference" msgstr "" #: src/services/a-rex/update_credentials.cpp:60 msgid "UpdateCredentials: EPR contains no JobID" msgstr "" #: src/services/a-rex/update_credentials.cpp:70
#, c-format msgid "UpdateCredentials: no job found: %s" msgstr "" #: src/services/a-rex/update_credentials.cpp:77 msgid "UpdateCredentials: failed to update credentials" msgstr "" #: src/services/a-rex/update_credentials.cpp:85 #, c-format msgid "" "UpdateCredentials: response = \n" "%s" msgstr "" #: src/services/candypond/CandyPond.cpp:52 msgid "No A-REX config file found in candypond configuration" msgstr "" #: src/services/candypond/CandyPond.cpp:56 #, c-format msgid "Using A-REX config file %s" msgstr "" #: src/services/candypond/CandyPond.cpp:60 #, c-format msgid "Failed to process A-REX configuration in %s" msgstr "" #: src/services/candypond/CandyPond.cpp:65 msgid "No caches defined in configuration" msgstr "" #: src/services/candypond/CandyPond.cpp:150 msgid "Empty filename returned from FileCache" msgstr "" #: src/services/candypond/CandyPond.cpp:162 #, c-format msgid "Problem accessing cache file %s: %s" msgstr "" #: src/services/candypond/CandyPond.cpp:210 #: src/services/candypond/CandyPond.cpp:474 msgid "No job ID supplied" msgstr "" #: src/services/candypond/CandyPond.cpp:219 #, c-format msgid "Bad number in priority element: %s" msgstr "" #: src/services/candypond/CandyPond.cpp:228 msgid "No username supplied" msgstr "" #: src/services/candypond/CandyPond.cpp:235 #, c-format msgid "Supplied username %s does not match mapped username %s" msgstr "" #: src/services/candypond/CandyPond.cpp:249 msgid "No session directory found" msgstr "" #: src/services/candypond/CandyPond.cpp:253 #, c-format msgid "Using session dir %s" msgstr "" #: src/services/candypond/CandyPond.cpp:257 #, fuzzy, c-format msgid "Failed to stat session dir %s" msgstr "Nem sikerült elküldeni a kérést" #: src/services/candypond/CandyPond.cpp:262 #, c-format msgid "Session dir %s is owned by %i, but current mapped user is %i" msgstr "" #: src/services/candypond/CandyPond.cpp:289 #, fuzzy, c-format msgid "Failed to access proxy of given job id %s at %s" msgstr "Nem sikerült elküldeni a kérést" #: src/services/candypond/CandyPond.cpp:307 #, c-format msgid "DN is %s" msgstr "" #: src/services/candypond/CandyPond.cpp:385 #, c-format msgid "Permission checking passed for url %s" msgstr "" #: src/services/candypond/CandyPond.cpp:410 #: src/services/candypond/CandyPondGenerator.cpp:135 #, c-format msgid "Failed to move %s to %s: %s" msgstr "" #: src/services/candypond/CandyPond.cpp:441 #, c-format msgid "Starting new DTR for %s" msgstr "" #: src/services/candypond/CandyPond.cpp:443 #, c-format msgid "Failed to start new DTR for %s" msgstr "" #: src/services/candypond/CandyPond.cpp:487 #, c-format msgid "Job %s: all files downloaded successfully" msgstr "" #: src/services/candypond/CandyPond.cpp:494 #, c-format msgid "Job %s: Some downloads failed" msgstr "" #: src/services/candypond/CandyPond.cpp:499 #, c-format msgid "Job %s: files still downloading" msgstr "" #: src/services/candypond/CandyPond.cpp:511 msgid "CandyPond: Unauthorized" msgstr "" #: src/services/candypond/CandyPond.cpp:520 msgid "No local user mapping found" msgstr "" #: src/services/candypond/CandyPond.cpp:527 #: src/services/data-staging/DataDeliveryService.cpp:625 #, fuzzy, c-format msgid "Identity is %s" msgstr "Azonosító: %s" #: src/services/candypond/CandyPond.cpp:585 #: src/services/data-staging/DataDeliveryService.cpp:697 msgid "Security Handlers processing failed" msgstr "" #: src/services/candypond/CandyPond.cpp:592 msgid "Only POST is supported in CandyPond" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:88 #, c-format msgid "DTR 
%s finished with state %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:124 #, c-format msgid "Could not determine session directory from filename %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:164 #, c-format msgid "Invalid DTR for source %s, destination %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:206 #, c-format msgid "DTRs still running for job %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:215 #, c-format msgid "All DTRs finished for job %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:222 #, c-format msgid "Job %s not found" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:58 #, c-format msgid "Archiving DTR %s, state ERROR" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:62 #, c-format msgid "Archiving DTR %s, state %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:164 msgid "No delegation token in request" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:172 msgid "Failed to accept delegation" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:201 #: src/services/data-staging/DataDeliveryService.cpp:208 #, fuzzy msgid "ErrorDescription" msgstr "Feladat leírás: %s" #: src/services/data-staging/DataDeliveryService.cpp:213 #, c-format msgid "All %u process slots used" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:228 #, c-format msgid "Received retry for DTR %s still in transfer" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:235 #, c-format msgid "Replacing DTR %s in state %s with new request" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:245 #, c-format msgid "Storing temp proxy at %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:253 #, c-format msgid "Failed to create temp proxy at %s: %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:260 #, c-format msgid "Failed to change owner of temp proxy at %s to %i:%i: %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:285 #, fuzzy msgid "Invalid DTR" msgstr "Érvénytelen URL: %s" #: src/services/data-staging/DataDeliveryService.cpp:289 #, c-format msgid "Failed to remove temporary proxy %s: %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:390 #, c-format msgid "No such DTR %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:401 #, c-format msgid "DTR %s failed: %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:412 #, c-format msgid "DTR %s finished successfully" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:422 #, c-format msgid "DTR %s still in progress (%lluB transferred)" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:482 #, c-format msgid "No active DTR %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:492 #, c-format msgid "DTR %s was already cancelled" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:501 #, fuzzy, c-format msgid "DTR %s could not be cancelled" msgstr "A feladatot nem sikerült megölni vagy letörölni" #: src/services/data-staging/DataDeliveryService.cpp:545 #, fuzzy, c-format msgid "Failed to get load average: %s" msgstr "privát kulcs elérési útvonala" #: src/services/data-staging/DataDeliveryService.cpp:569 msgid "Invalid configuration - no allowed IP address specified" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:573 #, fuzzy msgid "Invalid configuration - no transfer dirs specified" msgstr "Nincs megadva feladat leírás bemeneti adatként" #: 
src/services/data-staging/DataDeliveryService.cpp:584 msgid "Failed to start archival thread" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:609 msgid "Shutting down data delivery service" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:618 msgid "Unauthorized" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:704 msgid "Only POST is supported in DataDeliveryService" msgstr "" #: src/services/examples/echo_python/EchoService.py:12 msgid "EchoService (python) constructor called" msgstr "" #: src/services/examples/echo_python/EchoService.py:17 #, python-format msgid "EchoService (python) has prefix %(prefix)s and suffix %(suffix)s" msgstr "" #: src/services/examples/echo_python/EchoService.py:24 msgid "EchoService (python) destructor called" msgstr "" #: src/services/examples/echo_python/EchoService.py:54 msgid "EchoService (python) thread test starting" msgstr "" #: src/services/examples/echo_python/EchoService.py:65 #, python-format msgid "EchoService (python) thread test, iteration %(iteration)s %(status)s" msgstr "" #: src/services/examples/echo_python/EchoService.py:82 msgid "EchoService (python) 'Process' called" msgstr "" #: src/services/examples/echo_python/EchoService.py:86 #, python-format msgid "inmsg.Auth().Export(arc.SecAttr.ARCAuth) = %s" msgstr "" #: src/services/examples/echo_python/EchoService.py:87 #, python-format msgid "inmsg.Attributes().getAll() = %s " msgstr "" #: src/services/examples/echo_python/EchoService.py:88 #, python-format msgid "EchoService (python) got: %s " msgstr "" #: src/services/examples/echo_python/EchoService.py:93 #, python-format msgid "EchoService (python) request_namespace: %s" msgstr "" #: src/services/examples/echo_python/EchoService.py:99 #: src/services/examples/echo_python/EchoService.py:171 #, fuzzy, python-format msgid "outpayload %s" msgstr "Feltöltve %s" #: src/services/examples/echo_python/EchoService.py:128 msgid "Calling https://localhost:60000/Echo using ClientSOAP" msgstr "" #: src/services/examples/echo_python/EchoService.py:131 msgid "Calling http://localhost:60000/Echo using ClientSOAP" msgstr "" #: src/services/examples/echo_python/EchoService.py:137 #: src/services/examples/echo_python/EchoService.py:155 #, fuzzy, python-format msgid "new_payload %s" msgstr "Feltöltve %s" #: src/services/examples/echo_python/EchoService.py:149 msgid "Calling http://localhost:60000/Echo using httplib" msgstr "" #: src/services/examples/echo_python/EchoService.py:165 msgid "Start waiting 10 sec..." msgstr "" #: src/services/examples/echo_python/EchoService.py:167 msgid "Waiting ends." 
msgstr "" #: src/services/gridftpd/auth/auth.cpp:328 #, c-format msgid "Unknown authorization command %s" msgstr "" #: src/services/gridftpd/auth/auth.cpp:347 #, c-format msgid "" "The [vo] section labeled '%s' has no file associated and can't be used for " "matching" msgstr "" #: src/services/gridftpd/auth/auth_plugin.cpp:73 #: src/services/gridftpd/auth/unixmap.cpp:217 #, c-format msgid "Plugin %s failed to run" msgstr "" #: src/services/gridftpd/auth/auth_plugin.cpp:75 #: src/services/gridftpd/auth/unixmap.cpp:219 #, c-format msgid "Plugin %s printed: %u" msgstr "" #: src/services/gridftpd/auth/auth_plugin.cpp:76 #: src/services/gridftpd/auth/unixmap.cpp:220 #, c-format msgid "Plugin %s error: %u" msgstr "" #: src/services/gridftpd/auth/auth_voms.cpp:28 #, c-format msgid "VOMS proxy processing returns: %i - %s" msgstr "" #: src/services/gridftpd/auth/auth_voms.cpp:120 #, c-format msgid "VOMS trust chains: %s" msgstr "" #: src/services/gridftpd/auth/unixmap.cpp:126 msgid "User name mapping has empty command" msgstr "" #: src/services/gridftpd/auth/unixmap.cpp:154 #, c-format msgid "User name mapping has empty name: %s" msgstr "" #: src/services/gridftpd/commands.cpp:46 #, c-format msgid "response: %s" msgstr "" #: src/services/gridftpd/commands.cpp:50 #, c-format msgid "Send response failed: %s" msgstr "" #: src/services/gridftpd/commands.cpp:80 msgid "Response sending error" msgstr "" #: src/services/gridftpd/commands.cpp:93 msgid "Closed connection" msgstr "" #: src/services/gridftpd/commands.cpp:131 #, c-format msgid "Socket conversion failed: %s" msgstr "" #: src/services/gridftpd/commands.cpp:141 #, c-format msgid "Failed to obtain own address: %s" msgstr "" #: src/services/gridftpd/commands.cpp:149 #, c-format msgid "Failed to recognize own address type (IPv4 or IPv6) - %u" msgstr "" #: src/services/gridftpd/commands.cpp:159 #, c-format msgid "Accepted connection on [%s]:%u" msgstr "" #: src/services/gridftpd/commands.cpp:161 #, c-format msgid "Accepted connection on %u.%u.%u.%u:%u" msgstr "" #: src/services/gridftpd/commands.cpp:196 msgid "Accept failed" msgstr "" #: src/services/gridftpd/commands.cpp:204 #: src/services/gridftpd/listener.cpp:415 #, c-format msgid "Accept failed: %s" msgstr "" #: src/services/gridftpd/commands.cpp:219 #, c-format msgid "Accepted connection from [%s]:%u" msgstr "" #: src/services/gridftpd/commands.cpp:221 #, c-format msgid "Accepted connection from %u.%u.%u.%u:%u" msgstr "" #: src/services/gridftpd/commands.cpp:230 msgid "Authenticate in commands failed" msgstr "" #: src/services/gridftpd/commands.cpp:239 msgid "Authentication failure" msgstr "" #: src/services/gridftpd/commands.cpp:247 #, c-format msgid "User subject: %s" msgstr "" #: src/services/gridftpd/commands.cpp:248 #, c-format msgid "Encrypted: %s" msgstr "" #: src/services/gridftpd/commands.cpp:254 msgid "User has no proper configuration associated" msgstr "" #: src/services/gridftpd/commands.cpp:262 msgid "" "User has empty virtual directory tree.\n" "Either user has no authorised plugins or there are no plugins configured at " "all." 
msgstr "" #: src/services/gridftpd/commands.cpp:279 msgid "Read commands in authenticate failed" msgstr "" #: src/services/gridftpd/commands.cpp:411 msgid "Control connection (probably) closed" msgstr "" #: src/services/gridftpd/commands.cpp:445 #: src/services/gridftpd/commands.cpp:724 msgid "Command EPRT" msgstr "" #: src/services/gridftpd/commands.cpp:446 #, fuzzy, c-format msgid "Failed to parse remote address %s" msgstr "Nem sikerült elküldeni a kérést" #: src/services/gridftpd/commands.cpp:468 #, c-format msgid "Command USER %s" msgstr "" #: src/services/gridftpd/commands.cpp:475 msgid "Command CDUP" msgstr "" #: src/services/gridftpd/commands.cpp:481 #, c-format msgid "Command CWD %s" msgstr "" #: src/services/gridftpd/commands.cpp:497 #, c-format msgid "Command MKD %s" msgstr "" #: src/services/gridftpd/commands.cpp:517 #, c-format msgid "Command SIZE %s" msgstr "" #: src/services/gridftpd/commands.cpp:532 #, c-format msgid "Command SBUF: %i" msgstr "" #: src/services/gridftpd/commands.cpp:553 #, c-format msgid "Command MLST %s" msgstr "" #: src/services/gridftpd/commands.cpp:576 #, c-format msgid "Command DELE %s" msgstr "" #: src/services/gridftpd/commands.cpp:591 #, c-format msgid "Command RMD %s" msgstr "" #: src/services/gridftpd/commands.cpp:605 #, c-format msgid "Command TYPE %c" msgstr "" #: src/services/gridftpd/commands.cpp:616 #, c-format msgid "Command MODE %c" msgstr "" #: src/services/gridftpd/commands.cpp:628 msgid "Command ABOR" msgstr "" #: src/services/gridftpd/commands.cpp:641 #, c-format msgid "Command REST %s" msgstr "" #: src/services/gridftpd/commands.cpp:654 #, c-format msgid "Command EPSV %s" msgstr "" #: src/services/gridftpd/commands.cpp:656 msgid "Command SPAS" msgstr "" #: src/services/gridftpd/commands.cpp:658 msgid "Command PASV" msgstr "" #: src/services/gridftpd/commands.cpp:679 msgid "local_pasv failed" msgstr "" #: src/services/gridftpd/commands.cpp:703 msgid "local_spas failed" msgstr "" #: src/services/gridftpd/commands.cpp:726 msgid "Command PORT" msgstr "" #: src/services/gridftpd/commands.cpp:729 msgid "active_data is disabled" msgstr "" #: src/services/gridftpd/commands.cpp:738 msgid "local_port failed" msgstr "" #: src/services/gridftpd/commands.cpp:751 #, c-format msgid "Command MLSD %s" msgstr "" #: src/services/gridftpd/commands.cpp:753 #, c-format msgid "Command NLST %s" msgstr "" #: src/services/gridftpd/commands.cpp:755 #, c-format msgid "Command LIST %s" msgstr "" #: src/services/gridftpd/commands.cpp:806 #, c-format msgid "Command ERET %s" msgstr "" #: src/services/gridftpd/commands.cpp:836 #, c-format msgid "Command RETR %s" msgstr "" #: src/services/gridftpd/commands.cpp:865 #, c-format msgid "Command STOR %s" msgstr "" #: src/services/gridftpd/commands.cpp:893 #, c-format msgid "Command ALLO %i" msgstr "" #: src/services/gridftpd/commands.cpp:916 msgid "Command OPTS" msgstr "" #: src/services/gridftpd/commands.cpp:919 msgid "Command OPTS RETR" msgstr "" #: src/services/gridftpd/commands.cpp:929 #, c-format msgid "Option: %s" msgstr "" #: src/services/gridftpd/commands.cpp:973 msgid "Command NOOP" msgstr "" #: src/services/gridftpd/commands.cpp:977 msgid "Command QUIT" msgstr "" #: src/services/gridftpd/commands.cpp:987 msgid "Failed to close, deleting client" msgstr "" #: src/services/gridftpd/commands.cpp:1001 #, c-format msgid "Command DCAU: %i '%s'" msgstr "" #: src/services/gridftpd/commands.cpp:1029 #, c-format msgid "Command PBZS: %s" msgstr "" #: src/services/gridftpd/commands.cpp:1037 #, c-format msgid "Setting pbsz to 
%lu" msgstr "" #: src/services/gridftpd/commands.cpp:1053 #, c-format msgid "Command PROT: %s" msgstr "" #: src/services/gridftpd/commands.cpp:1078 #, c-format msgid "Command MDTM %s" msgstr "" #: src/services/gridftpd/commands.cpp:1100 #, c-format msgid "Raw command: %s" msgstr "" #: src/services/gridftpd/commands.cpp:1148 msgid "Failed to allocate memory for buffer" msgstr "" #: src/services/gridftpd/commands.cpp:1155 #, c-format msgid "Allocated %u buffers %llu bytes each." msgstr "" #: src/services/gridftpd/commands.cpp:1162 msgid "abort_callback: start" msgstr "" #: src/services/gridftpd/commands.cpp:1165 #, c-format msgid "abort_callback: Globus error: %s" msgstr "" #: src/services/gridftpd/commands.cpp:1179 msgid "make_abort: start" msgstr "" #: src/services/gridftpd/commands.cpp:1191 msgid "Failed to abort data connection - ignoring and recovering" msgstr "" #: src/services/gridftpd/commands.cpp:1199 msgid "make_abort: wait for abort flag to be reset" msgstr "" #: src/services/gridftpd/commands.cpp:1209 msgid "make_abort: leaving" msgstr "" #: src/services/gridftpd/commands.cpp:1224 msgid "check_abort: have Globus error" msgstr "" #: src/services/gridftpd/commands.cpp:1225 msgid "Abort request caused by transfer error" msgstr "" #: src/services/gridftpd/commands.cpp:1228 msgid "check_abort: sending 426" msgstr "" #: src/services/gridftpd/commands.cpp:1249 msgid "Abort request caused by error in transfer function" msgstr "" #: src/services/gridftpd/commands.cpp:1331 msgid "Failed to start timer thread - timeout won't work" msgstr "" #: src/services/gridftpd/commands.cpp:1383 msgid "Killing connection due to timeout" msgstr "" #: src/services/gridftpd/conf/conf_vo.cpp:22 #: src/services/gridftpd/conf/conf_vo.cpp:48 msgid "Configuration section [userlist] is missing name." 
msgstr "" #: src/services/gridftpd/conf/daemon.cpp:58 #: src/services/gridftpd/conf/daemon.cpp:138 #, c-format msgid "No such user: %s" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:70 #: src/services/gridftpd/conf/daemon.cpp:150 #, c-format msgid "No such group: %s" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:83 #: src/services/gridftpd/conf/daemon.cpp:163 #, c-format msgid "Improper debug level '%s'" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:120 msgid "Missing option for command logreopen" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:125 msgid "Wrong option in logreopen" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:209 #, fuzzy, c-format msgid "Failed to open log file %s" msgstr "Nem sikerült listázni a fájlokat" #: src/services/gridftpd/datalist.cpp:101 msgid "Closing channel (list)" msgstr "" #: src/services/gridftpd/datalist.cpp:157 msgid "Data channel connected (list)" msgstr "" #: src/services/gridftpd/dataread.cpp:24 msgid "data_connect_retrieve_callback" msgstr "" #: src/services/gridftpd/dataread.cpp:30 msgid "Data channel connected (retrieve)" msgstr "" #: src/services/gridftpd/dataread.cpp:37 msgid "data_connect_retrieve_callback: allocate_data_buffer" msgstr "" #: src/services/gridftpd/dataread.cpp:40 msgid "data_connect_retrieve_callback: allocate_data_buffer failed" msgstr "" #: src/services/gridftpd/dataread.cpp:48 #, c-format msgid "data_connect_retrieve_callback: check for buffer %u" msgstr "" #: src/services/gridftpd/dataread.cpp:61 src/services/gridftpd/dataread.cpp:158 #, c-format msgid "Closing channel (retrieve) due to local read error: %s" msgstr "" #: src/services/gridftpd/dataread.cpp:75 src/services/gridftpd/dataread.cpp:172 msgid "Buffer registration failed" msgstr "" #: src/services/gridftpd/dataread.cpp:88 msgid "data_retrieve_callback" msgstr "" #: src/services/gridftpd/dataread.cpp:96 #, c-format msgid "Data channel (retrieve) %i %i %i" msgstr "" #: src/services/gridftpd/dataread.cpp:104 msgid "Closing channel (retrieve)" msgstr "" #: src/services/gridftpd/dataread.cpp:110 #: src/services/gridftpd/datawrite.cpp:128 #, c-format msgid "Time spent waiting for network: %.3f ms" msgstr "" #: src/services/gridftpd/dataread.cpp:111 #: src/services/gridftpd/datawrite.cpp:129 #, c-format msgid "Time spent waiting for disc: %.3f ms" msgstr "" #: src/services/gridftpd/dataread.cpp:122 msgid "data_retrieve_callback: lost buffer" msgstr "" #: src/services/gridftpd/datawrite.cpp:24 msgid "data_connect_store_callback" msgstr "" #: src/services/gridftpd/datawrite.cpp:30 msgid "Data channel connected (store)" msgstr "" #: src/services/gridftpd/datawrite.cpp:57 msgid "Failed to register any buffer" msgstr "" #: src/services/gridftpd/datawrite.cpp:76 #, c-format msgid "Data channel (store) %i %i %i" msgstr "" #: src/services/gridftpd/datawrite.cpp:89 msgid "data_store_callback: lost buffer" msgstr "" #: src/services/gridftpd/datawrite.cpp:105 #, c-format msgid "Closing channel (store) due to error: %s" msgstr "" #: src/services/gridftpd/datawrite.cpp:115 msgid "Closing channel (store)" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:55 msgid "Can't parse access rights in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:61 msgid "Can't parse user:group in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:68 msgid "Can't recognize user in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:77 msgid "Can't recognize group in configuration line" 
msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:84 #: src/services/gridftpd/fileplugin/fileplugin.cpp:89 msgid "Can't parse or:and in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:116 msgid "Can't parse configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:120 #, c-format msgid "Bad directory name: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:137 msgid "Can't parse create arguments in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:146 msgid "Can't parse mkdir arguments in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:163 #, c-format msgid "Bad subcommand in configuration line: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:175 msgid "Bad mount directory specified" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:177 #, c-format msgid "Mount point %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:215 #: src/services/gridftpd/fileplugin/fileplugin.cpp:274 #, c-format msgid "mkdir failed: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:226 #, c-format msgid "Warning: mount point %s creation failed." msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:330 #, c-format msgid "plugin: open: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:378 #: src/services/gridftpd/fileplugin/fileplugin.cpp:415 msgid "Not enough space to store file" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:430 #, c-format msgid "open: changing owner for %s, %i, %i" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:437 #, c-format msgid "open: owner: %i %i" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:446 #: src/services/gridftpd/fileplugin/fileplugin.cpp:486 #, c-format msgid "Unknown open mode %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:451 msgid "plugin: close" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:492 msgid "plugin: read" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:498 msgid "Error while reading file" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:508 msgid "plugin: write" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:519 msgid "Zero bytes written to file" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:727 #, c-format msgid "plugin: checkdir: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:730 #, c-format msgid "plugin: checkdir: access: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:739 #, c-format msgid "plugin: checkdir: access: allowed: %s" msgstr "" #: src/services/gridftpd/fileroot.cpp:14 #, c-format msgid "No plugin is configured or authorised for requested path %s" msgstr "" #: src/services/gridftpd/fileroot.cpp:19 msgid "FilePlugin: more unload than load" msgstr "" #: src/services/gridftpd/fileroot.cpp:34 #, c-format msgid "Can't load plugin %s for access point %s" msgstr "" #: src/services/gridftpd/fileroot.cpp:39 src/services/gridftpd/fileroot.cpp:43 #, c-format msgid "Plugin %s for access point %s is broken." msgstr "" #: src/services/gridftpd/fileroot.cpp:47 #, c-format msgid "Plugin %s for access point %s acquire failed (should never happen)." 
msgstr "" #: src/services/gridftpd/fileroot.cpp:54 #, c-format msgid "Destructor with dlclose (%s)" msgstr "" #: src/services/gridftpd/fileroot.cpp:77 #, c-format msgid "FileNode: operator= (%s <- %s) %lu <- %lu" msgstr "" #: src/services/gridftpd/fileroot.cpp:80 msgid "Copying with dlclose" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:31 #: src/services/gridftpd/fileroot_config.cpp:405 msgid "configuration file not found" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:54 msgid "Wrong port number in configuration" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:63 msgid "Wrong maxconnections number in configuration" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:72 msgid "Wrong defaultbuffer number in configuration" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:81 msgid "Wrong maxbuffer number in configuration" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:113 #: src/services/gridftpd/fileroot_config.cpp:121 #, c-format msgid "Can't resolve host %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:173 msgid "Could not determine hostname from gethostname()" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:190 msgid "unnamed group" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:199 msgid "undefined plugin name" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:203 msgid "undefined virtual plugin path" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:208 #, c-format msgid "bad directory for plugin: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:220 #, c-format msgid "Already have directory: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:223 #, c-format msgid "Registering directory: %s with plugin: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:236 #, c-format msgid "file node creation failed: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:286 #, c-format msgid "improper attribute for allowencryption command: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:300 #, c-format msgid "improper attribute for allowactvedata command: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:314 #, fuzzy, c-format msgid "failed while processing configuration command: %s %s" msgstr "voms szerver fájljának az elérési útvonala" #: src/services/gridftpd/fileroot_config.cpp:339 #, c-format msgid "Failed processing authorization group %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:352 msgid "Missing authgroup name in allowaccess" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:369 msgid "Missing authgroup name in denyaccess" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:419 msgid "failed to process client identification" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:426 msgid "failed to identify plugins path" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:453 #, c-format msgid "Registering dummy directory: %s" msgstr "" #: src/services/gridftpd/listener.cpp:57 src/services/gridftpd/listener.cpp:466 msgid "Activation failed" msgstr "" #: src/services/gridftpd/listener.cpp:66 src/services/gridftpd/listener.cpp:172 msgid "Child exited" msgstr "" #: src/services/gridftpd/listener.cpp:78 msgid "Globus connection error" msgstr "" #: src/services/gridftpd/listener.cpp:80 src/services/gridftpd/listener.cpp:424 msgid "New connection" msgstr "" #: src/services/gridftpd/listener.cpp:87 msgid "Server stopped" msgstr "" #: src/services/gridftpd/listener.cpp:157 msgid "Error: failed to set handler for SIGTERM" msgstr "" #: 
src/services/gridftpd/listener.cpp:161 msgid "Starting controlled process" msgstr "" #: src/services/gridftpd/listener.cpp:164 msgid "fork failed" msgstr "" #: src/services/gridftpd/listener.cpp:169 msgid "wait failed - killing child" msgstr "" #: src/services/gridftpd/listener.cpp:174 msgid "Killed with signal: " msgstr "" #: src/services/gridftpd/listener.cpp:176 msgid "Restarting after segmentation violation." msgstr "" #: src/services/gridftpd/listener.cpp:177 msgid "Waiting 1 minute" msgstr "" #: src/services/gridftpd/listener.cpp:239 msgid "Error: failed to set handler for SIGCHLD" msgstr "" #: src/services/gridftpd/listener.cpp:256 msgid "Missing argument" msgstr "" #: src/services/gridftpd/listener.cpp:257 msgid "Unknown option" msgstr "" #: src/services/gridftpd/listener.cpp:264 msgid "Wrong port number" msgstr "" #: src/services/gridftpd/listener.cpp:274 msgid "Wrong number of connections" msgstr "" #: src/services/gridftpd/listener.cpp:281 msgid "Wrong buffer size" msgstr "" #: src/services/gridftpd/listener.cpp:288 msgid "Wrong maximal buffer size" msgstr "" #: src/services/gridftpd/listener.cpp:300 msgid "Failed reading configuration" msgstr "" #: src/services/gridftpd/listener.cpp:331 #, c-format msgid "Failed to obtain local address: %s" msgstr "" #: src/services/gridftpd/listener.cpp:338 #, c-format msgid "Failed to create socket(%s): %s" msgstr "" #: src/services/gridftpd/listener.cpp:352 #, c-format msgid "Failed to limit socket to IPv6: %s" msgstr "" #: src/services/gridftpd/listener.cpp:359 #, c-format msgid "Failed to bind socket(%s): %s" msgstr "" #: src/services/gridftpd/listener.cpp:364 #, c-format msgid "Failed to listen on socket(%s): %s" msgstr "" #: src/services/gridftpd/listener.cpp:371 msgid "Not listening to anything" msgstr "" #: src/services/gridftpd/listener.cpp:374 #, c-format msgid "Some addresses failed. Listening on %u of %u." 
msgstr "" #: src/services/gridftpd/listener.cpp:382 #: src/services/gridftpd/listener.cpp:477 msgid "Listen started" msgstr "" #: src/services/gridftpd/listener.cpp:395 msgid "No valid handles left for listening" msgstr "" #: src/services/gridftpd/listener.cpp:401 #, fuzzy, c-format msgid "Select failed: %s" msgstr "Fájl feltöltve %s" #: src/services/gridftpd/listener.cpp:422 #, c-format msgid "Have connections: %i, max: %i" msgstr "" #: src/services/gridftpd/listener.cpp:427 #, c-format msgid "Fork failed: %s" msgstr "" #: src/services/gridftpd/listener.cpp:445 msgid "Refusing connection: Connection limit exceeded" msgstr "" #: src/services/gridftpd/listener.cpp:471 msgid "Init failed" msgstr "" #: src/services/gridftpd/listener.cpp:474 msgid "Listen failed" msgstr "" #: src/services/gridftpd/listener.cpp:488 msgid "Listen finished" msgstr "" #: src/services/gridftpd/listener.cpp:493 msgid "Stopping server" msgstr "" #: src/services/gridftpd/listener.cpp:497 msgid "Destroying handle" msgstr "" #: src/services/gridftpd/listener.cpp:500 msgid "Deactivating modules" msgstr "" #: src/services/gridftpd/listener.cpp:508 msgid "Exiting" msgstr "" #: src/services/gridftpd/misc/ldapquery.cpp:253 #, c-format msgid "%s: %s:%i" msgstr "" #: src/services/gridftpd/misc/ldapquery.cpp:390 #: src/services/gridftpd/misc/ldapquery.cpp:467 #, c-format msgid "%s %s" msgstr "" #: src/services/gridftpd/misc/ldapquery.cpp:394 #, c-format msgid " %s: %s" msgstr "" #: src/services/gridftpd/misc/ldapquery.cpp:396 #, c-format msgid " %s:" msgstr "" #: src/services/gridftpd/userspec.cpp:83 src/services/gridftpd/userspec.cpp:133 msgid "No proxy provided" msgstr "" #: src/services/gridftpd/userspec.cpp:85 #, c-format msgid "Proxy/credentials stored at %s" msgstr "" #: src/services/gridftpd/userspec.cpp:91 src/services/gridftpd/userspec.cpp:141 msgid "Running user has no name" msgstr "" #: src/services/gridftpd/userspec.cpp:94 src/services/gridftpd/userspec.cpp:144 #, c-format msgid "Mapped to running user: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:104 #: src/services/gridftpd/userspec.cpp:154 #, c-format msgid "Mapped to local id: %i" msgstr "" #: src/services/gridftpd/userspec.cpp:109 #: src/services/gridftpd/userspec.cpp:159 #, c-format msgid "No group %i for mapped user" msgstr "" #: src/services/gridftpd/userspec.cpp:113 #: src/services/gridftpd/userspec.cpp:163 #, c-format msgid "Mapped to local group id: %i" msgstr "" #: src/services/gridftpd/userspec.cpp:114 #: src/services/gridftpd/userspec.cpp:164 #, c-format msgid "Mapped to local group name: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:115 #: src/services/gridftpd/userspec.cpp:165 #, c-format msgid "Mapped user's home: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:135 #, c-format msgid "Proxy stored at %s" msgstr "" #: src/services/gridftpd/userspec.cpp:195 #, c-format msgid "Undefined control sequence: %%%s" msgstr "" #: src/services/gridftpd/userspec.cpp:218 #, c-format msgid "Local user %s does not exist" msgstr "" #: src/services/gridftpd/userspec.cpp:227 #, c-format msgid "Local group %s does not exist" msgstr "" #: src/services/gridftpd/userspec.cpp:232 #, c-format msgid "Remapped to local user: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:233 #, c-format msgid "Remapped to local id: %i" msgstr "" #: src/services/gridftpd/userspec.cpp:234 #, c-format msgid "Remapped to local group id: %i" msgstr "" #: src/services/gridftpd/userspec.cpp:235 #, c-format msgid "Remapped to local group name: %s" msgstr "" #: 
src/services/gridftpd/userspec.cpp:236 #, c-format msgid "Remapped user's home: %s" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:103 #, c-format msgid "Loading %u-th Python service" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:107 #, c-format msgid "Initialized %u-th Python service" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:142 msgid "Invalid class name" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:147 #, c-format msgid "class name: %s" msgstr "osztály neve: %s" #: src/services/wrappers/python/pythonwrapper.cpp:148 #, c-format msgid "module name: %s" msgstr "modul neve: %s" #: src/services/wrappers/python/pythonwrapper.cpp:205 msgid "Cannot find ARC Config class" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:212 msgid "Config class is not an object" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:220 msgid "Cannot get dictionary of module" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:229 msgid "Cannot find service class" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:238 msgid "Cannot create config argument" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:245 msgid "Cannot convert config to Python object" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:268 #, c-format msgid "%s is not an object" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:274 msgid "Message class is not an object" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:280 msgid "Python Wrapper constructor succeeded" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:295 #, c-format msgid "Python Wrapper destructor (%d)" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:328 msgid "Python interpreter locked" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:332 msgid "Python interpreter released" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:403 msgid "Python wrapper process called" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:412 msgid "Failed to create input SOAP container" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:422 msgid "Cannot create inmsg argument" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:436 msgid "Cannot find ARC Message class" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:442 msgid "Cannot convert inmsg to Python object" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:451 msgid "Failed to create SOAP containers" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:457 msgid "Cannot create outmsg argument" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:463 msgid "Cannot convert outmsg to Python object" msgstr "" #: src/tests/client/test_ClientInterface.cpp:36 #: src/tests/client/test_ClientSAML2SSO.cpp:68 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:78 #: src/tests/echo/test_clientinterface.cpp:41 #: src/tests/echo/test_clientinterface.cpp:132 #: src/tests/echo/test_clientinterface.py:12 msgid "Creating a soap client" msgstr "" #: src/tests/client/test_ClientInterface.cpp:73 #: src/tests/client/test_ClientSAML2SSO.cpp:47 #: src/tests/client/test_ClientSAML2SSO.cpp:71 #: src/tests/count/test_client.cpp:61 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:85 #: src/tests/echo/echo_test4axis2c/test_client.cpp:56 #: src/tests/echo/test.cpp:62 src/tests/echo/test_client.cpp:72 #: src/tests/echo/test_clientinterface.cpp:67 #: src/tests/echo/test_clientinterface.cpp:107 #: 
src/tests/echo/test_clientinterface.cpp:136 #: src/tests/echo/test_clientinterface.py:22 msgid "Creating and sending request" msgstr "" #: src/tests/client/test_ClientInterface.cpp:84 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:97 #: src/tests/echo/test_clientinterface.cpp:78 #: src/tests/echo/test_clientinterface.py:30 msgid "SOAP invocation failed" msgstr "" #: src/tests/client/test_ClientSAML2SSO.cpp:44 #: src/tests/echo/test_clientinterface.cpp:100 msgid "Creating a http client" msgstr "" #: src/tests/client/test_ClientSAML2SSO.cpp:55 #: src/tests/echo/test_clientinterface.cpp:117 msgid "HTTP with SAML2SSO invocation failed" msgstr "" #: src/tests/client/test_ClientSAML2SSO.cpp:59 #: src/tests/echo/test_clientinterface.cpp:121 msgid "There was no HTTP response" msgstr "" #: src/tests/client/test_ClientSAML2SSO.cpp:77 #: src/tests/echo/test_clientinterface.cpp:145 msgid "SOAP with SAML2SSO invocation failed" msgstr "" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:37 #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:38 #: src/tests/delegation/test_delegation_client.cpp:45 #: src/tests/delegation/test_delegation_client.cpp:76 #: src/tests/echo/test_clientinterface.cpp:172 #: src/tests/echo/test_clientinterface.cpp:194 msgid "Creating a delegation soap client" msgstr "" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:46 #: src/tests/delegation/test_delegation_client.cpp:51 #: src/tests/echo/test_clientinterface.cpp:178 msgid "Delegation to ARC delegation service failed" msgstr "" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:50 #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:49 #: src/tests/delegation/test_delegation_client.cpp:56 #: src/tests/delegation/test_delegation_client.cpp:88 #: src/tests/echo/test_clientinterface.cpp:182 #: src/tests/echo/test_clientinterface.cpp:205 #, c-format msgid "Delegation ID: %s" msgstr "" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:58 #, c-format msgid "Delegated credential from delegation service: %s" msgstr "" #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:45 #: src/tests/delegation/test_delegation_client.cpp:83 #: src/tests/echo/test_clientinterface.cpp:201 msgid "Delegation to gridsite delegation service failed" msgstr "" #: src/tests/count/count.cpp:58 msgid "Input is not SOAP" msgstr "" #: src/tests/count/count.cpp:89 src/tests/echo/echo.cpp:83 msgid "echo: Unauthorized" msgstr "" #: src/tests/count/count.cpp:98 src/tests/count/count.cpp:104 #, c-format msgid "Request is not supported - %s" msgstr "" #: src/tests/count/test_client.cpp:50 #: src/tests/echo/echo_test4axis2c/test_client.cpp:43 #: src/tests/echo/test_client.cpp:59 msgid "Failed to load client configuration" msgstr "" #: src/tests/count/test_client.cpp:54 #: src/tests/echo/echo_test4axis2c/test_client.cpp:47 #: src/tests/echo/test.cpp:58 src/tests/echo/test_client.cpp:63 msgid "Client side MCCs are loaded" msgstr "" #: src/tests/count/test_client.cpp:57 #: src/tests/echo/echo_test4axis2c/test_client.cpp:50 #: src/tests/echo/test_client.cpp:66 msgid "Client chain does not have entry point" msgstr "" #: src/tests/count/test_client.cpp:84 #: src/tests/echo/echo_test4axis2c/test_client.cpp:74 #: src/tests/echo/test.cpp:74 src/tests/echo/test_client.cpp:90 msgid "Request failed" msgstr "" #: src/tests/count/test_client.cpp:90 #: src/tests/echo/echo_test4axis2c/test_client.cpp:80 #: src/tests/echo/test.cpp:79 src/tests/echo/test_client.cpp:96 msgid "There is no response" msgstr "" #: 
src/tests/count/test_client.cpp:97 #: src/tests/echo/echo_test4axis2c/test_client.cpp:87 #: src/tests/echo/test_client.cpp:103 msgid "Response is not SOAP" msgstr "" #: src/tests/count/test_service.cpp:22 src/tests/echo/test.cpp:23 #: src/tests/echo/test_service.cpp:22 msgid "Creating service side chain" msgstr "" #: src/tests/count/test_service.cpp:25 src/tests/echo/test.cpp:26 #: src/tests/echo/test_service.cpp:25 msgid "Failed to load service configuration" msgstr "" #: src/tests/count/test_service.cpp:30 src/tests/echo/test_service.cpp:30 msgid "Service is waiting for requests" msgstr "" #: src/tests/echo/test.cpp:32 msgid "Creating client interface" msgstr "" #: src/tests/echo/test.cpp:82 msgid "Request succeed!!!" msgstr "" #, fuzzy #~ msgid "username to MyProxy server" #~ msgstr "myproxy szerverhez szükséges felhasználónév" #~ msgid "No stream response" #~ msgstr "Nincs válasz" #~ msgid "Returned msg from myproxy server: %s %d" #~ msgstr "Ezt a választ kaptam a myproxy szervertől: %s %d" #~ msgid "There are %d certificates in the returned msg" #~ msgstr "%d darab publikus tanúsítvány van a válasz üzenetben" #~ msgid "Delegate proxy failed" #~ msgstr "Proxy delegáció sikertelen" #~ msgid "Returned msg from voms server: %s " #~ msgstr "Ezt a választ kaptam a voms szervertől: %s" #~ msgid "path to config file" #~ msgstr "a konfigurációs fájl elérési útvonala" #~ msgid "[-]name" #~ msgstr "[-]név" #~ msgid "Missing URL" #~ msgstr "Hiányzik az URL" #, fuzzy #~ msgid "Query is not a valid XML" #~ msgstr "A lekérdezés nem XML helyes" #~ msgid "Failed to send request" #~ msgstr "Nem sikerült elküldeni a kérést" #, fuzzy #~ msgid "Failed locating credentials." #~ msgstr "Nem sikerült listázni a meta adatokat" #, fuzzy #~ msgid "Failed to cancel job: %s" #~ msgstr "Nem sikerült elküldeni a kérést" #, fuzzy #~ msgid "Creating and sending request to resume a job" #~ msgstr "SOAP kérés készítése és küldése" #, fuzzy #~ msgid "Creating and sending request to list jobs" #~ msgstr "SOAP kérés készítése és küldése" #, fuzzy #~ msgid "Failed resuming job: %s" #~ msgstr "Nem sikerült listázni a fájlokat" #, fuzzy #~ msgid "Unable to submit job. Job description is not valid in the %s format" #~ msgstr "Nem tudom kiíratni a feladat leírást: Nem található várakozó sor."
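The "#, fuzzy" and "#~" markers in the Hungarian entries above are standard GNU gettext annotations: fuzzy entries are guesses produced by msgmerge that a translator still has to confirm, and "#~" entries are obsolete messages that no longer occur in the sources. As a minimal sketch of how such a catalog is usually inspected before release (assuming the GNU gettext tools are installed and that this catalog is the hu.po named in the LINGUAS file further down; other file names are placeholders):
  msgfmt --check --statistics -o /dev/null hu.po   # validate the catalog and count translated/fuzzy/untranslated entries
  msgattrib --only-fuzzy hu.po                     # list only the fuzzy entries that still need review
  msgattrib --no-obsolete -o hu.cleaned.po hu.po   # write a copy with the obsolete "#~" entries dropped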
#~ msgid "A status request failed" #~ msgstr "Státusz lekérdezés sikertelen" #~ msgid "A status request succeed" #~ msgstr "Státusz lekérdezés sikeres" #~ msgid "A job termination request failed" #~ msgstr "A feladat megszakítása sikertelen" #~ msgid "A job termination request succeed" #~ msgstr "A feladat megszakítása sikeres" #, fuzzy #~ msgid "Sent entry: %s" #~ msgstr "Azonosító: %s" #, fuzzy #~ msgid "Failed processing user mapping command: unixgroupmap %s" #~ msgstr "voms szerver fájljának az elérési útvonala" #, fuzzy #~ msgid "CA certificates directory %s does not exist" #~ msgstr "Az XML konfigurációs fájl: %s nem létezik" #, fuzzy #~ msgid "Can't interpret configuration file %s as XML" #~ msgstr "voms szerver fájljának az elérési útvonala" #, fuzzy #~ msgid "year: %s" #~ msgstr "Név: %s" #, fuzzy #~ msgid "moth: %s" #~ msgstr "Proxy elérési útvonal: %s" #, fuzzy #~ msgid "queue: %s" #~ msgstr "Kérés: %s" #, fuzzy #~ msgid "query: %s" #~ msgstr "Kérés: %s" #, fuzzy #~ msgid "failed to initialize environment variables" #~ msgstr "Nem sikerült betölteni a konfigurációt" #~ msgid "use GSI proxy (RFC 3820 compliant proxy is default)" #~ msgstr "" #~ "GSI proxy használata (RFC 3820-nak megfelelő proxy, ez az alapbeállítás)" #, fuzzy #~ msgid "Unable to copy %s: No valid credentials found" #~ msgstr "Nem tudom másolni a %s fájlt: Nem érvényes tanúsítvány" #, fuzzy #~ msgid "Unable to create directory %s: No valid credentials found" #~ msgstr "Nem tudom másolni a %s fájlt: Nem érvényes tanúsítvány" #, fuzzy #~ msgid "Unable to rename %s: No valid credentials found" #~ msgstr "Nem tudom másolni a %s fájlt: Nem érvényes tanúsítvány" #, fuzzy #~ msgid "Unable to remove file %s: No valid credentials found" #~ msgstr "Nem tudom másolni a %s fájlt: Nem érvényes tanúsítvány" #~ msgid "Uploaded file %s" #~ msgstr "Fájl feltöltve %s" #~ msgid "Uploader started" #~ msgstr "A feltöltő elindult" #~ msgid "Uploaded %s" #~ msgstr "Feltöltve %s" #, fuzzy #~ msgid "Failed writing output status file" #~ msgstr "Nem sikerült listázni a meta adatokat" #~ msgid "explicitly select or reject a specific cluster" #~ msgstr "egy klaszter egyértelmű kiválasztása vagy tiltása" #~ msgid "explicitly select or reject an index server" #~ msgstr "egy index szerver egyértelmű kiválasztása vagy tiltása" #~ msgid "" #~ "The arcmigrate command is used for migrating queued jobs to another " #~ "cluster.\n" #~ "Note that migration is only supported between ARC1 clusters." #~ msgstr "" #~ "Az arcmigrate paraccsot arra lehet használni, hogy egy várakozó sorban\n"
#~ "lévő feladatot átmozgassunk egy másik klaszterre. Jelenleg csak az ARC1-" #~ "es\n" #~ "klaszterek esetén lehet csak használni" #~ msgid "explicitly select or reject a cluster to migrate to" #~ msgstr "egy klaszter egyértelmű kiválasztása vagy tiltása migráció esetére" #~ msgid "select broker method (Random (default), FastestQueue, or custom)" #~ msgstr "" #~ "bróker kiválasztása (Random (alapbeállítás), FastestQueue vagy saját)" #~ msgid "file where the jobs will be stored" #~ msgstr "azon fájl, ahol a feladat azonosítók tárolásra kerülnek" #~ msgid "explicitly select or reject a specific cluster for the new job" #~ msgstr "" #~ "egy klaszter egyértelmű kiválasztása vagy tiltása új feladat számára" #~ msgid "No jobs to resubmit" #~ msgstr "Nem sikerült újraküldeni a feladatot" #~ msgid "Submission to %s failed, trying next target" #~ msgstr "" #~ "Feladat küldés erre a klaszterre nem sikerült: %s, megpróbálom a " #~ "következőt" #~ msgid "Job resubmitted with new jobid: %s" #~ msgstr "Feladat újraküldve ezzel az azonosítóval: %s" #~ msgid "service_url request_file" #~ msgstr "szolgáltatás_url kérési_fájl" #~ msgid "url of the policy decision service" #~ msgstr "az eljárásmódot eldöntő szolgáltatás url-je" #~ msgid "URL of SLCS service" #~ msgstr "SLCS szolgáltatás URL-je" #~ msgid "Identity provider name" #~ msgstr "Azonító szolgáltatás neve" #~ msgid "User account to identity provider" #~ msgstr "Felhasználói név az azonosító szolgáltató részére" #~ msgid "Password for user account to identity provider" #~ msgstr "" #~ "A felhasználói névhez tartozó jelszó az azonosító szolgáltató részére" #~ msgid "Key size of the private key (512, 1024, 2048)" #~ msgstr "A privát kulcs mérete (512, 1024, 2048)" #~ msgid "Private key passphrase" #~ msgstr "Privát kulcs jelszava" #~ msgid "passphrase" #~ msgstr "jelszó" #~ msgid "period" #~ msgstr "periódus" #~ msgid "Current transfer FAILED: %s - %s" #~ msgstr "Az aktuális átvitel MEGSZAKADT: %s - %s" nordugrid-arc-6.14.0/po/PaxHeaders.30264/remove-potcdate.sin0000644000000000000000000000013114152153401021563 xustar000000000000000029 mtime=1638455041.34369012 30 atime=1638455102.040602119 30 ctime=1638455103.881629781 nordugrid-arc-6.14.0/po/remove-potcdate.sin0000644000175000002070000000066014152153401021547 0ustar00mockbuildmock00000000000000# Sed script that remove the POT-Creation-Date line in the header entry # from a POT file. # # The distinction between the first and the following occurrences of the # pattern is achieved by looking at the hold space. /^"POT-Creation-Date: .*"$/{ x # Test if the hold space is empty. s/P/P/ ta # Yes it was empty. First occurrence. Remove the line. g d bb :a # The hold space was nonempty. Following occurrences. Do nothing.
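# Usage note (illustrative sketch, based on the stock gettext po/Makefile.in.in
# that such packages ship; exact file names are assumptions): this .sin
# ("sed input") file is not run as-is. The build strips the comment lines
# first and then applies the resulting script to both the freshly regenerated
# POT file and the one in the source tree before comparing them, so that a
# differing POT-Creation-Date alone does not count as an update, roughly:
#   sed -e '/^#/d' remove-potcdate.sin > remove-potcdate.sed
#   sed -f remove-potcdate.sed < nordugrid-arc.pot > nordugrid-arc.1po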
x :b } nordugrid-arc-6.14.0/po/PaxHeaders.30264/stamp-po0000644000000000000000000000013214152153477017447 xustar000000000000000030 mtime=1638455103.877629721 30 atime=1638455103.877629721 30 ctime=1638455103.902630096 nordugrid-arc-6.14.0/po/stamp-po0000644000175000002070000000001214152153477017425 0ustar00mockbuildmock00000000000000timestamp nordugrid-arc-6.14.0/po/PaxHeaders.30264/boldquot.sed0000644000000000000000000000013214152153401020275 xustar000000000000000030 mtime=1638455041.303689518 30 atime=1638455041.303689518 30 ctime=1638455103.883629811 nordugrid-arc-6.14.0/po/boldquot.sed0000644000175000002070000000033114152153401020257 0ustar00mockbuildmock00000000000000s/"\([^"]*\)"/“\1â€/g s/`\([^`']*\)'/‘\1’/g s/ '\([^`']*\)' / ‘\1’ /g s/ '\([^`']*\)'$/ ‘\1’/g s/^'\([^`']*\)' /‘\1’ /g s/“â€/""/g s/“/“/g s/â€/â€/g s/‘/‘/g s/’/’/g nordugrid-arc-6.14.0/po/PaxHeaders.30264/LINGUAS0000644000000000000000000000013214152153376017007 xustar000000000000000030 mtime=1638455038.281644111 30 atime=1638455038.462646831 30 ctime=1638455103.903630112 nordugrid-arc-6.14.0/po/LINGUAS0000644000175000002070000000001414152153376016767 0ustar00mockbuildmock00000000000000ru sv de hu nordugrid-arc-6.14.0/po/PaxHeaders.30264/de.po0000644000000000000000000000013214152153477016714 xustar000000000000000030 mtime=1638455103.316621291 30 atime=1638455103.737627617 30 ctime=1638455103.893629961 nordugrid-arc-6.14.0/po/de.po0000644000175000002070000252333614152153477016717 0ustar00mockbuildmock00000000000000# translation of Arc.po to Russian # Oxana Smirnova , 2007. # Translation file for the Advanced Resource Connector (Arc) msgid "" msgstr "" "Project-Id-Version: Arc\n" "Report-Msgid-Bugs-To: support@nordugrid.org\n" "POT-Creation-Date: 2021-12-02 15:25+0100\n" "PO-Revision-Date: 2010-02-25 19:18+0100\n" "Last-Translator: Steffen Möller \n" "Language-Team: German\n" "Language: \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "X-Generator: KBabel 1.11.4\n" "Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" "%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n" "X-Poedit-Language: Russian\n" "X-Poedit-KeywordsList: msg:2;IString:1;istring:1\n" "X-Poedit-Basepath: /home/oxana/CVSROOT/ARC1\n" "X-Poedit-SearchPath-0: src\n" #: src/clients/compute/arccat.cpp:35 src/clients/compute/arcclean.cpp:34 #: src/clients/compute/arcget.cpp:35 src/clients/compute/arckill.cpp:33 #: src/clients/compute/arcrenew.cpp:32 src/clients/compute/arcresub.cpp:36 #: src/clients/compute/arcresume.cpp:32 src/clients/compute/arcstat.cpp:34 msgid "[job ...]" msgstr "[Job ...]" #: src/clients/compute/arccat.cpp:36 msgid "" "The arccat command performs the cat command on the stdout, stderr or grid\n" "manager's error log of the job." 
msgstr "" "Эта команда предназначена Ð´Ð»Ñ Ð²Ñ‹Ð²Ð¾Ð´Ð° на Ñкран Ñообщений Ñтандартного\n" "выхода, Ñтандартной ошибки или ошибок ÑиÑтемы при иÑполнении задачи" #: src/clients/compute/arccat.cpp:43 src/clients/compute/arcclean.cpp:41 #: src/clients/compute/arcget.cpp:42 src/clients/compute/arcinfo.cpp:45 #: src/clients/compute/arckill.cpp:40 src/clients/compute/arcrenew.cpp:37 #: src/clients/compute/arcresub.cpp:41 src/clients/compute/arcresume.cpp:37 #: src/clients/compute/arcstat.cpp:42 src/clients/compute/arcsub.cpp:53 #: src/clients/compute/arcsync.cpp:147 src/clients/compute/arctest.cpp:64 #: src/clients/credentials/arcproxy.cpp:457 src/clients/data/arccp.cpp:641 #: src/clients/data/arcls.cpp:347 src/clients/data/arcmkdir.cpp:125 #: src/clients/data/arcrename.cpp:136 src/clients/data/arcrm.cpp:151 #: src/hed/daemon/unix/main_unix.cpp:341 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1263 #: src/hed/libs/data/DataExternalHelper.cpp:358 #, c-format msgid "%s version %s" msgstr "%s version %s" #: src/clients/compute/arccat.cpp:52 src/clients/compute/arcclean.cpp:50 #: src/clients/compute/arcget.cpp:51 src/clients/compute/arcinfo.cpp:53 #: src/clients/compute/arckill.cpp:49 src/clients/compute/arcrenew.cpp:46 #: src/clients/compute/arcresub.cpp:50 src/clients/compute/arcresume.cpp:46 #: src/clients/compute/arcstat.cpp:51 src/clients/compute/arcsub.cpp:62 #: src/clients/compute/arcsync.cpp:156 src/clients/compute/arctest.cpp:86 #: src/clients/credentials/arcproxy.cpp:465 src/clients/data/arccp.cpp:648 #: src/clients/data/arcls.cpp:355 src/clients/data/arcmkdir.cpp:133 #: src/clients/data/arcrename.cpp:144 src/clients/data/arcrm.cpp:160 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:174 #: src/services/a-rex/grid-manager/GridManager.cpp:110 #: src/services/a-rex/grid-manager/log/JobLog.cpp:139 #, fuzzy, c-format msgid "Running command: %s" msgstr "Kommando: %s" #: src/clients/compute/arccat.cpp:63 src/clients/compute/arcclean.cpp:61 #: src/clients/compute/arcget.cpp:62 src/clients/compute/arcinfo.cpp:65 #: src/clients/compute/arckill.cpp:60 src/clients/compute/arcrenew.cpp:57 #: src/clients/compute/arcresub.cpp:54 src/clients/compute/arcresume.cpp:50 #: src/clients/compute/arcstat.cpp:62 src/clients/compute/arcsub.cpp:66 #: src/clients/compute/arcsync.cpp:167 src/clients/compute/arctest.cpp:90 #: src/clients/data/arccp.cpp:671 src/clients/data/arcls.cpp:377 #: src/clients/data/arcmkdir.cpp:155 src/clients/data/arcrename.cpp:166 #: src/clients/data/arcrm.cpp:182 #, fuzzy msgid "Failed configuration initialization" msgstr "Fehler bei Initialisierung der Konfiguration" #: src/clients/compute/arccat.cpp:78 src/clients/compute/arcclean.cpp:76 #: src/clients/compute/arcget.cpp:90 src/clients/compute/arckill.cpp:75 #: src/clients/compute/arcrenew.cpp:70 src/clients/compute/arcresub.cpp:85 #: src/clients/compute/arcresume.cpp:72 src/clients/compute/arcstat.cpp:71 #, fuzzy, c-format msgid "Cannot read specified jobid file: %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/clients/compute/arccat.cpp:89 src/clients/compute/arcclean.cpp:87 #: src/clients/compute/arcget.cpp:101 src/clients/compute/arckill.cpp:86 #: src/clients/compute/arcrenew.cpp:81 src/clients/compute/arcresub.cpp:99 #: src/clients/compute/arcresume.cpp:83 src/clients/compute/arcstat.cpp:105 msgid "No jobs given" msgstr "Keine Jobs angegeben" #: src/clients/compute/arccat.cpp:102 src/clients/compute/arcclean.cpp:100 #: src/clients/compute/arcget.cpp:114 src/clients/compute/arckill.cpp:99 #: 
src/clients/compute/arcrenew.cpp:94 src/clients/compute/arcresub.cpp:109 #: src/clients/compute/arcresume.cpp:96 src/clients/compute/arcstat.cpp:117 #, fuzzy, c-format msgid "Job list file (%s) doesn't exist" msgstr "Lock-Datei %s existiert nicht" #: src/clients/compute/arccat.cpp:109 src/clients/compute/arcclean.cpp:107 #: src/clients/compute/arcget.cpp:121 src/clients/compute/arckill.cpp:106 #: src/clients/compute/arcrenew.cpp:101 src/clients/compute/arcresub.cpp:116 #: src/clients/compute/arcresume.cpp:103 src/clients/compute/arcstat.cpp:124 #: src/clients/compute/arctest.cpp:335 #, fuzzy, c-format msgid "Unable to read job information from file (%s)" msgstr "Konnte job information nicht beziehen für job: %s" #: src/clients/compute/arccat.cpp:118 src/clients/compute/arcclean.cpp:115 #: src/clients/compute/arcget.cpp:129 src/clients/compute/arckill.cpp:114 #: src/clients/compute/arcrenew.cpp:110 src/clients/compute/arcresub.cpp:124 #: src/clients/compute/arcresume.cpp:112 src/clients/compute/arcstat.cpp:133 #, fuzzy, c-format msgid "Warning: Job not found in job list: %s" msgstr "Kann Job ID nicht finden: %s" #: src/clients/compute/arccat.cpp:131 src/clients/compute/arcclean.cpp:170 #: src/clients/compute/arcget.cpp:142 src/clients/compute/arckill.cpp:126 #: src/clients/compute/arcrenew.cpp:122 src/clients/compute/arcresub.cpp:136 #: src/clients/compute/arcresume.cpp:124 #, fuzzy msgid "No jobs" msgstr "NO Job" #: src/clients/compute/arccat.cpp:146 #, c-format msgid "Could not create temporary file \"%s\"" msgstr "" #: src/clients/compute/arccat.cpp:147 src/clients/compute/arccat.cpp:153 #, fuzzy, c-format msgid "Cannot create output of %s for any jobs" msgstr "Kann Verzeichnis \"%s\" für cache nicht anlegen" #: src/clients/compute/arccat.cpp:154 #, fuzzy, c-format msgid "Invalid destination URL %s" msgstr "Ungültige URL: %s" #: src/clients/compute/arccat.cpp:172 #, c-format msgid "Job deleted: %s" msgstr "" #: src/clients/compute/arccat.cpp:182 #, c-format msgid "Job has not started yet: %s" msgstr "" #: src/clients/compute/arccat.cpp:223 #, fuzzy, c-format msgid "Cannot determine the %s location: %s" msgstr "Kann Funktion %s nicht anlegen" #: src/clients/compute/arccat.cpp:228 #, c-format msgid "Cannot create output of %s for job (%s): Invalid source %s" msgstr "" #: src/clients/compute/arccat.cpp:241 #, c-format msgid "Catting %s for job %s" msgstr "" #: src/clients/compute/arcclean.cpp:35 #, fuzzy msgid "The arcclean command removes a job from the computing resource." msgstr "Das arcclean Kommando entfernt einen Job von einem entfernten Cluster." #: src/clients/compute/arcclean.cpp:139 msgid "" "You are about to remove jobs from the job list for which no information " "could be\n" "found. NOTE: Recently submitted jobs might not have appeared in the " "information\n" "system, and this action will also remove such jobs." msgstr "" #: src/clients/compute/arcclean.cpp:142 #, fuzzy msgid "Are you sure you want to clean jobs missing information?" msgstr "Soll die lokale job list wirklich synchronisiert werden?" #: src/clients/compute/arcclean.cpp:143 src/clients/compute/arcsync.cpp:221 msgid "y" msgstr "j" #: src/clients/compute/arcclean.cpp:143 src/clients/compute/arcsync.cpp:221 msgid "n" msgstr "n" #: src/clients/compute/arcclean.cpp:148 #, fuzzy msgid "Jobs missing information will not be cleaned!" 
msgstr "Fehler bei Bezug von Information für Job: %s" #: src/clients/compute/arcclean.cpp:164 src/clients/compute/arcresub.cpp:177 #: src/clients/compute/arctest.cpp:339 #, fuzzy, c-format msgid "Warning: Failed to write job information to file (%s)" msgstr "Konnte job information nicht beziehen für job: %s" #: src/clients/compute/arcclean.cpp:165 msgid "" " Run 'arcclean -s Undefined' to remove cleaned jobs from job list" msgstr "" #: src/clients/compute/arcclean.cpp:174 #, c-format msgid "Jobs processed: %d, deleted: %d" msgstr "" #: src/clients/compute/arcget.cpp:36 msgid "The arcget command is used for retrieving the results from a job." msgstr "Mit arcget erhält man die Ergebnisse eines Jobs." #: src/clients/compute/arcget.cpp:78 #, fuzzy, c-format msgid "Job download directory from user configuration file: %s" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: src/clients/compute/arcget.cpp:81 #, fuzzy msgid "Job download directory will be created in present working directory." msgstr "" "Download-Verzeichnis (das Job-Verzeichnis wird in diesem Verzeichnis " "abgelegt)" #: src/clients/compute/arcget.cpp:85 #, fuzzy, c-format msgid "Job download directory: %s" msgstr "Fehler beim Öffnen von Verzeichs: %s" #: src/clients/compute/arcget.cpp:152 #, fuzzy, c-format msgid "Unable to create directory for storing results (%s) - %s" msgstr "Fehler bei Anlegen von Datei %s zum Schreiben: %s" #: src/clients/compute/arcget.cpp:162 #, c-format msgid "Results stored at: %s" msgstr "" #: src/clients/compute/arcget.cpp:174 src/clients/compute/arckill.cpp:142 msgid "Warning: Some jobs were not removed from server" msgstr "" #: src/clients/compute/arcget.cpp:175 src/clients/compute/arcget.cpp:182 #: src/clients/compute/arckill.cpp:143 msgid " Use arcclean to remove retrieved jobs from job list" msgstr "" #: src/clients/compute/arcget.cpp:181 src/clients/compute/arckill.cpp:149 #: src/clients/compute/arcresub.cpp:207 #, fuzzy, c-format msgid "Warning: Failed removing jobs from file (%s)" msgstr "Warnung: Fehler bei Bezug von Attributen von %s: %s" #: src/clients/compute/arcget.cpp:186 #, c-format msgid "" "Jobs processed: %d, successfully retrieved: %d, successfully cleaned: %d" msgstr "" #: src/clients/compute/arcget.cpp:190 #, c-format msgid "Jobs processed: %d, successfully retrieved: %d" msgstr "" #: src/clients/compute/arcinfo.cpp:34 #, fuzzy msgid "[resource ...]" msgstr "[Job ...]" #: src/clients/compute/arcinfo.cpp:35 #, fuzzy msgid "" "The arcinfo command is used for obtaining the status of computing resources " "on the Grid." msgstr "Mit arcinfo wird der Zustand von Clustern auf dem Grid bestimmt." #: src/clients/compute/arcinfo.cpp:142 msgid "Information endpoint" msgstr "" #: src/clients/compute/arcinfo.cpp:153 #, fuzzy msgid "Submission endpoint" msgstr "Submission ergab Fehler: %s" #: src/clients/compute/arcinfo.cpp:155 #, fuzzy msgid "status" msgstr "statusstr" #: src/clients/compute/arcinfo.cpp:157 #, fuzzy msgid "interface" msgstr "Benutzungsschnittstellenfehler" #: src/clients/compute/arcinfo.cpp:176 #, fuzzy msgid "ERROR: Failed to retrieve information from the following endpoints:" msgstr "Fehler bei Bezug von Information für Job: %s" #: src/clients/compute/arcinfo.cpp:189 #, fuzzy msgid "ERROR: Failed to retrieve information" msgstr "Konnte Job Status Information nicht beziehen." #: src/clients/compute/arcinfo.cpp:191 msgid "from the following endpoints:" msgstr "" #: src/clients/compute/arckill.cpp:34 msgid "The arckill command is used to kill running jobs." 
msgstr "Mit arckill lassen sich laufenden Prozesse beenden." #: src/clients/compute/arckill.cpp:150 msgid "" " Run 'arcclean -s Undefined' to remove killed jobs from job list" msgstr "" #: src/clients/compute/arckill.cpp:153 #, c-format msgid "Jobs processed: %d, successfully killed: %d, successfully cleaned: %d" msgstr "" #: src/clients/compute/arckill.cpp:155 #, fuzzy, c-format msgid "Jobs processed: %d, successfully killed: %d" msgstr "Job resumed erfolgreich" #: src/clients/compute/arcrenew.cpp:128 #, c-format msgid "Jobs processed: %d, renewed: %d" msgstr "" #: src/clients/compute/arcresub.cpp:79 msgid "--same and --not-same cannot be specified together." msgstr "" #: src/clients/compute/arcresub.cpp:153 msgid "" "It is not possible to resubmit jobs without new target information discovery" msgstr "" #: src/clients/compute/arcresub.cpp:166 #, fuzzy msgid "No jobs to resubmit with the specified status" msgstr "Keine Job Beschreibung als Eingabe benötigt" #: src/clients/compute/arcresub.cpp:173 src/clients/compute/submit.cpp:34 #, c-format msgid "Job submitted with jobid: %s" msgstr "Job hochgeladen mit Job ID: %s" #: src/clients/compute/arcresub.cpp:178 msgid " To recover missing jobs, run arcsync" msgstr "" #: src/clients/compute/arcresub.cpp:183 #, fuzzy, c-format msgid "Cannot write jobids to file (%s)" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/clients/compute/arcresub.cpp:194 #, c-format msgid "" "Resubmission of job (%s) succeeded, but killing the job failed - it will " "still appear in the job list" msgstr "" #: src/clients/compute/arcresub.cpp:203 #, c-format msgid "" "Resubmission of job (%s) succeeded, but cleaning the job failed - it will " "still appear in the job list" msgstr "" #: src/clients/compute/arcresub.cpp:208 msgid " Use arcclean to remove non-existing jobs" msgstr "" #: src/clients/compute/arcresub.cpp:215 #, fuzzy msgid "Job resubmission summary:" msgstr "Job Hochladen Zusammenfassung:" #: src/clients/compute/arcresub.cpp:217 #, fuzzy, c-format msgid "%d of %d jobs were resubmitted" msgstr "%d von %s Jobs wurden hochgeladen" #: src/clients/compute/arcresub.cpp:219 #, fuzzy, c-format msgid "The following %d were not resubmitted" msgstr "Die folgenden %d wurden nicht hochgeladen" #: src/clients/compute/arcresume.cpp:130 #, c-format msgid "Jobs processed: %d, resumed: %d" msgstr "" #: src/clients/compute/arcstat.cpp:35 #, fuzzy msgid "" "The arcstat command is used for obtaining the status of jobs that have\n" "been submitted to Grid enabled resources." msgstr "" "Эта команда используется для вывода информации о состоянии\n" "задач, отправленных на Грид, и о состоянии вычислительных\n" "ресурсов Грид " #: src/clients/compute/arcstat.cpp:79 msgid "The 'sort' and 'rsort' flags cannot be specified at the same time." msgstr "" #: src/clients/compute/arcstat.cpp:149 #, fuzzy msgid "No jobs found, try later" msgstr "Keine Jobs zu bearbeiten" #: src/clients/compute/arcstat.cpp:193 #, c-format msgid "Status of %d jobs was queried, %d jobs returned information" msgstr "" #: src/clients/compute/arcsub.cpp:45 msgid "[filename ...]" msgstr "[dateiname ...]" #: src/clients/compute/arcsub.cpp:46 #, fuzzy msgid "" "The arcsub command is used for submitting jobs to Grid enabled computing\n" "resources."
msgstr "Mit dem arcsub Kommando werden Jobs den entfernten Clustern zugewiesen" #: src/clients/compute/arcsub.cpp:94 #, fuzzy msgid "No job description input specified" msgstr "Keine Job Beschreibung als Eingabe benötigt" #: src/clients/compute/arcsub.cpp:107 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:582 #, c-format msgid "Can not open job description file: %s" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/clients/compute/arcsub.cpp:135 src/clients/compute/arcsub.cpp:163 msgid "Invalid JobDescription:" msgstr "Ungültige JobDescription:" #: src/clients/compute/arcsub.cpp:198 src/clients/compute/arctest.cpp:229 msgid "" "Cannot adapt job description to the submission target when information " "discovery is turned off" msgstr "" #: src/clients/compute/arcsync.cpp:66 src/clients/compute/arcsync.cpp:174 #, c-format msgid "Warning: Unable to open job list file (%s), unknown format" msgstr "" #: src/clients/compute/arcsync.cpp:76 msgid "Found the following jobs:" msgstr "" #: src/clients/compute/arcsync.cpp:86 msgid "Total number of jobs found: " msgstr "" #: src/clients/compute/arcsync.cpp:98 msgid "Found the following new jobs:" msgstr "" #: src/clients/compute/arcsync.cpp:108 msgid "Total number of new jobs found: " msgstr "" #: src/clients/compute/arcsync.cpp:113 #, fuzzy, c-format msgid "ERROR: Failed to write job information to file (%s)" msgstr "Konnte job information nicht beziehen für job: %s" #: src/clients/compute/arcsync.cpp:140 #, fuzzy msgid "" "The arcsync command synchronizes your local job list with the information " "at\n" "the given resources or index servers." msgstr "" "Das Kommando synchronisierte Ihre lokale Jobliste mit der Information eines " "Clusters oder Index-Servers" #: src/clients/compute/arcsync.cpp:180 #, fuzzy, c-format msgid "Warning: Unable to read local list of jobs from file (%s)" msgstr "Warnung: Fehler bei Bezug von Attributen von %s: %s" #: src/clients/compute/arcsync.cpp:185 #, fuzzy, c-format msgid "Warning: Unable to truncate local list of jobs in file (%s)" msgstr "Konnte job information nicht beziehen für job: %s" #: src/clients/compute/arcsync.cpp:191 #, c-format msgid "Warning: Unable to create job list file (%s), jobs list is destroyed" msgstr "" #: src/clients/compute/arcsync.cpp:195 #, fuzzy, c-format msgid "" "Warning: Failed to write local list of jobs into file (%s), jobs list is " "destroyed" msgstr "Konnte job information nicht beziehen für job: %s" #: src/clients/compute/arcsync.cpp:215 #, fuzzy msgid "" "Synchronizing the local list of active jobs with the information in the\n" "information system can result in some inconsistencies. Very recently " "submitted\n" "jobs might not yet be present, whereas jobs very recently scheduled for\n" "deletion can still be present." msgstr "" "Synchronisiere lokale Liste aktiver Jobs mit der Information im MDS. Dies " "mag\n" "zu Inkonsistenzen führen. Gerade erst hochgeladene Jobs sind vielleicht " "noch\n" "nicht dem MDB bekannt, während für die Löschung ausgewählte Jobs noch ange-\n" "zeigt werden." #: src/clients/compute/arcsync.cpp:220 msgid "Are you sure you want to synchronize your local job list?" msgstr "Soll die lokale job list wirklich synchronisiert werden?" #: src/clients/compute/arcsync.cpp:225 msgid "Cancelling synchronization request" msgstr "Abbruch der Synchronisationsanfrage" #: src/clients/compute/arcsync.cpp:243 msgid "" "No services specified. 
Please configure default services in the client " "configuration, or specify a cluster or index (-c or -g options, see arcsync -" "h)." msgstr "" #: src/clients/compute/arctest.cpp:57 msgid " " msgstr "" #: src/clients/compute/arctest.cpp:58 #, fuzzy msgid "The arctest command is used for testing clusters as resources." msgstr "Mit arcget erhält man die Ergebnisse eines Jobs." #: src/clients/compute/arctest.cpp:70 msgid "" "Nothing to do:\n" "you have to either specify a test job id with -J (--job)\n" "or query information about the certificates with -E (--certificate)\n" msgstr "" #: src/clients/compute/arctest.cpp:77 msgid "" "For the 1st test job you also have to specify a runtime value with -r (--" "runtime) option." msgstr "" #: src/clients/compute/arctest.cpp:111 #, fuzzy msgid "Certificate information:" msgstr "Ungültige Authentisierungs-Information" #: src/clients/compute/arctest.cpp:115 #, fuzzy msgid "No user-certificate found" msgstr "Pfad zu Zertifikat-Datei" #: src/clients/compute/arctest.cpp:118 #, fuzzy, c-format msgid "Certificate: %s" msgstr "Voreinstellung: %s" #: src/clients/compute/arctest.cpp:120 #, fuzzy, c-format msgid "Subject name: %s" msgstr "Subjekt: %s" #: src/clients/compute/arctest.cpp:121 #, fuzzy, c-format msgid "Valid until: %s" msgstr "Ungültige url: %s" #: src/clients/compute/arctest.cpp:125 #, fuzzy msgid "Unable to determine certificate information" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/clients/compute/arctest.cpp:129 #, fuzzy msgid "Proxy certificate information:" msgstr "Ungültige Authentisierungs-Information" #: src/clients/compute/arctest.cpp:131 msgid "No proxy found" msgstr "" #: src/clients/compute/arctest.cpp:134 #, fuzzy, c-format msgid "Proxy: %s" msgstr "Proxy Pfad: %s" #: src/clients/compute/arctest.cpp:135 #, fuzzy, c-format msgid "Proxy-subject: %s" msgstr "Subjekt: %s" #: src/clients/compute/arctest.cpp:137 #, fuzzy msgid "Valid for: Proxy expired" msgstr "Zeit verbleibend für Proxy: Proxy abgelaufen" #: src/clients/compute/arctest.cpp:139 #, fuzzy msgid "Valid for: Proxy not valid" msgstr "Zeit verbleibend für Proxy: Proxy ungültig" #: src/clients/compute/arctest.cpp:141 #, fuzzy, c-format msgid "Valid for: %s" msgstr "Ungültige url: %s" #: src/clients/compute/arctest.cpp:146 #, c-format msgid "Certificate issuer: %s" msgstr "" #: src/clients/compute/arctest.cpp:150 #, fuzzy msgid "CA-certificates installed:" msgstr "Pfad zu Zertifikat-Datei" #: src/clients/compute/arctest.cpp:172 msgid "Unable to detect if issuer certificate is installed." msgstr "" #: src/clients/compute/arctest.cpp:175 msgid "Your issuer's certificate is not installed" msgstr "" #: src/clients/compute/arctest.cpp:189 #, c-format msgid "No test-job, with ID \"%d\"" msgstr "" #: src/clients/compute/arctest.cpp:245 #, fuzzy, c-format msgid "Unable to load broker %s" msgstr "Konnter Broker %s nicht laden" #: src/clients/compute/arctest.cpp:248 #: src/hed/libs/compute/BrokerPlugin.cpp:106 #, c-format msgid "Broker %s loaded" msgstr "Broker %s geladen" #: src/clients/compute/arctest.cpp:270 #, fuzzy msgid "Test aborted because no resource returned any information" msgstr "Job Migration abgebrochen, da kein Cluster Informationen lieferte" #: src/clients/compute/arctest.cpp:272 src/clients/compute/submit.cpp:170 msgid "" "Unable to adapt job description to any resource, no resource information " "could be obtained." 
msgstr "" #: src/clients/compute/arctest.cpp:273 src/clients/compute/submit.cpp:171 #, fuzzy msgid "Original job description is listed below:" msgstr "" " -o, -stdout вывеÑти файл Ñтандартого выхода задачи (по\n" " умолчанию)" #: src/clients/compute/arctest.cpp:283 msgid "" "ERROR: Test aborted because no suitable resources were found for the test-job" msgstr "" #: src/clients/compute/arctest.cpp:285 msgid "" "ERROR: Dumping job description aborted because no suitable resources were " "found for the test-job" msgstr "" #: src/clients/compute/arctest.cpp:294 #, c-format msgid "Submitting test-job %d:" msgstr "" #: src/clients/compute/arctest.cpp:298 #, c-format msgid "Client version: nordugrid-arc-%s" msgstr "" #: src/clients/compute/arctest.cpp:306 #, fuzzy, c-format msgid "Cannot write jobid (%s) to file (%s)" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/clients/compute/arctest.cpp:307 #, fuzzy, c-format msgid "Test submitted with jobid: %s" msgstr "Job hochgeladen mit Job ID: %s" #: src/clients/compute/arctest.cpp:322 #, fuzzy, c-format msgid "Computing service: %s" msgstr "Delegation service: %s" #: src/clients/compute/arctest.cpp:328 #, fuzzy msgid "Test failed, no more possible targets" msgstr "Hochladen des Jobs schlug fehl, keine weiteren Ziele verfügbar" #: src/clients/compute/arctest.cpp:341 src/clients/compute/submit.cpp:49 msgid "To recover missing jobs, run arcsync" msgstr "" #: src/clients/compute/arctest.cpp:354 src/clients/compute/submit.cpp:200 #, fuzzy, c-format msgid "" "Unable to prepare job description according to needs of the target resource " "(%s)." msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #: src/clients/compute/arctest.cpp:364 src/clients/compute/submit.cpp:216 #, fuzzy, c-format msgid "" "An error occurred during the generation of job description to be sent to %s" msgstr "Eine fehler geschah während des Generieres der Job Beschreibung." #: src/clients/compute/arctest.cpp:368 src/clients/compute/submit.cpp:220 #, fuzzy, c-format msgid "Job description to be sent to %s:" msgstr "Zu sendende Job-Beschreibung : %s" #: src/clients/compute/submit.cpp:40 #, fuzzy, c-format msgid "Cannot write job IDs to file (%s)" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/clients/compute/submit.cpp:45 #, c-format msgid "Unable to open job list file (%s), unknown format" msgstr "" #: src/clients/compute/submit.cpp:47 #, fuzzy, c-format msgid "Failed to write job information to database (%s)" msgstr "Konnte job information nicht beziehen für job: %s" #: src/clients/compute/submit.cpp:51 #, c-format msgid "Record about new job successfully added to the database (%s)" msgstr "" #: src/clients/compute/submit.cpp:57 msgid "Job submission summary:" msgstr "Job Hochladen Zusammenfassung:" #: src/clients/compute/submit.cpp:59 #, fuzzy, c-format msgid "%d of %d jobs were submitted" msgstr "%d von %s Jobs wurden hochgeladen" #: src/clients/compute/submit.cpp:61 #, fuzzy msgid "The following jobs were not submitted:" msgstr "Die folgenden %d wurden nicht hochgeladen" #: src/clients/compute/submit.cpp:65 msgid "Job nr." 
msgstr "" #: src/clients/compute/submit.cpp:75 #, fuzzy, c-format msgid "ERROR: Unable to load broker %s" msgstr "Konnter Broker %s nicht laden" #: src/clients/compute/submit.cpp:79 #, fuzzy msgid "" "ERROR: Job submission aborted because no resource returned any information" msgstr "" "Hochladen des Jobs abgebrochen, da keine Cluster entsprechende Informationen " "anboten" #: src/clients/compute/submit.cpp:83 msgid "ERROR: One or multiple job descriptions was not submitted." msgstr "" #: src/clients/compute/submit.cpp:100 #, c-format msgid "" "A computing resource using the GridFTP interface was requested, but\n" "%sthe corresponding plugin could not be loaded. Is the plugin installed?\n" "%sIf not, please install the package 'nordugrid-arc-plugins-globus'.\n" "%sDepending on your type of installation the package name might differ." msgstr "" #: src/clients/compute/submit.cpp:125 #, c-format msgid "Removing endpoint %s: It has an unrequested interface (%s)." msgstr "" #: src/clients/compute/submit.cpp:183 #, c-format msgid "Dumping job description aborted: Unable to load broker %s" msgstr "" #: src/clients/compute/submit.cpp:238 #, fuzzy msgid "" "Unable to prepare job description according to needs of the target resource." msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #: src/clients/compute/submit.cpp:322 src/clients/compute/submit.cpp:352 #, c-format msgid "Service endpoint %s (type %s) added to the list for resource discovery" msgstr "" #: src/clients/compute/submit.cpp:332 msgid "" "There are no endpoints in registry that match requested info endpoint type" msgstr "" #: src/clients/compute/submit.cpp:373 #, c-format msgid "Service endpoint %s (type %s) added to the list for direct submission" msgstr "" #: src/clients/compute/submit.cpp:381 msgid "" "There are no endpoints in registry that match requested submission endpoint " "type" msgstr "" #: src/clients/compute/utils.cpp:109 #, c-format msgid "Types of execution services that %s is able to submit jobs to:" msgstr "" #: src/clients/compute/utils.cpp:112 #, c-format msgid "Types of registry services that %s is able to collect information from:" msgstr "" #: src/clients/compute/utils.cpp:115 #, c-format msgid "" "Types of local information services that %s is able to collect information " "from:" msgstr "" #: src/clients/compute/utils.cpp:118 #, c-format msgid "" "Types of local information services that %s is able to collect job " "information from:" msgstr "" #: src/clients/compute/utils.cpp:121 #, c-format msgid "Types of services that %s is able to manage jobs at:" msgstr "" #: src/clients/compute/utils.cpp:124 #, fuzzy, c-format msgid "Job description languages supported by %s:" msgstr "Zu sendende Job-Beschreibung : %s" #: src/clients/compute/utils.cpp:127 #, c-format msgid "Brokers available to %s:" msgstr "" #: src/clients/compute/utils.cpp:150 #, c-format msgid "" "Default broker (%s) is not available. When using %s a broker should be " "specified explicitly (-b option)." msgstr "" #: src/clients/compute/utils.cpp:160 msgid "Proxy expired. Job submission aborted. Please run 'arcproxy'!" msgstr "" #: src/clients/compute/utils.cpp:165 msgid "" "Cannot find any proxy. This application currently cannot run without a " "proxy.\n" " If you have the proxy file in a non-default location,\n" " please make sure the path is specified in the client configuration file.\n" " If you don't have a proxy yet, please run 'arcproxy'!" 
msgstr "" #: src/clients/compute/utils.cpp:277 msgid "" "It is impossible to mix ARC6 target selection options with legacy options. " "All legacy options will be ignored!" msgstr "" #: src/clients/compute/utils.cpp:345 #, fuzzy, c-format msgid "Unsupported submission endpoint type: %s" msgstr "Nicht unterstützte URL für Ziel: %s" #: src/clients/compute/utils.cpp:383 #, c-format msgid "" "Requested to skip resource discovery. Will try direct submission to %s and " "%s submission endpoint types" msgstr "" #: src/clients/compute/utils.cpp:389 #, fuzzy, c-format msgid "Unsupported information endpoint type: %s" msgstr "Nicht unterstützte URL für Ziel: %s" #: src/clients/compute/utils.cpp:434 #, fuzzy msgid "Other actions" msgstr "SASL Interaktion" #: src/clients/compute/utils.cpp:435 #, fuzzy msgid "Brokering and filtering" msgstr "Zeichenkette" #: src/clients/compute/utils.cpp:436 msgid "Output format modifiers" msgstr "" #: src/clients/compute/utils.cpp:437 msgid "Behaviour tuning" msgstr "" #: src/clients/compute/utils.cpp:438 #, fuzzy msgid "ARC6 submission endpoint selection" msgstr "Submission ergab Fehler: %s" #: src/clients/compute/utils.cpp:439 msgid "Legacy options set for defining targets" msgstr "" #: src/clients/compute/utils.cpp:443 msgid "specify computing element hostname or a complete endpoint URL" msgstr "" #: src/clients/compute/utils.cpp:444 msgid "ce" msgstr "" #: src/clients/compute/utils.cpp:448 msgid "registry service URL with optional specification of protocol" msgstr "" #: src/clients/compute/utils.cpp:449 msgid "registry" msgstr "" #: src/clients/compute/utils.cpp:455 msgid "" "require the specified endpoint type for job submission.\n" "\tAllowed values are: arcrest, emies, gridftp or gridftpjob and internal." msgstr "" #: src/clients/compute/utils.cpp:457 src/clients/compute/utils.cpp:464 msgid "type" msgstr "" #: src/clients/compute/utils.cpp:461 msgid "" "require information query using the specified information endpoint type.\n" "\tSpecial value 'NONE' will disable all resource information queries and the " "following brokering.\n" "\tAllowed values are: ldap.nordugrid, ldap.glue2, emies, arcrest and " "internal." msgstr "" #: src/clients/compute/utils.cpp:470 msgid "" "select one or more computing elements: name can be an alias for a single CE, " "a group of CEs or a URL" msgstr "" #: src/clients/compute/utils.cpp:472 src/clients/compute/utils.cpp:477 #: src/clients/compute/utils.cpp:494 src/clients/compute/utils.cpp:614 msgid "name" msgstr "" #: src/clients/compute/utils.cpp:476 #, fuzzy msgid "only select jobs that were submitted to this resource" msgstr "Erneut zu demselben Cluster submitten" #: src/clients/compute/utils.cpp:483 msgid "" "the computing element specified by URL at the command line should be queried " "using this information interface.\n" "\tAllowed values are: org.nordugrid.ldapng, org.nordugrid.ldapglue2 and org." 
"ogf.glue.emies.resourceinfo" msgstr "" #: src/clients/compute/utils.cpp:486 #, fuzzy msgid "interfacename" msgstr "Benutzungsschnittstellenfehler" #: src/clients/compute/utils.cpp:492 msgid "" "selecting a computing element for the new jobs with a URL or an alias, or " "selecting a group of computing elements with the name of the group" msgstr "" #: src/clients/compute/utils.cpp:500 msgid "force migration, ignore kill failure" msgstr "" "erzwinge Migration, ignoriere ein Fehlschlagen des Abbruchs bereits " "laufender Jobs" #: src/clients/compute/utils.cpp:506 msgid "keep the files on the server (do not clean)" msgstr "behalte die Dateien auf dem Server (dort nicht löschen)" #: src/clients/compute/utils.cpp:512 msgid "do not ask for verification" msgstr "frage nicht nach Verifikation" #: src/clients/compute/utils.cpp:516 #, fuzzy msgid "truncate the joblist before synchronizing" msgstr "kürze Jobliste vor Synchronisation" #: src/clients/compute/utils.cpp:520 msgid "do not collect information, only convert jobs storage format" msgstr "" #: src/clients/compute/utils.cpp:526 src/clients/data/arcls.cpp:288 msgid "long format (more information)" msgstr "ausführliche Ausgabe" #: src/clients/compute/utils.cpp:532 msgid "print a list of services configured in the client.conf" msgstr "" #: src/clients/compute/utils.cpp:538 msgid "show the stdout of the job (default)" msgstr "Zeige stdout des Jobs (Voreinstellung)" #: src/clients/compute/utils.cpp:542 msgid "show the stderr of the job" msgstr "zeige stderr des Jobs" #: src/clients/compute/utils.cpp:546 #, fuzzy msgid "show the CE's error log of the job" msgstr "zeige den error log des Grid Manager für diesen Job" #: src/clients/compute/utils.cpp:550 msgid "show the specified file from job's session directory" msgstr "" #: src/clients/compute/utils.cpp:551 #, fuzzy msgid "filepath" msgstr "Pfad" #: src/clients/compute/utils.cpp:557 msgid "" "download directory (the job directory will be created in this directory)" msgstr "" "Download-Verzeichnis (das Job-Verzeichnis wird in diesem Verzeichnis " "abgelegt)" #: src/clients/compute/utils.cpp:559 msgid "dirname" msgstr "Verzeichnisname" #: src/clients/compute/utils.cpp:563 msgid "use the jobname instead of the short ID as the job directory name" msgstr "" #: src/clients/compute/utils.cpp:568 msgid "force download (overwrite existing job directory)" msgstr "" #: src/clients/compute/utils.cpp:574 msgid "instead of the status only the IDs of the selected jobs will be printed" msgstr "" #: src/clients/compute/utils.cpp:578 msgid "sort jobs according to jobid, submissiontime or jobname" msgstr "" #: src/clients/compute/utils.cpp:579 src/clients/compute/utils.cpp:582 msgid "order" msgstr "" #: src/clients/compute/utils.cpp:581 msgid "reverse sorting of jobs according to jobid, submissiontime or jobname" msgstr "" #: src/clients/compute/utils.cpp:585 msgid "show jobs where status information is unavailable" msgstr "" #: src/clients/compute/utils.cpp:589 msgid "show status information in JSON format" msgstr "" #: src/clients/compute/utils.cpp:595 #, fuzzy msgid "resubmit to the same resource" msgstr "Erneut zu demselben Cluster submitten" #: src/clients/compute/utils.cpp:599 #, fuzzy msgid "do not resubmit to the same resource" msgstr "Erneut zu demselben Cluster submitten" #: src/clients/compute/utils.cpp:605 msgid "" "remove the job from the local list of jobs even if the job is not found in " "the infosys" msgstr "" "entferne Job aus lokaler Liste selbst wenn der Job dem Infosys nicht bekannt " "ist" #: 
src/clients/compute/utils.cpp:612 msgid "" "select one or more registries: name can be an alias for a single registry, a " "group of registries or a URL" msgstr "" #: src/clients/compute/utils.cpp:620 msgid "submit test job given by the number" msgstr "" #: src/clients/compute/utils.cpp:621 src/clients/compute/utils.cpp:625 #, fuzzy msgid "int" msgstr "Minuten" #: src/clients/compute/utils.cpp:624 msgid "test job runtime specified by the number" msgstr "" #: src/clients/compute/utils.cpp:631 msgid "only select jobs whose status is statusstr" msgstr "Selektiere Jobs mit Status statusstr" #: src/clients/compute/utils.cpp:632 msgid "statusstr" msgstr "statusstr" #: src/clients/compute/utils.cpp:638 msgid "all jobs" msgstr "alle Jobs" #: src/clients/compute/utils.cpp:644 msgid "jobdescription string describing the job to be submitted" msgstr "Zeichenkette mit Job-Beschreibung wird hochgeladen" #: src/clients/compute/utils.cpp:646 src/clients/compute/utils.cpp:652 #: src/clients/credentials/arcproxy.cpp:345 #: src/clients/credentials/arcproxy.cpp:352 #: src/clients/credentials/arcproxy.cpp:371 #: src/clients/credentials/arcproxy.cpp:378 #: src/clients/credentials/arcproxy.cpp:396 #: src/clients/credentials/arcproxy.cpp:400 #: src/clients/credentials/arcproxy.cpp:415 #: src/clients/credentials/arcproxy.cpp:425 #: src/clients/credentials/arcproxy.cpp:429 msgid "string" msgstr "Zeichenkette" #: src/clients/compute/utils.cpp:650 msgid "jobdescription file describing the job to be submitted" msgstr "Datei mit Job-Beschreibung wird hochgeladen" #: src/clients/compute/utils.cpp:658 msgid "select broker method (list available brokers with --listplugins flag)" msgstr "" #: src/clients/compute/utils.cpp:659 msgid "broker" msgstr "Broker" #: src/clients/compute/utils.cpp:662 msgid "the IDs of the submitted jobs will be appended to this file" msgstr "" #: src/clients/compute/utils.cpp:663 src/clients/compute/utils.cpp:685 #: src/clients/compute/utils.cpp:722 src/clients/compute/utils.cpp:730 #: src/clients/credentials/arcproxy.cpp:438 src/clients/data/arccp.cpp:627 #: src/clients/data/arcls.cpp:333 src/clients/data/arcmkdir.cpp:111 #: src/clients/data/arcrename.cpp:122 src/clients/data/arcrm.cpp:137 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:52 msgid "filename" msgstr "Dateiname" #: src/clients/compute/utils.cpp:667 msgid "" "only use this interface for submitting.\n" "\tAllowed values are: org.nordugrid.gridftpjob or org.nordugrid.gridftp, org." "ogf.glue.emies.activitycreation and org.nordugrid.internal" msgstr "" #: src/clients/compute/utils.cpp:669 src/clients/compute/utils.cpp:711 #, fuzzy msgid "InterfaceName" msgstr "Interaktiver Modus." 
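The option strings in this part of the catalog document the command-line interface of the ARC compute clients (arcsub, arcstat, arcget, arctest and related tools). A rough sketch of the round trip they describe follows; option letters are taken from the help texts quoted above where they are spelled out (-b for the broker, -E for certificate information, -c/-g for selecting a computing element or registry as in the arcsync text), while everything else (host name, job description file, the -a letter, JOBID) is a placeholder or assumption:
  arcproxy                                      # create a proxy credential first
  arctest -E                                    # print information about the installed user and CA certificates
  arcsub -c ce.example.org -b Random job.xrsl   # submit a job description; "Random" is the broker named in the help text
  arcstat -a                                    # show all jobs in the local job list (the -a letter is an assumption)
  arcget JOBID                                  # retrieve the results of a finished job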
#: src/clients/compute/utils.cpp:676 msgid "skip the service with the given URL during service discovery" msgstr "" #: src/clients/compute/utils.cpp:677 src/clients/compute/utils.cpp:690 #: src/clients/data/arccp.cpp:607 msgid "URL" msgstr "" #: src/clients/compute/utils.cpp:684 #, fuzzy msgid "a file containing a list of jobIDs" msgstr "Datei mit Liste aller Jobs" #: src/clients/compute/utils.cpp:689 msgid "skip jobs that are on a computing element with a given URL" msgstr "" #: src/clients/compute/utils.cpp:695 msgid "submit jobs as dry run (no submission to batch system)" msgstr "" #: src/clients/compute/utils.cpp:698 msgid "submit directly - no resource discovery or matchmaking" msgstr "" #: src/clients/compute/utils.cpp:702 msgid "" "do not submit - dump job description in the language accepted by the target" msgstr "" #: src/clients/compute/utils.cpp:709 msgid "" "only get information about executon targets that support this job submission " "interface.\n" "\tAllowed values are org.nordugrid.gridftpjob or org.nordugrid.gridftp, org." "ogf.glue.emies.activitycreation and org.nordugrid.internal" msgstr "" #: src/clients/compute/utils.cpp:716 msgid "prints info about installed user- and CA-certificates" msgstr "" #: src/clients/compute/utils.cpp:721 #, fuzzy, c-format msgid "the file storing information about active jobs (default %s)" msgstr "Fehler bei Bezug von Information für Job: %s" #: src/clients/compute/utils.cpp:729 src/clients/credentials/arcproxy.cpp:437 #: src/clients/data/arccp.cpp:626 src/clients/data/arcls.cpp:332 #: src/clients/data/arcmkdir.cpp:110 src/clients/data/arcrename.cpp:121 #: src/clients/data/arcrm.cpp:136 msgid "configuration file (default ~/.arc/client.conf)" msgstr "Konfigurationsdatei (Vorteinstellung ~/.arc/client.conf)" #: src/clients/compute/utils.cpp:732 src/clients/credentials/arcproxy.cpp:432 #: src/clients/data/arccp.cpp:621 src/clients/data/arcls.cpp:327 #: src/clients/data/arcmkdir.cpp:105 src/clients/data/arcrename.cpp:116 #: src/clients/data/arcrm.cpp:131 msgid "timeout in seconds (default 20)" msgstr "Zeitüberschreitung nach Sekunden (Voreinstellung 20)" #: src/clients/compute/utils.cpp:733 src/clients/credentials/arcproxy.cpp:433 #: src/clients/data/arccp.cpp:622 src/clients/data/arcls.cpp:328 #: src/clients/data/arcmkdir.cpp:106 src/clients/data/arcrename.cpp:117 #: src/clients/data/arcrm.cpp:132 msgid "seconds" msgstr "Sekunden" #: src/clients/compute/utils.cpp:736 msgid "list the available plugins" msgstr "" #: src/clients/compute/utils.cpp:740 src/clients/credentials/arcproxy.cpp:442 #: src/clients/data/arccp.cpp:631 src/clients/data/arcls.cpp:337 #: src/clients/data/arcmkdir.cpp:115 src/clients/data/arcrename.cpp:126 #: src/clients/data/arcrm.cpp:141 #: src/hed/libs/compute/test_jobdescription.cpp:38 #: src/services/a-rex/grid-manager/gm_jobs.cpp:190 #: src/services/a-rex/grid-manager/inputcheck.cpp:81 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:66 msgid "FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG" msgstr "FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG" #: src/clients/compute/utils.cpp:741 src/clients/credentials/arcproxy.cpp:443 #: src/clients/data/arccp.cpp:632 src/clients/data/arcls.cpp:338 #: src/clients/data/arcmkdir.cpp:116 src/clients/data/arcrename.cpp:127 #: src/clients/data/arcrm.cpp:142 #: src/hed/libs/compute/test_jobdescription.cpp:39 #: src/services/a-rex/grid-manager/gm_jobs.cpp:191 #: src/services/a-rex/grid-manager/inputcheck.cpp:82 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:67 #, fuzzy 
msgid "debuglevel" msgstr "debuglevel" #: src/clients/compute/utils.cpp:743 src/clients/credentials/arcproxy.cpp:446 #: src/clients/data/arccp.cpp:635 src/clients/data/arcls.cpp:341 #: src/clients/data/arcmkdir.cpp:119 src/clients/data/arcrename.cpp:130 #: src/clients/data/arcrm.cpp:145 msgid "print version information" msgstr "Angabe des aktuellen Versionsbezeichners" #: src/clients/credentials/arcproxy.cpp:146 #: src/hed/libs/credential/ARCProxyUtil.cpp:1216 #, fuzzy, c-format msgid "There are %d user certificates existing in the NSS database" msgstr "Es sind %d Zertifikate in der zurückgelieferten Nachricht." #: src/clients/credentials/arcproxy.cpp:162 #: src/hed/libs/credential/ARCProxyUtil.cpp:1232 #, c-format msgid "Number %d is with nickname: %s%s" msgstr "" #: src/clients/credentials/arcproxy.cpp:171 #: src/hed/libs/credential/ARCProxyUtil.cpp:1241 #, c-format msgid " expiration time: %s " msgstr "" #: src/clients/credentials/arcproxy.cpp:175 #: src/hed/libs/credential/ARCProxyUtil.cpp:1245 #, fuzzy, c-format msgid " certificate dn: %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/clients/credentials/arcproxy.cpp:176 #: src/hed/libs/credential/ARCProxyUtil.cpp:1246 #, fuzzy, c-format msgid " issuer dn: %s" msgstr " base dn: %s" #: src/clients/credentials/arcproxy.cpp:177 #: src/hed/libs/credential/ARCProxyUtil.cpp:1247 #, c-format msgid " serial number: %d" msgstr "" #: src/clients/credentials/arcproxy.cpp:181 #: src/hed/libs/credential/ARCProxyUtil.cpp:1251 #, c-format msgid "Please choose the one you would use (1-%d): " msgstr "" #: src/clients/credentials/arcproxy.cpp:246 #, fuzzy msgid "" "The arcproxy command creates a proxy from a key/certificate pair which can\n" "then be used to access grid resources." msgstr "" "Команда arcproxy Ñоздаёт доверенноÑти из пары закрытый/открытый ключ\n" "Ð´Ð»Ñ Ð¸ÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð¸Ñ Ð½Ð° Гриде" #: src/clients/credentials/arcproxy.cpp:248 msgid "" "Supported constraints are:\n" " validityStart=time (e.g. 2008-05-29T10:20:30Z; if not specified, start " "from now)\n" " validityEnd=time\n" " validityPeriod=time (e.g. 43200 or 12h or 12H; if both validityPeriod and " "validityEnd\n" " not specified, the default is 12 hours for local proxy, and 168 hours " "for delegated\n" " proxy on myproxy server)\n" " vomsACvalidityPeriod=time (e.g. 43200 or 12h or 12H; if not specified, the " "default\n" " is the minimum value of 12 hours and validityPeriod)\n" " myproxyvalidityPeriod=time (lifetime of proxies delegated by myproxy " "server,\n" " e.g. 43200 or 12h or 12H; if not specified, the default is the minimum " "value of\n" " 12 hours and validityPeriod (which is lifetime of the delegated proxy on " "myproxy server))\n" " proxyPolicy=policy content\n" " proxyPolicyFile=policy file\n" " keybits=number - length of the key to generate. Default is 2048 bits.\n" " Special value 'inherit' is to use key length of signing certificate.\n" " signingAlgorithm=name - signing algorithm to use for signing public key of " "proxy.\n" " Possible values are sha1, sha2 (alias for sha256), sha224, sha256, " "sha384, sha512\n" " and inherit (use algorithm of signing certificate). 
Default is inherit.\n" " With old systems, only sha1 is acceptable.\n" "\n" "Supported information item names are:\n" " subject - subject name of proxy certificate.\n" " identity - identity subject name of proxy certificate.\n" " issuer - issuer subject name of proxy certificate.\n" " ca - subject name of CA which issued initial certificate.\n" " path - file system path to file containing proxy.\n" " type - type of proxy certificate.\n" " validityStart - timestamp when proxy validity starts.\n" " validityEnd - timestamp when proxy validity ends.\n" " validityPeriod - duration of proxy validity in seconds.\n" " validityLeft - duration of proxy validity left in seconds.\n" " vomsVO - VO name represented by VOMS attribute\n" " vomsSubject - subject of certificate for which VOMS attribute is issued\n" " vomsIssuer - subject of service which issued VOMS certificate\n" " vomsACvalidityStart - timestamp when VOMS attribute validity starts.\n" " vomsACvalidityEnd - timestamp when VOMS attribute validity ends.\n" " vomsACvalidityPeriod - duration of VOMS attribute validity in seconds.\n" " vomsACvalidityLeft - duration of VOMS attribute validity left in seconds.\n" " proxyPolicy\n" " keybits - size of proxy certificate key in bits.\n" " signingAlgorithm - algorithm used to sign proxy certificate.\n" "Items are printed in requested order and are separated by newline.\n" "If item has multiple values they are printed in same line separated by |.\n" "\n" "Supported password destinations are:\n" " key - for reading private key\n" " myproxy - for accessing credentials at MyProxy service\n" " myproxynew - for creating credentials at MyProxy service\n" " all - for any purspose.\n" "\n" "Supported password sources are:\n" " quoted string (\"password\") - explicitly specified password\n" " int - interactively request password from console\n" " stdin - read password from standard input delimited by newline\n" " file:filename - read password from file named filename\n" " stream:# - read password from input stream number #.\n" " Currently only 0 (standard input) is supported.\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:308 #, fuzzy msgid "path to the proxy file" msgstr "Pfad zu Proxy-Datei" #: src/clients/credentials/arcproxy.cpp:309 #: src/clients/credentials/arcproxy.cpp:313 #: src/clients/credentials/arcproxy.cpp:317 #: src/clients/credentials/arcproxy.cpp:321 #: src/clients/credentials/arcproxy.cpp:325 #: src/clients/credentials/arcproxy.cpp:329 src/clients/data/arccp.cpp:584 msgid "path" msgstr "Pfad" #: src/clients/credentials/arcproxy.cpp:312 msgid "" "path to the certificate file, it can be either PEM, DER, or PKCS12 formatted" msgstr "" #: src/clients/credentials/arcproxy.cpp:316 msgid "" "path to the private key file, if the certificate is in PKCS12 format, then " "no need to give private key" msgstr "" #: src/clients/credentials/arcproxy.cpp:320 #, fuzzy msgid "" "path to the trusted certificate directory, only needed for the VOMS client " "functionality" msgstr "" "путь к каталогу Ñ Ð´Ð¾Ð²ÐµÑ€Ñемыми Ñертификатами, иÑпользуетÑÑ\n" " только клиентом VOMS" #: src/clients/credentials/arcproxy.cpp:324 #, fuzzy msgid "" "path to the top directory of VOMS *.lsc files, only needed for the VOMS " "client functionality" msgstr "" "путь к каталогу Ñ Ð´Ð¾Ð²ÐµÑ€Ñемыми Ñертификатами, иÑпользуетÑÑ\n" " только клиентом VOMS" #: src/clients/credentials/arcproxy.cpp:328 #, fuzzy msgid "path to the VOMS server configuration file" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: 
src/clients/credentials/arcproxy.cpp:332 #, fuzzy msgid "" "voms<:command>. Specify VOMS server (More than one VOMS server \n" " can be specified like this: --voms VOa:command1 --voms VOb:" "command2). \n" " :command is optional, and is used to ask for specific " "attributes(e.g: roles)\n" " command options are:\n" " all --- put all of this DN's attributes into AC; \n" " list ---list all of the DN's attribute, will not create AC " "extension; \n" " /Role=yourRole --- specify the role, if this DN \n" " has such a role, the role will be put into " "AC; \n" " /voname/groupname/Role=yourRole --- specify the VO, group and " "role; if this DN \n" " has such a role, the role will be put into " "AC. \n" " If this option is not specified values from configuration " "files are used.\n" " To avoid anything to be used specify -S with empty value.\n" msgstr "" "voms<:инÑтрукциÑ>. ОпиÑание Ñервера VOMS (неÑколько Ñерверов задаютÑÑ\n" " Ñледующим образом: --voms VOa:инÑтрукциÑ1 --voms VOb:" "инÑтрукциÑ2).\n" " <:инÑтрукциÑ> не обÑзательна и Ñлужит Ð´Ð»Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа " "дополнительных\n" " атрибутов (например, ролей)\n" " ИнÑтрукции:\n" " all --- добавить вÑе атрибуты, доÑтупные данному " "пользователю;\n" " list --- перечиÑлить вÑе атрибуты, доÑтупные данному " "пользователю,\n" " без ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ AC; \n" " /Role=вашаРоль --- указать желаемую роль; еÑли данный " "пользователь\n" " может играть такую роль, она будет " "добавлена;\n" " /voname/groupname/Role=вашаРоль --- указать ВО, группу и роль; " "еÑли\n" " данный пользователь может играть такую " "роль, она\n" " будет добавлена.\n" #: src/clients/credentials/arcproxy.cpp:348 msgid "" "group<:role>. Specify ordering of attributes \n" " Example: --order /knowarc.eu/coredev:Developer,/knowarc.eu/" "testers:Tester \n" " or: --order /knowarc.eu/coredev:Developer --order /knowarc.eu/" "testers:Tester \n" " Note that it does not make sense to specify the order if you have two or " "more different VOMS servers specified" msgstr "" #: src/clients/credentials/arcproxy.cpp:355 msgid "use GSI communication protocol for contacting VOMS services" msgstr "" #: src/clients/credentials/arcproxy.cpp:358 msgid "" "use HTTP communication protocol for contacting VOMS services that provide " "RESTful access \n" " Note for RESTful access, 'list' command and multiple VOMS " "server are not supported\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:362 msgid "" "use old communication protocol for contacting VOMS services instead of " "RESTful access\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:365 msgid "" "this option is not functional (old GSI proxies are not supported anymore)" msgstr "" #: src/clients/credentials/arcproxy.cpp:368 msgid "print all information about this proxy." msgstr "" #: src/clients/credentials/arcproxy.cpp:371 msgid "print selected information about this proxy." msgstr "" #: src/clients/credentials/arcproxy.cpp:374 msgid "remove proxy" msgstr "" #: src/clients/credentials/arcproxy.cpp:377 msgid "" "username to MyProxy server (if missing subject of user certificate is used)" msgstr "" #: src/clients/credentials/arcproxy.cpp:382 msgid "" "don't prompt for a credential passphrase, when retrieve a \n" " credential from on MyProxy server. \n" " The precondition of this choice is the credential is PUT onto\n" " the MyProxy server without a passphrase by using -R (--" "retrievable_by_cert) \n" " option when being PUTing onto Myproxy server. \n" " This option is specific for the GET command when contacting " "Myproxy server." 
msgstr "" #: src/clients/credentials/arcproxy.cpp:393 msgid "" "Allow specified entity to retrieve credential without passphrase.\n" " This option is specific for the PUT command when contacting " "Myproxy server." msgstr "" #: src/clients/credentials/arcproxy.cpp:399 #, fuzzy msgid "hostname[:port] of MyProxy server" msgstr "Nutzername bei myproxy Server" #: src/clients/credentials/arcproxy.cpp:404 msgid "" "command to MyProxy server. The command can be PUT, GET, INFO, NEWPASS or " "DESTROY.\n" " PUT -- put a delegated credentials to the MyProxy server; \n" " GET -- get a delegated credentials from the MyProxy server; \n" " INFO -- get and present information about credentials stored " "at the MyProxy server; \n" " NEWPASS -- change password protecting credentials stored at " "the MyProxy server; \n" " DESTROY -- wipe off credentials stored at the MyProxy " "server; \n" " Local credentials (certificate and key) are not necessary " "except in case of PUT. \n" " MyProxy functionality can be used together with VOMS " "functionality.\n" " --voms and --vomses can be used for Get command if VOMS " "attributes\n" " is required to be included in the proxy.\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:419 msgid "" "use NSS credential database in default Mozilla profiles, \n" " including Firefox, Seamonkey and Thunderbird.\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:424 #, fuzzy msgid "proxy constraints" msgstr "Proxy constraints" #: src/clients/credentials/arcproxy.cpp:428 msgid "password destination=password source" msgstr "" #: src/clients/credentials/arcproxy.cpp:452 msgid "" "RESTful and old VOMS communication protocols can't be requested " "simultaneously." msgstr "" #: src/clients/credentials/arcproxy.cpp:482 #: src/clients/credentials/arcproxy.cpp:1187 #, fuzzy msgid "Failed configuration initialization." msgstr "Fehler bei Initialisierung der Konfiguration" #: src/clients/credentials/arcproxy.cpp:511 msgid "" "Failed to find certificate and/or private key or files have improper " "permissions or ownership." msgstr "" #: src/clients/credentials/arcproxy.cpp:512 #: src/clients/credentials/arcproxy.cpp:524 msgid "You may try to increase verbosity to get more information." msgstr "" #: src/clients/credentials/arcproxy.cpp:520 #, fuzzy msgid "Failed to find CA certificates" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/clients/credentials/arcproxy.cpp:521 #, fuzzy msgid "" "Cannot find the CA certificates directory path, please set environment " "variable X509_CERT_DIR, or cacertificatesdirectory in a configuration file." msgstr "" "Kann den Pfad zum CA Zertifikat-Verzeichnis nicht ermitteln. Bitte setzen " "Sie die Umgebungsvariable X509_CERT_DIR oder den Eintrag zu " "cacertificatesdirectory in der Konfigurationsdatei" #: src/clients/credentials/arcproxy.cpp:525 msgid "" "The CA certificates directory is required for contacting VOMS and MyProxy " "servers." msgstr "" #: src/clients/credentials/arcproxy.cpp:537 msgid "" "$X509_VOMS_FILE, and $X509_VOMSES are not set;\n" "User has not specified the location for vomses information;\n" "There is also not vomses location information in user's configuration file;\n" "Can not find vomses in default locations: ~/.arc/vomses, ~/.voms/vomses,\n" "$ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/" "vomses,\n" "/etc/vomses, /etc/grid-security/vomses, and the location at the " "corresponding sub-directory" msgstr "" #: src/clients/credentials/arcproxy.cpp:582 msgid "Wrong number of arguments!" 
msgstr "" #: src/clients/credentials/arcproxy.cpp:590 #: src/clients/credentials/arcproxy.cpp:614 #: src/clients/credentials/arcproxy.cpp:747 #, fuzzy msgid "" "Cannot find the path of the proxy file, please setup environment " "X509_USER_PROXY, or proxypath in a configuration file" msgstr "" "Kann den Pfad zum CA Zertifikat-Verzeichnis nicht ermitteln. Bitte setzen " "Sie die Umgebungsvariable X509_CERT_DIR oder den Eintrag zu " "cacertificatesdirectory in der Konfigurationsdatei" #: src/clients/credentials/arcproxy.cpp:597 #, fuzzy, c-format msgid "Cannot remove proxy file at %s" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/clients/credentials/arcproxy.cpp:599 #, c-format msgid "Cannot remove proxy file at %s, because it's not there" msgstr "" #: src/clients/credentials/arcproxy.cpp:608 msgid "Bearer token is available. It is preferred for job submission." msgstr "" #: src/clients/credentials/arcproxy.cpp:620 #: src/clients/credentials/arcproxy.cpp:753 #, c-format msgid "" "Cannot find file at %s for getting the proxy. Please make sure this file " "exists." msgstr "" "Kann Datei nicht bei %s finden, um den Proxy zu erhalten. Bitte stellen Sie " "sicher, dass diese Datei existiert." #: src/clients/credentials/arcproxy.cpp:626 #: src/clients/credentials/arcproxy.cpp:759 #, fuzzy, c-format msgid "Cannot process proxy file at %s." msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/clients/credentials/arcproxy.cpp:629 #, c-format msgid "Subject: %s" msgstr "Subjekt: %s" #: src/clients/credentials/arcproxy.cpp:630 #, fuzzy, c-format msgid "Issuer: %s" msgstr "Anfrage: %s" #: src/clients/credentials/arcproxy.cpp:631 #, c-format msgid "Identity: %s" msgstr "Identität: %s" #: src/clients/credentials/arcproxy.cpp:633 #, fuzzy msgid "Time left for proxy: Proxy expired" msgstr "Zeit verbleibend für Proxy: Proxy abgelaufen" #: src/clients/credentials/arcproxy.cpp:635 #, fuzzy msgid "Time left for proxy: Proxy not valid yet" msgstr "Zeit verbleibend für Proxy: Proxy ungültig" #: src/clients/credentials/arcproxy.cpp:637 #, fuzzy, c-format msgid "Time left for proxy: %s" msgstr "Fehler bei Lesen von Proxy-Datei: %s" #: src/clients/credentials/arcproxy.cpp:638 #, c-format msgid "Proxy path: %s" msgstr "Proxy Pfad: %s" #: src/clients/credentials/arcproxy.cpp:639 #, c-format msgid "Proxy type: %s" msgstr "Proxy Typ: %s" #: src/clients/credentials/arcproxy.cpp:640 #, fuzzy, c-format msgid "Proxy key length: %i" msgstr "Proxy Pfad: %s" #: src/clients/credentials/arcproxy.cpp:641 #, fuzzy, c-format msgid "Proxy signature: %s" msgstr "Nach Signatur: %s" #: src/clients/credentials/arcproxy.cpp:650 #, fuzzy msgid "AC extension information for VO " msgstr "Fehler bei Bezug von Information für Job: %s" #: src/clients/credentials/arcproxy.cpp:653 msgid "Error detected while parsing this AC" msgstr "" #: src/clients/credentials/arcproxy.cpp:666 msgid "AC is invalid: " msgstr "" #: src/clients/credentials/arcproxy.cpp:696 #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:222 #, c-format msgid "Malformed VOMS AC attribute %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:727 #, fuzzy msgid "Time left for AC: AC is not valid yet" msgstr "Zeit verbleibend für Proxy: Proxy ungültig" #: src/clients/credentials/arcproxy.cpp:729 #, fuzzy msgid "Time left for AC: AC has expired" msgstr "Zeit verbleibend für Proxy: Proxy abgelaufen" #: src/clients/credentials/arcproxy.cpp:731 #, fuzzy, c-format msgid "Time left for AC: %s" msgstr "Fehler bei Lesen von Proxy-Datei: %s" #: 
src/clients/credentials/arcproxy.cpp:838 #, c-format msgid "Information item '%s' is not known" msgstr "" #: src/clients/credentials/arcproxy.cpp:850 #, fuzzy msgid "" "Cannot find the user certificate path, please setup environment " "X509_USER_CERT, or certificatepath in a configuration file" msgstr "" "Kann den Pfad zum CA Zertifikat-Verzeichnis nicht ermitteln. Bitte setzen " "Sie die Umgebungsvariable X509_CERT_DIR oder den Eintrag zu " "cacertificatesdirectory in der Konfigurationsdatei" #: src/clients/credentials/arcproxy.cpp:854 #, fuzzy msgid "" "Cannot find the user private key path, please setup environment " "X509_USER_KEY, or keypath in a configuration file" msgstr "" "Kann den Pfad zum CA Zertifikat-Verzeichnis nicht ermitteln. Bitte setzen " "Sie die Umgebungsvariable X509_CERT_DIR oder den Eintrag zu " "cacertificatesdirectory in der Konfigurationsdatei" #: src/clients/credentials/arcproxy.cpp:878 #, c-format msgid "" "Cannot parse password source expression %s it must be of type=source format" msgstr "" #: src/clients/credentials/arcproxy.cpp:895 #, c-format msgid "" "Cannot parse password type %s. Currently supported values are " "'key','myproxy','myproxynew' and 'all'." msgstr "" #: src/clients/credentials/arcproxy.cpp:910 #, c-format msgid "" "Cannot parse password source %s it must be of source_type or source_type:" "data format. Supported source types are int,stdin,stream,file." msgstr "" #: src/clients/credentials/arcproxy.cpp:924 msgid "Only standard input is currently supported for password source." msgstr "" #: src/clients/credentials/arcproxy.cpp:929 #, c-format msgid "" "Cannot parse password source type %s. Supported source types are int,stdin," "stream,file." msgstr "" #: src/clients/credentials/arcproxy.cpp:968 msgid "The start, end and period can't be set simultaneously" msgstr "" #: src/clients/credentials/arcproxy.cpp:974 #, c-format msgid "The start time that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:981 #, c-format msgid "The period that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:988 #, c-format msgid "The end time that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:997 #, c-format msgid "The end time that you set: %s is before start time: %s." msgstr "" #: src/clients/credentials/arcproxy.cpp:1008 #, c-format msgid "WARNING: The start time that you set: %s is before current time: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1011 #, c-format msgid "WARNING: The end time that you set: %s is before current time: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1021 #, c-format msgid "The VOMS AC period that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:1039 #, c-format msgid "The MyProxy period that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:1054 #, c-format msgid "The keybits constraint is wrong: %s." 
msgstr "" #: src/clients/credentials/arcproxy.cpp:1068 #: src/hed/libs/credential/ARCProxyUtil.cpp:1271 msgid "The NSS database can not be detected in the Firefox profile" msgstr "" #: src/clients/credentials/arcproxy.cpp:1077 #: src/hed/libs/credential/ARCProxyUtil.cpp:1279 #, c-format msgid "" "There are %d NSS base directories where the certificate, key, and module " "databases live" msgstr "" #: src/clients/credentials/arcproxy.cpp:1079 #: src/hed/libs/credential/ARCProxyUtil.cpp:1283 #, c-format msgid "Number %d is: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1081 #: src/hed/libs/credential/ARCProxyUtil.cpp:1285 #, c-format msgid "Please choose the NSS database you would like to use (1-%d): " msgstr "" #: src/clients/credentials/arcproxy.cpp:1097 #: src/hed/libs/credential/ARCProxyUtil.cpp:1297 #, c-format msgid "NSS database to be accessed: %s\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:1168 #: src/hed/libs/credential/ARCProxyUtil.cpp:1471 #, fuzzy, c-format msgid "Certificate to use is: %s" msgstr "Verbindung zu %s schlug fehl: %s" #: src/clients/credentials/arcproxy.cpp:1216 #: src/clients/credentials/arcproxy.cpp:1330 #: src/hed/libs/credential/ARCProxyUtil.cpp:1528 msgid "Proxy generation succeeded" msgstr "Proxy erfolgreich angelegt" #: src/clients/credentials/arcproxy.cpp:1217 #: src/clients/credentials/arcproxy.cpp:1331 #: src/hed/libs/credential/ARCProxyUtil.cpp:1529 #, c-format msgid "Your proxy is valid until: %s" msgstr "Ihr Proxy ist gültig bis: %s" #: src/clients/credentials/arcproxy.cpp:1236 msgid "" "The old GSI proxies are not supported anymore. Please do not use -O/--old " "option." msgstr "" #: src/clients/credentials/arcproxy.cpp:1255 src/hed/mcc/tls/MCCTLS.cpp:163 #: src/hed/mcc/tls/MCCTLS.cpp:196 src/hed/mcc/tls/MCCTLS.cpp:222 msgid "VOMS attribute parsing failed" msgstr "Konnte VOMS Attribut nicht herauslesen" #: src/clients/credentials/arcproxy.cpp:1257 msgid "Myproxy server did not return proxy with VOMS AC included" msgstr "" #: src/clients/credentials/arcproxy.cpp:1278 #: src/hed/libs/credential/ARCProxyUtil.cpp:337 msgid "Proxy generation failed: No valid certificate found." msgstr "" #: src/clients/credentials/arcproxy.cpp:1283 #: src/hed/libs/credential/ARCProxyUtil.cpp:343 msgid "Proxy generation failed: No valid private key found." msgstr "" #: src/clients/credentials/arcproxy.cpp:1287 #: src/hed/libs/credential/ARCProxyUtil.cpp:169 #, c-format msgid "Your identity: %s" msgstr "Ihre Identität: %s" #: src/clients/credentials/arcproxy.cpp:1289 #: src/hed/libs/credential/ARCProxyUtil.cpp:350 msgid "Proxy generation failed: Certificate has expired." msgstr "" #: src/clients/credentials/arcproxy.cpp:1293 #: src/hed/libs/credential/ARCProxyUtil.cpp:355 msgid "Proxy generation failed: Certificate is not valid yet." msgstr "" #: src/clients/credentials/arcproxy.cpp:1304 #, fuzzy msgid "Proxy generation failed: Failed to create temporary file." msgstr "Fehler bei Erstellen von Info-Datei %s: %s" #: src/clients/credentials/arcproxy.cpp:1312 msgid "Proxy generation failed: Failed to retrieve VOMS information." 
msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:100 #: src/hed/libs/credential/ARCProxyUtil.cpp:838 msgid "Succeeded to get info from MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:144 #: src/hed/libs/credential/ARCProxyUtil.cpp:894 msgid "Succeeded to change password on MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:185 #: src/hed/libs/credential/ARCProxyUtil.cpp:943 msgid "Succeeded to destroy credential on MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:265 #: src/hed/libs/credential/ARCProxyUtil.cpp:1032 #, fuzzy, c-format msgid "Succeeded to get a proxy in %s from MyProxy server %s" msgstr "Zurückerhaltene Nachricht von myproxy Server: %s" #: src/clients/credentials/arcproxy_myproxy.cpp:318 #: src/hed/libs/credential/ARCProxyUtil.cpp:1091 msgid "Succeeded to put a proxy onto MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_proxy.cpp:93 #: src/hed/libs/credential/ARCProxyUtil.cpp:397 #: src/hed/libs/credential/ARCProxyUtil.cpp:1378 msgid "Failed to add VOMS AC extension. Your proxy may be incomplete." msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:63 #, fuzzy msgid "" "Failed to process VOMS configuration or no suitable configuration lines " "found." msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/clients/credentials/arcproxy_voms.cpp:75 #, fuzzy, c-format msgid "Failed to parse requested VOMS lifetime: %s" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/clients/credentials/arcproxy_voms.cpp:93 #: src/hed/libs/credential/ARCProxyUtil.cpp:634 #, c-format msgid "Cannot get VOMS server address information from vomses line: \"%s\"" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:97 #: src/clients/credentials/arcproxy_voms.cpp:99 #: src/hed/libs/credential/ARCProxyUtil.cpp:644 #: src/hed/libs/credential/ARCProxyUtil.cpp:646 #, c-format msgid "Contacting VOMS server (named %s): %s on port: %s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:105 #, c-format msgid "Failed to parse requested VOMS server port number: %s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:122 msgid "List functionality is not supported for RESTful VOMS interface" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:132 #: src/clients/credentials/arcproxy_voms.cpp:188 #, c-format msgid "" "The VOMS server with the information:\n" "\t%s\n" "can not be reached, please make sure it is available." msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:133 #: src/clients/credentials/arcproxy_voms.cpp:138 #: src/clients/credentials/arcproxy_voms.cpp:189 #: src/clients/credentials/arcproxy_voms.cpp:194 #, c-format msgid "" "Collected error is:\n" "\t%s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:137 #: src/clients/credentials/arcproxy_voms.cpp:193 #, fuzzy, c-format msgid "No valid response from VOMS server: %s" msgstr "Frühe Antwort vom Server" #: src/clients/credentials/arcproxy_voms.cpp:155 msgid "List functionality is not supported for legacy VOMS interface" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:167 #, fuzzy, c-format msgid "Failed to parse VOMS command: %s" msgstr "Fehler bei Anlegen von GSI Context: %s" #: src/clients/credentials/arcproxy_voms.cpp:204 #, c-format msgid "" "There are %d servers with the same name: %s in your vomses file, but none of " "them can be reached, or can return a valid message." 
msgstr "" #: src/clients/data/arccp.cpp:77 src/clients/data/arccp.cpp:330 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:509 #, c-format msgid "Current transfer FAILED: %s" msgstr "Aktueller Transfer SCHLUG FEHL: %s" #: src/clients/data/arccp.cpp:79 src/clients/data/arccp.cpp:135 #: src/clients/data/arccp.cpp:332 src/clients/data/arcls.cpp:225 #: src/clients/data/arcmkdir.cpp:73 src/clients/data/arcrename.cpp:89 #: src/clients/data/arcrm.cpp:95 msgid "This seems like a temporary error, please try again later" msgstr "" "Dies scheint ein vorübergehender Fehler zu sein, bitte später nochmal " "probieren" #: src/clients/data/arccp.cpp:87 src/clients/data/arccp.cpp:96 #, fuzzy, c-format msgid "Unable to copy %s" msgstr "Konnter Broker %s nicht laden" #: src/clients/data/arccp.cpp:88 src/clients/data/arccp.cpp:97 #: src/clients/data/arcls.cpp:150 src/clients/data/arcls.cpp:159 #: src/clients/data/arcmkdir.cpp:55 src/clients/data/arcmkdir.cpp:64 #: src/clients/data/arcrename.cpp:67 src/clients/data/arcrename.cpp:76 #: src/clients/data/arcrm.cpp:68 src/clients/data/arcrm.cpp:80 msgid "Invalid credentials, please check proxy and/or CA certificates" msgstr "" #: src/clients/data/arccp.cpp:94 src/clients/data/arcls.cpp:156 #: src/clients/data/arcmkdir.cpp:61 src/clients/data/arcrename.cpp:73 #: src/clients/data/arcrm.cpp:77 #, fuzzy msgid "Proxy expired" msgstr "Proxy store:" #: src/clients/data/arccp.cpp:112 src/clients/data/arccp.cpp:116 #: src/clients/data/arccp.cpp:149 src/clients/data/arccp.cpp:153 #: src/clients/data/arccp.cpp:358 src/clients/data/arccp.cpp:363 #: src/clients/data/arcls.cpp:123 src/clients/data/arcmkdir.cpp:28 #: src/clients/data/arcrename.cpp:29 src/clients/data/arcrename.cpp:33 #: src/clients/data/arcrm.cpp:36 #, c-format msgid "Invalid URL: %s" msgstr "Ungültige URL: %s" #: src/clients/data/arccp.cpp:128 msgid "Third party transfer is not supported for these endpoints" msgstr "" #: src/clients/data/arccp.cpp:130 msgid "" "Protocol(s) not supported - please check that the relevant gfal2\n" " plugins are installed (gfal2-plugin-* packages)" msgstr "" #: src/clients/data/arccp.cpp:133 #, c-format msgid "Transfer FAILED: %s" msgstr "Transfer FEHLER: %s" #: src/clients/data/arccp.cpp:161 src/clients/data/arccp.cpp:187 #: src/clients/data/arccp.cpp:374 src/clients/data/arccp.cpp:402 #, c-format msgid "Can't read list of sources from file %s" msgstr "" #: src/clients/data/arccp.cpp:166 src/clients/data/arccp.cpp:202 #: src/clients/data/arccp.cpp:379 src/clients/data/arccp.cpp:418 #, c-format msgid "Can't read list of destinations from file %s" msgstr "" #: src/clients/data/arccp.cpp:171 src/clients/data/arccp.cpp:385 msgid "Numbers of sources and destinations do not match" msgstr "" #: src/clients/data/arccp.cpp:216 msgid "Fileset registration is not supported yet" msgstr "" #: src/clients/data/arccp.cpp:222 src/clients/data/arccp.cpp:295 #: src/clients/data/arccp.cpp:456 #, c-format msgid "Unsupported source url: %s" msgstr "Nicht unterstützte URL für Quelle: %s" #: src/clients/data/arccp.cpp:226 src/clients/data/arccp.cpp:299 #, c-format msgid "Unsupported destination url: %s" msgstr "Nicht unterstützte URL für Ziel: %s" #: src/clients/data/arccp.cpp:233 msgid "" "For registration source must be ordinary URL and destination must be " "indexing service" msgstr "" #: src/clients/data/arccp.cpp:243 #, fuzzy, c-format msgid "Could not obtain information about source: %s" msgstr "Fehler bei Bezug von Information für Job: %s" #: src/clients/data/arccp.cpp:250 msgid "" 
"Metadata of source does not match existing destination. Use the --force " "option to override this." msgstr "" #: src/clients/data/arccp.cpp:262 msgid "Failed to accept new file/destination" msgstr "" #: src/clients/data/arccp.cpp:268 src/clients/data/arccp.cpp:274 #, fuzzy, c-format msgid "Failed to register new file/destination: %s" msgstr "Fehler bei Schreiben zu Datein %s: %s" #: src/clients/data/arccp.cpp:436 msgid "Fileset copy to single object is not supported yet" msgstr "" #: src/clients/data/arccp.cpp:446 msgid "Can't extract object's name from source url" msgstr "" #: src/clients/data/arccp.cpp:465 #, fuzzy, c-format msgid "%s. Cannot copy fileset" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/clients/data/arccp.cpp:475 src/hed/libs/compute/ExecutionTarget.cpp:256 #: src/hed/libs/compute/ExecutionTarget.cpp:328 #, c-format msgid "Name: %s" msgstr "Name %s" #: src/clients/data/arccp.cpp:478 #, c-format msgid "Source: %s" msgstr "Quelle: %s" #: src/clients/data/arccp.cpp:479 #, c-format msgid "Destination: %s" msgstr "Ziel: %s" #: src/clients/data/arccp.cpp:485 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:516 msgid "Current transfer complete" msgstr "Aktueller Transfer vollständig" #: src/clients/data/arccp.cpp:488 msgid "Some transfers failed" msgstr "Einige Transfers schlugen fehl" #: src/clients/data/arccp.cpp:498 #, c-format msgid "Directory: %s" msgstr "Verzeichnis: %s" #: src/clients/data/arccp.cpp:518 msgid "Transfer complete" msgstr "Transfer vollständig" #: src/clients/data/arccp.cpp:537 msgid "source destination" msgstr "Quelle Ziel" #: src/clients/data/arccp.cpp:538 msgid "" "The arccp command copies files to, from and between grid storage elements." msgstr "" "Mit arccp werden Dateien zu, von und zwischen grid storage Elementen kopiert." #: src/clients/data/arccp.cpp:543 msgid "" "use passive transfer (off by default if secure is on, on by default if " "secure is not requested)" msgstr "" #: src/clients/data/arccp.cpp:549 msgid "do not try to force passive transfer" msgstr "versuche nicht, passiven Transfer zu erzwigen" #: src/clients/data/arccp.cpp:554 msgid "" "if the destination is an indexing service and not the same as the source and " "the destination is already registered, then the copy is normally not done. " "However, if this option is specified the source is assumed to be a replica " "of the destination created in an uncontrolled way and the copy is done like " "in case of replication. Using this option also skips validation of completed " "transfers." msgstr "" #: src/clients/data/arccp.cpp:567 msgid "show progress indicator" msgstr "zeige Fortschrittsanzeige" #: src/clients/data/arccp.cpp:572 #, fuzzy msgid "" "do not transfer, but register source into destination. destination must be a " "meta-url." 
msgstr "" "transferiere Datei nicht, registriere sie nur - Zeil muss eine nicht-" "existierende Meta-URL sein" #: src/clients/data/arccp.cpp:578 msgid "use secure transfer (insecure by default)" msgstr "Nutze sicheren Transfer (unsicher ist Voreinstellung)" #: src/clients/data/arccp.cpp:583 msgid "path to local cache (use to put file into cache)" msgstr "" #: src/clients/data/arccp.cpp:588 src/clients/data/arcls.cpp:301 #, fuzzy msgid "operate recursively" msgstr "arbeite rekursiv bis zu einer festgelegten Tiefe" #: src/clients/data/arccp.cpp:593 src/clients/data/arcls.cpp:306 msgid "operate recursively up to specified level" msgstr "arbeite rekursiv bis zu einer festgelegten Tiefe" #: src/clients/data/arccp.cpp:594 src/clients/data/arcls.cpp:307 msgid "level" msgstr "Tiefe" #: src/clients/data/arccp.cpp:598 msgid "number of retries before failing file transfer" msgstr "Anzahl von Wiederholungen bis zu einem Abbruch der Dateiübertragung" #: src/clients/data/arccp.cpp:599 msgid "number" msgstr "Nummer" #: src/clients/data/arccp.cpp:603 msgid "" "physical location to write to when destination is an indexing service. Must " "be specified for indexing services which do not automatically generate " "physical locations. Can be specified multiple times - locations will be " "tried in order until one succeeds." msgstr "" #: src/clients/data/arccp.cpp:611 msgid "" "perform third party transfer, where the destination pulls from the source " "(only available with GFAL plugin)" msgstr "" #: src/clients/data/arccp.cpp:617 src/clients/data/arcls.cpp:323 #: src/clients/data/arcmkdir.cpp:101 src/clients/data/arcrename.cpp:112 #: src/clients/data/arcrm.cpp:127 msgid "list the available plugins (protocols supported)" msgstr "" #: src/clients/data/arccp.cpp:656 src/clients/data/arcls.cpp:363 #: src/clients/data/arcmkdir.cpp:141 src/clients/data/arcrename.cpp:152 #: src/clients/data/arcrm.cpp:168 msgid "Protocol plugins available:" msgstr "" #: src/clients/data/arccp.cpp:681 src/clients/data/arcls.cpp:388 #: src/clients/data/arcmkdir.cpp:165 src/clients/data/arcrename.cpp:175 #: src/clients/data/arcrm.cpp:193 msgid "Wrong number of parameters specified" msgstr "Falsche Anzahl an Parametern übertragen" #: src/clients/data/arccp.cpp:686 msgid "Options 'p' and 'n' can't be used simultaneously" msgstr "" #: src/clients/data/arcls.cpp:129 src/clients/data/arcmkdir.cpp:34 #: src/clients/data/arcrm.cpp:43 #, c-format msgid "Can't read list of locations from file %s" msgstr "" #: src/clients/data/arcls.cpp:144 src/clients/data/arcmkdir.cpp:49 #: src/clients/data/arcrename.cpp:61 #, fuzzy msgid "Unsupported URL given" msgstr "Nicht-unterstützte URL angegeben" #: src/clients/data/arcls.cpp:149 src/clients/data/arcls.cpp:158 #, fuzzy, c-format msgid "Unable to list content of %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/clients/data/arcls.cpp:228 msgid "Warning: Failed listing files but some information is obtained" msgstr "" #: src/clients/data/arcls.cpp:282 src/clients/data/arcmkdir.cpp:90 msgid "url" msgstr "URL" #: src/clients/data/arcls.cpp:283 msgid "" "The arcls command is used for listing files in grid storage elements and " "file\n" "index catalogues." 
msgstr "" "Mit arcls werden Verzeichniss auf grid storage Elementen und Datei Index " "Katalogen angegeben" #: src/clients/data/arcls.cpp:292 msgid "show URLs of file locations" msgstr "zeige URLs von Datei-Lokalisationen" #: src/clients/data/arcls.cpp:296 msgid "display all available metadata" msgstr "zeige alle verfügbare Metadaten" #: src/clients/data/arcls.cpp:310 msgid "" "show only description of requested object, do not list content of directories" msgstr "" #: src/clients/data/arcls.cpp:314 msgid "treat requested object as directory and always try to list content" msgstr "" #: src/clients/data/arcls.cpp:318 msgid "check readability of object, does not show any information about object" msgstr "" #: src/clients/data/arcls.cpp:393 msgid "Incompatible options --nolist and --forcelist requested" msgstr "" #: src/clients/data/arcls.cpp:398 msgid "Requesting recursion and --nolist has no sense" msgstr "" #: src/clients/data/arcmkdir.cpp:54 src/clients/data/arcmkdir.cpp:63 #, fuzzy, c-format msgid "Unable to create directory %s" msgstr "Fehler bei Anlegen/Finden von Verzeichnis %s, (%d)" #: src/clients/data/arcmkdir.cpp:91 #, fuzzy msgid "" "The arcmkdir command creates directories on grid storage elements and " "catalogs." msgstr "" "Mit arcls werden Verzeichniss auf grid storage Elementen und Datei Index " "Katalogen angegeben" #: src/clients/data/arcmkdir.cpp:96 msgid "make parent directories as needed" msgstr "" #: src/clients/data/arcrename.cpp:41 msgid "Both URLs must have the same protocol, host and port" msgstr "" #: src/clients/data/arcrename.cpp:51 #, fuzzy msgid "Cannot rename to or from root directory" msgstr "Konnte classname für Policy nicht von Konfiguration parsen" #: src/clients/data/arcrename.cpp:55 #, fuzzy msgid "Cannot rename to the same URL" msgstr "Kann doc Argument nicht anlegen" #: src/clients/data/arcrename.cpp:66 src/clients/data/arcrename.cpp:75 #, fuzzy, c-format msgid "Unable to rename %s" msgstr "Fehler bei Erstellen von GUID in RLS: %s" #: src/clients/data/arcrename.cpp:106 msgid "old_url new_url" msgstr "" #: src/clients/data/arcrename.cpp:107 #, fuzzy msgid "The arcrename command renames files on grid storage elements." msgstr "" "Mit arccp werden Dateien zu, von und zwischen grid storage Elementen kopiert." #: src/clients/data/arcrm.cpp:58 #, fuzzy, c-format msgid "Unsupported URL given: %s" msgstr "Nicht-unterstützte URL angegeben" #: src/clients/data/arcrm.cpp:67 src/clients/data/arcrm.cpp:79 #, fuzzy, c-format msgid "Unable to remove file %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/clients/data/arcrm.cpp:115 #, fuzzy msgid "url [url ...]" msgstr "[Cluster ...]" #: src/clients/data/arcrm.cpp:116 #, fuzzy msgid "The arcrm command deletes files on grid storage elements." msgstr "" "Mit arccp werden Dateien zu, von und zwischen grid storage Elementen kopiert." 
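# Editor's note: the help strings above describe arcproxy and the data-management
# clients (arccp, arcls, arcmkdir, arcrename, arcrm). A short, hypothetical shell
# session is sketched below. The --voms and --order syntax, the knowarc.eu VO path,
# and the constraint names validityPeriod/keybits are taken verbatim from the messages
# above; the option spellings --constraint and --long, and the srm://se.example.org
# URLs, are assumptions for illustration and should be verified against each command's
# --help output.
#
#   # 12-hour VOMS proxy with an explicit attribute ordering
#   arcproxy --voms knowarc.eu:/knowarc.eu/coredev/Role=Developer --order /knowarc.eu/coredev:Developer --constraint validityPeriod=12h --constraint keybits=2048
#
#   # copy a local file to a storage element, inspect it, rename it, remove it
#   arccp /tmp/input.dat srm://se.example.org/data/input.dat
#   arcls --long srm://se.example.org/data/
#   arcrename srm://se.example.org/data/input.dat srm://se.example.org/data/input.old
#   arcrm srm://se.example.org/data/input.old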
#: src/clients/data/arcrm.cpp:121 msgid "" "remove logical file name registration even if not all physical instances " "were removed" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:53 msgid "Cannot initialize ARCHERY domain name for query" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:60 #, fuzzy msgid "Cannot create resolver from /etc/resolv.conf" msgstr "Konnte classname für Policy nicht von Konfiguration parsen" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:68 msgid "Cannot query service endpoint TXT records from DNS" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:79 msgid "Cannot parse service endpoint TXT records." msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:124 #, c-format msgid "Wrong service record field \"%s\" found in the \"%s\"" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:129 #, c-format msgid "Malformed ARCHERY record found (endpoint url is not defined): %s" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:134 #, c-format msgid "Malformed ARCHERY record found (endpoint type is not defined): %s" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:138 #, fuzzy, c-format msgid "Found service endpoint %s (type %s)" msgstr "Fand %u execution services des index service %s" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:157 #, c-format msgid "" "Status for service endpoint \"%s\" is set to inactive in ARCHERY. Skipping." msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:229 #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:161 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:149 #, c-format msgid "Job %s has no delegation associated. Can't renew such job." msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:241 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:161 #, fuzzy, c-format msgid "Job %s failed to renew delegation %s." msgstr "Initiierung der Delegation fehlgeschlagen" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:313 #, fuzzy, c-format msgid "Failed to process jobs - wrong response: %u" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:314 #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:323 #, fuzzy, c-format msgid "Content: %s" msgstr "Anfrage: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:317 #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:329 #, fuzzy, c-format msgid "Failed to process job: %s" msgstr "Löschen fehlgeschlagen von job: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:327 #, fuzzy msgid "Failed to process jobs - failed to parse response" msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:340 #, fuzzy, c-format msgid "No response returned: %s" msgstr "Keine Antwort von %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:364 #, fuzzy, c-format msgid "Failed to process job: %s - %s %s" msgstr "Fehler bei Entfernen von hard link %s: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:431 #, fuzzy, c-format msgid "Failed retrieving job description for job: %s" msgstr "Fehler beim Bezug der Job Beschreibung von Job: %s" #: src/hed/acc/ARCREST/JobListRetrieverPluginREST.cpp:29 msgid "Collecting Job (A-REX REST jobs) information." 
msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:50 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:84 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:124 #, fuzzy msgid "Failed to communicate to delegation endpoint." msgstr "Fehler bei der Initialisierung der delegation credentials" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:55 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:89 #, c-format msgid "Unexpected response code from delegation endpoint - %u" msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:57 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:91 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:242 #: src/hed/dmc/gridftp/Lister.cpp:223 src/hed/dmc/gridftp/Lister.cpp:243 #: src/hed/dmc/gridftp/Lister.cpp:468 src/hed/dmc/gridftp/Lister.cpp:475 #: src/hed/dmc/gridftp/Lister.cpp:497 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:163 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:196 #, c-format msgid "Response: %s" msgstr "Antwort: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:62 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:96 msgid "Missing response from delegation endpoint." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:71 #, fuzzy, c-format msgid "Unexpected delegation location from delegation endpoint - %s." msgstr "" "Kann delegation credential nicht erhalten: %s von delegation service: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:128 #, c-format msgid "Unexpected response code from delegation endpoint: %u, %s." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:177 #, fuzzy msgid "Unable to submit jobs. Failed to delegate credentials." msgstr "Fehler bei der Initialisierung der delegation credentials" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:192 #, fuzzy msgid "Failed to prepare job description" msgstr "Fehler beim Bezug der Job Beschreibung von Job: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:201 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:87 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:401 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:116 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:262 #, fuzzy, c-format msgid "Unable to submit job. Job description is not valid in the %s format: %s" msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:208 #, fuzzy msgid "Unable to submit job. Failed to assign delegation to job description." msgstr "Submit: Fehler bei Senden von Job Beschreibung" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:231 #, fuzzy msgid "Failed to submit all jobs." msgstr "Konnte job nicht starten" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:241 #, fuzzy, c-format msgid "Failed to submit all jobs: %u %s" msgstr "Konnte job nicht starten" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:254 #, fuzzy, c-format msgid "Failed to submit all jobs: %s" msgstr "Konnte job nicht starten" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:274 #, fuzzy, c-format msgid "Failed to submit all jobs: %s %s" msgstr "Konnte job nicht starten" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:289 msgid "Failed uploading local input files" msgstr "Konnte lokale Inputdateien nicht hochladen" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:27 msgid "Querying WSRF GLUE2 computing REST endpoint." 
msgstr "" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:60 #, c-format msgid "CONTENT %u: %s" msgstr "" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:64 #, fuzzy msgid "Response is not XML" msgstr "Antwort: %s" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:69 #, c-format msgid "Parsed domains: %u" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:14 msgid "Sorting according to free slots in queue" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:15 msgid "Random sorting" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:16 msgid "Sorting according to specified benchmark (default \"specint2000\")" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:17 msgid "Sorting according to input data availability at target" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:18 msgid "Performs neither sorting nor matching" msgstr "" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:24 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of waiting " "jobs" msgstr "" "Ziel %s entfernt durch FastestQueueBroker, die Anzahl wartender Jobs wird " "nicht genannt" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:27 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of total slots" msgstr "" "Ziel %s entfernt durch FastestQueueBroker, die Anzahl vorhandener slots wird " "nicht genannt" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:30 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of free slots" msgstr "" "Ziel %s entfernt durch FastestQueueBroker, die Anzahl freier slots wird " "nicht genannt" #: src/hed/acc/EMIES/EMIESClient.cpp:81 #, fuzzy msgid "Creating an EMI ES client" msgstr "Anlegen eines CREAM client" #: src/hed/acc/EMIES/EMIESClient.cpp:85 #, fuzzy msgid "Unable to create SOAP client used by EMIESClient." msgstr "Konnte SOAP client nicht anlegen für CREAMClient." #: src/hed/acc/EMIES/EMIESClient.cpp:133 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:505 msgid "Initiating delegation procedure" msgstr "Initialisierung der Delegations-Prozedur" #: src/hed/acc/EMIES/EMIESClient.cpp:163 #, fuzzy msgid "Re-creating an EMI ES client" msgstr "Anlegen eines CREAM client" #: src/hed/acc/EMIES/EMIESClient.cpp:180 #, c-format msgid "Processing a %s request" msgstr "Verarbeite %s Anfrage" #: src/hed/acc/EMIES/EMIESClient.cpp:191 #, c-format msgid "%s request failed" msgstr "Anfrage %s schlug fehl" #: src/hed/acc/EMIES/EMIESClient.cpp:200 #, c-format msgid "No response from %s" msgstr "Keine Antwort von %s" #: src/hed/acc/EMIES/EMIESClient.cpp:209 #, c-format msgid "%s request to %s failed with response: %s" msgstr "%s Anfrage an %s schlug fehl mit Antwort %s" #: src/hed/acc/EMIES/EMIESClient.cpp:224 #, c-format msgid "XML response: %s" msgstr "XML Antwort: %s" #: src/hed/acc/EMIES/EMIESClient.cpp:234 #, fuzzy, c-format msgid "%s request to %s failed. Unexpected response: %s." 
msgstr "%s Anfrage an %s schlug fehl mit Antwort %s" #: src/hed/acc/EMIES/EMIESClient.cpp:248 src/hed/acc/EMIES/EMIESClient.cpp:355 #, fuzzy, c-format msgid "Creating and sending job submit request to %s" msgstr "Erstelle und sende submit Anfrage an %s" #: src/hed/acc/EMIES/EMIESClient.cpp:313 src/hed/acc/EMIES/EMIESClient.cpp:416 #, c-format msgid "Job description to be sent: %s" msgstr "Zu sendende Job-Beschreibung : %s" #: src/hed/acc/EMIES/EMIESClient.cpp:426 src/hed/acc/EMIES/EMIESClient.cpp:609 #: src/hed/acc/EMIES/EMIESClient.cpp:1098 #, fuzzy, c-format msgid "New limit for vector queries returned by EMI ES service: %d" msgstr "Kein Job identifier von BES service zurückerhalten" #: src/hed/acc/EMIES/EMIESClient.cpp:434 src/hed/acc/EMIES/EMIESClient.cpp:617 #: src/hed/acc/EMIES/EMIESClient.cpp:1106 #, c-format msgid "" "Error: Service returned a limit higher or equal to current limit (current: " "%d; returned: %d)" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:502 src/hed/acc/EMIES/EMIESClient.cpp:536 #: src/hed/acc/EMIES/EMIESClient.cpp:592 #, fuzzy, c-format msgid "Creating and sending job information query request to %s" msgstr "Erstelle und sende job information query request an %s" #: src/hed/acc/EMIES/EMIESClient.cpp:775 #, fuzzy, c-format msgid "Creating and sending service information request to %s" msgstr "Erstelle und sende service information query request an %s" #: src/hed/acc/EMIES/EMIESClient.cpp:832 #, fuzzy, c-format msgid "Creating and sending service information query request to %s" msgstr "Erstelle und sende service information query request an %s" #: src/hed/acc/EMIES/EMIESClient.cpp:880 src/hed/acc/EMIES/EMIESClient.cpp:901 #, fuzzy, c-format msgid "Creating and sending job clean request to %s" msgstr "Erstelle und sende clean request an %s" #: src/hed/acc/EMIES/EMIESClient.cpp:922 #, fuzzy, c-format msgid "Creating and sending job suspend request to %s" msgstr "Erstelle und sende job resume request an %s" #: src/hed/acc/EMIES/EMIESClient.cpp:943 #, fuzzy, c-format msgid "Creating and sending job resume request to %s" msgstr "Erstelle und sende job resume request an %s" #: src/hed/acc/EMIES/EMIESClient.cpp:964 #, fuzzy, c-format msgid "Creating and sending job restart request to %s" msgstr "Erstelle und sende job resume request an %s" #: src/hed/acc/EMIES/EMIESClient.cpp:1021 #, fuzzy, c-format msgid "Creating and sending job notify request to %s" msgstr "Erstelle und sende job migrate request an %s" #: src/hed/acc/EMIES/EMIESClient.cpp:1076 #, fuzzy, c-format msgid "Creating and sending notify request to %s" msgstr "Erstelle und sende clean request an %s" #: src/hed/acc/EMIES/EMIESClient.cpp:1166 #, fuzzy, c-format msgid "Creating and sending job list request to %s" msgstr "Erstelle und sende job migrate request an %s" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:175 #, fuzzy, c-format msgid "Job %s failed to renew delegation %s - %s." 
msgstr "Initiierung der Delegation fehlgeschlagen" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:197 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:464 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:191 #, c-format msgid "Job %s does not report a resumable state" msgstr "Job %s berichtet nicht von einem resumable Zustand" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:202 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:196 #, fuzzy, c-format msgid "Resuming job: %s at state: %s (%s)" msgstr "Resuming Job: %s in Zustand: %s" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:215 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:520 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:205 msgid "Job resuming successful" msgstr "Job erfolgreich resumed." #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:248 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:251 #, fuzzy, c-format msgid "Failed retrieving information for job: %s" msgstr "Fehler bei Bezug von Information für Job: %s" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:330 #, fuzzy msgid "Retrieving job description of EMI ES jobs is not supported" msgstr "Resume von CREAM jobs wird nicht unterstützt" #: src/hed/acc/EMIES/JobListRetrieverPluginEMIES.cpp:37 #, c-format msgid "Listing jobs succeeded, %d jobs found" msgstr "" #: src/hed/acc/EMIES/JobListRetrieverPluginEMIES.cpp:53 #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.cpp:111 #: src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp:83 #, c-format msgid "" "Skipping retrieved job (%s) because it was submitted via another interface " "(%s)." msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:47 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:41 #, fuzzy msgid "" "Failed to delegate credentials to server - no delegation interface found" msgstr "Konnte delegation credentials in Client Konfiguration nicht finden" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:54 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:48 #, fuzzy, c-format msgid "Failed to delegate credentials to server - %s" msgstr "Konnte delegation credentatials nicht zerstören für job: %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:77 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:87 #, fuzzy msgid "Failed preparing job description" msgstr "Submit: Fehler bei Senden von Job Beschreibung" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:95 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:406 #, fuzzy msgid "Unable to submit job. 
Job description is not valid XML" msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:154 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:482 #, fuzzy msgid "No valid job identifier returned by EMI ES" msgstr "A-REX lieferte keinen Job Identifikator zurück" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:180 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:499 #, fuzzy msgid "Job failed on service side" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:190 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:509 #, fuzzy msgid "Failed to obtain state of job" msgstr "Konnte Listing nicht via FTP beziehen: %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:205 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:521 #, fuzzy msgid "Failed to wait for job to allow stage in" msgstr "Konnte nicht verbinden, um Job aufzuräumen" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:228 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:541 #, fuzzy msgid "Failed to obtain valid stagein URL for input files" msgstr "Konnte Listing nicht via FTP beziehen: %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:248 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:558 #, fuzzy, c-format msgid "Failed uploading local input files to %s" msgstr "Konnte lokale Inputdateien nicht hochladen" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:269 #, fuzzy, c-format msgid "Failed to submit job description: EMIESFault(%s , %s)" msgstr "Fehler beim Bezug der Job Beschreibung von Job: %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:278 #, fuzzy, c-format msgid "Failed to submit job description: UnexpectedError(%s)" msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:315 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:574 #, fuzzy msgid "Failed to notify service" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:367 #, fuzzy msgid "Failed preparing job description to target resources" msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:475 #, fuzzy, c-format msgid "Failed to submit job description: %s" msgstr "Fehler beim Bezug der Job Beschreibung von Job: %s" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:30 msgid "Collecting EMI-ES GLUE2 computing info endpoint information." 
msgstr "" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:50 #, fuzzy msgid "Generating EMIES targets" msgstr "Generiere A-REX target: %s" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:59 #, fuzzy, c-format msgid "Generated EMIES target: %s" msgstr "Generiere A-REX target: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:75 #: src/hed/acc/EMIES/TestEMIESClient.cpp:79 #, c-format msgid "Query returned unexpected element: %s:%s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:85 #, c-format msgid "Element validation according to GLUE2 schema failed: %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:114 #, fuzzy msgid "Resource query failed" msgstr "Anfrage %s schlug fehl" #: src/hed/acc/EMIES/TestEMIESClient.cpp:132 #, fuzzy msgid "Submission failed" msgstr "Submission von Anfrage schlug fehl" #: src/hed/acc/EMIES/TestEMIESClient.cpp:143 #, fuzzy msgid "Obtaining status failed" msgstr "Die Job Terminierungs-Anfrage schlug fehl" #: src/hed/acc/EMIES/TestEMIESClient.cpp:153 #, fuzzy msgid "Obtaining information failed" msgstr "SOAP Aufruf fehlgeschlagen" #: src/hed/acc/EMIES/TestEMIESClient.cpp:170 #, fuzzy msgid "Cleaning failed" msgstr "Delegation nicht erfolgreich: " #: src/hed/acc/EMIES/TestEMIESClient.cpp:177 #, fuzzy msgid "Notify failed" msgstr "Schreibfehler" #: src/hed/acc/EMIES/TestEMIESClient.cpp:184 #, fuzzy msgid "Kill failed" msgstr "%s fehlgeschlagen" #: src/hed/acc/EMIES/TestEMIESClient.cpp:190 #, fuzzy msgid "List failed" msgstr "%s fehlgeschlagen" #: src/hed/acc/EMIES/TestEMIESClient.cpp:201 #, fuzzy, c-format msgid "Fetching resource description from %s" msgstr "Setzer userRequestDescription zu %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:204 #: src/hed/acc/EMIES/TestEMIESClient.cpp:272 #: src/hed/acc/EMIES/TestEMIESClient.cpp:282 #: src/hed/acc/EMIES/TestEMIESClient.cpp:293 #, fuzzy, c-format msgid "Failed to obtain resource description: %s" msgstr "Fehler beim Bezug der Job Beschreibung von Job: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:213 #: src/hed/acc/EMIES/TestEMIESClient.cpp:217 #, c-format msgid "Resource description contains unexpected element: %s:%s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:223 msgid "Resource description validation according to GLUE2 schema failed: " msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:228 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:560 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:819 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:517 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:706 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:129 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:169 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1214 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1248 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1430 #: src/hed/identitymap/ArgusPDPClient.cpp:444 #: src/hed/identitymap/ArgusPEPClient.cpp:98 #: src/hed/identitymap/ArgusPEPClient.cpp:345 #: src/hed/libs/common/Thread.cpp:242 src/hed/libs/common/Thread.cpp:245 #: src/hed/libs/common/Thread.cpp:248 #: src/hed/libs/credential/Credential.cpp:1048 #: src/hed/libs/data/DataPointDelegate.cpp:628 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:68 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:84 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:100 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:119 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:129 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:137 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:146 #: src/hed/mcc/tls/PayloadTLSMCC.cpp:82 src/hed/shc/arcpdp/ArcPDP.cpp:235 #: 
src/hed/shc/samltokensh/SAMLTokenSH.cpp:293 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:247 #: src/libs/data-staging/Scheduler.cpp:117 #: src/services/a-rex/delegation/DelegationStore.cpp:40 #: src/services/a-rex/delegation/DelegationStore.cpp:45 #: src/services/a-rex/delegation/DelegationStore.cpp:50 #: src/services/a-rex/delegation/DelegationStore.cpp:82 #: src/services/a-rex/delegation/DelegationStore.cpp:88 #: src/services/a-rex/grid-manager/inputcheck.cpp:33 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:480 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:551 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:576 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:587 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:598 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:609 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:617 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:623 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:628 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:633 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:643 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:652 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:660 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:671 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:678 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:736 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:743 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:783 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:787 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:790 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:859 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:872 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:889 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:901 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1174 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1179 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1208 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1221 #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:379 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:386 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:426 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:478 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:593 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:627 #, c-format msgid "%s" msgstr "%s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:248 #, fuzzy msgid "Resource description is empty" msgstr "Anfrage ist leer" #: src/hed/acc/EMIES/TestEMIESClient.cpp:255 #, c-format msgid "Resource description provides URL for interface %s: %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:260 msgid "Resource description provides no URLs for interfaces" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:263 #, fuzzy msgid "Resource description validation passed" msgstr "Quelle Ziel" #: src/hed/acc/EMIES/TestEMIESClient.cpp:265 #, c-format msgid "Requesting ComputingService elements of resource description at %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:270 msgid "Performing /Services/ComputingService query" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:274 #: src/hed/acc/EMIES/TestEMIESClient.cpp:284 #: src/hed/acc/EMIES/TestEMIESClient.cpp:295 msgid "Query 
returned no elements." msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:280 msgid "Performing /ComputingService query" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:291 msgid "Performing /* query" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:301 #, fuzzy msgid "All queries failed" msgstr "Anfrage %s schlug fehl" #: src/hed/acc/EMIES/TestEMIESClient.cpp:331 #, c-format msgid "" "Number of ComputingService elements obtained from full document and XPath " "query do not match: %d != %d" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:334 msgid "Resource description query validation passed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:336 #, fuzzy, c-format msgid "Unsupported command: %s" msgstr "Nicht unterstützte URL für Quelle: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:142 #, c-format msgid "Connect: Failed to init handle: %s" msgstr "Connect: Konnte init handle nicht initialisieren: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:148 #, fuzzy, c-format msgid "Failed to enable IPv6: %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:158 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:172 #, c-format msgid "Connect: Failed to connect: %s" msgstr "Connect: Verbindung zu %s schlug fehl" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:165 #, c-format msgid "Connect: Connecting timed out after %d ms" msgstr "Connect: Zeitüberschreitung der Verbindung nach %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:185 #, c-format msgid "Connect: Failed to init auth info handle: %s" msgstr "Connect: Konnte auth info handle nicht initialisieren: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:196 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:210 #, c-format msgid "Connect: Failed authentication: %s" msgstr "Connect: Authentikation fehlgeschlagen: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:203 #, c-format msgid "Connect: Authentication timed out after %d ms" msgstr "Connect: Zeitüberschreitung der Authentikation nach %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:224 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:256 #, fuzzy, c-format msgid "SendCommand: Command: %s" msgstr "SendCommand: Fehler: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:229 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:240 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:260 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:271 #, c-format msgid "SendCommand: Failed: %s" msgstr "SendCommand: Fehler: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:235 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:266 #, c-format msgid "SendCommand: Timed out after %d ms" msgstr "SendCommand: Zeitüberschreitung nach %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:243 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:276 #, fuzzy, c-format msgid "SendCommand: Response: %s" msgstr "SendCommand: Fehler: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:293 #, fuzzy msgid "FTP Job Control: Failed sending EPSV and PASV commands" msgstr "SendData: Fehler bei Senden von PASV Kommando" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:298 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:304 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:320 #, fuzzy, c-format msgid "FTP Job Control: Server PASV response parsing failed: %s" msgstr "SendData: Server PASV Antwort konnte nicht geparst werden: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:330 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:336 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:343 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:350 #, fuzzy, c-format msgid "FTP Job Control: Server EPSV response parsing failed: %s" msgstr "SendData: Server PASV Antwort konnte 
nicht geparst werden: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:357 #, fuzzy, c-format msgid "FTP Job Control: Server EPSV response port parsing failed: %s" msgstr "SendData: Server PASV Antwort konnte nicht geparst werden: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:366 #, fuzzy, c-format msgid "FTP Job Control: Failed to apply local address to data connection: %s" msgstr "Fehler bei Schließen von Verbindung 1" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:372 #, fuzzy, c-format msgid "" "FTP Job Control: Can't parse host and/or port in response to EPSV/PASV: %s" msgstr "Kann host and port nicht aus Antwort zu PASV herauslesen" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:377 #, fuzzy, c-format msgid "FTP Job Control: Data channel: %d.%d.%d.%d:%d" msgstr "Datenkanal: %d.%d.%d.%d %d" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:393 #, fuzzy, c-format msgid "FTP Job Control: Data channel: [%s]:%d" msgstr "SendData: Fehler bei Datenverbindung zum Schreiben: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:398 #, fuzzy, c-format msgid "FTP Job Control: Local port failed: %s" msgstr "SendData: Lokaler port schlug fehl: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:422 #, fuzzy msgid "FTP Job Control: Failed sending DCAU command" msgstr "SendData: Fehler bei Senden von DCAU Kommando" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:427 #, fuzzy msgid "FTP Job Control: Failed sending TYPE command" msgstr "SendData: Fehler bei Senden von TYPE Kommando" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:436 #, fuzzy, c-format msgid "FTP Job Control: Local type failed: %s" msgstr "SendData: Lokaler type schlug fehl: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:446 #, fuzzy, c-format msgid "FTP Job Control: Failed sending STOR command: %s" msgstr "SendData: Fehler bei Senden von STOR Kommando: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:454 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:475 #, fuzzy, c-format msgid "FTP Job Control: Data connect write failed: %s" msgstr "SendData: Fehler bei Datenverbindung zum Schreiben: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:461 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:469 #, fuzzy, c-format msgid "FTP Job Control: Data connect write timed out after %d ms" msgstr "" "SendData: Zeitüberschreitung bei Datenverbindung zum Schreiben nach %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:487 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:507 #, fuzzy, c-format msgid "FTP Job Control: Data write failed: %s" msgstr "SendData: Schreiben von Daten schlug fehl: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:493 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:501 #, fuzzy, c-format msgid "FTP Job Control: Data write timed out after %d ms" msgstr "SendData: Zeitüberschreitung beim Schreiben nach %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:527 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:538 #, fuzzy, c-format msgid "Disconnect: Failed aborting - ignoring: %s" msgstr "Disconnect: Fehler beim Schließen der Verbindung - ignoriert: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:530 #, fuzzy, c-format msgid "Disconnect: Data close timed out after %d ms" msgstr "Disconnect: Zeitüberschreitung vom Schließen nach %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:541 #, fuzzy, c-format msgid "Disconnect: Abort timed out after %d ms" msgstr "Disconnect: Zeitüberschreitung vom Schließen nach %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:549 #, fuzzy, c-format msgid "Disconnect: Failed quitting - ignoring: %s" msgstr "Disconnect: Fehler beim Schließen der Verbindung - ignoriert: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:552 #, 
c-format msgid "Disconnect: Quitting timed out after %d ms" msgstr "Disconnect: Zeitüberschreitung beim Verlassen nach %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:561 #, c-format msgid "Disconnect: Failed closing - ignoring: %s" msgstr "Disconnect: Fehler beim Schließen der Verbindung - ignoriert: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:567 #, c-format msgid "Disconnect: Closing timed out after %d ms" msgstr "Disconnect: Zeitüberschreitung vom Schließen nach %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:582 #, fuzzy msgid "Disconnect: waiting for globus handle to settle" msgstr "Disconnect: Konnte handle nicht freigeben: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:596 #, fuzzy msgid "Disconnect: globus handle is stuck." msgstr "Disconnect: Konnte handle nicht freigeben: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:604 #, fuzzy, c-format msgid "Disconnect: Failed destroying handle: %s. Can't handle such situation." msgstr "Disconnect: Konnte handle nicht freigeben: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:607 #, fuzzy msgid "Disconnect: handle destroyed." msgstr "Disconnect: Konnte handle nicht freigeben: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:43 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:44 #, fuzzy msgid "" "Missing reference to factory and/or module. It is unsafe to use Globus in " "non-persistent mode - SubmitterPlugin for GRIDFTPJOB is disabled. Report to " "developers." msgstr "" "Fehlende Referenz zu factory und/doer Module. Es ist unsicher, Globus im " "nicht-persistenten Modus zu nutzen - (Grid)FTP code wurde disabled. Bitte " "die Entwickler informieren." #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:58 #, c-format msgid "Unable to query job information (%s), invalid URL provided (%s)" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:70 #, c-format msgid "Jobs left to query: %d" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:79 #, c-format msgid "Querying batch with %d jobs" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:97 #, fuzzy msgid "Can't create information handle - is the ARC LDAP DMC plugin available?" msgstr "" "Kann information handle nicht anlegen - ist das ARC LDAP DMC plugin " "verfügbar?" 
#: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:130 #, c-format msgid "Job information not found in the information system: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:132 msgid "" "This job was very recently submitted and might not yet have reached the " "information system" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:319 #, c-format msgid "Cleaning job: %s" msgstr "Aufräumen von Job: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:323 msgid "Failed to connect for job cleaning" msgstr "Konnte nicht verbinden, um Job aufzuräumen" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:335 msgid "Failed sending CWD command for job cleaning" msgstr "Konnte CWD Kommando nicht senden, um Job aufzuräumen" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:342 msgid "Failed sending RMD command for job cleaning" msgstr "Konnte RMD Kommando nicht senden, um Job aufzuräumen" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:349 msgid "Failed to disconnect after job cleaning" msgstr "Konnte Verbindung nicht trennen nach Aufräumen von Job" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:356 msgid "Job cleaning successful" msgstr "Job erfolgreich aufgeräumt." #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:367 #, fuzzy, c-format msgid "Cancelling job: %s" msgstr "Aufräumen von Job: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:371 #, fuzzy msgid "Failed to connect for job cancelling" msgstr "Konnte nicht verbinden, um Job aufzuräumen" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:383 msgid "Failed sending CWD command for job cancelling" msgstr "Fehler beim Senden von CWD für den Abbruch eines Jobs" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:390 msgid "Failed sending DELE command for job cancelling" msgstr "Fehler beim Senden von DELE für den Abbruch eines Jobs" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:397 msgid "Failed to disconnect after job cancelling" msgstr "Fehler beim Trennen der Verbindung nach Abbruch von Job" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:405 msgid "Job cancelling successful" msgstr "Job erfolgreich abgebrochen" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:416 #, c-format msgid "Renewing credentials for job: %s" msgstr "Erneuern der credentials für Job %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:420 msgid "Failed to connect for credential renewal" msgstr "Fehler beim Verbinden für Erneuerung von credentials" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:432 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:439 msgid "Failed sending CWD command for credentials renewal" msgstr "Fehler beim Senden von CWD Kommando für Erneuerung von credentials" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:445 msgid "Failed to disconnect after credentials renewal" msgstr "Fehler beim Trennen der Verbindung nach Erneuerung der credentials" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:452 msgid "Renewal of credentials was successful" msgstr "Erneuerung der Credentials war erfolgreich" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:474 #, fuzzy, c-format msgid "Illegal jobID specified (%s)" msgstr "Ungültige Job ID angegeben" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:481 #, c-format msgid "HER: %s" msgstr "HER: %s" #:
src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:487 #, c-format msgid "Could not create temporary file: %s" msgstr "Konnte temporäre Datei nicht anlegen: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:556 #, fuzzy, c-format msgid "Trying to retrieve job description of %s from computing resource" msgstr "Versuche Job Beschreibung von %s von Cluster zu beziehen" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:561 #, fuzzy, c-format msgid "invalid jobID: %s" msgstr "ungültige Job ID: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:603 msgid "clientxrsl found" msgstr "clientxrsl gefunden" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:606 msgid "could not find start of clientxrsl" msgstr "konnte Start von clientxrsl nicht finden" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:611 msgid "could not find end of clientxrsl" msgstr "konnte Ende von clientxrsl nicht finden" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:624 #, c-format msgid "Job description: %s" msgstr "Job Beschreibung: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:627 msgid "clientxrsl not found" msgstr "clientxrsl nicht gefunden" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:633 #, fuzzy, c-format msgid "Invalid JobDescription: %s" msgstr "Ungültige JobDescription:" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:636 #, fuzzy msgid "Valid JobDescription found" msgstr "Gültige JobDescription gefunden" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:60 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:206 msgid "Submit: Failed to connect" msgstr "Submit: Verbindungsfehler" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:68 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:214 msgid "Submit: Failed sending CWD command" msgstr "Submit: Konnte CWD Kommando nicht senden" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:79 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:225 msgid "Submit: Failed sending CWD new command" msgstr "Submit: Konnte CWD new Kommando nicht senden" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:106 #, fuzzy msgid "Failed to prepare job description." msgstr "Fehler beim Bezug der Job Beschreibung von Job: %s" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:123 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:269 msgid "Submit: Failed sending job description" msgstr "Submit: Fehler bei Senden von Job Beschreibung" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:138 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:284 msgid "Submit: Failed uploading local input files" msgstr "Submit: Hochladen der lokalen Inputfiles schlug fehl" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:194 msgid "" "Submit: service has no suitable information interface - need org.nordugrid." "ldapng" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:252 #, fuzzy msgid "Failed to prepare job description to target resources." msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:75 #, c-format msgid "[ADLParser] Unsupported EMI ES state %s." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:95 #, c-format msgid "[ADLParser] Unsupported internal state %s." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:105 #, c-format msgid "[ADLParser] Optional for %s elements are not supported yet."
msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:114 #, c-format msgid "[ADLParser] %s element must be boolean." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:126 #, c-format msgid "[ADLParser] Code in FailIfExitCodeNotEqualTo in %s is not valid number." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:364 msgid "[ADLParser] Root element is not ActivityDescription " msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:411 msgid "[ADLParser] priority is too large - using max value 100" msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:454 #, c-format msgid "[ADLParser] Unsupported URL %s for RemoteLogging." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:473 #, c-format msgid "[ADLParser] Wrong time %s in ExpirationTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:503 msgid "[ADLParser] AccessControl isn't valid XML." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:514 msgid "[ADLParser] CredentialService must contain valid URL." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:543 #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:546 msgid "[ADLParser] Only email Prorocol for Notification is supported yet." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:604 msgid "[ADLParser] Missing or wrong value in ProcessesPerSlot." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:609 msgid "[ADLParser] Missing or wrong value in ThreadsPerProcess." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:615 msgid "" "[ADLParser] Missing Name element or value in ParallelEnvironment/Option " "element." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:632 msgid "[ADLParser] NetworkInfo is not supported yet." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:646 #, c-format msgid "[ADLParser] NodeAccess value %s is not supported yet." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:654 msgid "[ADLParser] Missing or wrong value in NumberOfSlots." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:661 msgid "" "[ADLParser] The NumberOfSlots element should be specified, when the value of " "useNumberOfSlots attribute of SlotsPerHost element is \"true\"." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:668 msgid "[ADLParser] Missing or wrong value in SlotsPerHost." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:697 msgid "[ADLParser] Missing or wrong value in IndividualPhysicalMemory." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:707 msgid "[ADLParser] Missing or wrong value in IndividualVirtualMemory." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:717 msgid "[ADLParser] Missing or wrong value in DiskSpaceRequirement." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:731 msgid "[ADLParser] Benchmark is not supported yet." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:739 msgid "[ADLParser] Missing or wrong value in IndividualCPUTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:747 msgid "[ADLParser] Missing or wrong value in TotalCPUTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:756 msgid "[ADLParser] Missing or wrong value in WallTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:776 msgid "[ADLParser] Missing or empty Name in InputFile." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:787 #, c-format msgid "[ADLParser] Wrong URI specified in Source - %s." 
msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:809 msgid "[ADLParser] Missing or empty Name in OutputFile." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:815 #, c-format msgid "[ADLParser] Wrong URI specified in Target - %s." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:828 #, c-format msgid "Location URI for file %s is invalid" msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:853 #, c-format msgid "[ADLParser] CreationFlag value %s is not supported." msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:42 msgid "Left operand for RSL concatenation does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:50 msgid "Right operand for RSL concatenation does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:161 msgid "Multi-request operator only allowed at top level" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:186 msgid "RSL substitution is not a sequence" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:192 msgid "RSL substitution sequence is not of length 2" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:211 msgid "RSL substitution variable name does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:220 msgid "RSL substitution variable value does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:313 #, fuzzy msgid "End of comment not found" msgstr "clientxrsl nicht gefunden" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:324 msgid "Junk at end of RSL" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:424 msgid "End of single quoted string not found" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:441 msgid "End of double quoted string not found" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:460 #, c-format msgid "End of user delimiter (%s) quoted string not found" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:518 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:546 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:625 msgid "')' expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:528 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:609 msgid "'(' expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:536 msgid "Variable name expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:541 #, c-format msgid "Variable name (%s) contains invalid character (%s)" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:557 #, fuzzy msgid "Broken string" msgstr "Zeichenkette" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:570 msgid "No left operand for concatenation operator" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:574 msgid "No right operand for concatenation operator" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:638 msgid "Attribute name expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:643 #, c-format msgid "Attribute name (%s) contains invalid character (%s)" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:649 #, fuzzy msgid "Relation operator expected" msgstr "Sofortige Vervollständigung: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:86 msgid "Error parsing the internally set executables attribute." 
msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:102 #, c-format msgid "" "File '%s' in the 'executables' attribute is not present in the 'inputfiles' " "attribute" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:120 msgid "The value of the ftpthreads attribute must be a number from 1 to 10" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:177 msgid "'stdout' attribute must be specified when 'join' attribute is specified" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:181 msgid "" "Attribute 'join' cannot be specified when both 'stdout' and 'stderr' " "attributes is specified" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:200 msgid "Attributes 'gridtime' and 'cputime' cannot be specified together" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:204 msgid "Attributes 'gridtime' and 'walltime' cannot be specified together" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:226 msgid "" "When specifying 'countpernode' attribute, 'count' attribute must also be " "specified" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:229 msgid "Value of 'countpernode' attribute must be an integer" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:287 #, fuzzy msgid "No RSL content in job description found" msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:295 msgid "'action' attribute not allowed in user-side job description" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:304 #, c-format msgid "String successfully parsed as %s." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:313 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:331 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:349 #, c-format msgid "Attribute '%s' multiply defined" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:317 #, c-format msgid "Value of attribute '%s' expected to be single value" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:322 #, c-format msgid "Value of attribute '%s' expected to be a string" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:338 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:368 #, c-format msgid "Value of attribute '%s' is not a string" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:356 #, c-format msgid "Value of attribute '%s' is not sequence" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:360 #, c-format msgid "" "Value of attribute '%s' has wrong sequence length: Expected %d, found %d" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:492 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1375 msgid "Unexpected RSL type" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:557 msgid "At least two values are needed for the 'inputfiles' attribute" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:562 msgid "First value of 'inputfiles' attribute (filename) cannot be empty" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:587 #, fuzzy, c-format msgid "Invalid URL '%s' for input file '%s'" msgstr "Ungültige URL Option: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:596 #, fuzzy, c-format msgid "Invalid URL option syntax in option '%s' for input file '%s'" msgstr "Ungültige URL Option: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:606 #, fuzzy, c-format msgid "Invalid URL: '%s' in input file '%s'" msgstr "Ungültige URL Option: %s" #: 
src/hed/acc/JobDescriptionParser/XRSLParser.cpp:677 msgid "At least two values are needed for the 'outputfiles' attribute" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:682 msgid "First value of 'outputfiles' attribute (filename) cannot be empty" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:694 #, fuzzy, c-format msgid "Invalid URL '%s' for output file '%s'" msgstr "Ungültige URL Option: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:704 #, fuzzy, c-format msgid "Invalid URL option syntax in option '%s' for output file '%s'" msgstr "Ungültige URL Option: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:714 #, fuzzy, c-format msgid "Invalid URL: '%s' in output file '%s'" msgstr "Ungültige URL Option: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:746 #, c-format msgid "" "Invalid comparison operator '%s' used at 'delegationid' attribute, only \"=" "\" is allowed." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:764 #, c-format msgid "" "Invalid comparison operator '%s' used at 'queue' attribute in 'GRIDMANAGER' " "dialect, only \"=\" is allowed" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:770 #, c-format msgid "" "Invalid comparison operator '%s' used at 'queue' attribute, only \"!=\" or " "\"=\" are allowed." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:927 #, c-format msgid "Value of attribute '%s' expected not to be empty" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1036 msgid "The value of the acl XRSL attribute isn't valid XML." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1050 msgid "The cluster XRSL attribute is currently unsupported." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1066 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it must contain an email " "address" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1074 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it must only contain email " "addresses after state flag(s)" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1077 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it contains unknown state " "flags" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1125 msgid "priority is too large - using max value 100" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1158 #, c-format msgid "Invalid nodeaccess value: %s" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1201 msgid "Value of 'count' attribute must be an integer" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1231 msgid "Value of 'exclusiveexecution' attribute must either be 'yes' or 'no'" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1277 #, c-format msgid "Invalid action value %s" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1367 #, c-format msgid "The specified Globus attribute (%s) is not supported. %s ignored." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1371 #, c-format msgid "Unknown XRSL attribute: %s - Ignoring it." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1385 #, fuzzy, c-format msgid "Wrong language requested: %s" msgstr "*** Client Anfrage: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1722 msgid "" "Cannot output XRSL representation: The Resources.SlotRequirement." "NumberOfSlots attribute must be specified when the Resources.SlotRequirement." "SlotsPerHost attribute is specified." 
msgstr "" #: src/hed/acc/LDAP/Extractor.h:22 #, c-format msgid "Extractor[%s] (%s): %s = %s" msgstr "" #: src/hed/acc/LDAP/Extractor.h:113 src/hed/acc/LDAP/Extractor.h:130 #, c-format msgid "Extractor[%s] (%s): %s contains %s" msgstr "" #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPGLUE2.cpp:54 #, c-format msgid "Adding endpoint '%s' with interface name %s" msgstr "" #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.cpp:72 #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:46 #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:47 #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:57 msgid "Can't create information handle - is the ARC ldap DMC plugin available?" msgstr "" "Kann information handle nicht anlegen - ist das ARC LDAP DMC plugin " "verfügbar?" #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:79 #, fuzzy, c-format msgid "Unknown entry in EGIIS (%s)" msgstr "unbekannter return code %s" #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:87 msgid "" "Entry in EGIIS is missing one or more of the attributes 'Mds-Service-type', " "'Mds-Service-hn', 'Mds-Service-port' and/or 'Mds-Service-Ldap-suffix'" msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:219 msgid "" "The \"FreeSlotsWithDuration\" attribute is wrongly formatted. Ignoring it." msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:220 #: src/hed/libs/compute/GLUE2.cpp:248 #, c-format msgid "Wrong format of the \"FreeSlotsWithDuration\" = \"%s\" (\"%s\")" msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:389 #, c-format msgid "Unable to parse the %s.%s value from execution service (%s)." msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:390 #, c-format msgid "Value of %s.%s is \"%s\"" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:65 #: src/services/wrappers/python/pythonwrapper.cpp:92 msgid "Failed to initialize main Python thread" msgstr "Fehler bei Initialisierung des main Python Threads" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:71 #: src/services/wrappers/python/pythonwrapper.cpp:97 msgid "Main Python thread was not initialized" msgstr "Main Python Thread wurde nicht initialisiert" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:81 #, fuzzy, c-format msgid "Loading Python broker (%i)" msgstr "Lade python broker (%i)" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:104 #: src/services/wrappers/python/pythonwrapper.cpp:134 #, fuzzy msgid "Main Python thread is not initialized" msgstr "Main python thread wurde nicht initialisiert" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:108 #, fuzzy msgid "PythonBroker init" msgstr "PythonBroker init" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:116 msgid "" "Invalid class name. The broker argument for the PythonBroker should be\n" " Filename.Class.args (args is optional), for example SampleBroker." 
"MyBroker" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:122 #, fuzzy, c-format msgid "Class name: %s" msgstr "Klassenname: %s" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:123 #, fuzzy, c-format msgid "Module name: %s" msgstr "Modulname: %s" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:132 #: src/services/wrappers/python/pythonwrapper.cpp:178 #, fuzzy msgid "Cannot convert ARC module name to Python string" msgstr "Kann Modul name nicht zu Python Zeichenkette konvertieren" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:140 #: src/services/wrappers/python/pythonwrapper.cpp:186 #, fuzzy msgid "Cannot import ARC module" msgstr "Kann Modul nicht importieren" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:149 #: src/services/wrappers/python/pythonwrapper.cpp:196 #: src/services/wrappers/python/pythonwrapper.cpp:429 #, fuzzy msgid "Cannot get dictionary of ARC module" msgstr "Kann auf Wörterbuch des arc Moduls nicht zugreifen" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:158 #, fuzzy msgid "Cannot find ARC UserConfig class" msgstr "Kann UserConfig Klasse nicht finden" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:166 #, fuzzy msgid "UserConfig class is not an object" msgstr "UserConfig Klasse ist kein Objekt" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:173 #, fuzzy msgid "Cannot find ARC JobDescription class" msgstr "Kann arc JobDescription Klasse nicht finden" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:181 #, fuzzy msgid "JobDescription class is not an object" msgstr "JobDescription Klasse ist kein Objekt" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:188 #, fuzzy msgid "Cannot find ARC ExecutionTarget class" msgstr "Kann arc ExecutionTarget Klasse nicht finden" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:196 #, fuzzy msgid "ExecutionTarget class is not an object" msgstr "ExecutionTarget Klasse ist kein Objekt" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:207 #: src/services/wrappers/python/pythonwrapper.cpp:157 msgid "Cannot convert module name to Python string" msgstr "Kann Modul name nicht zu Python Zeichenkette konvertieren" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:215 #: src/services/wrappers/python/pythonwrapper.cpp:164 msgid "Cannot import module" msgstr "Kann Modul nicht importieren" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:224 #, fuzzy msgid "Cannot get dictionary of custom broker module" msgstr "Kann auf Wörterbuch von custom broker Modul nicht zugreifen" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:233 #, fuzzy msgid "Cannot find custom broker class" msgstr "Kann custom broker Klasse nicht finden" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:241 #, fuzzy, c-format msgid "%s class is not an object" msgstr "Klasse %s ist kein Objekt" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:247 #, fuzzy msgid "Cannot create UserConfig argument" msgstr "Kann UserConfig Argument nicht anlegen" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:255 #, fuzzy msgid "Cannot convert UserConfig to Python object" msgstr "Kann UserConfig nicht zu python Objekt konvertieren" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:263 #: src/services/wrappers/python/pythonwrapper.cpp:253 msgid "Cannot create argument of the constructor" msgstr "Kann Argument für den Konstruktor nicht anlegen" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:272 #: src/services/wrappers/python/pythonwrapper.cpp:261 #, fuzzy msgid "Cannot create instance of Python class" msgstr "Kann Instanz von Python Klasse nicht 
anlegen" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:278 #, fuzzy, c-format msgid "Python broker constructor called (%d)" msgstr "Python broker Kontruktor aufgerufen (%d)" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:302 #, fuzzy, c-format msgid "Python broker destructor called (%d)" msgstr "Python broker Destruktor aufgerufen (%d)" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:311 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:328 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:361 #, fuzzy msgid "Cannot create ExecutionTarget argument" msgstr "Kann ExecutionTarget Argument nicht anlegen" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:319 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:336 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:369 #, fuzzy, c-format msgid "Cannot convert ExecutionTarget (%s) to python object" msgstr "Kann ExecutionTarget nicht zu Python Objekt konvertieren" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:393 #, fuzzy msgid "Cannot create JobDescription argument" msgstr "Kann JobDescription Argument nicht anlegen." #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:401 #, fuzzy msgid "Cannot convert JobDescription to python object" msgstr "Kann JobDescription nicht zu Python Objekt konvertieren" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:422 msgid "Do sorting using user created python broker" msgstr "" #: src/hed/daemon/unix/daemon.cpp:84 #, c-format msgid "Daemonization fork failed: %s" msgstr "" #: src/hed/daemon/unix/daemon.cpp:95 msgid "Watchdog (re)starting application" msgstr "" #: src/hed/daemon/unix/daemon.cpp:100 #, fuzzy, c-format msgid "Watchdog fork failed: %s" msgstr "Verbindung zu %s schlug fehl: %s" #: src/hed/daemon/unix/daemon.cpp:110 msgid "Watchdog starting monitoring" msgstr "" #: src/hed/daemon/unix/daemon.cpp:136 #, c-format msgid "Watchdog detected application exit due to signal %u" msgstr "" #: src/hed/daemon/unix/daemon.cpp:138 #, c-format msgid "Watchdog detected application exited with code %u" msgstr "" #: src/hed/daemon/unix/daemon.cpp:140 msgid "Watchdog detected application exit" msgstr "" #: src/hed/daemon/unix/daemon.cpp:149 msgid "" "Watchdog exiting because application was purposely killed or exited itself" msgstr "" #: src/hed/daemon/unix/daemon.cpp:156 msgid "Watchdog detected application timeout or error - killing process" msgstr "" #: src/hed/daemon/unix/daemon.cpp:167 msgid "Watchdog failed to wait till application exited - sending KILL" msgstr "" #: src/hed/daemon/unix/daemon.cpp:179 msgid "Watchdog failed to kill application - giving up and exiting" msgstr "" #: src/hed/daemon/unix/daemon.cpp:200 msgid "Shutdown daemon" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:43 msgid "shutdown" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:46 msgid "exit" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:84 msgid "No server config part of config file" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:159 #, c-format msgid "Unknown log level %s" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:169 #, c-format msgid "Failed to open log file: %s" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:201 msgid "Start foreground" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:250 #, c-format msgid "XML config file %s does not exist" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:254 src/hed/daemon/unix/main_unix.cpp:269 #, c-format msgid "Failed to load service configuration from file %s" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:260 #, c-format msgid "INI config file %s does not exist" msgstr "" #: 
src/hed/daemon/unix/main_unix.cpp:265 src/hed/daemon/unix/main_unix.cpp:287 msgid "Error evaluating profile" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:281 #, fuzzy msgid "Error loading generated configuration" msgstr "Fehler beim Laden der generierten Konfiguration" #: src/hed/daemon/unix/main_unix.cpp:292 msgid "Failed to load service configuration from any default config file" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:353 msgid "Schema validation error" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:368 msgid "Configuration root element is not " msgstr "" #: src/hed/daemon/unix/main_unix.cpp:384 #, c-format msgid "Cannot switch to group (%s)" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:394 #, c-format msgid "Cannot switch to primary group for user (%s)" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:399 #, c-format msgid "Cannot switch to user (%s)" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:417 #, fuzzy msgid "Failed to load service side MCCs" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/daemon/unix/main_unix.cpp:419 src/tests/count/test_service.cpp:29 #: src/tests/echo/test.cpp:30 src/tests/echo/test_service.cpp:29 msgid "Service side MCCs are loaded" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:426 msgid "Unexpected arguments supplied" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:93 src/hed/dmc/acix/DataPointACIX.cpp:342 #: src/hed/dmc/rucio/DataPointRucio.cpp:220 #: src/hed/dmc/rucio/DataPointRucio.cpp:462 #, fuzzy, c-format msgid "No locations found for %s" msgstr "Keine locations gefunden für %s" #: src/hed/dmc/acix/DataPointACIX.cpp:121 #, c-format msgid "Found none or multiple URLs (%s) in ACIX URL: %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:131 #, fuzzy, c-format msgid "Cannot handle URL %s" msgstr "Kann URL %s nicht verarbeiten" #: src/hed/dmc/acix/DataPointACIX.cpp:138 #, fuzzy, c-format msgid "Could not resolve original source of %s: out of time" msgstr "Konnte temporäre Datei nicht anlegen: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:144 #, fuzzy, c-format msgid "Could not resolve original source of %s: %s" msgstr "Konnte temporäre Datei nicht anlegen: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:160 #, c-format msgid "Querying ACIX server at %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:161 #, c-format msgid "Calling acix with query %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:167 #, fuzzy, c-format msgid "Failed to query ACIX: %s" msgstr "Fehler bei Lesen von Proxy-Datei: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:171 #: src/hed/dmc/acix/DataPointACIX.cpp:308 #, fuzzy, c-format msgid "Failed to parse ACIX response: %s" msgstr "Fehler bei Anlegen von GSI Context: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:298 #, fuzzy, c-format msgid "ACIX returned %s" msgstr "XACML Anfrage: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:319 #, fuzzy, c-format msgid "No locations for %s" msgstr "Keine locations gefunden für %s" #: src/hed/dmc/acix/DataPointACIX.cpp:325 #, fuzzy, c-format msgid "%s: ACIX Location: %s" msgstr "Zugriffslist location: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:327 #, c-format msgid "%s: Location %s not accessible remotely, skipping" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:86 #, c-format msgid "Unknown channel %s for stdio protocol" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:93 #, fuzzy, c-format msgid "Failed to open stdio channel %s" msgstr "Fehler bei Öffnen von Datenkanal" #: src/hed/dmc/file/DataPointFile.cpp:94 #, fuzzy,
c-format msgid "Failed to open stdio channel %d" msgstr "Fehler bei Öffnen von Datenkanal" #: src/hed/dmc/file/DataPointFile.cpp:334 #, fuzzy, c-format msgid "fsync of file %s failed: %s" msgstr "Verbindung zu %s schlug fehl: %s" #: src/hed/dmc/file/DataPointFile.cpp:338 #: src/hed/dmc/file/DataPointFile.cpp:345 #, fuzzy, c-format msgid "closing file %s failed: %s" msgstr "Verbindung zu %s schlug fehl: %s" #: src/hed/dmc/file/DataPointFile.cpp:364 #, c-format msgid "File is not accessible: %s" msgstr "Datei ist nicht zugreifbar: %s" #: src/hed/dmc/file/DataPointFile.cpp:370 #: src/hed/dmc/file/DataPointFile.cpp:455 #, fuzzy, c-format msgid "Can't stat file: %s: %s" msgstr "Kann stat-Informationen zu Datei nicht einholen: %s" #: src/hed/dmc/file/DataPointFile.cpp:416 #: src/hed/dmc/file/DataPointFile.cpp:422 #, fuzzy, c-format msgid "Can't stat stdio channel %s" msgstr "Kann stat-Informationen zu Datei nicht einholen: %s" #: src/hed/dmc/file/DataPointFile.cpp:470 #, fuzzy, c-format msgid "%s is not a directory" msgstr "Klasse %s ist kein Objekt" #: src/hed/dmc/file/DataPointFile.cpp:485 src/hed/dmc/s3/DataPointS3.cpp:440 #: src/hed/dmc/s3/DataPointS3.cpp:550 #, c-format msgid "Failed to read object %s: %s" msgstr "Fehler bei Lesen von Objekt %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:498 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:517 #, fuzzy, c-format msgid "File is not accessible %s: %s" msgstr "Datei ist nicht zugreifbar: %s" #: src/hed/dmc/file/DataPointFile.cpp:504 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:523 #, fuzzy, c-format msgid "Can't delete directory %s: %s" msgstr "Kann Verzeichnis nicht löschen: %s - %s" #: src/hed/dmc/file/DataPointFile.cpp:511 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:530 #, fuzzy, c-format msgid "Can't delete file %s: %s" msgstr "Kann Datei nicht löschen: %s - %s" #: src/hed/dmc/file/DataPointFile.cpp:521 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:335 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:313 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1466 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:545 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:565 #, c-format msgid "Creating directory %s" msgstr "Lege Verzeichnis %s an" #: src/hed/dmc/file/DataPointFile.cpp:529 src/hed/dmc/srm/DataPointSRM.cpp:171 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:579 #, fuzzy, c-format msgid "Renaming %s to %s" msgstr "" "\n" " СоответÑтвие раздел-Ñегмент:\n" #: src/hed/dmc/file/DataPointFile.cpp:531 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:588 #, fuzzy, c-format msgid "Can't rename file %s: %s" msgstr "Kann Datei nicht löschen: %s - %s" #: src/hed/dmc/file/DataPointFile.cpp:559 #, fuzzy, c-format msgid "Failed to open %s for reading: %s" msgstr "Fehler bei Öffnen von Datei %s zum Lesen: %s" #: src/hed/dmc/file/DataPointFile.cpp:574 #: src/hed/dmc/file/DataPointFile.cpp:709 #, fuzzy, c-format msgid "Failed to switch user id to %d/%d" msgstr "fehler bei Senden zu %d von %s" #: src/hed/dmc/file/DataPointFile.cpp:580 #, fuzzy, c-format msgid "Failed to create/open file %s: %s" msgstr "Fehler bei Anlegen/Öffnen von Datei %s (%d)" #: src/hed/dmc/file/DataPointFile.cpp:596 #, fuzzy msgid "Failed to create thread" msgstr "Fehler bei Anlegen von ldap bind thread (%s)" #: src/hed/dmc/file/DataPointFile.cpp:676 #, c-format msgid "Invalid url: %s" msgstr "Ungültige url: %s" #: src/hed/dmc/file/DataPointFile.cpp:685 src/hed/libs/data/FileCache.cpp:482 #, fuzzy, c-format msgid "Failed to create directory %s: %s" msgstr "Fehler bei Anlegen/Finden von Verzeichnis %s, (%d)" #: 
src/hed/dmc/file/DataPointFile.cpp:698 #: src/hed/dmc/file/DataPointFile.cpp:717 #, fuzzy, c-format msgid "Failed to create file %s: %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:729 #, c-format msgid "setting file %s to size %llu" msgstr "Setze Datei %s zu Größe %llu" #: src/hed/dmc/file/DataPointFile.cpp:749 #, fuzzy, c-format msgid "Failed to preallocate space for %s" msgstr "Fehler bei Reservieren von Platz" #: src/hed/dmc/file/DataPointFile.cpp:790 src/hed/libs/data/FileCache.cpp:856 #, fuzzy, c-format msgid "Failed to clean up file %s: %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:799 #, fuzzy, c-format msgid "Error during file validation. Can't stat file %s: %s" msgstr "Fehler bei Lesen von gültiger und existierender Lock-Datei %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:803 #, c-format msgid "" "Error during file validation: Local file size %llu does not match source " "file size %llu for file %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:53 #, fuzzy, c-format msgid "Using proxy %s" msgstr "Nutze space token %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:54 #, fuzzy, c-format msgid "Using key %s" msgstr "Nutze space token %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:55 #, fuzzy, c-format msgid "Using cert %s" msgstr "Nutze space token %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:113 msgid "Locations are missing in destination LFC URL" msgstr "Locations fehlen in destination LFC URL" #: src/hed/dmc/gfal/DataPointGFAL.cpp:119 #, c-format msgid "Duplicate replica found in LFC: %s" msgstr "Doppelte replica gefunden in LFC: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:121 #, c-format msgid "Adding location: %s - %s" msgstr "Füge location hinzu: %s - %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:129 #: src/hed/libs/data/DataPointIndex.cpp:161 #, fuzzy, c-format msgid "Add location: url: %s" msgstr "Füge location hinzu: url: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:130 #: src/hed/libs/data/DataPointIndex.cpp:162 #, fuzzy, c-format msgid "Add location: metadata: %s" msgstr "Füge location hinzu: Metadaten: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:150 #: src/hed/dmc/gfal/DataPointGFAL.cpp:310 #, fuzzy, c-format msgid "gfal_open failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:163 #: src/hed/dmc/gfal/DataPointGFAL.cpp:223 #: src/hed/dmc/gfal/DataPointGFAL.cpp:249 #: src/hed/dmc/gfal/DataPointGFAL.cpp:324 #: src/hed/dmc/gfal/DataPointGFAL.cpp:403 #: src/hed/dmc/gfal/DataPointGFAL.cpp:430 #, fuzzy, c-format msgid "gfal_close failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:195 #, fuzzy, c-format msgid "gfal_read failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:237 #, fuzzy msgid "StopReading starts waiting for transfer_condition." msgstr "stop_reading_ftp: warte auf Beenden von Transfer" #: src/hed/dmc/gfal/DataPointGFAL.cpp:239 #, fuzzy msgid "StopReading finished waiting for transfer_condition." 
msgstr "stop_reading_ftp: warte auf Beenden von Transfer" #: src/hed/dmc/gfal/DataPointGFAL.cpp:271 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:68 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:73 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:42 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:47 #, fuzzy, c-format msgid "No locations defined for %s" msgstr "Keine locations gefunden für %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:278 #, fuzzy, c-format msgid "Failed to set LFC replicas: %s" msgstr "Fehler beim Entfernen von LFC Verzeichnis: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:304 #, fuzzy, c-format msgid "gfal_mkdir failed (%s), trying to write anyway" msgstr "start_writing_ftp: mkdir fehlgeschlagen - versuche weiter zu schreiben" #: src/hed/dmc/gfal/DataPointGFAL.cpp:359 #, c-format msgid "DataPointGFAL::write_file got position %d and offset %d, has to seek" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:388 #, fuzzy, c-format msgid "gfal_write failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:418 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:388 #, fuzzy msgid "StopWriting starts waiting for transfer_condition." msgstr "StopWriting: Abbruch der Verbindung" #: src/hed/dmc/gfal/DataPointGFAL.cpp:420 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:390 #, fuzzy msgid "StopWriting finished waiting for transfer_condition." msgstr "stop_reading_ftp: warte auf Beenden von Transfer" #: src/hed/dmc/gfal/DataPointGFAL.cpp:451 #, fuzzy, c-format msgid "gfal_stat failed: %s" msgstr "Kann stat-Informationen zu Datei nicht einholen: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:496 #, c-format msgid "gfal_listxattr failed, no replica information can be obtained: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:537 #, fuzzy, c-format msgid "gfal_opendir failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:549 #, c-format msgid "List will stat the URL %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:556 #, fuzzy, c-format msgid "gfal_closedir failed: %s" msgstr "Verbindung zu %s schlug fehl: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:584 #, fuzzy, c-format msgid "gfal_rmdir failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:587 #, fuzzy, c-format msgid "gfal_unlink failed: %s" msgstr "globus_io_cancel ist fehlgeschlagen: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:604 #, fuzzy, c-format msgid "gfal_mkdir failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:619 #, fuzzy, c-format msgid "gfal_rename failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:19 #, fuzzy, c-format msgid "Failed to obtain bytes transferred: %s" msgstr "Konnte Listing nicht via FTP beziehen: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:42 #, fuzzy, c-format msgid "Failed to get initiate GFAL2 parameter handle: %s" msgstr "Connect: Konnte init handle nicht initialisieren: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:49 #, fuzzy, c-format msgid "Failed to get initiate new GFAL2 context: %s" msgstr "Fehler bei Anlegen von GSI Context: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:56 #, fuzzy, c-format msgid "Failed to set GFAL2 monitor callback: %s" msgstr "Fehler bei Anlegen von soft link: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:64 #, fuzzy, c-format msgid "Failed to set overwrite option in GFAL2: %s" msgstr "Fehler beim Entfernen der location vom LFC: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:72 #, c-format msgid 
"Failed to set GFAL2 transfer timeout, will use default: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:84 #, fuzzy msgid "Transfer failed" msgstr "Einige Transfers schlugen fehl" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:92 #, fuzzy msgid "Transfer succeeded" msgstr "Transfer vollständig" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:38 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:54 msgid "ftp_complete_callback: success" msgstr "ftp_complete_callback: erfolgreich" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:44 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:60 #, c-format msgid "ftp_complete_callback: error: %s" msgstr "ftp_complete_callback: Fehler: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:60 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:76 #, fuzzy msgid "ftp_check_callback" msgstr "ftp_check_callback" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:62 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:90 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:116 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:135 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:305 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:340 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:678 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:847 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:879 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:913 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1052 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1104 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1114 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1122 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1130 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1138 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1144 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:78 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:106 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:283 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:319 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:729 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:762 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:799 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:930 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:994 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1004 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1012 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1020 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1028 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1034 #: src/services/gridftpd/commands.cpp:1226 #: src/services/gridftpd/dataread.cpp:76 src/services/gridftpd/dataread.cpp:173 #: src/services/gridftpd/datawrite.cpp:59 #: src/services/gridftpd/datawrite.cpp:146 #, c-format msgid "Globus error: %s" msgstr "Globus Fehler: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:73 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:89 msgid "Excessive data received while checking file access" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:89 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:105 #, fuzzy msgid "Registration of Globus FTP buffer failed - cancel check" msgstr "Registrierung von Globus FTP buffer fehlgeschlagen - breche check ab" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:115 msgid "check_ftp: globus_ftp_client_size failed" msgstr "check_ftp: globus_ftp_client_size fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:119 msgid "check_ftp: timeout waiting for size" msgstr "check_ftp: Zeitüberschreitung bei Warten für Größe" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:124 msgid "check_ftp: failed to get file's size" msgstr "check_ftp: konnten 
Dateigröße nicht bestimmen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:127 #, fuzzy, c-format msgid "check_ftp: obtained size: %lli" msgstr "Check: erhielt Größe: %lli" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:134 msgid "check_ftp: globus_ftp_client_modification_time failed" msgstr "check_ftp: globus_ftp_client_modification_time fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:138 msgid "check_ftp: timeout waiting for modification_time" msgstr "check_ftp: Zeitüberschreitung bei Warten auf modification_time" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:143 msgid "check_ftp: failed to get file's modification time" msgstr "check_ftp: konnte Modification time von Datei nicht erhalten" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:148 #, fuzzy, c-format msgid "check_ftp: obtained modification date: %s" msgstr "Check: erhielt Erstelldatum: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:167 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:145 msgid "check_ftp: globus_ftp_client_get failed" msgstr "check_ftp: globus_ftp_client_get fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:174 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:152 msgid "check_ftp: globus_ftp_client_register_read" msgstr "check_ftp: globus_ftp_client_register_read" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:185 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:164 #, fuzzy msgid "check_ftp: timeout waiting for partial get" msgstr "check_ftp: Zeitüberschreitung beim Warten auf partial get" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:215 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:191 #, fuzzy, c-format msgid "File delete failed, attempting directory delete for %s" msgstr "Löschen von Datei schlug fehl, versuche als Verzeichnis zu löschen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:225 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:202 #, fuzzy msgid "delete_ftp: globus_ftp_client_delete failed" msgstr "delete_ftp: globus_ftp_client_delete fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:231 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:252 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:208 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:230 #, fuzzy msgid "delete_ftp: timeout waiting for delete" msgstr "list_files_ftp: Zeitüberschreitung bei Warten auf Dateigröße " #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:246 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:224 #, fuzzy msgid "delete_ftp: globus_ftp_client_rmdir failed" msgstr "delete_ftp: globus_ftp_client_delete fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:301 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:278 #, fuzzy, c-format msgid "mkdir_ftp: making %s" msgstr "mkdir_ftp: erstelle %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:309 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:287 #, fuzzy msgid "mkdir_ftp: timeout waiting for mkdir" msgstr "mkdir_ftp: Zeitüberschreitung bei mkdir" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:344 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:323 #, fuzzy msgid "Timeout waiting for mkdir" msgstr "mkdir_ftp: Zeitüberschreitung bei mkdir" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:370 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:346 #, fuzzy msgid "start_reading_ftp" msgstr "start_reading_ftp" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:374 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:350 msgid "start_reading_ftp: globus_ftp_client_get" msgstr "start_reading_ftp: globus_ftp_client_get fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:388 #: 
src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:364 msgid "start_reading_ftp: globus_ftp_client_get failed" msgstr "start_reading_ftp: globus_ftp_client_get fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:399 msgid "start_reading_ftp: globus_thread_create failed" msgstr "start_reading_ftp: globus_thread_create fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:418 msgid "stop_reading_ftp: aborting connection" msgstr "stop_reading_ftp: Abbruch der Verbindung" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:425 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:647 #, fuzzy, c-format msgid "Failed to abort transfer of ftp file: %s" msgstr "Fehler bei Ablage von FTP Datei" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:426 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:648 msgid "Assuming transfer is already aborted or failed." msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:433 msgid "stop_reading_ftp: waiting for transfer to finish" msgstr "stop_reading_ftp: warte auf Beenden von Transfer" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:435 #, fuzzy, c-format msgid "stop_reading_ftp: exiting: %s" msgstr "stop-reading_ftp: verlasse: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:449 msgid "ftp_read_thread: get and register buffers" msgstr "ftp_read_thread: beziehe und registriere Puffer" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:455 #, c-format msgid "ftp_read_thread: for_read failed - aborting: %s" msgstr "ftp_read_thread: for_read fehlgeschlagen - Abbruch: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:465 #, fuzzy, c-format msgid "ftp_read_thread: data callback failed - aborting: %s" msgstr "ftp_read_thread: for_read fehlgeschlagen - Abbruch: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:477 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:380 #, c-format msgid "ftp_read_thread: Globus error: %s" msgstr "ftp_read_thread: Globus Fehler: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:490 #, c-format msgid "ftp_read_thread: too many registration failures - abort: %s" msgstr "ftp_read_thread: zu viele Registrierungsfehler - Abbruch: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:495 #, fuzzy, c-format msgid "ftp_read_thread: failed to register Globus buffer - will try later: %s" msgstr "" "ftp_read_thread: Fehler bei Registrieren von Globus Puffer - verschoben auf " "später: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:508 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:396 msgid "ftp_read_thread: waiting for eof" msgstr "ftp_read_thread: warte auf EOF" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:512 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:400 #, fuzzy msgid "ftp_read_thread: waiting for buffers released" msgstr "ftp_read_thread: warte auf EOF" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:516 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:408 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:662 #, fuzzy msgid "ftp_read_thread: failed to release buffers - leaking" msgstr "" "ftp_read_thread: Fehler bei Registrieren von Globus Puffer - verschoben auf " "später: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:521 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:415 #, fuzzy msgid "ftp_read_thread: exiting" msgstr "ftp_read_thread: Beenden" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:539 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:436 #, fuzzy, c-format msgid "ftp_read_callback: failure: %s" msgstr "ftp_read_callback: Fehler" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:542 msgid "ftp_read_callback: success" msgstr "ftp_read_callback: Erfolg" #: 
src/hed/dmc/gridftp/DataPointGridFTP.cpp:558 #, fuzzy msgid "Failed to get ftp file" msgstr "Fehler bei Bezug von FTP Datei" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:594 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:543 #, fuzzy msgid "start_writing_ftp: mkdir" msgstr "start_writing_ftp: mkdir" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:597 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:545 msgid "start_writing_ftp: mkdir failed - still trying to write" msgstr "start_writing_ftp: mkdir fehlgeschlagen - versuche weiter zu schreiben" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:599 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:547 #, fuzzy msgid "start_writing_ftp: put" msgstr "start_writing_ftp: put" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:613 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:561 msgid "start_writing_ftp: put failed" msgstr "start_writing_ftp: put fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:623 msgid "start_writing_ftp: globus_thread_create failed" msgstr "start_writing_ftp: globus_thread_create fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:640 #: src/hed/libs/data/DataPointDelegate.cpp:307 #, fuzzy msgid "StopWriting: aborting connection" msgstr "StopWriting: Abbruch der Verbindung" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:664 #: src/hed/libs/data/DataPointDelegate.cpp:321 #, fuzzy, c-format msgid "StopWriting: Calculated checksum %s" msgstr "Errechnete checksum: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:668 #: src/hed/libs/data/DataPointDelegate.cpp:325 #, fuzzy, c-format msgid "StopWriting: looking for checksum of %s" msgstr "list_files_ftp: Suche nach Größe von %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:677 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:912 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:798 #, fuzzy msgid "list_files_ftp: globus_ftp_client_cksm failed" msgstr "list_files_ftp: globus_ftp_client_size fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:681 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:916 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:802 #, fuzzy msgid "list_files_ftp: timeout waiting for cksum" msgstr "list_files_ftp: Zeitüberschreitung bei Warten auf Dateigröße " #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:688 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:923 #, fuzzy msgid "list_files_ftp: no checksum information possible" msgstr "list_files_ftp: Bezug von Zeitpunkt der letzten Dateiänderung von %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:691 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:926 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:815 #, fuzzy, c-format msgid "list_files_ftp: checksum %s" msgstr "meta_get_data: checksum: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:694 #: src/hed/libs/data/DataPointDelegate.cpp:332 msgid "" "Checksum type returned by server is different to requested type, cannot " "compare" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:696 #: src/hed/libs/data/DataPointDelegate.cpp:334 #, c-format msgid "Calculated checksum %s matches checksum reported by server" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:699 #: src/hed/libs/data/DataPointDelegate.cpp:337 #, c-format msgid "" "Checksum mismatch between calculated checksum %s and checksum reported by " "server %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:721 msgid "ftp_write_thread: get and register buffers" msgstr "ftp_write_thread: Beziehe und Registriere Puffer" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:725 msgid "ftp_write_thread: for_write failed - 
aborting" msgstr "ftp_write_thread: for_write fehlgeschlagen - Abbruch" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:743 #, fuzzy msgid "ftp_write_thread: data callback failed - aborting" msgstr "ftp_write_thread: for_write fehlgeschlagen - Abbruch" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:759 #, fuzzy msgid "ftp_write_thread: waiting for eof" msgstr "ftp_read_thread: warte auf EOF" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:763 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:660 #, fuzzy msgid "ftp_write_thread: waiting for buffers released" msgstr "ftp_read_thread: warte auf EOF" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:771 #, fuzzy msgid "ftp_write_thread: failed to release buffers - leaking" msgstr "ftp_write_thread: Beziehe und Registriere Puffer" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:776 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:667 #, fuzzy msgid "ftp_write_thread: exiting" msgstr "ftp_read_thread: Beenden" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:799 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:686 #, fuzzy, c-format msgid "ftp_write_callback: failure: %s" msgstr "ftp_write_callback: Fehler" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:802 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:688 #, fuzzy, c-format msgid "ftp_write_callback: success %s" msgstr "ftp_write_callback: Erfolg" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:817 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:704 msgid "Failed to store ftp file" msgstr "Fehler bei Ablage von FTP Datei" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:825 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:709 #, fuzzy msgid "ftp_put_complete_callback: success" msgstr "ftp_complete_callback: erfolgreich" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:841 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:723 #, c-format msgid "list_files_ftp: looking for size of %s" msgstr "list_files_ftp: Suche nach Größe von %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:845 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:727 msgid "list_files_ftp: globus_ftp_client_size failed" msgstr "list_files_ftp: globus_ftp_client_size fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:851 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:852 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:733 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:734 msgid "list_files_ftp: timeout waiting for size" msgstr "list_files_ftp: Zeitüberschreitung bei Warten auf Dateigröße " #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:858 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:740 msgid "list_files_ftp: failed to get file's size" msgstr "list_files_ftp: Fehler bei Bezug von Dateigröße" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:870 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:753 #, c-format msgid "list_files_ftp: looking for modification time of %s" msgstr "list_files_ftp: Bezug von Zeitpunkt der letzten Dateiänderung von %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:876 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:759 msgid "list_files_ftp: globus_ftp_client_modification_time failed" msgstr "list_files_ftp: globus_ftp_client_modification_time fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:883 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:766 msgid "list_files_ftp: timeout waiting for modification_time" msgstr "" "list_files_ftp: Zeitüberschreitung bei Warten auf Zeitpunkt der letzten " "Dateiänderung " #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:891 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:774 msgid 
"list_files_ftp: failed to get file's modification time" msgstr "" "list_files_ftp: Fehler bei Bezug von Zeitpunkt der letzten Dateiänderung" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:903 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:788 #, fuzzy, c-format msgid "list_files_ftp: looking for checksum of %s" msgstr "list_files_ftp: Suche nach Größe von %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:942 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:828 #, fuzzy, c-format msgid "Failed to obtain stat from FTP: %s" msgstr "Konnte Listing nicht via FTP beziehen: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:948 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:833 #, fuzzy msgid "No results returned from stat" msgstr "Keine Antwort von %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:954 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:839 #, c-format msgid "Wrong number of objects (%i) for stat from ftp: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:968 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:852 #, c-format msgid "Unexpected path %s returned from server" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1007 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:885 #, fuzzy, c-format msgid "Failed to obtain listing from FTP: %s" msgstr "Konnte Listing nicht via FTP beziehen: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1050 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:928 #, fuzzy msgid "Rename: globus_ftp_client_move failed" msgstr "check_ftp: globus_ftp_client_get fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1056 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:934 #, fuzzy msgid "Rename: timeout waiting for operation to complete" msgstr "check_ftp: Zeitüberschreitung bei Warten auf modification_time" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1103 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:993 msgid "init_handle: globus_ftp_client_handleattr_init failed" msgstr "init_handle: globus_ftp_client_handleattr_init fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1112 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1002 msgid "init_handle: globus_ftp_client_handleattr_set_gridftp2 failed" msgstr "init_handle: globus_ftp_client_handleattr_set_gridftp2 fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1121 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1011 #, fuzzy msgid "init_handle: globus_ftp_client_handle_init failed" msgstr "init_handle: globus_ftp_client_handle_init fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1128 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1018 msgid "init_handle: globus_ftp_client_operationattr_init failed" msgstr "init_handle: globus_ftp_client_operationattr_init fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1136 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1026 #, fuzzy msgid "init_handle: globus_ftp_client_operationattr_set_allow_ipv6 failed" msgstr "init_handle: globus_ftp_client_operationattr_init fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1142 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1032 #, fuzzy msgid "init_handle: globus_ftp_client_operationattr_set_delayed_pasv failed" msgstr "init_handle: globus_ftp_client_operationattr_init fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1190 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1218 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1084 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1113 #, c-format msgid 
"globus_ftp_client_operationattr_set_authorization: error: %s" msgstr "globus_ftp_client_operationattr_set_authorisation: Fehler: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1217 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1112 #, fuzzy msgid "Failed to set credentials for GridFTP transfer" msgstr "Fehler bei Setzen von Credentials für GridFTP transfer" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1223 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1118 msgid "Using secure data transfer" msgstr "Nutze sicheren Datentransfer" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1228 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1123 msgid "Using insecure data transfer" msgstr "Nutze unsicheren Datentransfer" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1255 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1150 #, fuzzy msgid "~DataPoint: destroy ftp_handle" msgstr "DataPoint::deinit_handle: destroly ftp_handle" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1258 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1153 #, fuzzy msgid "~DataPoint: destroy ftp_handle failed - retrying" msgstr "DataPoint::deinit_handle: destroly ftp_handle" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1276 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1171 #, fuzzy msgid "~DataPoint: failed to destroy ftp_handle - leaking" msgstr "DataPoint::deinit_handle: destroly ftp_handle" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1296 msgid "" "Missing reference to factory and/or module. It is unsafe to use Globus in " "non-persistent mode - (Grid)FTP code is disabled. Report to developers." msgstr "" "Fehlende Referenz zu factory und/doer Module. Es ist unsicher, Globus im " "nicht-persistenten Modus zu nutzen - (Grid)FTP code wurde disabled. Bitte " "die Entwickler informieren." 
#: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:388 #, fuzzy msgid "ftp_read_thread: failed to register buffers" msgstr "ftp_read_thread: beziehe und registriere Puffer" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:403 #, fuzzy msgid "ftp_read_thread: failed to release buffers" msgstr "" "ftp_read_thread: Fehler bei Registrieren von Globus Puffer - verschoben auf " "später: %s" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:441 #, c-format msgid "ftp_read_callback: success - offset=%u, length=%u, eof=%u, allow oof=%u" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:455 #, fuzzy, c-format msgid "ftp_read_callback: delayed data chunk: %llu %llu" msgstr "ftp_read_callback: Fehler" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:462 #, c-format msgid "ftp_read_callback: unexpected data out of order: %llu != %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:469 msgid "ftp_read_callback: too many unexpected out of order chunks" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:490 #, fuzzy, c-format msgid "ftp_read_callback: Globus error: %s" msgstr "ftp_complete_callback: Fehler: %s" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:515 #, fuzzy msgid "ftp_get_complete_callback: Failed to get ftp file" msgstr "ftp_complete_callback: Fehler: %s" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:520 #, fuzzy msgid "ftp_get_complete_callback: success" msgstr "ftp_complete_callback: erfolgreich" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:575 #, fuzzy msgid "start_writing_ftp: waiting for data tag" msgstr "start_writing_ftp: put fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:578 #, fuzzy msgid "start_writing_ftp: failed to read data tag" msgstr "start_writing_ftp: put fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:583 #, fuzzy msgid "start_writing_ftp: waiting for data chunk" msgstr "start_writing_ftp: put fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:585 #, fuzzy msgid "start_writing_ftp: failed to read data chunk" msgstr "start_writing_ftp: put fehlgeschlagen" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:596 #, c-format msgid "ftp_write_thread: data out of order in stream mode: %llu != %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:603 #, fuzzy msgid "ftp_write_thread: too many out of order chunks in stream mode" msgstr "ftp_write_thread: Beziehe und Registriere Puffer" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:608 #, fuzzy, c-format msgid "start_writing_ftp: data chunk: %llu %llu" msgstr "start_writing_ftp: put" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:614 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:640 #, fuzzy, c-format msgid "ftp_write_thread: Globus error: %s" msgstr "ftp_read_thread: Globus Fehler: %s" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:633 #, fuzzy, c-format msgid "start_writing_ftp: delayed data chunk: %llu %llu" msgstr "start_reading_ftp: erzielte Größe: %llu" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:652 #, fuzzy msgid "start_writing_ftp: waiting for some buffers sent" msgstr "start_reading_ftp: Zeitüberschreitung bei Warten auf Dateigröße" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:658 #, fuzzy msgid "ftp_write_thread: waiting for transfer complete" msgstr "ftp_read_thread: warte auf EOF" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:809 #, fuzzy msgid "list_files_ftp: no checksum information supported" msgstr "list_files_ftp: Bezug von Zeitpunkt der letzten Dateiänderung von %s" #: 
src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:811 #, fuzzy msgid "list_files_ftp: no checksum information returned" msgstr "list_files_ftp: Bezug von Zeitpunkt der letzten Dateiänderung von %s" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:906 msgid "Too many failures to obtain checksum - giving up" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1266 msgid "Expecting Command and URL provided" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1273 #: src/hed/libs/data/DataExternalHelper.cpp:376 #, fuzzy msgid "Expecting Command among arguments" msgstr "Kann doc Argument nicht anlegen" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1277 #: src/hed/libs/data/DataExternalHelper.cpp:380 #, fuzzy msgid "Expecting URL among arguments" msgstr "Kann doc Argument nicht anlegen" #: src/hed/dmc/gridftp/Lister.cpp:221 src/hed/dmc/gridftp/Lister.cpp:289 #: src/hed/dmc/gridftp/Lister.cpp:384 src/hed/dmc/gridftp/Lister.cpp:767 #: src/hed/dmc/gridftp/Lister.cpp:812 #, c-format msgid "Failure: %s" msgstr "Fehler: %s" #: src/hed/dmc/gridftp/Lister.cpp:288 #, fuzzy msgid "Error getting list of files (in list)" msgstr "Fehler bei Bezug von Dateiliste (in Liste)" #: src/hed/dmc/gridftp/Lister.cpp:290 #, fuzzy msgid "Assuming - file not found" msgstr "Vermuting - Datei nicht gefunden" #: src/hed/dmc/gridftp/Lister.cpp:307 #, fuzzy, c-format msgid "list record: %s" msgstr "Listen-Eintrag: %s" #: src/hed/dmc/gridftp/Lister.cpp:362 msgid "Failed reading list of files" msgstr "Fehler bei Lesen von Dateiliste" #: src/hed/dmc/gridftp/Lister.cpp:398 msgid "Failed reading data" msgstr "Fehler bei Lesen von Daten" #: src/hed/dmc/gridftp/Lister.cpp:426 #, c-format msgid "Command: %s" msgstr "Kommando: %s" #: src/hed/dmc/gridftp/Lister.cpp:430 src/hed/dmc/gridftp/Lister.cpp:471 #: src/hed/mcc/http/PayloadHTTP.cpp:990 msgid "Memory allocation error" msgstr "Speicherallokationsfehler" #: src/hed/dmc/gridftp/Lister.cpp:438 #, c-format msgid "%s failed" msgstr "%s fehlgeschlagen" #: src/hed/dmc/gridftp/Lister.cpp:442 #, fuzzy msgid "Command is being sent" msgstr "Kommando wird gesendet" #: src/hed/dmc/gridftp/Lister.cpp:447 msgid "Waiting for response" msgstr "Warte vor Antwort" #: src/hed/dmc/gridftp/Lister.cpp:452 #, fuzzy msgid "Callback got failure" msgstr "Callback erhielt Fehler" #: src/hed/dmc/gridftp/Lister.cpp:538 #, fuzzy msgid "Failed in globus_cond_init" msgstr "Fehler bei Initialisierung der condition" #: src/hed/dmc/gridftp/Lister.cpp:542 #, fuzzy msgid "Failed in globus_mutex_init" msgstr "Fehler bei Initialisierung des Mutex" #: src/hed/dmc/gridftp/Lister.cpp:549 #, fuzzy msgid "Failed allocating memory for handle" msgstr "Fehler bei Reservieren des Speichers für handle" #: src/hed/dmc/gridftp/Lister.cpp:554 #, fuzzy msgid "Failed in globus_ftp_control_handle_init" msgstr "Memory leak (globus_ftp_control_handle_t)" #: src/hed/dmc/gridftp/Lister.cpp:562 #, fuzzy msgid "Failed to enable IPv6" msgstr "Fehler bei Lesen von Objekt %s" #: src/hed/dmc/gridftp/Lister.cpp:573 src/services/gridftpd/commands.cpp:984 msgid "Closing connection" msgstr "Schließe Verbindung" #: src/hed/dmc/gridftp/Lister.cpp:580 src/hed/dmc/gridftp/Lister.cpp:595 msgid "Timeout waiting for Globus callback - leaking connection" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:605 msgid "Closed successfully" msgstr "Verbindung erfolgreich geschlossen" #: src/hed/dmc/gridftp/Lister.cpp:607 #, fuzzy msgid "Closing may have failed" msgstr "Verbindung zu %s schlug fehl: %s" #: src/hed/dmc/gridftp/Lister.cpp:634 msgid 
"Waiting for globus handle to settle" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:639 #, c-format msgid "Handle is not in proper state %u/%u" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:645 msgid "Globus handle is stuck" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:661 #, c-format msgid "Failed destroying handle: %s. Can't handle such situation." msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:684 #, fuzzy, c-format msgid "EPSV failed: %s" msgstr "PASV fehlgeschlagen: %s" #: src/hed/dmc/gridftp/Lister.cpp:688 #, fuzzy msgid "EPSV failed" msgstr "PASV fehlgeschlagen" #: src/hed/dmc/gridftp/Lister.cpp:695 #, c-format msgid "PASV failed: %s" msgstr "PASV fehlgeschlagen: %s" #: src/hed/dmc/gridftp/Lister.cpp:699 msgid "PASV failed" msgstr "PASV fehlgeschlagen" #: src/hed/dmc/gridftp/Lister.cpp:765 #, fuzzy msgid "Failed to apply local address to data connection" msgstr "Fehler bei Schließen von Verbindung 1" #: src/hed/dmc/gridftp/Lister.cpp:783 #, fuzzy msgid "Can't parse host and/or port in response to EPSV/PASV" msgstr "Kann host and port nicht aus Antwort zu PASV herauslesen" #: src/hed/dmc/gridftp/Lister.cpp:788 #, fuzzy, c-format msgid "Data channel: %d.%d.%d.%d:%d" msgstr "Datenkanal: %d.%d.%d.%d %d" #: src/hed/dmc/gridftp/Lister.cpp:806 #, fuzzy, c-format msgid "Data channel: [%s]:%d" msgstr "Datenkanal: %d.%d.%d.%d %d" #: src/hed/dmc/gridftp/Lister.cpp:810 #, fuzzy msgid "Obtained host and address are not acceptable" msgstr "Erhaltener host und Adresse sind nicht akzeptabel" #: src/hed/dmc/gridftp/Lister.cpp:820 msgid "Failed to open data channel" msgstr "Fehler bei Öffnen von Datenkanal" #: src/hed/dmc/gridftp/Lister.cpp:838 #, c-format msgid "Unsupported protocol in url %s" msgstr "Nicht-unterstzütztes Protrokoll in URL %s" #: src/hed/dmc/gridftp/Lister.cpp:850 msgid "Reusing connection" msgstr "Wiederholte Nutzung von Verbindung" #: src/hed/dmc/gridftp/Lister.cpp:874 #, c-format msgid "Failed connecting to server %s:%d" msgstr "Fehler bei Verbinden zu server %s:%d" #: src/hed/dmc/gridftp/Lister.cpp:880 #, c-format msgid "Failed to connect to server %s:%d" msgstr "Fehler bei Verbinden zu server %s:%d" #: src/hed/dmc/gridftp/Lister.cpp:896 #, fuzzy msgid "Missing authentication information" msgstr "Ungültige Authentisierungs-Information" #: src/hed/dmc/gridftp/Lister.cpp:905 src/hed/dmc/gridftp/Lister.cpp:919 #, fuzzy, c-format msgid "Bad authentication information: %s" msgstr "Ungültige Authentisierungs-Information" #: src/hed/dmc/gridftp/Lister.cpp:928 src/hed/dmc/gridftp/Lister.cpp:943 #, fuzzy, c-format msgid "Failed authenticating: %s" msgstr "Fehler bei Authentisieren" #: src/hed/dmc/gridftp/Lister.cpp:935 msgid "Failed authenticating" msgstr "Fehler bei Authentisieren" #: src/hed/dmc/gridftp/Lister.cpp:970 src/hed/dmc/gridftp/Lister.cpp:1126 #, c-format msgid "DCAU failed: %s" msgstr "DCAU fehlgeschlagen: %s" #: src/hed/dmc/gridftp/Lister.cpp:974 src/hed/dmc/gridftp/Lister.cpp:1131 msgid "DCAU failed" msgstr "DCAU fehlgeschlagen" #: src/hed/dmc/gridftp/Lister.cpp:994 #, fuzzy msgid "MLST is not supported - trying LIST" msgstr "MLSD ist nicht unterstützt - versuche NLST" #: src/hed/dmc/gridftp/Lister.cpp:1010 #, fuzzy, c-format msgid "Immediate completion expected: %s" msgstr "Sofortige Vervollständigung: %s" #: src/hed/dmc/gridftp/Lister.cpp:1014 #, fuzzy msgid "Immediate completion expected" msgstr "Sofortige Vervollständigung: %s" #: src/hed/dmc/gridftp/Lister.cpp:1027 #, fuzzy, c-format msgid "Missing information in reply: %s" msgstr "Fehler bei Bezug von Information für Job: 
%s" #: src/hed/dmc/gridftp/Lister.cpp:1061 #, c-format msgid "Missing final reply: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1085 #, fuzzy, c-format msgid "Unexpected immediate completion: %s" msgstr "Sofortige Vervollständigung: %s" #: src/hed/dmc/gridftp/Lister.cpp:1097 #, fuzzy, c-format msgid "LIST/MLST failed: %s" msgstr "NLST/UMLSD fehlgeschlagen: %s" #: src/hed/dmc/gridftp/Lister.cpp:1102 #, fuzzy msgid "LIST/MLST failed" msgstr "NLST/UMLSD fehlgeschlagen" #: src/hed/dmc/gridftp/Lister.cpp:1152 msgid "MLSD is not supported - trying NLST" msgstr "MLSD ist nicht unterstützt - versuche NLST" #: src/hed/dmc/gridftp/Lister.cpp:1166 #, fuzzy, c-format msgid "Immediate completion: %s" msgstr "Sofortige Vervollständigung: %s" #: src/hed/dmc/gridftp/Lister.cpp:1174 #, c-format msgid "NLST/MLSD failed: %s" msgstr "NLST/UMLSD fehlgeschlagen: %s" #: src/hed/dmc/gridftp/Lister.cpp:1180 msgid "NLST/MLSD failed" msgstr "NLST/UMLSD fehlgeschlagen" #: src/hed/dmc/gridftp/Lister.cpp:1201 #, c-format msgid "Data transfer aborted: %s" msgstr "Datentransfer abgebrochen: %s" #: src/hed/dmc/gridftp/Lister.cpp:1206 msgid "Data transfer aborted" msgstr "Datentransfer abgebrochen" #: src/hed/dmc/gridftp/Lister.cpp:1218 msgid "Failed to transfer data" msgstr "Fehler bei Transfer von Daten" #: src/hed/dmc/http/DataPointHTTP.cpp:391 #: src/hed/dmc/http/DataPointHTTP.cpp:520 #: src/hed/dmc/http/DataPointHTTP.cpp:601 #: src/hed/dmc/http/DataPointHTTP.cpp:1003 #: src/hed/dmc/http/DataPointHTTP.cpp:1147 #: src/hed/dmc/http/DataPointHTTP.cpp:1292 #, fuzzy, c-format msgid "Redirecting to %s" msgstr "Weiterleitung zu neuer URL: %s" #: src/hed/dmc/http/DataPointHTTP.cpp:673 #, fuzzy, c-format msgid "Stat: obtained size %llu" msgstr "StartReading: erhielt Größe: %" #: src/hed/dmc/http/DataPointHTTP.cpp:677 #, fuzzy, c-format msgid "Stat: obtained modification time %s" msgstr "Check: erhielt Erstelldatum: %s" #: src/hed/dmc/http/DataPointHTTP.cpp:906 #, fuzzy, c-format msgid "Check: obtained size %llu" msgstr "Check: erhielt Größe: %lli" #: src/hed/dmc/http/DataPointHTTP.cpp:908 #, fuzzy, c-format msgid "Check: obtained modification time %s" msgstr "Check: erhielt Erstelldatum: %s" #: src/hed/dmc/http/DataPointHTTP.cpp:1020 #: src/hed/dmc/http/DataPointHTTP.cpp:1167 #, fuzzy, c-format msgid "HTTP failure %u - %s" msgstr "Proxy Pfad: %s" #: src/hed/dmc/ldap/DataPointLDAP.cpp:36 #, fuzzy msgid "" "Missing reference to factory and/or module. Currently safe unloading of LDAP " "DMC is not supported. Report to developers." msgstr "" "Fehlende Referenz zu factory und/doer Module. Es ist unsicher, Globus im " "nicht-persistenten Modus zu nutzen - (Grid)FTP code wurde disabled. Bitte " "die Entwickler informieren." 
#: src/hed/dmc/ldap/LDAPQuery.cpp:151 msgid "SASL Interaction" msgstr "SASL Interaktion" #: src/hed/dmc/ldap/LDAPQuery.cpp:199 #, fuzzy, c-format msgid "Challenge: %s" msgstr "Herausforderung: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:203 #, c-format msgid "Default: %s" msgstr "Voreinstellung: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:279 #, c-format msgid "LDAPQuery: Initializing connection to %s:%d" msgstr "LDAPQuery: Initialisiere Verbindung zu %s:%d" #: src/hed/dmc/ldap/LDAPQuery.cpp:283 #, c-format msgid "LDAP connection already open to %s" msgstr "LDAP Verbindung bereits offen zu %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:297 #, c-format msgid "Could not open LDAP connection to %s" msgstr "Konnte LDAP Verbindung nicht öffnen zu %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:318 #, c-format msgid "Failed to create ldap bind thread (%s)" msgstr "Fehler bei Anlegen von ldap bind thread (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:325 #, c-format msgid "Ldap bind timeout (%s)" msgstr "Ldap bind timeout (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:332 #, c-format msgid "Failed to bind to ldap server (%s)" msgstr "Fehler bei Verbinden zu ldap server (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:353 #, c-format msgid "Could not set LDAP network timeout (%s)" msgstr "Konnte LDAP Netzwerk Zeitüberschreitung nicht setzen (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:361 #, c-format msgid "Could not set LDAP timelimit (%s)" msgstr "Konnte LDAP Zeitlimit nicht setzen (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:368 #, c-format msgid "Could not set LDAP protocol version (%s)" msgstr "Konnte LDAP Protokoll Version nicht setzen (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:436 #, c-format msgid "LDAPQuery: Querying %s" msgstr "LDAPQuery: Frage an %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:438 #, c-format msgid " base dn: %s" msgstr " base dn: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:440 #, c-format msgid " filter: %s" msgstr " Filter: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:442 msgid " attributes:" msgstr " Attribute:" #: src/hed/dmc/ldap/LDAPQuery.cpp:445 #: src/services/gridftpd/misc/ldapquery.cpp:399 #, c-format msgid " %s" msgstr " %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:482 src/hed/dmc/ldap/LDAPQuery.cpp:548 #, c-format msgid "%s (%s)" msgstr "%s (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:506 #, c-format msgid "LDAPQuery: Getting results from %s" msgstr "LDAPQuery: Erhalte Ergebnisse von %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:509 #, c-format msgid "Error: no LDAP query started to %s" msgstr "Fehler: keine LDAP Anfrage gestartet bei %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:543 #, c-format msgid "LDAP query timed out: %s" msgstr "Zeitüberschreitung bei LDAP Anfrage: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:23 #, c-format msgid "Replacing existing token for %s in Rucio token cache" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:36 #, c-format msgid "Found existing token for %s in Rucio token cache with expiry time %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:39 #, c-format msgid "Rucio token for %s has expired or is about to expire" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:101 #, c-format msgid "Extracted nickname %s from credentials to use for RUCIO_ACCOUNT" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:104 msgid "Failed to extract VOMS nickname from proxy" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:106 #, fuzzy, c-format msgid "Using Rucio account %s" msgstr "Nutze space token %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:146 #, c-format msgid "" "Bad path for %s: Rucio supports read/write at /objectstores and read-only " "at /replicas"
msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:162 #: src/services/candypond/CandyPond.cpp:140 #: src/services/candypond/CandyPond.cpp:347 #, fuzzy, c-format msgid "Can't handle URL %s" msgstr "Kann stat-Informationen zu Datei nicht einholen: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:310 #, c-format msgid "Acquired auth token for %s: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:367 #, fuzzy, c-format msgid "Rucio returned %s" msgstr "unbekannter return code %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:393 #, fuzzy, c-format msgid "Failed to parse Rucio response: %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:399 #, c-format msgid "Filename not returned in Rucio response: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:405 #, fuzzy, c-format msgid "Unexpected name returned in Rucio response: %s" msgstr "Sofortige Vervollständigung: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:411 #, fuzzy, c-format msgid "No pfns returned in Rucio response: %s" msgstr "Sofortige Vervollständigung: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:422 #, fuzzy, c-format msgid "Cannot determine replica type for %s" msgstr "Kann Funktion %s nicht anlegen" #: src/hed/dmc/rucio/DataPointRucio.cpp:424 #, c-format msgid "%s: replica type %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:427 #, fuzzy, c-format msgid "Skipping %s replica %s" msgstr "Suche nache Existenz von %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:447 #, c-format msgid "No filesize information returned in Rucio response for %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:450 #, c-format msgid "%s: size %llu" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:454 #, c-format msgid "No checksum information returned in Rucio response for %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:457 #, fuzzy, c-format msgid "%s: checksum %s" msgstr "Errechneted checksum: %s" #: src/hed/dmc/s3/DataPointS3.cpp:621 #, fuzzy, c-format msgid "Failed to write object %s: %s" msgstr "Fehler bei Lesen von Objekt %s: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:59 #, fuzzy, c-format msgid "TURL %s cannot be handled" msgstr "PDP: %s kann nicht geladen werden" #: src/hed/dmc/srm/DataPointSRM.cpp:86 #, c-format msgid "Check: looking for metadata: %s" msgstr "Check: looking für Metadata: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:97 #, c-format msgid "Check: obtained size: %lli" msgstr "Check: erhielt Größe: %lli" #: src/hed/dmc/srm/DataPointSRM.cpp:103 #, c-format msgid "Check: obtained checksum: %s" msgstr "Check: erhielt checksum: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:107 #, fuzzy, c-format msgid "Check: obtained modification date: %s" msgstr "Check: erhielt Erstelldatum: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:111 #, fuzzy msgid "Check: obtained access latency: low (ONLINE)" msgstr "Check: erhielt Erstelldatum: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:115 #, fuzzy msgid "Check: obtained access latency: high (NEARLINE)" msgstr "Check: erhielt Erstelldatum: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:134 #, fuzzy, c-format msgid "Remove: deleting: %s" msgstr "remove_srm: lösche: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:152 #, fuzzy, c-format msgid "Creating directory: %s" msgstr "Lege Verzeichnis %s an" #: src/hed/dmc/srm/DataPointSRM.cpp:200 src/hed/dmc/srm/DataPointSRM.cpp:249 msgid "Calling PrepareReading when request was already prepared!" 
msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:220 #, c-format msgid "File %s is NEARLINE, will make request to bring online" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:229 #, c-format msgid "Bring online request %s is still in queue, should wait" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:234 #, c-format msgid "Bring online request %s finished successfully, file is now ONLINE" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:240 #, c-format msgid "" "Bad logic for %s - bringOnline returned ok but SRM request is not finished " "successfully or on going" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:268 src/hed/dmc/srm/DataPointSRM.cpp:411 msgid "None of the requested transfer protocols are supported" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:281 #, fuzzy, c-format msgid "Get request %s is still in queue, should wait %i seconds" msgstr "%s: Datei Anfrage %s in SRM queue. Schlafe für %i Sekunden" #: src/hed/dmc/srm/DataPointSRM.cpp:289 src/hed/dmc/srm/DataPointSRM.cpp:468 #, c-format msgid "Checking URL returned by SRM: %s" msgstr "Überprüfen der URL zurückgegeben von SRM: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:304 src/hed/dmc/srm/DataPointSRM.cpp:483 #, c-format msgid "SRM returned no useful Transfer URLs: %s" msgstr "SRM gab keine nützlichen Transfer URLs zurück: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:311 #, c-format msgid "" "Bad logic for %s - getTURLs returned ok but SRM request is not finished " "successfully or on going" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:319 msgid "StartReading" msgstr "StartReading" #: src/hed/dmc/srm/DataPointSRM.cpp:321 #, fuzzy msgid "StartReading: File was not prepared properly" msgstr "AREXClient wurde nicht richtig angelegt." #: src/hed/dmc/srm/DataPointSRM.cpp:331 src/hed/dmc/srm/DataPointSRM.cpp:510 #, fuzzy, c-format msgid "Redirecting to new URL: %s" msgstr "Weiterleitung zu neuer URL: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:392 msgid "Calling PrepareWriting when request was already prepared!" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:421 #, fuzzy msgid "No space token specified" msgstr "Kein space token angegeben" #: src/hed/dmc/srm/DataPointSRM.cpp:427 msgid "Warning: Using SRM protocol v1 which does not support space tokens" msgstr "Warnung: Nutze SRM Protokoll v1, das keine space tokens unterstützt" #: src/hed/dmc/srm/DataPointSRM.cpp:430 #, fuzzy, c-format msgid "Using space token description %s" msgstr "Nutze space token Beschreibung %s" #: src/hed/dmc/srm/DataPointSRM.cpp:436 #, fuzzy, c-format msgid "Error looking up space tokens matching description %s" msgstr "" "Warnung: Fehler beim Nachschlagen von space tokens, entsprechend der " "Beschreibung %s. Kopiere ohne Nutzung der Token" #: src/hed/dmc/srm/DataPointSRM.cpp:440 #, fuzzy, c-format msgid "No space tokens found matching description %s" msgstr "Nutze space token Beschreibung %s" #: src/hed/dmc/srm/DataPointSRM.cpp:445 #, c-format msgid "Using space token %s" msgstr "Nutze space token %s" #: src/hed/dmc/srm/DataPointSRM.cpp:460 #, fuzzy, c-format msgid "Put request %s is still in queue, should wait %i seconds" msgstr "%s: Datei Anfrage %s in SRM queue. Schlafe für %i Sekunden" #: src/hed/dmc/srm/DataPointSRM.cpp:490 #, c-format msgid "" "Bad logic for %s - putTURLs returned ok but SRM request is not finished " "successfully or on going" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:498 msgid "StartWriting" msgstr "StartWriting" #: src/hed/dmc/srm/DataPointSRM.cpp:500 #, fuzzy msgid "StartWriting: File was not prepared properly" msgstr "AREXClient wurde nicht richtig angelegt."
#: src/hed/dmc/srm/DataPointSRM.cpp:559 #, fuzzy, c-format msgid "FinishWriting: looking for metadata: %s" msgstr "ListFiles: suche nach Metadata: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:574 #, fuzzy, c-format msgid "FinishWriting: obtained checksum: %s" msgstr "StartReading: erhielt checksum: %s:%s" #: src/hed/dmc/srm/DataPointSRM.cpp:577 #, c-format msgid "" "Calculated/supplied transfer checksum %s matches checksum reported by SRM " "destination %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:580 #, c-format msgid "" "Checksum mismatch between calculated/supplied checksum (%s) and checksum " "reported by SRM destination (%s)" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:583 #, c-format msgid "" "Checksum type of SRM (%s) and calculated/supplied checksum (%s) differ, " "cannot compare" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:584 src/hed/dmc/srm/DataPointSRM.cpp:585 msgid "No checksum information from server" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:586 src/hed/dmc/srm/DataPointSRM.cpp:587 msgid "No checksum verification possible" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:593 #, fuzzy msgid "Failed to release completed request" msgstr "Fehler bei Lesen von Objekt %s" #: src/hed/dmc/srm/DataPointSRM.cpp:636 src/hed/dmc/srm/DataPointSRM.cpp:703 #, fuzzy, c-format msgid "ListFiles: looking for metadata: %s" msgstr "ListFiles: suche nach Metadata: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:821 #, c-format msgid "plugin for transport protocol %s is not installed" msgstr "" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:51 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:90 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:142 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:181 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:221 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:259 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:303 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:365 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:438 msgid "SRM did not return any information" msgstr "SRM lieferte keine Information zurück" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:316 #, fuzzy, c-format msgid "File could not be moved to Running state: %s" msgstr "Datei konnte nicht zu Running Zustand bewegt werden: %s" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:372 msgid "SRM did not return any useful information" msgstr "SRM lieferte keinerlei gebrauchbare Information" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:450 #, fuzzy msgid "File could not be moved to Done state" msgstr "Datei konnte nicht zu Done Zustand bewegt werden" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:88 msgid "Could not determine version of server" msgstr "Konnte Version des Server nicht bestimmen" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:94 #, c-format msgid "Server SRM version: %s" msgstr "Server SRM version: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:99 #, c-format msgid "Server implementation: %s" msgstr "Server Implementation: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:136 #, fuzzy, c-format msgid "Adding space token %s" msgstr "Füge space token %s hinzu" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:163 #, fuzzy msgid "No request tokens found" msgstr "Keine Anfrage-Token gefunden" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:176 #, fuzzy, c-format msgid "Adding request token %s" msgstr "Füge Anfrage-Token %s hinzu" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:237 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:642 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:828 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1385 #, c-format msgid "%s: File 
request %s in SRM queue. Sleeping for %i seconds" msgstr "%s: Datei Anfrage %s in SRM queue. Schlage für %i Sekunden" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:275 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:327 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:698 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:764 #, fuzzy, c-format msgid "File is ready! TURL is %s" msgstr "Datei ist bereit! Die URL ist %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:359 #, fuzzy, c-format msgid "Setting userRequestDescription to %s" msgstr "Setzer userRequestDescription zu %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:414 #, fuzzy, c-format msgid "%s: Bring online request %s in SRM queue. Sleeping for %i seconds" msgstr "%s: Datei Anfrage %s in SRM queue. Schlage für %i Sekunden" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:457 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1160 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1194 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1228 #, fuzzy msgid "No request token specified!" msgstr "Keine Anfrage-Token spezifiziert!" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:524 msgid "Request is reported as ABORTED, but all files are done" msgstr "" "Anfrage wurde berichtet als ABORTED (abgebrochen), aber alle Dateien wurden " "bearbeitet" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:530 msgid "Request is reported as ABORTED, since it was cancelled" msgstr "" "Anfrage wurde berichtet als ABORTED (abgebrochen), denn sie wurde abgebrochen" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:536 #, c-format msgid "Request is reported as ABORTED. Reason: %s" msgstr "Anfrage wurde berichtet als ABORTED (abgebrochen). Grund: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:673 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:745 #, c-format msgid "Path %s is invalid, creating required directories" msgstr "Pfad %s ist ungültig, lege benötigte Verzeichnisse an" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:678 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:750 #, fuzzy, c-format msgid "Error creating required directories for %s" msgstr "Fehler bei Anlegen von benötigten Verzeichnissen für %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:851 msgid "Too many files in one request - please try again with fewer files" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:899 #, fuzzy msgid "" "Directory size is too large to list in one call, will have to call multiple " "times" msgstr "" "Verzeichnis enthält mehr als %i Dateien, werde Aufruf mehrfach ausführen" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:936 msgid "" "Failure in parsing response from server - some information may be inaccurate" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:942 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:566 #: src/services/gridftpd/misc/ldapquery.cpp:183 #: src/services/gridftpd/misc/ldapquery.cpp:186 #: src/services/gridftpd/misc/ldapquery.cpp:392 #: src/services/gridftpd/misc/ldapquery.cpp:622 #: src/services/gridftpd/misc/ldapquery.cpp:631 #, c-format msgid "%s: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:975 #, c-format msgid "" "Directory size is larger than %i files, will have to call multiple times" msgstr "" "Verzeichnis enthält mehr als %i Dateien, werde Aufruf mehrfach ausführen" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1185 #, c-format msgid "Files associated with request token %s released successfully" msgstr "Dateien assoziiert mit Anfrage Token %s erfolgreich freigegeben" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1219 #, fuzzy, c-format msgid "Files 
associated with request token %s put done successfully" msgstr "Dateien assoziiert mit Anfrage Token %s erfolgreich put done" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1254 #, c-format msgid "Files associated with request token %s aborted successfully" msgstr "Dateien assoziiert mit Anfrage Token %s erfolgreich abgebrochen" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1271 #, fuzzy, c-format msgid "" "Failed to find metadata info on %s for determining file or directory delete" msgstr "Konnte Metadaen für Datei %s nicht finden" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1277 msgid "Type is file, calling srmRm" msgstr "Typ ist Datei, rufe srmRm auf" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1281 #, fuzzy msgid "Type is dir, calling srmRmDir" msgstr "Typ ist Datei, rufe srmRmDir auf" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1285 msgid "File type is not available, attempting file delete" msgstr "Dateitype ist nicht verfügbar, versuche Datei zu löschen" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1288 msgid "File delete failed, attempting directory delete" msgstr "Löschen von Datei schlug fehl, versuche als Verzeichnis zu löschen" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1313 #, fuzzy, c-format msgid "File %s removed successfully" msgstr "Datei %s erfolgreich entfernt" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1340 #, fuzzy, c-format msgid "Directory %s removed successfully" msgstr "Verzeichnis %s erfolgreich entfernt" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1455 #, fuzzy, c-format msgid "Checking for existence of %s" msgstr "Suche nache Existenz von %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1458 #, fuzzy, c-format msgid "File already exists: %s" msgstr "LFN existiert bereits in LFC" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1495 #, fuzzy, c-format msgid "Error creating directory %s: %s" msgstr "Fehler bei Anlegen von Verzeichnis %s: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:82 #, c-format msgid "Attempting to contact %s on port %i" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:88 #, fuzzy, c-format msgid "Storing port %i for %s" msgstr "Nicht-unterstzütztes Protrokoll in URL %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:102 #, fuzzy, c-format msgid "No port succeeded for %s" msgstr "Keine locations gefunden für %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:112 #, c-format msgid "URL %s disagrees with stored SRM info, testing new info" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:118 #, c-format msgid "Replacing old SRM info with new for URL %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:140 #, fuzzy, c-format msgid "SOAP request: %s" msgstr "XACML Anfrage: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:147 #: src/hed/dmc/srm/srmclient/SRMClient.cpp:176 #, fuzzy, c-format msgid "SOAP fault: %s" msgstr "Voreinstellung: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:148 #, fuzzy msgid "Reconnecting" msgstr "Wiederholte Nutzung von Verbindung" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:158 #, fuzzy, c-format msgid "SRM Client status: %s" msgstr "*** Client Anfrage: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:164 msgid "No SOAP response" msgstr "Keine SOAP Antwort" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:171 #: src/hed/identitymap/ArgusPDPClient.cpp:250 #, fuzzy, c-format msgid "SOAP response: %s" msgstr "Keine SOAP Antwort" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:76 #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:162 #, fuzzy, c-format msgid "Failed to acquire lock on file %s" msgstr "Fehler bei Unlock von Datei %s: %s" #: 
src/hed/dmc/srm/srmclient/SRMInfo.cpp:81 #, fuzzy, c-format msgid "Error reading info from file %s:%s" msgstr "Fehler bei Lesen von Info aus Datei %s:%s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:95 #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:187 #, fuzzy, c-format msgid "Bad or old format detected in file %s, in line %s" msgstr "Formatierungsfehler erkannt in Datei %s, in Zeile %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:100 #, fuzzy, c-format msgid "Cannot convert string %s to int in line %s" msgstr "Kann Zeichenkette %s nicht zu int konvertieren in Zeile %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:203 #, fuzzy, c-format msgid "Error writing srm info file %s" msgstr "Fehler bei Schreiben von SRM Info-Datei %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:64 #, fuzzy msgid "" "Missing reference to factory and/or module. It is unsafe to use Xrootd in " "non-persistent mode - Xrootd code is disabled. Report to developers." msgstr "" "Fehlende Referenz zu factory und/oder Modul. Es ist unsicher, Xrootd im " "nicht-persistenten Modus zu nutzen - Xrootd code wurde disabled. Bitte " "die Entwickler informieren." #: src/hed/dmc/xrootd/DataPointXrootd.cpp:103 #, c-format msgid "Could not handle checksum %s: skip checksum check" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:109 #, fuzzy, c-format msgid "Failed to create xrootd copy job: %s" msgstr "Fehler bei Anlegen von xrootd copy job: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:126 #, fuzzy, c-format msgid "Failed to copy %s: %s" msgstr "Fehler bei Kopieren von %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:177 #, c-format msgid "Reading %u bytes from byte %llu" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:179 #, c-format msgid "Read %i bytes" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:210 #, fuzzy, c-format msgid "Could not open file %s for reading: %s" msgstr "Fehler bei Öffnen von Datei %s zum Lesen: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:225 #, fuzzy, c-format msgid "Unable to find file size of %s" msgstr "Kann Dateigröße von %s nicht ermitteln" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:289 #, c-format msgid "DataPointXrootd::write_file got position %d and offset %d, has to seek" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:312 #, fuzzy, c-format msgid "xrootd write failed: %s" msgstr "xrootd write schlug fehl: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:321 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:395 #, fuzzy, c-format msgid "xrootd close failed: %s" msgstr "xrootd close schlug fehl: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:344 #, c-format msgid "Failed to open %s, trying to create parent directories" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:357 #, fuzzy, c-format msgid "xrootd open failed: %s" msgstr "xrootd open schlug fehl: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:371 #, fuzzy, c-format msgid "close failed: %s" msgstr "close schlug fehl: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:413 #, c-format msgid "Read access not allowed for %s: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:432 #, fuzzy, c-format msgid "Could not stat file %s: %s" msgstr "Kann stat-Informationen zu Datei %s nicht einholen: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:437 msgid "Not getting checksum of zip constituent" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:441 #, fuzzy, c-format msgid "Could not get checksum of %s: %s" msgstr "Kann Checksum von %s nicht ermitteln: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:445 #, fuzzy, c-format msgid 
"Checksum %s" msgstr "Errechneted checksum: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:483 #, fuzzy, c-format msgid "Failed to open directory %s: %s" msgstr "Fehler beim Entfernen von LFC Verzeichnis: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:501 #, fuzzy, c-format msgid "Error while reading dir %s: %s" msgstr "Fehler beim Listen von Verzeichnis: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:551 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:569 #, fuzzy, c-format msgid "Error creating required dirs: %s" msgstr "Fehler bei Anlegen benötigter Verzeichnisse: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:158 #, fuzzy msgid "PDPD location is missing" msgstr "Location fehlt" #: src/hed/identitymap/ArgusPDPClient.cpp:161 #, fuzzy, c-format msgid "PDPD location: %s" msgstr "Füge location hinzu: url: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:166 #: src/hed/identitymap/ArgusPEPClient.cpp:129 msgid "Conversion mode is set to SUBJECT" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:169 #: src/hed/identitymap/ArgusPEPClient.cpp:132 msgid "Conversion mode is set to CREAM" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:172 #: src/hed/identitymap/ArgusPEPClient.cpp:135 msgid "Conversion mode is set to EMI" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:175 #: src/hed/identitymap/ArgusPEPClient.cpp:138 #, c-format msgid "Unknown conversion mode %s, using default" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:242 #, fuzzy, c-format msgid "Failed to contact PDP server: %s" msgstr "Fehler beim Verbinden zu RLS server: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:245 #, fuzzy, c-format msgid "There was no SOAP response return from PDP server: %s" msgstr "Keine SOAP response erhalten" #: src/hed/identitymap/ArgusPDPClient.cpp:360 #: src/hed/identitymap/ArgusPEPClient.cpp:286 #, c-format msgid "Have %i requests to process" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:362 #, fuzzy msgid "Creating a client to Argus PDP service" msgstr "Erstelle Client Schnitstelle" #: src/hed/identitymap/ArgusPDPClient.cpp:375 #, fuzzy, c-format msgid "XACML authorisation request: %s" msgstr "GACL Auth. Anfrage. 
%s" #: src/hed/identitymap/ArgusPDPClient.cpp:386 #, fuzzy, c-format msgid "XACML authorisation response: %s" msgstr "Es wurde keine authorization response erwidert" #: src/hed/identitymap/ArgusPDPClient.cpp:426 #, c-format msgid "%s is not authorized to do action %s in resource %s " msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:429 #: src/hed/identitymap/ArgusPDPClient.cpp:434 #: src/hed/identitymap/ArgusPEPClient.cpp:336 #, fuzzy msgid "Not authorized" msgstr "echo: Unauthorisiert" #: src/hed/identitymap/ArgusPDPClient.cpp:439 #: src/hed/identitymap/ArgusPEPClient.cpp:341 #: src/hed/identitymap/IdentityMap.cpp:219 src/hed/shc/legacy/LegacyMap.cpp:216 #, c-format msgid "Grid identity is mapped to local identity '%s'" msgstr "Grid Identität wird zugewiesen zu lokaler Identität '%s'" #: src/hed/identitymap/ArgusPDPClient.cpp:566 #: src/hed/identitymap/ArgusPEPClient.cpp:655 #, fuzzy msgid "Doing CREAM request" msgstr "Verarbeite %s Anfrage" #: src/hed/identitymap/ArgusPDPClient.cpp:580 #: src/hed/identitymap/ArgusPDPClient.cpp:748 #: src/hed/identitymap/ArgusPEPClient.cpp:683 #, fuzzy, c-format msgid "Adding profile-id value: %s" msgstr "Verbindung zu %s schlug fehl: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:592 #: src/hed/identitymap/ArgusPDPClient.cpp:759 #: src/hed/identitymap/ArgusPEPClient.cpp:694 #, fuzzy, c-format msgid "Adding subject-id value: %s" msgstr "Füge Anfrage-Token %s hinzu" #: src/hed/identitymap/ArgusPDPClient.cpp:600 #: src/hed/identitymap/ArgusPDPClient.cpp:767 #: src/hed/identitymap/ArgusPEPClient.cpp:704 #, c-format msgid "Adding subject-issuer value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:607 #: src/hed/identitymap/ArgusPEPClient.cpp:713 #, c-format msgid "Adding virtual-organization value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:620 #: src/hed/identitymap/ArgusPEPClient.cpp:730 #, c-format msgid "Adding FQAN value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:629 #: src/hed/identitymap/ArgusPEPClient.cpp:739 #, c-format msgid "Adding FQAN/primary value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:639 #: src/hed/identitymap/ArgusPEPClient.cpp:750 #, fuzzy, c-format msgid "Adding cert chain value: %s" msgstr "Anlegen von Socket schlug fehl: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:648 #: src/hed/identitymap/ArgusPDPClient.cpp:840 #: src/hed/identitymap/ArgusPEPClient.cpp:760 #, fuzzy, c-format msgid "Adding resource-id value: %s" msgstr "Addressen-Auflösung schlug fehl: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:662 #: src/hed/identitymap/ArgusPDPClient.cpp:863 #: src/hed/identitymap/ArgusPEPClient.cpp:775 #, fuzzy, c-format msgid "Adding action-id value: %s" msgstr "Füge location hinzu: %s - %s" #: src/hed/identitymap/ArgusPDPClient.cpp:666 #: src/hed/identitymap/ArgusPEPClient.cpp:786 #, fuzzy, c-format msgid "CREAM request generation failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:732 #, fuzzy msgid "Doing EMI request" msgstr "Verarbeite %s Anfrage" #: src/hed/identitymap/ArgusPDPClient.cpp:774 #, c-format msgid "Adding Virtual Organization value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:797 #, c-format msgid "Adding VOMS group value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:803 #, c-format msgid "Adding VOMS primary group value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:822 #, c-format msgid "Adding VOMS role value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:829 #, c-format msgid "Adding VOMS 
primary role value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:846 #, fuzzy, c-format msgid "Adding resource-owner value: %s" msgstr "Addressen-Auflösung schlug fehl: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:867 #, fuzzy, c-format msgid "EMI request generation failed: %s" msgstr "Anlegen von Socket schlug fehl: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:119 #, fuzzy msgid "PEPD location is missing" msgstr "Location fehlt" #: src/hed/identitymap/ArgusPEPClient.cpp:122 #, fuzzy, c-format msgid "PEPD location: %s" msgstr "Füge location hinzu: url: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:126 msgid "Conversion mode is set to DIRECT" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:331 #, c-format msgid "" "Not authorized according to request:\n" "%s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:333 #, c-format msgid "%s is not authorized to do action %s in resource %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:361 msgid "Subject of request is null" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:366 #, fuzzy, c-format msgid "Can not create XACML SubjectAttribute: %s" msgstr "Kann Funktion %s nicht anlegen" #: src/hed/identitymap/ArgusPEPClient.cpp:375 #, fuzzy msgid "Can not create XACML Resource" msgstr "Kann Resource ID nicht erhalten" #: src/hed/identitymap/ArgusPEPClient.cpp:381 #, fuzzy, c-format msgid "Can not create XACML ResourceAttribute: %s" msgstr "Kann Funktion %s nicht anlegen" #: src/hed/identitymap/ArgusPEPClient.cpp:390 #, fuzzy msgid "Can not create XACML Action" msgstr "Kann Funktion %s nicht anlegen" #: src/hed/identitymap/ArgusPEPClient.cpp:397 #, fuzzy, c-format msgid "Can not create XACML ActionAttribute: %s" msgstr "Kann Funktion %s nicht anlegen" #: src/hed/identitymap/ArgusPEPClient.cpp:407 #, fuzzy msgid "Can not create XACML request" msgstr "Kann doc Argument nicht anlegen" #: src/hed/identitymap/ArgusPEPClient.cpp:539 #, c-format msgid "Converting to CREAM action - namespace: %s, operation: %s" msgstr "" #: src/hed/identitymap/IdentityMap.cpp:196 #, c-format msgid "PDP: %s can not be loaded" msgstr "PDP: %s kann nicht geladen werden" #: src/hed/libs/common/ArcLocation.cpp:128 #, c-format msgid "" "Can not determine the install location. Using %s. Please set ARC_LOCATION if " "this is not correct." 
msgstr "" #: src/hed/libs/common/DateTime.cpp:86 src/hed/libs/common/DateTime.cpp:631 #: src/hed/libs/common/StringConv.h:25 msgid "Empty string" msgstr "" #: src/hed/libs/common/DateTime.cpp:107 #, c-format msgid "Can not parse date: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:130 #, c-format msgid "Can not parse time: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:160 #, c-format msgid "Can not parse time zone offset: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:180 src/hed/libs/common/DateTime.cpp:199 #: src/hed/libs/common/DateTime.cpp:252 src/hed/libs/common/DateTime.cpp:291 #, c-format msgid "Illegal time format: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:230 src/hed/libs/common/DateTime.cpp:283 #, c-format msgid "Can not parse month: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:647 src/hed/libs/common/DateTime.cpp:688 #, c-format msgid "Invalid ISO duration format: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:752 #, c-format msgid "Invalid period string: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:874 msgid "hour" msgid_plural "hours" msgstr[0] "Stunden" msgstr[1] "Stunde" msgstr[2] "Stunden" #: src/hed/libs/common/DateTime.cpp:880 msgid "minute" msgid_plural "minutes" msgstr[0] "Minuten" msgstr[1] "Minute" msgstr[2] "Minuten" #: src/hed/libs/common/DateTime.cpp:886 msgid "second" msgid_plural "seconds" msgstr[0] "Sekunden" msgstr[1] "Sekunde" msgstr[2] "Sekunden" #: src/hed/libs/common/FileLock.cpp:43 #, fuzzy msgid "Cannot determine hostname from gethostname()" msgstr "Kann hostname von uname nciht ermitteln" #: src/hed/libs/common/FileLock.cpp:92 #, fuzzy, c-format msgid "EACCES Error opening lock file %s: %s" msgstr "EACCESS Fehler bei Öffnen von Lock-Datei %s: %s" #: src/hed/libs/common/FileLock.cpp:97 #, c-format msgid "Error opening lock file %s in initial check: %s" msgstr "Fehler bei Öffnen von Lock-Datei %s in initialer Überprüfung: %s" #: src/hed/libs/common/FileLock.cpp:104 #, fuzzy, c-format msgid "Error creating temporary file %s: %s" msgstr "Fehler bei Lesen von Meta-Datei %s: %s" #: src/hed/libs/common/FileLock.cpp:113 #, c-format msgid "Could not create link to lock file %s as it already exists" msgstr "" #: src/hed/libs/common/FileLock.cpp:124 #, fuzzy, c-format msgid "Could not create lock file %s as it already exists" msgstr "Konnte temporäre Datei nicht anlegen: %s" #: src/hed/libs/common/FileLock.cpp:128 #, fuzzy, c-format msgid "Error creating lock file %s: %s" msgstr "Fehler bei Lesen von Lock-datei %s. 
%s" #: src/hed/libs/common/FileLock.cpp:133 #, fuzzy, c-format msgid "Error writing to lock file %s: %s" msgstr "Fehler beim Schreiben zu tmp lock Datei %s: %s" #: src/hed/libs/common/FileLock.cpp:141 #, fuzzy, c-format msgid "Error linking tmp file %s to lock file %s: %s" msgstr "Fehler bei Umbenennen von temporärer Datei %s zu Lock_datei %s: %s" #: src/hed/libs/common/FileLock.cpp:150 #, fuzzy, c-format msgid "Error in lock file %s, even though linking did not return an error" msgstr "" "Fehler bei Umbenennen von Lock-Datei, obwohl rename() keinen Fehler " "zurücklieferte" #: src/hed/libs/common/FileLock.cpp:159 #, fuzzy, c-format msgid "%li seconds since lock file %s was created" msgstr "%li Sekunden seit Lock-Datei engelegt wurde" #: src/hed/libs/common/FileLock.cpp:162 #, fuzzy, c-format msgid "Timeout has expired, will remove lock file %s" msgstr "Zeitüberschreitung, werde lock-Datei entfernen" #: src/hed/libs/common/FileLock.cpp:166 #, fuzzy, c-format msgid "Failed to remove stale lock file %s: %s" msgstr "Fehler bei Erstellen von Info-Datei %s: %s" #: src/hed/libs/common/FileLock.cpp:178 #, fuzzy, c-format msgid "This process already owns the lock on %s" msgstr "Warnung: Diesem Prozess gehört der Lock bereits" #: src/hed/libs/common/FileLock.cpp:183 #, fuzzy, c-format msgid "" "The process owning the lock on %s is no longer running, will remove lock" msgstr "" "Der Prozesse, dem der Lock gehört, läuft nicht mehr. Der Lock wird entfernt." #: src/hed/libs/common/FileLock.cpp:185 #, fuzzy, c-format msgid "Failed to remove file %s: %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/libs/common/FileLock.cpp:193 #, fuzzy, c-format msgid "The file %s is currently locked with a valid lock" msgstr "Die Datei ist derzeit gelockt mit einem gültigen Lock" #: src/hed/libs/common/FileLock.cpp:208 #, fuzzy, c-format msgid "Failed to unlock file with lock %s: %s" msgstr "Fehler bei Unlock von Datei mit Lock %s: %s" #: src/hed/libs/common/FileLock.cpp:220 #, fuzzy, c-format msgid "Lock file %s doesn't exist" msgstr "Lock-Datei %s existiert nicht" #: src/hed/libs/common/FileLock.cpp:222 #, fuzzy, c-format msgid "Error listing lock file %s: %s" msgstr "Fehler bei Listing von Lock-Datei %s: %s" #: src/hed/libs/common/FileLock.cpp:228 #, fuzzy, c-format msgid "Found unexpected empty lock file %s. Must go back to acquire()" msgstr "" "Ein anderer Prozess besitzt das Lock auf Datei %s. Muss zurück zu Start()" #: src/hed/libs/common/FileLock.cpp:234 #, fuzzy, c-format msgid "Error reading lock file %s: %s" msgstr "Fehler bei Lesen von Lock-datei %s. %s" #: src/hed/libs/common/FileLock.cpp:238 #, fuzzy, c-format msgid "Error with formatting in lock file %s" msgstr "Fehler bei Formatieren von Lock-Datei %s: %s" #: src/hed/libs/common/FileLock.cpp:248 #, fuzzy, c-format msgid "Lock %s is owned by a different host (%s)" msgstr "Lock gehört einem anderen host" #: src/hed/libs/common/FileLock.cpp:257 #, fuzzy, c-format msgid "Badly formatted pid %s in lock file %s" msgstr "Formatierungsfehler erkannt in Datei %s, in Zeile %s" #: src/hed/libs/common/FileLock.cpp:260 #, fuzzy, c-format msgid "Another process (%s) owns the lock on file %s" msgstr "" "Ein anderer Prozess besitzt das Lock auf Datei %s. 
Muss zurück zu Start()" #: src/hed/libs/common/IString.cpp:32 src/hed/libs/common/IString.cpp:41 #: src/hed/libs/common/IString.cpp:42 msgid "(empty)" msgstr "" #: src/hed/libs/common/IString.cpp:32 src/hed/libs/common/IString.cpp:41 #: src/hed/libs/common/IString.cpp:42 msgid "(null)" msgstr "" #: src/hed/libs/common/Logger.cpp:58 #, fuzzy, c-format msgid "Invalid log level. Using default %s." msgstr "Ungültiger Wert für Priority, nutze Voreinstellung von 10" #: src/hed/libs/common/Logger.cpp:123 #, fuzzy, c-format msgid "Invalid old log level. Using default %s." msgstr "Ungültiger Wert für Priority, nutze Voreinstellung von 10" #: src/hed/libs/common/OptionParser.cpp:106 #, c-format msgid "Cannot parse integer value '%s' for -%c" msgstr "" #: src/hed/libs/common/OptionParser.cpp:309 #: src/hed/libs/common/OptionParser.cpp:446 #, fuzzy, c-format msgid "Options Group %s:" msgstr "Funktion : %s" #: src/hed/libs/common/OptionParser.cpp:311 #: src/hed/libs/common/OptionParser.cpp:449 #, fuzzy, c-format msgid "%s:" msgstr "%s" #: src/hed/libs/common/OptionParser.cpp:313 #, c-format msgid "Show %s help options" msgstr "" #: src/hed/libs/common/OptionParser.cpp:342 #, fuzzy msgid "Failed to parse command line options" msgstr "Fehler bei Anlegen von GSI Context: %s" #: src/hed/libs/common/OptionParser.cpp:352 #, fuzzy msgid "Use -? to get usage description" msgstr "Nutze space token Beschreibugn %s" #: src/hed/libs/common/OptionParser.cpp:429 msgid "Usage:" msgstr "" #: src/hed/libs/common/OptionParser.cpp:432 msgid "OPTION..." msgstr "" #: src/hed/libs/common/OptionParser.cpp:438 msgid "Help Options:" msgstr "" #: src/hed/libs/common/OptionParser.cpp:439 msgid "Show help options" msgstr "" #: src/hed/libs/common/Profile.cpp:199 src/hed/libs/common/Profile.cpp:273 #: src/hed/libs/common/Profile.cpp:404 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"inisections\" " "attribute cannot be the empty string." msgstr "" #: src/hed/libs/common/Profile.cpp:205 src/hed/libs/common/Profile.cpp:279 #: src/hed/libs/common/Profile.cpp:411 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"initag\" attribute " "cannot be the empty string." msgstr "" #: src/hed/libs/common/Profile.cpp:419 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"initype\" " "attribute cannot be the empty string." msgstr "" #: src/hed/libs/common/Profile.cpp:422 #, c-format msgid "" "Element \"%s\" in the profile ignored: the \"inidefaultvalue\" attribute " "cannot be specified when the \"inisections\" and \"initag\" attributes have " "not been specified." msgstr "" #: src/hed/libs/common/Profile.cpp:497 #, c-format msgid "" "In the configuration profile the 'initype' attribute on the \"%s\" element " "has a invalid value \"%s\"." 
msgstr "" #: src/hed/libs/common/Run_unix.cpp:226 msgid "Child monitoring signal detected" msgstr "" #: src/hed/libs/common/Run_unix.cpp:231 #, fuzzy, c-format msgid "Child monitoring error: %i" msgstr "Cthread_init() Fehler: %s" #: src/hed/libs/common/Run_unix.cpp:244 msgid "Child monitoring kick detected" msgstr "" #: src/hed/libs/common/Run_unix.cpp:247 msgid "Child monitoring internal communication error" msgstr "" #: src/hed/libs/common/Run_unix.cpp:259 msgid "Child monitoring stdout is closed" msgstr "" #: src/hed/libs/common/Run_unix.cpp:269 msgid "Child monitoring stderr is closed" msgstr "" #: src/hed/libs/common/Run_unix.cpp:279 msgid "Child monitoring stdin is closed" msgstr "" #: src/hed/libs/common/Run_unix.cpp:297 #, c-format msgid "Child monitoring child %d exited" msgstr "" #: src/hed/libs/common/Run_unix.cpp:301 #, c-format msgid "Child monitoring lost child %d (%d)" msgstr "" #: src/hed/libs/common/Run_unix.cpp:322 #, c-format msgid "Child monitoring drops abandoned child %d (%d)" msgstr "" #: src/hed/libs/common/Run_unix.cpp:483 msgid "Child was already started" msgstr "" #: src/hed/libs/common/Run_unix.cpp:487 msgid "No arguments are assigned for external process" msgstr "" #: src/hed/libs/common/Run_unix.cpp:620 src/hed/libs/common/Run_unix.cpp:625 #, c-format msgid "Excepton while trying to start external process: %s" msgstr "" #: src/hed/libs/common/StringConv.h:31 #, c-format msgid "Conversion failed: %s" msgstr "" #: src/hed/libs/common/StringConv.h:35 #, c-format msgid "Full string not used: %s" msgstr "" #: src/hed/libs/common/Thread.cpp:261 msgid "Maximum number of threads running - putting new request into queue" msgstr "" #: src/hed/libs/common/Thread.cpp:309 #, fuzzy, c-format msgid "Thread exited with Glib error: %s" msgstr "ftp_read_thread: Globus Fehler: %s" #: src/hed/libs/common/Thread.cpp:311 #, c-format msgid "Thread exited with Glib exception: %s" msgstr "" #: src/hed/libs/common/Thread.cpp:313 #, c-format msgid "Thread exited with generic exception: %s" msgstr "" #: src/hed/libs/common/URL.cpp:137 #, c-format msgid "URL is not valid: %s" msgstr "" #: src/hed/libs/common/URL.cpp:188 #, c-format msgid "Illegal URL - path must be absolute: %s" msgstr "" #: src/hed/libs/common/URL.cpp:193 #, c-format msgid "Illegal URL - no hostname given: %s" msgstr "" #: src/hed/libs/common/URL.cpp:282 #, c-format msgid "Illegal URL - path must be absolute or empty: %s" msgstr "" #: src/hed/libs/common/URL.cpp:298 #, c-format msgid "Illegal URL - no closing ] for IPv6 address found: %s" msgstr "" #: src/hed/libs/common/URL.cpp:306 #, c-format msgid "" "Illegal URL - closing ] for IPv6 address is followed by illegal token: %s" msgstr "" #: src/hed/libs/common/URL.cpp:322 #, fuzzy, c-format msgid "Invalid port number in %s" msgstr "Ungültige url: %s" #: src/hed/libs/common/URL.cpp:453 #, c-format msgid "Unknown LDAP scope %s - using base" msgstr "" #: src/hed/libs/common/URL.cpp:616 msgid "Attempt to assign relative path to URL - making it absolute" msgstr "" #: src/hed/libs/common/URL.cpp:715 #, c-format msgid "URL option %s does not have format name=value" msgstr "" #: src/hed/libs/common/URL.cpp:1180 #, c-format msgid "urllist %s contains invalid URL: %s" msgstr "" #: src/hed/libs/common/URL.cpp:1185 #, c-format msgid "URL protocol is not urllist: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:32 src/hed/libs/common/UserConfig.cpp:781 #: src/hed/libs/common/UserConfig.cpp:790 #: src/hed/libs/common/UserConfig.cpp:796 #: src/hed/libs/common/UserConfig.cpp:822 #: 
src/hed/libs/common/UserConfig.cpp:832 #: src/hed/libs/common/UserConfig.cpp:844 #: src/hed/libs/common/UserConfig.cpp:864 #, c-format msgid "Multiple %s attributes in configuration file (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:132 #, fuzzy, c-format msgid "Wrong ownership of certificate file: %s" msgstr "Fehler bei Ändern des Besitzers der destination Datei %s: %s" #: src/hed/libs/common/UserConfig.cpp:134 #, fuzzy, c-format msgid "Wrong permissions of certificate file: %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/common/UserConfig.cpp:136 #, c-format msgid "Can not access certificate file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:143 #, fuzzy, c-format msgid "Wrong ownership of key file: %s" msgstr "Fehler bei Ändern des Besitzers der destination Datei %s: %s" #: src/hed/libs/common/UserConfig.cpp:145 #, fuzzy, c-format msgid "Wrong permissions of key file: %s" msgstr "Fehler bei Änderung von Zugriffsrechten auf Datei %s: %s" #: src/hed/libs/common/UserConfig.cpp:147 #, c-format msgid "Can not access key file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:154 #, fuzzy, c-format msgid "Wrong ownership of proxy file: %s" msgstr "Fehler bei Ändern des Besitzers der destination Datei %s: %s" #: src/hed/libs/common/UserConfig.cpp:156 #, fuzzy, c-format msgid "Wrong permissions of proxy file: %s" msgstr "Fehler bei Änderung von Zugriffsrechten auf Datei %s: %s" #: src/hed/libs/common/UserConfig.cpp:158 #, c-format msgid "Can not access proxy file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:169 msgid "computing" msgstr "" #: src/hed/libs/common/UserConfig.cpp:171 msgid "index" msgstr "" #: src/hed/libs/common/UserConfig.cpp:270 #: src/hed/libs/common/UserConfig.cpp:274 #: src/hed/libs/common/UserConfig.cpp:321 #: src/hed/libs/common/UserConfig.cpp:325 #, c-format msgid "System configuration file (%s) contains errors." msgstr "" #: src/hed/libs/common/UserConfig.cpp:278 #: src/hed/libs/common/UserConfig.cpp:329 #, fuzzy, c-format msgid "System configuration file (%s or %s) does not exist." msgstr "Cache-Datei %s existiert nicht" #: src/hed/libs/common/UserConfig.cpp:280 #: src/hed/libs/common/UserConfig.cpp:331 #, c-format msgid "System configuration file (%s) does not exist." msgstr "" #: src/hed/libs/common/UserConfig.cpp:286 #: src/hed/libs/common/UserConfig.cpp:298 #: src/hed/libs/common/UserConfig.cpp:337 #: src/hed/libs/common/UserConfig.cpp:349 #, c-format msgid "User configuration file (%s) contains errors." msgstr "" #: src/hed/libs/common/UserConfig.cpp:291 #: src/hed/libs/common/UserConfig.cpp:342 msgid "No configuration file could be loaded." msgstr "" #: src/hed/libs/common/UserConfig.cpp:294 #: src/hed/libs/common/UserConfig.cpp:345 #, c-format msgid "User configuration file (%s) does not exist or cannot be loaded." msgstr "" #: src/hed/libs/common/UserConfig.cpp:406 #, c-format msgid "" "Unable to parse the specified verbosity (%s) to one of the allowed levels" msgstr "" #: src/hed/libs/common/UserConfig.cpp:418 #, c-format msgid "" "Unsupported job list type '%s', using 'BDB'. Supported types are: BDB, " "SQLITE, XML." 
msgstr "" #: src/hed/libs/common/UserConfig.cpp:463 msgid "Loading OToken failed - ignoring its presence" msgstr "" #: src/hed/libs/common/UserConfig.cpp:604 #, c-format msgid "Certificate and key ('%s' and '%s') not found in any of the paths: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:606 #, c-format msgid "" "If the proxy or certificate/key does exist, you can manually specify the " "locations via environment variables '%s'/'%s' or '%s', or the '%s'/'%s' or " "'%s' attributes in the client configuration file (e.g. '%s')" msgstr "" #: src/hed/libs/common/UserConfig.cpp:623 #: src/hed/libs/common/UserConfig.cpp:633 #, c-format msgid "" "Can not access CA certificate directory: %s. The certificates will not be " "verified." msgstr "" #: src/hed/libs/common/UserConfig.cpp:659 #, c-format msgid "" "Can not find CA certificates directory in default locations:\n" "~/.arc/certificates, ~/.globus/certificates,\n" "%s/etc/certificates, %s/etc/grid-security/certificates,\n" "%s/share/certificates, /etc/grid-security/certificates.\n" "The certificate will not be verified.\n" "If the CA certificates directory does exist, please manually specify the " "locations via env\n" "X509_CERT_DIR, or the cacertificatesdirectory item in client.conf\n" msgstr "" #: src/hed/libs/common/UserConfig.cpp:680 #, c-format msgid "Using proxy file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:683 #, c-format msgid "Using certificate file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:684 #, c-format msgid "Using key file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:688 #, c-format msgid "Using CA certificate directory: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:692 #, fuzzy msgid "Using OToken" msgstr "Nutze space token %s" #: src/hed/libs/common/UserConfig.cpp:705 #: src/hed/libs/common/UserConfig.cpp:711 #, fuzzy, c-format msgid "Can not access VOMSES file/directory: %s." msgstr "Lege Verzeichnis %s an" #: src/hed/libs/common/UserConfig.cpp:717 #, fuzzy, c-format msgid "Can not access VOMS file/directory: %s." msgstr "Lege Verzeichnis %s an" #: src/hed/libs/common/UserConfig.cpp:731 msgid "" "Can not find voms service configuration file (vomses) in default locations: " "~/.arc/vomses, ~/.voms/vomses, $ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/" "grid-security/vomses, $PWD/vomses, /etc/vomses, /etc/grid-security/vomses" msgstr "" #: src/hed/libs/common/UserConfig.cpp:744 #, c-format msgid "Loading configuration (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:778 #, c-format msgid "" "The value of the timeout attribute in the configuration file (%s) was only " "partially parsed" msgstr "" #: src/hed/libs/common/UserConfig.cpp:803 msgid "" "The brokerarguments attribute can only be used in conjunction with the " "brokername attribute" msgstr "" #: src/hed/libs/common/UserConfig.cpp:819 #, c-format msgid "" "The value of the keysize attribute in the configuration file (%s) was only " "partially parsed" msgstr "" #: src/hed/libs/common/UserConfig.cpp:839 #, c-format msgid "" "Could not convert the slcs attribute value (%s) to an URL instance in " "configuration file (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:885 #, c-format msgid "Specified overlay file (%s) does not exist." 
msgstr "" #: src/hed/libs/common/UserConfig.cpp:889 #, c-format msgid "" "Unknown attribute %s in common section of configuration file (%s), ignoring " "it" msgstr "" #: src/hed/libs/common/UserConfig.cpp:930 #, c-format msgid "Unknown section %s, ignoring it" msgstr "" #: src/hed/libs/common/UserConfig.cpp:934 #, c-format msgid "Configuration (%s) loaded" msgstr "" #: src/hed/libs/common/UserConfig.cpp:937 #, c-format msgid "Could not load configuration (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1032 #, c-format msgid "UserConfiguration saved to file (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1045 #, c-format msgid "Unable to create %s directory." msgstr "" #: src/hed/libs/common/UserConfig.cpp:1054 #, c-format msgid "Configuration example file created (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1056 #, c-format msgid "Unable to copy example configuration from existing configuration (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1061 #, c-format msgid "Cannot copy example configuration (%s), it is not a regular file" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1066 #, c-format msgid "Example configuration (%s) not created." msgstr "" #: src/hed/libs/common/UserConfig.cpp:1071 #, c-format msgid "The default configuration file (%s) is not a regular file." msgstr "" #: src/hed/libs/common/UserConfig.cpp:1089 #, c-format msgid "%s directory created" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1091 #: src/hed/libs/common/UserConfig.cpp:1130 src/hed/libs/data/DataMover.cpp:679 #, fuzzy, c-format msgid "Failed to create directory %s" msgstr "Fehler bei Anlegen/Finden von Verzeichnis %s, (%d)" #: src/hed/libs/common/test/LoggerTest.cpp:58 msgid "This VERBOSE message should not be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:62 msgid "This INFO message should be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:73 msgid "This VERBOSE message should now be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:79 msgid "This INFO message should also be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:93 msgid "This message goes to initial destination" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:108 msgid "This message goes to per-thread destination" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:80 msgid "Request failed: No response from SPService" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:84 #: src/hed/libs/communication/ClientSAML2SSO.cpp:137 msgid "Request failed: response from SPService is not as expected" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:92 #, c-format msgid "Authentication Request URL: %s" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:133 msgid "Request failed: No response from IdP" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:184 msgid "Request failed: No response from IdP when doing redirecting" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:188 msgid "" "Request failed: response from IdP is not as expected when doing redirecting" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:245 msgid "Request failed: No response from IdP when doing authentication" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:249 msgid "" "Request failed: response from IdP is not as expected when doing " "authentication" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:294 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:300 msgid "Succeeded to verify the signature under " msgstr "Erfolgreiche Verifikation 
der Signatur unter " #: src/hed/libs/communication/ClientSAML2SSO.cpp:296 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:303 msgid "Failed to verify the signature under " msgstr "Fehler bei der Überprüfung der Signatur unter " #: src/hed/libs/communication/ClientSAML2SSO.cpp:310 msgid "" "Request failed: No response from SP Service when sending SAML assertion to SP" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:314 msgid "" "Request failed: response from SP Service is not as expected when sending " "SAML assertion to SP" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:325 #, c-format msgid "IdP return some error message: %s" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:353 #: src/hed/libs/communication/ClientSAML2SSO.cpp:398 msgid "SAML2SSO process failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:54 msgid "Creating delegation credential to ARC delegation service" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:64 #: src/hed/libs/communication/ClientX509Delegation.cpp:267 msgid "DelegateCredentialsInit failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:68 #: src/hed/libs/communication/ClientX509Delegation.cpp:122 #: src/hed/libs/communication/ClientX509Delegation.cpp:157 #: src/hed/libs/communication/ClientX509Delegation.cpp:212 #: src/hed/libs/communication/ClientX509Delegation.cpp:271 msgid "There is no SOAP response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:73 msgid "There is no X509 request in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:78 msgid "There is no Format request in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:86 msgid "There is no Id or X509 request value in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:99 #: src/hed/libs/communication/ClientX509Delegation.cpp:187 msgid "DelegateProxy failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:118 msgid "UpdateCredentials failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:126 msgid "There is no UpdateCredentialsResponse in response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:134 #: src/hed/libs/communication/ClientX509Delegation.cpp:162 #: src/hed/libs/communication/ClientX509Delegation.cpp:217 #: src/hed/libs/communication/ClientX509Delegation.cpp:302 msgid "There is no SOAP connection chain configured" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:140 msgid "Creating delegation to CREAM delegation service" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:153 msgid "Delegation getProxyReq request failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:173 msgid "Creating delegation to CREAM delegation service failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:208 msgid "Delegation putProxy request failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:222 msgid "Creating delegation to CREAM delegation failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:237 msgid "Getting delegation credential from ARC delegation service" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:276 msgid "There is no Delegated X509 token in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:281 msgid "There is no Format delegated token in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:289 
msgid "There is no Id or X509 token value in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:298 #, c-format msgid "" "Get delegated credential from delegation service: \n" " %s" msgstr "" #: src/hed/libs/compute/Broker.cpp:62 #, c-format msgid "Performing matchmaking against target (%s)." msgstr "" #: src/hed/libs/compute/Broker.cpp:72 #, c-format msgid "Matchmaking, ExecutionTarget: %s matches job description" msgstr "" #: src/hed/libs/compute/Broker.cpp:145 #, c-format msgid "" "The CA issuer (%s) of the credentials (%s) is not trusted by the target (%s)." msgstr "" #: src/hed/libs/compute/Broker.cpp:153 src/hed/libs/compute/Broker.cpp:171 #, c-format msgid "ComputingShareName of ExecutionTarget (%s) is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:157 src/hed/libs/compute/Broker.cpp:162 #, c-format msgid "ComputingShare (%s) explicitly rejected" msgstr "" #: src/hed/libs/compute/Broker.cpp:175 src/hed/libs/compute/Broker.cpp:180 #, c-format msgid "ComputingShare (%s) does not match selected queue (%s)" msgstr "" #: src/hed/libs/compute/Broker.cpp:189 #, c-format msgid "" "ProcessingStartTime (%s) specified in job description is inside the targets " "downtime period [ %s - %s ]." msgstr "" #: src/hed/libs/compute/Broker.cpp:194 #, c-format msgid "The downtime of the target (%s) is not published. Keeping target." msgstr "" #: src/hed/libs/compute/Broker.cpp:200 #, c-format msgid "HealthState of ExecutionTarget (%s) is not OK (%s)" msgstr "" #: src/hed/libs/compute/Broker.cpp:205 #, c-format msgid "Matchmaking, ExecutionTarget: %s, HealthState is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:212 #, c-format msgid "" "Matchmaking, Computing endpoint requirement not satisfied. ExecutionTarget: " "%s" msgstr "" #: src/hed/libs/compute/Broker.cpp:217 #, c-format msgid "Matchmaking, ExecutionTarget: %s, ImplementationName is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:243 #, c-format msgid "" "Matchmaking, %s (%d) is %s than %s (%d) published by the ExecutionTarget." msgstr "" #: src/hed/libs/compute/Broker.cpp:272 #, c-format msgid "" "Matchmaking, The %s scaled %s (%d) is %s than the %s (%d) published by the " "ExecutionTarget." msgstr "" #: src/hed/libs/compute/Broker.cpp:284 #, c-format msgid "Matchmaking, Benchmark %s is not published by the ExecutionTarget." 
msgstr "" #: src/hed/libs/compute/Broker.cpp:299 #, c-format msgid "" "Matchmaking, MaxTotalCPUTime problem, ExecutionTarget: %d (MaxTotalCPUTime), " "JobDescription: %d (TotalCPUTime)" msgstr "" #: src/hed/libs/compute/Broker.cpp:306 #, c-format msgid "" "Matchmaking, MaxCPUTime problem, ExecutionTarget: %d (MaxCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" msgstr "" #: src/hed/libs/compute/Broker.cpp:311 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxTotalCPUTime or MaxCPUTime not " "defined, assuming no CPU time limit" msgstr "" #: src/hed/libs/compute/Broker.cpp:317 #, c-format msgid "" "Matchmaking, MinCPUTime problem, ExecutionTarget: %d (MinCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" msgstr "" #: src/hed/libs/compute/Broker.cpp:322 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MinCPUTime not defined, assuming no CPU " "time limit" msgstr "" #: src/hed/libs/compute/Broker.cpp:330 #, c-format msgid "" "Matchmaking, MainMemorySize problem, ExecutionTarget: %d (MainMemorySize), " "JobDescription: %d (IndividualPhysicalMemory)" msgstr "" #: src/hed/libs/compute/Broker.cpp:336 #, c-format msgid "" "Matchmaking, MaxMainMemory problem, ExecutionTarget: %d (MaxMainMemory), " "JobDescription: %d (IndividualPhysicalMemory)" msgstr "" #: src/hed/libs/compute/Broker.cpp:341 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxMainMemory and MainMemorySize are not " "defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:349 #, c-format msgid "" "Matchmaking, MaxVirtualMemory problem, ExecutionTarget: %d " "(MaxVirtualMemory), JobDescription: %d (IndividualVirtualMemory)" msgstr "" #: src/hed/libs/compute/Broker.cpp:354 #, c-format msgid "Matchmaking, ExecutionTarget: %s, MaxVirtualMemory is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:362 #, c-format msgid "" "Matchmaking, Platform problem, ExecutionTarget: %s (Platform) " "JobDescription: %s (Platform)" msgstr "" #: src/hed/libs/compute/Broker.cpp:367 #, c-format msgid "Matchmaking, ExecutionTarget: %s, Platform is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:375 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, OperatingSystem requirements not satisfied" msgstr "" #: src/hed/libs/compute/Broker.cpp:380 #, c-format msgid "Matchmaking, ExecutionTarget: %s, OperatingSystem is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:388 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, RunTimeEnvironment requirements not " "satisfied" msgstr "" #: src/hed/libs/compute/Broker.cpp:393 #, c-format msgid "Matchmaking, ExecutionTarget: %s, ApplicationEnvironments not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:402 #, c-format msgid "" "Matchmaking, NetworkInfo demand not fulfilled, ExecutionTarget do not " "support %s, specified in the JobDescription." 
msgstr "" #: src/hed/libs/compute/Broker.cpp:406 #, c-format msgid "Matchmaking, ExecutionTarget: %s, NetworkInfo is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:414 #, c-format msgid "" "Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (SessionDiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:421 #, c-format msgid "" "Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (SessionDiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:427 src/hed/libs/compute/Broker.cpp:448 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxDiskSpace and WorkingAreaFree are not " "defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:435 #, c-format msgid "" "Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (DiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:442 #, c-format msgid "" "Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (DiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:456 #, c-format msgid "" "Matchmaking, CacheTotal problem, ExecutionTarget: %d MB (CacheTotal); " "JobDescription: %d MB (CacheDiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:461 #, c-format msgid "Matchmaking, ExecutionTarget: %s, CacheTotal is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:469 #, c-format msgid "" "Matchmaking, TotalSlots problem, ExecutionTarget: %d (TotalSlots) " "JobDescription: %d (NumberOfProcesses)" msgstr "" #: src/hed/libs/compute/Broker.cpp:475 #, c-format msgid "" "Matchmaking, MaxSlotsPerJob problem, ExecutionTarget: %d (MaxSlotsPerJob) " "JobDescription: %d (NumberOfProcesses)" msgstr "" #: src/hed/libs/compute/Broker.cpp:481 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, TotalSlots and MaxSlotsPerJob are not " "defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:489 #, c-format msgid "" "Matchmaking, WorkingAreaLifeTime problem, ExecutionTarget: %s " "(WorkingAreaLifeTime) JobDescription: %s (SessionLifeTime)" msgstr "" #: src/hed/libs/compute/Broker.cpp:494 #, c-format msgid "Matchmaking, ExecutionTarget: %s, WorkingAreaLifeTime is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:502 #, c-format msgid "" "Matchmaking, ConnectivityIn problem, ExecutionTarget: %s (ConnectivityIn) " "JobDescription: %s (InBound)" msgstr "" #: src/hed/libs/compute/Broker.cpp:509 #, c-format msgid "" "Matchmaking, ConnectivityOut problem, ExecutionTarget: %s (ConnectivityOut) " "JobDescription: %s (OutBound)" msgstr "" #: src/hed/libs/compute/Broker.cpp:532 msgid "Unable to sort added jobs. The BrokerPlugin plugin has not been loaded." msgstr "" #: src/hed/libs/compute/Broker.cpp:549 msgid "Unable to match target, marking it as not matching. Broker not valid." msgstr "" #: src/hed/libs/compute/Broker.cpp:585 #, fuzzy msgid "Unable to sort ExecutionTarget objects - Invalid Broker object." msgstr "Kann ExecutionTarget nicht zu Python Objekt konvertieren" #: src/hed/libs/compute/Broker.cpp:609 msgid "" "Unable to register job submission. Can't get JobDescription object from " "Broker, Broker is invalid." msgstr "" #: src/hed/libs/compute/BrokerPlugin.cpp:89 #, c-format msgid "Broker plugin \"%s\" not found." 
msgstr "" #: src/hed/libs/compute/BrokerPlugin.cpp:96 #, fuzzy, c-format msgid "Unable to load BrokerPlugin (%s)" msgstr "Konnter Broker %s nicht laden" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:27 #, c-format msgid "Uniq is replacing service coming from %s with service coming from %s" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:31 #, c-format msgid "Uniq is ignoring service coming from %s" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:38 #, c-format msgid "Uniq is adding service coming from %s" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:61 #, c-format msgid "Adding endpoint (%s) to TargetInformationRetriever" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:64 #, c-format msgid "Adding endpoint (%s) to ServiceEndpointRetriever" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:67 #, c-format msgid "" "Adding endpoint (%s) to both ServiceEndpointRetriever and " "TargetInformationRetriever" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:42 #, c-format msgid "The plugin %s does not support any interfaces, skipping it." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:47 #, c-format msgid "" "The first supported interface of the plugin %s is an empty string, skipping " "the plugin." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:95 #, c-format msgid "Interface on endpoint (%s) %s." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:101 #: src/hed/libs/compute/EntityRetriever.cpp:133 #: src/hed/libs/compute/EntityRetriever.cpp:425 #, c-format msgid "Ignoring endpoint (%s), it is already registered in retriever." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:110 #, c-format msgid "Service Loop: Endpoint %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:112 #, c-format msgid " This endpoint (%s) is STARTED or SUCCESSFUL" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:115 #, c-format msgid "" "Suspending querying of endpoint (%s) since the service at the endpoint is " "already being queried, or has been queried." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:122 #: src/hed/libs/compute/EntityRetriever.cpp:237 #, c-format msgid " Status of endpoint (%s) is %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:126 #, c-format msgid "Setting status (STARTED) for endpoint: %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:145 #, c-format msgid "Starting thread to query the endpoint on %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:147 #: src/hed/libs/compute/EntityRetriever.cpp:289 #, fuzzy, c-format msgid "Failed to start querying the endpoint on %s" msgstr "Fehler beim Authentifizieren: %s" #: src/hed/libs/compute/EntityRetriever.cpp:174 #, c-format msgid "Found a registry, will query it recursively: %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:211 #, c-format msgid "Setting status (%s) for endpoint: %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:231 msgid "Checking for suspended endpoints which should be started." 
msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:241 #, c-format msgid "Found started or successful endpoint (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:253 #, c-format msgid "Found suspended endpoint (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:264 #, fuzzy, c-format msgid "Trying to start suspended endpoint (%s)" msgstr "" "Ошибка при попытке открыть файл:\n" " %1" #: src/hed/libs/compute/EntityRetriever.cpp:284 #, c-format msgid "" "Starting querying of suspended endpoint (%s) - no other endpoints for this " "service is being queried or has been queried successfully." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:351 #, c-format msgid "Calling plugin %s to query endpoint on %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:373 #, c-format msgid "" "The interface of this endpoint (%s) is unspecified, will try all possible " "plugins" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:389 #, c-format msgid "Problem loading plugin %s, skipping it." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:393 #, c-format msgid "The endpoint (%s) is not supported by this plugin (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:414 #, c-format msgid "" "New endpoint is created (%s) from the one with the unspecified interface (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:432 #, c-format msgid "Starting sub-thread to query the endpoint on %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:434 #, c-format msgid "" "Failed to start querying the endpoint on %s (unable to create sub-thread)" msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:32 #, c-format msgid "Found %s %s (it was loaded already)" msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:41 #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:49 #: src/hed/libs/compute/JobControllerPlugin.cpp:100 #: src/hed/libs/compute/JobControllerPlugin.cpp:109 #: src/hed/libs/compute/SubmitterPlugin.cpp:171 #: src/hed/libs/compute/SubmitterPlugin.cpp:181 #, c-format msgid "" "Unable to locate the \"%s\" plugin. Please refer to installation " "instructions and check if package providing support for \"%s\" plugin is " "installed" msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:42 #, fuzzy, c-format msgid "%s plugin \"%s\" not found." msgstr "clientxrsl nicht gefunden" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:50 #, fuzzy, c-format msgid "%s %s could not be created." msgstr "Der Job Status konnte nicht ermittelt werden" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:55 #, c-format msgid "Loaded %s %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:51 #, c-format msgid "" "Skipping ComputingEndpoint '%s', because it has '%s' interface instead of " "the requested '%s'." 
msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:132 #, c-format msgid "" "Computing endpoint %s (type %s) added to the list for submission brokering" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:239 #, fuzzy, c-format msgid "Address: %s" msgstr "Antwort: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:240 #, fuzzy, c-format msgid "Place: %s" msgstr "Name %s" #: src/hed/libs/compute/ExecutionTarget.cpp:241 #, fuzzy, c-format msgid "Country: %s" msgstr "Anfrage: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:242 #, fuzzy, c-format msgid "Postal code: %s" msgstr "Listen-Eintrag: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:243 #, fuzzy, c-format msgid "Latitude: %f" msgstr "Fehler: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:244 #, c-format msgid "Longitude: %f" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:250 #, fuzzy, c-format msgid "Owner: %s" msgstr "Anfrage: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:257 #, fuzzy, c-format msgid "ID: %s" msgstr "ID: " #: src/hed/libs/compute/ExecutionTarget.cpp:258 #, fuzzy, c-format msgid "Type: %s" msgstr "Proxy Typ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:263 #, fuzzy, c-format msgid "URL: %s" msgstr "HER: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:264 #, fuzzy, c-format msgid "Interface: %s" msgstr "Quelle: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:266 #, fuzzy msgid "Interface versions:" msgstr "Benutzungsschnittstellenfehler" #: src/hed/libs/compute/ExecutionTarget.cpp:271 msgid "Interface extensions:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:276 msgid "Capabilities:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:280 #, c-format msgid "Technology: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:282 #, fuzzy msgid "Supported Profiles:" msgstr "Nicht-unterstützte URL angegeben" #: src/hed/libs/compute/ExecutionTarget.cpp:286 #, fuzzy, c-format msgid "Implementor: %s" msgstr "Server Implementation: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:287 #, fuzzy, c-format msgid "Implementation name: %s" msgstr "Server Implementation: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:288 #, fuzzy, c-format msgid "Quality level: %s" msgstr "Policy Zeile: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:289 #, fuzzy, c-format msgid "Health state: %s" msgstr "Kann stat-Informationen zu Datei nicht einholen: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:290 #, fuzzy, c-format msgid "Health state info: %s" msgstr "Kann stat-Informationen zu Datei nicht einholen: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:291 #, fuzzy, c-format msgid "Serving state: %s" msgstr "Start start" #: src/hed/libs/compute/ExecutionTarget.cpp:292 #, fuzzy, c-format msgid "Issuer CA: %s" msgstr "Anfrage: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:294 msgid "Trusted CAs:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:298 #, c-format msgid "Downtime starts: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:299 #, c-format msgid "Downtime ends: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:300 #, fuzzy, c-format msgid "Staging: %s" msgstr "Kontaktiere %s" #: src/hed/libs/compute/ExecutionTarget.cpp:302 #, fuzzy msgid "Job descriptions:" msgstr "Job Beschreibung: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:314 #, fuzzy, c-format msgid "Scheme: %s" msgstr "Quelle: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:317 #, fuzzy, c-format msgid "Rule: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:329 #, fuzzy, c-format msgid "Mapping 
queue: %s" msgstr "" "\n" " СоответÑтвие раздел-Ñегмент:\n" #: src/hed/libs/compute/ExecutionTarget.cpp:330 #, c-format msgid "Max wall-time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:331 #, c-format msgid "Max total wall-time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:332 #, c-format msgid "Min wall-time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:333 #, fuzzy, c-format msgid "Default wall-time: %s" msgstr "Voreinstellung: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:334 #, c-format msgid "Max CPU time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:335 #, c-format msgid "Min CPU time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:336 #, fuzzy, c-format msgid "Default CPU time: %s" msgstr "Voreinstellung: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:337 #, c-format msgid "Max total jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:338 #, fuzzy, c-format msgid "Max running jobs: %i" msgstr "Aufräumen von Job: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:339 #, fuzzy, c-format msgid "Max waiting jobs: %i" msgstr "Herunterladen des Job: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:340 #, c-format msgid "Max pre-LRMS waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:341 #, c-format msgid "Max user running jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:342 #, c-format msgid "Max slots per job: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:343 #, c-format msgid "Max stage in streams: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:344 #, c-format msgid "Max stage out streams: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:345 #, fuzzy, c-format msgid "Scheduling policy: %s" msgstr "ARC delegation policy: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:346 #, c-format msgid "Max memory: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:347 #, c-format msgid "Max virtual memory: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:348 #, c-format msgid "Max disk space: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:349 #, fuzzy, c-format msgid "Default Storage Service: %s" msgstr "Delegation service: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:350 msgid "Supports preemption" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:351 msgid "Doesn't support preemption" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:352 #, fuzzy, c-format msgid "Total jobs: %i" msgstr "alle Jobs" #: src/hed/libs/compute/ExecutionTarget.cpp:353 #, fuzzy, c-format msgid "Running jobs: %i" msgstr "Aufräumen von Job: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:354 #, fuzzy, c-format msgid "Local running jobs: %i" msgstr "Aufräumen von Job: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:355 #, fuzzy, c-format msgid "Waiting jobs: %i" msgstr "Aufräumen von Job: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:356 #, fuzzy, c-format msgid "Local waiting jobs: %i" msgstr "Herunterladen des Job: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:357 #, c-format msgid "Suspended jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:358 #, c-format msgid "Local suspended jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:359 #, fuzzy, c-format msgid "Staging jobs: %i" msgstr "Aufräumen von Job: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:360 #, c-format msgid "Pre-LRMS waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:361 #, fuzzy, c-format msgid "Estimated average waiting time: 
%s" msgstr "start_reading_ftp: angegelegt um: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:362 #, fuzzy, c-format msgid "Estimated worst waiting time: %s" msgstr "start_reading_ftp: angegelegt um: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:363 #, c-format msgid "Free slots: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:365 msgid "Free slots grouped according to time limits (limit: free slots):" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:368 #, c-format msgid " %s: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:369 #, c-format msgid " unspecified: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:372 #, c-format msgid "Used slots: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:373 #, fuzzy, c-format msgid "Requested slots: %i" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:374 #, fuzzy, c-format msgid "Reservation policy: %s" msgstr "ARC delegation policy: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:381 #, fuzzy, c-format msgid "Resource manager: %s" msgstr "Modulname: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:383 #, fuzzy, c-format msgid " (%s)" msgstr "%s (%s)" #: src/hed/libs/compute/ExecutionTarget.cpp:387 #, c-format msgid "Total physical CPUs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:388 #, c-format msgid "Total logical CPUs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:389 #, c-format msgid "Total slots: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:390 msgid "Supports advance reservations" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:391 msgid "Doesn't support advance reservations" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:392 msgid "Supports bulk submission" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:393 msgid "Doesn't support bulk Submission" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:394 msgid "Homogeneous resource" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:395 msgid "Non-homogeneous resource" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:397 #, fuzzy msgid "Network information:" msgstr "Angabe des aktuellen Versionsbezeichners" #: src/hed/libs/compute/ExecutionTarget.cpp:402 msgid "Working area is shared among jobs" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:403 msgid "Working area is not shared among jobs" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:404 #, c-format msgid "Working area total size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:405 #, c-format msgid "Working area free size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:406 #, c-format msgid "Working area life time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:407 #, c-format msgid "Cache area total size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:408 #, c-format msgid "Cache area free size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:414 #, fuzzy, c-format msgid "Platform: %s" msgstr "ProxyStore: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:415 msgid "Execution environment supports inbound connections" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:416 msgid "Execution environment does not support inbound connections" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:417 msgid "Execution environment supports outbound connections" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:418 msgid "Execution environment does not support outbound connections" msgstr "" #: 
src/hed/libs/compute/ExecutionTarget.cpp:419 msgid "Execution environment is a virtual machine" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:420 msgid "Execution environment is a physical machine" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:421 #, fuzzy, c-format msgid "CPU vendor: %s" msgstr "DCAU fehlgeschlagen: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:422 #, fuzzy, c-format msgid "CPU model: %s" msgstr "Policy Zeile: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:423 #, fuzzy, c-format msgid "CPU version: %s" msgstr "%s version %s" #: src/hed/libs/compute/ExecutionTarget.cpp:424 #, c-format msgid "CPU clock speed: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:425 #, c-format msgid "Main memory size: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:426 #, fuzzy, c-format msgid "OS family: %s" msgstr "PASV fehlgeschlagen: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:427 #, fuzzy, c-format msgid "OS name: %s" msgstr "Name %s" #: src/hed/libs/compute/ExecutionTarget.cpp:428 #, fuzzy, c-format msgid "OS version: %s" msgstr "%s version %s" #: src/hed/libs/compute/ExecutionTarget.cpp:435 #, fuzzy msgid "Computing service:" msgstr "Delegation service: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:459 #, c-format msgid "%d Endpoints" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:464 #, fuzzy msgid "Endpoint Information:" msgstr "Angabe des aktuellen Versionsbezeichners" #: src/hed/libs/compute/ExecutionTarget.cpp:476 #, c-format msgid "%d Batch Systems" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:481 msgid "Batch System Information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:487 #, fuzzy msgid "Installed application environments:" msgstr "Initialisierte replication Umgebung" #: src/hed/libs/compute/ExecutionTarget.cpp:500 #, c-format msgid "%d Shares" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:505 #, fuzzy msgid "Share Information:" msgstr "Ungültige Authentisierungs-Information" #: src/hed/libs/compute/ExecutionTarget.cpp:511 #, c-format msgid "%d mapping policies" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:515 #, fuzzy msgid "Mapping policy:" msgstr "ARC delegation policy: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:531 #, c-format msgid "Execution Target on Computing Service: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:533 #, c-format msgid " Computing endpoint URL: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:535 #, fuzzy, c-format msgid " Computing endpoint interface name: %s" msgstr "Erstelle Client Schnittstelle" #: src/hed/libs/compute/ExecutionTarget.cpp:537 #: src/hed/libs/compute/Job.cpp:575 #, c-format msgid " Queue: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:540 #, fuzzy, c-format msgid " Mapping queue: %s" msgstr "" "\n" " Соответствие раздел-сегмент:\n" #: src/hed/libs/compute/ExecutionTarget.cpp:543 #, fuzzy, c-format msgid " Health state: %s" msgstr "Kann stat-Informationen zu Datei nicht einholen: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:548 msgid "Service information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:553 msgid " Installed application environments:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:560 #, fuzzy msgid "Batch system information:" msgstr "Ungültige Authentisierungs-Information" #: src/hed/libs/compute/ExecutionTarget.cpp:563 msgid "Queue information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:570 #, fuzzy msgid " Benchmark information:" msgstr "Ungültige 
Authentisierungs-Information" #: src/hed/libs/compute/GLUE2.cpp:53 msgid "The Service doesn't advertise its Type." msgstr "Der Service gibt seinen Typ nicht an." #: src/hed/libs/compute/GLUE2.cpp:58 #, fuzzy msgid "The ComputingService doesn't advertise its Quality Level." msgstr "Der Service gibt seinen Quality Level nicht an." #: src/hed/libs/compute/GLUE2.cpp:99 msgid "The ComputingEndpoint has no URL." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:104 msgid "The Service advertises no Health State." msgstr "Der Service gibt keinen Health State an." #: src/hed/libs/compute/GLUE2.cpp:117 #, fuzzy msgid "The ComputingEndpoint doesn't advertise its Quality Level." msgstr "Der Service gibt seinen Quality Level nicht an." #: src/hed/libs/compute/GLUE2.cpp:128 #, fuzzy msgid "The ComputingService doesn't advertise its Interface." msgstr "Der Service gibt sein Interface nicht an." #: src/hed/libs/compute/GLUE2.cpp:160 #, fuzzy msgid "The ComputingEndpoint doesn't advertise its Serving State." msgstr "Der Service gibt seinen Serving State nicht an." #: src/hed/libs/compute/GLUE2.cpp:247 #, c-format msgid "" "The \"FreeSlotsWithDuration\" attribute published by \"%s\" is wrongly " "formatted. Ignoring it." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:420 #, c-format msgid "" "Couldn't parse benchmark XML:\n" "%s" msgstr "" "Konnte benchmark XML nicht parsen:\n" "%s" #: src/hed/libs/compute/Job.cpp:324 #, fuzzy msgid "Unable to detect format of job record." msgstr "Konnte status Beschreibung des jobs (%s) nicht erhalten: " #: src/hed/libs/compute/Job.cpp:545 #, c-format msgid "Job: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:547 #, c-format msgid " Name: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:548 #, fuzzy, c-format msgid " State: %s" msgstr "Name %s" #: src/hed/libs/compute/Job.cpp:551 #, c-format msgid " Specific state: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:555 src/hed/libs/compute/Job.cpp:579 #, c-format msgid " Waiting Position: %d" msgstr "" #: src/hed/libs/compute/Job.cpp:559 #, c-format msgid " Exit Code: %d" msgstr "" #: src/hed/libs/compute/Job.cpp:563 #, fuzzy, c-format msgid " Job Error: %s" msgstr "Error: %s" #: src/hed/libs/compute/Job.cpp:568 #, c-format msgid " Owner: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:572 #, c-format msgid " Other Messages: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:577 #, fuzzy, c-format msgid " Requested Slots: %d" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/libs/compute/Job.cpp:582 #, c-format msgid " Stdin: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:584 #, c-format msgid " Stdout: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:586 #, c-format msgid " Stderr: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:588 #, fuzzy, c-format msgid " Computing Service Log Directory: %s" msgstr "Fehler beim Listen von Datei oder Verzeichnis: %s" #: src/hed/libs/compute/Job.cpp:591 #, c-format msgid " Submitted: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:594 #, c-format msgid " End Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:597 #, c-format msgid " Submitted from: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:600 #, c-format msgid " Submitting client: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:603 #, c-format msgid " Requested CPU Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:608 #, c-format msgid " Used CPU Time: %s (%s per slot)" msgstr "" #: src/hed/libs/compute/Job.cpp:612 #, c-format msgid " Used CPU Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:618 #, c-format msgid " Used Wall Time: %s (%s per slot)" msgstr "" #: 
src/hed/libs/compute/Job.cpp:622 #, c-format msgid " Used Wall Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:627 #, c-format msgid " Used Memory: %d" msgstr "" #: src/hed/libs/compute/Job.cpp:631 #, c-format msgid " Results were deleted: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:632 #, c-format msgid " Results must be retrieved before: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:636 #, c-format msgid " Proxy valid until: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:640 #, c-format msgid " Entry valid from: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:643 #, c-format msgid " Entry valid for: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:647 msgid " Old job IDs:" msgstr "" #: src/hed/libs/compute/Job.cpp:655 #, fuzzy, c-format msgid " ID on service: %s" msgstr "Delegation service: %s" #: src/hed/libs/compute/Job.cpp:656 #, c-format msgid " Service information URL: %s (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:657 #, c-format msgid " Job status URL: %s (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:658 #, c-format msgid " Job management URL: %s (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:659 #, fuzzy, c-format msgid " Stagein directory URL: %s" msgstr "Lege Verzeichnis %s an" #: src/hed/libs/compute/Job.cpp:660 #, fuzzy, c-format msgid " Stageout directory URL: %s" msgstr "Fehler beim Listen von Verzeichnis: %s" #: src/hed/libs/compute/Job.cpp:661 #, fuzzy, c-format msgid " Session directory URL: %s" msgstr "Lege Verzeichnis %s an" #: src/hed/libs/compute/Job.cpp:663 #, fuzzy msgid " Delegation IDs:" msgstr "Delegation ID: %s" #: src/hed/libs/compute/Job.cpp:845 #, c-format msgid "Unable to handle job (%s), no interface specified." msgstr "" #: src/hed/libs/compute/Job.cpp:850 #, c-format msgid "" "Unable to handle job (%s), no plugin associated with the specified interface " "(%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:872 #, c-format msgid "Invalid download destination path specified (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:877 #, c-format msgid "" "Unable to download job (%s), no JobControllerPlugin plugin was set to handle " "the job." msgstr "" #: src/hed/libs/compute/Job.cpp:881 #, fuzzy, c-format msgid "Downloading job: %s" msgstr "Herunterladen des Job: %s" #: src/hed/libs/compute/Job.cpp:885 #, c-format msgid "" "Cant retrieve job files for job (%s) - unable to determine URL of stage out " "directory" msgstr "" #: src/hed/libs/compute/Job.cpp:890 #, c-format msgid "Invalid stage out path specified (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:897 #, c-format msgid "%s directory exist! Skipping job." msgstr "" #: src/hed/libs/compute/Job.cpp:903 #, fuzzy, c-format msgid "Unable to retrieve list of job files to download for job %s" msgstr "Konnte status Beschreibung des jobs (%s) nicht erhalten: " #: src/hed/libs/compute/Job.cpp:908 #, fuzzy, c-format msgid "No files to retrieve for job %s" msgstr "Konnte status Beschreibung des jobs (%s) nicht erhalten: " #: src/hed/libs/compute/Job.cpp:914 #, fuzzy, c-format msgid "Failed to create directory %s! Skipping job." 
msgstr "Fehler bei Anlegen/Finden von Verzeichnis %s, (%d)" #: src/hed/libs/compute/Job.cpp:927 #, fuzzy, c-format msgid "Failed downloading %s to %s, destination already exist" msgstr "Fahler bei Herunterladen %s zu %s" #: src/hed/libs/compute/Job.cpp:933 #, fuzzy, c-format msgid "Failed downloading %s to %s, unable to remove existing destination" msgstr "Fehler bei Schreiben zu Ziel" #: src/hed/libs/compute/Job.cpp:939 #, fuzzy, c-format msgid "Failed downloading %s to %s" msgstr "Fahler bei Herunterladen %s zu %s" #: src/hed/libs/compute/Job.cpp:952 #, fuzzy, c-format msgid "Unable to initialize handler for %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/libs/compute/Job.cpp:957 #, c-format msgid "Unable to list files at %s" msgstr "" #: src/hed/libs/compute/Job.cpp:999 msgid "Now copying (from -> to)" msgstr "" #: src/hed/libs/compute/Job.cpp:1000 #, c-format msgid " %s -> %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1015 #, c-format msgid "Unable to initialise connection to source: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1026 #, c-format msgid "Unable to initialise connection to destination: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1045 #, c-format msgid "File download failed: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1084 src/hed/libs/compute/Job.cpp:1113 #: src/hed/libs/compute/Job.cpp:1145 src/hed/libs/compute/Job.cpp:1178 #, fuzzy, c-format msgid "Waiting for lock on file %s" msgstr "Warte vor Antwort" #: src/hed/libs/compute/JobControllerPlugin.cpp:101 #, c-format msgid "JobControllerPlugin plugin \"%s\" not found." msgstr "" #: src/hed/libs/compute/JobControllerPlugin.cpp:110 #, fuzzy, c-format msgid "JobControllerPlugin %s could not be created" msgstr "Keine Job controller Plugins geladen" #: src/hed/libs/compute/JobControllerPlugin.cpp:115 #, fuzzy, c-format msgid "Loaded JobControllerPlugin %s" msgstr "Keine Job controller Plugins geladen" #: src/hed/libs/compute/JobDescription.cpp:22 #, c-format msgid ": %d" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:24 #, c-format msgid ": %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:138 msgid " --- DRY RUN --- " msgstr "" #: src/hed/libs/compute/JobDescription.cpp:148 #, fuzzy, c-format msgid " Annotation: %s" msgstr "Ziel: %s" #: src/hed/libs/compute/JobDescription.cpp:154 #, c-format msgid " Old activity ID: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:160 #, c-format msgid " Argument: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:171 #, c-format msgid " RemoteLogging (optional): %s (%s)" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:174 #, c-format msgid " RemoteLogging: %s (%s)" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:182 #, c-format msgid " Environment.name: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:183 #, c-format msgid " Environment: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:196 #, c-format msgid " PreExecutable.Argument: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:199 #: src/hed/libs/compute/JobDescription.cpp:217 #, c-format msgid " Exit code for successful execution: %d" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:202 #: src/hed/libs/compute/JobDescription.cpp:220 msgid " No exit code for successful execution specified." 
msgstr "" #: src/hed/libs/compute/JobDescription.cpp:214 #, c-format msgid " PostExecutable.Argument: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:230 #, fuzzy, c-format msgid " Access control: %s" msgstr "Zugriffslist location: %s" #: src/hed/libs/compute/JobDescription.cpp:234 #, fuzzy, c-format msgid " Processing start time: %s" msgstr "Verarbeitungstyp nicht unterstützt: %s" #: src/hed/libs/compute/JobDescription.cpp:237 msgid " Notify:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:251 #, fuzzy, c-format msgid " Credential service: %s" msgstr "Delegation service: %s" #: src/hed/libs/compute/JobDescription.cpp:261 msgid " Operating system requirements:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:279 msgid " Computing endpoint requirements:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:292 msgid " Node access: inbound" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:295 msgid " Node access: outbound" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:298 msgid " Node access: inbound and outbound" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:308 msgid " Job requires exclusive execution" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:311 msgid " Job does not require exclusive execution" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:316 msgid " Run time environment requirements:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:328 msgid " Inputfile element:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:329 #: src/hed/libs/compute/JobDescription.cpp:351 #, c-format msgid " Name: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:331 msgid " Is executable: true" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:335 #, fuzzy, c-format msgid " Sources: %s" msgstr "Quelle: %s" #: src/hed/libs/compute/JobDescription.cpp:337 #, fuzzy, c-format msgid " Sources.DelegationID: %s" msgstr "Delegation ID: %s" #: src/hed/libs/compute/JobDescription.cpp:341 #, c-format msgid " Sources.Options: %s = %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:350 msgid " Outputfile element:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:354 #, c-format msgid " Targets: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:356 #, fuzzy, c-format msgid " Targets.DelegationID: %s" msgstr "Delegation ID: %s" #: src/hed/libs/compute/JobDescription.cpp:360 #, c-format msgid " Targets.Options: %s = %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:367 #, fuzzy, c-format msgid " DelegationID element: %s" msgstr "Delegation ID: %s" #: src/hed/libs/compute/JobDescription.cpp:374 #, fuzzy, c-format msgid " Other attributes: [%s], %s" msgstr "Attribut: %s - %s" #: src/hed/libs/compute/JobDescription.cpp:440 msgid "Empty job description source string" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:473 #, fuzzy msgid "No job description parsers available" msgstr "Keine Job Beschreibung als Eingabe benötigt" #: src/hed/libs/compute/JobDescription.cpp:475 #, c-format msgid "" "No job description parsers suitable for handling '%s' language are available" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:483 #, fuzzy, c-format msgid "%s parsing error" msgstr "Fataler Fehler: %s" #: src/hed/libs/compute/JobDescription.cpp:499 #, fuzzy msgid "No job description parser was able to interpret job description" msgstr "Zu sendende Job-Beschreibung : %s" #: src/hed/libs/compute/JobDescription.cpp:509 msgid "" "Job description language is not specified, unable to output description." 
msgstr "" #: src/hed/libs/compute/JobDescription.cpp:521 #, fuzzy, c-format msgid "Generating %s job description output" msgstr "Eine fehler geschah während des Generieres der Job Beschreibung." #: src/hed/libs/compute/JobDescription.cpp:537 #, c-format msgid "Language (%s) not recognized by any job description parsers." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:550 #, fuzzy, c-format msgid "Two input files have identical name '%s'." msgstr "Zwei Dateien haben identische Namen: '%s'." #: src/hed/libs/compute/JobDescription.cpp:569 #: src/hed/libs/compute/JobDescription.cpp:582 #, fuzzy, c-format msgid "Cannot stat local input file '%s'" msgstr "Kann stat-Informationen zu Datei nicht einholen: %s" #: src/hed/libs/compute/JobDescription.cpp:602 #, fuzzy, c-format msgid "Cannot find local input file '%s' (%s)" msgstr "Konnte lokale Inputdateien nicht hochladen" #: src/hed/libs/compute/JobDescription.cpp:644 #, fuzzy msgid "Unable to select runtime environment" msgstr "Kann run time environment nicht auswählen." #: src/hed/libs/compute/JobDescription.cpp:651 #, fuzzy msgid "Unable to select middleware" msgstr "Kann middleware nicht auswählen." #: src/hed/libs/compute/JobDescription.cpp:658 #, fuzzy msgid "Unable to select operating system." msgstr "Kann Operating System nciht auswählen." #: src/hed/libs/compute/JobDescription.cpp:677 #, c-format msgid "No test-job with ID %d found." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:689 #, c-format msgid "Test was defined with ID %d, but some error occurred during parsing it." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:693 #, fuzzy, c-format msgid "No jobdescription resulted at %d test" msgstr "Zu sendende Job-Beschreibung : %s" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:52 #, c-format msgid "JobDescriptionParserPlugin plugin \"%s\" not found." msgstr "" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:59 #, fuzzy, c-format msgid "JobDescriptionParserPlugin %s could not be created" msgstr "JobDescription Klasse ist kein Objekt" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:64 #, fuzzy, c-format msgid "Loaded JobDescriptionParserPlugin %s" msgstr "Gültige JobDescription gefunden" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:328 #, fuzzy msgid "Unable to create temporary directory" msgstr "Fehler bei Anlegen/Finden von Verzeichnis %s, (%d)" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:336 #, fuzzy, c-format msgid "Unable to create data base environment (%s)" msgstr "Kann run time environment nicht auswählen." #: src/hed/libs/compute/JobInformationStorageBDB.cpp:346 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:350 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:354 #, c-format msgid "Unable to set duplicate flags for secondary key DB (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:360 #, fuzzy, c-format msgid "Unable to create job database (%s)" msgstr "Konnte Job Status-Informationen nicht erhalten." 
#: src/hed/libs/compute/JobInformationStorageBDB.cpp:364 #, c-format msgid "Unable to create DB for secondary name keys (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:368 #, fuzzy, c-format msgid "Unable to create DB for secondary endpoint keys (%s)" msgstr "Fehler bei Anlegen von Datei %s zum Schreiben: %s" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:372 #, fuzzy, c-format msgid "Unable to create DB for secondary service info keys (%s)" msgstr "Fehler bei Erstellen von Info-Datei %s: %s" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:377 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:381 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:385 #, c-format msgid "Unable to associate secondary DB with primary DB (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:388 #, fuzzy, c-format msgid "Job database created successfully (%s)" msgstr "erfolgreich angelegt, ID: %s" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:430 #, fuzzy, c-format msgid "Error from BDB: %s: %s" msgstr "Fehler bei Suche nach LFN anhand guid %s: %s" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:433 #, fuzzy, c-format msgid "Error from BDB: %s" msgstr "Error: %s" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:453 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:221 #: src/hed/libs/compute/JobInformationStorageXML.cpp:27 #, c-format msgid "" "Job list file cannot be created: The parent directory (%s) doesn't exist." msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:457 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:225 #: src/hed/libs/compute/JobInformationStorageXML.cpp:31 #, c-format msgid "Job list file cannot be created: %s is not a directory" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:464 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:232 #: src/hed/libs/compute/JobInformationStorageXML.cpp:38 #, c-format msgid "Job list file (%s) is not a regular file" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:502 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:561 #, c-format msgid "Unable to write key/value pair to job database (%s): Key \"%s\"" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:728 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:622 #: src/hed/libs/compute/JobInformationStorageXML.cpp:137 #, fuzzy, c-format msgid "Unable to truncate job database (%s)" msgstr "Konnte status Beschreibung des jobs (%s) nicht erhalten: " #: src/hed/libs/compute/JobInformationStorageBDB.cpp:759 msgid "" "ENOENT: The file or directory does not exist, Or a nonexistent re_source " "file was specified." msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:762 msgid "" "DB_OLD_VERSION: The database cannot be opened without being first upgraded." msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:765 msgid "EEXIST: DB_CREATE and DB_EXCL were specified and the database exists." msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:767 msgid "EINVAL" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:770 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:655 #, fuzzy, c-format msgid "Unable to determine error (%d)" msgstr "Konnter Broker %s nicht laden" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:120 #, fuzzy, c-format msgid "Unable to create data base (%s)" msgstr "Konnte Job Status-Informationen nicht erhalten." 
#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:128 #, fuzzy, c-format msgid "Unable to create jobs table in data base (%s)" msgstr "Konnte Job Status-Informationen nicht erhalten." #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:137 #, fuzzy, c-format msgid "Unable to create jobs_new table in data base (%s)" msgstr "Konnte Job Status-Informationen nicht erhalten." #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:143 #, fuzzy, c-format msgid "Unable to transfer from jobs to jobs_new in data base (%s)" msgstr "Konnte Job Status-Informationen nicht erhalten." #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:149 #, fuzzy, c-format msgid "Unable to drop jobs in data base (%s)" msgstr "Konnte Job Status-Informationen nicht erhalten." #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:155 #, fuzzy, c-format msgid "Unable to rename jobs table in data base (%s)" msgstr "Konnte Job Status-Informationen nicht erhalten." #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:165 #, fuzzy, c-format msgid "Unable to create index for jobs table in data base (%s)" msgstr "Konnte Job Status-Informationen nicht erhalten." #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:173 #, fuzzy, c-format msgid "Failed checking database (%s)" msgstr "Löschen fehlgeschlagen von job: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:175 #, fuzzy, c-format msgid "Job database connection established successfully (%s)" msgstr "erfolgreich angelegt, ID: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:197 #, fuzzy, c-format msgid "Error from SQLite: %s: %s" msgstr "Fehler bei Suche nach LFN anhand guid %s: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:200 #, fuzzy, c-format msgid "Error from SQLite: %s" msgstr "Error: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:362 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:369 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:376 #, fuzzy, c-format msgid "Unable to write records into job database (%s): Id \"%s\"" msgstr "Konnte status Beschreibung des jobs (%s) nicht erhalten: " #: src/hed/libs/compute/JobInformationStorageXML.cpp:51 #: src/hed/libs/compute/JobInformationStorageXML.cpp:223 #: src/hed/libs/compute/JobInformationStorageXML.cpp:264 #, fuzzy, c-format msgid "Waiting for lock on job list file %s" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/hed/libs/compute/JobInformationStorageXML.cpp:162 #, c-format msgid "Will remove %s on service %s." 
msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:36 msgid "Ignoring job, the job ID is empty" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:41 #, c-format msgid "Ignoring job (%s), the management interface name is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:46 #, c-format msgid "Ignoring job (%s), the job management URL is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:51 #, c-format msgid "Ignoring job (%s), the status interface name is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:56 #, c-format msgid "Ignoring job (%s), the job status URL is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:65 #, c-format msgid "Ignoring job (%s), unable to load JobControllerPlugin for %s" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:72 #, c-format msgid "" "Ignoring job (%s), already tried and were unable to load JobControllerPlugin" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:385 #, fuzzy, c-format msgid "Job resubmission failed: Unable to load broker (%s)" msgstr "Hochladen des Jobs schlug fehl, keine weiteren Ziele verfügbar" #: src/hed/libs/compute/JobSupervisor.cpp:400 #, fuzzy msgid "Job resubmission aborted because no resource returned any information" msgstr "" "Hochladen des Jobs abgebrochen, da keine Cluster entsprechende Informationen " "anboten" #: src/hed/libs/compute/JobSupervisor.cpp:421 #, c-format msgid "Unable to resubmit job (%s), unable to parse obtained job description" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:443 #, c-format msgid "" "Unable to resubmit job (%s), target information retrieval failed for target: " "%s" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:469 #, c-format msgid "Unable to resubmit job (%s), no targets applicable for submission" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:504 #, c-format msgid "" "Unable to migrate job (%s), job description could not be retrieved remotely" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:524 #, fuzzy msgid "Job migration aborted, no resource returned any information" msgstr "Job Migration abgebrochen, da kein Cluster Informationen lieferte" #: src/hed/libs/compute/JobSupervisor.cpp:536 #, fuzzy, c-format msgid "Job migration aborted, unable to load broker (%s)" msgstr "Konnter Broker %s nicht laden" #: src/hed/libs/compute/JobSupervisor.cpp:552 #, c-format msgid "Unable to migrate job (%s), unable to parse obtained job description" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:573 #, c-format msgid "Unable to load submission plugin for %s interface" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:583 #, fuzzy, c-format msgid "Job migration failed for job (%s), no applicable targets" msgstr "Hochladen des Jobs schlug fehl, keine weiteren Ziele verfügbar" #: src/hed/libs/compute/Software.cpp:65 src/hed/libs/compute/Software.cpp:94 #: src/hed/libs/compute/Software.cpp:113 #, c-format msgid "%s > %s => false" msgstr "" #: src/hed/libs/compute/Software.cpp:70 src/hed/libs/compute/Software.cpp:83 #: src/hed/libs/compute/Software.cpp:107 #, c-format msgid "%s > %s => true" msgstr "" #: src/hed/libs/compute/Software.cpp:90 src/hed/libs/compute/Software.cpp:102 #, c-format msgid "%s > %s => false: %s contains non numbers in the version part." msgstr "" #: src/hed/libs/compute/Software.cpp:199 src/hed/libs/compute/Software.cpp:210 #, c-format msgid "Requirement \"%s %s\" NOT satisfied." msgstr "" #: src/hed/libs/compute/Software.cpp:205 #, c-format msgid "Requirement \"%s %s\" satisfied." 
msgstr "" #: src/hed/libs/compute/Software.cpp:214 #, c-format msgid "Requirement \"%s %s\" satisfied by \"%s\"." msgstr "" #: src/hed/libs/compute/Software.cpp:219 msgid "All requirements satisfied." msgstr "" #: src/hed/libs/compute/Submitter.cpp:83 #, fuzzy, c-format msgid "Trying to submit directly to endpoint (%s)" msgstr "Versuche den Job erneut hochzuladen zu %s" #: src/hed/libs/compute/Submitter.cpp:88 #, c-format msgid "Interface (%s) specified, submitting only to that interface" msgstr "" #: src/hed/libs/compute/Submitter.cpp:106 #, fuzzy msgid "Trying all available interfaces" msgstr "Erstelle Client Schnitstelle" #: src/hed/libs/compute/Submitter.cpp:112 #, c-format msgid "Trying to submit endpoint (%s) using interface (%s) with plugin (%s)." msgstr "" #: src/hed/libs/compute/Submitter.cpp:116 #, c-format msgid "" "Unable to load plugin (%s) for interface (%s) when trying to submit job " "description." msgstr "" #: src/hed/libs/compute/Submitter.cpp:130 #, c-format msgid "No more interfaces to try for endpoint %s." msgstr "" #: src/hed/libs/compute/Submitter.cpp:336 #, c-format msgid "Target %s does not match requested interface(s)." msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:64 msgid "No stagein URL is provided" msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:83 #, fuzzy, c-format msgid "Failed uploading file %s to %s: %s" msgstr "Fahler bei Herunterladen %s zu %s" #: src/hed/libs/compute/SubmitterPlugin.cpp:116 #, fuzzy, c-format msgid "Trying to migrate to %s: Migration to a %s interface is not supported." msgstr "" "Versuche zu %s zu migrieren: Migration zu einem BES cluster wird nicht " "unterstützt" #: src/hed/libs/compute/SubmitterPlugin.cpp:172 #, c-format msgid "SubmitterPlugin plugin \"%s\" not found." msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:182 #, fuzzy, c-format msgid "SubmitterPlugin %s could not be created" msgstr "Der Job Status konnte nicht ermittelt werden" #: src/hed/libs/compute/SubmitterPlugin.cpp:187 #, c-format msgid "Loaded SubmitterPlugin %s" msgstr "" #: src/hed/libs/compute/examples/basic_job_submission.cpp:28 #, fuzzy msgid "Invalid job description" msgstr "Ungültige JobDescription:" #: src/hed/libs/compute/examples/basic_job_submission.cpp:47 #, fuzzy msgid "Failed to submit job" msgstr "Konnte job nicht starten" #: src/hed/libs/compute/examples/basic_job_submission.cpp:54 #, fuzzy, c-format msgid "Failed to write to local job list %s" msgstr "Fehler bei Schreiben zu Datein %s: %s" #: src/hed/libs/compute/test_jobdescription.cpp:20 msgid "[job description ...]" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:21 msgid "" "This tiny tool can be used for testing the JobDescription's conversion " "abilities." msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:23 msgid "" "The job description also can be a file or a string in ADL or XRSL format." 
msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:27 msgid "define the requested format (nordugrid:xrsl, emies:adl)" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:28 msgid "format" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:33 #, fuzzy msgid "show the original job description" msgstr "" " -o, -stdout вывеÑти файл Ñтандартого выхода задачи (по\n" " умолчанию)" #: src/hed/libs/compute/test_jobdescription.cpp:43 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:71 msgid "Use --help option for detailed usage information" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:50 #, fuzzy msgid " [ JobDescription tester ] " msgstr "Zu sendende Job-Beschreibung : %s" #: src/hed/libs/compute/test_jobdescription.cpp:74 msgid " [ Parsing the original text ] " msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:80 #, fuzzy msgid "Unable to parse." msgstr "Konnter Broker %s nicht laden" #: src/hed/libs/compute/test_jobdescription.cpp:89 msgid " [ emies:adl ] " msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:91 msgid " [ nordugrid:xrsl ] " msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:134 msgid "VOMS command is empty" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:254 #, fuzzy, c-format msgid "OpenSSL error -- %s" msgstr "OpenSSL Fehler -- %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:255 #, c-format msgid "Library : %s" msgstr "Bibliothek : %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:256 #, c-format msgid "Function : %s" msgstr "Funktion : %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:257 #, c-format msgid "Reason : %s" msgstr "Grund : %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:313 msgid "User interface error" msgstr "Benutzungsschnittstellenfehler" #: src/hed/libs/credential/ARCProxyUtil.cpp:319 msgid "Aborted!" msgstr "Abbruch!" 
#: src/hed/libs/credential/ARCProxyUtil.cpp:418 #: src/hed/libs/credential/ARCProxyUtil.cpp:1399 #, fuzzy msgid "Failed to sign proxy" msgstr "Fehler beim Senden von body" #: src/hed/libs/credential/ARCProxyUtil.cpp:437 #: src/hed/libs/credential/Credential.cpp:878 #, c-format msgid "Error: can't open policy file: %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:447 #: src/hed/libs/credential/Credential.cpp:891 #, c-format msgid "Error: policy location: %s is not a regular file" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:546 #, c-format msgid "VOMS line contains wrong number of tokens (%u expected): \"%s\"" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:590 #, c-format msgid "Cannot get VOMS server %s information from the vomses files" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:623 #, fuzzy, c-format msgid "There are %d commands to the same VOMS server %s" msgstr "Es gibt %d Kommandos an denselben VOMS Server %s\n" #: src/hed/libs/credential/ARCProxyUtil.cpp:677 #, c-format msgid "Try to get attribute from VOMS server with order: %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:680 #, fuzzy, c-format msgid "Message sent to VOMS server %s is: %s" msgstr "Warnung: kann nicht verbinden zu RLS server %s: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:699 #: src/hed/libs/credential/ARCProxyUtil.cpp:720 #, c-format msgid "" "The VOMS server with the information:\n" "\t%s\n" "can not be reached, please make sure it is available" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:703 #, fuzzy msgid "No HTTP response from VOMS server" msgstr "Frühe Antwort vom Server" #: src/hed/libs/credential/ARCProxyUtil.cpp:708 #: src/hed/libs/credential/ARCProxyUtil.cpp:734 #, fuzzy, c-format msgid "Returned message from VOMS server: %s" msgstr "Zurückerhaltene Nachricht von myproxy Server: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:724 #, fuzzy msgid "No stream response from VOMS server" msgstr "Frühe Antwort vom Server" #: src/hed/libs/credential/ARCProxyUtil.cpp:746 #, c-format msgid "" "The validity duration of VOMS AC is shortened from %s to %s, due to the " "validity constraint on voms server side.\n" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:749 #, c-format msgid "" "Cannot get any AC or attributes info from VOMS server: %s;\n" " Returned message from VOMS server: %s\n" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:754 #, fuzzy, c-format msgid "Returned message from VOMS server %s is: %s\n" msgstr "Zurückerhaltene Nachricht von myproxy Server: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:776 #, c-format msgid "The attribute information from VOMS server: %s is list as following:" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:788 #, c-format msgid "" "There are %d servers with the same name: %s in your vomses file, but none of " "them can be reached, or can return valid message. But proxy without VOMS AC " "extension will still be generated." 
msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:1321 #: src/hed/libs/credential/ARCProxyUtil.cpp:1428 #, fuzzy msgid "Failed to generate X509 request with NSS" msgstr "Fehler bei Generieren von X509 Token für ausgehende SOAP" #: src/hed/libs/credential/ARCProxyUtil.cpp:1332 #: src/hed/libs/credential/ARCProxyUtil.cpp:1439 #: src/hed/libs/credential/ARCProxyUtil.cpp:1480 #, fuzzy msgid "Failed to create X509 certificate with NSS" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:1344 #: src/hed/libs/credential/ARCProxyUtil.cpp:1451 #: src/hed/libs/credential/ARCProxyUtil.cpp:1504 #, fuzzy msgid "Failed to export X509 certificate from NSS DB" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:1487 #, fuzzy msgid "Failed to import X509 certificate into NSS DB" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:1496 #, fuzzy msgid "Failed to initialize the credential configuration" msgstr "Konnte delegation credentials in Client Konfiguration nicht finden" #: src/hed/libs/credential/CertUtil.cpp:162 #, c-format msgid "Error number in store context: %i" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:163 msgid "Self-signed certificate" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:166 #, fuzzy, c-format msgid "The certificate with subject %s is not valid" msgstr "Cache-Datei %s existiert nicht" #: src/hed/libs/credential/CertUtil.cpp:169 #, c-format msgid "" "Can not find issuer certificate for the certificate with subject %s and " "hash: %lu" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:172 #, c-format msgid "Certificate with subject %s has expired" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:175 #, c-format msgid "" "Untrusted self-signed certificate in chain with subject %s and hash: %lu" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:177 #, c-format msgid "Certificate verification error: %s" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:189 msgid "Can not get the certificate type" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:229 msgid "Couldn't verify availability of CRL" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:242 msgid "In the available CRL the lastUpdate field is not valid" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:249 msgid "The available CRL is not yet valid" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:258 msgid "In the available CRL, the nextUpdate field is not valid" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:264 msgid "The available CRL has expired" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:287 #, c-format msgid "Certificate with serial number %s and subject \"%s\" is revoked" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:305 msgid "" "Directory of trusted CAs is not specified/found; Using current path as the " "CA direcroty" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:314 msgid "Can't allocate memory for CA policy path" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:360 #, c-format msgid "Certificate has unknown extension with numeric ID %u and SN %s" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:374 #: src/hed/libs/credential/Credential.cpp:1697 msgid "" "Can not convert DER encoded PROXY_CERT_INFO_EXTENSION extension to internal " "format" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:420 msgid "Trying to check X509 cert with check_cert_type" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:459 msgid "Can't convert DER encoded PROXYCERTINFO extension to 
internal format" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:463 msgid "Can't get policy from PROXYCERTINFO extension" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:467 msgid "Can't get policy language from PROXYCERTINFO extension" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:499 msgid "The subject does not match the issuer name + proxy CN entry" msgstr "" #: src/hed/libs/credential/Credential.cpp:84 #, fuzzy, c-format msgid "OpenSSL error string: %s" msgstr "Fehler bei Traversieren: %s" #: src/hed/libs/credential/Credential.cpp:205 msgid "Can't get the first byte of input to determine its format" msgstr "" #: src/hed/libs/credential/Credential.cpp:219 #, fuzzy msgid "Can't reset the input" msgstr "Kann Python Liste nicht anlegen" #: src/hed/libs/credential/Credential.cpp:244 #: src/hed/libs/credential/Credential.cpp:280 msgid "Can't get the first byte of input BIO to get its format" msgstr "" #: src/hed/libs/credential/Credential.cpp:256 #, fuzzy msgid "Can not read certificate/key string" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/Credential.cpp:460 #, c-format msgid "Can not find certificate file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:465 #, c-format msgid "Can not read certificate file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:503 msgid "Can not read certificate string" msgstr "" #: src/hed/libs/credential/Credential.cpp:523 msgid "Certificate format is PEM" msgstr "" #: src/hed/libs/credential/Credential.cpp:550 msgid "Certificate format is DER" msgstr "" #: src/hed/libs/credential/Credential.cpp:579 msgid "Certificate format is PKCS" msgstr "" #: src/hed/libs/credential/Credential.cpp:605 msgid "Certificate format is unknown" msgstr "" #: src/hed/libs/credential/Credential.cpp:613 #, c-format msgid "Can not find key file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:618 #, fuzzy, c-format msgid "Can not open key file %s" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/hed/libs/credential/Credential.cpp:637 msgid "Can not read key string" msgstr "" #: src/hed/libs/credential/Credential.cpp:700 #: src/hed/libs/credential/VOMSUtil.cpp:244 msgid "Failed to lock arccredential library in memory" msgstr "" #: src/hed/libs/credential/Credential.cpp:712 msgid "Certificate verification succeeded" msgstr "" #: src/hed/libs/credential/Credential.cpp:716 msgid "Certificate verification failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:729 #: src/hed/libs/credential/Credential.cpp:747 #: src/hed/libs/credential/Credential.cpp:765 #: src/hed/libs/credential/Credential.cpp:996 #: src/hed/libs/credential/Credential.cpp:2368 #: src/hed/libs/credential/Credential.cpp:2397 #, fuzzy msgid "Failed to initialize extensions member for Credential" msgstr "Fehler bei der Initialisierung der delegation credentials" #: src/hed/libs/credential/Credential.cpp:808 #, fuzzy, c-format msgid "Unsupported proxy policy language is requested - %s" msgstr "Nicht-unterstzütztes Protrokoll in URL %s" #: src/hed/libs/credential/Credential.cpp:820 #, fuzzy, c-format msgid "Unsupported proxy version is requested - %s" msgstr "Nicht-unterstzütztes Protrokoll in URL %s" #: src/hed/libs/credential/Credential.cpp:831 msgid "If you specify a policy you also need to specify a policy language" msgstr "" #: src/hed/libs/credential/Credential.cpp:1001 msgid "Certificate/Proxy path is empty" msgstr "" #: src/hed/libs/credential/Credential.cpp:1059 #: src/hed/libs/credential/Credential.cpp:2905 msgid "Failed to 
duplicate extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1063 #, fuzzy msgid "Failed to add extension into credential extensions" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/hed/libs/credential/Credential.cpp:1074 #, fuzzy msgid "Certificate information collection failed" msgstr "Ungültige Authentisierungs-Information" #: src/hed/libs/credential/Credential.cpp:1113 #: src/hed/libs/credential/Credential.cpp:1118 msgid "Can not convert string into ASN1_OBJECT" msgstr "" #: src/hed/libs/credential/Credential.cpp:1125 msgid "Can not create ASN1_OCTET_STRING" msgstr "" #: src/hed/libs/credential/Credential.cpp:1134 #, fuzzy msgid "Can not allocate memory for extension for proxy certificate" msgstr "" "Der Transfer des signierten delegation certificate zu Service schlug fehl" #: src/hed/libs/credential/Credential.cpp:1144 msgid "Can not create extension for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:1180 #: src/hed/libs/credential/Credential.cpp:1348 msgid "BN_set_word failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1189 #: src/hed/libs/credential/Credential.cpp:1357 msgid "RSA_generate_key_ex failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1198 #: src/hed/libs/credential/Credential.cpp:1365 msgid "BN_new || RSA_new failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1209 msgid "Created RSA key, proceeding with request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1214 msgid "pkey and rsa_key exist!" msgstr "" #: src/hed/libs/credential/Credential.cpp:1217 msgid "Generate new X509 request!" msgstr "" #: src/hed/libs/credential/Credential.cpp:1222 msgid "Setting subject name!" msgstr "" #: src/hed/libs/credential/Credential.cpp:1230 #: src/hed/libs/credential/Credential.cpp:1444 msgid "PEM_write_bio_X509_REQ failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1260 #: src/hed/libs/credential/Credential.cpp:1301 #: src/hed/libs/credential/Credential.cpp:1476 #: src/hed/libs/credential/Credential.cpp:1496 msgid "Can not create BIO for request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1278 msgid "Failed to write request into string" msgstr "" #: src/hed/libs/credential/Credential.cpp:1305 #: src/hed/libs/credential/Credential.cpp:1310 #: src/hed/libs/credential/Credential.cpp:1500 msgid "Can not set writable file for request BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:1316 #: src/hed/libs/credential/Credential.cpp:1505 msgid "Wrote request into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1318 #: src/hed/libs/credential/Credential.cpp:1508 msgid "Failed to write request into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1338 msgid "The credential's private key has already been initialized" msgstr "" #: src/hed/libs/credential/Credential.cpp:1386 msgid "" "Can not duplicate the subject name for the self-signing proxy certificate " "request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1396 msgid "Can not create a new X509_NAME_ENTRY for the proxy certificate request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1414 #: src/hed/libs/credential/Credential.cpp:1421 #: src/hed/libs/credential/Credential.cpp:1999 #: src/hed/libs/credential/Credential.cpp:2007 msgid "" "Can not convert PROXY_CERT_INFO_EXTENSION struct from internal to DER " "encoded format" msgstr "" #: src/hed/libs/credential/Credential.cpp:1451 msgid "Can't convert X509 request from internal to DER encoded format" msgstr "" #: 
src/hed/libs/credential/Credential.cpp:1461 msgid "Can not generate X509 request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1463 msgid "Can not set private key" msgstr "" #: src/hed/libs/credential/Credential.cpp:1561 msgid "Failed to get private key" msgstr "" #: src/hed/libs/credential/Credential.cpp:1580 msgid "Failed to get public key from RSA object" msgstr "" #: src/hed/libs/credential/Credential.cpp:1588 msgid "Failed to get public key from X509 object" msgstr "" #: src/hed/libs/credential/Credential.cpp:1595 msgid "Failed to get public key" msgstr "" #: src/hed/libs/credential/Credential.cpp:1633 #, c-format msgid "Certiticate chain number %d" msgstr "" #: src/hed/libs/credential/Credential.cpp:1661 msgid "NULL BIO passed to InquireRequest" msgstr "" #: src/hed/libs/credential/Credential.cpp:1664 msgid "PEM_read_bio_X509_REQ failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1668 msgid "d2i_X509_REQ_bio failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1690 msgid "Missing data in DER encoded PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1702 msgid "Can not create PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1712 msgid "Can not get policy from PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1716 msgid "Can not get policy language from PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1732 #, c-format msgid "Cert Type: %d" msgstr "" #: src/hed/libs/credential/Credential.cpp:1745 #: src/hed/libs/credential/Credential.cpp:1764 msgid "Can not create BIO for parsing request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1750 msgid "Read request from a string" msgstr "" #: src/hed/libs/credential/Credential.cpp:1753 msgid "Failed to read request from a string" msgstr "" #: src/hed/libs/credential/Credential.cpp:1768 msgid "Can not set readable file for request BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:1773 msgid "Read request from a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1776 msgid "Failed to read request from a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1816 msgid "Can not convert private key to DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:1980 msgid "Credential is not initialized" msgstr "" #: src/hed/libs/credential/Credential.cpp:1986 #, fuzzy msgid "Failed to duplicate X509 structure" msgstr "Fehler bei Lesen von Objekt %s" #: src/hed/libs/credential/Credential.cpp:1991 msgid "Failed to initialize X509 structure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2014 msgid "Can not create extension for PROXY_CERT_INFO" msgstr "" #: src/hed/libs/credential/Credential.cpp:2018 #: src/hed/libs/credential/Credential.cpp:2066 msgid "Can not add X509 extension to proxy cert" msgstr "" #: src/hed/libs/credential/Credential.cpp:2034 msgid "Can not convert keyUsage struct from DER encoded format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2046 #: src/hed/libs/credential/Credential.cpp:2055 msgid "Can not convert keyUsage struct from internal to DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2062 #, fuzzy msgid "Can not create extension for keyUsage" msgstr "Kann Funktion %s nicht anlegen" #: src/hed/libs/credential/Credential.cpp:2075 msgid "Can not get extended KeyUsage extension from issuer certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2080 msgid "Can not copy extended KeyUsage 
extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:2085 msgid "Can not add X509 extended KeyUsage extension to new proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2095 msgid "Can not compute digest of public key" msgstr "" #: src/hed/libs/credential/Credential.cpp:2106 msgid "Can not copy the subject name from issuer for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2112 msgid "Can not create name entry CN for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2117 msgid "Can not set CN in proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2125 msgid "Can not set issuer's subject for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2130 msgid "Can not set version number for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2138 msgid "Can not set serial number for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2144 #, fuzzy msgid "Can not duplicate serial number for proxy certificate" msgstr "" "Der Transfer des signierten delegation certificate zu Service schlug fehl" #: src/hed/libs/credential/Credential.cpp:2150 msgid "Can not set the lifetime for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2154 msgid "Can not set pubkey for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2170 #: src/hed/libs/credential/Credential.cpp:2795 msgid "The credential to be signed is NULL" msgstr "" #: src/hed/libs/credential/Credential.cpp:2174 #: src/hed/libs/credential/Credential.cpp:2799 msgid "The credential to be signed contains no request" msgstr "" #: src/hed/libs/credential/Credential.cpp:2178 #: src/hed/libs/credential/Credential.cpp:2803 msgid "The BIO for output is NULL" msgstr "" #: src/hed/libs/credential/Credential.cpp:2192 #: src/hed/libs/credential/Credential.cpp:2810 msgid "Error when extracting public key from request" msgstr "" #: src/hed/libs/credential/Credential.cpp:2197 #: src/hed/libs/credential/Credential.cpp:2814 msgid "Failed to verify the request" msgstr "" #: src/hed/libs/credential/Credential.cpp:2201 msgid "Failed to add issuer's extension into proxy" msgstr "" #: src/hed/libs/credential/Credential.cpp:2225 msgid "Failed to find extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:2237 msgid "Can not get the issuer's private key" msgstr "" #: src/hed/libs/credential/Credential.cpp:2244 #: src/hed/libs/credential/Credential.cpp:2846 msgid "There is no digest in issuer's private key object" msgstr "" #: src/hed/libs/credential/Credential.cpp:2249 #: src/hed/libs/credential/Credential.cpp:2850 #, c-format msgid "%s is an unsupported digest type" msgstr "" #: src/hed/libs/credential/Credential.cpp:2260 #, c-format msgid "" "The signing algorithm %s is not allowed,it should be SHA1 or SHA2 to sign " "certificate requests" msgstr "" #: src/hed/libs/credential/Credential.cpp:2266 msgid "Failed to sign the proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2268 msgid "Succeeded to sign the proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2273 msgid "Failed to verify the signed certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2275 msgid "Succeeded to verify the signed certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2280 #: src/hed/libs/credential/Credential.cpp:2289 msgid "Output the proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2283 msgid "Can not convert signed 
proxy cert into PEM format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2292 msgid "Can not convert signed proxy cert into DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2308 #: src/hed/libs/credential/Credential.cpp:2331 msgid "Can not create BIO for signed proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2335 msgid "Can not set writable file for signed proxy certificate BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:2340 msgid "Wrote signed proxy certificate into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:2343 msgid "Failed to write signed proxy certificate into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:2377 #: src/hed/libs/credential/Credential.cpp:2415 #, fuzzy, c-format msgid "ERROR: %s" msgstr "HER: %s" #: src/hed/libs/credential/Credential.cpp:2423 #, c-format msgid "SSL error: %s, libs: %s, func: %s, reason: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2468 #, c-format msgid "unable to load number from: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2473 msgid "error converting number from bin to BIGNUM" msgstr "" #: src/hed/libs/credential/Credential.cpp:2500 msgid "file name too long" msgstr "" #: src/hed/libs/credential/Credential.cpp:2523 msgid "error converting serial to ASN.1 format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2556 #, c-format msgid "load serial from %s failure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2561 msgid "add_word failure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2566 #, c-format msgid "save serial to %s failure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2586 msgid "Error initialising X509 store" msgstr "" #: src/hed/libs/credential/Credential.cpp:2593 msgid "Out of memory when generate random serial" msgstr "" #: src/hed/libs/credential/Credential.cpp:2605 msgid "CA certificate and CA private key do not match" msgstr "" #: src/hed/libs/credential/Credential.cpp:2629 #, fuzzy, c-format msgid "Failed to load extension section: %s" msgstr "Fehler bei Schließen von Verbindung 1" #: src/hed/libs/credential/Credential.cpp:2666 msgid "malloc error" msgstr "" #: src/hed/libs/credential/Credential.cpp:2670 msgid "Subject does not start with '/'" msgstr "" #: src/hed/libs/credential/Credential.cpp:2686 #: src/hed/libs/credential/Credential.cpp:2707 msgid "escape character at end of string" msgstr "" #: src/hed/libs/credential/Credential.cpp:2698 #, c-format msgid "" "end of string encountered while processing type of subject name element #%d" msgstr "" #: src/hed/libs/credential/Credential.cpp:2735 #, c-format msgid "Subject Attribute %s has no known NID, skipped" msgstr "" #: src/hed/libs/credential/Credential.cpp:2739 #, c-format msgid "No value provided for Subject Attribute %s skipped" msgstr "" #: src/hed/libs/credential/Credential.cpp:2780 msgid "Failed to set the pubkey for X509 object by using pubkey from X509_REQ" msgstr "" #: src/hed/libs/credential/Credential.cpp:2790 msgid "The private key for signing is not initialized" msgstr "" #: src/hed/libs/credential/Credential.cpp:2869 #, c-format msgid "Error when loading the extension config file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2873 #, c-format msgid "Error when loading the extension config file: %s on line: %d" msgstr "" #: src/hed/libs/credential/Credential.cpp:2921 msgid "Can not sign a EEC" msgstr "" #: src/hed/libs/credential/Credential.cpp:2925 msgid "Output EEC certificate" msgstr "" #: 
src/hed/libs/credential/Credential.cpp:2928 msgid "Can not convert signed EEC cert into DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2942 #: src/hed/libs/credential/Credential.cpp:2961 msgid "Can not create BIO for signed EEC certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2965 msgid "Can not set writable file for signed EEC certificate BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:2970 msgid "Wrote signed EEC certificate into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:2973 msgid "Failed to write signed EEC certificate into a file" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:143 #, fuzzy msgid "Error writing raw certificate" msgstr "Fehler beim Listen der replicas: %s" #: src/hed/libs/credential/NSSUtil.cpp:222 #, fuzzy msgid "Failed to add RFC proxy OID" msgstr "Fehler bei Lesen von Proxy-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:225 #, c-format msgid "Succeeded to add RFC proxy OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:231 msgid "Failed to add anyLanguage OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:234 #, c-format msgid "Succeeded to add anyLanguage OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:240 #, fuzzy msgid "Failed to add inheritAll OID" msgstr "Fehler bei Lesen von Objekt %s" #: src/hed/libs/credential/NSSUtil.cpp:243 #, c-format msgid "Succeeded to add inheritAll OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:249 #, fuzzy msgid "Failed to add Independent OID" msgstr "Fehler bei Lesen von Objekt %s" #: src/hed/libs/credential/NSSUtil.cpp:252 #, fuzzy, c-format msgid "Succeeded to add Independent OID, tag %d is returned" msgstr "Fehler bei Lesen von Objekt %s" #: src/hed/libs/credential/NSSUtil.cpp:258 msgid "Failed to add VOMS AC sequence OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:261 #, c-format msgid "Succeeded to add VOMS AC sequence OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:290 #, c-format msgid "NSS initialization failed on certificate database: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:301 #, fuzzy msgid "Succeeded to initialize NSS" msgstr "Erfolreiche Anthentifikation von SAMLTOken" #: src/hed/libs/credential/NSSUtil.cpp:323 #, fuzzy, c-format msgid "Failed to read attribute %x from private key." 
msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:375 #, fuzzy msgid "Succeeded to get credential" msgstr "Erfolreiche Anthentifikation von SAMLTOken" #: src/hed/libs/credential/NSSUtil.cpp:376 #, fuzzy msgid "Failed to get credential" msgstr "Fehler bei Bezug von FTP Datei" #: src/hed/libs/credential/NSSUtil.cpp:438 #, fuzzy msgid "p12 file is empty" msgstr "Policy is leer" #: src/hed/libs/credential/NSSUtil.cpp:448 #, fuzzy msgid "Unable to write to p12 file" msgstr "Fehler bei Ablage von FTP Datei" #: src/hed/libs/credential/NSSUtil.cpp:464 #, fuzzy msgid "Failed to open p12 file" msgstr "Fehler bei Ablage von FTP Datei" #: src/hed/libs/credential/NSSUtil.cpp:492 #, fuzzy msgid "Failed to allocate p12 context" msgstr "Fehler bei Reservieren von Platz" #: src/hed/libs/credential/NSSUtil.cpp:1200 #, fuzzy msgid "Failed to find issuer certificate for proxy certificate" msgstr "" "Der Transfer des signierten delegation certificate zu Service schlug fehl" #: src/hed/libs/credential/NSSUtil.cpp:1351 #, fuzzy, c-format msgid "Failed to authenticate to PKCS11 slot %s" msgstr "Fehler beim Authentifizieren: %s" #: src/hed/libs/credential/NSSUtil.cpp:1357 #, fuzzy, c-format msgid "Failed to find certificates by nickname: %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1362 #, c-format msgid "No user certificate by nickname %s found" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1375 #: src/hed/libs/credential/NSSUtil.cpp:1411 #, fuzzy msgid "Certificate does not have a slot" msgstr "Cache-Datei %s existiert nicht" #: src/hed/libs/credential/NSSUtil.cpp:1381 #, fuzzy msgid "Failed to create export context" msgstr "Fehler bei Anlegen von GSI Context: %s" #: src/hed/libs/credential/NSSUtil.cpp:1396 msgid "PKCS12 output password not provided" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1403 msgid "PKCS12 add password integrity failed" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1424 #, fuzzy msgid "Failed to create key or certificate safe" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1440 #, fuzzy msgid "Failed to add certificate and key" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1449 #, fuzzy, c-format msgid "Failed to initialize PKCS12 file: %s" msgstr "Fehler bei Schreiben zu Datein %s: %s" #: src/hed/libs/credential/NSSUtil.cpp:1454 #, fuzzy msgid "Failed to encode PKCS12" msgstr "Fehler beim Senden von body" #: src/hed/libs/credential/NSSUtil.cpp:1457 msgid "Succeeded to export PKCS12" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1485 #, c-format msgid "" "There is no certificate named %s found, the certificate could be removed " "when generating CSR" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1491 #, fuzzy msgid "Failed to delete certificate" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1505 msgid "The name of the private key to delete is empty" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1510 #: src/hed/libs/credential/NSSUtil.cpp:2939 #: src/hed/libs/credential/NSSUtil.cpp:2956 #, fuzzy, c-format msgid "Failed to authenticate to token %s" msgstr "Fehler beim Authentifizieren: %s" #: src/hed/libs/credential/NSSUtil.cpp:1517 #, c-format msgid "No private key with nickname %s exist in NSS database" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1550 #, fuzzy msgid "Failed to delete private key and certificate" msgstr "Fehler bei Lesen von 
privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1560 #, fuzzy msgid "Failed to delete private key" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1571 #, fuzzy, c-format msgid "Can not find key with name: %s" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/hed/libs/credential/NSSUtil.cpp:1599 msgid "Can not read PEM private key: probably bad password" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1601 #, fuzzy msgid "Can not read PEM private key: failed to decrypt" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1603 #: src/hed/libs/credential/NSSUtil.cpp:1605 #, fuzzy msgid "Can not read PEM private key: failed to obtain password" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1606 #, fuzzy msgid "Can not read PEM private key" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1613 msgid "Failed to convert EVP_PKEY to PKCS8" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1650 #, fuzzy msgid "Failed to load private key" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1651 msgid "Succeeded to load PrivateKeyInfo" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1654 #, fuzzy msgid "Failed to convert PrivateKeyInfo to EVP_PKEY" msgstr "Fehler bei Konvertieren von security information für ARC policy" #: src/hed/libs/credential/NSSUtil.cpp:1655 msgid "Succeeded to convert PrivateKeyInfo to EVP_PKEY" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1692 #, fuzzy msgid "Failed to import private key" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1695 #, fuzzy msgid "Succeeded to import private key" msgstr "Erfolreiche Anthentifikation von SAMLTOken" #: src/hed/libs/credential/NSSUtil.cpp:1708 #: src/hed/libs/credential/NSSUtil.cpp:1750 #: src/hed/libs/credential/NSSUtil.cpp:2889 #, fuzzy msgid "Failed to authenticate to key database" msgstr "Fehler beim Authentifizieren: %s" #: src/hed/libs/credential/NSSUtil.cpp:1717 #, fuzzy msgid "Succeeded to generate public/private key pair" msgstr "Erfolreiche Anthentifikation von SAMLTOken" #: src/hed/libs/credential/NSSUtil.cpp:1719 #, fuzzy msgid "Failed to generate public/private key pair" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1724 #, fuzzy msgid "Failed to export private key" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1791 #, fuzzy msgid "Failed to create subject name" msgstr "Fehler bei Lesen von Objekt %s" #: src/hed/libs/credential/NSSUtil.cpp:1807 #, fuzzy msgid "Failed to create certificate request" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:1820 #, fuzzy msgid "Failed to call PORT_NewArena" msgstr "Fehler beim Reservieren von Speicher" #: src/hed/libs/credential/NSSUtil.cpp:1828 #, fuzzy msgid "Failed to encode the certificate request with DER format" msgstr "Fehler beim Signieren der Anfrage nach Austellen eines Zertifikats" #: src/hed/libs/credential/NSSUtil.cpp:1835 msgid "Unknown key or hash type" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1841 #, fuzzy msgid "Failed to sign the certificate request" msgstr "Fehler beim Signieren der Anfrage nach Austellen eines Zertifikats" #: src/hed/libs/credential/NSSUtil.cpp:1857 #, fuzzy msgid 
"Failed to output the certificate request as ASCII format" msgstr "Fehler beim Signieren der Anfrage nach Austellen eines Zertifikats" #: src/hed/libs/credential/NSSUtil.cpp:1866 #, fuzzy msgid "Failed to output the certificate request as DER format" msgstr "Fehler beim Signieren der Anfrage nach Austellen eines Zertifikats" #: src/hed/libs/credential/NSSUtil.cpp:1875 #, fuzzy, c-format msgid "Succeeded to output the certificate request into %s" msgstr "Erfolgreiche Authentifikation des UsernameToken" #: src/hed/libs/credential/NSSUtil.cpp:1914 #: src/hed/libs/credential/NSSUtil.cpp:1951 #, fuzzy msgid "Failed to read data from input file" msgstr "Konnte Metadaen für Datei %s nicht finden" #: src/hed/libs/credential/NSSUtil.cpp:1930 msgid "Input is without trailer\n" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1941 #, fuzzy msgid "Failed to convert ASCII to DER" msgstr "Fehler beim Verbinden zu RLS server: %s" #: src/hed/libs/credential/NSSUtil.cpp:1992 msgid "Certificate request is invalid" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2212 #, fuzzy, c-format msgid "The policy language: %s is not supported" msgstr "Der Erhalt von BES Jobs wird nicht unterstützt" #: src/hed/libs/credential/NSSUtil.cpp:2220 #: src/hed/libs/credential/NSSUtil.cpp:2245 #: src/hed/libs/credential/NSSUtil.cpp:2268 #: src/hed/libs/credential/NSSUtil.cpp:2290 #, fuzzy msgid "Failed to new arena" msgstr "Fehler bei Transfer von Daten" #: src/hed/libs/credential/NSSUtil.cpp:2229 #: src/hed/libs/credential/NSSUtil.cpp:2254 #, fuzzy msgid "Failed to create path length" msgstr "Fehler bei Anlegen von soft link: %s" #: src/hed/libs/credential/NSSUtil.cpp:2232 #: src/hed/libs/credential/NSSUtil.cpp:2257 #: src/hed/libs/credential/NSSUtil.cpp:2277 #: src/hed/libs/credential/NSSUtil.cpp:2299 #, fuzzy msgid "Failed to create policy language" msgstr "Fehler bei Anlegen von soft link: %s" #: src/hed/libs/credential/NSSUtil.cpp:2700 #, fuzzy, c-format msgid "Failed to parse certificate request from CSR file %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2707 #, fuzzy, c-format msgid "Can not find certificate with name %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2739 #, fuzzy msgid "Can not allocate memory" msgstr "Fehler beim Reservieren von Speicher" #: src/hed/libs/credential/NSSUtil.cpp:2747 #, fuzzy, c-format msgid "Proxy subject: %s" msgstr "Subjekt: %s" #: src/hed/libs/credential/NSSUtil.cpp:2764 #, fuzzy msgid "Failed to start certificate extension" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2769 #, fuzzy msgid "Failed to add key usage extension" msgstr "Fehler beim Lesen von SSL Token während Authentifizierung" #: src/hed/libs/credential/NSSUtil.cpp:2774 #, fuzzy msgid "Failed to add proxy certificate information extension" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2778 #, fuzzy msgid "Failed to add voms AC extension" msgstr "Fehler bei Lesen von Objekt %s" #: src/hed/libs/credential/NSSUtil.cpp:2798 #, fuzzy msgid "Failed to retrieve private key for issuer" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2805 msgid "Unknown key or hash type of issuer" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2811 #, fuzzy msgid "Failed to set signature algorithm ID" msgstr "Fehler bei der Überprüfung der Signatur unter " #: src/hed/libs/credential/NSSUtil.cpp:2823 #, fuzzy msgid 
"Failed to encode certificate" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2829 #, fuzzy msgid "Failed to allocate item for certificate data" msgstr "Fehler beim Reservieren von Speicher" #: src/hed/libs/credential/NSSUtil.cpp:2835 #, fuzzy msgid "Failed to sign encoded certificate data" msgstr "Fehler beim Signieren der Anfrage nach Austellen eines Zertifikats" #: src/hed/libs/credential/NSSUtil.cpp:2844 #, fuzzy, c-format msgid "Failed to open file %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/libs/credential/NSSUtil.cpp:2855 #, fuzzy, c-format msgid "Succeeded to output certificate to %s" msgstr "Erfolreiche Anthentifikation von SAMLTOken" #: src/hed/libs/credential/NSSUtil.cpp:2896 #, fuzzy, c-format msgid "Failed to open input certificate file %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2913 #, fuzzy msgid "Failed to read input certificate file" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2918 #, fuzzy msgid "Failed to get certificate from certificate file" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2925 #, fuzzy msgid "Failed to allocate certificate trust" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2930 #, fuzzy msgid "Failed to decode trust string" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/libs/credential/NSSUtil.cpp:2944 #: src/hed/libs/credential/NSSUtil.cpp:2961 #, fuzzy msgid "Failed to add certificate to token or database" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2947 #: src/hed/libs/credential/NSSUtil.cpp:2950 #, fuzzy msgid "Succeeded to import certificate" msgstr "Erfolreiche Anthentifikation von SAMLTOken" #: src/hed/libs/credential/NSSUtil.cpp:2964 #: src/hed/libs/credential/NSSUtil.cpp:2967 #, c-format msgid "Succeeded to change trusts to: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2994 #, fuzzy, c-format msgid "Failed to import private key from file: %s" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/credential/NSSUtil.cpp:2996 #, fuzzy, c-format msgid "Failed to import certificate from file: %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/credential/VOMSConfig.cpp:142 #, c-format msgid "" "ERROR: VOMS configuration line contains too many tokens. Expecting 5 or 6. " "Line was: %s" msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:158 #, c-format msgid "" "ERROR: file tree is too deep while scanning VOMS configuration. Max allowed " "nesting is %i." msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:176 #, c-format msgid "ERROR: failed to read file %s while scanning VOMS configuration." msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:181 #, c-format msgid "" "ERROR: VOMS configuration file %s contains too many lines. Max supported " "number is %i." msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:188 #, c-format msgid "" "ERROR: VOMS configuration file %s contains too long line(s). Max supported " "length is %i characters." 
msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:171 #, fuzzy, c-format msgid "Failed to create OpenSSL object %s %s - %u %s" msgstr "Fehler bei Lesen von Objekt %s: %s" #: src/hed/libs/credential/VOMSUtil.cpp:179 #, fuzzy, c-format msgid "Failed to obtain OpenSSL identifier for %s" msgstr "Konnte Listing nicht via FTP beziehen: %s" #: src/hed/libs/credential/VOMSUtil.cpp:332 #, c-format msgid "VOMS: create FQAN: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:370 #, c-format msgid "VOMS: create attribute: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:651 msgid "VOMS: Can not allocate memory for parsing AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:659 msgid "VOMS: Can not allocate memory for storing the order of AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:685 msgid "VOMS: Can not parse AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:715 msgid "VOMS: CA directory or CA file must be provided" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:779 msgid "VOMS: failed to verify AC signature" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:848 #, c-format msgid "VOMS: trust chain to check: %s " msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:856 #, c-format msgid "" "VOMS: the DN in certificate: %s does not match that in trusted DN list: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:862 #, c-format msgid "" "VOMS: the Issuer identity in certificate: %s does not match that in trusted " "DN list: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:897 #, fuzzy, c-format msgid "VOMS: The lsc file %s does not exist" msgstr "Cache-Datei %s existiert nicht" #: src/hed/libs/credential/VOMSUtil.cpp:903 #, c-format msgid "VOMS: The lsc file %s can not be open" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:951 msgid "" "VOMS: there is no constraints of trusted voms DNs, the certificates stack in " "AC will not be checked." 
msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:984 msgid "VOMS: unable to match certificate chain against VOMS trusted DNs" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1004 #, fuzzy msgid "VOMS: AC signature verification failed" msgstr "SOAP Aufruf fehlgeschlagen" #: src/hed/libs/credential/VOMSUtil.cpp:1013 msgid "VOMS: unable to verify certificate chain" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1019 #, c-format msgid "VOMS: cannot validate AC issuer for VO %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1042 #, c-format msgid "VOMS: directory for trusted service certificates: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1068 #, c-format msgid "VOMS: Cannot find certificate of AC issuer for VO %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1090 msgid "VOMS: Can not find AC_ATTR with IETFATTR type" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1097 msgid "VOMS: case of multiple IETFATTR attributes not supported" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1107 msgid "VOMS: case of multiple policyAuthority not supported" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1123 msgid "VOMS: the format of policyAuthority is unsupported - expecting URI" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1132 msgid "" "VOMS: the format of IETFATTRVAL is not supported - expecting OCTET STRING" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1189 msgid "VOMS: the grantor attribute is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1207 msgid "VOMS: the attribute name is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1213 #, c-format msgid "VOMS: the attribute value for %s is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1218 msgid "VOMS: the attribute qualifier is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1250 msgid "" "VOMS: both idcenoRevAvail and authorityKeyIdentifier certificate extensions " "must be present" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1284 #, c-format msgid "VOMS: FQDN of this host %s does not match any target in AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1289 msgid "VOMS: the only supported critical extension of the AC is idceTargets" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1304 msgid "VOMS: failed to parse attributes from AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1348 msgid "VOMS: authorityKey is wrong" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1376 msgid "VOMS: missing AC parts" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1393 msgid "VOMS: unsupported time format in AC - expecting GENERALIZED TIME" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1399 msgid "VOMS: AC is not yet valid" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1406 msgid "VOMS: AC has expired" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1421 msgid "VOMS: AC is not complete - missing Serial or Issuer information" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1426 #, c-format msgid "VOMS: the holder serial number is: %lx" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1427 #, c-format msgid "VOMS: the serial number in AC is: %lx" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1430 #, c-format msgid "" "VOMS: the holder serial number %lx is not the same as the serial number in " "AC %lx, the holder certificate that is used to create a voms proxy could be " "a proxy certificate with a different serial number as the original EEC cert" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1439 msgid "VOMS: the holder information in AC is wrong" 
msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1461 #, c-format msgid "VOMS: DN of holder in AC: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1462 #, c-format msgid "VOMS: DN of holder: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1463 #, c-format msgid "VOMS: DN of issuer: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1470 msgid "" "VOMS: the holder name in AC is not related to the distinguished name in " "holder certificate" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1482 #: src/hed/libs/credential/VOMSUtil.cpp:1489 msgid "VOMS: the holder issuerUID is not the same as that in AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1502 msgid "VOMS: the holder issuer name is not the same as that in AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1512 msgid "VOMS: the issuer information in AC is wrong" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1520 #, c-format msgid "VOMS: the issuer name %s is not the same as that in AC - %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1528 msgid "" "VOMS: the serial number of AC INFO is too long - expecting no more than 20 " "octets" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1558 #: src/hed/libs/credential/VOMSUtil.cpp:1566 #: src/hed/libs/credential/VOMSUtil.cpp:1574 #: src/hed/libs/credential/VOMSUtil.cpp:1582 #: src/hed/libs/credential/VOMSUtil.cpp:1605 msgid "VOMS: unable to extract VO name from AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1596 #, c-format msgid "VOMS: unable to determine hostname of AC from VO name: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1615 msgid "VOMS: can not verify the signature of the AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1621 msgid "VOMS: problems while parsing information in AC" msgstr "" #: src/hed/libs/credential/test/VOMSUtilTest.cpp:128 #, c-format msgid "Line %d.%d of the attributes returned: %s" msgstr "" #: src/hed/libs/credentialstore/ClientVOMS.cpp:149 msgid "voms" msgstr "" #: src/hed/libs/credentialstore/CredentialStore.cpp:194 #: src/hed/libs/credentialstore/CredentialStore.cpp:245 #: src/hed/libs/credentialstore/CredentialStore.cpp:273 #: src/hed/libs/credentialstore/CredentialStore.cpp:336 #: src/hed/libs/credentialstore/CredentialStore.cpp:376 #: src/hed/libs/credentialstore/CredentialStore.cpp:406 #, fuzzy, c-format msgid "MyProxy failure: %s" msgstr "Proxy Pfad: %s" #: src/hed/libs/crypto/OpenSSL.cpp:71 #, c-format msgid "SSL error: %d - %s:%s:%s" msgstr "SSL Fehler: %d - %s:%s:%s" #: src/hed/libs/crypto/OpenSSL.cpp:84 #, fuzzy msgid "SSL locks not initialized" msgstr "FATAL: SSL Locks nicht initialisiert" #: src/hed/libs/crypto/OpenSSL.cpp:88 #, fuzzy, c-format msgid "wrong SSL lock requested: %i of %i: %i - %s" msgstr "FATAL: falsches SSL lock angefragt: %i von %i: %i - %s" #: src/hed/libs/crypto/OpenSSL.cpp:111 #, fuzzy msgid "Failed to lock arccrypto library in memory" msgstr "Fehler bei Lock von arccrypto Bibliothek in Speicher" #: src/hed/libs/crypto/OpenSSL.cpp:116 src/hed/libs/crypto/OpenSSL.cpp:130 msgid "Failed to initialize OpenSSL library" msgstr "Fehler bei Initialisierung von OpenSSL Bibliothek" #: src/hed/libs/crypto/OpenSSL.cpp:152 msgid "Number of OpenSSL locks changed - reinitializing" msgstr "Anzahl von OpenSSL locks verändert - reinitialisierung" #: src/hed/libs/data/DataExternalHelper.cpp:157 #, fuzzy msgid "failed to read data tag" msgstr "Fehler bei Lesen von Daten" #: src/hed/libs/data/DataExternalHelper.cpp:161 msgid "waiting for data chunk" msgstr "" #: 
src/hed/libs/data/DataExternalHelper.cpp:163 #, fuzzy msgid "failed to read data chunk" msgstr "Fehler bei Öffnen von Datenkanal" #: src/hed/libs/data/DataExternalHelper.cpp:171 #, c-format msgid "data chunk: %llu %llu" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:242 #, c-format msgid "DataMove::Transfer: using supplied checksum %s" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:361 msgid "Expecting Module, Command and URL provided" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:368 msgid "Expecting Command module path among arguments" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:372 msgid "Expecting Command module name among arguments" msgstr "" #: src/hed/libs/data/DataMover.cpp:115 msgid "No locations found - probably no more physical instances" msgstr "" #: src/hed/libs/data/DataMover.cpp:121 src/hed/libs/data/FileCache.cpp:552 #: src/libs/data-staging/Processor.cpp:443 #: src/libs/data-staging/Processor.cpp:457 #, c-format msgid "Removing %s" msgstr "Entferne %s" #: src/hed/libs/data/DataMover.cpp:134 msgid "This instance was already deleted" msgstr "" #: src/hed/libs/data/DataMover.cpp:140 msgid "Failed to delete physical file" msgstr "" #: src/hed/libs/data/DataMover.cpp:151 #, c-format msgid "Removing metadata in %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:155 msgid "Failed to delete meta-information" msgstr "" #: src/hed/libs/data/DataMover.cpp:169 msgid "Failed to remove all physical instances" msgstr "" #: src/hed/libs/data/DataMover.cpp:173 #, c-format msgid "Removing logical file from metadata %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:176 msgid "Failed to delete logical file" msgstr "" #: src/hed/libs/data/DataMover.cpp:183 msgid "Failed to remove instance" msgstr "" #: src/hed/libs/data/DataMover.cpp:232 msgid "DataMover::Transfer : starting new thread" msgstr "" #: src/hed/libs/data/DataMover.cpp:260 #, c-format msgid "Transfer from %s to %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:262 msgid "Not valid source" msgstr "" #: src/hed/libs/data/DataMover.cpp:267 msgid "Not valid destination" msgstr "" #: src/hed/libs/data/DataMover.cpp:287 src/services/candypond/CandyPond.cpp:304 #, c-format msgid "Couldn't handle certificate: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:296 src/hed/libs/data/DataMover.cpp:590 #: src/libs/data-staging/Processor.cpp:133 #, c-format msgid "File %s is cached (%s) - checking permissions" msgstr "" #: src/hed/libs/data/DataMover.cpp:300 src/hed/libs/data/DataMover.cpp:609 #: src/hed/libs/data/DataMover.cpp:667 src/libs/data-staging/Processor.cpp:152 msgid "Permission checking passed" msgstr "" #: src/hed/libs/data/DataMover.cpp:301 src/hed/libs/data/DataMover.cpp:628 #: src/hed/libs/data/DataMover.cpp:1144 msgid "Linking/copying cached file" msgstr "" #: src/hed/libs/data/DataMover.cpp:325 #, c-format msgid "No locations for source found: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:329 #, c-format msgid "Failed to resolve source: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:341 src/hed/libs/data/DataMover.cpp:409 #, c-format msgid "No locations for destination found: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:346 src/hed/libs/data/DataMover.cpp:413 #, c-format msgid "Failed to resolve destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:361 #, c-format msgid "No locations for destination different from source found: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:382 #, c-format msgid "DataMover::Transfer: trying to destroy/overwrite destination: %s" msgstr "" #: 
src/hed/libs/data/DataMover.cpp:393 #, c-format msgid "Failed to delete %s but will still try to copy" msgstr "" #: src/hed/libs/data/DataMover.cpp:396 #, c-format msgid "Failed to delete %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:423 #, c-format msgid "Deleted but still have locations at %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:435 msgid "DataMover: cycle" msgstr "" #: src/hed/libs/data/DataMover.cpp:437 msgid "DataMover: no retries requested - exit" msgstr "" #: src/hed/libs/data/DataMover.cpp:442 msgid "DataMover: source out of tries - exit" msgstr "" #: src/hed/libs/data/DataMover.cpp:444 msgid "DataMover: destination out of tries - exit" msgstr "" #: src/hed/libs/data/DataMover.cpp:452 #, c-format msgid "Real transfer from %s to %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:478 #, c-format msgid "Creating buffer: %lli x %i" msgstr "" #: src/hed/libs/data/DataMover.cpp:494 #, c-format msgid "DataMove::Transfer: no checksum calculation for %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:499 #, c-format msgid "DataMove::Transfer: using supplied checksum %s:%s" msgstr "" #: src/hed/libs/data/DataMover.cpp:523 #, c-format msgid "DataMove::Transfer: will calculate %s checksum" msgstr "" #: src/hed/libs/data/DataMover.cpp:528 msgid "Buffer creation failed !" msgstr "" #: src/hed/libs/data/DataMover.cpp:551 #, c-format msgid "URL is mapped to: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:579 src/hed/libs/data/DataMover.cpp:637 #: src/libs/data-staging/Processor.cpp:88 msgid "Cached file is locked - should retry" msgstr "" #: src/hed/libs/data/DataMover.cpp:584 src/libs/data-staging/Processor.cpp:106 msgid "Failed to initiate cache" msgstr "" #: src/hed/libs/data/DataMover.cpp:601 src/services/candypond/CandyPond.cpp:379 #, c-format msgid "Permission checking failed: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:603 src/hed/libs/data/DataMover.cpp:661 #: src/hed/libs/data/DataMover.cpp:681 src/hed/libs/data/DataMover.cpp:692 msgid "source.next_location" msgstr "" #: src/hed/libs/data/DataMover.cpp:617 src/libs/data-staging/Processor.cpp:157 #, fuzzy, c-format msgid "Source modification date: %s" msgstr "Anlegen von Socket schlug fehl: %s" #: src/hed/libs/data/DataMover.cpp:618 src/libs/data-staging/Processor.cpp:158 #, c-format msgid "Cache creation date: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:624 src/libs/data-staging/Processor.cpp:163 msgid "Cached file is outdated, will re-download" msgstr "" #: src/hed/libs/data/DataMover.cpp:627 src/libs/data-staging/Processor.cpp:168 msgid "Cached copy is still valid" msgstr "" #: src/hed/libs/data/DataMover.cpp:654 msgid "URL is mapped to local access - checking permissions on original URL" msgstr "" #: src/hed/libs/data/DataMover.cpp:658 #, c-format msgid "Permission checking on original URL failed: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:669 msgid "Linking local file" msgstr "" #: src/hed/libs/data/DataMover.cpp:689 #, c-format msgid "Failed to make symbolic link %s to %s : %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:698 #, fuzzy, c-format msgid "Failed to change owner of symbolic link %s to %i" msgstr "Fehler bei Ändernn des Owner von hard link zu %i: %s" #: src/hed/libs/data/DataMover.cpp:709 #, c-format msgid "cache file: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:735 #, fuzzy, c-format msgid "Failed to stat source %s" msgstr "Fehler bei Lesen von Objekt %s" #: src/hed/libs/data/DataMover.cpp:737 src/hed/libs/data/DataMover.cpp:750 #: src/hed/libs/data/DataMover.cpp:781 
src/hed/libs/data/DataMover.cpp:800 #: src/hed/libs/data/DataMover.cpp:822 src/hed/libs/data/DataMover.cpp:839 #: src/hed/libs/data/DataMover.cpp:996 src/hed/libs/data/DataMover.cpp:1028 #: src/hed/libs/data/DataMover.cpp:1038 src/hed/libs/data/DataMover.cpp:1111 msgid "(Re)Trying next source" msgstr "" #: src/hed/libs/data/DataMover.cpp:748 #, c-format msgid "Meta info of source and location do not match for %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:760 #, c-format msgid "" "Replica %s has high latency, but no more sources exist so will use this one" msgstr "" #: src/hed/libs/data/DataMover.cpp:764 #, c-format msgid "Replica %s has high latency, trying next source" msgstr "" #: src/hed/libs/data/DataMover.cpp:776 src/hed/libs/data/DataMover.cpp:796 #: src/libs/data-staging/DataStagingDelivery.cpp:344 #: src/libs/data-staging/DataStagingDelivery.cpp:367 #, fuzzy, c-format msgid "Using internal transfer method of %s" msgstr "Nutze unsicheren Datentransfer" #: src/hed/libs/data/DataMover.cpp:788 src/hed/libs/data/DataMover.cpp:805 #: src/libs/data-staging/DataStagingDelivery.cpp:360 #: src/libs/data-staging/DataStagingDelivery.cpp:381 #, c-format msgid "Internal transfer method is not supported for %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:812 #, fuzzy msgid "Using buffered transfer method" msgstr "Nutze sicheren Datentransfer" #: src/hed/libs/data/DataMover.cpp:816 #, fuzzy, c-format msgid "Failed to prepare source: %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/libs/data/DataMover.cpp:830 #, c-format msgid "Failed to start reading from source: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:849 #, fuzzy msgid "Metadata of source and destination are different" msgstr "" "Файл назначения совпадает с исходным.\n" "%1" #: src/hed/libs/data/DataMover.cpp:868 #, c-format msgid "Failed to preregister destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:873 src/hed/libs/data/DataMover.cpp:1135 msgid "destination.next_location" msgstr "" #: src/hed/libs/data/DataMover.cpp:884 #, fuzzy, c-format msgid "Failed to prepare destination: %s" msgstr "Fehler bei Anlegen von soft link: %s" #: src/hed/libs/data/DataMover.cpp:891 src/hed/libs/data/DataMover.cpp:914 #: src/hed/libs/data/DataMover.cpp:1132 #, c-format msgid "" "Failed to unregister preregistered lfn. You may need to unregister it " "manually: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:895 src/hed/libs/data/DataMover.cpp:917 #: src/hed/libs/data/DataMover.cpp:1005 src/hed/libs/data/DataMover.cpp:1021 #: src/hed/libs/data/DataMover.cpp:1044 src/hed/libs/data/DataMover.cpp:1089 msgid "(Re)Trying next destination" msgstr "" #: src/hed/libs/data/DataMover.cpp:906 #, c-format msgid "Failed to start writing to destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:929 msgid "Failed to start writing to cache" msgstr "" #: src/hed/libs/data/DataMover.cpp:937 src/hed/libs/data/DataMover.cpp:983 #: src/hed/libs/data/DataMover.cpp:1156 msgid "" "Failed to unregister preregistered lfn. 
You may need to unregister it " "manually" msgstr "" #: src/hed/libs/data/DataMover.cpp:944 msgid "Waiting for buffer" msgstr "" #: src/hed/libs/data/DataMover.cpp:951 #, fuzzy, c-format msgid "Failed updating timestamp on cache lock file %s for file %s: %s" msgstr "Fehler bei Erstellen von Info-Datei %s: %s" #: src/hed/libs/data/DataMover.cpp:956 #, c-format msgid "buffer: read EOF : %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:957 #, fuzzy, c-format msgid "buffer: write EOF: %s" msgstr "Globus error (Schreiben): %s" #: src/hed/libs/data/DataMover.cpp:958 #, c-format msgid "buffer: error : %s, read: %s, write: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:959 msgid "Closing read channel" msgstr "" #: src/hed/libs/data/DataMover.cpp:966 msgid "Closing write channel" msgstr "" #: src/hed/libs/data/DataMover.cpp:974 #, fuzzy msgid "Failed to complete writing to destination" msgstr "Fehler bei Schreiben zu Ziel" #: src/hed/libs/data/DataMover.cpp:988 #, fuzzy msgid "Transfer cancelled successfully" msgstr "Job erfolgreich abgebrochen" #: src/hed/libs/data/DataMover.cpp:1033 msgid "Cause of failure unclear - choosing randomly" msgstr "" #: src/hed/libs/data/DataMover.cpp:1076 #, c-format msgid "" "Checksum mismatch between checksum given as meta option (%s:%s) and " "calculated checksum (%s)" msgstr "" #: src/hed/libs/data/DataMover.cpp:1082 msgid "" "Failed to unregister preregistered lfn, You may need to unregister it " "manually" msgstr "" #: src/hed/libs/data/DataMover.cpp:1086 #, fuzzy msgid "Failed to delete destination, retry may fail" msgstr "Fehler bei der Initialisierung der delegation credentials" #: src/hed/libs/data/DataMover.cpp:1096 #, fuzzy msgid "Cannot compare empty checksum" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/hed/libs/data/DataMover.cpp:1103 #: src/libs/data-staging/DataStagingDelivery.cpp:538 msgid "Checksum type of source and calculated checksum differ, cannot compare" msgstr "" #: src/hed/libs/data/DataMover.cpp:1105 #, c-format msgid "Checksum mismatch between calcuated checksum %s and source checksum %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:1116 #: src/libs/data-staging/DataStagingDelivery.cpp:554 #, c-format msgid "Calculated transfer checksum %s matches source checksum" msgstr "" #: src/hed/libs/data/DataMover.cpp:1122 #: src/libs/data-staging/DataStagingDelivery.cpp:557 msgid "Checksum not computed" msgstr "" #: src/hed/libs/data/DataMover.cpp:1128 #, c-format msgid "Failed to postregister destination %s" msgstr "" #: src/hed/libs/data/DataPoint.cpp:84 #, fuzzy, c-format msgid "Invalid URL option: %s" msgstr "Ungültige URL Option: %s" #: src/hed/libs/data/DataPoint.cpp:263 #, fuzzy, c-format msgid "Skipping invalid URL option %s" msgstr "Ungültige URL Option: %s" #: src/hed/libs/data/DataPoint.cpp:278 msgid "" "Third party transfer was requested but the corresponding plugin could\n" " not be loaded. Is the GFAL plugin installed? If not, please install " "the\n" " packages 'nordugrid-arc-plugins-gfal' and 'gfal2-all'. Depending on\n" " your type of installation the package names might differ." 
msgstr "" #: src/hed/libs/data/DataPoint.cpp:296 #, fuzzy, c-format msgid "Failed to load plugin for URL %s" msgstr "Fehler bei Reservieren von Platz" #: src/hed/libs/data/DataPointDelegate.cpp:75 #: src/hed/libs/data/DataPointDelegate.cpp:76 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2011 #, c-format msgid "Starting helper process: %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:180 #, fuzzy msgid "start_reading" msgstr "start_reading_ftp" #: src/hed/libs/data/DataPointDelegate.cpp:189 #, fuzzy msgid "start_reading: helper start failed" msgstr "start_reading_ftp: globus_thread_create fehlgeschlagen" #: src/hed/libs/data/DataPointDelegate.cpp:197 #, fuzzy msgid "start_reading: thread create failed" msgstr "start_reading_ftp: globus_thread_create fehlgeschlagen" #: src/hed/libs/data/DataPointDelegate.cpp:213 #, fuzzy msgid "StopReading: aborting connection" msgstr "StopWriting: Abbruch der Verbindung" #: src/hed/libs/data/DataPointDelegate.cpp:218 #, fuzzy msgid "stop_reading: waiting for transfer to finish" msgstr "stop_reading_ftp: warte auf Beenden von Transfer" #: src/hed/libs/data/DataPointDelegate.cpp:221 #, fuzzy, c-format msgid "stop_reading: exiting: %s" msgstr "stop-reading_ftp: verlasse: %s" #: src/hed/libs/data/DataPointDelegate.cpp:231 #, fuzzy msgid "read_thread: get and register buffers" msgstr "ftp_read_thread: beziehe und registriere Puffer" #: src/hed/libs/data/DataPointDelegate.cpp:239 #, fuzzy, c-format msgid "read_thread: for_read failed - aborting: %s" msgstr "ftp_read_thread: for_read fehlgeschlagen - Abbruch: %s" #: src/hed/libs/data/DataPointDelegate.cpp:247 #, fuzzy, c-format msgid "read_thread: non-data tag '%c' from external process - leaving: %s" msgstr "ftp_read_thread: for_read fehlgeschlagen - Abbruch: %s" #: src/hed/libs/data/DataPointDelegate.cpp:256 #, fuzzy, c-format msgid "read_thread: data read error from external process - aborting: %s" msgstr "ftp_read_thread: for_read fehlgeschlagen - Abbruch: %s" #: src/hed/libs/data/DataPointDelegate.cpp:264 #, fuzzy msgid "read_thread: exiting" msgstr "ftp_read_thread: Beenden" #: src/hed/libs/data/DataPointDelegate.cpp:285 #, fuzzy msgid "start_writing_ftp: helper start failed" msgstr "start_writing_ftp: put fehlgeschlagen" #: src/hed/libs/data/DataPointDelegate.cpp:293 #, fuzzy msgid "start_writing_ftp: thread create failed" msgstr "start_writitng_ftp: globus_thread_create failed" #: src/hed/libs/data/DataPointDelegate.cpp:343 #, fuzzy msgid "No checksum information possible" msgstr "list_files_ftp: Bezug von Zeitpunkt der letzten Dateiänderung von %s" #: src/hed/libs/data/DataPointDelegate.cpp:359 #, fuzzy msgid "write_thread: get and pass buffers" msgstr "ftp_write_thread: Beziehe und Registriere Puffer" #: src/hed/libs/data/DataPointDelegate.cpp:366 #, fuzzy msgid "write_thread: for_write failed - aborting" msgstr "ftp_write_thread: for_write fehlgeschlagen - Abbruch" #: src/hed/libs/data/DataPointDelegate.cpp:370 #, fuzzy msgid "write_thread: for_write eof" msgstr "ftp_write_thread: for_write fehlgeschlagen - Abbruch" #: src/hed/libs/data/DataPointDelegate.cpp:384 #, fuzzy msgid "write_thread: out failed - aborting" msgstr "ftp_write_thread: for_write fehlgeschlagen - Abbruch" #: src/hed/libs/data/DataPointDelegate.cpp:392 #, fuzzy msgid "write_thread: exiting" msgstr "ftp_read_thread: Beenden" #: src/hed/libs/data/DataPointIndex.cpp:91 #, fuzzy, c-format msgid "Can't handle location %s" msgstr "Kann Funktion %s nicht anlegen" #: src/hed/libs/data/DataPointIndex.cpp:183 msgid "Sorting 
replicas according to URL map" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:187 #, c-format msgid "Replica %s is mapped" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:195 #, c-format msgid "Sorting replicas according to preferred pattern %s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:218 #: src/hed/libs/data/DataPointIndex.cpp:236 #, fuzzy, c-format msgid "Excluding replica %s matching pattern !%s" msgstr "Replica %s existiert bereits für LFN %s" #: src/hed/libs/data/DataPointIndex.cpp:229 #, fuzzy, c-format msgid "Replica %s matches host pattern %s" msgstr "Replica %s existiert bereits für LFN %s" #: src/hed/libs/data/DataPointIndex.cpp:247 #, c-format msgid "Replica %s matches pattern %s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:263 #, c-format msgid "Replica %s doesn't match preferred pattern or URL map" msgstr "" #: src/hed/libs/data/DataStatus.cpp:12 msgid "Operation completed successfully" msgstr "Operation erfolgreich abgeschlossen" #: src/hed/libs/data/DataStatus.cpp:13 #, fuzzy msgid "Source is invalid URL" msgstr "Quelle muss LFN enthalten" #: src/hed/libs/data/DataStatus.cpp:14 #, fuzzy msgid "Destination is invalid URL" msgstr "Destination muss LFN enthalten" #: src/hed/libs/data/DataStatus.cpp:15 #, fuzzy msgid "Resolving of index service for source failed" msgstr "Auflösen von index service URL für Quelle schlug fehl" #: src/hed/libs/data/DataStatus.cpp:16 #, fuzzy msgid "Resolving of index service for destination failed" msgstr "Auflösen von index service URL für Ziel schlug fehl" #: src/hed/libs/data/DataStatus.cpp:17 msgid "Can't read from source" msgstr "Kann nicht von Quelle lesen" #: src/hed/libs/data/DataStatus.cpp:18 msgid "Can't write to destination" msgstr "Kann nicht zu Ziel schreiben" #: src/hed/libs/data/DataStatus.cpp:19 msgid "Failed while reading from source" msgstr "Fehler bei Lesen von Quelle" #: src/hed/libs/data/DataStatus.cpp:20 msgid "Failed while writing to destination" msgstr "Fehler bei Schreiben zu Ziel" #: src/hed/libs/data/DataStatus.cpp:21 #, fuzzy msgid "Failed while transferring data" msgstr "Fehler bei Transfer von Daten" #: src/hed/libs/data/DataStatus.cpp:22 msgid "Failed while finishing reading from source" msgstr "Fehler bei Abschluß des Lesens von Quelle" #: src/hed/libs/data/DataStatus.cpp:23 msgid "Failed while finishing writing to destination" msgstr "" #: src/hed/libs/data/DataStatus.cpp:24 msgid "First stage of registration to index service failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:25 msgid "Last stage of registration to index service failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:26 #, fuzzy msgid "Unregistering from index service failed" msgstr "Keine Antwort von AA service %s schlug fehl" #: src/hed/libs/data/DataStatus.cpp:27 msgid "Error in caching procedure" msgstr "" #: src/hed/libs/data/DataStatus.cpp:28 msgid "Error due to expiration of provided credentials" msgstr "" #: src/hed/libs/data/DataStatus.cpp:29 #, fuzzy msgid "Delete error" msgstr "Löschte %s" #: src/hed/libs/data/DataStatus.cpp:30 msgid "No valid location available" msgstr "" #: src/hed/libs/data/DataStatus.cpp:31 msgid "Location already exists" msgstr "" #: src/hed/libs/data/DataStatus.cpp:32 msgid "Operation not supported for this kind of URL" msgstr "" #: src/hed/libs/data/DataStatus.cpp:33 msgid "Feature is not implemented" msgstr "" #: src/hed/libs/data/DataStatus.cpp:34 #, fuzzy msgid "Already reading from source" msgstr "Fehler bei Lesen von Quelle" #: src/hed/libs/data/DataStatus.cpp:35 #, fuzzy msgid "Already 
writing to destination" msgstr "Fehler bei Schreiben zu Ziel" #: src/hed/libs/data/DataStatus.cpp:36 #, fuzzy msgid "Read access check failed" msgstr "Lese Archiv Datei %s" #: src/hed/libs/data/DataStatus.cpp:37 #, fuzzy msgid "Directory listing failed" msgstr "Fehler beim Auflisten von Dateien" #: src/hed/libs/data/DataStatus.cpp:38 msgid "Object is not suitable for listing" msgstr "" #: src/hed/libs/data/DataStatus.cpp:39 #, fuzzy msgid "Failed to obtain information about file" msgstr "Konnte Listing nicht via FTP beziehen: %s" #: src/hed/libs/data/DataStatus.cpp:40 #, fuzzy msgid "No such file or directory" msgstr "Fehler beim listen von Datei oder Verzeichnis: %s" #: src/hed/libs/data/DataStatus.cpp:41 msgid "Object not initialized (internal error)" msgstr "" #: src/hed/libs/data/DataStatus.cpp:42 msgid "Operating System error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:43 msgid "Failed to stage file(s)" msgstr "" #: src/hed/libs/data/DataStatus.cpp:44 msgid "Inconsistent metadata" msgstr "" #: src/hed/libs/data/DataStatus.cpp:45 #, fuzzy msgid "Failed to prepare source" msgstr "Fehler bei Reservieren von Platz" #: src/hed/libs/data/DataStatus.cpp:46 msgid "Should wait for source to be prepared" msgstr "" #: src/hed/libs/data/DataStatus.cpp:47 #, fuzzy msgid "Failed to prepare destination" msgstr "Initiierung der Delegation fehlgeschlagen" #: src/hed/libs/data/DataStatus.cpp:48 msgid "Should wait for destination to be prepared" msgstr "" #: src/hed/libs/data/DataStatus.cpp:49 #, fuzzy msgid "Failed to finalize reading from source" msgstr "Fehler bei Lesen von Quelle" #: src/hed/libs/data/DataStatus.cpp:50 #, fuzzy msgid "Failed to finalize writing to destination" msgstr "Fehler bei Schreiben zu Ziel" #: src/hed/libs/data/DataStatus.cpp:51 #, fuzzy msgid "Failed to create directory" msgstr "Fehler bei Anlegen/Finden von Verzeichnis %s, (%d)" #: src/hed/libs/data/DataStatus.cpp:52 #, fuzzy msgid "Failed to rename URL" msgstr "Fehler bei Erstellen von GUID in RLS: %s" #: src/hed/libs/data/DataStatus.cpp:53 msgid "Data was already cached" msgstr "" #: src/hed/libs/data/DataStatus.cpp:54 #, fuzzy msgid "Operation cancelled successfully" msgstr "Operation erfolgreich abgeschlossen" #: src/hed/libs/data/DataStatus.cpp:55 #, fuzzy msgid "Generic error" msgstr "Benutzungsschnittstellenfehler" #: src/hed/libs/data/DataStatus.cpp:56 src/hed/libs/data/DataStatus.cpp:69 msgid "Unknown error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:60 msgid "No error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:61 #, fuzzy msgid "Transfer timed out" msgstr "Transfer vollständig" #: src/hed/libs/data/DataStatus.cpp:62 msgid "Checksum mismatch" msgstr "" #: src/hed/libs/data/DataStatus.cpp:63 msgid "Bad logic" msgstr "" #: src/hed/libs/data/DataStatus.cpp:64 msgid "All results obtained are invalid" msgstr "" #: src/hed/libs/data/DataStatus.cpp:65 #, fuzzy msgid "Temporary service error" msgstr "Benutzungsschnittstellenfehler" #: src/hed/libs/data/DataStatus.cpp:66 #, fuzzy msgid "Permanent service error" msgstr "Benutzungsschnittstellenfehler" #: src/hed/libs/data/DataStatus.cpp:67 #, fuzzy msgid "Error switching uid" msgstr "Fehler bei Importieren" #: src/hed/libs/data/DataStatus.cpp:68 #, fuzzy msgid "Request timed out" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/libs/data/FileCache.cpp:111 #, fuzzy msgid "No cache directory specified" msgstr "Kein Cache-Verzeichnis angegeben" #: src/hed/libs/data/FileCache.cpp:128 msgid "No usable caches" msgstr "" #: src/hed/libs/data/FileCache.cpp:137 #, fuzzy msgid 
"No draining cache directory specified" msgstr "Kein Cache-Verzeichnis angegeben" #: src/hed/libs/data/FileCache.cpp:155 #, fuzzy msgid "No read-only cache directory specified" msgstr "Kein Cache-Verzeichnis angegeben" #: src/hed/libs/data/FileCache.cpp:184 #, fuzzy, c-format msgid "Failed to create cache directory for file %s: %s" msgstr "Fehler bei Erstellen von Info-Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:194 #, fuzzy, c-format msgid "Failed to create any cache directories for %s" msgstr "Fehler bei Anlegen/Finden von Verzeichnis %s, (%d)" #: src/hed/libs/data/FileCache.cpp:201 #, fuzzy, c-format msgid "Failed to change permissions on %s: %s" msgstr "Konnte Zugriffsrechte von hard link nicht ändern zu 0644: %s" #: src/hed/libs/data/FileCache.cpp:213 #, fuzzy, c-format msgid "Failed to delete stale cache file %s: %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:216 #, fuzzy, c-format msgid "Failed to release lock on file %s" msgstr "Fehler bei Lesen von Proxy-Datei: %s" #: src/hed/libs/data/FileCache.cpp:234 #, fuzzy, c-format msgid "Failed looking up attributes of cached file: %s" msgstr "" "Warnung: Fehler bei Nachschlagen von Attributen von gecachter Datei: %s" #: src/hed/libs/data/FileCache.cpp:240 #, fuzzy, c-format msgid "Failed to obtain lock on cache file %s" msgstr "Fehler bei Unlock von Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:249 src/hed/libs/data/FileCache.cpp:309 #, fuzzy, c-format msgid "Error removing cache file %s: %s" msgstr "Fehler bei Entfernen von Cache-Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:251 src/hed/libs/data/FileCache.cpp:262 #, c-format msgid "Failed to remove lock on %s. Some manual intervention may be required" msgstr "" #: src/hed/libs/data/FileCache.cpp:281 src/hed/libs/data/FileCache.cpp:315 #, fuzzy, c-format msgid "Failed to unlock file %s: %s. 
Manual intervention may be required" msgstr "Fehler bei Unlock von Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:298 #, fuzzy, c-format msgid "Invalid lock on file %s" msgstr "Ungültige url: %s" #: src/hed/libs/data/FileCache.cpp:304 #, fuzzy, c-format msgid "Failed to remove .meta file %s: %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:369 #, fuzzy, c-format msgid "Cache not found for file %s" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/hed/libs/data/FileCache.cpp:379 #, c-format msgid "" "Cache file %s was modified in the last second, sleeping 1 second to avoid " "race condition" msgstr "" #: src/hed/libs/data/FileCache.cpp:384 src/hed/libs/data/FileCache.cpp:689 #, fuzzy, c-format msgid "Cache file %s does not exist" msgstr "Cache-Datei %s existiert nicht" #: src/hed/libs/data/FileCache.cpp:389 src/hed/libs/data/FileCache.cpp:691 #, fuzzy, c-format msgid "Error accessing cache file %s: %s" msgstr "Fehler bei Zugriff auf Cache-Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:395 #, fuzzy, c-format msgid "Cannot create directory %s for per-job hard links" msgstr "Kann Verzeichnis \"%s\" nicht anlegen für Job-spezifische hard links" #: src/hed/libs/data/FileCache.cpp:400 #, fuzzy, c-format msgid "Cannot change permission of %s: %s " msgstr "Kann Zugriffsrecht von \"%s\" nicht zu 0700 ändern" #: src/hed/libs/data/FileCache.cpp:404 #, fuzzy, c-format msgid "Cannot change owner of %s: %s " msgstr "" "Kann Owner von %s nicht ändernÐевозможно изменить владельца папки %1.\n" "Ошибка: %2" #: src/hed/libs/data/FileCache.cpp:418 #, fuzzy, c-format msgid "Failed to remove existing hard link at %s: %s" msgstr "Fehler bei Entfernen von hard link %s: %s" #: src/hed/libs/data/FileCache.cpp:422 src/hed/libs/data/FileCache.cpp:433 #, fuzzy, c-format msgid "Failed to create hard link from %s to %s: %s" msgstr "Fehler bei Anlegen von hard link von %s zu %s: %s" #: src/hed/libs/data/FileCache.cpp:428 #, fuzzy, c-format msgid "Cache file %s not found" msgstr "Cache-Datei %s existiert nicht" #: src/hed/libs/data/FileCache.cpp:443 #, fuzzy, c-format msgid "Failed to change permissions or set owner of hard link %s: %s" msgstr "Konnte Zugriffsrechte von hard link nicht ändern zu 0644: %s" #: src/hed/libs/data/FileCache.cpp:451 #, fuzzy, c-format msgid "Failed to release lock on cache file %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/data/FileCache.cpp:462 #, c-format msgid "Cache file %s was locked during link/copy, must start again" msgstr "" #: src/hed/libs/data/FileCache.cpp:467 #, c-format msgid "Cache file %s was deleted during link/copy, must start again" msgstr "" #: src/hed/libs/data/FileCache.cpp:472 #, c-format msgid "Cache file %s was modified while linking, must start again" msgstr "" #: src/hed/libs/data/FileCache.cpp:490 #, fuzzy, c-format msgid "Failed to copy file %s to %s: %s" msgstr "Fehler bei Öffnen von Datei %s zum Lesen: %s" #: src/hed/libs/data/FileCache.cpp:496 #, fuzzy, c-format msgid "Failed to set executable bit on file %s" msgstr "Konnte Metadaen für Datei %s nicht finden" #: src/hed/libs/data/FileCache.cpp:501 #, fuzzy, c-format msgid "Failed to set executable bit on file %s: %s" msgstr "Fehler bei Erstellen von Info-Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:515 #, fuzzy, c-format msgid "Failed to remove existing symbolic link at %s: %s" msgstr "Fehler bei Entfernen von hard link %s: %s" #: src/hed/libs/data/FileCache.cpp:519 src/hed/libs/data/FileCache.cpp:524 #, fuzzy, c-format msgid 
"Failed to create symbolic link from %s to %s: %s" msgstr "Fehler bei Anlegen von hard link von %s zu %s: %s" #: src/hed/libs/data/FileCache.cpp:554 #, c-format msgid "Failed to remove cache per-job dir %s: %s" msgstr "Fehler bei Entfernen von cache per-job Verzeichnis %s: %s" #: src/hed/libs/data/FileCache.cpp:573 src/hed/libs/data/FileCache.cpp:641 #, fuzzy, c-format msgid "Error reading meta file %s: %s" msgstr "Fehler bei Lesen von Meta-Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:578 src/hed/libs/data/FileCache.cpp:646 #, fuzzy, c-format msgid "Error opening meta file %s" msgstr "Fehler bei Öffnen von Meta-Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:583 src/hed/libs/data/FileCache.cpp:650 #, fuzzy, c-format msgid "meta file %s is empty" msgstr "Anfrage ist leer" #: src/hed/libs/data/FileCache.cpp:593 #, fuzzy, c-format msgid "" "File %s is already cached at %s under a different URL: %s - will not add DN " "to cached list" msgstr "" "Fehler: Datei %s wird bereits gecacht bei %s unter anderer URL: %s - werde " "DN nicht zu cached list hinzufügen" #: src/hed/libs/data/FileCache.cpp:604 #, fuzzy, c-format msgid "Bad format detected in file %s, in line %s" msgstr "Formatierungsfehler erkannt in Datei %s, in Zeile %s" #: src/hed/libs/data/FileCache.cpp:620 #, fuzzy, c-format msgid "Could not acquire lock on meta file %s" msgstr "Konnte temporäre Datei nicht anlegen: %s" #: src/hed/libs/data/FileCache.cpp:624 #, fuzzy, c-format msgid "Error opening meta file for writing %s" msgstr "Fehler bei Öffnen von Meta-Datei zum schreiben %s: %s" #: src/hed/libs/data/FileCache.cpp:660 #, c-format msgid "DN %s is cached and is valid until %s for URL %s" msgstr "DN %s wird gecacht und ist gültig bis %s für Datei %s" #: src/hed/libs/data/FileCache.cpp:664 #, c-format msgid "DN %s is cached but has expired for URL %s" msgstr "DN %s wird gecacht aber ist abgelaufen für URL %s" #: src/hed/libs/data/FileCache.cpp:715 #, fuzzy, c-format msgid "Failed to acquire lock on cache meta file %s" msgstr "Fehler bei Unlock von Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:720 #, fuzzy, c-format msgid "Failed to create cache meta file %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/data/FileCache.cpp:735 #, fuzzy, c-format msgid "Failed to read cache meta file %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/data/FileCache.cpp:740 #, c-format msgid "Cache meta file %s is empty, will recreate" msgstr "" #: src/hed/libs/data/FileCache.cpp:745 #, c-format msgid "Cache meta file %s possibly corrupted, will recreate" msgstr "" #: src/hed/libs/data/FileCache.cpp:749 #, fuzzy, c-format msgid "" "File %s is already cached at %s under a different URL: %s - this file will " "not be cached" msgstr "" "Fehler: Datei %s wird bereits gecacht bei %s unter einer anderen URL: %s - " "diese Datei wird nicht gecacht" #: src/hed/libs/data/FileCache.cpp:759 #, fuzzy, c-format msgid "Error looking up attributes of cache meta file %s: %s" msgstr "Fehler bei Nachschlagen von Attributen von Meta-Datei %s: %s" #: src/hed/libs/data/FileCache.cpp:830 #, fuzzy, c-format msgid "Using cache %s" msgstr "Nutze space token %s" #: src/hed/libs/data/FileCache.cpp:844 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:79 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:112 #, fuzzy, c-format msgid "Error getting info from statvfs for the path %s: %s" msgstr "Fehler bei Öffnen von Meta-Datei zum schreiben %s: %s" #: src/hed/libs/data/FileCache.cpp:850 #: 
src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:118 #, c-format msgid "Cache %s: Free space %f GB" msgstr "" #: src/hed/libs/data/URLMap.cpp:33 #, fuzzy, c-format msgid "Can't use URL %s" msgstr "Kann stat-Informationen zu Datei nicht einholen: %s" #: src/hed/libs/data/URLMap.cpp:39 #, c-format msgid "file %s is not accessible" msgstr "" #: src/hed/libs/data/URLMap.cpp:49 #, fuzzy, c-format msgid "Mapping %s to %s" msgstr "" "\n" " СоответÑтвие раздел-Ñегмент:\n" #: src/hed/libs/data/examples/simple_copy.cpp:17 #, fuzzy msgid "Usage: copy source destination" msgstr "Quelle Ziel" #: src/hed/libs/data/examples/simple_copy.cpp:42 #, fuzzy, c-format msgid "Copy failed: %s" msgstr "DCAU fehlgeschlagen: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:41 #, fuzzy, c-format msgid "Failed to read proxy file: %s" msgstr "Fehler bei Lesen von Proxy-Datei: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:49 #, fuzzy, c-format msgid "Failed to read certificate file: %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:56 #, fuzzy, c-format msgid "Failed to read private key file: %s" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:82 #, fuzzy, c-format msgid "" "Failed to convert GSI credential to GSS credential (major: %d, minor: %d):%s:" "%s" msgstr "" "Fehler bei Konvertieren von GSI Credential zu GCC Credential (major: %d, " "minor: %d)%s" #: src/hed/libs/globusutils/GSSCredential.cpp:94 #, fuzzy, c-format msgid "Failed to release GSS credential (major: %d, minor: %d):%s:%s" msgstr "Fehler bei Freigabe von GSS Credential (major: %d, minor: %d):%s" #: src/hed/libs/loader/ModuleManager.cpp:28 msgid "Module Manager Init" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:71 msgid "" "Busy plugins found while unloading Module Manager. Waiting for them to be " "released." 
msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:205 #, c-format msgid "Found %s in cache" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:212 #, c-format msgid "Could not locate module %s in following paths:" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:216 #, fuzzy, c-format msgid "\t%s" msgstr "%s" #: src/hed/libs/loader/ModuleManager.cpp:230 #, c-format msgid "Loaded %s" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:274 msgid "Module Manager Init by ModuleManager::setCfg" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:310 #: src/hed/libs/loader/ModuleManager.cpp:323 #, c-format msgid "%s made persistent" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:314 #, c-format msgid "Not found %s in cache" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:328 msgid "Specified module not found in cache" msgstr "" #: src/hed/libs/loader/Plugin.cpp:364 src/hed/libs/loader/Plugin.cpp:557 #, c-format msgid "Could not find loadable module descriptor by name %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:372 src/hed/libs/loader/Plugin.cpp:567 #, c-format msgid "Could not find loadable module by name %s (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:378 src/hed/libs/loader/Plugin.cpp:480 #: src/hed/libs/loader/Plugin.cpp:572 #, c-format msgid "Module %s is not an ARC plugin (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:395 src/hed/libs/loader/Plugin.cpp:490 #: src/hed/libs/loader/Plugin.cpp:598 #, c-format msgid "Module %s failed to reload (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:417 #, c-format msgid "Module %s contains no plugin %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:462 #, c-format msgid "Could not find loadable module descriptor by name %s or kind %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:467 #, c-format msgid "Loadable module %s contains no requested plugin %s of kind %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:474 #, c-format msgid "Could not find loadable module by names %s and %s (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:503 #, c-format msgid "Module %s contains no requested plugin %s of kind %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:588 #, c-format msgid "Module %s does not contain plugin(s) of specified kind(s)" msgstr "" #: src/hed/libs/message/MCC.cpp:76 src/hed/libs/message/Service.cpp:25 #, c-format msgid "No security processing/check requested for '%s'" msgstr "" #: src/hed/libs/message/MCC.cpp:85 #, fuzzy, c-format msgid "Security processing/check failed: %s" msgstr "Fehler bei Zugriff auf Cache-Datei %s: %s" #: src/hed/libs/message/MCC.cpp:90 msgid "Security processing/check passed" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:16 #, fuzzy msgid "Chain(s) configuration failed" msgstr "Delegation Authorisierung fehlgeschlagen" #: src/hed/libs/message/MCCLoader.cpp:133 msgid "SecHandler configuration is not defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:156 msgid "SecHandler has no configuration" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:162 msgid "SecHandler has no name attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:172 #, c-format msgid "Security Handler %s(%s) could not be created" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:176 #, c-format msgid "SecHandler: %s(%s)" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:188 msgid "Component has no name attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:193 msgid "Component has no ID attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:202 #, c-format msgid "Component %s(%s) could not be created" 
msgstr "" #: src/hed/libs/message/MCCLoader.cpp:232 #, c-format msgid "Component's %s(%s) next has no ID attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:287 #, c-format msgid "Loaded MCC %s(%s)" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:305 #, c-format msgid "Plexer's (%s) next has no ID attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:315 #, c-format msgid "Loaded Plexer %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:323 msgid "Service has no Name attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:329 msgid "Service has no ID attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:338 #, c-format msgid "Service %s(%s) could not be created" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:345 #, c-format msgid "Loaded Service %s(%s)" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:387 #, c-format msgid "Linking MCC %s(%s) to MCC (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:398 #, c-format msgid "Linking MCC %s(%s) to Service (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:407 #, c-format msgid "Linking MCC %s(%s) to Plexer (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:412 #, c-format msgid "MCC %s(%s) - next %s(%s) has no target" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:431 #, c-format msgid "Linking Plexer %s to MCC (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:442 #, c-format msgid "Linking Plexer %s to Service (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:451 #, c-format msgid "Linking Plexer %s to Plexer (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:457 #, c-format msgid "Plexer (%s) - next %s(%s) has no target" msgstr "" #: src/hed/libs/message/Plexer.cpp:31 #, c-format msgid "Bad label: \"%s\"" msgstr "" #: src/hed/libs/message/Plexer.cpp:47 #, c-format msgid "Operation on path \"%s\"" msgstr "" #: src/hed/libs/message/Plexer.cpp:60 #, c-format msgid "No next MCC or Service at path \"%s\"" msgstr "" #: src/hed/libs/message/Service.cpp:35 #, fuzzy, c-format msgid "Security processing/check for '%s' failed: %s" msgstr "Verbindung zu %s schlug fehl: %s" #: src/hed/libs/message/Service.cpp:41 #, c-format msgid "Security processing/check for '%s' passed" msgstr "" #: src/hed/libs/otokens/jwse.cpp:55 #, c-format msgid "JWSE::Input: token: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:75 #, c-format msgid "JWSE::Input: header: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:101 #, c-format msgid "JWSE::Input: JWS content: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:111 msgid "JWSE::Input: JWS: token too young" msgstr "" #: src/hed/libs/otokens/jwse.cpp:120 msgid "JWSE::Input: JWS: token too old" msgstr "" #: src/hed/libs/otokens/jwse.cpp:131 #, c-format msgid "JWSE::Input: JWS: signature algorithm: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:190 #, fuzzy msgid "JWSE::Input: JWS: signature verification failed" msgstr "SOAP Aufruf fehlgeschlagen" #: src/hed/libs/otokens/jwse.cpp:196 msgid "JWSE::Input: JWE: not supported yet" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:271 msgid "JWSE::ExtractPublicKey: x5c key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:279 msgid "JWSE::ExtractPublicKey: jwk key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:286 msgid "JWSE::ExtractPublicKey: external jwk key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:303 #, c-format msgid "JWSE::ExtractPublicKey: fetching jwl key from %s" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:316 msgid "JWSE::ExtractPublicKey: no 
supported key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:319 msgid "JWSE::ExtractPublicKey: key parsing error" msgstr "" #: src/hed/libs/otokens/openid_metadata.cpp:40 #: src/hed/libs/otokens/openid_metadata.cpp:45 #, fuzzy, c-format msgid "Input: metadata: %s" msgstr "Füge location hinzu: Metadaten: %s" #: src/hed/libs/otokens/openid_metadata.cpp:414 #, fuzzy, c-format msgid "Fetch: response code: %u %s" msgstr "Erhalte Antwort: %s" #: src/hed/libs/otokens/openid_metadata.cpp:416 #, fuzzy, c-format msgid "Fetch: response body: %s" msgstr "Erhalte Antwort: %s" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:136 #, fuzzy, c-format msgid "Can not load ARC evaluator object: %s" msgstr "Kann PolicyStore Objekt nicht anlegen" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:187 #, c-format msgid "Can not load ARC request object: %s" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:228 #, c-format msgid "Can not load policy object: %s" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:276 msgid "Can not load policy object" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:324 msgid "Can not load request object" msgstr "" #: src/hed/libs/security/ArcPDP/PolicyParser.cpp:119 msgid "Can not generate policy object" msgstr "" #: src/hed/libs/security/ArcPDP/attr/RequestAttribute.cpp:37 #, c-format msgid "Id= %s,Type= %s,Issuer= %s,Value= %s" msgstr "" #: src/hed/libs/security/ArcPDP/attr/RequestAttribute.cpp:40 #, c-format msgid "No Attribute exists, which can deal with type: %s" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:168 #, c-format msgid "HTTP Error: %d %s" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:241 msgid "Cannot create http payload" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:311 msgid "No next element in the chain" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:320 #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:253 msgid "next element of the chain returned error status" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:329 msgid "next element of the chain returned no payload" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:341 msgid "next element of the chain returned invalid/unsupported payload" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:423 msgid "Error to flush output payload" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:305 #, c-format msgid "<< %s" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:354 src/hed/mcc/http/PayloadHTTP.cpp:456 #, fuzzy, c-format msgid "< %s" msgstr "%s" #: src/hed/mcc/http/PayloadHTTP.cpp:575 #, fuzzy msgid "Failed to parse HTTP header" msgstr "Fehler beim Senden von header" #: src/hed/mcc/http/PayloadHTTP.cpp:836 msgid "Invalid HTTP object can't produce result" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:949 #, fuzzy, c-format msgid "> %s" msgstr "%s" #: src/hed/mcc/http/PayloadHTTP.cpp:974 #, fuzzy msgid "Failed to write header to output stream" msgstr "Fehler bei Schreiben zu Datein %s: %s" #: src/hed/mcc/http/PayloadHTTP.cpp:999 src/hed/mcc/http/PayloadHTTP.cpp:1005 #: src/hed/mcc/http/PayloadHTTP.cpp:1011 src/hed/mcc/http/PayloadHTTP.cpp:1021 #: src/hed/mcc/http/PayloadHTTP.cpp:1033 src/hed/mcc/http/PayloadHTTP.cpp:1038 #: src/hed/mcc/http/PayloadHTTP.cpp:1043 src/hed/mcc/http/PayloadHTTP.cpp:1051 #: src/hed/mcc/http/PayloadHTTP.cpp:1058 #, fuzzy msgid "Failed to write body to output stream" msgstr "Fehler bei Lesen von Dateiliste" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:31 msgid "Skipping service: no ServicePath found!" 
msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:37 msgid "Skipping service: no SchemaPath found!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:89 msgid "Parser Context creation failed!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:98 msgid "Cannot parse schema!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:110 msgid "Empty payload!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:119 msgid "Could not convert payload!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:125 #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:212 msgid "Could not create PayloadSOAP!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:196 msgid "Empty input payload!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:205 msgid "Could not convert incoming payload!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:232 msgid "Missing schema! Skipping validation..." msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:237 msgid "Could not validate message!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:245 #: src/hed/mcc/soap/MCCSOAP.cpp:222 src/hed/mcc/soap/MCCSOAP.cpp:236 #: src/hed/mcc/soap/MCCSOAP.cpp:266 msgid "empty next chain element" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:257 #: src/hed/mcc/soap/MCCSOAP.cpp:282 msgid "next element of the chain returned empty payload" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:265 msgid "next element of the chain returned invalid payload" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:207 msgid "empty input payload" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:217 #, c-format msgid "MIME is not suitable for SOAP: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:231 msgid "incoming message is not SOAP" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:258 #, c-format msgid "Security check failed in SOAP MCC for incoming message: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:274 #, c-format msgid "next element of the chain returned error status: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:293 msgid "next element of the chain returned unknown payload - passing through" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:298 src/hed/mcc/soap/MCCSOAP.cpp:314 #, c-format msgid "Security check failed in SOAP MCC for outgoing message: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:368 msgid "Security check failed in SOAP MCC for outgoing message" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:421 msgid "Security check failed in SOAP MCC for incoming message" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:82 msgid "Missing Port in Listen element" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:91 msgid "Version in Listen element can't be recognized" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:100 #, c-format msgid "Failed to obtain local address for port %s - %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:102 #, fuzzy, c-format msgid "Failed to obtain local address for %s:%s - %s" msgstr "Warnung: Fehler bei Bezug von Attributen von %s: %s" #: src/hed/mcc/tcp/MCCTCP.cpp:109 #, c-format msgid "Trying to listen on TCP port %s(%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:111 #, fuzzy, c-format msgid "Trying to listen on %s:%s(%s)" msgstr "Versuche auf %s:%s(%s) zu lauschen" #: src/hed/mcc/tcp/MCCTCP.cpp:117 #, fuzzy, c-format msgid "Failed to create socket for listening at TCP port %s(%s): %s" msgstr "Fehler bei Anlegen von Datei %s zum Schreiben: %s" #: src/hed/mcc/tcp/MCCTCP.cpp:119 #, fuzzy, c-format msgid "Failed to create socket for listening at %s:%s(%s): %s" msgstr "Fehler bei Anlegen von
Datei %s zum Schreiben: %s" #: src/hed/mcc/tcp/MCCTCP.cpp:134 #, c-format msgid "" "Failed to limit socket to IPv6 at TCP port %s - may cause errors for IPv4 at " "same port" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:136 #, c-format msgid "" "Failed to limit socket to IPv6 at %s:%s - may cause errors for IPv4 at same " "port" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:144 #, c-format msgid "Failed to bind socket for TCP port %s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:146 #, fuzzy, c-format msgid "Failed to bind socket for %s:%s(%s): %s" msgstr "Fehler bei Unlock von Datei %s: %s" #: src/hed/mcc/tcp/MCCTCP.cpp:161 #, c-format msgid "Failed to listen at TCP port %s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:163 #, fuzzy, c-format msgid "Failed to listen at %s:%s(%s): %s" msgstr "Fehler bei Schreiben zu Datein %s: %s" #: src/hed/mcc/tcp/MCCTCP.cpp:180 #, c-format msgid "Listening on TCP port %s(%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:182 #, c-format msgid "Listening on %s:%s(%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:189 #, fuzzy, c-format msgid "Failed to start listening on any address for %s:%s" msgstr "Warnung: Fehler bei Bezug von Attributen von %s: %s" #: src/hed/mcc/tcp/MCCTCP.cpp:191 #, fuzzy, c-format msgid "Failed to start listening on any address for %s:%s(IPv%s)" msgstr "Warnung: Fehler bei Bezug von Attributen von %s: %s" #: src/hed/mcc/tcp/MCCTCP.cpp:197 msgid "No listening ports initiated" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:208 msgid "dropped" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:208 msgid "put on hold" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:208 #, c-format msgid "Setting connections limit to %i, connections over limit will be %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:212 msgid "Failed to start thread for listening" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:245 msgid "Failed to start thread for communication" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:271 msgid "Failed while waiting for connection request" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:293 msgid "Failed to accept connection request" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:302 msgid "Too many connections - dropping new one" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:309 msgid "Too many connections - waiting for old to close" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:533 msgid "next chain element called" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:548 msgid "Only Raw Buffer payload is supported for output" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:556 src/hed/mcc/tcp/MCCTCP.cpp:655 #: src/hed/mcc/tls/MCCTLS.cpp:542 msgid "Failed to send content of buffer" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:568 msgid "TCP executor is removed" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:570 #, c-format msgid "Sockets do not match on exit %i != %i" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:591 msgid "No Connect element specified" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:597 msgid "Missing Port in Connect element" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:603 msgid "Missing Host in Connect element" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:631 #, fuzzy msgid "TCP client process called" msgstr "konnte Nachricht nicht verarbeiten" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:67 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:83 #, c-format msgid "Failed to resolve %s (%s)" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:93 #, c-format msgid "Trying to connect %s(%s):%d" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:97 #, fuzzy, c-format msgid "Failed to create socket for connecting to %s(%s):%d - %s" msgstr "Fehler bei Anlegen von Datei %s zum 
Schreiben: %s" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:109 #, c-format msgid "" "Failed to get TCP socket options for connection to %s(%s):%d - timeout won't " "work - %s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:116 #, c-format msgid "Failed to connect to %s(%s):%i - %s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:126 #, c-format msgid "Timeout connecting to %s(%s):%i - %i s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:134 #, fuzzy, c-format msgid "Failed while waiting for connection to %s(%s):%i - %s" msgstr "Fehler bei Schreiben zu Ziel" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:144 #, c-format msgid "Failed to connect to %s(%s):%i" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:200 msgid "" "Received message out-of-band (not critical, ERROR level is just for " "debugging purposes)" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:293 #, fuzzy, c-format msgid "Using DH parameters from file: %s" msgstr "Fehler bei Ablage von FTP Datei" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:296 #, fuzzy msgid "Failed to open file with DH parameters for reading" msgstr "Fehler bei Öffnen von Datei %s zum Lesen: %s" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:301 #, fuzzy msgid "Failed to read file with DH parameters" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:304 #, fuzzy msgid "Failed to apply DH parameters" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:306 msgid "DH parameters applied" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:321 #, fuzzy, c-format msgid "Using curve with NID: %u" msgstr "Nutze space token %s" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:324 #, fuzzy msgid "Failed to generate EC key" msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:327 msgid "Failed to apply ECDH parameters" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:329 msgid "ECDH parameters applied" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:336 #, fuzzy, c-format msgid "Using cipher list: %s" msgstr "Nutze space token %s" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:352 #, c-format msgid "Using protocol options: 0x%x" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:39 msgid "Independent proxy - no rights granted" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:43 msgid "Proxy with all rights inherited" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:51 msgid "Proxy with empty policy - fail on unrecognized policy" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:56 #, c-format msgid "Proxy with specific policy: %s" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:60 msgid "Proxy with ARC Policy" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:62 msgid "Proxy with unknown policy - fail on unrecognized policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:77 #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:128 #, c-format msgid "Was expecting %s at the beginning of \"%s\"" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:82 #, c-format msgid "We only support CAs in Globus signing policy - %s is not supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:87 #, c-format msgid "We only support X509 CAs in Globus signing policy - %s is not supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:92 msgid "Missing CA subject in Globus signing policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:102 msgid "Negative rights are not supported in Globus signing policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:106 #, c-format msgid "Unknown rights in 
Globus signing policy - %s" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:111 #, c-format msgid "" "Only globus rights are supported in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:116 #, c-format msgid "" "Only signing rights are supported in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:133 #, c-format msgid "" "We only support subjects conditions in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:138 #, c-format msgid "" "We only support globus conditions in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:144 msgid "Missing condition subjects in Globus signing policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:220 msgid "Unknown element in Globus signing policy" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:218 #, fuzzy msgid "Critical VOMS attribute processing failed" msgstr "Konnte VOMS Attribut nicht herauslesen" #: src/hed/mcc/tls/MCCTLS.cpp:226 #, fuzzy msgid "VOMS attribute validation failed" msgstr "Konnte VOMS Attribut nicht herauslesen" #: src/hed/mcc/tls/MCCTLS.cpp:228 msgid "VOMS attribute is ignored due to processing/validation error" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:420 src/hed/mcc/tls/MCCTLS.cpp:559 #: src/hed/mcc/tls/MCCTLS.cpp:578 #, fuzzy, c-format msgid "Failed to establish connection: %s" msgstr "Fehler bei Schließen von Verbindung 2" #: src/hed/mcc/tls/MCCTLS.cpp:439 src/hed/mcc/tls/MCCTLS.cpp:521 #, c-format msgid "Peer name: %s" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:441 src/hed/mcc/tls/MCCTLS.cpp:523 #, c-format msgid "Identity name: %s" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:443 src/hed/mcc/tls/MCCTLS.cpp:525 #, c-format msgid "CA name: %s" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:450 msgid "Failed to process security attributes in TLS MCC for incoming message" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:458 msgid "Security check failed in TLS MCC for incoming message" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:531 msgid "Security check failed for outgoing TLS message" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:563 msgid "Security check failed for incoming TLS message" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:93 #, fuzzy msgid "" "Failed to allocate memory for certificate subject while matching policy." msgstr "Fehler beim Reservieren von Speicher" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:98 msgid "" "Failed to retrieve link to TLS stream. Additional policy matching is skipped." 
msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:128 #, c-format msgid "Certificate %s already expired" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:136 #, c-format msgid "Certificate %s will expire in %s" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:157 msgid "Failed to store application data" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:185 msgid "Failed to retrieve application data from OpenSSL" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:257 src/hed/mcc/tls/PayloadTLSMCC.cpp:356 msgid "Can not create the SSL Context object" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:270 src/hed/mcc/tls/PayloadTLSMCC.cpp:376 msgid "Can't set OpenSSL verify flags" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:289 src/hed/mcc/tls/PayloadTLSMCC.cpp:390 msgid "Can not create the SSL object" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:299 #, fuzzy msgid "Faile to assign hostname extension" msgstr "Fehler beim Lesen von SSL Token während Authentifizierung" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:313 msgid "Failed to establish SSL connection" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:316 src/hed/mcc/tls/PayloadTLSMCC.cpp:405 #, fuzzy, c-format msgid "Using cipher: %s" msgstr "Nutze space token %s" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:402 msgid "Failed to accept SSL connection" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:455 #, fuzzy, c-format msgid "Failed to shut down SSL: %s" msgstr "Fehler bei Erstellen von GUID in RLS: %s" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:47 msgid "" "ArcAuthZ: failed to initiate all PDPs - this instance will be non-functional" msgstr "" "ArcAuthZ: Fehler bei Initiierung wenigstens einer PDP - diese Instanz wird " "nicht funktional sein" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:70 #, fuzzy msgid "PDP: missing name attribute" msgstr "PDP: %s Name ist Doublette" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:74 #, fuzzy, c-format msgid "PDP: %s (%s)" msgstr "PDP: %s (%d)" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:79 #, fuzzy, c-format msgid "PDP: %s (%s) can not be loaded" msgstr "PDP: %s kann nicht geladen werden" #: src/hed/shc/arcpdp/ArcEvaluationCtx.cpp:251 #, fuzzy, c-format msgid "There are %d RequestItems" msgstr "Es gibt %d RequestItems" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:60 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:55 msgid "Can not parse classname for FunctionFactory from configuration" msgstr "Konnte classname für FunctionFactory nicht von Konfiguration parsen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:68 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:63 msgid "Can not parse classname for AttributeFactory from configuration" msgstr "Konnte classname für AttributeFactory nicht von Konfiguration parsen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:76 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:71 msgid "" "Can not parse classname for CombiningAlgorithmFactory from configuration" msgstr "" "Konnte classname für CombiningAlgorithmFactory nicht von Konfiguration parsen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:84 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:79 msgid "Can not parse classname for Request from configuration" msgstr "Konnte classname für Request nicht von Konfiguration parsen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:93 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:88 msgid "Can not parse classname for Policy from configuration" msgstr "Konnte classname für Policy nicht von Konfiguration parsen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:105 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:100 msgid "Can not dynamically produce AttributeFactory" msgstr "Kann AttributeFactory nicht 
dynamisch anlegen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:110 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:105 msgid "Can not dynamically produce FnFactory" msgstr "Kann FnFactory nicht dynamisch anlegen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:115 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:110 msgid "Can not dynamically produce AlgFacroty" msgstr "Kann AlgFactory nicht dynamisch anlegen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:126 #: src/hed/shc/gaclpdp/GACLEvaluator.cpp:31 #: src/hed/shc/gaclpdp/GACLEvaluator.cpp:37 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:121 #, fuzzy msgid "Can not create PolicyStore object" msgstr "Kann PolicyStore Objekt nicht anlegen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:177 src/hed/shc/test.cpp:183 #: src/hed/shc/testinterface_arc.cpp:102 src/hed/shc/testinterface_xacml.cpp:54 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:172 #, fuzzy msgid "Can not dynamically produce Request" msgstr "Kann Anfrage nicht dynamisch produzieren" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:261 #, c-format msgid "Result value (0=Permit, 1=Deny, 2=Indeterminate, 3=Not_Applicable): %d" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:110 #, fuzzy msgid "Can not find ArcPDPContext" msgstr "Kann ArcPDPContext nicht finden" #: src/hed/shc/arcpdp/ArcPDP.cpp:139 src/hed/shc/xacmlpdp/XACMLPDP.cpp:117 msgid "Evaluator does not support loadable Combining Algorithms" msgstr "Evaluator unterstützt ladbare Combining Algorithms nicht" #: src/hed/shc/arcpdp/ArcPDP.cpp:143 src/hed/shc/xacmlpdp/XACMLPDP.cpp:121 #, c-format msgid "Evaluator does not support specified Combining Algorithm - %s" msgstr "Evaluator unterstützt den angegebenen Combining Algorithm nicht - %s" #: src/hed/shc/arcpdp/ArcPDP.cpp:155 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:84 #: src/hed/shc/gaclpdp/GACLPDP.cpp:118 src/hed/shc/test.cpp:94 #: src/hed/shc/testinterface_arc.cpp:37 src/hed/shc/testinterface_xacml.cpp:37 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:133 msgid "Can not dynamically produce Evaluator" msgstr "Kann Evaluator nicht dynamisch produzieren" #: src/hed/shc/arcpdp/ArcPDP.cpp:158 msgid "Evaluator for ArcPDP was not loaded" msgstr "Evaluator für ArcPDP wurde nicht geladen" #: src/hed/shc/arcpdp/ArcPDP.cpp:165 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:57 #: src/hed/shc/gaclpdp/GACLPDP.cpp:128 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:87 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:143 src/tests/echo/echo.cpp:108 msgid "Missing security object in message" msgstr "Fehlendes security Objekt in Nachricht" #: src/hed/shc/arcpdp/ArcPDP.cpp:173 src/hed/shc/arcpdp/ArcPDP.cpp:181 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:137 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:143 #: src/hed/shc/gaclpdp/GACLPDP.cpp:136 src/hed/shc/gaclpdp/GACLPDP.cpp:144 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:95 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:103 #: src/tests/echo/echo.cpp:116 src/tests/echo/echo.cpp:123 msgid "Failed to convert security information to ARC request" msgstr "Fehler bei Konvertierung von security information für ARC Anfrage" #: src/hed/shc/arcpdp/ArcPDP.cpp:189 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:150 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:111 #, c-format msgid "ARC Auth. request: %s" msgstr "ARC Auth.
Anfrage: %s" #: src/hed/shc/arcpdp/ArcPDP.cpp:192 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:153 #: src/hed/shc/gaclpdp/GACLPDP.cpp:155 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:114 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:170 #, fuzzy msgid "No requested security information was collected" msgstr "Keine security information erhalten" #: src/hed/shc/arcpdp/ArcPDP.cpp:199 msgid "Not authorized by arc.pdp - failed to get response from Evaluator" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:245 #, fuzzy msgid "Authorized by arc.pdp" msgstr "Authorisiert von arc.pdp" #: src/hed/shc/arcpdp/ArcPDP.cpp:246 #, fuzzy msgid "" "Not authorized by arc.pdp - some of the RequestItem elements do not satisfy " "Policy" msgstr "" "Nicht authorisiert von arc.pdp; einige der RequestItems genügen nicht der Policy" #: src/hed/shc/arcpdp/ArcPolicy.cpp:56 src/hed/shc/arcpdp/ArcPolicy.cpp:70 #: src/hed/shc/gaclpdp/GACLPolicy.cpp:46 src/hed/shc/gaclpdp/GACLPolicy.cpp:59 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:48 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:64 msgid "Policy is empty" msgstr "Policy ist leer" #: src/hed/shc/arcpdp/ArcPolicy.cpp:114 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:115 #, fuzzy, c-format msgid "PolicyId: %s Alg inside this policy is:-- %s" msgstr "PolicyId: %s Alg in dieser Policy ist:-- %s" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:75 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:129 #, fuzzy msgid "No delegation policies in this context and message - passing through" msgstr "" "Keine delegation policies in diesem context und message - durchgelassen" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:95 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:109 msgid "Failed to convert security information to ARC policy" msgstr "Fehler bei Konvertieren von security information für ARC policy" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:116 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:123 #, c-format msgid "ARC delegation policy: %s" msgstr "ARC delegation policy: %s" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:161 msgid "No authorization response was returned" msgstr "Es wurde keine authorization response erwidert" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:164 #, c-format msgid "There are %d requests, which satisfy at least one policy" msgstr "Es gibt %d Anfragen, die wenigstens einer Policy genügen" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:183 #, fuzzy msgid "Delegation authorization passed" msgstr "Delegations Authorisation zugelassen" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:185 msgid "Delegation authorization failed" msgstr "Delegation Authorisierung fehlgeschlagen" #: src/hed/shc/delegationsh/DelegationSH.cpp:63 msgid "" "Missing CertificatePath element or ProxyPath element, or " " is missing" msgstr "" "Fehlendes CertificatePath Element oder ProxyPath Element, oder " " fehlt" #: src/hed/shc/delegationsh/DelegationSH.cpp:68 msgid "" "Missing or empty KeyPath element, or is missing" msgstr "" "Fehlendes oder leeres KeyPath Element, oder fehlt" #: src/hed/shc/delegationsh/DelegationSH.cpp:74 msgid "Missing or empty CertificatePath or CACertificatesDir element" msgstr "Fehlendes oder leeres CertificatePath oder CACertificatesDir Element" #: src/hed/shc/delegationsh/DelegationSH.cpp:81 #, c-format msgid "Delegation role not supported: %s" msgstr "Delegation role nicht unterstützt: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:90 #, c-format msgid "Delegation type not supported: %s" msgstr "Delegation Typ nicht unterstützt: %s" #:
src/hed/shc/delegationsh/DelegationSH.cpp:115 #, fuzzy msgid "Failed to acquire delegation context" msgstr "Konnte delegation context nicht erhalten" #: src/hed/shc/delegationsh/DelegationSH.cpp:143 #: src/hed/shc/delegationsh/DelegationSH.cpp:254 msgid "Can't create delegation context" msgstr "Kann delegation context nicht anlegen" #: src/hed/shc/delegationsh/DelegationSH.cpp:149 #, fuzzy msgid "Delegation handler with delegatee role starts to process" msgstr "Delegation handler mit delegatee role gestartet." #: src/hed/shc/delegationsh/DelegationSH.cpp:152 #: src/services/a-rex/arex.cpp:592 src/services/candypond/CandyPond.cpp:526 #: src/services/data-staging/DataDeliveryService.cpp:624 #, fuzzy msgid "process: POST" msgstr "Prozess: POST" #: src/hed/shc/delegationsh/DelegationSH.cpp:159 #: src/services/a-rex/arex.cpp:599 src/services/candypond/CandyPond.cpp:535 #: src/services/data-staging/DataDeliveryService.cpp:633 #: src/services/wrappers/python/pythonwrapper.cpp:416 msgid "input is not SOAP" msgstr "Eingabe ist kein SOAP" #: src/hed/shc/delegationsh/DelegationSH.cpp:166 #, c-format msgid "Delegation service: %s" msgstr "Delegation service: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:181 #: src/hed/shc/delegationsh/DelegationSH.cpp:188 #: src/tests/client/test_ClientX509Delegation_ARC.cpp:55 #, fuzzy, c-format msgid "Can not get the delegation credential: %s from delegation service: %s" msgstr "" "Kann delegation credential nicht erhalten: %s von delegation service: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:204 #: src/hed/shc/delegationsh/DelegationSH.cpp:268 #, fuzzy, c-format msgid "Delegated credential identity: %s" msgstr "Delegated credential Identität: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:205 #, c-format msgid "" "The delegated credential got from delegation service is stored into path: %s" msgstr "" "Das delegierte credential wie erhalten von delegation service is abgelegt " "unter Pfad: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:218 #, fuzzy msgid "The endpoint of delegation service should be configured" msgstr "Der Endpunkt des delegation service konnte nicht konfiguriert werden" #: src/hed/shc/delegationsh/DelegationSH.cpp:228 #: src/hed/shc/delegationsh/DelegationSH.cpp:340 #, fuzzy msgid "Delegation handler with delegatee role ends" msgstr "Delegation handler mit delegatee Rolle endet" #: src/hed/shc/delegationsh/DelegationSH.cpp:260 #, fuzzy msgid "Delegation handler with delegator role starts to process" msgstr "Delegation handler mit delegator Rolle startet" #: src/hed/shc/delegationsh/DelegationSH.cpp:269 #, fuzzy, c-format msgid "The delegated credential got from path: %s" msgstr "Das delegated credetion erhalten von Pfad: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:290 #, fuzzy, c-format msgid "Can not create delegation crendential to delegation service: %s" msgstr "Kann delegation credential nicht anlegen für delegation service: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:328 msgid "output is not SOAP" msgstr "Ausgabe ist nicht SOAP" #: src/hed/shc/delegationsh/DelegationSH.cpp:339 #, fuzzy, c-format msgid "" "Succeeded to send DelegationService: %s and DelegationID: %s info to peer " "service" msgstr "" "Senden von DelegationService erfolgreich: %s und DelegationID: %s Info an " "peer service" #: src/hed/shc/delegationsh/DelegationSH.cpp:345 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:220 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:101 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:94 #, fuzzy msgid "Incoming Message is 
not SOAP" msgstr "Einkommende Nachricht ist nicht SOAP" #: src/hed/shc/delegationsh/DelegationSH.cpp:352 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:341 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:123 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:108 msgid "Outgoing Message is not SOAP" msgstr "Ausgehende Nachricht ist kein SOAP" #: src/hed/shc/delegationsh/DelegationSH.cpp:356 #, fuzzy msgid "Delegation handler is not configured" msgstr "Delegation handler wurde nicht konfiguriert" #: src/hed/shc/gaclpdp/GACLPDP.cpp:121 msgid "Evaluator for GACLPDP was not loaded" msgstr "Evaluator für GACLPDP wurde nicht geladen" #: src/hed/shc/gaclpdp/GACLPDP.cpp:152 #, fuzzy, c-format msgid "GACL Auth. request: %s" msgstr "GACL Auth. Anfrage. %s" #: src/hed/shc/gaclpdp/GACLPolicy.cpp:50 src/hed/shc/gaclpdp/GACLPolicy.cpp:63 #, fuzzy msgid "Policy is not gacl" msgstr "Policy ist nicht gacl" #: src/hed/shc/legacy/ConfigParser.cpp:13 #, fuzzy msgid "Configuration file not specified" msgstr "Delegation Authorisierung fehlgeschlagen" #: src/hed/shc/legacy/ConfigParser.cpp:18 #: src/hed/shc/legacy/ConfigParser.cpp:28 #: src/hed/shc/legacy/ConfigParser.cpp:33 #, fuzzy msgid "Configuration file can not be read" msgstr "Delegation Authorisierung fehlgeschlagen" #: src/hed/shc/legacy/ConfigParser.cpp:43 #, c-format msgid "Configuration file is broken - block name is too short: %s" msgstr "" #: src/hed/shc/legacy/ConfigParser.cpp:47 #, c-format msgid "Configuration file is broken - block name does not end with ]: %s" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:39 src/hed/shc/legacy/LegacyPDP.cpp:119 msgid "Configuration file not specified in ConfigBlock" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:48 src/hed/shc/legacy/LegacyPDP.cpp:128 #, fuzzy msgid "BlockName is empty" msgstr "Policy is leer" #: src/hed/shc/legacy/LegacyMap.cpp:108 #, fuzzy, c-format msgid "Failed processing user mapping command: %s %s" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/hed/shc/legacy/LegacyMap.cpp:114 #: src/services/gridftpd/fileroot_config.cpp:320 #, fuzzy, c-format msgid "Failed to change mapping stack processing policy in: %s = %s" msgstr "Konnte Zugriffsrechte von hard link nicht ändern zu 0644: %s" #: src/hed/shc/legacy/LegacyMap.cpp:174 msgid "LegacyMap: no configurations blocks defined" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:196 src/hed/shc/legacy/LegacyPDP.cpp:239 #, c-format msgid "" "LegacyPDP: there is no %s Sec Attribute defined. Probably ARC Legacy Sec " "Handler is not configured or failed." msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:201 src/hed/shc/legacy/LegacyPDP.cpp:244 msgid "LegacyPDP: ARC Legacy Sec Attribute not recognized." 
msgstr "" #: src/hed/shc/legacy/LegacyPDP.cpp:138 #, fuzzy, c-format msgid "Failed to parse configuration file %s" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/hed/shc/legacy/LegacyPDP.cpp:144 #, fuzzy, c-format msgid "Block %s not found in configuration file %s" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/hed/shc/legacy/LegacySecHandler.cpp:40 #: src/hed/shc/legacy/LegacySecHandler.cpp:118 msgid "LegacySecHandler: configuration file not specified" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:146 src/hed/shc/legacy/arc_lcmaps.cpp:161 #, fuzzy, c-format msgid "" "Failed to convert GSI credential to GSS credential (major: %d, minor: %d)" msgstr "" "Fehler bei Konvertieren von GSI Credential zu GCC Credential (major: %d, " "minor: %d)%s" #: src/hed/shc/legacy/arc_lcas.cpp:171 src/hed/shc/legacy/arc_lcmaps.cpp:186 #, fuzzy msgid "Missing subject name" msgstr "Fehlendes security Objekt in Nachricht" #: src/hed/shc/legacy/arc_lcas.cpp:176 src/hed/shc/legacy/arc_lcmaps.cpp:191 #, fuzzy msgid "Missing path of credentials file" msgstr "Pfad zu verlangter Datei" #: src/hed/shc/legacy/arc_lcas.cpp:182 msgid "Missing name of LCAS library" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:199 #, c-format msgid "Can't load LCAS library %s: %s" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:209 #, c-format msgid "Can't find LCAS functions in a library %s" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:219 #, fuzzy msgid "Failed to initialize LCAS" msgstr "Fehler bei Initialisierung von OpenSSL Bibliothek" #: src/hed/shc/legacy/arc_lcas.cpp:234 #, fuzzy msgid "Failed to terminate LCAS" msgstr "Fehler beim Authentifizieren: %s" #: src/hed/shc/legacy/arc_lcmaps.cpp:197 msgid "Missing name of LCMAPS library" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:211 #, fuzzy msgid "Can't read policy names" msgstr "Kann nicht von Quelle lesen" #: src/hed/shc/legacy/arc_lcmaps.cpp:222 #, c-format msgid "Can't load LCMAPS library %s: %s" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:234 #, c-format msgid "Can't find LCMAPS functions in a library %s" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:246 msgid "LCMAPS has lcmaps_run" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:247 msgid "LCMAPS has getCredentialData" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:251 #, fuzzy msgid "Failed to initialize LCMAPS" msgstr "Fehler bei Initialisierung von OpenSSL Bibliothek" #: src/hed/shc/legacy/arc_lcmaps.cpp:291 #, c-format msgid "LCMAPS returned invalid GID: %u" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:294 #, fuzzy msgid "LCMAPS did not return any GID" msgstr "SRM lieferte keine Information zurück" #: src/hed/shc/legacy/arc_lcmaps.cpp:297 #, c-format msgid "LCMAPS returned UID which has no username: %u" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:300 #, c-format msgid "LCMAPS returned invalid UID: %u" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:303 #, fuzzy msgid "LCMAPS did not return any UID" msgstr "SRM lieferte keine Information zurück" #: src/hed/shc/legacy/arc_lcmaps.cpp:312 #, fuzzy msgid "Failed to terminate LCMAPS" msgstr "Fehler beim Authentifizieren: %s" #: src/hed/shc/legacy/auth.cpp:35 src/services/gridftpd/auth/auth.cpp:35 #, fuzzy, c-format msgid "Unexpected argument for 'all' rule - %s" msgstr "Kann doc Argument nicht anlegen" #: src/hed/shc/legacy/auth.cpp:337 #, fuzzy, c-format msgid "Credentials stored in temporary file %s" msgstr "Konnte nicht in temporäre Datei schreiben: %s" #: src/hed/shc/legacy/auth.cpp:346 #, fuzzy, c-format msgid "Assigned to 
authorization group %s" msgstr "Delegations Authorisation zugelassen" #: src/hed/shc/legacy/auth.cpp:351 #, fuzzy, c-format msgid "Assigned to userlist %s" msgstr "Delegations Authorisation zugelassen" #: src/hed/shc/legacy/auth_file.cpp:22 #: src/services/gridftpd/auth/auth_file.cpp:22 #, fuzzy, c-format msgid "Failed to read file %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/hed/shc/legacy/auth_otokens.cpp:30 #, fuzzy msgid "Missing subject in configuration" msgstr "Fehlendes oder leeres KeyPath Element" #: src/hed/shc/legacy/auth_otokens.cpp:35 #, fuzzy msgid "Missing issuer in configuration" msgstr "Fehlendes oder leeres KeyPath Element" #: src/hed/shc/legacy/auth_otokens.cpp:40 #, fuzzy msgid "Missing audience in configuration" msgstr "Fehlendes oder leeres KeyPath Element" #: src/hed/shc/legacy/auth_otokens.cpp:45 #, fuzzy msgid "Missing scope in configuration" msgstr "Fehlendes oder leeres KeyPath Element" #: src/hed/shc/legacy/auth_otokens.cpp:50 src/hed/shc/legacy/auth_voms.cpp:47 #: src/services/gridftpd/auth/auth_voms.cpp:51 #, fuzzy msgid "Missing group in configuration" msgstr "Fehlendes oder leeres KeyPath Element" #: src/hed/shc/legacy/auth_otokens.cpp:53 #, fuzzy, c-format msgid "Rule: subject: %s" msgstr "Subjekt: %s" #: src/hed/shc/legacy/auth_otokens.cpp:54 #, fuzzy, c-format msgid "Rule: issuer: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/shc/legacy/auth_otokens.cpp:55 #, fuzzy, c-format msgid "Rule: audience: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/shc/legacy/auth_otokens.cpp:56 #, fuzzy, c-format msgid "Rule: scope: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/shc/legacy/auth_otokens.cpp:57 src/hed/shc/legacy/auth_voms.cpp:66 #: src/services/gridftpd/auth/auth_voms.cpp:68 #, c-format msgid "Rule: group: %s" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:60 #, fuzzy, c-format msgid "Match issuer: %s" msgstr "Fataler Fehler: %s" #: src/hed/shc/legacy/auth_otokens.cpp:66 #, fuzzy, c-format msgid "Matched: %s %s %s" msgstr "" "cnd:\n" "%s ist ein %s" #: src/hed/shc/legacy/auth_otokens.cpp:80 src/hed/shc/legacy/auth_voms.cpp:93 #: src/services/gridftpd/auth/auth_voms.cpp:98 msgid "Matched nothing" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:45 src/hed/shc/legacy/unixmap.cpp:215 #: src/services/gridftpd/auth/auth_plugin.cpp:70 #: src/services/gridftpd/auth/unixmap.cpp:214 #, c-format msgid "Plugin %s returned: %u" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:49 src/hed/shc/legacy/unixmap.cpp:219 #, fuzzy, c-format msgid "Plugin %s timeout after %u seconds" msgstr "Verbindung zu %s fehlgeschlagen nach %i Sekunden" #: src/hed/shc/legacy/auth_plugin.cpp:52 src/hed/shc/legacy/unixmap.cpp:222 #, c-format msgid "Plugin %s failed to start" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:54 src/hed/shc/legacy/unixmap.cpp:224 #, fuzzy, c-format msgid "Plugin %s printed: %s" msgstr "lfn: %s - pfn: %s" #: src/hed/shc/legacy/auth_plugin.cpp:55 src/hed/shc/legacy/unixmap.cpp:212 #: src/hed/shc/legacy/unixmap.cpp:225 #, fuzzy, c-format msgid "Plugin %s error: %s" msgstr "Globus Fehler: %s" #: src/hed/shc/legacy/auth_voms.cpp:42 #: src/services/gridftpd/auth/auth_voms.cpp:45 #, fuzzy msgid "Missing VO in configuration" msgstr "Fehlendes oder leeres KeyPath Element" #: src/hed/shc/legacy/auth_voms.cpp:52 #: src/services/gridftpd/auth/auth_voms.cpp:57 #, fuzzy msgid "Missing role in configuration" msgstr "Fehlendes oder leeres KeyPath Element" #: src/hed/shc/legacy/auth_voms.cpp:57 #: src/services/gridftpd/auth/auth_voms.cpp:63 #, fuzzy msgid 
"Missing capabilities in configuration" msgstr "Fehlendes oder leeres KeyPath Element" #: src/hed/shc/legacy/auth_voms.cpp:62 #, fuzzy msgid "Too many arguments in configuration" msgstr "ausführliche Ausgabe" #: src/hed/shc/legacy/auth_voms.cpp:65 #: src/services/gridftpd/auth/auth_voms.cpp:67 #, c-format msgid "Rule: vo: %s" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:67 #: src/services/gridftpd/auth/auth_voms.cpp:69 #, fuzzy, c-format msgid "Rule: role: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/shc/legacy/auth_voms.cpp:68 #: src/services/gridftpd/auth/auth_voms.cpp:70 #, fuzzy, c-format msgid "Rule: capabilities: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/hed/shc/legacy/auth_voms.cpp:71 #: src/services/gridftpd/auth/auth_voms.cpp:77 #, c-format msgid "Match vo: %s" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:78 #, fuzzy, c-format msgid "Matched: %s %s %s %s" msgstr "" "cnd:\n" "%s ist ein %s" #: src/hed/shc/legacy/simplemap.cpp:70 #: src/services/gridftpd/auth/simplemap.cpp:68 #, c-format msgid "SimpleMap: acquired new unmap time of %u seconds" msgstr "" #: src/hed/shc/legacy/simplemap.cpp:72 #: src/services/gridftpd/auth/simplemap.cpp:70 #, fuzzy msgid "SimpleMap: wrong number in unmaptime command" msgstr "Falsche Anzahl an Parametern übertragen" #: src/hed/shc/legacy/simplemap.cpp:85 src/hed/shc/legacy/simplemap.cpp:90 #: src/services/gridftpd/auth/simplemap.cpp:83 #: src/services/gridftpd/auth/simplemap.cpp:88 #, c-format msgid "SimpleMap: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:65 src/hed/shc/legacy/unixmap.cpp:70 #: src/services/gridftpd/auth/unixmap.cpp:63 #: src/services/gridftpd/auth/unixmap.cpp:68 msgid "Mapping policy option has empty value" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:80 src/services/gridftpd/auth/unixmap.cpp:78 #, fuzzy, c-format msgid "Unsupported mapping policy action: %s" msgstr "Nicht unterstützte URL für Ziel: %s" #: src/hed/shc/legacy/unixmap.cpp:91 src/services/gridftpd/auth/unixmap.cpp:89 #, fuzzy, c-format msgid "Unsupported mapping policy option: %s" msgstr "Nicht unterstützte URL für Ziel: %s" #: src/hed/shc/legacy/unixmap.cpp:103 src/hed/shc/legacy/unixmap.cpp:108 #: src/services/gridftpd/auth/unixmap.cpp:100 #: src/services/gridftpd/auth/unixmap.cpp:105 msgid "User name mapping command is empty" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:116 #: src/services/gridftpd/auth/unixmap.cpp:113 #, c-format msgid "User name mapping has empty authgroup: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:147 #: src/services/gridftpd/auth/unixmap.cpp:147 #, fuzzy, c-format msgid "Unknown user name mapping rule %s" msgstr "unbekannter return code %s" #: src/hed/shc/legacy/unixmap.cpp:156 src/hed/shc/legacy/unixmap.cpp:161 #: src/hed/shc/legacy/unixmap.cpp:177 src/hed/shc/legacy/unixmap.cpp:183 #: src/services/gridftpd/auth/unixmap.cpp:175 #: src/services/gridftpd/auth/unixmap.cpp:180 #: src/services/gridftpd/auth/unixmap.cpp:196 msgid "Plugin (user mapping) command is empty" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:167 #: src/services/gridftpd/auth/unixmap.cpp:186 #, c-format msgid "Plugin (user mapping) timeout is not a number: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:171 #: src/services/gridftpd/auth/unixmap.cpp:190 #, c-format msgid "Plugin (user mapping) timeout is wrong number: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:203 #, fuzzy, c-format msgid "Plugin %s returned no username" msgstr "lfn: %s - pfn: %s" #: src/hed/shc/legacy/unixmap.cpp:208 #: src/services/gridftpd/auth/unixmap.cpp:211 #, c-format msgid "Plugin %s 
returned too much: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:211 #, fuzzy, c-format msgid "Plugin %s returned no mapping" msgstr "lfn: %s - pfn: %s" #: src/hed/shc/legacy/unixmap.cpp:234 msgid "User subject match is missing user subject." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:238 #: src/services/gridftpd/auth/unixmap.cpp:230 #, c-format msgid "Mapfile at %s can't be opened." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:262 #: src/services/gridftpd/auth/unixmap.cpp:255 msgid "User pool mapping is missing user subject." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:267 #: src/services/gridftpd/auth/unixmap.cpp:260 #, c-format msgid "User pool at %s can't be opened." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:272 #: src/services/gridftpd/auth/unixmap.cpp:265 #, c-format msgid "User pool at %s failed to perform user mapping." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:290 #: src/services/gridftpd/auth/unixmap.cpp:283 #, c-format msgid "User name direct mapping is missing user name: %s." msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:63 msgid "OTokens: Attr: message" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:68 #, c-format msgid "OTokens: Attr: %s = %s" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:73 #, c-format msgid "OTokens: Attr: token: %s" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:76 #, c-format msgid "OTokens: Attr: token: bearer: %s" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:146 msgid "OTokens: Handle" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:148 msgid "OTokens: Handle: message" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:151 #, fuzzy msgid "Failed to create OTokens security attributes" msgstr "Fehler bei Lesen von Objekt %s" #: src/hed/shc/otokens/OTokensSH.cpp:155 #, c-format msgid "OTokens: Handle: attributes created: subject = %s" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:48 msgid "Creating a pdpservice client" msgstr "Lege pdpservice client an" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:80 #, fuzzy msgid "Arc policy can not been carried by SAML2.0 profile of XACML" msgstr "Arc policy can nicht mit SAML2.0 Profil von XACML geprüft werden" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:152 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:185 msgid "Policy Decision Service invocation failed" msgstr "Ausführen des Policy Decision Service schlug fehl" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:155 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:188 #: src/tests/client/test_ClientInterface.cpp:88 #: src/tests/client/test_ClientSAML2SSO.cpp:81 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:100 #: src/tests/echo/test_clientinterface.cpp:82 #: src/tests/echo/test_clientinterface.cpp:149 #: src/tests/echo/test_clientinterface.py:32 msgid "There was no SOAP response" msgstr "Keine SOAP response erhalten" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:170 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:204 msgid "Authorized from remote pdp service" msgstr "Authorisiert durch remote pdp service" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:171 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:205 msgid "Unauthorized from remote pdp service" msgstr "Nicht authorisiert von entferntem PDP service" #: src/hed/shc/saml2sso_assertionconsumersh/SAML2SSO_AssertionConsumerSH.cpp:69 #, fuzzy msgid "Can not get SAMLAssertion SecAttr from message context" msgstr "Kann SAMLAssertion SecAttr nicht erhalten von message context" #: 
src/hed/shc/samltokensh/SAMLTokenSH.cpp:152 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:44 msgid "Missing or empty CertificatePath element" msgstr "Fehlendes oder leeres CertificatePath Element" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:157 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:49 msgid "Missing or empty KeyPath element" msgstr "Fehlendes oder leeres KeyPath Element" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:163 msgid "" "Both of CACertificatePath and CACertificatesDir elements missing or empty" msgstr "" "Sowohl CACertificatePath als auch CACertificatesDir Elemente sind fehlend " "oder leer" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:175 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:61 msgid "" "Missing or empty CertificatePath or CACertificatesDir element; will only " "check the signature, will not do message authentication" msgstr "" "Fehlendes oder leeres CertificatePath oder CACertificatesDir Element; werde " "nur die Signatur überprüfen, die Nachricht jedoch nicht authentifizieren" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:179 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:65 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:65 #, fuzzy, c-format msgid "Processing type not supported: %s" msgstr "Verarbeitungstyp nicht unterstützt: %s" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:199 msgid "Failed to parse SAML Token from incoming SOAP" msgstr "Konnte SAML Token nicht aus eingehender SOAP herausparsen" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:209 msgid "Failed to authenticate SAML Token inside the incoming SOAP" msgstr "Konnte SAML Token aus eingehender SOAP nicht authentifizieren" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:212 msgid "Succeeded to authenticate SAMLToken" msgstr "Erfolgreiche Authentifikation von SAMLToken" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:275 #, fuzzy, c-format msgid "No response from AA service %s" msgstr "Keine Antwort von AA service %s" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:279 #, fuzzy, c-format msgid "SOAP Request to AA service %s failed" msgstr "SOAP Request zu AA service %s schlug fehl" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:287 msgid "Cannot find content under response soap message" msgstr "Kann Inhalt in SOAP-Antwort nicht finden" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:291 msgid "Cannot find under response soap message:" msgstr "Kann in SOAP-Antwort nicht finden" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:308 msgid "The Response is not going to this end" msgstr "Die Antwort geht nicht bis zu diesem Ende" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:315 msgid "The StatusCode is Success" msgstr "Der StatusCode ist Success" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:321 msgid "Succeeded to verify the signature under " msgstr "Erfolgreiche Überprüfung der Signatur unter " #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:324 msgid "Failed to verify the signature under " msgstr "Fehler bei der Überprüfung der Signatur unter " #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:335 msgid "Failed to generate SAML Token for outgoing SOAP" msgstr "Konnte SAML Token für ausgehendes SOAP nicht generieren" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:345 #, fuzzy msgid "SAML Token handler is not configured" msgstr "SAML Token handler ist nicht konfiguriert" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:29 #, fuzzy, c-format msgid "Access list location: %s" msgstr "Zugriffsliste location: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:39 msgid "" "No policy file or DNs specified for simplelist.pdp, please set location " "attribute or at least one DN 
element for simplelist PDP node in " "configuration." msgstr "" "Keine Policy Datei oder DNs angegeben für simplelist.pdp, bitte setzen Sie " "ein location Attribut oder zumindest ein DN Element für den PDP Knoten in " "der Konfiguration" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:42 #, fuzzy, c-format msgid "Subject to match: %s" msgstr "Subjekt: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:45 #, fuzzy, c-format msgid "Policy subject: %s" msgstr "Subjekt: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:47 #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:73 #, fuzzy, c-format msgid "Authorized from simplelist.pdp: %s" msgstr "Authorisiert durch simplelist.pdp" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:54 #, fuzzy msgid "" "The policy file setup for simplelist.pdp does not exist, please check " "location attribute for simplelist PDP node in service configuration" msgstr "" "Die Policy Datei Konfiguration für simplelist.pdb existiert nicht, bitte " "überprüfen Sie das location Attribut für simplelist PDP node in der Serivice " "Konfiguration" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:61 #, fuzzy, c-format msgid "Policy line: %s" msgstr "Policy Zeile: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:79 #, fuzzy, c-format msgid "Not authorized from simplelist.pdp: %s" msgstr "Nicht authorisiert von simplelist.pdp" #: src/hed/shc/test.cpp:27 src/hed/shc/testinterface_arc.cpp:26 #: src/hed/shc/testinterface_xacml.cpp:26 msgid "Start test" msgstr "Starte Test" #: src/hed/shc/test.cpp:101 #, fuzzy msgid "Input request from a file: Request.xml" msgstr "Input request von einer Datei: Request.xml" #: src/hed/shc/test.cpp:107 src/hed/shc/test.cpp:197 #: src/hed/shc/testinterface_arc.cpp:124 #, c-format msgid "There is %d subjects, which satisfy at least one policy" msgstr "Es gibt %d Subjekte, die wenigstens eine Policy erfüllen" #: src/hed/shc/test.cpp:121 #, fuzzy, c-format msgid "Attribute Value (1): %s" msgstr "Attribut Wert (1): %s" #: src/hed/shc/test.cpp:132 #, fuzzy msgid "Input request from code" msgstr "Eingabe-Aufforderung von code" #: src/hed/shc/test.cpp:211 #, fuzzy, c-format msgid "Attribute Value (2): %s" msgstr "Attributewert (2): %s" #: src/hed/shc/testinterface_arc.cpp:75 src/hed/shc/testinterface_xacml.cpp:46 #, fuzzy msgid "Can not dynamically produce Policy" msgstr "Kann Policy nicht dynamisch produzieren" #: src/hed/shc/testinterface_arc.cpp:138 #, fuzzy, c-format msgid "Attribute Value inside Subject: %s" msgstr "Attributwert in Subjekt: %s" #: src/hed/shc/testinterface_arc.cpp:148 msgid "The request has passed the policy evaluation" msgstr "Die Anfrage hat die Policy Evaluierung bestanden" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:43 msgid "Missing or empty PasswordSource element" msgstr "Fehlendes oder leeres PasswordSource Element" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:54 #, c-format msgid "Password encoding type not supported: %s" msgstr "Passwort Kodierung nicht unterstützt: %s" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:59 msgid "Missing or empty Username element" msgstr "Fehlendes oder leeres Username Element" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:79 msgid "The payload of incoming message is empty" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:84 #, fuzzy msgid "Failed to cast PayloadSOAP from incoming payload" msgstr "Konnte SAML Token nicht aus eingehender SOAP herausparsen" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:89 msgid "Failed to parse Username Token from incoming SOAP" 
msgstr "Konnte Username Token nicht von eingehender SOAP Nachricht herauslesen" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:95 msgid "Failed to authenticate Username Token inside the incoming SOAP" msgstr "" "Fehler bei der Authentifikation des Username Token in der einngehenden SOAP" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:98 msgid "Succeeded to authenticate UsernameToken" msgstr "Erfolgreiche Authentifikation des UsernameToken" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:108 msgid "The payload of outgoing message is empty" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:113 msgid "Failed to cast PayloadSOAP from outgoing payload" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:119 msgid "Failed to generate Username Token for outgoing SOAP" msgstr "Fehler bei Erstellen von Nutzernamen Token für ausgehende SOAP" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:127 msgid "Username Token handler is not configured" msgstr "Nutzernamen Token handler ist nicht konfiguriert" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:81 msgid "Failed to parse X509 Token from incoming SOAP" msgstr "Fehler bei Parsen von X509 Token in eigehendem SOAP" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:85 msgid "Failed to verify X509 Token inside the incoming SOAP" msgstr "Fehler bei Verifizieren von X509 Token in eigehendem SOAP" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:89 msgid "Failed to authenticate X509 Token inside the incoming SOAP" msgstr "Fehler bei Authentifizieren von X509 Token in eigehendem SOAP" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:92 msgid "Succeeded to authenticate X509Token" msgstr "X509Token erfolgreich authentifiziert" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:102 msgid "Failed to generate X509 Token for outgoing SOAP" msgstr "Fehler bei Generieren von X509 Token für ausgehende SOAP" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:112 msgid "X509 Token handler is not configured" msgstr "X509 Token handler ist nicht konfiguriert" #: src/hed/shc/xacmlpdp/XACMLApply.cpp:29 msgid "Can not create function: FunctionId does not exist" msgstr "Kann Funktion nicht anlegen: FunctionId existiert nicht" #: src/hed/shc/xacmlpdp/XACMLApply.cpp:33 #: src/hed/shc/xacmlpdp/XACMLTarget.cpp:40 #, fuzzy, c-format msgid "Can not create function %s" msgstr "Kann Funktion %s nicht anlegen" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:88 #, fuzzy msgid "Can not find XACMLPDPContext" msgstr "Kann XACMLPDPContext nciht finden" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:136 msgid "Evaluator for XACMLPDP was not loaded" msgstr "Evaluator für XACMLPDP wurde nicht geladen" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:151 src/hed/shc/xacmlpdp/XACMLPDP.cpp:159 #, fuzzy msgid "Failed to convert security information to XACML request" msgstr "" "Fehler bei Konvertierung der security information zu einer XACML Anfrage" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:167 #, c-format msgid "XACML request: %s" msgstr "XACML Anfrage: %s" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:179 msgid "Authorized from xacml.pdp" msgstr "Authorisiert durch xaml.pdp" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:180 #, fuzzy msgid "UnAuthorized from xacml.pdp" msgstr "UnAuthorisiert durch xaml.pdp" #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:55 msgid "Can not find element with proper namespace" msgstr "Kann element mit passendem namespace nicht finden" #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:132 msgid "No target available inside the policy" msgstr "Kein Ziel innerhalb der Policy vorhanden" #: src/hed/shc/xacmlpdp/XACMLRequest.cpp:34 msgid "Request is 
empty" msgstr "Anfrage ist leer" #: src/hed/shc/xacmlpdp/XACMLRequest.cpp:39 msgid "Can not find element with proper namespace" msgstr "Kann element mit passendem namespace nicht finden" #: src/hed/shc/xacmlpdp/XACMLRule.cpp:35 msgid "Invalid Effect" msgstr "Ungültiger Effekt" #: src/hed/shc/xacmlpdp/XACMLRule.cpp:48 msgid "No target available inside the rule" msgstr "Kein Ziel verfügbar in dieser Regel" #: src/libs/data-staging/DTR.cpp:82 src/libs/data-staging/DTR.cpp:86 #, fuzzy, c-format msgid "Could not handle endpoint %s" msgstr "konnte Ende von clientxrsl nicht finden" #: src/libs/data-staging/DTR.cpp:96 #, fuzzy msgid "Source is the same as destination" msgstr "Quelle Ziel" #: src/libs/data-staging/DTR.cpp:175 #, fuzzy, c-format msgid "Invalid ID: %s" msgstr "Ungültige URL: %s" #: src/libs/data-staging/DTR.cpp:212 #, fuzzy, c-format msgid "%s->%s" msgstr "%s (%s)" #: src/libs/data-staging/DTR.cpp:320 #, c-format msgid "No callback for %s defined" msgstr "" #: src/libs/data-staging/DTR.cpp:335 #, c-format msgid "NULL callback for %s" msgstr "" #: src/libs/data-staging/DTR.cpp:338 #, c-format msgid "Request to push to unknown owner - %u" msgstr "" #: src/libs/data-staging/DTRList.cpp:216 #, c-format msgid "Boosting priority from %i to %i due to incoming higher priority DTR" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:48 #: src/libs/data-staging/DataDelivery.cpp:72 msgid "Received invalid DTR" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:54 #, c-format msgid "Delivery received new DTR %s with source: %s, destination: %s" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:68 msgid "Received no DTR" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:80 #, c-format msgid "Cancelling DTR %s with source: %s, destination: %s" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:91 #, c-format msgid "DTR %s requested cancel but no active transfer" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:147 #, fuzzy, c-format msgid "Cleaning up after failure: deleting %s" msgstr "Lege Verzeichnis %s an" #: src/libs/data-staging/DataDelivery.cpp:188 #: src/libs/data-staging/DataDelivery.cpp:263 #: src/libs/data-staging/DataDelivery.cpp:303 #: src/libs/data-staging/DataDelivery.cpp:323 msgid "Failed to delete delivery object or deletion timed out" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:254 #, c-format msgid "Transfer finished: %llu bytes transferred %s" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:329 msgid "Data delivery loop exited" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:45 #, fuzzy msgid "No source defined" msgstr "Anfrage %s schlug fehl" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:49 #, fuzzy msgid "No destination defined" msgstr "Beendigung des Jobs schlug fehl" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:139 #, fuzzy, c-format msgid "Bad checksum format %s" msgstr "Errechneted checksum: %s" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:178 #, fuzzy, c-format msgid "Failed to run command: %s" msgstr "Fehler bei Anlegen von GSI Context: %s" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:213 #, fuzzy, c-format msgid "DataDelivery: %s" msgstr "Fataler Fehler: %s" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:225 #, c-format msgid "DataStagingDelivery exited with code %i" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:244 #, c-format msgid "Transfer killed after %i seconds without communication" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:67 #: 
src/libs/data-staging/DataDeliveryRemoteComm.cpp:315 #, fuzzy, c-format msgid "Connecting to Delivery service at %s" msgstr "Kein Verbindungsaufbau zu Server: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:94 #, fuzzy, c-format msgid "Failed to set up credential delegation with %s" msgstr "Initiierung der Delegation fehlgeschlagen" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:100 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:174 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:240 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:326 #, c-format msgid "" "Request:\n" "%s" msgstr "" "Anfrage:\n" "%s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:106 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:332 #, fuzzy, c-format msgid "Could not connect to service %s: %s" msgstr "Fehler bei Verbinden zu server %s:%d" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:114 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:340 #, fuzzy, c-format msgid "No SOAP response from Delivery service %s" msgstr "Keine Antwort von AA service %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:119 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:193 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:267 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:346 #, c-format msgid "" "Response:\n" "%s" msgstr "" "Antwort:\n" "%s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:128 #, fuzzy, c-format msgid "Failed to start transfer request: %s" msgstr "Fehler bei Transfer von Daten" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:135 #, fuzzy, c-format msgid "Bad format in XML response from service at %s: %s" msgstr "Formatierungsfehler in Datei %s: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:143 #, fuzzy, c-format msgid "Could not make new transfer request: %s: %s" msgstr "Konnte temporäre Datei nicht anlegen: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:148 #, c-format msgid "Started remote Delivery at %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:181 #, fuzzy, c-format msgid "Failed to send cancel request: %s" msgstr "Kann Kanal stdout nicht nutzen" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:188 #, fuzzy msgid "Failed to cancel: No SOAP response" msgstr "Keine SOAP Antwort" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:202 #, fuzzy, c-format msgid "Failed to cancel transfer request: %s" msgstr "Fehler bei Transfer von Daten" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:209 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:290 #, fuzzy, c-format msgid "Bad format in XML response: %s" msgstr "Formatierungsfehler in Datei %s: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:216 #, fuzzy, c-format msgid "Failed to cancel: %s" msgstr "Fehler beim Authentifizieren: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:260 #, fuzzy msgid "No SOAP response from delivery service" msgstr "Keine Antwort von Server erhalten" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:281 #, fuzzy, c-format msgid "Failed to query state: %s" msgstr "Fehler beim Authentifizieren: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:355 #, c-format msgid "SOAP fault from delivery service at %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:363 #, c-format msgid "Bad format in XML response from delivery service at %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:371 #, fuzzy, c-format msgid "Error pinging delivery service at %s: %s: %s" msgstr "Konnte replica 
nicht finden: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:379 #, fuzzy, c-format msgid "Dir %s allowed at service %s" msgstr "Delegation service: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:473 #, c-format msgid "" "DataDelivery log tail:\n" "%s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:486 #, fuzzy msgid "Failed locating credentials" msgstr "Fehler beim Auflisten von Meta-Dateien" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:491 #, fuzzy msgid "Failed to initiate client connection" msgstr "Initiierung der Delegation fehlgeschlagen" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:497 msgid "Client connection has no entry point" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:507 msgid "Failed to initiate delegation credentials" msgstr "Fehler bei der Initialisierung der delegation credentials" #: src/libs/data-staging/DataStagingDelivery.cpp:97 #, c-format msgid "%5u s: %10.1f kB %8.1f kB/s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:152 #, fuzzy msgid "Unexpected arguments" msgstr "Kann doc Argument nicht anlegen" #: src/libs/data-staging/DataStagingDelivery.cpp:155 #, fuzzy msgid "Source URL missing" msgstr "ServiceURL fehlt" #: src/libs/data-staging/DataStagingDelivery.cpp:158 #, fuzzy msgid "Destination URL missing" msgstr "Ziel: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:162 #, c-format msgid "Source URL not valid: %s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:166 #, fuzzy, c-format msgid "Destination URL not valid: %s" msgstr "Ziel: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:223 #, fuzzy, c-format msgid "Unknown transfer option: %s" msgstr "Datentransfer abgebrochen: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:248 #, fuzzy, c-format msgid "Source URL not supported: %s" msgstr "Verarbeitungstyp nicht unterstützt: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:253 #: src/libs/data-staging/DataStagingDelivery.cpp:272 msgid "No credentials supplied" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:267 #, fuzzy, c-format msgid "Destination URL not supported: %s" msgstr "Delegation role nicht unterstützt: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:316 #, fuzzy, c-format msgid "Will calculate %s checksum" msgstr "Errechneted checksum: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:327 msgid "Cannot use supplied --size option" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:540 #, c-format msgid "Checksum mismatch between calculated checksum %s and source checksum %s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:550 #, fuzzy, c-format msgid "Failed cleaning up destination %s" msgstr "Fehler bei Schreiben zu Ziel" #: src/libs/data-staging/Processor.cpp:59 #: src/services/candypond/CandyPond.cpp:117 msgid "Error creating cache" msgstr "" #: src/libs/data-staging/Processor.cpp:83 #, c-format msgid "Forcing re-download of file %s" msgstr "" #: src/libs/data-staging/Processor.cpp:100 #, c-format msgid "Will wait around %is" msgstr "" #: src/libs/data-staging/Processor.cpp:119 #, fuzzy, c-format msgid "Force-checking source of cache file %s" msgstr "Fehler bei Zugriff auf Cache-Datei %s: %s" #: src/libs/data-staging/Processor.cpp:122 #, fuzzy, c-format msgid "Source check requested but failed: %s" msgstr "Anlegen von Socket schlug fehl: %s" #: src/libs/data-staging/Processor.cpp:142 msgid "Permission checking failed, will try downloading without using cache" msgstr "" #: src/libs/data-staging/Processor.cpp:172 
#, fuzzy, c-format msgid "Will download to cache file %s" msgstr "Lese Archiv Datei %s" #: src/libs/data-staging/Processor.cpp:193 msgid "Looking up source replicas" msgstr "" #: src/libs/data-staging/Processor.cpp:210 #: src/libs/data-staging/Processor.cpp:317 #, fuzzy, c-format msgid "Skipping replica on local host %s" msgstr "Doppelte replica location: %s" #: src/libs/data-staging/Processor.cpp:218 #: src/libs/data-staging/Processor.cpp:325 #, fuzzy, c-format msgid "No locations left for %s" msgstr "Keine locations gefunden für %s" #: src/libs/data-staging/Processor.cpp:239 #: src/libs/data-staging/Processor.cpp:481 #, fuzzy msgid "Resolving destination replicas" msgstr "Probleme bei Auflösen von Zieladresse" #: src/libs/data-staging/Processor.cpp:256 #, fuzzy msgid "No locations for destination different from source found" msgstr "Keine locations gefunden für Ziel" #: src/libs/data-staging/Processor.cpp:267 #, fuzzy msgid "Pre-registering destination in index service" msgstr "Erstellen und senden einer Index Service Anfrage" #: src/libs/data-staging/Processor.cpp:293 msgid "Resolving source replicas in bulk" msgstr "" #: src/libs/data-staging/Processor.cpp:307 #, fuzzy, c-format msgid "No replicas found for %s" msgstr "Keine locations gefunden für %s" #: src/libs/data-staging/Processor.cpp:348 #, fuzzy, c-format msgid "Checking %s" msgstr "Herausforderung: %s" #: src/libs/data-staging/Processor.cpp:357 #: src/libs/data-staging/Processor.cpp:415 #, fuzzy msgid "Metadata of replica and index service differ" msgstr "" "Файл Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ñовпадает Ñ Ð¸Ñходным.\n" "%1" #: src/libs/data-staging/Processor.cpp:365 #, fuzzy, c-format msgid "Failed checking source replica %s: %s" msgstr "Fehler bei Verbinden zu server %s:%d" #: src/libs/data-staging/Processor.cpp:391 msgid "Querying source replicas in bulk" msgstr "" #: src/libs/data-staging/Processor.cpp:403 #, fuzzy, c-format msgid "Failed checking source replica: %s" msgstr "Löschen fehlgeschlagen von job: %s" #: src/libs/data-staging/Processor.cpp:409 #, fuzzy msgid "Failed checking source replica" msgstr "Fehler bei Lesen von Quelle" #: src/libs/data-staging/Processor.cpp:449 #, fuzzy msgid "Finding existing destination replicas" msgstr "Fehler bei Schreiben zu Ziel" #: src/libs/data-staging/Processor.cpp:461 #, fuzzy, c-format msgid "Failed to delete replica %s: %s" msgstr "Fehler bei Schreiben zu Datein %s: %s" #: src/libs/data-staging/Processor.cpp:475 #, fuzzy, c-format msgid "Unregistering %s" msgstr "Außer Acht lassend %s" #: src/libs/data-staging/Processor.cpp:486 #, fuzzy msgid "Pre-registering destination" msgstr "Probleme bei Auflösen von Zieladresse" #: src/libs/data-staging/Processor.cpp:492 #, fuzzy, c-format msgid "Failed to pre-clean destination: %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/libs/data-staging/Processor.cpp:515 msgid "Preparing to stage source" msgstr "" #: src/libs/data-staging/Processor.cpp:528 #, c-format msgid "Source is not ready, will wait %u seconds" msgstr "" #: src/libs/data-staging/Processor.cpp:534 #, fuzzy msgid "No physical files found for source" msgstr "Keine locations gefunden für %s" #: src/libs/data-staging/Processor.cpp:552 #, fuzzy msgid "Preparing to stage destination" msgstr "Kann nicht zu Ziel schreiben" #: src/libs/data-staging/Processor.cpp:565 #, c-format msgid "Destination is not ready, will wait %u seconds" msgstr "" #: src/libs/data-staging/Processor.cpp:571 #, fuzzy msgid "No physical files found for destination" msgstr "Keine locations gefunden für Ziel" #: 
src/libs/data-staging/Processor.cpp:597 msgid "Releasing source" msgstr "" #: src/libs/data-staging/Processor.cpp:601 #, c-format msgid "There was a problem during post-transfer source handling: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:606 #, fuzzy msgid "Releasing destination" msgstr "Probleme bei Auflösen von Zieladresse" #: src/libs/data-staging/Processor.cpp:610 #, c-format msgid "" "There was a problem during post-transfer destination handling after error: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:614 #, c-format msgid "Error with post-transfer destination handling: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:640 #, fuzzy msgid "Removing pre-registered destination in index service" msgstr "Keine execution services in index service registriert" #: src/libs/data-staging/Processor.cpp:643 #, c-format msgid "" "Failed to unregister pre-registered destination %s: %s. You may need to " "unregister it manually" msgstr "" #: src/libs/data-staging/Processor.cpp:649 msgid "Registering destination replica" msgstr "" #: src/libs/data-staging/Processor.cpp:652 #, fuzzy, c-format msgid "Failed to register destination replica: %s" msgstr "Fehler bei Ändern von owner des Zielverzeichnisses zu %i: %s" #: src/libs/data-staging/Processor.cpp:655 #, c-format msgid "" "Failed to unregister pre-registered destination %s. You may need to " "unregister it manually" msgstr "" #: src/libs/data-staging/Processor.cpp:685 msgid "Error creating cache. Stale locks may remain." msgstr "" #: src/libs/data-staging/Processor.cpp:718 #, c-format msgid "Linking/copying cached file to %s" msgstr "" #: src/libs/data-staging/Processor.cpp:739 #, fuzzy, c-format msgid "Failed linking cache file to %s" msgstr "Fehler beim Auflisten von Dateien" #: src/libs/data-staging/Processor.cpp:743 #, fuzzy, c-format msgid "Error linking cache file to %s." msgstr "Fehler bei Entfernen von Cache-Datei %s: %s" #: src/libs/data-staging/Processor.cpp:764 #: src/libs/data-staging/Processor.cpp:771 #, fuzzy msgid "Adding to bulk request" msgstr "Füge Anfrage-Token %s hinzu" #: src/libs/data-staging/Scheduler.cpp:174 #: src/libs/data-staging/Scheduler.cpp:181 #, fuzzy msgid "source" msgstr "Quelle: %s" #: src/libs/data-staging/Scheduler.cpp:174 #: src/libs/data-staging/Scheduler.cpp:181 #, fuzzy msgid "destination" msgstr "Ziel: %s" #: src/libs/data-staging/Scheduler.cpp:174 #, c-format msgid "Using next %s replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:181 #, c-format msgid "No more %s replicas" msgstr "" #: src/libs/data-staging/Scheduler.cpp:183 msgid "Will clean up pre-registered destination" msgstr "" #: src/libs/data-staging/Scheduler.cpp:187 msgid "Will release cache locks" msgstr "" #: src/libs/data-staging/Scheduler.cpp:190 msgid "Moving to end of data staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:199 #, c-format msgid "Source is mapped to %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:203 msgid "Cannot link to source which can be modified, will copy instead" msgstr "" #: src/libs/data-staging/Scheduler.cpp:212 msgid "Cannot link to a remote destination. Will not use mapped URL" msgstr "" #: src/libs/data-staging/Scheduler.cpp:215 msgid "Linking mapped file" msgstr "" #: src/libs/data-staging/Scheduler.cpp:222 #, fuzzy, c-format msgid "Failed to create link: %s. 
Will not use mapped URL" msgstr "Fehler bei Anlegen von soft link: %s" #: src/libs/data-staging/Scheduler.cpp:247 #, c-format msgid "" "Scheduler received new DTR %s with source: %s, destination: %s, assigned to " "transfer share %s with priority %d" msgstr "" #: src/libs/data-staging/Scheduler.cpp:255 msgid "" "File is not cacheable, was requested not to be cached or no cache available, " "skipping cache check" msgstr "" #: src/libs/data-staging/Scheduler.cpp:261 msgid "File is cacheable, will check cache" msgstr "" #: src/libs/data-staging/Scheduler.cpp:264 #: src/libs/data-staging/Scheduler.cpp:289 #, c-format msgid "File is currently being cached, will wait %is" msgstr "" #: src/libs/data-staging/Scheduler.cpp:283 #, fuzzy msgid "Timed out while waiting for cache lock" msgstr "Timeout beim Lesen des response header" #: src/libs/data-staging/Scheduler.cpp:293 msgid "Checking cache again" msgstr "" #: src/libs/data-staging/Scheduler.cpp:313 #, fuzzy msgid "Destination file is in cache" msgstr "Destination muss LFN enthalten" #: src/libs/data-staging/Scheduler.cpp:317 msgid "Source and/or destination is index service, will resolve replicas" msgstr "" #: src/libs/data-staging/Scheduler.cpp:320 msgid "" "Neither source nor destination are index services, will skip resolving " "replicas" msgstr "" #: src/libs/data-staging/Scheduler.cpp:331 msgid "Problem with index service, will release cache lock" msgstr "" #: src/libs/data-staging/Scheduler.cpp:335 msgid "Problem with index service, will proceed to end of data staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:345 msgid "Checking source file is present" msgstr "" #: src/libs/data-staging/Scheduler.cpp:353 msgid "Error with source file, moving to next replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:375 #, c-format msgid "Replica %s has long latency, trying next replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:377 #, c-format msgid "No more replicas, will use %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:380 #, fuzzy, c-format msgid "Checking replica %s" msgstr "Suche nache Existenz von %s" #: src/libs/data-staging/Scheduler.cpp:390 msgid "Overwrite requested - will pre-clean destination" msgstr "" #: src/libs/data-staging/Scheduler.cpp:393 msgid "No overwrite requested or allowed, skipping pre-cleaning" msgstr "" #: src/libs/data-staging/Scheduler.cpp:401 msgid "Pre-clean failed, will still try to copy" msgstr "" #: src/libs/data-staging/Scheduler.cpp:408 #, fuzzy msgid "Source or destination requires staging" msgstr "Quelle Ziel" #: src/libs/data-staging/Scheduler.cpp:412 msgid "No need to stage source or destination, skipping staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:442 msgid "Staging request timed out, will release request" msgstr "" #: src/libs/data-staging/Scheduler.cpp:446 #, fuzzy msgid "Querying status of staging request" msgstr "Erstellen und senden von Anfrage" #: src/libs/data-staging/Scheduler.cpp:455 #, fuzzy msgid "Releasing requests" msgstr "Verarbeite %s Anfrage" #: src/libs/data-staging/Scheduler.cpp:472 msgid "DTR is ready for transfer, moving to delivery queue" msgstr "" #: src/libs/data-staging/Scheduler.cpp:487 #, fuzzy, c-format msgid "Transfer failed: %s" msgstr "Einige Transfers schlugen fehl" #: src/libs/data-staging/Scheduler.cpp:497 msgid "Releasing request(s) made during staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:500 msgid "Neither source nor destination were staged, skipping releasing requests" msgstr "" #: src/libs/data-staging/Scheduler.cpp:512 
msgid "Trying next replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:517 #, fuzzy msgid "unregister" msgstr "Außer Acht lassend %s" #: src/libs/data-staging/Scheduler.cpp:517 #, fuzzy msgid "register" msgstr "Außer Acht lassend %s" #: src/libs/data-staging/Scheduler.cpp:516 #, c-format msgid "Will %s in destination index service" msgstr "" #: src/libs/data-staging/Scheduler.cpp:520 msgid "Destination is not index service, skipping replica registration" msgstr "" #: src/libs/data-staging/Scheduler.cpp:533 msgid "Error registering replica, moving to end of data staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:542 msgid "Will process cache" msgstr "" #: src/libs/data-staging/Scheduler.cpp:546 msgid "File is not cacheable, skipping cache processing" msgstr "" #: src/libs/data-staging/Scheduler.cpp:560 #, fuzzy msgid "Cancellation complete" msgstr "Transfer vollständig" #: src/libs/data-staging/Scheduler.cpp:574 msgid "Will wait 10s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:580 msgid "Error in cache processing, will retry without caching" msgstr "" #: src/libs/data-staging/Scheduler.cpp:589 msgid "Will retry without caching" msgstr "" #: src/libs/data-staging/Scheduler.cpp:607 #, fuzzy msgid "Proxy has expired" msgstr "Proxy store:" #: src/libs/data-staging/Scheduler.cpp:618 #, c-format msgid "%i retries left, will wait until %s before next attempt" msgstr "" #: src/libs/data-staging/Scheduler.cpp:634 msgid "Out of retries" msgstr "" #: src/libs/data-staging/Scheduler.cpp:636 msgid "Permanent failure" msgstr "" #: src/libs/data-staging/Scheduler.cpp:642 #, fuzzy msgid "Finished successfully" msgstr "Verbindung erfolgreich geschlossen" #: src/libs/data-staging/Scheduler.cpp:652 #, fuzzy msgid "Returning to generator" msgstr "Wiederholte Nutzung von Verbindung" #: src/libs/data-staging/Scheduler.cpp:818 #, c-format msgid "File is smaller than %llu bytes, will use local delivery" msgstr "" #: src/libs/data-staging/Scheduler.cpp:872 #, c-format msgid "Delivery service at %s can copy to %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:880 #, c-format msgid "Delivery service at %s can copy from %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:893 msgid "Could not find any useable delivery service, forcing local transfer" msgstr "" #: src/libs/data-staging/Scheduler.cpp:909 #, c-format msgid "Not using delivery service at %s because it is full" msgstr "" #: src/libs/data-staging/Scheduler.cpp:936 #, c-format msgid "Not using delivery service %s due to previous failure" msgstr "" #: src/libs/data-staging/Scheduler.cpp:946 msgid "No remote delivery services are useable, forcing local delivery" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1150 msgid "Cancelling active transfer" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1160 msgid "Processing thread timed out. 
Restarting DTR" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1228 msgid "Will use bulk request" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1250 msgid "No delivery endpoints available, will try later" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1269 msgid "Scheduler received NULL DTR" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1279 msgid "Scheduler received invalid DTR" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1368 #, fuzzy msgid "Scheduler starting up" msgstr "Konnte job nicht starten" #: src/libs/data-staging/Scheduler.cpp:1369 msgid "Scheduler configuration:" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1370 #, c-format msgid " Pre-processor slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1371 #, c-format msgid " Delivery slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1372 #, c-format msgid " Post-processor slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1373 #, c-format msgid " Emergency slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1374 #, c-format msgid " Prepared slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1375 #, fuzzy, c-format msgid "" " Shares configuration:\n" "%s" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/libs/data-staging/Scheduler.cpp:1378 msgid " Delivery service: LOCAL" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1379 #, fuzzy, c-format msgid " Delivery service: %s" msgstr "Delegation service: %s" #: src/libs/data-staging/Scheduler.cpp:1384 #, fuzzy msgid "Failed to create DTR dump thread" msgstr "Fehler bei Anlegen von ldap bind thread (%s)" #: src/libs/data-staging/Scheduler.cpp:1401 #: src/services/data-staging/DataDeliveryService.cpp:507 #, c-format msgid "DTR %s cancelled" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:15 msgid "Shutting down scheduler" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:17 msgid "Scheduler stopped, exiting" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:23 #, c-format msgid "Received DTR %s back from scheduler in state %s" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:30 #, fuzzy msgid "Generator started" msgstr "Start start" #: src/libs/data-staging/examples/Generator.cpp:31 #, fuzzy msgid "Starting DTR threads" msgstr "Starte Test" #: src/libs/data-staging/examples/Generator.cpp:44 msgid "No valid credentials found, exiting" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:55 #, fuzzy, c-format msgid "Problem creating dtr (source %s, destination %s)" msgstr "Probleme bei Auflösen von Zieladresse" #: src/services/a-rex/arex.cpp:452 src/services/candypond/CandyPond.cpp:569 #: src/services/data-staging/DataDeliveryService.cpp:681 #, c-format msgid "SOAP operation is not supported: %s" msgstr "" #: src/services/a-rex/arex.cpp:471 src/services/a-rex/arex.cpp:517 #, fuzzy, c-format msgid "Security Handlers processing failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/services/a-rex/arex.cpp:485 msgid "Can't obtain configuration. Public information is disabled." msgstr "" #: src/services/a-rex/arex.cpp:495 msgid "" "Can't obtain configuration. Public information is disallowed for this user." msgstr "" #: src/services/a-rex/arex.cpp:502 msgid "Can't obtain configuration. Only public information is provided." 
msgstr "" #: src/services/a-rex/arex.cpp:530 src/services/a-rex/rest/rest.cpp:674 #, fuzzy, c-format msgid "Connection from %s: %s" msgstr "Verbindung zu %s schlug fehl: %s" #: src/services/a-rex/arex.cpp:533 src/services/a-rex/rest/rest.cpp:678 #, c-format msgid "process: method: %s" msgstr "" #: src/services/a-rex/arex.cpp:534 src/services/a-rex/rest/rest.cpp:679 #, c-format msgid "process: endpoint: %s" msgstr "" #: src/services/a-rex/arex.cpp:559 #, c-format msgid "process: id: %s" msgstr "" #: src/services/a-rex/arex.cpp:560 #, c-format msgid "process: subop: %s" msgstr "" #: src/services/a-rex/arex.cpp:567 #, c-format msgid "process: subpath: %s" msgstr "" #: src/services/a-rex/arex.cpp:605 src/services/candypond/CandyPond.cpp:543 #: src/services/data-staging/DataDeliveryService.cpp:641 #: src/tests/echo/echo.cpp:98 #, c-format msgid "process: request=%s" msgstr "" #: src/services/a-rex/arex.cpp:610 src/services/candypond/CandyPond.cpp:548 #: src/services/data-staging/DataDeliveryService.cpp:646 #: src/tests/count/count.cpp:69 msgid "input does not define operation" msgstr "" #: src/services/a-rex/arex.cpp:613 src/services/candypond/CandyPond.cpp:551 #: src/services/data-staging/DataDeliveryService.cpp:649 #: src/tests/count/count.cpp:72 #, c-format msgid "process: operation: %s" msgstr "" #: src/services/a-rex/arex.cpp:640 #, fuzzy msgid "POST request on special path is not supported" msgstr "Es wurde keine authorization response erwidert" #: src/services/a-rex/arex.cpp:645 msgid "process: factory endpoint" msgstr "" #: src/services/a-rex/arex.cpp:788 src/services/candypond/CandyPond.cpp:580 #: src/services/data-staging/DataDeliveryService.cpp:692 #: src/tests/echo/echo.cpp:158 #, c-format msgid "process: response=%s" msgstr "" #: src/services/a-rex/arex.cpp:794 msgid "Per-job POST/SOAP requests are not supported" msgstr "" #: src/services/a-rex/arex.cpp:803 msgid "process: GET" msgstr "" #: src/services/a-rex/arex.cpp:804 #, c-format msgid "GET: id %s path %s" msgstr "" #: src/services/a-rex/arex.cpp:837 #, fuzzy msgid "process: HEAD" msgstr "Prozess: POST" #: src/services/a-rex/arex.cpp:838 #, c-format msgid "HEAD: id %s path %s" msgstr "" #: src/services/a-rex/arex.cpp:871 msgid "process: PUT" msgstr "" #: src/services/a-rex/arex.cpp:904 #, fuzzy msgid "process: DELETE" msgstr "Prozess: POST" #: src/services/a-rex/arex.cpp:937 #, c-format msgid "process: method %s is not supported" msgstr "" #: src/services/a-rex/arex.cpp:940 msgid "process: method is not defined" msgstr "" #: src/services/a-rex/arex.cpp:1050 #, fuzzy msgid "Failed to run Grid Manager thread" msgstr "Fehler bei Transfer von Daten" #: src/services/a-rex/arex.cpp:1109 #, fuzzy, c-format msgid "Failed to process configuration in %s" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/services/a-rex/arex.cpp:1114 #, fuzzy msgid "No control directory set in configuration" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: src/services/a-rex/arex.cpp:1118 #, fuzzy msgid "No session directory set in configuration" msgstr "Fehlendes oder leeres KeyPath Element" #: src/services/a-rex/arex.cpp:1122 msgid "No LRMS set in configuration" msgstr "" #: src/services/a-rex/arex.cpp:1127 #, fuzzy, c-format msgid "Failed to create control directory %s" msgstr "Fehler bei Anlegen/Finden von Verzeichnis %s, (%d)" #: src/services/a-rex/cachecheck.cpp:37 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:658 #, c-format msgid "Error with cache configuration: %s" msgstr "" #: src/services/a-rex/cachecheck.cpp:53 #: 
src/services/candypond/CandyPond.cpp:318 msgid "Error with cache configuration" msgstr "" #: src/services/a-rex/cachecheck.cpp:78 #: src/services/candypond/CandyPond.cpp:146 #: src/services/candypond/CandyPond.cpp:343 #, c-format msgid "Looking up URL %s" msgstr "" #: src/services/a-rex/cachecheck.cpp:80 #: src/services/candypond/CandyPond.cpp:155 #, fuzzy, c-format msgid "Cache file is %s" msgstr "Lese Archiv Datei %s" #: src/services/a-rex/change_activity_status.cpp:55 #: src/services/a-rex/change_activity_status.cpp:59 #, fuzzy, c-format msgid "EMIES:PauseActivity: job %s - %s" msgstr "" "MigrateActivity: отзыв = \n" "%s" #: src/services/a-rex/change_activity_status.cpp:104 #: src/services/a-rex/change_activity_status.cpp:108 #, c-format msgid "EMIES:ResumeActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:153 #: src/services/a-rex/change_activity_status.cpp:158 #, fuzzy, c-format msgid "EMIES:CancelActivity: job %s - %s" msgstr "" "CreateActivity: ответ = \n" "%s" #: src/services/a-rex/change_activity_status.cpp:166 #, fuzzy, c-format msgid "job %s cancelled successfully" msgstr "Job erfolgreich abgebrochen" #: src/services/a-rex/change_activity_status.cpp:212 #: src/services/a-rex/change_activity_status.cpp:227 #, fuzzy, c-format msgid "EMIES:WipeActivity: job %s - %s" msgstr "" "MigrateActivity: отзыв = \n" "%s" #: src/services/a-rex/change_activity_status.cpp:231 #, fuzzy, c-format msgid "job %s (will be) cleaned successfully" msgstr "Job erfolgreich aufgeräumt." #: src/services/a-rex/change_activity_status.cpp:277 #: src/services/a-rex/change_activity_status.cpp:282 #, c-format msgid "EMIES:RestartActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:286 #, fuzzy, c-format msgid "job %s restarted successfully" msgstr "Datei %s erfolgreich entfernt" #: src/services/a-rex/change_activity_status.cpp:301 #: src/services/a-rex/put.cpp:163 src/services/a-rex/put.cpp:204 #, c-format msgid "%s: there is no such job: %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:309 #, c-format msgid "%s: put log %s: there is no payload" msgstr "" #: src/services/a-rex/change_activity_status.cpp:315 #, c-format msgid "%s: put log %s: unrecognized payload" msgstr "" #: src/services/a-rex/change_activity_status.cpp:354 #, fuzzy msgid "A-REX REST: Failed to resume job" msgstr "Konnte job nicht starten" #: src/services/a-rex/change_activity_status.cpp:358 #, fuzzy, c-format msgid "A-REX REST: State change not allowed: from %s to %s" msgstr "" "ChangeActivityStatus: запрос = \n" "%s" #: src/services/a-rex/create_activity.cpp:52 #, fuzzy, c-format msgid "" "EMIES:CreateActivity: request = \n" "%s" msgstr "" "CreateActivity: запрос = \n" "%s" #: src/services/a-rex/create_activity.cpp:58 msgid "EMIES:CreateActivity: too many activity descriptions" msgstr "" #: src/services/a-rex/create_activity.cpp:68 msgid "EMIES:CreateActivity: no job description found" msgstr "" #: src/services/a-rex/create_activity.cpp:75 msgid "EMIES:CreateActivity: max jobs total limit reached" msgstr "" #: src/services/a-rex/create_activity.cpp:101 #, fuzzy, c-format msgid "ES:CreateActivity: Failed to create new job: %s" msgstr "" "CreateActivity: ответ = \n" "%s" #: src/services/a-rex/create_activity.cpp:117 msgid "EMIES:CreateActivity finished successfully" msgstr "" #: src/services/a-rex/create_activity.cpp:118 #, fuzzy, c-format msgid "New job accepted with id %s" msgstr "Job migrierte mit Job ID: %s" #: src/services/a-rex/create_activity.cpp:122 #, 
fuzzy, c-format msgid "" "EMIES:CreateActivity: response = \n" "%s" msgstr "" "CreateActivity: ответ = \n" "%s" #: src/services/a-rex/create_activity.cpp:137 msgid "NEW: put new job: there is no payload" msgstr "" #: src/services/a-rex/create_activity.cpp:141 msgid "NEW: put new job: max jobs total limit reached" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:51 msgid "Wiping and re-creating whole storage" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:214 #: src/services/a-rex/delegation/DelegationStore.cpp:316 #, c-format msgid "DelegationStore: TouchConsumer failed to create file %s" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:276 msgid "DelegationStore: PeriodicCheckConsumers failed to resume iterator" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:296 #, c-format msgid "" "DelegationStore: PeriodicCheckConsumers failed to remove old delegation %s - " "%s" msgstr "" #: src/services/a-rex/get.cpp:174 src/services/a-rex/get.cpp:229 #: src/services/a-rex/get.cpp:313 #, c-format msgid "Get: there is no job %s - %s" msgstr "" #: src/services/a-rex/get.cpp:380 #, c-format msgid "Head: there is no job %s - %s" msgstr "" #: src/services/a-rex/get.cpp:436 #, fuzzy msgid "Failed to extract credential information" msgstr "Fehler beim Verbindungen für Erneuerung von credentials" #: src/services/a-rex/get.cpp:439 #, fuzzy, c-format msgid "Checking cache permissions: DN: %s" msgstr "Check: looking für Metadata: %s" #: src/services/a-rex/get.cpp:440 #, fuzzy, c-format msgid "Checking cache permissions: VO: %s" msgstr "Check: looking für Metadata: %s" #: src/services/a-rex/get.cpp:442 #, c-format msgid "Checking cache permissions: VOMS attr: %s" msgstr "" #: src/services/a-rex/get.cpp:452 #, c-format msgid "Cache access allowed to %s by DN %s" msgstr "" #: src/services/a-rex/get.cpp:455 #, c-format msgid "DN %s doesn't match %s" msgstr "" #: src/services/a-rex/get.cpp:458 #, c-format msgid "Cache access allowed to %s by VO %s" msgstr "" #: src/services/a-rex/get.cpp:461 #, c-format msgid "VO %s doesn't match %s" msgstr "" #: src/services/a-rex/get.cpp:467 src/services/a-rex/get.cpp:486 #, c-format msgid "Bad credential value %s in cache access rules" msgstr "" #: src/services/a-rex/get.cpp:475 src/services/a-rex/get.cpp:494 #, c-format msgid "VOMS attr %s matches %s" msgstr "" #: src/services/a-rex/get.cpp:476 #, c-format msgid "Cache access allowed to %s by VO %s and role %s" msgstr "" #: src/services/a-rex/get.cpp:479 src/services/a-rex/get.cpp:498 #, c-format msgid "VOMS attr %s doesn't match %s" msgstr "" #: src/services/a-rex/get.cpp:495 #, c-format msgid "Cache access allowed to %s by VO %s and group %s" msgstr "" #: src/services/a-rex/get.cpp:501 #, c-format msgid "Unknown credential type %s for URL pattern %s" msgstr "" #: src/services/a-rex/get.cpp:507 #, c-format msgid "No match found in cache access rules for %s" msgstr "" #: src/services/a-rex/get.cpp:517 #, c-format msgid "Get from cache: Looking in cache for %s" msgstr "" #: src/services/a-rex/get.cpp:520 #, fuzzy, c-format msgid "Get from cache: Invalid URL %s" msgstr "Ungültige URL: %s" #: src/services/a-rex/get.cpp:537 #, fuzzy msgid "Get from cache: Error in cache configuration" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: src/services/a-rex/get.cpp:546 msgid "Get from cache: File not in cache" msgstr "" #: src/services/a-rex/get.cpp:549 #, c-format msgid "Get from cache: could not access cached file: %s" msgstr "" #: src/services/a-rex/get.cpp:559 msgid "Get 
from cache: Cached file is locked" msgstr "" #: src/services/a-rex/get_activity_statuses.cpp:214 #: src/services/a-rex/get_activity_statuses.cpp:320 #, fuzzy, c-format msgid "EMIES:GetActivityStatus: job %s - %s" msgstr "" "GetActivityStatuses: ответ = \n" "%s" #: src/services/a-rex/get_activity_statuses.cpp:455 #, c-format msgid "EMIES:GetActivityInfo: job %s - failed to retrieve GLUE2 information" msgstr "" #: src/services/a-rex/get_activity_statuses.cpp:507 #: src/services/a-rex/get_activity_statuses.cpp:514 #, c-format msgid "EMIES:NotifyService: job %s - %s" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:98 #, c-format msgid "" "Cannot create directories for log file %s. Messages will be logged to this " "log" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:104 #, c-format msgid "" "Cannot open cache log file %s: %s. Cache cleaning messages will be logged to " "this log" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:114 #, fuzzy msgid "Failed to start cache clean script" msgstr "Fehler bei der Initialisierung der delegation credentials" #: src/services/a-rex/grid-manager/GridManager.cpp:115 #, fuzzy msgid "Cache cleaning script failed" msgstr "Die Job Terminierungs-Anfrage schlug fehl" #: src/services/a-rex/grid-manager/GridManager.cpp:183 #, c-format msgid "External request for attention %s" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:201 #, fuzzy, c-format msgid "Failed to open heartbeat file %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/services/a-rex/grid-manager/GridManager.cpp:223 msgid "Starting jobs processing thread" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:224 #, c-format msgid "Used configuration file %s" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:232 #, c-format msgid "" "Error initiating delegation database in %s. Maybe permissions are not " "suitable. Returned error is: %s." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:244 msgid "Failed to start new thread: cache won't be cleaned" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:251 #, fuzzy msgid "Failed to activate Jobs Processing object, exiting Grid Manager thread" msgstr "Fehler bei Transfer von Daten" #: src/services/a-rex/grid-manager/GridManager.cpp:260 #, c-format msgid "" "Error adding communication interface in %s. Maybe another instance of A-REX " "is already running." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:263 #, c-format msgid "" "Error adding communication interface in %s. Maybe permissions are not " "suitable." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:270 #, fuzzy msgid "Failed to start new thread for monitoring job requests" msgstr "Fehler bei Transfer von Daten" #: src/services/a-rex/grid-manager/GridManager.cpp:276 msgid "Picking up left jobs" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:279 msgid "Starting data staging threads" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:283 msgid "Starting jobs' monitoring" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:291 #, c-format msgid "" "SSHFS mount point of session directory (%s) is broken - waiting for " "reconnect ..." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:295 #, c-format msgid "" "SSHFS mount point of runtime directory (%s) is broken - waiting for " "reconnect ..." 
msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:300 #, c-format msgid "" "SSHFS mount point of cache directory (%s) is broken - waiting for " "reconnect ..." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:349 #, c-format msgid "Orphan delegation lock detected (%s) - cleaning" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:354 #, fuzzy msgid "Failed to obtain delegation locks for cleaning orphaned locks" msgstr "Fehler bei der Initialisierung der delegation credentials" #: src/services/a-rex/grid-manager/GridManager.cpp:368 msgid "Waking up" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:371 msgid "Stopping jobs processing thread" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:373 msgid "Exiting jobs processing thread" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:391 msgid "Requesting to stop job processing" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:399 msgid "Waiting for main job processing thread to exit" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:401 msgid "Stopped job processing" msgstr "" #: src/services/a-rex/grid-manager/accounting/AAR.cpp:73 msgid "Cannot find information abouto job submission endpoint" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:53 #, fuzzy, c-format msgid "Failed to read database schema file at %s" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:63 #, fuzzy msgid "Accounting database initialized succesfully" msgstr "erfolgreich angelegt, ID: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:65 msgid "Accounting database connection has been established" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:75 #, c-format msgid "%s. SQLite database error: %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:77 #, c-format msgid "SQLite database error: %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:105 #, c-format msgid "Directory %s to store accounting database has been created." msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:107 #, c-format msgid "" "Accounting database cannot be created. Faile to create parent directory %s." 
msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:111 #, c-format msgid "Accounting database cannot be created: %s is not a directory" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:118 #, fuzzy msgid "Failed to initialize accounting database" msgstr "Fehler bei Initialisierung des main Python Threads" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:125 #, c-format msgid "Accounting database file (%s) is not a regular file" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:131 #, fuzzy msgid "Error opening accounting database" msgstr "Fehler bei Öffnen von Meta-Datei zum schreiben %s: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:149 msgid "Closing connection to SQLite accounting database" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:238 #, fuzzy, c-format msgid "Failed to fetch data from %s accounting database table" msgstr "Konnte Metadaen für Datei %s nicht finden" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:255 #, fuzzy, c-format msgid "Failed to add '%s' into the accounting database %s table" msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:314 #, fuzzy msgid "Failed to fetch data from accounting database Endpoints table" msgstr "Konnte Metadaen für Datei %s nicht finden" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:331 #, c-format msgid "" "Failed to add '%s' URL (interface type %s) into the accounting database " "Endpoints table" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:357 #, fuzzy, c-format msgid "Failed to query AAR database ID for job %s" msgstr "Fehler beim Verbinden zu RLS server: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:412 #, fuzzy, c-format msgid "Failed to insert AAR into the database for job %s" msgstr "Fehler bei Lesen von Objekt %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:413 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:460 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:491 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:507 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:523 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:544 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:560 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:575 #, c-format msgid "SQL statement used: %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:418 #, fuzzy, c-format msgid "Failed to write authtoken attributes for job %s" msgstr "Fehler beim Authentifizieren: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:422 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:477 #, fuzzy, c-format msgid "Failed to write event records for job %s" msgstr "Fehler bei Schreiben zu Datein %s: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:433 #, c-format msgid "" "Cannot to update AAR. Cannot find registered AAR for job %s in accounting " "database." 
msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:459 #, fuzzy, c-format msgid "Failed to update AAR in the database for job %s" msgstr "Fahler bei Herunterladen %s zu %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:465 #, fuzzy, c-format msgid "Failed to write RTEs information for the job %s" msgstr "Fehler bei Bezug von Information für Job: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:469 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:473 #, fuzzy, c-format msgid "Failed to write data transfers information for the job %s" msgstr "Fehler bei Bezug von Information für Job: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:569 #, fuzzy, c-format msgid "Unable to add event: cannot find AAR for job %s in accounting database." msgstr "Konnte Job Status-Informationen nicht erhalten." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:73 #, fuzzy, c-format msgid "Unknown option %s" msgstr "Datentransfer abgebrochen: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:80 msgid "Job ID argument is required." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:86 msgid "Path to user's proxy file should be specified." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:92 msgid "User name should be specified." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:98 msgid "Path to .local job status file is required." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:106 msgid "Generating ceID prefix from hostname automatically" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:109 #, fuzzy msgid "" "Cannot determine hostname from gethostname() to generate ceID automatically." msgstr "Kann hostname von uname nciht ermitteln" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:118 #, c-format msgid "ceID prefix is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:126 #, c-format msgid "Getting currect timestamp for BLAH parser log: %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:135 msgid "Parsing .local file to obtain job-specific identifiers and info" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:145 #, c-format msgid "globalid is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:148 #, c-format msgid "headnode is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:151 #, c-format msgid "interface is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:155 msgid "There is no local LRMS ID. Message will not be written to BLAH log." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:158 #, c-format msgid "localid is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:161 #, c-format msgid "queue name is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:164 #, c-format msgid "owner subject is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:166 msgid "" "Job did not finished successfully. Message will not be written to BLAH log." 
msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:174 #, fuzzy, c-format msgid "Job timestamp successfully parsed as %s" msgstr "erfolgreich angelegt, ID: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:178 msgid "Can not read information from the local job status file" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:194 #, c-format msgid "" "Unsupported submission interface %s. Seems arc-blahp-logger need to be " "updated accordingly. Please submit the bug to bugzilla." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:204 msgid "Parsing VOMS AC to get FQANs information" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:217 #, fuzzy, c-format msgid "Found VOMS AC attribute: %s" msgstr " Attribute" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:230 #, fuzzy msgid "VOMS AC attribute is a tag" msgstr "Konnte VOMS Attribut nicht herauslesen" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:237 msgid "Skipping policyAuthority VOMS AC attribute" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:241 #, fuzzy msgid "VOMS AC attribute is the FQAN" msgstr "Konnte VOMS Attribut nicht herauslesen" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:249 msgid "No FQAN found. Using None as userFQAN value" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:263 #, c-format msgid "Assembling BLAH parser log entry: %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:268 #, c-format msgid "Writing the info to the BLAH parser log: %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:276 #, fuzzy, c-format msgid "Cannot open BLAH log file '%s'" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:34 #, c-format msgid "Missing cancel-%s-job - job cancellation may not work" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:38 #, c-format msgid "Missing submit-%s-job - job submission to LRMS may not work" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:42 #, c-format msgid "Missing scan-%s-job - may miss when job finished executing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:56 #, c-format msgid "Wrong option in %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:67 #, fuzzy, c-format msgid "Can't read configuration file at %s" msgstr "Delegation Authorisierung fehlgeschlagen" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:77 #, fuzzy, c-format msgid "Can't recognize type of configuration file at %s" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:80 msgid "Could not determine configuration type or configuration is empty" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:161 #, fuzzy msgid "lrms is empty" msgstr "Policy is leer" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:194 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:203 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:212 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:221 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:230 #, fuzzy msgid "Missing number in maxjobs" msgstr "Fehlendes security Objekt in Nachricht" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:197 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:206 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:215 #: 
src/services/a-rex/grid-manager/conf/CoreConfig.cpp:224 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:233 #, c-format msgid "Wrong number in maxjobs: %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:243 #, c-format msgid "Wrong number in wakeupperiod: %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:249 msgid "mail parameter is empty" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:255 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:259 msgid "Wrong number in defaultttl command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:265 #, fuzzy msgid "Wrong number in maxrerun command" msgstr "Falsche Anzahl an Parametern übertragen" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:272 msgid "State name for plugin is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:276 msgid "Options for plugin are missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:279 #, c-format msgid "Failed to register plugin for state %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:285 msgid "Session root directory is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:288 msgid "Junk in sessiondir command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:300 #, fuzzy msgid "Missing directory in controldir command" msgstr "Fehlendes oder leeres KeyPath Element" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:305 msgid "" "'control' configuration option is no longer supported, please use " "'controldir' instead" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:310 msgid "User for helper program is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:313 msgid "Only user '.' 
for helper program is supported" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:316 msgid "Helper program is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:337 #, fuzzy msgid "Wrong option in fixdirectories" msgstr "Fehler beim Öffnen von Verzeichs: %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:364 #, fuzzy msgid "Wrong option in delegationdb" msgstr "Fehler beim Öffnen von Verzeichs: %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:370 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:556 msgid "forcedefaultvoms parameter is empty" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:445 #, fuzzy msgid "Wrong number in maxjobdesc command" msgstr "Falsche Anzahl an Parametern übertragen" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:495 msgid "Missing file name in [arex/jura] logfile" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:506 #, fuzzy, c-format msgid "Wrong number in urdelivery_frequency: %s" msgstr "Falsche Anzahl an Parametern übertragen" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:549 msgid "No queue name given in queue block name" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:565 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:600 msgid "advertisedvo parameter is empty" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:117 #, c-format msgid "\tSession root dir : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:118 #, c-format msgid "\tControl dir : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:119 #, c-format msgid "\tdefault LRMS : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:120 #, c-format msgid "\tdefault queue : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:121 #, c-format msgid "\tdefault ttl : %u" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:126 msgid "No valid caches found in configuration, caching is disabled" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:131 #, c-format msgid "\tCache : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:133 #, c-format msgid "\tCache link dir : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:136 #, fuzzy, c-format msgid "\tCache (read-only): %s" msgstr "Fataler Fehler: %s" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:138 msgid "\tCache cleaning enabled" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:139 msgid "\tCache cleaning disabled" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:327 msgid "" "Globus location variable substitution is not supported anymore. Please " "specify path directly." 
msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:35 msgid "Can't read configuration file" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:41 #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:29 msgid "Can't recognize type of configuration file" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:47 #, fuzzy msgid "Configuration error" msgstr "Delegation Authorisierung fehlgeschlagen" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:77 msgid "Bad number in maxdelivery" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:83 msgid "Bad number in maxemergency" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:89 msgid "Bad number in maxprocessor" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:95 msgid "Bad number in maxprepared" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:101 msgid "Bad number in maxtransfertries" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:112 msgid "Bad number in speedcontrol" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:123 #, c-format msgid "Bad number in definedshare %s" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:132 #, c-format msgid "Bad URL in deliveryservice: %s" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:143 msgid "Bad number in remotesizelimit" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:168 msgid "Bad value for loglevel" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:182 msgid "Bad URL in acix_endpoint" msgstr "" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:24 msgid "Can't open configuration file" msgstr "" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:45 msgid "Not enough parameters in copyurl" msgstr "" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:54 msgid "Not enough parameters in linkurl" msgstr "" #: src/services/a-rex/grid-manager/files/ControlFileContent.cpp:179 #, c-format msgid "Wrong directory in %s" msgstr "" #: src/services/a-rex/grid-manager/files/ControlFileHandling.cpp:102 #, c-format msgid "Failed setting file owner: %s" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:33 msgid "gm-delegations-converter changes format of delegation database." 
msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:38 #: src/services/a-rex/grid-manager/gm_jobs.cpp:110 #: src/services/a-rex/grid-manager/gm_kick.cpp:24 msgid "use specified configuration file" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:39 #: src/services/a-rex/grid-manager/gm_jobs.cpp:111 #: src/services/a-rex/grid-manager/gm_kick.cpp:25 msgid "file" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:43 #: src/services/a-rex/grid-manager/gm_jobs.cpp:115 msgid "read information from specified control directory" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:44 #: src/services/a-rex/grid-manager/gm_jobs.cpp:116 msgid "dir" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:48 msgid "convert from specified input database format [bdb|sqlite]" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:49 #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:54 msgid "database format" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:53 msgid "convert into specified output database format [bdb|sqlite]" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:36 #, fuzzy, c-format msgid "Could not read data staging configuration from %s" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/services/a-rex/grid-manager/gm_jobs.cpp:44 #, c-format msgid "Can't read transfer states from %s. Perhaps A-REX is not running?" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:100 msgid "gm-jobs displays information on current jobs in the system." msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:105 msgid "display more information on each job" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:120 msgid "print summary of jobs in each transfer share" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:125 msgid "do not print list of jobs" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:130 msgid "do not print number of jobs in each state" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:135 msgid "print state of the service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:140 msgid "show only jobs of user(s) with specified subject name(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:141 #: src/services/a-rex/grid-manager/gm_jobs.cpp:151 #: src/services/a-rex/grid-manager/gm_jobs.cpp:161 #, fuzzy msgid "dn" msgstr "n" #: src/services/a-rex/grid-manager/gm_jobs.cpp:145 msgid "request to cancel job(s) with specified ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:146 #: src/services/a-rex/grid-manager/gm_jobs.cpp:156 #: src/services/a-rex/grid-manager/gm_jobs.cpp:166 #: src/services/a-rex/grid-manager/gm_jobs.cpp:176 #: src/services/a-rex/grid-manager/gm_kick.cpp:30 msgid "id" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:150 msgid "" "request to cancel jobs belonging to user(s) with specified subject name(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:155 #, fuzzy msgid "request to clean job(s) with specified ID(s)" msgstr "Keine Anfrage-Token spezifiziert!" 
#: src/services/a-rex/grid-manager/gm_jobs.cpp:160 msgid "" "request to clean jobs belonging to user(s) with specified subject name(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:165 msgid "show only jobs with specified ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:170 msgid "print list of available delegation IDs" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:175 msgid "print delegation token of specified ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:180 msgid "print main delegation token of specified Job ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:181 #, fuzzy msgid "job id" msgstr "ungültige Job ID" #: src/services/a-rex/grid-manager/gm_jobs.cpp:185 msgid "" "output requested elements (jobs list, delegation ids and tokens) to file" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:186 #, fuzzy msgid "file name" msgstr "Dateiname" #: src/services/a-rex/grid-manager/gm_jobs.cpp:209 #, fuzzy, c-format msgid "Using configuration at %s" msgstr "Delegation Authorisierung fehlgeschlagen" #: src/services/a-rex/grid-manager/gm_jobs.cpp:232 #, fuzzy, c-format msgid "Failed to open output file '%s'" msgstr "Fehler bei Unlock von Datei %s: %s" #: src/services/a-rex/grid-manager/gm_jobs.cpp:241 msgid "Looking for current jobs" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:278 #, c-format msgid "Job: %s : ERROR : Unrecognizable state" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:287 #, fuzzy, c-format msgid "Job: %s : ERROR : No local information." msgstr "Konnte Job Status Information nicht beziehen." #: src/services/a-rex/grid-manager/gm_jobs.cpp:461 #, c-format msgid "Job: %s : ERROR : Failed to put cancel mark" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:465 #, c-format msgid "Job: %s : Cancel request put but failed to communicate to service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:467 #, c-format msgid "Job: %s : Cancel request put and communicated to service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:478 #, c-format msgid "Job: %s : ERROR : Failed to put clean mark" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:482 #, c-format msgid "Job: %s : Clean request put but failed to communicate to service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:484 #, c-format msgid "Job: %s : Clean request put and communicated to service" msgstr "" #: src/services/a-rex/grid-manager/gm_kick.cpp:18 msgid "" "gm-kick wakes up the A-REX corresponding to the given control file. If no " "file is given it uses the control directory found in the configuration file." msgstr "" #: src/services/a-rex/grid-manager/gm_kick.cpp:29 msgid "inform about changes in particular job (can be used multiple times)" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:39 #, fuzzy, c-format msgid "Failed to acquire source: %s" msgstr "Fehler beim Authentifizieren: %s" #: src/services/a-rex/grid-manager/inputcheck.cpp:44 #, fuzzy, c-format msgid "Failed to resolve %s" msgstr "Fehler bei Lesen von Objekt %s" #: src/services/a-rex/grid-manager/inputcheck.cpp:61 #, fuzzy, c-format msgid "Failed to check %s" msgstr "Fehler beim Authentifizieren: %s" #: src/services/a-rex/grid-manager/inputcheck.cpp:75 msgid "job_description_file [proxy_file]" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:76 msgid "" "inputcheck checks that input files specified in the job description are " "available and accessible using the credentials in the given proxy file." 
msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:88 #, fuzzy msgid "Wrong number of arguments given" msgstr "Falsche Anzahl an Parametern übertragen" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:143 #, fuzzy, c-format msgid "Unsupported value for allownew: %s" msgstr "Nicht unterstützte URL für Quelle: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:154 msgid "Wrong number in maxjobdesc" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:162 #: src/services/gridftpd/fileplugin/fileplugin.cpp:186 #, fuzzy, c-format msgid "Unsupported configuration command: %s" msgstr "Nicht unterstützte URL für Ziel: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:171 #, c-format msgid "Mapped user:group (%s:%s) not found" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:174 #, fuzzy msgid "Job submission user can't be root" msgstr "Job Hochladen Zusammenfassung:" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:177 #, fuzzy msgid "Failed processing A-REX configuration" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:201 msgid "This user is denied to submit new jobs." msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:206 #, fuzzy msgid "No control or session directories defined in configuration" msgstr "Fehlendes oder leeres KeyPath Element" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:210 #, fuzzy, c-format msgid "Job submission user: %s (%i:%i)" msgstr "Job Hochladen Zusammenfassung:" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:213 #, fuzzy msgid "Job plugin was not initialised" msgstr "Main python thread wurde nicht initialisiert" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:231 #, fuzzy msgid "No delegated credentials were passed" msgstr "" "Делегированные параметры доÑтупа:\n" " %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:305 #, fuzzy, c-format msgid "Cancelling job %s" msgstr "Aufräumen von Job: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:360 #, fuzzy, c-format msgid "Cleaning job %s" msgstr "Aufräumen von Job: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:400 #, fuzzy msgid "Request to open file with storing in progress" msgstr "%s Anfrage an %s schlug fehl mit Antwort %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:434 #: src/services/gridftpd/fileplugin/fileplugin.cpp:344 #, fuzzy, c-format msgid "Retrieving file %s" msgstr "Lese Archiv Datei %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:484 #, c-format msgid "Accepting submission of new job or modification request: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:506 #: src/services/gridftpd/fileplugin/fileplugin.cpp:384 #: src/services/gridftpd/fileplugin/fileplugin.cpp:421 #, fuzzy, c-format msgid "Storing file %s" msgstr "Anlegen von Socket schlug fehl: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:527 #, fuzzy, c-format msgid "Unknown open mode %i" msgstr "unbekannter return code %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:653 #, c-format msgid "action(%s) != request" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:704 #, fuzzy msgid "Failed writing job description" msgstr "Fehler bei Schreiben zu Ziel" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:920 #, fuzzy msgid "Failed writing local description" msgstr "Fehler bei Schreiben zu 
Ziel" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:930 #, fuzzy msgid "Failed writing ACL" msgstr "Konnte job nicht starten" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:946 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:953 #: src/services/a-rex/job.cpp:819 #, c-format msgid "Failed to run external plugin: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:957 #: src/services/a-rex/job.cpp:823 #, c-format msgid "Plugin response: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:959 #, fuzzy msgid "Failed to run external plugin" msgstr "Initiierung der Delegation fehlgeschlagen" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:969 #, fuzzy, c-format msgid "Failed to create session directory %s" msgstr "Fehler bei Anlegen/Finden von Verzeichnis %s, (%d)" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:979 #, fuzzy msgid "Failed writing status" msgstr "Fehler beim Auflisten von Meta-Dateien" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:993 #, fuzzy, c-format msgid "Failed to lock delegated credentials: %s" msgstr "Fehler bei der Initialisierung der delegation credentials" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1235 #, fuzzy, c-format msgid "Renewing proxy for job %s" msgstr "Erneuern der credentials für Job %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1251 #, c-format msgid "New proxy expires at %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1254 #, fuzzy msgid "Failed to write 'local' information" msgstr "Konnte Job Status Information nicht beziehen." #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1257 #, fuzzy msgid "Failed to renew proxy" msgstr "Fehler beim Senden von body" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1260 msgid "New proxy expiry time is not later than old proxy, not renewing proxy" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1302 #, fuzzy, c-format msgid "Checking file %s" msgstr "Suche nache Existenz von %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1349 msgid "ID contains forbidden characters" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1383 #: src/services/a-rex/job.cpp:1023 #, c-format msgid "Failed to create file in %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1393 msgid "Out of tries while allocating new job ID" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1473 #, fuzzy, c-format msgid "Failed to read job's local description for job %s from %s" msgstr "Fehler beim Bezug der Job Beschreibung von Job: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1562 #, fuzzy msgid "No non-draining session directories available" msgstr "Konnte ownen des session dir nicht ändern zu %i: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1568 #, fuzzy, c-format msgid "Using control directory %s" msgstr "Lege Verzeichnis %s an" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1569 #, fuzzy, c-format msgid "Using session directory %s" msgstr "Lege Verzeichnis %s an" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:26 #, fuzzy, c-format msgid "Failed to read job's ACL for job %s from %s" msgstr "Fehler bei Lesen von Objekt %s: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:70 #, fuzzy, c-format msgid "Failed to parse user policy for job %s" msgstr "Fahler bei Herunterladen %s zu %s" #: 
src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:75 #, fuzzy, c-format msgid "Failed to load policy evaluator for policy of job %s" msgstr "Fahler bei Herunterladen %s zu %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:129 #, c-format msgid "Unknown ACL policy %s for job %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:73 #, c-format msgid "" "DTR Generator waiting to process: %d jobs to cancel, %d DTRs, %d new jobs" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:87 #, c-format msgid "%s: Job cancel request from DTR generator to scheduler" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:92 #, fuzzy, c-format msgid "%s: Returning canceled job from DTR generator" msgstr "Wiederholte Nutzung von Verbindung" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:133 #, c-format msgid "%s: Re-requesting attention from DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:143 #, c-format msgid "DTR Generator processed: %d jobs to cancel, %d DTRs, %d new jobs" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:162 msgid "Exiting Generator thread" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:234 msgid "Shutting down data staging threads" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:244 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:257 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:285 msgid "DTRGenerator is not running!" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:247 #, c-format msgid "Received DTR %s during Generator shutdown - may not be processed" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:261 msgid "DTRGenerator was sent null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:270 #, fuzzy, c-format msgid "%s: Received job in DTR generator" msgstr "Resuming Job: %s in Zustand: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:273 #, fuzzy, c-format msgid "%s: Failed to receive job in DTR generator" msgstr "Konnte Job Status Information nicht beziehen." 
#: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:280 msgid "DTRGenerator got request to cancel null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:295 msgid "DTRGenerator is queried about null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:325 msgid "DTRGenerator is asked about null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:353 msgid "DTRGenerator is requested to remove null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:360 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:368 #, c-format msgid "%s: Trying to remove job from data staging which is still active" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:376 #, c-format msgid "%s: Trying remove job from data staging which does not exist" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:387 #, fuzzy, c-format msgid "%s: Invalid DTR" msgstr "Ungültige URL: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:404 #, c-format msgid "%s: Received DTR %s to copy file %s in state %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:408 #, c-format msgid "%s: Received DTR belongs to inactive job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:425 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1067 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:459 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:517 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:631 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:841 #, c-format msgid "%s: Failed reading local information" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:434 #, fuzzy, c-format msgid "%s: DTR %s to copy file %s failed" msgstr "Warnung: Schließen von tmp Lock Datei %s fehlgeschlagen" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:440 #, c-format msgid "%s: Cancelling other DTRs" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:450 #, fuzzy, c-format msgid "%s: DTR %s to copy to %s failed but is not mandatory" msgstr "Warnung: Schließen von tmp Lock Datei %s fehlgeschlagen" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:460 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:722 #, c-format msgid "%s: Failed to read list of output files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:474 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:615 #, fuzzy, c-format msgid "%s: Failed to read dynamic output files in %s" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:476 #, c-format msgid "%s: Going through files in list %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:480 #, fuzzy, c-format msgid "%s: Removing %s from dynamic output file %s" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:484 #, fuzzy, c-format msgid "%s: Failed to write back dynamic output files in %s" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:500 #, fuzzy, c-format msgid "%s: Failed to write list of output files" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:504 #, fuzzy, c-format msgid "%s: Failed to write list of output status files" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:516 #: 
src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:734 #, c-format msgid "%s: Failed to read list of input files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:535 #, fuzzy, c-format msgid "%s: Failed to write list of input files" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:547 #, c-format msgid "%s: Received DTR with two remote endpoints!" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:559 #: src/services/candypond/CandyPondGenerator.cpp:105 #, fuzzy, c-format msgid "No active job id %s" msgstr "Kann Job ID nicht finden: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:603 #, fuzzy, c-format msgid "%s: Failed to read list of output files, can't clean up session dir" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:629 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:648 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:772 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:895 #, fuzzy, c-format msgid "%s: Failed to clean up session dir" msgstr "Fehler bei Reservieren von Platz" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:639 #, fuzzy, c-format msgid "%s: Failed to read list of input files, can't clean up session dir" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:661 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:665 msgid "uploads" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:661 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:665 msgid "downloads" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:662 msgid "cancelled" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:662 msgid "finished" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:660 #, fuzzy, c-format msgid "%s: All %s %s successfully" msgstr "Verbindung erfolgreich geschlossen" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:664 #, fuzzy, c-format msgid "%s: Some %s failed" msgstr "Anfrage %s schlug fehl" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:668 #, c-format msgid "%s: Requesting attention from DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:679 msgid "DTRGenerator is requested to process null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:685 msgid "download" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:685 msgid "upload" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:684 #, c-format msgid "%s: Received data staging request to %s files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:743 #, fuzzy, c-format msgid "%s: Duplicate file in list of input files: %s" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:792 #, fuzzy, c-format msgid "%s: Reading output files from user generated list in %s" msgstr "" "\n" "%s: ошибка чтения входного файла '%s': %s\n" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:794 #, fuzzy, c-format msgid "%s: Error reading user generated output file list in %s" msgstr "" "\n" "%s: ошибка чтения входного файла '%s': %s\n" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:821 #, fuzzy, c-format msgid "%s: Failed to list output directory %s: %s" msgstr "Fehler bei Lesen von Dateiliste" #:
src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:839 #, fuzzy, c-format msgid "%s: Adding new output file %s: %s" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:861 #, c-format msgid "%s: Two identical output destinations: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:874 #, c-format msgid "%s: Cannot upload two different files %s and %s to same LFN: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:906 #, fuzzy, c-format msgid "%s: Received job in a bad state: %s" msgstr "Resuming Job: %s in Zustand: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:914 #, c-format msgid "%s: Session directory processing takes too long - %u.%06u seconds" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:981 #, c-format msgid "" "%s: Destination file %s was possibly left unfinished from previous A-REX " "run, will overwrite" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1073 #, c-format msgid "%s: Failed writing local information" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1091 #, c-format msgid "%s: Cancelling active DTRs" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1098 msgid "DTRGenerator is asked to check files for null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1118 #, fuzzy, c-format msgid "%s: Can't read list of input files" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1133 #, c-format msgid "%s: Checking user uploadable file: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1138 #, c-format msgid "%s: User has uploaded file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1145 #, fuzzy, c-format msgid "%s: Failed writing changed input file." msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1149 #, c-format msgid "%s: Critical error for uploadable file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1155 #, c-format msgid "%s: User has NOT uploaded file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1167 #, c-format msgid "%s: Uploadable files timed out" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1223 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1249 #, fuzzy, c-format msgid "%s: Can't convert checksum %s to int for %s" msgstr "Formatierungsfehler erkannt in Datei %s, in Zeile %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1230 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1244 #, fuzzy, c-format msgid "%s: Can't convert filesize %s to int for %s" msgstr "Formatierungsfehler erkannt in Datei %s, in Zeile %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1239 #, c-format msgid "%s: Invalid size/checksum information (%s) for %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1261 #, c-format msgid "%s: Invalid file: %s is too big." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1277 #, c-format msgid "%s: Failed to switch user ID to %d/%d to read file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1283 #, fuzzy, c-format msgid "%s: Failed to open file %s for reading" msgstr "Fehler bei Öffnen von Datei %s zum Lesen: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1291 #, fuzzy, c-format msgid "%s: Error accessing file %s" msgstr "Fehler bei Zugriff auf Cache-Datei %s: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1303 #, fuzzy, c-format msgid "%s: Error reading file %s" msgstr "Fehler bei Lesen von Meta-Datei %s: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1318 #, c-format msgid "%s: File %s has wrong checksum: %llu. Expected %lli" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1324 #, c-format msgid "%s: Checksum %llu verified for %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1336 msgid "" "Found unfinished DTR transfers. It is possible the previous A-REX process " "did not shut down normally" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1343 #, c-format msgid "Found DTR %s for file %s left in transferring state from previous run" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1352 msgid "DTRGenerator is requested to clean links for null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1368 #, c-format msgid "%s: Cache cleaning takes too long - %u.%06u seconds" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:108 #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:190 #, c-format msgid "%s: Job monitoring counter is broken" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:115 #, c-format msgid "%s: Job monitoring is unintentionally lost" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:124 #, c-format msgid "%s: Job monitoring stop success" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:129 #, c-format msgid "" "%s: Job monitoring stop requested with %u active references and %s queue " "associated" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:131 #, c-format msgid "%s: Job monitoring stop requested with %u active references" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:195 #, c-format msgid "%s: Job monitoring is lost due to removal from queue" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:278 #, c-format msgid "%s: PushSorted failed to find job where expected" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:132 #, c-format msgid "Replacing queue '%s' with '%s'" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:226 #, c-format msgid "Bad name for stdout: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:234 #, c-format msgid "Bad name for stderr: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:297 #, c-format msgid "Bad name for runtime environment: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:342 msgid "Job description file could not be read." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:393 #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:407 #, c-format msgid "Bad name for executable: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:89 #, fuzzy msgid "Failed to start data staging threads" msgstr "Fehler bei Ablage von FTP Datei" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:190 #, c-format msgid "" "%s: Failed reading .local and changing state, job and A-REX may be left in " "an inconsistent state" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:195 #, fuzzy, c-format msgid "%s: unexpected failed job add request: %s" msgstr "Resuming Job: %s in Zustand: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:206 #, fuzzy, c-format msgid "%s: unexpected job add request: %s" msgstr "Resuming Job: %s in Zustand: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:259 #, c-format msgid "%s: job for attention" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:269 msgid "all for attention" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:286 #, c-format msgid "%s: job found while scanning" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:314 #, c-format msgid "%s: job will wait for external process" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:331 #, c-format msgid "%s: job assigned for slow polling" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:349 #, fuzzy, c-format msgid "%s: job being processed" msgstr "Die Job Löschen-Anfrage war erfolgreich" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:384 #, c-format msgid "Current jobs in system (PREPARING to FINISHING) per-DN (%i entries)" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:386 #, fuzzy, c-format msgid "%s: %i" msgstr "%s (%s)" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:398 #, fuzzy, c-format msgid "%s: Failed storing failure reason: %s" msgstr "Konnte job information nicht beziehen für job: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:404 #, fuzzy, c-format msgid "%s: Failed reading job description: %s" msgstr "Submit: Fehler bei Senden von Job Beschreibung" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:416 #, fuzzy, c-format msgid "%s: Failed parsing job request." 
msgstr "Submit: Fehler bei Senden von Job Beschreibung" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:451 #, fuzzy, c-format msgid "%s: Failed writing list of output files: %s" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:477 #, c-format msgid "%s: Failed obtaining lrms id" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:491 #, fuzzy, c-format msgid "%s: Failed writing local information: %s" msgstr "Konnte job information nicht beziehen für job: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:523 #, c-format msgid "%s: Failed creating grami file" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:527 #, c-format msgid "%s: Failed setting executable permissions" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:535 #, c-format msgid "%s: state SUBMIT: starting child: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:542 #, c-format msgid "%s: Failed running submission process" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:547 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:654 #, c-format msgid "%s: LRMS scripts limit of %u is reached - suspending submit/cancel" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:563 #, c-format msgid "" "%s: Job submission to LRMS takes too long, but ID is already obtained. " "Pretending submission is done." msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:570 #, c-format msgid "%s: Job submission to LRMS takes too long. Failing." msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:579 #, c-format msgid "%s: state SUBMIT: child exited with code %i" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:584 #, c-format msgid "%s: Job submission to LRMS failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:605 #, fuzzy, c-format msgid "%s: state CANCELING: timeout waiting for cancellation" msgstr "" "start_reading_ftp: Zeitüberschreitung bei Warten auf Zeitpunkt letzter " "Änderung" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:611 #, c-format msgid "%s: state CANCELING: job diagnostics collected" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:639 #, c-format msgid "%s: state CANCELING: starting child: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:641 #, c-format msgid "%s: Job has completed already. No action taken to cancel" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:649 #, fuzzy, c-format msgid "%s: Failed running cancellation process" msgstr "Fehler bei Reservieren von Platz" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:668 #, c-format msgid "" "%s: Job cancellation takes too long, but diagnostic collection seems to be " "done. Pretending cancellation succeeded." msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:674 #, c-format msgid "%s: Job cancellation takes too long. Failing." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:684 #, c-format msgid "%s: state CANCELING: child exited with code %i" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:690 #, c-format msgid "%s: Failed to cancel running job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:709 #, c-format msgid "%s: State: %s: data staging finished" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:744 #, c-format msgid "%s: State: %s: still in data staging" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:757 #, c-format msgid "%s: Job is not allowed to be rerun anymore" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:767 #, c-format msgid "%s: Job failed in unknown state. Won't rerun." msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:788 #, fuzzy, c-format msgid "%s: Reprocessing job description failed" msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:795 #, c-format msgid "%s: Failed to read reprocessed list of output files" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:799 #, c-format msgid "%s: Failed to read reprocessed list of input files" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:883 #, c-format msgid "%s: Reading status of new job failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:896 #, c-format msgid "%s: State: ACCEPTED: parsing job description" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:898 #, c-format msgid "%s: Processing job description failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:936 #, fuzzy, c-format msgid "%s: new job is accepted" msgstr "Job migrierte mit Job ID: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:948 #, c-format msgid "%s: %s: New job belongs to %i/%i" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:953 #, c-format msgid "%s: old job is accepted" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:964 #, c-format msgid "%s: State: ACCEPTED" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:970 #, c-format msgid "%s: State: ACCEPTED: dryrun" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:993 #, c-format msgid "%s: State: ACCEPTED: has process time %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:999 #, c-format msgid "%s: State: ACCEPTED: moving to PREPARING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1015 #, c-format msgid "%s: State: PREPARING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1022 #, fuzzy, c-format msgid "%s: Failed obtaining local job information." msgstr "Konnte Job Status Information nicht beziehen." 
#: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1075 #, c-format msgid "%s: State: SUBMIT" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1095 #, c-format msgid "%s: State: CANCELING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1115 #, c-format msgid "%s: State: INLRMS" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1120 #, c-format msgid "%s: State: INLRMS - checking for pending(%u) and mark" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1122 #, c-format msgid "%s: State: INLRMS - checking for not pending" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1124 #, c-format msgid "%s: Job finished" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1128 #, c-format msgid "%s: State: INLRMS: exit message is %i %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1141 #, c-format msgid "%s: State: INLRMS - no mark found" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1153 #, c-format msgid "%s: State: FINISHING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1174 #, c-format msgid "%s: Job is requested to clean - deleting" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1191 #, c-format msgid "%s: restarted PREPARING job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1207 #, c-format msgid "%s: restarted INLRMS job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1216 #, c-format msgid "%s: restarted FINISHING job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1221 #, c-format msgid "%s: Can't rerun on request" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1223 #, c-format msgid "%s: Can't rerun on request - not a suitable state" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1234 #, c-format msgid "%s: Job is too old - deleting" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1279 #, c-format msgid "%s: Job is ancient - delete rest of information" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1297 #, c-format msgid "%s: Canceling job because of user request" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1311 #, fuzzy, c-format msgid "%s: Failed to turn job into failed during cancel processing." 
msgstr "Fehler bei Reservieren von Platz" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1343 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1351 #, c-format msgid "%s: Plugin at state %s : %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1357 #, c-format msgid "%s: Plugin execution failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1464 #, c-format msgid "%s: State: %s from %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1515 #, fuzzy, c-format msgid "Failed to get DN information from .local file for job %s" msgstr "Fehler bei Bezug von Information für Job: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1542 #, c-format msgid "%s: Delete request due to internal problems" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1577 #, c-format msgid "%s: Job failure detected" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1630 #, fuzzy, c-format msgid "Failed to move file %s to %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1638 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1748 #, c-format msgid "Failed reading control directory: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1708 #, fuzzy, c-format msgid "Failed reading control directory: %s: %s" msgstr "Fehler bei Lesen von Objekt %s: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2022 #, fuzzy, c-format msgid "Helper process start failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2029 #, c-format msgid "Stopping helper process %s" msgstr "" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:61 #, fuzzy, c-format msgid "Error with hearbeatfile: %s" msgstr "Fehler bei Formatieren von Lock-Datei %s: %s" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:73 #: src/services/a-rex/grid-manager/log/JobsMetrics.cpp:139 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:136 #, c-format msgid ": Metrics tool returned error code %i: %s" msgstr "" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:107 #: src/services/a-rex/grid-manager/log/JobsMetrics.cpp:186 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:178 msgid "" "gmetric_bin_path empty in arc.conf (should never happen the default value " "should be used)" msgstr "" #: src/services/a-rex/grid-manager/log/JobLog.cpp:114 #, fuzzy msgid ": Accounting records reporter tool is not specified" msgstr "Delegation Authorisierung fehlgeschlagen" #: src/services/a-rex/grid-manager/log/JobLog.cpp:130 #, fuzzy msgid ": Failure creating slot for accounting reporter child process" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/grid-manager/log/JobLog.cpp:143 #, fuzzy msgid ": Failure starting accounting reporter child process" msgstr "Fehler bei Reservieren von Platz" #: src/services/a-rex/grid-manager/log/JobLog.cpp:176 msgid ": Failure creating accounting database connection" msgstr "" #: src/services/a-rex/grid-manager/log/JobLog.cpp:202 #, c-format msgid ": writing accounting record took %llu ms" msgstr "" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:74 #, c-format msgid "Session dir '%s' contains user specific substitutions - skipping it" msgstr "" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:86 #, c-format msgid "Sessiondir %s: Free space %f GB" msgstr "" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:94 #, fuzzy msgid "No session directories found in configuration." 
msgstr "Fehlendes oder leeres KeyPath Element" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:125 msgid "No cachedirs found/configured for calculation of free space." msgstr "" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:29 msgid "Failed reading local information" msgstr "" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:79 #, c-format msgid "Running mailer command (%s)" msgstr "" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:81 msgid "Failed running mailer" msgstr "" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:33 #, c-format msgid "%s: Job's helper exited" msgstr "" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:70 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:24 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:56 #, c-format msgid "%s: Failure creating slot for child process" msgstr "" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:119 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:41 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:73 #, c-format msgid "%s: Failure starting child process" msgstr "" #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:30 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:62 #, c-format msgid "%s: Failure creating data storage for child process" msgstr "" #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:46 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:78 #, c-format msgid "%s: Failure waiting for child process to finish" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:46 #, fuzzy msgid "[job description input]" msgstr "Keine Job Beschreibung als Eingabe benötigt" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:47 msgid "" "Tool for writing the grami file representation of a job description file." msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:51 #, fuzzy msgid "Name of grami file" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:56 #, fuzzy msgid "Configuration file to load" msgstr "Vermuting - Datei nicht gefunden" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:57 msgid "arc.conf" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:61 #, fuzzy msgid "Session directory to use" msgstr "Lege Verzeichnis %s an" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:62 msgid "directory" msgstr "Verzeichnis" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:78 #, fuzzy msgid "No job description file name provided." msgstr "Keine Job Beschreibung als Eingabe benötigt" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:84 #, fuzzy, c-format msgid "Unable to parse job description input: %s" msgstr "Fehler beim Bezug der Job Beschreibung von Job: %s" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:90 #, fuzzy msgid "Unable to load ARC configuration file." 
msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:107 #, fuzzy, c-format msgid "Unable to write grami file: %s" msgstr "Fehler bei Ablage von FTP Datei" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:112 #, fuzzy, c-format msgid "Unable to write 'output' file: %s" msgstr "Fehler bei Ablage von FTP Datei" #: src/services/a-rex/information_collector.cpp:53 #, fuzzy, c-format msgid "Resource information provider: %s" msgstr "Fehler bei Bezug von Information für Job: %s" #: src/services/a-rex/information_collector.cpp:56 #, fuzzy msgid "Resource information provider failed to start" msgstr "Fehler bei Bezug von Information für Job: %s" #: src/services/a-rex/information_collector.cpp:59 #, fuzzy msgid "Resource information provider failed to run" msgstr "Fehler bei Bezug von Information für Job: %s" #: src/services/a-rex/information_collector.cpp:63 #, c-format msgid "" "Resource information provider failed with exit status: %i\n" "%s" msgstr "" #: src/services/a-rex/information_collector.cpp:65 #, c-format msgid "" "Resource information provider log:\n" "%s" msgstr "" #: src/services/a-rex/information_collector.cpp:71 msgid "No new informational document assigned" msgstr "" #: src/services/a-rex/information_collector.cpp:73 #, c-format msgid "Obtained XML: %s" msgstr "" #: src/services/a-rex/information_collector.cpp:87 msgid "Informational document is empty" msgstr "" #: src/services/a-rex/information_collector.cpp:212 msgid "OptimizedInformationContainer failed to create temporary file" msgstr "" #: src/services/a-rex/information_collector.cpp:215 #, c-format msgid "OptimizedInformationContainer created temporary file: %s" msgstr "" #: src/services/a-rex/information_collector.cpp:221 msgid "" "OptimizedInformationContainer failed to store XML document to temporary file" msgstr "" #: src/services/a-rex/information_collector.cpp:230 msgid "OptimizedInformationContainer failed to parse XML" msgstr "" #: src/services/a-rex/information_collector.cpp:242 #, fuzzy msgid "OptimizedInformationContainer failed to rename temprary file" msgstr "Fehler bei Erstellen von Info-Datei %s: %s" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:36 msgid "Default INTERNAL client contructor" msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:39 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:59 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:81 #, fuzzy msgid "Failed to load grid-manager configfile" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:44 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:64 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:86 #, fuzzy msgid "Failed to set INTERNAL endpoint" msgstr "Fehler beim Entfernen von LFC Verzeichnis: %s" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:129 #, fuzzy msgid "Failed to identify grid-manager config file" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:148 #, fuzzy, c-format msgid "Failed to run configuration parser at %s." msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:152 #, c-format msgid "Parser failed with error code %i." msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:158 #, c-format msgid "No pid file is found at '%s'. Probably A-REX is not running." 
msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:173 #, fuzzy, c-format msgid "Failed to load grid-manager config file from %s" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:257 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:363 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:396 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:442 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:496 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:548 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:566 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:616 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:646 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:664 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:682 #, fuzzy msgid "INTERNALClient is not initialized" msgstr "FATAL: SSL Locks nicht initialisiert" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:447 #, fuzzy msgid "Submitting job " msgstr "Aufräumen von Job: %s" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:508 #, fuzzy, c-format msgid "Failed to copy input file: %s to path: %s" msgstr "Fehler bei Öffnen von Datei %s zum Lesen: %s" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:514 #, fuzzy, c-format msgid "Failed to set permissions on: %s" msgstr "Konnte Zugriffsrechte von hard link nicht ändern zu 0644: %s" #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:51 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:92 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:119 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:145 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:184 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:246 #, fuzzy msgid "Failed to load grid-manager config file" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:324 #, fuzzy msgid "Retrieving job description of INTERNAL jobs is not supported" msgstr "Resume von CREAM jobs wird nicht unterstützt" #: src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp:67 #, c-format msgid "Listing localjobs succeeded, %d localjobs found" msgstr "" #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:130 #, fuzzy msgid "Failed submitting job description" msgstr "Fehler bei Schreiben zu Ziel" #: src/services/a-rex/job.cpp:66 #, fuzzy, c-format msgid "Using cached local account '%s'" msgstr "Nutze space token %s" #: src/services/a-rex/job.cpp:77 msgid "Will not map to 'root' account by default" msgstr "" #: src/services/a-rex/job.cpp:90 msgid "No local account name specified" msgstr "" #: src/services/a-rex/job.cpp:93 #, c-format msgid "Using local account '%s'" msgstr "" #: src/services/a-rex/job.cpp:97 msgid "TLS provides no identity, going for OTokens" msgstr "" #: src/services/a-rex/job.cpp:155 #, fuzzy msgid "Failed to acquire A-REX's configuration" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/services/a-rex/job.cpp:227 #, fuzzy, c-format msgid "Cannot handle local user %s" msgstr "" "Kann Owner von %s nicht ändernÐевозможно изменить владельца папки %1.\n" "Ошибка: %2" #: src/services/a-rex/job.cpp:275 #, c-format msgid "%s: Failed to parse user policy" msgstr "" #: src/services/a-rex/job.cpp:280 #, c-format 
msgid "%s: Failed to load evaluator for user policy " msgstr "" #: src/services/a-rex/job.cpp:385 #, c-format msgid "%s: Unknown user policy '%s'" msgstr "" #: src/services/a-rex/job.cpp:707 src/services/a-rex/job.cpp:731 #, fuzzy, c-format msgid "Credential expires at %s" msgstr "Delegation service: %s" #: src/services/a-rex/job.cpp:709 src/services/a-rex/job.cpp:733 #, fuzzy, c-format msgid "Credential handling exception: %s" msgstr "Delegation service: %s" #: src/services/a-rex/job.cpp:1031 #, c-format msgid "Out of tries while allocating new job ID in %s" msgstr "" #: src/services/a-rex/job.cpp:1270 msgid "No non-draining session dirs available" msgstr "" #: src/services/a-rex/put.cpp:150 #, c-format msgid "%s: put file %s: there is no payload" msgstr "" #: src/services/a-rex/put.cpp:156 #, c-format msgid "%s: put file %s: unrecognized payload" msgstr "" #: src/services/a-rex/put.cpp:172 src/services/a-rex/rest/rest.cpp:1595 #, fuzzy, c-format msgid "%s: put file %s: failed to create file: %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/services/a-rex/put.cpp:188 #, fuzzy, c-format msgid "%s: put file %s: %s" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/put.cpp:210 #, fuzzy, c-format msgid "%s: delete file %s: failed to obtain file path: %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/services/a-rex/put.cpp:221 #, fuzzy, c-format msgid "%s: delete file %s: failed to open file/dir: %s" msgstr "Fehler bei Lesen von Datei %s: %s" #: src/services/a-rex/rest/rest.cpp:683 #, c-format msgid "REST: process %s at %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:726 src/services/a-rex/rest/rest.cpp:742 #: src/services/a-rex/rest/rest.cpp:797 src/services/a-rex/rest/rest.cpp:876 #: src/services/a-rex/rest/rest.cpp:1104 src/services/a-rex/rest/rest.cpp:1696 #, fuzzy, c-format msgid "process: method %s is not supported for subpath %s" msgstr "Verarbeitungstyp nicht unterstützt: %s" #: src/services/a-rex/rest/rest.cpp:748 #, fuzzy, c-format msgid "process: schema %s is not supported for subpath %s" msgstr "Verarbeitungstyp nicht unterstützt: %s" #: src/services/a-rex/rest/rest.cpp:873 src/services/a-rex/rest/rest.cpp:1101 #, fuzzy, c-format msgid "process: action %s is not supported for subpath %s" msgstr "Verarbeitungstyp nicht unterstützt: %s" #: src/services/a-rex/rest/rest.cpp:1113 src/services/a-rex/rest/rest.cpp:1182 #: src/services/a-rex/rest/rest.cpp:1542 src/services/a-rex/rest/rest.cpp:1685 #, c-format msgid "REST:GET job %s - %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1229 src/services/a-rex/rest/rest.cpp:1237 #, c-format msgid "REST:KILL job %s - %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1254 src/services/a-rex/rest/rest.cpp:1262 #, fuzzy, c-format msgid "REST:CLEAN job %s - %s" msgstr "" "CreateActivity: ответ = \n" "%s" #: src/services/a-rex/rest/rest.cpp:1279 src/services/a-rex/rest/rest.cpp:1287 #: src/services/a-rex/rest/rest.cpp:1304 #, c-format msgid "REST:RESTART job %s - %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1588 #, c-format msgid "REST:PUT job %s: file %s: there is no payload" msgstr "" #: src/services/a-rex/rest/rest.cpp:1608 #, fuzzy, c-format msgid "HTTP:PUT %s: put file %s: %s" msgstr "Fehler bei Lesen von Dateiliste" #: src/services/a-rex/test_cache_check.cpp:24 #: src/tests/count/test_client.cpp:20 #: src/tests/echo/echo_test4axis2c/test_client.cpp:20 #: src/tests/echo/test_client.cpp:21 msgid "Creating client side chain" msgstr "" #: src/services/a-rex/update_credentials.cpp:29 #, c-format msgid "" 
"UpdateCredentials: request = \n" "%s" msgstr "" "UpdateCredentials: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" "%s" #: src/services/a-rex/update_credentials.cpp:35 msgid "UpdateCredentials: missing Reference" msgstr "" #: src/services/a-rex/update_credentials.cpp:43 msgid "UpdateCredentials: wrong number of Reference" msgstr "" #: src/services/a-rex/update_credentials.cpp:51 msgid "UpdateCredentials: wrong number of elements inside Reference" msgstr "" #: src/services/a-rex/update_credentials.cpp:60 msgid "UpdateCredentials: EPR contains no JobID" msgstr "" #: src/services/a-rex/update_credentials.cpp:70 #, c-format msgid "UpdateCredentials: no job found: %s" msgstr "" #: src/services/a-rex/update_credentials.cpp:77 msgid "UpdateCredentials: failed to update credentials" msgstr "" #: src/services/a-rex/update_credentials.cpp:85 #, c-format msgid "" "UpdateCredentials: response = \n" "%s" msgstr "" "UpdateCredentials: отзыв = \n" "%s" #: src/services/candypond/CandyPond.cpp:52 #, fuzzy msgid "No A-REX config file found in candypond configuration" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: src/services/candypond/CandyPond.cpp:56 #, c-format msgid "Using A-REX config file %s" msgstr "" #: src/services/candypond/CandyPond.cpp:60 #, fuzzy, c-format msgid "Failed to process A-REX configuration in %s" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/services/candypond/CandyPond.cpp:65 #, fuzzy msgid "No caches defined in configuration" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: src/services/candypond/CandyPond.cpp:150 msgid "Empty filename returned from FileCache" msgstr "" #: src/services/candypond/CandyPond.cpp:162 #, fuzzy, c-format msgid "Problem accessing cache file %s: %s" msgstr "Fehler bei Zugriff auf Cache-Datei %s: %s" #: src/services/candypond/CandyPond.cpp:210 #: src/services/candypond/CandyPond.cpp:474 #, fuzzy msgid "No job ID supplied" msgstr "Keine Job ID in Antwort" #: src/services/candypond/CandyPond.cpp:219 #, c-format msgid "Bad number in priority element: %s" msgstr "" #: src/services/candypond/CandyPond.cpp:228 msgid "No username supplied" msgstr "" #: src/services/candypond/CandyPond.cpp:235 #, c-format msgid "Supplied username %s does not match mapped username %s" msgstr "" #: src/services/candypond/CandyPond.cpp:249 #, fuzzy msgid "No session directory found" msgstr "Kein Cache-Verzeichnis angegeben" #: src/services/candypond/CandyPond.cpp:253 #, fuzzy, c-format msgid "Using session dir %s" msgstr "Fehler beim start von session: %s" #: src/services/candypond/CandyPond.cpp:257 #, fuzzy, c-format msgid "Failed to stat session dir %s" msgstr "Konnte ownen des session dir nicht ändern zu %i: %s" #: src/services/candypond/CandyPond.cpp:262 #, c-format msgid "Session dir %s is owned by %i, but current mapped user is %i" msgstr "" #: src/services/candypond/CandyPond.cpp:289 #, fuzzy, c-format msgid "Failed to access proxy of given job id %s at %s" msgstr "Fehler bei Lesen von Proxy-Datei: %s" #: src/services/candypond/CandyPond.cpp:307 #, fuzzy, c-format msgid "DN is %s" msgstr "Größe ist %s" #: src/services/candypond/CandyPond.cpp:385 #, c-format msgid "Permission checking passed for url %s" msgstr "" #: src/services/candypond/CandyPond.cpp:410 #: src/services/candypond/CandyPondGenerator.cpp:135 #, fuzzy, c-format msgid "Failed to move %s to %s: %s" msgstr "Fehler bei Entfernen von hard link %s: %s" #: src/services/candypond/CandyPond.cpp:441 #, c-format msgid "Starting new DTR for %s" msgstr "" #: src/services/candypond/CandyPond.cpp:443 #, fuzzy, c-format msgid 
"Failed to start new DTR for %s" msgstr "Fehler bei Schreiben zu Datein %s: %s" #: src/services/candypond/CandyPond.cpp:487 #, fuzzy, c-format msgid "Job %s: all files downloaded successfully" msgstr "Datei %s erfolgreich entfernt" #: src/services/candypond/CandyPond.cpp:494 #, c-format msgid "Job %s: Some downloads failed" msgstr "" #: src/services/candypond/CandyPond.cpp:499 #, c-format msgid "Job %s: files still downloading" msgstr "" #: src/services/candypond/CandyPond.cpp:511 #, fuzzy msgid "CandyPond: Unauthorized" msgstr "echo: Unauthorisiert" #: src/services/candypond/CandyPond.cpp:520 msgid "No local user mapping found" msgstr "" #: src/services/candypond/CandyPond.cpp:527 #: src/services/data-staging/DataDeliveryService.cpp:625 #, fuzzy, c-format msgid "Identity is %s" msgstr "Identität: %s" #: src/services/candypond/CandyPond.cpp:585 #: src/services/data-staging/DataDeliveryService.cpp:697 msgid "Security Handlers processing failed" msgstr "" #: src/services/candypond/CandyPond.cpp:592 msgid "Only POST is supported in CandyPond" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:88 #, c-format msgid "DTR %s finished with state %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:124 #, fuzzy, c-format msgid "Could not determine session directory from filename %s" msgstr "Konnte Version des Server nicht bestimmen" #: src/services/candypond/CandyPondGenerator.cpp:164 #, fuzzy, c-format msgid "Invalid DTR for source %s, destination %s" msgstr "Quelle Ziel" #: src/services/candypond/CandyPondGenerator.cpp:206 #, c-format msgid "DTRs still running for job %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:215 #, c-format msgid "All DTRs finished for job %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:222 #, fuzzy, c-format msgid "Job %s not found" msgstr "Erhielt dbnotfound" #: src/services/data-staging/DataDeliveryService.cpp:58 #, c-format msgid "Archiving DTR %s, state ERROR" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:62 #, c-format msgid "Archiving DTR %s, state %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:164 #, fuzzy msgid "No delegation token in request" msgstr "Erstellen und senden von Anfrage" #: src/services/data-staging/DataDeliveryService.cpp:172 msgid "Failed to accept delegation" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:201 #: src/services/data-staging/DataDeliveryService.cpp:208 #, fuzzy msgid "ErrorDescription" msgstr "Fehler bei Importieren" #: src/services/data-staging/DataDeliveryService.cpp:213 #, c-format msgid "All %u process slots used" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:228 #, c-format msgid "Received retry for DTR %s still in transfer" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:235 #, c-format msgid "Replacing DTR %s in state %s with new request" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:245 #, c-format msgid "Storing temp proxy at %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:253 #, fuzzy, c-format msgid "Failed to create temp proxy at %s: %s" msgstr "Fehler bei Erstellen von Info-Datei %s: %s" #: src/services/data-staging/DataDeliveryService.cpp:260 #, fuzzy, c-format msgid "Failed to change owner of temp proxy at %s to %i:%i: %s" msgstr "Konnte ownen des session dir nicht ändern zu %i: %s" #: src/services/data-staging/DataDeliveryService.cpp:285 #, fuzzy msgid "Invalid DTR" msgstr "Ungültige URL: %s" #: src/services/data-staging/DataDeliveryService.cpp:289 #, 
fuzzy, c-format msgid "Failed to remove temporary proxy %s: %s" msgstr "Fehler bei Entfernen von hard link %s: %s" #: src/services/data-staging/DataDeliveryService.cpp:390 #, c-format msgid "No such DTR %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:401 #, fuzzy, c-format msgid "DTR %s failed: %s" msgstr "DCAU fehlgeschlagen: %s" #: src/services/data-staging/DataDeliveryService.cpp:412 #, fuzzy, c-format msgid "DTR %s finished successfully" msgstr "Verbindung erfolgreich geschlossen" #: src/services/data-staging/DataDeliveryService.cpp:422 #, c-format msgid "DTR %s still in progress (%lluB transferred)" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:482 #, c-format msgid "No active DTR %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:492 #, c-format msgid "DTR %s was already cancelled" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:501 #, fuzzy, c-format msgid "DTR %s could not be cancelled" msgstr "PDP: %s kann nicht geladen werden" #: src/services/data-staging/DataDeliveryService.cpp:545 #, fuzzy, c-format msgid "Failed to get load average: %s" msgstr "Warnung: Fehler bei Bezug von Attributen von %s: %s" #: src/services/data-staging/DataDeliveryService.cpp:569 msgid "Invalid configuration - no allowed IP address specified" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:573 #, fuzzy msgid "Invalid configuration - no transfer dirs specified" msgstr "Delegation Authorisierung fehlgeschlagen" #: src/services/data-staging/DataDeliveryService.cpp:584 #, fuzzy msgid "Failed to start archival thread" msgstr "Fehler bei Ablage von FTP Datei" #: src/services/data-staging/DataDeliveryService.cpp:609 msgid "Shutting down data delivery service" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:618 #, fuzzy msgid "Unauthorized" msgstr "echo: Unauthorisiert" #: src/services/data-staging/DataDeliveryService.cpp:704 msgid "Only POST is supported in DataDeliveryService" msgstr "" #: src/services/examples/echo_python/EchoService.py:12 msgid "EchoService (python) constructor called" msgstr "" #: src/services/examples/echo_python/EchoService.py:17 #, python-format msgid "EchoService (python) has prefix %(prefix)s and suffix %(suffix)s" msgstr "" #: src/services/examples/echo_python/EchoService.py:24 msgid "EchoService (python) destructor called" msgstr "" #: src/services/examples/echo_python/EchoService.py:54 msgid "EchoService (python) thread test starting" msgstr "" #: src/services/examples/echo_python/EchoService.py:65 #, python-format msgid "EchoService (python) thread test, iteration %(iteration)s %(status)s" msgstr "" #: src/services/examples/echo_python/EchoService.py:82 msgid "EchoService (python) 'Process' called" msgstr "" #: src/services/examples/echo_python/EchoService.py:86 #, python-format msgid "inmsg.Auth().Export(arc.SecAttr.ARCAuth) = %s" msgstr "" #: src/services/examples/echo_python/EchoService.py:87 #, python-format msgid "inmsg.Attributes().getAll() = %s " msgstr "" #: src/services/examples/echo_python/EchoService.py:88 #, python-format msgid "EchoService (python) got: %s " msgstr "" #: src/services/examples/echo_python/EchoService.py:93 #, python-format msgid "EchoService (python) request_namespace: %s" msgstr "" #: src/services/examples/echo_python/EchoService.py:99 #: src/services/examples/echo_python/EchoService.py:171 #, python-format msgid "outpayload %s" msgstr "" #: src/services/examples/echo_python/EchoService.py:128 msgid "Calling https://localhost:60000/Echo using ClientSOAP" 
msgstr "" #: src/services/examples/echo_python/EchoService.py:131 msgid "Calling http://localhost:60000/Echo using ClientSOAP" msgstr "" #: src/services/examples/echo_python/EchoService.py:137 #: src/services/examples/echo_python/EchoService.py:155 #, python-format msgid "new_payload %s" msgstr "" #: src/services/examples/echo_python/EchoService.py:149 msgid "Calling http://localhost:60000/Echo using httplib" msgstr "" #: src/services/examples/echo_python/EchoService.py:165 msgid "Start waiting 10 sec..." msgstr "" #: src/services/examples/echo_python/EchoService.py:167 #, fuzzy msgid "Waiting ends." msgstr "Warte vor Antwort" #: src/services/gridftpd/auth/auth.cpp:328 #, fuzzy, c-format msgid "Unknown authorization command %s" msgstr "unbekannter return code %s" #: src/services/gridftpd/auth/auth.cpp:347 #, c-format msgid "" "The [vo] section labeled '%s' has no file associated and can't be used for " "matching" msgstr "" #: src/services/gridftpd/auth/auth_plugin.cpp:73 #: src/services/gridftpd/auth/unixmap.cpp:217 #, c-format msgid "Plugin %s failed to run" msgstr "" #: src/services/gridftpd/auth/auth_plugin.cpp:75 #: src/services/gridftpd/auth/unixmap.cpp:219 #, c-format msgid "Plugin %s printed: %u" msgstr "" #: src/services/gridftpd/auth/auth_plugin.cpp:76 #: src/services/gridftpd/auth/unixmap.cpp:220 #, fuzzy, c-format msgid "Plugin %s error: %u" msgstr "Globus Fehler: %s" #: src/services/gridftpd/auth/auth_voms.cpp:28 #, c-format msgid "VOMS proxy processing returns: %i - %s" msgstr "" #: src/services/gridftpd/auth/auth_voms.cpp:120 #, c-format msgid "VOMS trust chains: %s" msgstr "" #: src/services/gridftpd/auth/unixmap.cpp:126 msgid "User name mapping has empty command" msgstr "" #: src/services/gridftpd/auth/unixmap.cpp:154 #, c-format msgid "User name mapping has empty name: %s" msgstr "" #: src/services/gridftpd/commands.cpp:46 #, fuzzy, c-format msgid "response: %s" msgstr "Antwort: %s" #: src/services/gridftpd/commands.cpp:50 #, fuzzy, c-format msgid "Send response failed: %s" msgstr "Anlegen von Socket schlug fehl: %s" #: src/services/gridftpd/commands.cpp:80 msgid "Response sending error" msgstr "" #: src/services/gridftpd/commands.cpp:93 #, fuzzy msgid "Closed connection" msgstr "Schließe Verbindung" #: src/services/gridftpd/commands.cpp:131 #, fuzzy, c-format msgid "Socket conversion failed: %s" msgstr "Anlegen von Socket schlug fehl: %s" #: src/services/gridftpd/commands.cpp:141 #, fuzzy, c-format msgid "Failed to obtain own address: %s" msgstr "Warnung: Fehler bei Bezug von Attributen von %s: %s" #: src/services/gridftpd/commands.cpp:149 #, c-format msgid "Failed to recognize own address type (IPv4 or IPv6) - %u" msgstr "" #: src/services/gridftpd/commands.cpp:159 #, fuzzy, c-format msgid "Accepted connection on [%s]:%u" msgstr "Fehler bei Verbinden zu server %s:%d" #: src/services/gridftpd/commands.cpp:161 #, c-format msgid "Accepted connection on %u.%u.%u.%u:%u" msgstr "" #: src/services/gridftpd/commands.cpp:196 #, fuzzy msgid "Accept failed" msgstr "PASV fehlgeschlagen" #: src/services/gridftpd/commands.cpp:204 #: src/services/gridftpd/listener.cpp:415 #, fuzzy, c-format msgid "Accept failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/services/gridftpd/commands.cpp:219 #, c-format msgid "Accepted connection from [%s]:%u" msgstr "" #: src/services/gridftpd/commands.cpp:221 #, c-format msgid "Accepted connection from %u.%u.%u.%u:%u" msgstr "" #: src/services/gridftpd/commands.cpp:230 #, fuzzy msgid "Authenticate in commands failed" msgstr "Authentifiziere: %s" #: 
src/services/gridftpd/commands.cpp:239 #, fuzzy msgid "Authentication failure" msgstr "Authentifiziere: %s" #: src/services/gridftpd/commands.cpp:247 #, fuzzy, c-format msgid "User subject: %s" msgstr "Subjekt: %s" #: src/services/gridftpd/commands.cpp:248 #, fuzzy, c-format msgid "Encrypted: %s" msgstr "Verschlüsselter Name ID: %s" #: src/services/gridftpd/commands.cpp:254 #, fuzzy msgid "User has no proper configuration associated" msgstr "Delegation Authorisierung fehlgeschlagen" #: src/services/gridftpd/commands.cpp:262 msgid "" "User has empty virtual directory tree.\n" "Either user has no authorised plugins or there are no plugins configured at " "all." msgstr "" #: src/services/gridftpd/commands.cpp:279 msgid "Read commands in authenticate failed" msgstr "" #: src/services/gridftpd/commands.cpp:411 #, fuzzy msgid "Control connection (probably) closed" msgstr "GET: Verbindung wird geschlossen" #: src/services/gridftpd/commands.cpp:445 #: src/services/gridftpd/commands.cpp:724 #, fuzzy msgid "Command EPRT" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:446 #, fuzzy, c-format msgid "Failed to parse remote address %s" msgstr "Fehler bei Entfernen von hard link %s: %s" #: src/services/gridftpd/commands.cpp:468 #, fuzzy, c-format msgid "Command USER %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:475 #, fuzzy msgid "Command CDUP" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:481 #, fuzzy, c-format msgid "Command CWD %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:497 #, fuzzy, c-format msgid "Command MKD %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:517 #, fuzzy, c-format msgid "Command SIZE %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:532 #, fuzzy, c-format msgid "Command SBUF: %i" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:553 #, fuzzy, c-format msgid "Command MLST %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:576 #, fuzzy, c-format msgid "Command DELE %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:591 #, fuzzy, c-format msgid "Command RMD %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:605 #, fuzzy, c-format msgid "Command TYPE %c" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:616 #, fuzzy, c-format msgid "Command MODE %c" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:628 #, fuzzy msgid "Command ABOR" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:641 #, fuzzy, c-format msgid "Command REST %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:654 #, fuzzy, c-format msgid "Command EPSV %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:656 #, fuzzy msgid "Command SPAS" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:658 #, fuzzy msgid "Command PASV" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:679 #, fuzzy msgid "local_pasv failed" msgstr "%s fehlgeschlagen" #: src/services/gridftpd/commands.cpp:703 #, fuzzy msgid "local_spas failed" msgstr "%s fehlgeschlagen" #: src/services/gridftpd/commands.cpp:726 #, fuzzy msgid "Command PORT" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:729 #, fuzzy msgid "active_data is disabled" msgstr "SOAP Aufruf fehlgeschlagen" #: src/services/gridftpd/commands.cpp:738 #, fuzzy msgid "local_port failed" msgstr "SendData: Lokaler port schlug fehl: %s" #: src/services/gridftpd/commands.cpp:751 #, fuzzy, c-format msgid "Command MLSD %s" msgstr "Kommando: %s" #: 
src/services/gridftpd/commands.cpp:753 #, fuzzy, c-format msgid "Command NLST %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:755 #, fuzzy, c-format msgid "Command LIST %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:806 #, fuzzy, c-format msgid "Command ERET %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:836 #, fuzzy, c-format msgid "Command RETR %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:865 #, fuzzy, c-format msgid "Command STOR %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:893 #, fuzzy, c-format msgid "Command ALLO %i" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:916 #, fuzzy msgid "Command OPTS" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:919 msgid "Command OPTS RETR" msgstr "" #: src/services/gridftpd/commands.cpp:929 #, fuzzy, c-format msgid "Option: %s" msgstr "Funktion : %s" #: src/services/gridftpd/commands.cpp:973 #, fuzzy msgid "Command NOOP" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:977 #, fuzzy msgid "Command QUIT" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:987 #, fuzzy msgid "Failed to close, deleting client" msgstr "Konnte delegation context nicht erhalten" #: src/services/gridftpd/commands.cpp:1001 #, fuzzy, c-format msgid "Command DCAU: %i '%s'" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:1029 #, fuzzy, c-format msgid "Command PBZS: %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:1037 #, fuzzy, c-format msgid "Setting pbsz to %lu" msgstr "Setze Datei %s zu Größe %llu" #: src/services/gridftpd/commands.cpp:1053 #, fuzzy, c-format msgid "Command PROT: %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:1078 #, fuzzy, c-format msgid "Command MDTM %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:1100 #, fuzzy, c-format msgid "Raw command: %s" msgstr "Kommando: %s" #: src/services/gridftpd/commands.cpp:1148 #, fuzzy msgid "Failed to allocate memory for buffer" msgstr "Fehler beim Reservieren von Speicher" #: src/services/gridftpd/commands.cpp:1155 #, c-format msgid "Allocated %u buffers %llu bytes each." 
msgstr "" #: src/services/gridftpd/commands.cpp:1162 #, fuzzy msgid "abort_callback: start" msgstr "ftp_write_callback: Fehler" #: src/services/gridftpd/commands.cpp:1165 #, fuzzy, c-format msgid "abort_callback: Globus error: %s" msgstr "ftp_complete_callback: Fehler: %s" #: src/services/gridftpd/commands.cpp:1179 msgid "make_abort: start" msgstr "" #: src/services/gridftpd/commands.cpp:1191 msgid "Failed to abort data connection - ignoring and recovering" msgstr "" #: src/services/gridftpd/commands.cpp:1199 msgid "make_abort: wait for abort flag to be reset" msgstr "" #: src/services/gridftpd/commands.cpp:1209 msgid "make_abort: leaving" msgstr "" #: src/services/gridftpd/commands.cpp:1224 msgid "check_abort: have Globus error" msgstr "" #: src/services/gridftpd/commands.cpp:1225 msgid "Abort request caused by transfer error" msgstr "" #: src/services/gridftpd/commands.cpp:1228 msgid "check_abort: sending 426" msgstr "" #: src/services/gridftpd/commands.cpp:1249 msgid "Abort request caused by error in transfer function" msgstr "" #: src/services/gridftpd/commands.cpp:1331 msgid "Failed to start timer thread - timeout won't work" msgstr "" #: src/services/gridftpd/commands.cpp:1383 msgid "Killing connection due to timeout" msgstr "" #: src/services/gridftpd/conf/conf_vo.cpp:22 #: src/services/gridftpd/conf/conf_vo.cpp:48 msgid "Configuration section [userlist] is missing name." msgstr "" #: src/services/gridftpd/conf/daemon.cpp:58 #: src/services/gridftpd/conf/daemon.cpp:138 #, c-format msgid "No such user: %s" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:70 #: src/services/gridftpd/conf/daemon.cpp:150 #, c-format msgid "No such group: %s" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:83 #: src/services/gridftpd/conf/daemon.cpp:163 #, c-format msgid "Improper debug level '%s'" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:120 msgid "Missing option for command logreopen" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:125 msgid "Wrong option in logreopen" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:209 #, fuzzy, c-format msgid "Failed to open log file %s" msgstr "Fehler bei Unlock von Datei %s: %s" #: src/services/gridftpd/datalist.cpp:101 #, fuzzy msgid "Closing channel (list)" msgstr "Schließe Verbindung" #: src/services/gridftpd/datalist.cpp:157 msgid "Data channel connected (list)" msgstr "" #: src/services/gridftpd/dataread.cpp:24 msgid "data_connect_retrieve_callback" msgstr "" #: src/services/gridftpd/dataread.cpp:30 msgid "Data channel connected (retrieve)" msgstr "" #: src/services/gridftpd/dataread.cpp:37 msgid "data_connect_retrieve_callback: allocate_data_buffer" msgstr "" #: src/services/gridftpd/dataread.cpp:40 msgid "data_connect_retrieve_callback: allocate_data_buffer failed" msgstr "" #: src/services/gridftpd/dataread.cpp:48 #, c-format msgid "data_connect_retrieve_callback: check for buffer %u" msgstr "" #: src/services/gridftpd/dataread.cpp:61 src/services/gridftpd/dataread.cpp:158 #, c-format msgid "Closing channel (retrieve) due to local read error: %s" msgstr "" #: src/services/gridftpd/dataread.cpp:75 src/services/gridftpd/dataread.cpp:172 #, fuzzy msgid "Buffer registration failed" msgstr "Delegation nicht erfolgreich: " #: src/services/gridftpd/dataread.cpp:88 #, fuzzy msgid "data_retrieve_callback" msgstr "ftp_check_callback" #: src/services/gridftpd/dataread.cpp:96 #, c-format msgid "Data channel (retrieve) %i %i %i" msgstr "" #: src/services/gridftpd/dataread.cpp:104 #, fuzzy msgid "Closing channel (retrieve)" msgstr "Schließe 
Verbindung" #: src/services/gridftpd/dataread.cpp:110 #: src/services/gridftpd/datawrite.cpp:128 #, c-format msgid "Time spent waiting for network: %.3f ms" msgstr "" #: src/services/gridftpd/dataread.cpp:111 #: src/services/gridftpd/datawrite.cpp:129 #, c-format msgid "Time spent waiting for disc: %.3f ms" msgstr "" #: src/services/gridftpd/dataread.cpp:122 #, fuzzy msgid "data_retrieve_callback: lost buffer" msgstr "ftp_write_callback: Fehler" #: src/services/gridftpd/datawrite.cpp:24 #, fuzzy msgid "data_connect_store_callback" msgstr "ftp_check_callback" #: src/services/gridftpd/datawrite.cpp:30 msgid "Data channel connected (store)" msgstr "" #: src/services/gridftpd/datawrite.cpp:57 #, fuzzy msgid "Failed to register any buffer" msgstr "Konnte job nicht registrieren" #: src/services/gridftpd/datawrite.cpp:76 #, c-format msgid "Data channel (store) %i %i %i" msgstr "" #: src/services/gridftpd/datawrite.cpp:89 #, fuzzy msgid "data_store_callback: lost buffer" msgstr "ftp_read_callback: Fehler" #: src/services/gridftpd/datawrite.cpp:105 #, c-format msgid "Closing channel (store) due to error: %s" msgstr "" #: src/services/gridftpd/datawrite.cpp:115 #, fuzzy msgid "Closing channel (store)" msgstr "Schließe Verbindung" #: src/services/gridftpd/fileplugin/fileplugin.cpp:55 #, fuzzy msgid "Can't parse access rights in configuration line" msgstr "Konnte classname für Policy nicht von Konfiguration parsen" #: src/services/gridftpd/fileplugin/fileplugin.cpp:61 #, fuzzy msgid "Can't parse user:group in configuration line" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: src/services/gridftpd/fileplugin/fileplugin.cpp:68 #, fuzzy msgid "Can't recognize user in configuration line" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: src/services/gridftpd/fileplugin/fileplugin.cpp:77 msgid "Can't recognize group in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:84 #: src/services/gridftpd/fileplugin/fileplugin.cpp:89 #, fuzzy msgid "Can't parse or:and in configuration line" msgstr "Konnte classname für Policy nicht von Konfiguration parsen" #: src/services/gridftpd/fileplugin/fileplugin.cpp:116 #, fuzzy msgid "Can't parse configuration line" msgstr "Delegation Authorisierung fehlgeschlagen" #: src/services/gridftpd/fileplugin/fileplugin.cpp:120 #, fuzzy, c-format msgid "Bad directory name: %s" msgstr "Verzeichnis: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:137 #, fuzzy msgid "Can't parse create arguments in configuration line" msgstr "Konnte classname für Request nicht von Konfiguration parsen" #: src/services/gridftpd/fileplugin/fileplugin.cpp:146 #, fuzzy msgid "Can't parse mkdir arguments in configuration line" msgstr "Konnte classname für Request nicht von Konfiguration parsen" #: src/services/gridftpd/fileplugin/fileplugin.cpp:163 #, fuzzy, c-format msgid "Bad subcommand in configuration line: %s" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:175 #, fuzzy msgid "Bad mount directory specified" msgstr "Kein Cache-Verzeichnis angegeben" #: src/services/gridftpd/fileplugin/fileplugin.cpp:177 #, c-format msgid "Mount point %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:215 #: src/services/gridftpd/fileplugin/fileplugin.cpp:274 #, fuzzy, c-format msgid "mkdir failed: %s" msgstr "PASV fehlgeschlagen: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:226 #, fuzzy, c-format msgid "Warning: mount point %s creation failed." 
msgstr "Warnung: Schließen von tmp Lock Datei %s fehlgeschlagen" #: src/services/gridftpd/fileplugin/fileplugin.cpp:330 #, fuzzy, c-format msgid "plugin: open: %s" msgstr "lfn: %s - pfn: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:378 #: src/services/gridftpd/fileplugin/fileplugin.cpp:415 #, fuzzy msgid "Not enough space to store file" msgstr "Kein space token angegeben" #: src/services/gridftpd/fileplugin/fileplugin.cpp:430 #, fuzzy, c-format msgid "open: changing owner for %s, %i, %i" msgstr "" "Kann Owner von %s nicht ändernÐевозможно изменить владельца папки %1.\n" "Ошибка: %2" #: src/services/gridftpd/fileplugin/fileplugin.cpp:437 #, c-format msgid "open: owner: %i %i" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:446 #: src/services/gridftpd/fileplugin/fileplugin.cpp:486 #, fuzzy, c-format msgid "Unknown open mode %s" msgstr "unbekannter return code %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:451 msgid "plugin: close" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:492 msgid "plugin: read" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:498 #, fuzzy msgid "Error while reading file" msgstr "Fehler beim Lesen des response header" #: src/services/gridftpd/fileplugin/fileplugin.cpp:508 msgid "plugin: write" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:519 msgid "Zero bytes written to file" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:727 #, c-format msgid "plugin: checkdir: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:730 #, c-format msgid "plugin: checkdir: access: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:739 #, c-format msgid "plugin: checkdir: access: allowed: %s" msgstr "" #: src/services/gridftpd/fileroot.cpp:14 #, c-format msgid "No plugin is configured or authorised for requested path %s" msgstr "" #: src/services/gridftpd/fileroot.cpp:19 msgid "FilePlugin: more unload than load" msgstr "" #: src/services/gridftpd/fileroot.cpp:34 #, c-format msgid "Can't load plugin %s for access point %s" msgstr "" #: src/services/gridftpd/fileroot.cpp:39 src/services/gridftpd/fileroot.cpp:43 #, c-format msgid "Plugin %s for access point %s is broken." msgstr "" #: src/services/gridftpd/fileroot.cpp:47 #, c-format msgid "Plugin %s for access point %s acquire failed (should never happen)." 
msgstr "" #: src/services/gridftpd/fileroot.cpp:54 #, c-format msgid "Destructor with dlclose (%s)" msgstr "" #: src/services/gridftpd/fileroot.cpp:77 #, c-format msgid "FileNode: operator= (%s <- %s) %lu <- %lu" msgstr "" #: src/services/gridftpd/fileroot.cpp:80 msgid "Copying with dlclose" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:31 #: src/services/gridftpd/fileroot_config.cpp:405 #, fuzzy msgid "configuration file not found" msgstr "Vermuting - Datei nicht gefunden" #: src/services/gridftpd/fileroot_config.cpp:54 #, fuzzy msgid "Wrong port number in configuration" msgstr "ausführliche Ausgabe" #: src/services/gridftpd/fileroot_config.cpp:63 #, fuzzy msgid "Wrong maxconnections number in configuration" msgstr "Es ist keine connetion chain konfiguriert" #: src/services/gridftpd/fileroot_config.cpp:72 msgid "Wrong defaultbuffer number in configuration" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:81 msgid "Wrong maxbuffer number in configuration" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:113 #: src/services/gridftpd/fileroot_config.cpp:121 #, fuzzy, c-format msgid "Can't resolve host %s" msgstr "Kann stat-Informationen zu Datei nicht einholen: %s" #: src/services/gridftpd/fileroot_config.cpp:173 #, fuzzy msgid "Could not determine hostname from gethostname()" msgstr "Kann hostname von uname nciht ermitteln" #: src/services/gridftpd/fileroot_config.cpp:190 msgid "unnamed group" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:199 msgid "undefined plugin name" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:203 msgid "undefined virtual plugin path" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:208 #, c-format msgid "bad directory for plugin: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:220 #, fuzzy, c-format msgid "Already have directory: %s" msgstr "Lege Verzeichnis %s an" #: src/services/gridftpd/fileroot_config.cpp:223 #, fuzzy, c-format msgid "Registering directory: %s with plugin: %s" msgstr "Fehler bei Anlegen von Verzeichnis %s: %s" #: src/services/gridftpd/fileroot_config.cpp:236 #, fuzzy, c-format msgid "file node creation failed: %s" msgstr "Anlegen von Socket schlug fehl: %s" #: src/services/gridftpd/fileroot_config.cpp:286 #, c-format msgid "improper attribute for allowencryption command: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:300 #, c-format msgid "improper attribute for allowactvedata command: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:314 #, fuzzy, c-format msgid "failed while processing configuration command: %s %s" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/services/gridftpd/fileroot_config.cpp:339 #, fuzzy, c-format msgid "Failed processing authorization group %s" msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #: src/services/gridftpd/fileroot_config.cpp:352 msgid "Missing authgroup name in allowaccess" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:369 msgid "Missing authgroup name in denyaccess" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:419 #, fuzzy msgid "failed to process client identification" msgstr "Pfad zu VOMS Server Konfigurationsdatei" #: src/services/gridftpd/fileroot_config.cpp:426 #, fuzzy msgid "failed to identify plugins path" msgstr "Initiierung der Delegation fehlgeschlagen" #: src/services/gridftpd/fileroot_config.cpp:453 #, fuzzy, c-format msgid "Registering dummy directory: %s" msgstr "Lege Verzeichnis %s an" #: src/services/gridftpd/listener.cpp:57 
src/services/gridftpd/listener.cpp:466 #, fuzzy msgid "Activation failed" msgstr "SOAP Aufruf fehlgeschlagen" #: src/services/gridftpd/listener.cpp:66 src/services/gridftpd/listener.cpp:172 msgid "Child exited" msgstr "" #: src/services/gridftpd/listener.cpp:78 #, fuzzy msgid "Globus connection error" msgstr "Schließe Verbindung" #: src/services/gridftpd/listener.cpp:80 src/services/gridftpd/listener.cpp:424 #, fuzzy msgid "New connection" msgstr "Wiederholte Nutzung von Verbindung" #: src/services/gridftpd/listener.cpp:87 msgid "Server stopped" msgstr "" #: src/services/gridftpd/listener.cpp:157 msgid "Error: failed to set handler for SIGTERM" msgstr "" #: src/services/gridftpd/listener.cpp:161 msgid "Starting controlled process" msgstr "" #: src/services/gridftpd/listener.cpp:164 #, fuzzy msgid "fork failed" msgstr "%s fehlgeschlagen" #: src/services/gridftpd/listener.cpp:169 msgid "wait failed - killing child" msgstr "" #: src/services/gridftpd/listener.cpp:174 msgid "Killed with signal: " msgstr "" #: src/services/gridftpd/listener.cpp:176 msgid "Restarting after segmentation violation." msgstr "" #: src/services/gridftpd/listener.cpp:177 #, fuzzy msgid "Waiting 1 minute" msgstr "Warte vor Antwort" #: src/services/gridftpd/listener.cpp:239 msgid "Error: failed to set handler for SIGCHLD" msgstr "" #: src/services/gridftpd/listener.cpp:256 msgid "Missing argument" msgstr "" #: src/services/gridftpd/listener.cpp:257 msgid "Unknown option" msgstr "" #: src/services/gridftpd/listener.cpp:264 msgid "Wrong port number" msgstr "" #: src/services/gridftpd/listener.cpp:274 #, fuzzy msgid "Wrong number of connections" msgstr "Schließe Verbindung" #: src/services/gridftpd/listener.cpp:281 msgid "Wrong buffer size" msgstr "" #: src/services/gridftpd/listener.cpp:288 msgid "Wrong maximal buffer size" msgstr "" #: src/services/gridftpd/listener.cpp:300 #, fuzzy msgid "Failed reading configuration" msgstr "Fehler bei Initialisierung der condition" #: src/services/gridftpd/listener.cpp:331 #, fuzzy, c-format msgid "Failed to obtain local address: %s" msgstr "Warnung: Fehler bei Bezug von Attributen von %s: %s" #: src/services/gridftpd/listener.cpp:338 #, fuzzy, c-format msgid "Failed to create socket(%s): %s" msgstr "Fehler bei Anlegen von soft link: %s" #: src/services/gridftpd/listener.cpp:352 #, fuzzy, c-format msgid "Failed to limit socket to IPv6: %s" msgstr "Fehler bei Unlock von Datei %s: %s" #: src/services/gridftpd/listener.cpp:359 #, fuzzy, c-format msgid "Failed to bind socket(%s): %s" msgstr "Fehler bei Unlock von Datei %s: %s" #: src/services/gridftpd/listener.cpp:364 #, fuzzy, c-format msgid "Failed to listen on socket(%s): %s" msgstr "Fehler bei Schreiben zu Datein %s: %s" #: src/services/gridftpd/listener.cpp:371 msgid "Not listening to anything" msgstr "" #: src/services/gridftpd/listener.cpp:374 #, c-format msgid "Some addresses failed. Listening on %u of %u." 
msgstr "" #: src/services/gridftpd/listener.cpp:382 #: src/services/gridftpd/listener.cpp:477 #, fuzzy msgid "Listen started" msgstr "Start start" #: src/services/gridftpd/listener.cpp:395 msgid "No valid handles left for listening" msgstr "" #: src/services/gridftpd/listener.cpp:401 #, fuzzy, c-format msgid "Select failed: %s" msgstr "Anfrage fehlgeschlagen: %s" #: src/services/gridftpd/listener.cpp:422 #, c-format msgid "Have connections: %i, max: %i" msgstr "" #: src/services/gridftpd/listener.cpp:427 #, fuzzy, c-format msgid "Fork failed: %s" msgstr "PASV fehlgeschlagen: %s" #: src/services/gridftpd/listener.cpp:445 msgid "Refusing connection: Connection limit exceeded" msgstr "" #: src/services/gridftpd/listener.cpp:471 #, fuzzy msgid "Init failed" msgstr "Schreibfehler" #: src/services/gridftpd/listener.cpp:474 #, fuzzy msgid "Listen failed" msgstr "Schreibfehler" #: src/services/gridftpd/listener.cpp:488 msgid "Listen finished" msgstr "" #: src/services/gridftpd/listener.cpp:493 msgid "Stopping server" msgstr "" #: src/services/gridftpd/listener.cpp:497 #, fuzzy msgid "Destroying handle" msgstr "Fehler bei Initialisierung von handle" #: src/services/gridftpd/listener.cpp:500 msgid "Deactivating modules" msgstr "" #: src/services/gridftpd/listener.cpp:508 #, fuzzy msgid "Exiting" msgstr "Zeichenkette" #: src/services/gridftpd/misc/ldapquery.cpp:253 #, fuzzy, c-format msgid "%s: %s:%i" msgstr "%s (%s)" #: src/services/gridftpd/misc/ldapquery.cpp:390 #: src/services/gridftpd/misc/ldapquery.cpp:467 #, fuzzy, c-format msgid "%s %s" msgstr "%s (%s)" #: src/services/gridftpd/misc/ldapquery.cpp:394 #, fuzzy, c-format msgid " %s: %s" msgstr " %s" #: src/services/gridftpd/misc/ldapquery.cpp:396 #, fuzzy, c-format msgid " %s:" msgstr " %s" #: src/services/gridftpd/userspec.cpp:83 src/services/gridftpd/userspec.cpp:133 msgid "No proxy provided" msgstr "" #: src/services/gridftpd/userspec.cpp:85 #, c-format msgid "Proxy/credentials stored at %s" msgstr "" #: src/services/gridftpd/userspec.cpp:91 src/services/gridftpd/userspec.cpp:141 msgid "Running user has no name" msgstr "" #: src/services/gridftpd/userspec.cpp:94 src/services/gridftpd/userspec.cpp:144 #, c-format msgid "Mapped to running user: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:104 #: src/services/gridftpd/userspec.cpp:154 #, fuzzy, c-format msgid "Mapped to local id: %i" msgstr "Fehler bei Unlock von Datei %s: %s" #: src/services/gridftpd/userspec.cpp:109 #: src/services/gridftpd/userspec.cpp:159 #, c-format msgid "No group %i for mapped user" msgstr "" #: src/services/gridftpd/userspec.cpp:113 #: src/services/gridftpd/userspec.cpp:163 #, c-format msgid "Mapped to local group id: %i" msgstr "" #: src/services/gridftpd/userspec.cpp:114 #: src/services/gridftpd/userspec.cpp:164 #, c-format msgid "Mapped to local group name: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:115 #: src/services/gridftpd/userspec.cpp:165 #, c-format msgid "Mapped user's home: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:135 #, fuzzy, c-format msgid "Proxy stored at %s" msgstr "ProxyStore: %s" #: src/services/gridftpd/userspec.cpp:195 #, c-format msgid "Undefined control sequence: %%%s" msgstr "" #: src/services/gridftpd/userspec.cpp:218 #, fuzzy, c-format msgid "Local user %s does not exist" msgstr "Lock-Datei %s existiert nicht" #: src/services/gridftpd/userspec.cpp:227 #, fuzzy, c-format msgid "Local group %s does not exist" msgstr "Lock-Datei %s existiert nicht" #: src/services/gridftpd/userspec.cpp:232 #, fuzzy, c-format msgid 
"Remapped to local user: %s" msgstr "Fehler bei Unlock von Datei %s: %s" #: src/services/gridftpd/userspec.cpp:233 #, fuzzy, c-format msgid "Remapped to local id: %i" msgstr "Grid Identität wird zugewiesen zu lokaler Identität '%s'" #: src/services/gridftpd/userspec.cpp:234 #, c-format msgid "Remapped to local group id: %i" msgstr "" #: src/services/gridftpd/userspec.cpp:235 #, c-format msgid "Remapped to local group name: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:236 #, c-format msgid "Remapped user's home: %s" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:103 #, c-format msgid "Loading %u-th Python service" msgstr "Lade %u-th Python Service" #: src/services/wrappers/python/pythonwrapper.cpp:107 #, c-format msgid "Initialized %u-th Python service" msgstr "Initialisierte %u-th Python servce" #: src/services/wrappers/python/pythonwrapper.cpp:142 msgid "Invalid class name" msgstr "Ungültiger Klassenname" #: src/services/wrappers/python/pythonwrapper.cpp:147 #, c-format msgid "class name: %s" msgstr "Klassenname: %s" #: src/services/wrappers/python/pythonwrapper.cpp:148 #, c-format msgid "module name: %s" msgstr "Modulname: %s" #: src/services/wrappers/python/pythonwrapper.cpp:205 #, fuzzy msgid "Cannot find ARC Config class" msgstr "Kann UserConfig Klasse nicht finden" #: src/services/wrappers/python/pythonwrapper.cpp:212 #, fuzzy msgid "Config class is not an object" msgstr "UserConfig Klasse ist kein Objekt" #: src/services/wrappers/python/pythonwrapper.cpp:220 msgid "Cannot get dictionary of module" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:229 msgid "Cannot find service class" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:238 msgid "Cannot create config argument" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:245 #, fuzzy msgid "Cannot convert config to Python object" msgstr "Kann UserConfig nicht zu python Objekt konvertieren" #: src/services/wrappers/python/pythonwrapper.cpp:268 #, c-format msgid "%s is not an object" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:274 #, fuzzy msgid "Message class is not an object" msgstr "Klasse %s ist kein Objekt" #: src/services/wrappers/python/pythonwrapper.cpp:280 msgid "Python Wrapper constructor succeeded" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:295 #, c-format msgid "Python Wrapper destructor (%d)" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:328 msgid "Python interpreter locked" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:332 msgid "Python interpreter released" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:403 msgid "Python wrapper process called" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:412 msgid "Failed to create input SOAP container" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:422 msgid "Cannot create inmsg argument" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:436 #, fuzzy msgid "Cannot find ARC Message class" msgstr "Kann arc ExecutionTarget Klasse nicht finden" #: src/services/wrappers/python/pythonwrapper.cpp:442 #, fuzzy msgid "Cannot convert inmsg to Python object" msgstr "Kann doc nicht zu Python Objekt konvertieren" #: src/services/wrappers/python/pythonwrapper.cpp:451 msgid "Failed to create SOAP containers" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:457 msgid "Cannot create outmsg argument" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:463 #, fuzzy msgid "Cannot convert outmsg to Python object" msgstr "Kann 
doc nicht zu Python Objekt konvertieren" #: src/tests/client/test_ClientInterface.cpp:36 #: src/tests/client/test_ClientSAML2SSO.cpp:68 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:78 #: src/tests/echo/test_clientinterface.cpp:41 #: src/tests/echo/test_clientinterface.cpp:132 #: src/tests/echo/test_clientinterface.py:12 msgid "Creating a soap client" msgstr "Lege SOAP Clietn an" #: src/tests/client/test_ClientInterface.cpp:73 #: src/tests/client/test_ClientSAML2SSO.cpp:47 #: src/tests/client/test_ClientSAML2SSO.cpp:71 #: src/tests/count/test_client.cpp:61 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:85 #: src/tests/echo/echo_test4axis2c/test_client.cpp:56 #: src/tests/echo/test.cpp:62 src/tests/echo/test_client.cpp:72 #: src/tests/echo/test_clientinterface.cpp:67 #: src/tests/echo/test_clientinterface.cpp:107 #: src/tests/echo/test_clientinterface.cpp:136 #: src/tests/echo/test_clientinterface.py:22 msgid "Creating and sending request" msgstr "Erstellen und senden von Anfrage" #: src/tests/client/test_ClientInterface.cpp:84 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:97 #: src/tests/echo/test_clientinterface.cpp:78 #: src/tests/echo/test_clientinterface.py:30 msgid "SOAP invocation failed" msgstr "SOAP Aufruf fehlgeschlagen" #: src/tests/client/test_ClientSAML2SSO.cpp:44 #: src/tests/echo/test_clientinterface.cpp:100 msgid "Creating a http client" msgstr "Lege HTTP Client an" #: src/tests/client/test_ClientSAML2SSO.cpp:55 #: src/tests/echo/test_clientinterface.cpp:117 #, fuzzy msgid "HTTP with SAML2SSO invocation failed" msgstr "SOAP Aufruf fehlgeschlagen" #: src/tests/client/test_ClientSAML2SSO.cpp:59 #: src/tests/echo/test_clientinterface.cpp:121 msgid "There was no HTTP response" msgstr "Keine HTTP Antwort erhalten" #: src/tests/client/test_ClientSAML2SSO.cpp:77 #: src/tests/echo/test_clientinterface.cpp:145 #, fuzzy msgid "SOAP with SAML2SSO invocation failed" msgstr "SOAP Aufruf fehlgeschlagen" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:37 #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:38 #: src/tests/delegation/test_delegation_client.cpp:45 #: src/tests/delegation/test_delegation_client.cpp:76 #: src/tests/echo/test_clientinterface.cpp:172 #: src/tests/echo/test_clientinterface.cpp:194 msgid "Creating a delegation soap client" msgstr "" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:46 #: src/tests/delegation/test_delegation_client.cpp:51 #: src/tests/echo/test_clientinterface.cpp:178 msgid "Delegation to ARC delegation service failed" msgstr "" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:50 #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:49 #: src/tests/delegation/test_delegation_client.cpp:56 #: src/tests/delegation/test_delegation_client.cpp:88 #: src/tests/echo/test_clientinterface.cpp:182 #: src/tests/echo/test_clientinterface.cpp:205 #, c-format msgid "Delegation ID: %s" msgstr "Delegation ID: %s" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:58 #, c-format msgid "Delegated credential from delegation service: %s" msgstr "" #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:45 #: src/tests/delegation/test_delegation_client.cpp:83 #: src/tests/echo/test_clientinterface.cpp:201 msgid "Delegation to gridsite delegation service failed" msgstr "" #: src/tests/count/count.cpp:58 msgid "Input is not SOAP" msgstr "" #: src/tests/count/count.cpp:89 src/tests/echo/echo.cpp:83 msgid "echo: Unauthorized" msgstr "echo: Unauthorisiert" #: 
src/tests/count/count.cpp:98 src/tests/count/count.cpp:104 #, c-format msgid "Request is not supported - %s" msgstr "" #: src/tests/count/test_client.cpp:50 #: src/tests/echo/echo_test4axis2c/test_client.cpp:43 #: src/tests/echo/test_client.cpp:59 msgid "Failed to load client configuration" msgstr "" #: src/tests/count/test_client.cpp:54 #: src/tests/echo/echo_test4axis2c/test_client.cpp:47 #: src/tests/echo/test.cpp:58 src/tests/echo/test_client.cpp:63 msgid "Client side MCCs are loaded" msgstr "" #: src/tests/count/test_client.cpp:57 #: src/tests/echo/echo_test4axis2c/test_client.cpp:50 #: src/tests/echo/test_client.cpp:66 msgid "Client chain does not have entry point" msgstr "" #: src/tests/count/test_client.cpp:84 #: src/tests/echo/echo_test4axis2c/test_client.cpp:74 #: src/tests/echo/test.cpp:74 src/tests/echo/test_client.cpp:90 msgid "Request failed" msgstr "" #: src/tests/count/test_client.cpp:90 #: src/tests/echo/echo_test4axis2c/test_client.cpp:80 #: src/tests/echo/test.cpp:79 src/tests/echo/test_client.cpp:96 msgid "There is no response" msgstr "" #: src/tests/count/test_client.cpp:97 #: src/tests/echo/echo_test4axis2c/test_client.cpp:87 #: src/tests/echo/test_client.cpp:103 msgid "Response is not SOAP" msgstr "" #: src/tests/count/test_service.cpp:22 src/tests/echo/test.cpp:23 #: src/tests/echo/test_service.cpp:22 msgid "Creating service side chain" msgstr "" #: src/tests/count/test_service.cpp:25 src/tests/echo/test.cpp:26 #: src/tests/echo/test_service.cpp:25 msgid "Failed to load service configuration" msgstr "" #: src/tests/count/test_service.cpp:30 src/tests/echo/test_service.cpp:30 msgid "Service is waiting for requests" msgstr "Service wartet auf Anfragen" #: src/tests/echo/test.cpp:32 #, fuzzy msgid "Creating client interface" msgstr "Erstelle Client Schnitstelle" #: src/tests/echo/test.cpp:82 msgid "Request succeed!!!" msgstr "" #, fuzzy #~ msgid "" #~ "Supported constraints are:\n" #~ " validityStart=time (e.g. 2008-05-29T10:20:30Z; if not specified, start " #~ "from now)\n" #~ " validityEnd=time\n" #~ " validityPeriod=time (e.g. 43200 or 12h or 12H; if both validityPeriod " #~ "and validityEnd\n" #~ " not specified, the default is 12 hours for local proxy, and 168 hours " #~ "for delegated\n" #~ " proxy on myproxy server)\n" #~ " vomsACvalidityPeriod=time (e.g. 43200 or 12h or 12H; if not specified, " #~ "the default\n" #~ " is the minimum value of 12 hours and validityPeriod)\n" #~ " myproxyvalidityPeriod=time (lifetime of proxies delegated by myproxy " #~ "server,\n" #~ " e.g. 43200 or 12h or 12H; if not specified, the default is the minimum " #~ "value of\n" #~ " 12 hours and validityPeriod (which is lifetime of the delegated proxy " #~ "on myproxy server))\n" #~ " proxyPolicy=policy content\n" #~ " proxyPolicyFile=policy file" #~ msgstr "" #~ "Поддерживаемые ограничениÑ:\n" #~ " validityStart=Ð²Ñ€ÐµÐ¼Ñ (например, 2008-05-29T10:20:30Z; еÑли не указано, " #~ "то начинаетÑÑ Ð½ÐµÐ¼ÐµÐ´Ð»ÐµÐ½Ð½Ð¾)\n" #~ " validityEnd=времÑ\n" #~ " validityPeriod=Ð²Ñ€ÐµÐ¼Ñ (например, 43200, или 12h, или 12H; еÑли не " #~ "указаны ни validityPeriod,\n" #~ " ни validityEnd, то Ñрок дейÑÑ‚Ð²Ð¸Ñ Ð¿Ð¾ умолчанию ÑоÑтавлÑет 12 чаÑов)\n" #~ " vomsACvalidityPeriod=Ð²Ñ€ÐµÐ¼Ñ (например, 43200, или 12h, или 12H; еÑли не " #~ "указано, то иÑпользуетÑÑ\n" #~ " значение validityPeriod)\n" #~ " proxyPolicy=Ñодержимое норматива\n" #~ " proxyPolicyFile=файл норматива" #, fuzzy #~ msgid "" #~ "print all information about this proxy. 
\n" #~ " In order to show the Identity (DN without CN as suffix for " #~ "proxy) \n" #~ " of the certificate, the 'trusted certdir' is needed." #~ msgstr "" #~ "вывеÑти вÑÑŽ информацию о данной доверенноÑти. \n" #~ " Ð”Ð»Ñ Ð²Ñ‹Ð²Ð¾Ð´Ð° перÑональной информации (DN без CN как ÑÑƒÑ„Ñ„Ð¸ÐºÑ " #~ "доверенноÑти) \n" #~ " из Ñертификата, необходим 'trusted certdir'." #, fuzzy #~ msgid "username to MyProxy server" #~ msgstr "Nutzername bei myproxy Server" #, fuzzy #~ msgid "" #~ "command to MyProxy server. The command can be PUT or GET.\n" #~ " PUT/put/Put -- put a delegated credential to the MyProxy " #~ "server; \n" #~ " GET/get/Get -- get a delegated credential from the MyProxy " #~ "server, \n" #~ " credential (certificate and key) is not needed in this " #~ "case. \n" #~ " MyProxy functionality can be used together with VOMS\n" #~ " functionality.\n" #~ msgstr "" #~ "инÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ Ñерверу MyProxy. Возможны две инÑтрукции: PUT и GET:\n" #~ " PUT/put -- Ñохранить делегированный Ñертификат на Ñервере " #~ "MyProxy;\n" #~ " GET/get -- получить делегированный Ñертификат Ñ Ñервера " #~ "MyProxy,\n" #~ " в Ñтом Ñлучае не требуютÑÑ Ð»Ð¸Ñ‡Ð½Ñ‹Ðµ Ñертификаты и " #~ "ключи.\n" #~ " ИнÑтрукции MyProxy и VOMS могут иÑпользоватьÑÑ " #~ "одновременно.\n" #, fuzzy #~ msgid "Failed to add extension: %s" #~ msgstr "Fehler bei Lesen von Objekt %s: %s" #~ msgid "There are %d certificates in the returned msg" #~ msgstr "Es sind %d Zertifikate in der zurückgelieferten Nachricht." #, fuzzy #~ msgid "service message" #~ msgstr "Service Nachricht" #~ msgid "path to config file" #~ msgstr "Pfad zu Konfigurationsdatei" #~ msgid "[-]name" #~ msgstr "[-]Name" #, fuzzy #~ msgid "Query is not a valid XML" #~ msgstr "" #~ "Указанный URL\n" #~ " %1 \n" #~ " Ñодержит ошибки." #~ msgid "Creating an A-REX client" #~ msgstr "Lege A-REX client an." #, fuzzy #~ msgid "Unable to create SOAP client used by AREXClient." #~ msgstr "Konnte SOAP client nicht für AREXClient anlegen." #, fuzzy #~ msgid "Failed locating credentials." #~ msgstr "Fehler bei der Initialisierung der delegation credentials" #, fuzzy #~ msgid "Failed initiate client connection." #~ msgstr "Fehler bei Schließen von Verbindung 1" #, fuzzy #~ msgid "Re-creating an A-REX client" #~ msgstr "Lege A-REX client an." #~ msgid "AREXClient was not created properly." #~ msgstr "AREXClient wurde nicht richtig angelegt." #, fuzzy #~ msgid "%s request to %s failed. No expected response." #~ msgstr "Anfrage %s an %s schlug fehl. Leere Anwort." 
#~ msgid "Creating and sending submit request to %s" #~ msgstr "Erstelle und sende submit Anfrage an %s" #, fuzzy #~ msgid "Unable to retrieve status of job (%s)" #~ msgstr "Konnte status Beschreibung des jobs (%s) nicht erhalten: " #, fuzzy #~ msgid "Creating and sending ISIS information query request to %s" #~ msgstr "Erstelle und send ISIS information query request an %s" #, fuzzy #~ msgid "Service %s of type %s ignored" #~ msgstr "Service %s des Typ %s wurde ignoriert" #~ msgid "No execution services registered in the index service" #~ msgstr "Keine execution services in index service registriert" #, fuzzy #~ msgid "Creating and sending terminate request to %s" #~ msgstr "Erstelle und sende terminate request an %s" #, fuzzy #~ msgid "Creating and sending clean request to %s" #~ msgstr "Erstelle und sende clean request an %s" #, fuzzy #~ msgid "Creating and sending job description retrieval request to %s" #~ msgstr "Erstelle und sende job description retrieval request an %s" #, fuzzy #~ msgid "Creating and sending job migrate request to %s" #~ msgstr "Erstelle und sende job migrate request an %s" #, fuzzy #~ msgid "Renewal of ARC1 jobs is not supported" #~ msgstr "Das Erneuern von ARC1 Jobs wird nicht unterstützt" #~ msgid "Failed retrieving job status information" #~ msgstr "Konnte Job Status Information nicht beziehen." #, fuzzy #~ msgid "Cleaning of BES jobs is not supported" #~ msgstr "Das Löschen von BES Jobs wird nicht unterstützt" #, fuzzy #~ msgid "Renewal of BES jobs is not supported" #~ msgstr "Das Erneuern von BES Jobs wird nicht unterstützt" #, fuzzy #~ msgid "Resuming BES jobs is not supported" #~ msgstr "Ein Resume von BES jobs wird nicht unterstützt" #~ msgid "Failed retrieving job IDs: Unsupported url (%s) given" #~ msgstr "" #~ "Konnte job IDs nicht bestimmen: Nicht unterstützte URL erhalten (%s)" #~ msgid "Failed retrieving job IDs" #~ msgstr "Konnt job IDs nicht erhalten." #~ msgid "" #~ "Error encoutered during job ID retrieval. All job IDs might not have been " #~ "retrieved" #~ msgstr "" #~ "Fehler beim Bestimmen der job ID. Womöglich wurde keine job ID erhalten." #~ msgid "No job identifier returned by BES service" #~ msgstr "Kein Job identifier von BES service zurückerhalten" #, fuzzy #~ msgid "Failed adapting job description to target resources" #~ msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #~ msgid "No job identifier returned by A-REX" #~ msgstr "A-REX lieferte keinen Job Identifikator zurück" #~ msgid "The Service doesn't advertise its Quality Level." #~ msgstr "Der Service gibt seinen Quality Level nicht an." #, fuzzy #~ msgid "Generating A-REX target: %s" #~ msgstr "Generiere A-REX target: %s" #~ msgid "The Service doesn't advertise its Interface." #~ msgstr "Der Service gibt seine Interface nicht an." #~ msgid "The Service doesn't advertise its Serving State." #~ msgstr "Der Servcice gibt seinen Serving State nicht an." #, fuzzy #~ msgid "Failed to cancel job: %s" #~ msgstr "Fehler beim Authentifizieren: %s" #~ msgid "Creating a CREAM client" #~ msgstr "Anlegen eines CREAM client" #~ msgid "Unable to create SOAP client used by CREAMClient." #~ msgstr "Konnte SOAP client nicht anlegen für CREAMClient." 
#, fuzzy #~ msgid "CREAMClient not created properly" #~ msgstr "CREAMClient nicht richtig angelegt" #, fuzzy #~ msgid "Empty response" #~ msgstr "Leere Antwort" #, fuzzy #~ msgid "Request failed: %s" #~ msgstr "Anfrage fehlgeschlagen: %s" #~ msgid "Creating and sending a status request" #~ msgstr "Erstellen und senden einer Status-Anfrage" #, fuzzy #~ msgid "Unable to retrieve job status." #~ msgstr "Konnte Job Status-Informationen nicht erhalten." #~ msgid "Creating and sending request to terminate a job" #~ msgstr "Erstellen und senden von Anfrage, einen Job zu beenden" #~ msgid "Creating and sending request to clean a job" #~ msgstr "Erstlelen und senden einer Anfragen einen Job zu löschen" #, fuzzy #~ msgid "Creating and sending request to resume a job" #~ msgstr "Erstellen und senden von Anfrage, einen Job zu beenden" #, fuzzy #~ msgid "Creating and sending request to list jobs" #~ msgstr "Erstlelen und senden einer Anfragen einen Job zu löschen" #~ msgid "Creating and sending job register request" #~ msgstr "Erstellen und senden einer Anfragen, eien Job zu registrieren" #, fuzzy #~ msgid "No job ID in response" #~ msgstr "Keine Job ID in Antwort" #~ msgid "Creating and sending job start request" #~ msgstr "Erstellen und senden einer Anfrage, einen Job zu starten" #, fuzzy #~ msgid "Creating delegation" #~ msgstr "Erstelle Delegation" #, fuzzy #~ msgid "Delegatable credentials expired: %s" #~ msgstr "" #~ "Делегированные параметры доÑтупа:\n" #~ " %s" #, fuzzy #~ msgid "Failed signing certificate request" #~ msgstr "Fehler beim Signieren der Anfrage nach Austellen eines Zertifikats" #, fuzzy #~ msgid "Failed putting signed delegation certificate to service" #~ msgstr "" #~ "Der Transfer des signierten delegation certificate zu Service schlug fehl" #, fuzzy #~ msgid "Failed cleaning job: %s" #~ msgstr "Löschen fehlgeschlagen von job: %s" #, fuzzy #~ msgid "Failed canceling job: %s" #~ msgstr "Abbruch fehlgeschlagen von job: %s" #, fuzzy #~ msgid "Renewal of CREAM jobs is not supported" #~ msgstr "Erneuerung von CREAM jobs wird nicht unterstützt" #, fuzzy #~ msgid "Failed resuming job: %s" #~ msgstr "Löschen fehlgeschlagen von job: %s" #, fuzzy #~ msgid "Failed creating signed delegation certificate" #~ msgstr "Erstellen eines singed delegation certificate ist fehlgeschlagen" #, fuzzy #~ msgid "Unable to submit job. 
Job description is not valid in the %s format" #~ msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #, fuzzy #~ msgid "Failed registering job" #~ msgstr "Konnte job nicht registrieren" #~ msgid "Failed starting job" #~ msgstr "Konnte job nicht starten" #, fuzzy #~ msgid "Failed creating singed delegation certificate" #~ msgstr "Erstellen eines singed delegation certificate ist fehlgeschlagen" #, fuzzy #~ msgid "Found %u service endpoints from the index service at %s" #~ msgstr "Fand %u execution services des index service %s" #, fuzzy #~ msgid "Cleaning of UNICORE jobs is not supported" #~ msgstr "Das Löschen von BES Jobs wird nicht unterstützt" #, fuzzy #~ msgid "Canceling of UNICORE jobs is not supported" #~ msgstr "Erneuerung von UNICORE Jobs wird nicht unterstützt" #, fuzzy #~ msgid "Renewal of UNICORE jobs is not supported" #~ msgstr "Erneuerung von UNICORE Jobs wird nicht unterstützt" #, fuzzy #~ msgid "Resumation of UNICORE jobs is not supported" #~ msgstr "Resume von UNICORE jobs wird nicht unterstützt" #~ msgid "Creating a UNICORE client" #~ msgstr "Erstellen von UNICORE client" #~ msgid "Failed to find delegation credentials in client configuration" #~ msgstr "Konnte delegation credentials in Client Konfiguration nicht finden" #~ msgid "Failed to initiate delegation" #~ msgstr "Initiierung der Delegation fehlgeschlagen" #~ msgid "Submission request failed" #~ msgstr "Submission von Anfrage schlug fehl" #~ msgid "Submission request succeed" #~ msgstr "Submission von Anfrage ist erfolgt" #~ msgid "There was no response to a submission request" #~ msgstr "Keine Antwort zu submission request erhalten" #~ msgid "A response to a submission request was not a SOAP message" #~ msgstr "Eine Antwort zu submission request war keine SOAP message" #, fuzzy #~ msgid "There is no connection chain configured" #~ msgstr "Es ist keine connetion chain konfiguriert" #, fuzzy #~ msgid "Submission returned failure: %s" #~ msgstr "Submission ergab Fehler: %s" #, fuzzy #~ msgid "Submission failed, service returned: %s" #~ msgstr "Submission fehlgeschlagen, Service erwiderte: %s" #, fuzzy #~ msgid "Creating and sending a start job request" #~ msgstr "Erstelle und sende eine start job Anfrage" #, fuzzy #~ msgid "A start job request failed" #~ msgstr "Eine start job Anfrage schlug fehl" #, fuzzy #~ msgid "A start job request succeeded" #~ msgstr "Eine start job Anfrage war erfolgreich" #, fuzzy #~ msgid "There was no response to a start job request" #~ msgstr "Keine Antwort zu start job Anfrage erhalten" #, fuzzy #~ msgid "The response of a start job request was not a SOAP message" #~ msgstr "Die Antwort zu start job Anfrage war keine SOAP Nachricht" #~ msgid "A status request failed" #~ msgstr "Eine Anfrage nach dem Status schlug fehl" #~ msgid "A status request succeed" #~ msgstr "Die Anfrage nach dem Status war erfolgreich" #~ msgid "There was no response to a status request" #~ msgstr "Es gab keine Antwort zu einer Status Anfrage" #~ msgid "The response of a status request was not a SOAP message" #~ msgstr "Die Antwort auf eine Status Anfrage war keine SOAP Nachricht" #~ msgid "The job status could not be retrieved" #~ msgstr "Der Job Status konnte nicht ermittelt werden" #, fuzzy #~ msgid "Creating and sending an index service query" #~ msgstr "Erstellen und senden einer Index Service Anfrage" #~ msgid "Creating and sending a service status request" #~ msgstr "Erstlelen und senden einer Service Status Anfrage" #, fuzzy #~ msgid "A service status request failed" #~ msgstr "Eine 
Service Status Anfrage schlug fehl" #, fuzzy #~ msgid "A service status request succeeded" #~ msgstr "Eine Service Status Anfrage war erfolgreich" #~ msgid "There was no response to a service status request" #~ msgstr "Es gab keine Antwort zu einer Service Status Anfrage" #~ msgid "The response of a service status request was not a SOAP message" #~ msgstr "Die Antwort zu einer Service Status Anfrage war keine SOAP message" #~ msgid "The service status could not be retrieved" #~ msgstr "Der Service Status konnte nicht ermittelt werden" #, fuzzy #~ msgid "A job termination request failed" #~ msgstr "Die Job Terminierungs-Anfrage schlug fehl" #, fuzzy #~ msgid "A job termination request succeed" #~ msgstr "Eine Job Terminierungs-Anfrage war erfolgreich" #~ msgid "There was no response to a job termination request" #~ msgstr "Es gab keine Antwort zu einer Job Terminierungs-Anfrage" #~ msgid "The response of a job termination request was not a SOAP message" #~ msgstr "" #~ "Die Antwort zu einer Job Terminierungs-Anfrage war keine SOAP Nachricht" #, fuzzy #~ msgid "A job cleaning request failed" #~ msgstr "Die Job Terminierungs-Anfrage schlug fehl" #~ msgid "There was no response to a job cleaning request" #~ msgstr "Keine Antwort auf eine Job Löschen-Anfrage erhalten" #~ msgid "The response of a job cleaning request was not a SOAP message" #~ msgstr "Die Antwort auf eine Job Löschen-Anfrage war keine SOAP Nachricht" #~ msgid "" #~ "checingBartenderURL: Response:\n" #~ "%s" #~ msgstr "" #~ "checkingBartenderURL: Response:\n" #~ "%s" #~ msgid "" #~ "nd:\n" #~ "%s" #~ msgstr "" #~ "nd:\n" #~ "%s" #, fuzzy #~ msgid "Not a collection" #~ msgstr "Nicht verbunden" #~ msgid "Recieved transfer URL: %s" #~ msgstr "Erhielt transfer URL: %s" #, fuzzy #~ msgid "Calculated checksum: %s" #~ msgstr "Errechneted checksum: %s" #~ msgid "Deleted %s" #~ msgstr "Löschte %s" #, fuzzy #~ msgid "Adding resoure-id value: %s" #~ msgstr "Addressen-Auflösung schlug fehl: %s" #, fuzzy #~ msgid "Failed to authenticate to token %s." 
#~ msgstr "Fehler beim Authentifizieren: %s" #, fuzzy #~ msgid "Failed to delete private key that attaches to certificate: %s" #~ msgstr "Fehler bei Lesen von privater Schlüssel-Datei: %s" #, fuzzy #~ msgid "Failed to delete stale remote cache file %s: %s" #~ msgstr "Fehler bei Lesen von Datei %s: %s" #, fuzzy #~ msgid "Failed to release lock on remote cache file %s" #~ msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #, fuzzy #~ msgid "Replicating file %s to local cache file %s" #~ msgstr "Fehler bei Umbenennen von temporärer Datei %s zu Lock_datei %s: %s" #, fuzzy #~ msgid "Failed to delete bad copy of remote cache file %s at %s: %s" #~ msgstr "Fehler bei Anlegen von hard link von %s zu %s: %s" #, fuzzy #~ msgid "Using remote cache file %s for url %s" #~ msgstr "Fehler bei Entfernen von Cache-Datei %s: %s" #, fuzzy #~ msgid "Failed to remove registration from %s ISIS" #~ msgstr "Fehler beim Entfernen der location vom LFC: %s" #, fuzzy #~ msgid "Failed to remove registration from %s EMIRegistry" #~ msgstr "Fehler beim Entfernen der location vom LFC: %s" #, fuzzy #~ msgid "Sent entry: %s" #~ msgstr "Identität: %s" #, fuzzy #~ msgid "Failed to %s to EMIRegistry (%s) - %d" #~ msgstr "Fehler bei Verbinden zu server %s:%d" #, fuzzy #~ msgid "Failed processing user mapping command: unixmap %s" #~ msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #, fuzzy #~ msgid "Failed processing user mapping command: unixgroupmap %s" #~ msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #, fuzzy #~ msgid "Failed processing user mapping command: unixlistmap %s" #~ msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #, fuzzy #~ msgid "Storing configuration in temporary file %s" #~ msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #, fuzzy #~ msgid "Failed to process service configuration" #~ msgstr "Pfad zu VOMS Server Konfigurationsdatei" #~ msgid "" #~ "ChangeActivityStatus: request = \n" #~ "%s" #~ msgstr "" #~ "ChangeActivityStatus: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #, fuzzy #~ msgid "ChangeActivityStatus: old A-REX state does not match" #~ msgstr "" #~ "ChangeActivityStatus: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #, fuzzy #~ msgid "ChangeActivityStatus: Failed to update credentials" #~ msgstr "" #~ "ChangeActivityStatus: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #, fuzzy #~ msgid "ChangeActivityStatus: Failed to resume job" #~ msgstr "" #~ "ChangeActivityStatus: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "" #~ "ChangeActivityStatus: response = \n" #~ "%s" #~ msgstr "" #~ "ChangeActivityStatus: ответ = \n" #~ "%s" #~ msgid "" #~ "CreateActivity: request = \n" #~ "%s" #~ msgstr "" #~ "CreateActivity: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "" #~ "CreateActivity: response = \n" #~ "%s" #~ msgstr "" #~ "CreateActivity: ответ = \n" #~ "%s" #~ msgid "" #~ "GetActivityDocuments: request = \n" #~ "%s" #~ msgstr "" #~ "GetActivityDocuments: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #, fuzzy #~ msgid "GetActivityDocuments: non-AREX job requested" #~ msgstr "" #~ "GetActivityDocuments: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "" #~ "GetActivityDocuments: response = \n" #~ "%s" #~ msgstr "" #~ "GetActivityDocuments: ответ = \n" #~ "%s" #~ msgid "" #~ "GetActivityStatuses: request = \n" #~ "%s" #~ msgstr "" #~ "GetActivityStatuses: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #, fuzzy #~ msgid "GetActivityStatuses: unknown verbosity level requested: %s" #~ msgstr "" #~ "GetActivityStatuses: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "" #~ "GetActivityStatuses: response = \n" #~ "%s" #~ msgstr "" #~ "GetActivityStatuses: ответ = \n" #~ "%s" #~ msgid "" #~ 
"GetFactoryAttributesDocument: request = \n" #~ "%s" #~ msgstr "" #~ "GetFactoryAttributesDocument: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "" #~ "GetFactoryAttributesDocument: response = \n" #~ "%s" #~ msgstr "" #~ "GetFactoryAttributesDocument: ответ = \n" #~ "%s" #, fuzzy #~ msgid "Can not find queue '%s' in the configuration file" #~ msgstr "Delegation Authorisierung fehlgeschlagen" #, fuzzy #~ msgid "Checking a match for '%s'" #~ msgstr "Suche nache Existenz von %s" #, fuzzy #~ msgid "Can't interpret configuration file %s as XML" #~ msgstr "Pfad zu VOMS Server Konfigurationsdatei" #, fuzzy #~ msgid "Type in LRMS is missing" #~ msgstr "Location fehlt" #, fuzzy #~ msgid "Command for authPlugin is missing" #~ msgstr "Location fehlt" #, fuzzy #~ msgid "The delegationDB element is incorrect value" #~ msgstr "Delegation ID: %s" #, fuzzy #~ msgid "Running command %s" #~ msgstr "Kommando: %s" #, fuzzy #~ msgid "" #~ "Registered static information: \n" #~ " doc: %s" #~ msgstr "Fehler bei Bezug von Information für Job: %s" #~ msgid "ServiceURL missing" #~ msgstr "ServiceURL fehlt" #~ msgid "Protocol is %s, should be https" #~ msgstr "Protokol ist %s, sollte https sein" #, fuzzy #~ msgid "Aggregation record (%s) read from file successful." #~ msgstr "Verzeichnis %s erfolgreich entfernt" #, fuzzy #~ msgid "Aggregation record (%s) stored successful." #~ msgstr "Verzeichnis %s erfolgreich entfernt" #, fuzzy #~ msgid "year: %s" #~ msgstr "header: %s" #, fuzzy #~ msgid "moth: %s" #~ msgstr "Proxy Pfad: %s" #, fuzzy #~ msgid "queue: %s" #~ msgstr "Anfrage: %s" #, fuzzy #~ msgid "query: %s" #~ msgstr "Anfrage: %s" #, fuzzy #~ msgid "list size: %d" #~ msgstr "Zeige Antwort: %s" #, fuzzy #~ msgid "XML: %s" #~ msgstr "XML Antwort: %s" #, fuzzy #~ msgid "synch message: %s" #~ msgstr "Service Nachricht" #, fuzzy #~ msgid "VO filter for host: %s" #~ msgstr " Filter: %s" #~ msgid "Read archive file %s" #~ msgstr "Lese Archiv Datei %s" #~ msgid "Failed to write file %s: %s" #~ msgstr "Fehler bei Schreiben zu Datein %s: %s" #, fuzzy #~ msgid "Incoming time range: %s" #~ msgstr "Verbindung zu %s schlug fehl: %s" #~ msgid "Interactive mode." #~ msgstr "Interaktiver Modus." 
#, fuzzy #~ msgid "Could not open log directory \"%s\": %s" #~ msgstr "" #~ "Ðевозможно открыть каталог Ñо Ñправкой:\n" #~ "%s" #, fuzzy #~ msgid "Could not open output directory \"%s\": %s" #~ msgstr "" #~ "Ðевозможно открыть каталог Ñо Ñправкой:\n" #~ "%s" #~ msgid "" #~ "MigrateActivity: request = \n" #~ "%s" #~ msgstr "" #~ "MigrateActivity: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "" #~ "MigrateActivity: response = \n" #~ "%s" #~ msgstr "" #~ "MigrateActivity: отзыв = \n" #~ "%s" #~ msgid "" #~ "TerminateActivities: request = \n" #~ "%s" #~ msgstr "" #~ "TerminateActivities: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #, fuzzy #~ msgid "TerminateActivities: non-AREX job requested" #~ msgstr "" #~ "TerminateActivities: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "" #~ "TerminateActivities: response = \n" #~ "%s" #~ msgstr "" #~ "TerminateActivities: ответ = \n" #~ "%s" #, fuzzy #~ msgid "failed to initialize environment variables" #~ msgstr "Fehler bei der Initialisierung der delegation credentials" #, fuzzy #~ msgid "Mapfile is missing at %s" #~ msgstr "Location fehlt" #, fuzzy #~ msgid "There is no local mapping for user" #~ msgstr "Es ist keine connetion chain konfiguriert" #, fuzzy #~ msgid "Initially mapped to local user: %s" #~ msgstr "Grid Identität wird zugewiesen zu lokaler Identität '%s'" #, fuzzy #~ msgid "Local user does not exist" #~ msgstr "Lock-Datei %s existiert nicht" #, fuzzy #~ msgid "Cannot find ARC XMLNode class" #~ msgstr "Kann custom broker Klasse nicht finden" #~ msgid "Cannot create doc argument" #~ msgstr "Kann doc Argument nicht anlegen" #, fuzzy #~ msgid "Cannot convert doc to Python object" #~ msgstr "Kann doc nicht zu Python Objekt konvertieren" #~ msgid "Can not get the delegation credential: %s from delegation service:%s" #~ msgstr "" #~ "Kann delegation credential nicht erhalten: %s von delegation service: %s" #, fuzzy #~ msgid "Head: can't process file %s" #~ msgstr "Lese Archiv Datei %s" #, fuzzy #~ msgid "wrong number in %s" #~ msgstr "Schließe Verbindung" #, fuzzy #~ msgid "CacheService: Unauthorized" #~ msgstr "echo: Unauthorisiert" #~ msgid "libjvm.so not loadable - check your LD_LIBRARY_PATH" #~ msgstr "libjvm.so nicht ladbar - überprüfe LD_LIBRARY_PATH" #~ msgid "JVM started" #~ msgstr "JVM gestartet" #~ msgid "Java object returned NULL status" #~ msgstr "Java Objekt gab NULL status an" #~ msgid "use GSI proxy (RFC 3820 compliant proxy is default)" #~ msgstr "" #~ "иÑпользовать доверенноÑть GSI (по умолчанию иÑпользуетÑÑ\n" #~ " RFC 3820-ÑовмеÑÑ‚Ð¸Ð¼Ð°Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñть)" #, fuzzy #~ msgid "Unable to create directory %s: No valid credentials found" #~ msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #, fuzzy #~ msgid "Unable to rename %s: No valid credentials found" #~ msgstr "Konnte Job Beschreibung nicht an Resourcen des Ziels anpassen: %s" #~ msgid "Submit: Failed to disconnect after submission" #~ msgstr "Submit: Fehler bei Disconnect nach Submission" #~ msgid "year" #~ msgid_plural "years" #~ msgstr[0] "Jahre" #~ msgstr[1] "Jahr" #~ msgstr[2] "Jahre" #~ msgid "month" #~ msgid_plural "months" #~ msgstr[0] "Monate" #~ msgstr[1] "Monat" #~ msgstr[2] "Monate" #~ msgid "day" #~ msgid_plural "days" #~ msgstr[0] "Tage" #~ msgstr[1] "Tag" #~ msgstr[2] "Tage" #, fuzzy #~ msgid "arc_to_voms - %u attributes" #~ msgstr " Attribute" #, fuzzy #~ msgid "Plugin failed: %s" #~ msgstr "PASV fehlgeschlagen: %s" #, fuzzy #~ msgid "Failed to report renewed proxy to job" #~ msgstr "Fehler bei Lesen von Proxy-Datei: %s" #, fuzzy #~ msgid "Connecting to %s:%i" #~ 
msgstr "Verbindung zu %s schlug fehl: %s" #, fuzzy #~ msgid "Querying at %s" #~ msgstr "Anfrage: %s" #, fuzzy #~ msgid "Failed to get results from LDAP server %s" #~ msgstr "Fehler beim Verbinden zu RLS server: %s" #, fuzzy #~ msgid "couldn't open file %s" #~ msgstr "Konnte Datei mit Job Beschreibung nicht öffnen: %s" #, fuzzy #~ msgid "couldn't process VO configuration" #~ msgstr "Pfad zu VOMS Server Konfigurationsdatei" #, fuzzy #~ msgid "can't parse configuration line: %s %s %s %s" #~ msgstr "Fehler beim Laden der Service Konfigurationsdatei %s" #, fuzzy #~ msgid "unsupported configuration command: %s" #~ msgstr "Nicht unterstützte URL für Ziel: %s" #, fuzzy #~ msgid "Match group: %s" #~ msgstr "Fataler Fehler: %s" #, fuzzy #~ msgid "Failed writing RSL" #~ msgstr "Konnte job nicht starten" #, fuzzy #~ msgid "RSL could not be evaluated: %s" #~ msgstr "Datei konnte nicht zu Running Zustand bewegt werden: %s" #, fuzzy #~ msgid "Can't evaluate RSL fragment: %s" #~ msgstr "Kann stat-Informationen zu Datei nicht einholen: %s" #, fuzzy #~ msgid "Error reading user generated output file list in %s" #~ msgstr "" #~ "\n" #~ "%s: ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð²Ñ…Ð¾Ð´Ð½Ð¾Ð³Ð¾ файла '%s': %s\n" #, fuzzy #~ msgid "Failed writing output status file" #~ msgstr "Fehler bei Lesen von Dateiliste" #, fuzzy #~ msgid "Failed to upload (but may be retried) %s" #~ msgstr "Fehler bei Verbinden zu ldap server (%s)" #, fuzzy #~ msgid "Some uploads failed, but (some) may be retried" #~ msgstr "Fehler bei Verbinden zu ldap server (%s)" #~ msgid "" #~ "Argument to -c has the format Flavour:URL e.g.\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/nordugrid-cluster-name=grid.tsl.uu.se,Mds-" #~ "Vo-name=local,o=grid" #~ msgstr "" #~ "Ðргумент опции -c задаётÑÑ Ð¿Ð¾ форме Flavour:URL, например:\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/nordugrid-cluster-name=grid.tsl.uu.se,Mds-" #~ "Vo-name=local,o=grid" #~ msgid "explicity select or reject a specific cluster" #~ msgstr "explizit einen Cluster auswählen oder ablehnen" #~ msgid "" #~ "Argument to -i has the format Flavour:URL e.g.\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/mds-vo-name=sweden,O=grid\n" #~ "CREAM:ldap://cream.grid.upjs.sk:2170/o=grid\n" #~ "\n" #~ "Argument to -c has the format Flavour:URL e.g.\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/nordugrid-cluster-name=grid.tsl.uu.se,Mds-" #~ "Vo-name=local,o=grid" #~ msgstr "" #~ "Ðргумент опции -i задаётÑÑ Ð¿Ð¾ форме Flavour:URL, например:\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/mds-vo-name=sweden,O=grid\n" #~ "CREAM:ldap://cream.grid.upjs.sk:2170/o=grid\n" #~ "\n" #~ "Ðргумент опции -c задаётÑÑ Ð¿Ð¾ форме Flavour:URL, например:\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/nordugrid-cluster-name=grid.tsl.uu.se,Mds-" #~ "Vo-name=local,o=grid" #~ msgid "explicity select or reject an index server" #~ msgstr "Ausdrücklich einen Index Server bestimmen oder ablehnen" #~ msgid "" #~ "The arcmigrate command is used for migrating queud jobs to another " #~ "cluster.\n" #~ "Note that migration is only supported between ARC1 clusters." #~ msgstr "" #~ "Das arcmigrate Kommando dient zur Verteilung von bereits hochgeladenen " #~ "Jobs zwischen Clustern. Diese Migration wird nur zwischen ARC1 Clustern " #~ "unterstützt." #, fuzzy #~ msgid "select broker method (Random (default), FastestQueue, or custom)" #~ msgstr "" #~ "wählt Methode für eine Verteilung von Jobs zwischen Clustern (Random " #~ "(engl. 
für zufällig, die Voreinstellung, FastestQueue (die Queue mit den " #~ "schnellsten Rechnern), oder custom (für eigenes))" #~ msgid "[job ...]\n" #~ msgstr "[Job ...]\n" #, fuzzy #~ msgid "file where the jobs will be stored" #~ msgstr "Datei, in der Jobs abgelegt werden" #~ msgid "explicity select or reject a specific cluster for the new job" #~ msgstr "positive oder negative Selektion eines Clusters für einen Job" #~ msgid "No jobs to resubmit" #~ msgstr "Keine erneut hochzuladenen Jobs" #~ msgid "Submission to %s failed, trying next target" #~ msgstr "Hochladen zu %s schlug fehl, versuche nächtes Ziel" #~ msgid "Job resubmitted with new jobid: %s" #~ msgstr "Job erneut hochgeladen mit neuer Job ID: %s" #~ msgid "Job could not be killed or cleaned" #~ msgstr "Job konnte nicht abgebrochen oder gelöscht werden" #~ msgid "url of the policy decision service" #~ msgstr "URL des Policy Decision Service" #~ msgid "use SAML 2.0 profile of XACML v2.0 to contact the service" #~ msgstr "nutze SAML 2.0 Profil von XACML v2.0 um Server zu kontaktieren" #~ msgid "path to private key file" #~ msgstr "Pfad zu Datei mit privatem Schlüssel" #~ msgid "URL of SLCS service" #~ msgstr "URL des SLCS Service" #~ msgid "IdP name" #~ msgstr "IdP Name" #~ msgid "Password for user account to IdP" #~ msgstr "Passwort des user accountd für IdP" #~ msgid "Private key passphrase" #~ msgstr "Passphrase für privaten Schlüssel" #~ msgid "passphrase" #~ msgstr "Passphrase" #~ msgid "Source probably does not exist" #~ msgstr "Quelle existiert vermutlich nicht" #~ msgid "Current transfer FAILED: %s - %s" #~ msgstr "Aktueller Transfer SCHLUG FEHL: %s - %s" #~ msgid "Transfer FAILED: %s - %s" #~ msgstr "Transfer FEHELER: %s - %s" #~ msgid "isis" #~ msgstr "isis" #~ msgid "method" #~ msgstr "Methode" #~ msgid " ISIS tester start!" #~ msgstr "ISIS tester Start" #~ msgid "Disconnect: Failed quitting: %s" #~ msgstr "Disconnect: Verlassen der Veerbindung fehlgeschlagen: %s" #~ msgid "Submit: Failed to modify job description to be sent to target." #~ msgstr "" #~ "Submit: Konnte Job Beschreibungung für die Sendung zum Ziel nicht " #~ "modifizieren" #, fuzzy #~ msgid "" #~ "Trying to migrate to %s: Migration to a ARC0 cluster is not supported." #~ msgstr "" #~ "Versuch zu migieren zu %s: Migration zu ARC0 cluster wird nicht " #~ "unterstützt" #~ msgid "TargetRetriverARC0 initialized with %s service url: %s" #~ msgstr "TargetRetrieverARC0 initialisiert mit %s service URL: %s" #, fuzzy #~ msgid "Failed locating delegation credentials in chain configuration" #~ msgstr "" #~ "Fehler bei Lokalisation der delegation credentials in chain Konfiguration" #, fuzzy #~ msgid "Found malformed job state string: %s" #~ msgstr "Fand ungültig formulierte Job Zustandsbeschreibung: %s\n" #~ msgid "TargetRetriverARC1 initialized with %s service url: %s" #~ msgstr "TargetRetriverARC1 initialisiert mit %s service url: %s" #~ msgid "TargetRetriverBES initialized with %s service url: %s" #~ msgstr "TargetRetriverBES initialisiert mit %s Service URL: %s" #~ msgid "" #~ "Matching against job description,following targets possible for " #~ "BenchmarkBroker: %d" #~ msgstr "" #~ "Abgleich mit Job Beschreibung, die folgenden Ziele sind möglich für " #~ "BenchmarkBroker: %d" #, fuzzy #~ msgid "%d. Cluster: %s; Queue: %s" #~ msgstr "%d. 
Cluster: %s; Queue: %s" #~ msgid "Cluster will be ranked according to the %s benchmark scenario" #~ msgstr "Cluster wird bewertet anhand des %s benchmark" #~ msgid "Best targets are: %d" #~ msgstr "Die besten Ziel sind: %d" #~ msgid "" #~ "Matching against job description, following targets possible for " #~ "DataBroker: %d" #~ msgstr "" #~ "Abgleich mit Job Beschreibung, die folgenden Ziele sind möglich für " #~ "DataBroker: %d" #~ msgid "FastestQueueBroker is filtering %d targets" #~ msgstr "FastestQueueBroker filter %d Ziele" #~ msgid "FastestQueueBroker will rank the following %d targets" #~ msgstr "FastestQueueBroker bewertet die folgenden %d Ziele:" #~ msgid "" #~ "Matching against job description, following targets possible for " #~ "RandomBroker: %d" #~ msgstr "" #~ "Abgleich gegen Job Beschreibung, folgende Ziele sind möglich für " #~ "RandomBroker: %d" #, fuzzy #~ msgid "" #~ "Trying to migrate to %s: Migration to a CREAM cluster is not supported." #~ msgstr "" #~ "Versuch zu migrieren zu %s: MIgration zu einem CREAM cluster wird nicht " #~ "unterstützt" #~ msgid "TargetRetriverCREAM initialized with %s service url: %s" #~ msgstr "TargetRetrieverCREAM initialisiert mit %s service URL: %s" #~ msgid "Cannot convert arc module name to Python string" #~ msgstr "Kann arc Modulename nicht zu Python Zeichenkette konvertierten" #~ msgid "Cannot import arc module" #~ msgstr "Kann arc Modul nicht importieren" #, fuzzy #~ msgid "" #~ "Trying to migrate to %s: Migration to a UNICORE cluster is not supported." #~ msgstr "" #~ "Versuch zu migrieren zu %s: Migration zu einem UNICORE cluster wird nicht " #~ "unterstützt" #~ msgid "TargetRetriverUNICORE initialized with %s service url: %s" #~ msgstr "TargetRetriverUNICORE initialisiert mit %s service URL: %s" #~ msgid "File is not accessible: %s - %s" #~ msgstr "Dati ist nicht zugreifbar: %s - %s" #~ msgid "delete_ftp: globus_ftp_client_delete timeout" #~ msgstr "delete_ftp: globus_ftp_client_delete Zeitüberschreitung" #, fuzzy #~ msgid "Response(%i): %s" #~ msgstr "Antwort: %s" #, fuzzy #~ msgid "Failed to close connection 3" #~ msgstr "Fehler bei Schließen von Verbindung 3" #, fuzzy #~ msgid "Using supplied guid %s" #~ msgstr "Nutze angegebene guid %s" #~ msgid "meta_get_data: size: %llu" #~ msgstr "meta_get_data: Größe: %llu" #~ msgid "meta_get_data: created: %s" #~ msgstr "meta_get_data: angelegt: %s" #~ msgid "LFN is missing in LFC (needed for replication)" #~ msgstr "LFN fehlt in LFC (benötigt für Replikation)" #~ msgid "LFN already exists in LFC" #~ msgstr "LFN existiert bereits in LFC" #~ msgid "Creating LFC directory %s" #~ msgstr "Anlegen von LFC Verzeichnis %s" #~ msgid "Error creating required LFC dirs: %s" #~ msgstr "Fehler bei Anlegen benötigter LFC Verzeichnisse: %s" #~ msgid "Error creating LFC entry: %s" #~ msgstr "Fehler bei Anlegen von LFC Eintrag: %s" #, fuzzy #~ msgid "Error finding info on LFC entry %s which should exist: %s" #~ msgstr "Fehler beim Anlegen von LFC Eintrag %s, guid %s: %s" #~ msgid "Error creating LFC entry %s, guid %s: %s" #~ msgstr "Fehler beim Anlegen von LFC Eintrag %s, guid %s: %s" #, fuzzy #~ msgid "Error entering metadata: %s" #~ msgstr "Fehler bei Eingabe von Metadaten: %s" #~ msgid "No GUID defined for LFN - probably not preregistered" #~ msgstr "Keine GUID definiert für LFN - vielleicht nicht preregistriert" #~ msgid "Error adding replica: %s" #~ msgstr "Fehler beim Hinzufügen von replica: %s" #, fuzzy #~ msgid "Entering checksum type %s, value %s, file size %llu" #~ msgstr "Eingabe von checksum Typ 
%s, Wert %s, Dateigröße %llu" #~ msgid "Failed to remove LFN in LFC - You may need to do it by hand" #~ msgstr "" #~ "Fehler beim Entfernen von LFN in LFC - Sie müssen dies wohl von Hand " #~ "erledigen" #, fuzzy #~ msgid "Error getting replicas: %s" #~ msgstr "Fehler bei Erhalt der replicas: %s" #~ msgid "Failed to remove location from LFC" #~ msgstr "Fehler beim Entfernen von location von LFC" #~ msgid "Failed to remove LFC directory: directory is not empty" #~ msgstr "" #~ "Fehler beim Entfernen von LFC Verzeichnis: Verzeichnis ist nicht leer" #~ msgid "Failed to remove LFN in LFC: %s" #~ msgstr "Fehler beim Entfernen von LFN in LFC: %s" #~ msgid "guid %s resolved to LFN %s" #~ msgstr "guid %s aufgelöst zu LFN %s" #~ msgid "Failed to find GUID for specified LFN in %s: %s" #~ msgstr "Konnte GUID für angegebenen LFN nicht finden in %s: %s" #~ msgid "There is no GUID for specified LFN in %s" #~ msgstr "Es gibt keine GUID für angegebenen LFN in %s" #~ msgid "Warning: can't get PFNs from server %s: %s" #~ msgstr "Warnung: Kann keine PFNs erhalten von server %s: %s" #, fuzzy #~ msgid "RLS URL must contain host" #~ msgstr "RLS URL muss Angabe von host enthalten" #, fuzzy #~ msgid "" #~ "Locations are missing in destination RLS url - will use those registered " #~ "with special name" #~ msgstr "" #~ "Lokalisation fehlen in Ziel RLS URL - werde die mit special name " #~ "registrierten nutzen" #~ msgid "LFN is missing in RLS (needed for replication)" #~ msgstr "LFN fehlt in RLS (benötigt für Replikation)" #~ msgid "LFN already exists in replica" #~ msgstr "LFN existiert bereits in replica" #~ msgid "Failed to check for existing LFN in %s: %s" #~ msgstr "Fehler bei Überprüfung für existierenden LFN in %s: %s" #, fuzzy #~ msgid "There is same LFN in %s" #~ msgstr "Es existiert dieselbe LFN in %s" #~ msgid "Failed to add LFN-GUID to RLS: %s" #~ msgstr "Fehler bei Hinzufüden von LFN-GUID zu RLS: %s" #~ msgid "Failed to create/add LFN-PFN mapping: %s" #~ msgstr "Fehler bei Anlegen/Hinzufügen von LFN-PFN Zuweisungen: %s" #~ msgid "Warning: failed to add attribute to RLS: %s" #~ msgstr "Warnung: Fehler bei Hinzufügen eines Attributs zu RLS: %s" #~ msgid "Warning: Failed to retrieve LFN/PFNs from %s: %s" #~ msgstr "Warnung: Fehler bei Bezug von LFN/PFNs von %s: %s" #~ msgid "SE location will be unregistered automatically" #~ msgstr "SE location wird automatisch deregistriert" #~ msgid "Warning: Failed to delete LFN/PFN from %s: %s" #~ msgstr "Warnung: Fehler beim Löschen von LFN/PFN von %s: %s" #~ msgid "LFN must be already deleted, try LRC anyway" #~ msgstr "LFN muss bereits gelöscht sein, versuche dennoch LRC" #, fuzzy #~ msgid "Failed to retrieve LFN/LRC: %s" #~ msgstr "Fehler bei Bezug von LFN/LRC: %s" #~ msgid "Warning: Failed to connect to LRC at %s: %s" #~ msgstr "Warnung. Fehler bei Verbindung zu LRC bei %s: %s" #~ msgid "No LFNs found in %s" #~ msgstr "Keine LFNs gefunden in %s" #~ msgid "Failed to retrieve list of LFNs/PFNs from %s" #~ msgstr "Fehler bei Bezug von List von LFNs/PFNs von %s" #~ msgid "lfn: %s(%s) - %s" #~ msgstr "lfn: %s(%s) - %s" #~ msgid "Warning: can't get list of RLIs from server %s: %s" #~ msgstr "Warnung. Erhalte keine Liste von RLIs von server %s: %s" #, fuzzy #~ msgid "Warning: can't get list of senders from server %s: %s" #~ msgstr "Warnung: Erhalte keine Liste von senders von Server %s: %s" #, fuzzy #~ msgid "" #~ "Warning: No space tokens found matching description! 
Will copy without " #~ "using token" #~ msgstr "" #~ "Warnung: Keine space tokens gefunden, die der Beschreibung entsprechen! " #~ "Kopiere ohne Nutzung der Token" #, fuzzy #~ msgid "start_reading_srm: looking for metadata: %s" #~ msgstr "StartReading: suche nach Metadaten: %s" #~ msgid "globus_io_register_read failed: %s" #~ msgstr "globus_io_register_read ist fehlgeschlagen: %s" #~ msgid "globus_io_register_write failed: %s" #~ msgstr "globus_io_register_write ist fehlgeschlagen: %s" #, fuzzy #~ msgid "clear_input: %s" #~ msgstr "clear_input: %s" #~ msgid "Connection closed" #~ msgstr "Verbindung geschlossen" #, fuzzy #~ msgid "Globus error (read): %s" #~ msgstr "Globus Fehler (Lesen): %s" #, fuzzy #~ msgid "*** Server response: %s" #~ msgstr "*** Server Antwort: %s" #, fuzzy #~ msgid "Failed wrapping GSI token: %s" #~ msgstr "Konnte GI token nicht wrappen: %s" #, fuzzy #~ msgid "Failed unwrapping GSI token: %s" #~ msgstr "Fehler bei unwrap des GSI token: %s" #, fuzzy #~ msgid "Unwrapped data does not fit into buffer" #~ msgstr "Unwrapped data passt nicht in Puffer" #, fuzzy #~ msgid "Urecognized SSL token received" #~ msgstr "Nicht erkannter SSL token erkannt" #, fuzzy #~ msgid "read_response_header: line: %s" #~ msgstr "read_response_header: Zeile: %s" #~ msgid "read_response_header: header finished" #~ msgstr "read_response_header: header beendet" #~ msgid "skip_response_entity" #~ msgstr "skip_response_entity" #~ msgid "skip_response_entity: size: %llu" #~ msgstr "skip_response_entity: Größe: %llu" #~ msgid "skip_response_entity: already have all" #~ msgstr "skip_response_entity: Sie haben bereits alle" #~ msgid "skip_response_entity: size left: %llu" #~ msgstr "skip_response_entity: Größe verbleibend: %llu" #~ msgid "skip_response_entity: to read: %llu" #~ msgstr "skip_response_entity: zu lesen: %llu" #~ msgid "skip_response_entity: timeout %llu" #~ msgstr "skip_response_entity: Zeitüberschreitung %llu" #~ msgid "skip_response_entity: read: %u (%llu)" #~ msgstr "skip_response_entity: gelesen: %u (%llu)" #~ msgid "skip_response_entity: read all" #~ msgstr "skip_response_entity: alles gelesen" #, fuzzy #~ msgid "skip_response_entity: no entity" #~ msgstr "skip_response_entity: no entity" #~ msgid "skip_response_entity: unknown size" #~ msgstr "skip_response_entity: unbekannte Größe" #, fuzzy #~ msgid "Timeout sending header" #~ msgstr "Zeitüberschreitung beim Senden des Header" #, fuzzy #~ msgid "Failure while receiving entity" #~ msgstr "Fehler beim Emfpangen von entity" #, fuzzy #~ msgid "Timeout while sending header" #~ msgstr "Zeitüberschreitung beim Senden von header" #~ msgid "GET: header is read - rest: %u" #~ msgstr "GET: header wird gelesen - verbleibend: %u" #~ msgid "GET: calling callback(rest): content: %s" #~ msgstr "GET: rufe callback(rest): Inhalt: %s" #~ msgid "GET: calling callback(rest): size: %u" #~ msgstr "GET: rufe callbeck(rest): Größe: %u" #~ msgid "GET: calling callback(rest): offset: %llu" #~ msgstr "GET: rufe callbeck(rest): offset: %llu" #, fuzzy #~ msgid "GET callback returned error" #~ msgstr "GET callback lieferte Fehlermeldung" #, fuzzy #~ msgid "Failed while reading response content" #~ msgstr "Fehler beim Lesen von Antwort Inhalt" #, fuzzy #~ msgid "Timeout while reading response content" #~ msgstr "Timeout beim Lesen von Antwort Inhalt" #, fuzzy #~ msgid "Error while reading response content" #~ msgstr "Fehler beim Lesen von Antwort Inhalt" #~ msgid "GET: calling callback: content: %s" #~ msgstr "GET: rufe callback: Inhalt: %s" #, fuzzy #~ msgid "GET: 
calling callback: size: %u" #~ msgstr "GET: rufe callback: Größe: %u" #~ msgid "GET: calling callback: offset: %llu" #~ msgstr "GET: rufe callback: offset: %llu" #, fuzzy #~ msgid "Timeout while sending SOAP request" #~ msgstr "Timeout beim Senden von SOAP request" #~ msgid "Error sending data to server" #~ msgstr "Fehler beim Senden von Daten zum Server" #~ msgid "SOAP request failed (get)" #~ msgstr "SOAP Anfrage fehlgeschlagen (get)" #~ msgid "SOAP request failed (getRequestStatus)" #~ msgstr "SOAP Anfrage fehlgeschlagen (getRequestStatus)" #~ msgid "SOAP request failed (put)" #~ msgstr "SOAP Anfrage fehlgeschlagen (put)" #~ msgid "SOAP request failed (copy)" #~ msgstr "SOAP Anfrage fehlgeschlagen (copy)" #~ msgid "SOAP request failed (setFileStatus)" #~ msgstr "SOAP Anfrage fehlgeschlagen (setFileStatus)" #~ msgid "SOAP request failed (SRMv1Meth__advisoryDelete)" #~ msgstr "SOAP Anfragen fehlgeschlagen (SRMv1Meth__advisoryDelete)" #~ msgid "SOAP request failed (getFileMetaData)" #~ msgstr "SOAP Anfragen fehlgeschlagen (getFileMetaData)" #~ msgid "SOAP request failed (%s)" #~ msgstr "SOAP Anfrage fehlgeschlagen (%s)" #~ msgid "Error: PrepareToGet request timed out after %i seconds" #~ msgstr "" #~ "Fehler: Zeitüberschreitung bei PrepareToGet Anfrage nach %i Sekunden" #~ msgid "Request is reported as ABORTED" #~ msgstr "Anfrage wurde berichtet als ABORTED (abgebrochen)" #~ msgid "Error: PrepareToPut request timed out after %i seconds" #~ msgstr "" #~ "Fehler: Zeitüberschreitung bei PrepareToPut Anfrage nach %i Sekunden" #~ msgid "Error: Ls request timed out after %i seconds" #~ msgstr "Fehler: Zeitüberschreitung bei Ls Anfrage nach %i Sekunden" #~ msgid "Error: copy request timed out after %i seconds" #~ msgstr "Fehler: Zeitüberschreitung bei Kopieranfrage nach %i Sekunden " #, fuzzy #~ msgid "SOAP request failed (srmMkdir)" #~ msgstr "SOAP Anfrage schlug fehl (srmMkdir)" #, fuzzy #~ msgid "Error opening srm info file %s:%s" #~ msgstr "Fehler bei Öffnen von Meta-Datei %s: %s" #, fuzzy #~ msgid "Trying to open confirm site %s" #~ msgstr "" #~ "Ошибка при попытке открыть файл:\n" #~ " %1" #~ msgid "Source is bad URL or can't be used due to some reason" #~ msgstr "" #~ "Quelle ist eine schlechte URL oder kann aus irgendeinem Grund nicht " #~ "genutzt werden." #~ msgid "Destination is bad URL or can't be used due to some reason" #~ msgstr "" #~ "Ziel ist eine schlechte URL oder kann aus irgendeinem Grund nicht genutzt " #~ "werden." 
#, fuzzy #~ msgid "Failed while transfering data (mostly timeout)" #~ msgstr "Fehler bei Datentransfer (überwiegend Zeitüberschreitung)" #, fuzzy #~ msgid "Error creating file %s with mkstemp(): %s" #~ msgstr "Fehler bei Anlegen von %s mit mkstemp(): %s" #~ msgid "Error opening lock file we just renamed successfully %s: %s" #~ msgstr "" #~ "Fehler bei Öfnnen von Lock-Datei die gerade erfolgreich umgenannt wurde " #~ "%s: %s" #~ msgid "" #~ "Lock that recently existed has been deleted by another process, calling " #~ "Start() again" #~ msgstr "" #~ "Lock das zuvor existierte wurde gelöscht von anderem Prozess, rufe " #~ "Start() nochmals" #~ msgid "Error opening valid and existing lock file %s: %s" #~ msgstr "Fehler bei Öffnen von gültiger und existierener Lock-Datei %s: %s" #~ msgid "Error reading valid and existing lock file %s: %s" #~ msgstr "Fehler bei Lesen von gültiger und existierender Lock-Datei %s: %s" #, fuzzy #~ msgid "Error creating tmp file %s for remote lock with mkstemp(): %s" #~ msgstr "Fehler bei Anlegen von %s mit mkstemp(): %s" #, fuzzy #~ msgid "Error writing to tmp lock file for remote lock %s: %s" #~ msgstr "Fehler beim Schreiben zu tmp lock Datei %s: %s" #, fuzzy #~ msgid "Warning: closing tmp lock file for remote lock %s failed" #~ msgstr "Warnung: Schließen von tmp Lock Datei %s fehlgeschlagen" #, fuzzy #~ msgid "Error renaming tmp file %s to lock file %s for remote lock: %s" #~ msgstr "Fehler bei Umbenennen von temporärer Datei %s zu Lock_datei %s: %s" #, fuzzy #~ msgid "" #~ "Error renaming lock file for remote lock, even though rename() did not " #~ "return an error: %s" #~ msgstr "" #~ "Fehler bei Umbenennen von Lock-Datei, obwohl rename() keinen Fehler " #~ "zurücklieferte" #, fuzzy #~ msgid "" #~ "Error opening lock file for remote lock we just renamed successfully %s: " #~ "%s" #~ msgstr "" #~ "Fehler bei Öfnnen von Lock-Datei die gerade erfolgreich umgenannt wurde " #~ "%s: %s" #, fuzzy #~ msgid "" #~ "The remote cache file is currently locked with a valid lock, will " #~ "download from source" #~ msgstr "Die Datei ist derzeit gelockt mit einem gültigen Lock" #, fuzzy #~ msgid "Failed to create file %s for writing: %s" #~ msgstr "Fehler bei Anlegen von Datei %s zum Schreiben: %s" #, fuzzy #~ msgid "Error: Cache file %s does not exist" #~ msgstr "Cache-Datei %s existiert nicht" #~ msgid "Failed to change permissions of session dir to 0700: %s" #~ msgstr "Konnte Zugriffsrechte auf session dir nicht ändern zu 0700: %s" #~ msgid "Error opening per-job dir %s: %s" #~ msgstr "Fehler bei Öffnen von per-job Verzeichnis %s. 
%s" #, fuzzy #~ msgid "Mismatching url in file %s: %s Expected %s" #~ msgstr "Nicht-Übereinstimmung von URL in Datei %s: %s erwartete %s" #, fuzzy #~ msgid "Bad separator in file %s: %s" #~ msgstr "Ungültges Trennzeichen in Datei %s: %s" #, fuzzy #~ msgid "Bad value of expiry time in %s: %s" #~ msgstr "Ungültiger Wert für expiry time in %s: %s" #, fuzzy #~ msgid "Error opening lock file %s: %s" #~ msgstr "Fehler bei Öffnen von Lock Datei %s: %s" #, fuzzy #~ msgid "Can't read user list in specified file %s" #~ msgstr "Fehler bei Lesen von Zertifikat-Datei: %s" #, fuzzy #~ msgid "%s: State: %s: failed to create temporary proxy for renew: %s" #~ msgstr "Fehler bei Erstellen von Info-Datei %s: %s" #, fuzzy #~ msgid "%s: adding to transfer share %s" #~ msgstr "Datentransfer abgebrochen: %s" #, fuzzy #~ msgid "NULL response" #~ msgstr "Keine SOAP Antwort" #, fuzzy #~ msgid "" #~ "Not authorized from Charon service; Some of the RequestItem does not " #~ "satisfy Policy" #~ msgstr "" #~ "UnAuthorisiert von arc.pdp; einige der ReqestItems genügen nicht der " #~ "Policy" #, fuzzy #~ msgid "Loading policy from %s" #~ msgstr "Lade python broker (%i)" #, fuzzy #~ msgid "Old policy times: %u/%u" #~ msgstr "Policy Zeile: %s" #, fuzzy #~ msgid "New policy times: %u/%u" #~ msgstr "Policy Zeile: %s" #, fuzzy #~ msgid "Misisng or empty CertificatePath element in the configuration!" #~ msgstr "Fehlendes oder leeres CertificatePath Element" #, fuzzy #~ msgid "Missing or empty CACertificatesDir element in the configuration!" #~ msgstr "Fehlendes oder leeres CertificatePath Element" #, fuzzy #~ msgid "Missing or empty CACertificatePath element in the configuration!" #~ msgstr "Fehlendes oder leeres CertificatePath Element" #~ msgid "" #~ "Thread %d: Task %d Result:\n" #~ "%s\n" #~ msgstr "" #~ "Поток %d: Задание %d Результат:\n" #~ "%s\n" #~ msgid "Is connected to database? %s" #~ msgstr "Ist verbunden mit Datenban? %s" #~ msgid "Can not decrypt the EncryptedID from saml assertion" #~ msgstr "Konnte die Encrypted ID von SAML Assertion nicht entschlüsseln" #~ msgid "Decrypted SAML NameID: %s" #~ msgstr "Entschlüsselter SAML NameID: %s" #~ msgid "Request succeeded!!!" #~ msgstr "Anfragen Erfolgreich!!!" 
#~ msgid "%d <> %d" #~ msgstr "%d <> %d" #~ msgid "Invalid status report" #~ msgstr "Ungültiger Status Report:" #~ msgid "%s reports job status of %s but it is running on %s" #~ msgstr "%s berichtet Job Status von %s aber läuft auf %s" #~ msgid "%s try to status change: %s->%s" #~ msgstr "%s versuch Status zu ändern: %s -> %s" #~ msgid "refresh: Cannot abort transaction: %s" #~ msgstr "refresh: Kann Transaktion nicht abbrechen: %s" #~ msgid "refresh: Error during transaction: %s" #~ msgstr "refresh: Fehler bei Transaktion: %s" #~ msgid "operator[]: Cannot abort transaction: %s" #~ msgstr "operator[]: Kann Transaktion nicht abbrechen: %s" #~ msgid "remove: Cannot abort transaction: %s" #~ msgstr "remove: Kann Transaktion nicht abbrechen: %s" #, fuzzy #~ msgid "There is no X509Request node in the request message" #~ msgstr "Es ist kein X509Request Knoten in der request Nachricht" #~ msgid "Composed DN: %s" #~ msgstr "Zusammengestellte DN: %s" #~ msgid "CentralAHash constructor called" #~ msgstr "CentralAHash Konstruktor aufgerufen" #~ msgid "Error importing class" #~ msgstr "Fehler bei Importieren von Klasse" #~ msgid "ReplicatedAHash constructor called" #~ msgstr "ReplicatedAHash aconstructor aufgrufen" #, fuzzy #~ msgid "sending message of length" #~ msgstr "sende Nachricht der Läng %d an %s" #~ msgid "sendt message, success=%s" #~ msgstr "Nachricht gesende, Erfolg=%s" #~ msgid "processing message..." #~ msgstr "verarbeite Nachricht" #~ msgid "processing message... Finished" #~ msgstr "Verarbeiten der Nachricht beendet" #~ msgid "Couldn't start replication manager." #~ msgstr "Konnte replication manager nicht starten" #~ msgid "Could not find checking period, using default 10s" #~ msgstr "Konnte checking period nicht finden, nutze Voreinstellung von 10s" #~ msgid "Bad cache size or no cache size configured, using 10MB" #~ msgstr "" #~ "Ungültige cache Größe oder keine cache Größe konfiguriert, nutze 10MB" #~ msgid "master locking" #~ msgstr "master setzt lock" #~ msgid "unlocking" #~ msgstr "entferne lock" #~ msgid "unlocked" #~ msgstr "lock entfernt" #~ msgid "couldn't unlock" #~ msgstr "konnte lock nicht entfernen" #~ msgid "checkingThread slept %d s" #~ msgstr "checkingThread schlief %d s" #, fuzzy #~ msgid "wrote ahash list %s" #~ msgstr "schrieb ahash Liste %s" #~ msgid "but dbenv wasn't ready." 
#~ msgstr "aber dbenv war nicht bereit" #~ msgid "Couldn't start replication framework" #~ msgstr "Konnte replication framework nicht starten" #, fuzzy #~ msgid "entered election thread" #~ msgstr "Starte Auswahl-Thread" #, fuzzy #~ msgid "%s: my role is" #~ msgstr "%s: meine Rolle ist %d" #, fuzzy #~ msgid "%s: my role is now" #~ msgstr "%s: meine Rolle ist nun %d" #, fuzzy #~ msgid "Couldn't run election" #~ msgstr "Konnte Auswahl nicht vornehmen" #, fuzzy #~ msgid "num_reps is %(nr)d, votes is %(v)d, hostMap is %(hm)s" #~ msgstr "num_reps ist %d, Stimmen sind %d, hostMap ist %s" #~ msgid "entering startElection" #~ msgstr "Start von startElection" #~ msgid "new role" #~ msgstr "neue Rolle" #~ msgid "Couldn't begin role" #~ msgstr "Konnte Rolle nicht beginnen" #~ msgid "entering send" #~ msgstr "Start von send" #, fuzzy #~ msgid "failed to send to" #~ msgstr "Fehler beim Senden von body" #~ msgid "Master is offline, starting re-election" #~ msgstr "Master ist offline, starte Neuwahl" #~ msgid "entering repSend" #~ msgstr "Starte repSend" #~ msgid "entering sendNewSiteMsg" #~ msgstr "Start von sendNewSiteMsg" #~ msgid "entering sendHeartbeatMsg" #~ msgstr "Start von sendHeartbeatMsg" #~ msgid "entering sendNewMasterMsg" #~ msgstr "Start von sendNewMasterMsg" #~ msgid "entering processMessage from " #~ msgstr "verarbeite processMessage von " #~ msgid "received message from myself!" #~ msgstr "erhielt Nachricht von mir selbst!" #~ msgid "received from new sender or sender back online" #~ msgstr "erhalten von neuem Sender der Sender ist wieder online" #~ msgid "received master id" #~ msgstr "erhielt master id" #~ msgid "received HEARTBEAT_MESSAGE" #~ msgstr "erhielt HEARTBEAT_MESSAGE" #~ msgid "received ELECTION_MESSAGE" #~ msgstr "erhielt ELECTION_MESSAGE" #~ msgid "received NEWSITE_MESSAGE" #~ msgstr "erhielt NEWSITE_MESSAGE" #~ msgid "processing message from %d" #~ msgstr "Verarbeite Nachricht von %d" #~ msgid "received DB_REP_NEWSITE from %s" #~ msgstr "erhielt DB_REP_NEWSITE von %s" #~ msgid "received DB_REP_HOLDELECTION" #~ msgstr "erhielt DB_REP_HODLELECTION" #~ msgid "REP_ISPERM returned for LSN %s" #~ msgstr "REP_ISPERM erhalten für LSN %s" #~ msgid "REP_NOTPERM returned for LSN %s" #~ msgstr "REP_NOTPERM erhalten für LSN %s" #~ msgid "REP_DUPMASTER received, starting new election" #~ msgstr "REP_DUPMASTER erhalten, stare neue Verbindung" #~ msgid "REP_IGNORE received" #~ msgstr "REP_IGNORE erhalten" #~ msgid "JOIN_FAILURE received" #~ msgstr "JOIN_FAILURE erhalten" #~ msgid "I am now a master" #~ msgstr "Ich bin nun ein master" #~ msgid "received DB_EVENT_REP_MASTER" #~ msgstr "erhielt DB_EVEN_REP_MASTER" #~ msgid "I am now a client" #~ msgstr "Ich bin nun ein Client" #~ msgid "Getting permission failed" #~ msgstr "Erlaubnis nicht erhalten" #~ msgid "New master elected" #~ msgstr "Neuer Master ausgewählt" #~ msgid "I won the election: I am the MASTER" #~ msgstr "Ich gewann die Auswahl: Ich bin der MASTER" #, fuzzy #~ msgid "Oops! Internal DB panic!" #~ msgstr "Ooops! Interne DB Panik!" #~ msgid "accessing gateway: %s" #~ msgstr "greife zu auf gateway: %s" #~ msgid "This bartender does not support gateway" #~ msgstr "Dieser Bartender benötigt keinen support gateway" #~ msgid "" #~ "cannot connect to gateway. Access of third party store required gateway." #~ msgstr "" #~ "kann nicht verbinden zu Gateway. Zugang zu store Dritter benötigt einen " #~ "Gatway." 
#~ msgid "Got Librarian URLs from the config:" #~ msgstr "Erhielt Librarian URLs von Konfiguration:" #, fuzzy #~ msgid "Librarian URL or ISIS URL not found in the configuration." #~ msgstr "Librarian URL oder ISIS URL nicht gefunden in der Konfiguration." #~ msgid "Got ISIS URL, starting initThread" #~ msgstr "Erhielt ISIS URL, startete initThread" #~ msgid "Trying to get Librarian from" #~ msgstr "Versuche Librarian zu erhalten von" #~ msgid "Got Librarian from ISIS:" #~ msgstr "Erhielt Librarian von ISIS:" #, fuzzy #~ msgid "Error connecting to ISIS %{iu}s, reason: %{r}s" #~ msgstr "Fehler beim Verbinden zu ISIS %s, Grund: %s" #~ msgid "Error in initThread: %s" #~ msgstr "Fehler in initThread: %s" #~ msgid "initThread finished, starting isisThread" #~ msgstr "initThread beended, starte isisThread" #, fuzzy #~ msgid "Error in isisThread: %s" #~ msgstr "Fehler in isisThread: %s" #~ msgid "//// _traverse request trailing slash removed:" #~ msgstr "//// bei _traverse Anfrage wurde terminaler Schrägstrich entfernt" #~ msgid "adding" #~ msgstr "beim Hinzufügen" #~ msgid "modifyMetadata response" #~ msgstr "modifyMetadata Antwort" #~ msgid "modifyMetadata failed, removing the new librarian entry" #~ msgstr "modifyMetadata failed, entferne den enuen librarian Eintrag" #~ msgid "Error creating new entry in Librarian: %s" #~ msgstr "Fehler beim Anlegen eines neuen Eintrags in Librarian: %s" #~ msgid "//// response from the external store:" #~ msgstr "//// Antwort von ausgewähltem store:" #~ msgid "location chosen:" #~ msgstr "ausgewählte Lokalisation:" #, fuzzy #~ msgid "ERROR from the chosen Shepherd" #~ msgstr "FEHLER bei ausgewähltem Shepherd" #~ msgid "addReplica" #~ msgstr "addReplica" #~ msgid "Registered Shepherds in Librarian" #~ msgstr "Registrierte Shepherds bei Librarian" #~ msgid "Alive Shepherds:" #~ msgstr "Aktive Shepherds:" #~ msgid "LN" #~ msgstr "LN" #~ msgid "metadata" #~ msgstr "Metadaten" #~ msgid "\\/\\/" #~ msgstr "\\/\\/" #~ msgid "removing" #~ msgstr "am Entfernen" #~ msgid "" #~ "The directory for storing proxies is not available. Proxy delegation " #~ "disabled." #~ msgstr "" #~ "Das Verzeichnis für die Ablage von Proxies ist nicht verfügbar. Proxy " #~ "delegation ausgesetzt." #~ msgid "Delegation status: " #~ msgstr "Delegation status: " #~ msgid "creating proxy file : " #~ msgstr "erstelle Proxy Datei : " #~ msgid "" #~ "cannot access proxy_store, Check the configuration file (service.xml)\n" #~ " Need to have a " #~ msgstr "" #~ "Kann auf Proxy Store nicht zugreifen. Überprüfe die Konfigurationsdatei " #~ "(service.xml)\n" #~ " Es wird ein benötigt" #~ msgid "removeCredentials: %s" #~ msgstr "removeCredentials: %s" #~ msgid "proxy store is not accessable." 
#~ msgstr "Proxy store nicht zugereifbar" #~ msgid "Error processing report message" #~ msgstr "Fehler bei Verarbeiten von report message" #~ msgid "Error traversing: %s" #~ msgstr "Fehler bei Traversieren: %s" #~ msgid "Error in traverseLN method: %s" #~ msgstr "Fehler in taverseLN Methode: %s" #~ msgid "Trying to get Bartender from" #~ msgstr "Zerstöre JVM" #~ msgid "Got Bartender from ISIS:" #~ msgstr "Erhielt Bartender von ISIS:" #~ msgid "" #~ "\n" #~ "CHECKSUM OK" #~ msgstr "" #~ "\n" #~ "CHECKSUM в порÑдке" #~ msgid "" #~ "\n" #~ "CHECKSUM MISMATCH" #~ msgstr "" #~ "\n" #~ "CHECKSUM не Ñовпадает" #~ msgid "\n" #~ msgstr "\n" #~ msgid "" #~ "\n" #~ "\n" #~ "File" #~ msgstr "" #~ "\n" #~ "\n" #~ "Файл" #~ msgid "" #~ "\n" #~ "\n" #~ "I have an invalid replica of file" #~ msgstr "" #~ "\n" #~ "\n" #~ "Обнаружена Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ ÐºÐ¾Ð¿Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð°" #~ msgid "" #~ "\n" #~ "\n" #~ msgstr "" #~ "\n" #~ "\n" #~ msgid "url of myproxy server" #~ msgstr "URL von myproxy Server" #~ msgid "Myproxy server return failure msg" #~ msgstr "Myproxy Server lieferte eine Fehlermeldung zurück." #, fuzzy #~ msgid "Malformated response" #~ msgstr "Antwort hält sich nicht an Format" #~ msgid "srmPing gives v2.2, instantiating v2.2 client" #~ msgstr "srmPing gibt v2.2, instanziierend v2.2 client" #~ msgid "SOAP error with srmPing, instantiating v1 client" #~ msgstr "SOAP Fehler mit srmPing, instanziierende v1 client" #~ msgid "Service error, cannot instantiate SRM client" #~ msgstr "Service Fehler, kann nicht SRM client instanziieren" #~ msgid "start_reading_ftp: size: url: %s" #~ msgstr "start_reading_ftp: url: %s" #, fuzzy #~ msgid "start_reading_ftp: failure" #~ msgstr "start_reading_ftp: Fehler" #~ msgid "Timeout waiting for FTP file size - cancel transfer" #~ msgstr "" #~ "Zeitüberschreitung bei Warten auf FTP Datei Größe - breche Transfer ab" #~ msgid "start_reading_ftp: failed to get file's size" #~ msgstr "start_reading_ftp: Fehler bei Bezug von Dateigröße" #~ msgid "start_reading_ftp: globus_ftp_client_modification_time failed" #~ msgstr "" #~ "start_reading_ftp: globus_ftp_client_modification_time fehlgeschlagen" #~ msgid "start_reading_ftp: failed to get file's modification time" #~ msgstr "start_reading_ftp: Fehler bei Bezug von Zeitpunkt letzter Änderung" #, fuzzy #~ msgid "start_reading_ftp: range is out of size" #~ msgstr "start_reading_ftp: Größe von Wert verlässt erwarteten Bereich" #~ msgid "%s tried election with %d replicas" #~ msgstr "%s initiierte Auswahl mit %d Replicas" #, fuzzy #~ msgid "store job descriptions in local sandbox." #~ msgstr "Lege Job Beschreibung in lokaler Sandbox ab." 
nordugrid-arc-6.14.0/po/PaxHeaders.30264/POTFILES.in0000644000000000000000000000013214152153475017537 xustar000000000000000030 mtime=1638455101.820598813 30 atime=1638455101.971601082 30 ctime=1638455103.888629886 nordugrid-arc-6.14.0/po/POTFILES.in0000644000175000002070000012062014152153475017525 0ustar00mockbuildmock00000000000000src/Test.cpp src/clients/compute/arccat.cpp src/clients/compute/arcclean.cpp src/clients/compute/arcget.cpp src/clients/compute/arcinfo.cpp src/clients/compute/arckill.cpp src/clients/compute/arcrenew.cpp src/clients/compute/arcresub.cpp src/clients/compute/arcresume.cpp src/clients/compute/arcstat.cpp src/clients/compute/arcsub.cpp src/clients/compute/arcsync.cpp src/clients/compute/arctest.cpp src/clients/compute/submit.cpp src/clients/compute/submit.h src/clients/compute/utils.cpp src/clients/compute/utils.h src/clients/credentials/arcproxy.cpp src/clients/credentials/arcproxy.h src/clients/credentials/arcproxy_myproxy.cpp src/clients/credentials/arcproxy_proxy.cpp src/clients/credentials/arcproxy_voms.cpp src/clients/data/arccp.cpp src/clients/data/arcls.cpp src/clients/data/arcmkdir.cpp src/clients/data/arcrename.cpp src/clients/data/arcrm.cpp src/doxygen/add-bindings-deviations-to-dox.py src/doxygen/create-mapping-documentation.py src/external/cJSON/cJSON.h src/hed/acc/ARCHERY/DescriptorsARCHERY.cpp src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.h src/hed/acc/ARCREST/DescriptorsARCREST.cpp src/hed/acc/ARCREST/JobControllerPluginREST.cpp src/hed/acc/ARCREST/JobControllerPluginREST.h src/hed/acc/ARCREST/JobListRetrieverPluginREST.cpp src/hed/acc/ARCREST/JobListRetrieverPluginREST.h src/hed/acc/ARCREST/SubmitterPluginREST.cpp src/hed/acc/ARCREST/SubmitterPluginREST.h src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.h src/hed/acc/Broker/BenchmarkBrokerPlugin.cpp src/hed/acc/Broker/BenchmarkBrokerPlugin.h src/hed/acc/Broker/DataBrokerPlugin.cpp src/hed/acc/Broker/DataBrokerPlugin.h src/hed/acc/Broker/DescriptorsBroker.cpp src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp src/hed/acc/Broker/FastestQueueBrokerPlugin.h src/hed/acc/Broker/NullBrokerPlugin.h src/hed/acc/Broker/RandomBrokerPlugin.h src/hed/acc/Broker/test/BenchmarkBrokerTest.cpp src/hed/acc/EMIES/DescriptorsEMIES.cpp src/hed/acc/EMIES/EMIESClient.cpp src/hed/acc/EMIES/EMIESClient.h src/hed/acc/EMIES/JobControllerPluginEMIES.cpp src/hed/acc/EMIES/JobControllerPluginEMIES.h src/hed/acc/EMIES/JobListRetrieverPluginEMIES.cpp src/hed/acc/EMIES/JobListRetrieverPluginEMIES.h src/hed/acc/EMIES/JobStateEMIES.cpp src/hed/acc/EMIES/JobStateEMIES.h src/hed/acc/EMIES/SubmitterPluginEMIES.cpp src/hed/acc/EMIES/SubmitterPluginEMIES.h src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.h src/hed/acc/EMIES/TestEMIESClient.cpp src/hed/acc/GRIDFTPJOB/DescriptorsGRIDFTPJOB.cpp src/hed/acc/GRIDFTPJOB/FTPControl.cpp src/hed/acc/GRIDFTPJOB/FTPControl.h src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.h src/hed/acc/GRIDFTPJOB/JobStateGRIDFTPJOB.cpp src/hed/acc/GRIDFTPJOB/JobStateGRIDFTPJOB.h src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.h src/hed/acc/JobDescriptionParser/ADLParser.cpp src/hed/acc/JobDescriptionParser/ADLParser.h src/hed/acc/JobDescriptionParser/DescriptorsJobDescriptionParser.cpp 
src/hed/acc/JobDescriptionParser/RSLParser.cpp src/hed/acc/JobDescriptionParser/RSLParser.h src/hed/acc/JobDescriptionParser/XMLNodeRecover.cpp src/hed/acc/JobDescriptionParser/XMLNodeRecover.h src/hed/acc/JobDescriptionParser/XRSLParser.cpp src/hed/acc/JobDescriptionParser/XRSLParser.h src/hed/acc/JobDescriptionParser/test/ADLParserTest.cpp src/hed/acc/JobDescriptionParser/test/XRSLParserTest.cpp src/hed/acc/LDAP/DescriptorsLDAP.cpp src/hed/acc/LDAP/Extractor.h src/hed/acc/LDAP/JobListRetrieverPluginLDAPGLUE2.cpp src/hed/acc/LDAP/JobListRetrieverPluginLDAPGLUE2.h src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.cpp src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.h src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.h src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.h src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.h src/hed/acc/PythonBroker/ACIXBroker.py src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp src/hed/acc/PythonBroker/PythonBrokerPlugin.h src/hed/acc/PythonBroker/SampleBroker.py src/hed/acc/TEST/BrokerPluginTestACC.h src/hed/acc/TEST/JobControllerPluginTestACC.cpp src/hed/acc/TEST/JobControllerPluginTestACC.h src/hed/acc/TEST/JobDescriptionParserPluginTestACC.cpp src/hed/acc/TEST/JobDescriptionParserPluginTestACC.h src/hed/acc/TEST/JobListRetrieverPluginTEST.cpp src/hed/acc/TEST/JobListRetrieverPluginTEST.h src/hed/acc/TEST/ServiceEndpointRetrieverPluginTEST.cpp src/hed/acc/TEST/ServiceEndpointRetrieverPluginTEST.h src/hed/acc/TEST/SubmitterPluginTestACC.cpp src/hed/acc/TEST/SubmitterPluginTestACC.h src/hed/acc/TEST/TargetInformationRetrieverPluginTEST.cpp src/hed/acc/TEST/TargetInformationRetrieverPluginTEST.h src/hed/acc/TEST/TestACCPluginDescriptors.cpp src/hed/daemon/options.cpp src/hed/daemon/options.h src/hed/daemon/unix/daemon.cpp src/hed/daemon/unix/daemon.h src/hed/daemon/unix/main_unix.cpp src/hed/dmc/acix/DataPointACIX.cpp src/hed/dmc/acix/DataPointACIX.h src/hed/dmc/file/DataPointFile.cpp src/hed/dmc/file/DataPointFile.h src/hed/dmc/gfal/DataPointGFAL.cpp src/hed/dmc/gfal/DataPointGFAL.h src/hed/dmc/gfal/DataPointGFALDelegate.cpp src/hed/dmc/gfal/DataPointGFALDelegate.h src/hed/dmc/gfal/GFALTransfer3rdParty.cpp src/hed/dmc/gfal/GFALTransfer3rdParty.h src/hed/dmc/gfal/GFALUtils.cpp src/hed/dmc/gfal/GFALUtils.h src/hed/dmc/gridftp/DataPointGridFTP.cpp src/hed/dmc/gridftp/DataPointGridFTP.h src/hed/dmc/gridftp/DataPointGridFTPDelegate.cpp src/hed/dmc/gridftp/DataPointGridFTPDelegate.h src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp src/hed/dmc/gridftp/DataPointGridFTPHelper.h src/hed/dmc/gridftp/Lister.cpp src/hed/dmc/gridftp/Lister.h src/hed/dmc/http/DataPointHTTP.cpp src/hed/dmc/http/DataPointHTTP.h src/hed/dmc/http/StreamBuffer.cpp src/hed/dmc/http/StreamBuffer.h src/hed/dmc/ldap/DataPointLDAP.cpp src/hed/dmc/ldap/DataPointLDAP.h src/hed/dmc/ldap/LDAPQuery.cpp src/hed/dmc/ldap/LDAPQuery.h src/hed/dmc/mock/DataPointMock.cpp src/hed/dmc/mock/DataPointMock.h src/hed/dmc/rucio/DataPointRucio.cpp src/hed/dmc/rucio/DataPointRucio.h src/hed/dmc/s3/DataPointS3.cpp src/hed/dmc/s3/DataPointS3.h src/hed/dmc/srm/DataPointSRM.cpp src/hed/dmc/srm/DataPointSRM.h src/hed/dmc/srm/srmclient/SRM1Client.cpp src/hed/dmc/srm/srmclient/SRM1Client.h src/hed/dmc/srm/srmclient/SRM22Client.cpp src/hed/dmc/srm/srmclient/SRM22Client.h src/hed/dmc/srm/srmclient/SRMClient.cpp 
src/hed/dmc/srm/srmclient/SRMClient.h src/hed/dmc/srm/srmclient/SRMClientRequest.h src/hed/dmc/srm/srmclient/SRMInfo.cpp src/hed/dmc/srm/srmclient/SRMInfo.h src/hed/dmc/srm/srmclient/SRMURL.cpp src/hed/dmc/srm/srmclient/SRMURL.h src/hed/dmc/xrootd/DataPointXrootd.cpp src/hed/dmc/xrootd/DataPointXrootd.h src/hed/dmc/xrootd/DataPointXrootdDelegate.cpp src/hed/dmc/xrootd/DataPointXrootdDelegate.h src/hed/identitymap/ArgusPDPClient.cpp src/hed/identitymap/ArgusPDPClient.h src/hed/identitymap/ArgusPEPClient.cpp src/hed/identitymap/ArgusPEPClient.h src/hed/identitymap/ArgusXACMLConstant.h src/hed/identitymap/IdentityMap.cpp src/hed/identitymap/IdentityMap.h src/hed/identitymap/SimpleMap.cpp src/hed/identitymap/SimpleMap.h src/hed/libs/common/ArcConfig.cpp src/hed/libs/common/ArcConfig.h src/hed/libs/common/ArcConfigFile.cpp src/hed/libs/common/ArcConfigFile.h src/hed/libs/common/ArcConfigIni.cpp src/hed/libs/common/ArcConfigIni.h src/hed/libs/common/ArcLocation.cpp src/hed/libs/common/ArcLocation.h src/hed/libs/common/ArcRegex.cpp src/hed/libs/common/ArcRegex.h src/hed/libs/common/ArcVersion.cpp src/hed/libs/common/ArcVersion.h src/hed/libs/common/Base64.cpp src/hed/libs/common/Base64.h src/hed/libs/common/CheckSum.cpp src/hed/libs/common/CheckSum.h src/hed/libs/common/Counter.cpp src/hed/libs/common/Counter.h src/hed/libs/common/DBInterface.h src/hed/libs/common/DateTime.cpp src/hed/libs/common/DateTime.h src/hed/libs/common/FileAccess.cpp src/hed/libs/common/FileAccess.h src/hed/libs/common/FileLock.cpp src/hed/libs/common/FileLock.h src/hed/libs/common/FileUtils.cpp src/hed/libs/common/FileUtils.h src/hed/libs/common/GUID.cpp src/hed/libs/common/GUID.h src/hed/libs/common/HostnameResolver.cpp src/hed/libs/common/HostnameResolver.h src/hed/libs/common/IString.cpp src/hed/libs/common/IString.h src/hed/libs/common/IniConfig.cpp src/hed/libs/common/IniConfig.h src/hed/libs/common/IntraProcessCounter.cpp src/hed/libs/common/IntraProcessCounter.h src/hed/libs/common/JSON.cpp src/hed/libs/common/JSON.h src/hed/libs/common/JobPerfLog.cpp src/hed/libs/common/JobPerfLog.h src/hed/libs/common/Logger.cpp src/hed/libs/common/Logger.h src/hed/libs/common/MysqlWrapper.cpp src/hed/libs/common/MysqlWrapper.h src/hed/libs/common/OptionParser.cpp src/hed/libs/common/OptionParser.h src/hed/libs/common/Profile.cpp src/hed/libs/common/Profile.h src/hed/libs/common/Run.h src/hed/libs/common/Run_unix.cpp src/hed/libs/common/StringConv.cpp src/hed/libs/common/StringConv.h src/hed/libs/common/Thread.cpp src/hed/libs/common/Thread.h src/hed/libs/common/URL.cpp src/hed/libs/common/URL.h src/hed/libs/common/User.cpp src/hed/libs/common/User.h src/hed/libs/common/UserConfig.cpp src/hed/libs/common/UserConfig.h src/hed/libs/common/Utils.cpp src/hed/libs/common/Utils.h src/hed/libs/common/Watchdog.cpp src/hed/libs/common/Watchdog.h src/hed/libs/common/XMLNode.cpp src/hed/libs/common/XMLNode.h src/hed/libs/common/file_access.cpp src/hed/libs/common/file_access.h src/hed/libs/common/hostname_resolver.cpp src/hed/libs/common/hostname_resolver.h src/hed/libs/common/test/ArcRegexTest.cpp src/hed/libs/common/test/Base64Test.cpp src/hed/libs/common/test/CheckSumTest.cpp src/hed/libs/common/test/EnvTest.cpp src/hed/libs/common/test/FileAccessTest.cpp src/hed/libs/common/test/FileLockTest.cpp src/hed/libs/common/test/FileUtilsTest.cpp src/hed/libs/common/test/LoggerTest.cpp src/hed/libs/common/test/MysqlWrapperTest.cpp src/hed/libs/common/test/ProfileTest.cpp src/hed/libs/common/test/RunTest.cpp 
src/hed/libs/common/test/StringConvTest.cpp src/hed/libs/common/test/ThreadTest.cpp src/hed/libs/common/test/URLTest.cpp src/hed/libs/common/test/UserConfigTest.cpp src/hed/libs/common/test/UserTest.cpp src/hed/libs/common/test/WatchdogTest.cpp src/hed/libs/common/test/XMLNodeTest.cpp src/hed/libs/communication/ClientInterface.cpp src/hed/libs/communication/ClientInterface.h src/hed/libs/communication/ClientSAML2SSO.cpp src/hed/libs/communication/ClientSAML2SSO.h src/hed/libs/communication/ClientX509Delegation.cpp src/hed/libs/communication/ClientX509Delegation.h src/hed/libs/communication/test/SimulatorClasses.cpp src/hed/libs/communication/test/SimulatorClasses.h src/hed/libs/compute/Broker.cpp src/hed/libs/compute/Broker.h src/hed/libs/compute/BrokerPlugin.cpp src/hed/libs/compute/BrokerPlugin.h src/hed/libs/compute/ComputingServiceRetriever.cpp src/hed/libs/compute/ComputingServiceRetriever.h src/hed/libs/compute/Endpoint.cpp src/hed/libs/compute/Endpoint.h src/hed/libs/compute/EndpointQueryingStatus.cpp src/hed/libs/compute/EndpointQueryingStatus.h src/hed/libs/compute/EntityRetriever.cpp src/hed/libs/compute/EntityRetriever.h src/hed/libs/compute/EntityRetrieverPlugin.cpp src/hed/libs/compute/EntityRetrieverPlugin.h src/hed/libs/compute/ExecutionTarget.cpp src/hed/libs/compute/ExecutionTarget.h src/hed/libs/compute/GLUE2.cpp src/hed/libs/compute/GLUE2.h src/hed/libs/compute/GLUE2Entity.h src/hed/libs/compute/Job.cpp src/hed/libs/compute/Job.h src/hed/libs/compute/JobControllerPlugin.cpp src/hed/libs/compute/JobControllerPlugin.h src/hed/libs/compute/JobDescription.cpp src/hed/libs/compute/JobDescription.h src/hed/libs/compute/JobDescriptionParserPlugin.cpp src/hed/libs/compute/JobDescriptionParserPlugin.h src/hed/libs/compute/JobInformationStorage.h src/hed/libs/compute/JobInformationStorageBDB.cpp src/hed/libs/compute/JobInformationStorageBDB.h src/hed/libs/compute/JobInformationStorageDescriptor.cpp src/hed/libs/compute/JobInformationStorageSQLite.cpp src/hed/libs/compute/JobInformationStorageSQLite.h src/hed/libs/compute/JobInformationStorageXML.cpp src/hed/libs/compute/JobInformationStorageXML.h src/hed/libs/compute/JobState.cpp src/hed/libs/compute/JobState.h src/hed/libs/compute/JobSupervisor.cpp src/hed/libs/compute/JobSupervisor.h src/hed/libs/compute/Software.cpp src/hed/libs/compute/Software.h src/hed/libs/compute/SubmissionStatus.h src/hed/libs/compute/Submitter.cpp src/hed/libs/compute/Submitter.h src/hed/libs/compute/SubmitterPlugin.cpp src/hed/libs/compute/SubmitterPlugin.h src/hed/libs/compute/TestACCControl.cpp src/hed/libs/compute/TestACCControl.h src/hed/libs/compute/WSCommonPlugin.h src/hed/libs/compute/examples/basic_job_submission.cpp src/hed/libs/compute/examples/job_selector.cpp src/hed/libs/compute/test/BrokerTest.cpp src/hed/libs/compute/test/ComputingServiceUniqTest.cpp src/hed/libs/compute/test/ExecutionTargetTest.cpp src/hed/libs/compute/test/JobControllerPluginTest.cpp src/hed/libs/compute/test/JobDescriptionParserPluginTest.cpp src/hed/libs/compute/test/JobDescriptionTest.cpp src/hed/libs/compute/test/JobInformationStorageTest.cpp src/hed/libs/compute/test/JobListRetrieverTest.cpp src/hed/libs/compute/test/JobStateTest.cpp src/hed/libs/compute/test/JobSupervisorTest.cpp src/hed/libs/compute/test/JobTest.cpp src/hed/libs/compute/test/ServiceEndpointRetrieverTest.cpp src/hed/libs/compute/test/SoftwareTest.cpp src/hed/libs/compute/test/SubmissionStatusTest.cpp src/hed/libs/compute/test/SubmitterPluginTest.cpp src/hed/libs/compute/test/SubmitterTest.cpp 
src/hed/libs/compute/test/TargetInformationRetrieverTest.cpp src/hed/libs/compute/test_JobInformationStorage.cpp src/hed/libs/compute/test_jobdescription.cpp src/hed/libs/credential/ARCProxyUtil.cpp src/hed/libs/credential/ARCProxyUtil.h src/hed/libs/credential/CertUtil.cpp src/hed/libs/credential/CertUtil.h src/hed/libs/credential/Credential.cpp src/hed/libs/credential/Credential.h src/hed/libs/credential/NSSUtil.cpp src/hed/libs/credential/NSSUtil.h src/hed/libs/credential/PasswordSource.cpp src/hed/libs/credential/PasswordSource.h src/hed/libs/credential/Proxycertinfo.cpp src/hed/libs/credential/Proxycertinfo.h src/hed/libs/credential/VOMSAttribute.cpp src/hed/libs/credential/VOMSAttribute.h src/hed/libs/credential/VOMSConfig.cpp src/hed/libs/credential/VOMSConfig.h src/hed/libs/credential/VOMSUtil.cpp src/hed/libs/credential/VOMSUtil.h src/hed/libs/credential/listfunc.cpp src/hed/libs/credential/listfunc.h src/hed/libs/credential/nssprivkeyinfocodec.cpp src/hed/libs/credential/nssprivkeyinfocodec.h src/hed/libs/credential/test/CredentialTest.cpp src/hed/libs/credential/test/VOMSUtilTest.cpp src/hed/libs/credential/test/listfuncTest.cpp src/hed/libs/credential/testcertinfo.cpp src/hed/libs/credential/testeec.cpp src/hed/libs/credential/testproxy.cpp src/hed/libs/credential/testproxy2proxy.cpp src/hed/libs/credential/testvoms.cpp src/hed/libs/credentialmod/cred.cpp src/hed/libs/credentialstore/ClientVOMS.cpp src/hed/libs/credentialstore/ClientVOMS.h src/hed/libs/credentialstore/ClientVOMSRESTful.cpp src/hed/libs/credentialstore/ClientVOMSRESTful.h src/hed/libs/credentialstore/CredentialStore.cpp src/hed/libs/credentialstore/CredentialStore.h src/hed/libs/crypto/OpenSSL.cpp src/hed/libs/crypto/OpenSSL.h src/hed/libs/cryptomod/crypto.cpp src/hed/libs/data/DataBuffer.cpp src/hed/libs/data/DataBuffer.h src/hed/libs/data/DataCallback.h src/hed/libs/data/DataExternalComm.cpp src/hed/libs/data/DataExternalComm.h src/hed/libs/data/DataExternalHelper.cpp src/hed/libs/data/DataExternalHelper.h src/hed/libs/data/DataHandle.h src/hed/libs/data/DataMover.cpp src/hed/libs/data/DataMover.h src/hed/libs/data/DataPoint.cpp src/hed/libs/data/DataPoint.h src/hed/libs/data/DataPointDelegate.cpp src/hed/libs/data/DataPointDelegate.h src/hed/libs/data/DataPointDirect.cpp src/hed/libs/data/DataPointDirect.h src/hed/libs/data/DataPointIndex.cpp src/hed/libs/data/DataPointIndex.h src/hed/libs/data/DataSpeed.cpp src/hed/libs/data/DataSpeed.h src/hed/libs/data/DataStatus.cpp src/hed/libs/data/DataStatus.h src/hed/libs/data/FileCache.cpp src/hed/libs/data/FileCache.h src/hed/libs/data/FileCacheHash.cpp src/hed/libs/data/FileCacheHash.h src/hed/libs/data/FileInfo.h src/hed/libs/data/URLMap.cpp src/hed/libs/data/URLMap.h src/hed/libs/data/examples/DataPointMyProtocol.cpp src/hed/libs/data/examples/partial_copy.cpp src/hed/libs/data/examples/simple_copy.cpp src/hed/libs/data/test/FileCacheTest.cpp src/hed/libs/delegation/DelegationInterface.cpp src/hed/libs/delegation/DelegationInterface.h src/hed/libs/delegation/test/DelegationInterfaceTest.cpp src/hed/libs/deprecated.h src/hed/libs/globusutils/GSSCredential.cpp src/hed/libs/globusutils/GSSCredential.h src/hed/libs/globusutils/GlobusErrorUtils.cpp src/hed/libs/globusutils/GlobusErrorUtils.h src/hed/libs/globusutils/GlobusWorkarounds.cpp src/hed/libs/globusutils/GlobusWorkarounds.h src/hed/libs/infosys/InformationInterface.cpp src/hed/libs/infosys/InformationInterface.h src/hed/libs/infosys/test/InformationInterfaceTest.cpp src/hed/libs/loader/FinderLoader.cpp 
src/hed/libs/loader/FinderLoader.h src/hed/libs/loader/Loader.cpp src/hed/libs/loader/Loader.h src/hed/libs/loader/ModuleManager.cpp src/hed/libs/loader/ModuleManager.h src/hed/libs/loader/Plugin.cpp src/hed/libs/loader/Plugin.h src/hed/libs/loader/test/PluginTest.cpp src/hed/libs/loader/test/TestPlugin.cpp src/hed/libs/message/MCC.cpp src/hed/libs/message/MCC.h src/hed/libs/message/MCCLoader.cpp src/hed/libs/message/MCCLoader.h src/hed/libs/message/MCC_Status.cpp src/hed/libs/message/MCC_Status.h src/hed/libs/message/Message.cpp src/hed/libs/message/Message.h src/hed/libs/message/MessageAttributes.cpp src/hed/libs/message/MessageAttributes.h src/hed/libs/message/MessageAuth.cpp src/hed/libs/message/MessageAuth.h src/hed/libs/message/PayloadRaw.cpp src/hed/libs/message/PayloadRaw.h src/hed/libs/message/PayloadSOAP.cpp src/hed/libs/message/PayloadSOAP.h src/hed/libs/message/PayloadStream.cpp src/hed/libs/message/PayloadStream.h src/hed/libs/message/Plexer.cpp src/hed/libs/message/Plexer.h src/hed/libs/message/SOAPEnvelope.cpp src/hed/libs/message/SOAPEnvelope.h src/hed/libs/message/SOAPMessage.cpp src/hed/libs/message/SOAPMessage.h src/hed/libs/message/SecAttr.cpp src/hed/libs/message/SecAttr.h src/hed/libs/message/SecHandler.cpp src/hed/libs/message/SecHandler.h src/hed/libs/message/Service.cpp src/hed/libs/message/Service.h src/hed/libs/message/secattr/CIStringValue.cpp src/hed/libs/message/secattr/CIStringValue.h src/hed/libs/message/secattr/SecAttrValue.cpp src/hed/libs/message/secattr/SecAttrValue.h src/hed/libs/message/test/ChainTest.cpp src/hed/libs/message/test/TestMCC.cpp src/hed/libs/message/test/TestService.cpp src/hed/libs/otokens/jwse.cpp src/hed/libs/otokens/jwse_ecdsa.cpp src/hed/libs/otokens/jwse_hmac.cpp src/hed/libs/otokens/jwse_keys.cpp src/hed/libs/otokens/jwse_private.h src/hed/libs/otokens/jwse_rsassapkcs1.cpp src/hed/libs/otokens/jwse_rsassapss.cpp src/hed/libs/otokens/openid_metadata.cpp src/hed/libs/otokens/openid_metadata.h src/hed/libs/otokens/otokens.h src/hed/libs/security/ArcPDP/EvaluationCtx.cpp src/hed/libs/security/ArcPDP/EvaluationCtx.h src/hed/libs/security/ArcPDP/Evaluator.cpp src/hed/libs/security/ArcPDP/Evaluator.h src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp src/hed/libs/security/ArcPDP/EvaluatorLoader.h src/hed/libs/security/ArcPDP/PolicyParser.cpp src/hed/libs/security/ArcPDP/PolicyParser.h src/hed/libs/security/ArcPDP/PolicyStore.cpp src/hed/libs/security/ArcPDP/PolicyStore.h src/hed/libs/security/ArcPDP/Request.h src/hed/libs/security/ArcPDP/RequestItem.h src/hed/libs/security/ArcPDP/Response.h src/hed/libs/security/ArcPDP/Result.h src/hed/libs/security/ArcPDP/Source.cpp src/hed/libs/security/ArcPDP/Source.h src/hed/libs/security/ArcPDP/alg/AlgFactory.h src/hed/libs/security/ArcPDP/alg/CombiningAlg.h src/hed/libs/security/ArcPDP/alg/DenyOverridesAlg.cpp src/hed/libs/security/ArcPDP/alg/DenyOverridesAlg.h src/hed/libs/security/ArcPDP/alg/OrderedAlg.cpp src/hed/libs/security/ArcPDP/alg/OrderedAlg.h src/hed/libs/security/ArcPDP/alg/PermitOverridesAlg.cpp src/hed/libs/security/ArcPDP/alg/PermitOverridesAlg.h src/hed/libs/security/ArcPDP/attr/AnyURIAttribute.cpp src/hed/libs/security/ArcPDP/attr/AnyURIAttribute.h src/hed/libs/security/ArcPDP/attr/AttributeFactory.h src/hed/libs/security/ArcPDP/attr/AttributeProxy.h src/hed/libs/security/ArcPDP/attr/AttributeValue.h src/hed/libs/security/ArcPDP/attr/BooleanAttribute.cpp src/hed/libs/security/ArcPDP/attr/BooleanAttribute.h src/hed/libs/security/ArcPDP/attr/DateTimeAttribute.cpp 
src/hed/libs/security/ArcPDP/attr/DateTimeAttribute.h src/hed/libs/security/ArcPDP/attr/GenericAttribute.cpp src/hed/libs/security/ArcPDP/attr/GenericAttribute.h src/hed/libs/security/ArcPDP/attr/RequestAttribute.cpp src/hed/libs/security/ArcPDP/attr/RequestAttribute.h src/hed/libs/security/ArcPDP/attr/StringAttribute.cpp src/hed/libs/security/ArcPDP/attr/StringAttribute.h src/hed/libs/security/ArcPDP/attr/X500NameAttribute.cpp src/hed/libs/security/ArcPDP/attr/X500NameAttribute.h src/hed/libs/security/ArcPDP/fn/EqualFunction.cpp src/hed/libs/security/ArcPDP/fn/EqualFunction.h src/hed/libs/security/ArcPDP/fn/FnFactory.h src/hed/libs/security/ArcPDP/fn/Function.h src/hed/libs/security/ArcPDP/fn/InRangeFunction.cpp src/hed/libs/security/ArcPDP/fn/InRangeFunction.h src/hed/libs/security/ArcPDP/fn/MatchFunction.cpp src/hed/libs/security/ArcPDP/fn/MatchFunction.h src/hed/libs/security/ArcPDP/policy/Policy.cpp src/hed/libs/security/ArcPDP/policy/Policy.h src/hed/libs/security/ClassLoader.cpp src/hed/libs/security/ClassLoader.h src/hed/libs/security/PDP.cpp src/hed/libs/security/PDP.h src/hed/libs/security/Security.cpp src/hed/libs/security/Security.h src/hed/libs/ws-addressing/WSA.cpp src/hed/libs/ws-addressing/WSA.h src/hed/libs/ws-addressing/test.cpp src/hed/libs/ws-security/SAMLToken.cpp src/hed/libs/ws-security/SAMLToken.h src/hed/libs/ws-security/UsernameToken.cpp src/hed/libs/ws-security/UsernameToken.h src/hed/libs/ws-security/X509Token.cpp src/hed/libs/ws-security/X509Token.h src/hed/libs/ws-security/test/SAMLTokenTest.cpp src/hed/libs/ws-security/test/UsernameTokenTest.cpp src/hed/libs/ws-security/test/X509TokenTest.cpp src/hed/libs/ws-security/test_samltoken.cpp src/hed/libs/ws-security/test_usernametoken.cpp src/hed/libs/ws-security/test_x509token.cpp src/hed/libs/xmlsec/XMLSecNode.cpp src/hed/libs/xmlsec/XMLSecNode.h src/hed/libs/xmlsec/XmlSecUtils.cpp src/hed/libs/xmlsec/XmlSecUtils.h src/hed/libs/xmlsec/saml_util.cpp src/hed/libs/xmlsec/saml_util.h src/hed/libs/xmlsec/test_xmlsecnode.cpp src/hed/mcc/http/MCCHTTP.cpp src/hed/mcc/http/MCCHTTP.h src/hed/mcc/http/PayloadHTTP.cpp src/hed/mcc/http/PayloadHTTP.h src/hed/mcc/http/http_test.cpp src/hed/mcc/http/http_test_withtls.cpp src/hed/mcc/msgvalidator/MCCMsgValidator.cpp src/hed/mcc/msgvalidator/MCCMsgValidator.h src/hed/mcc/soap/MCCSOAP.cpp src/hed/mcc/soap/MCCSOAP.h src/hed/mcc/tcp/MCCTCP.cpp src/hed/mcc/tcp/MCCTCP.h src/hed/mcc/tcp/PayloadTCPSocket.cpp src/hed/mcc/tcp/PayloadTCPSocket.h src/hed/mcc/tls/BIOGSIMCC.cpp src/hed/mcc/tls/BIOGSIMCC.h src/hed/mcc/tls/BIOMCC.cpp src/hed/mcc/tls/BIOMCC.h src/hed/mcc/tls/ConfigTLSMCC.cpp src/hed/mcc/tls/ConfigTLSMCC.h src/hed/mcc/tls/DelegationCollector.cpp src/hed/mcc/tls/DelegationCollector.h src/hed/mcc/tls/DelegationSecAttr.cpp src/hed/mcc/tls/DelegationSecAttr.h src/hed/mcc/tls/GlobusSigningPolicy.cpp src/hed/mcc/tls/GlobusSigningPolicy.h src/hed/mcc/tls/MCCTLS.cpp src/hed/mcc/tls/MCCTLS.h src/hed/mcc/tls/PayloadTLSMCC.cpp src/hed/mcc/tls/PayloadTLSMCC.h src/hed/mcc/tls/PayloadTLSStream.cpp src/hed/mcc/tls/PayloadTLSStream.h src/hed/shc/SecHandlerPlugin.cpp src/hed/shc/allowpdp/AllowPDP.cpp src/hed/shc/allowpdp/AllowPDP.h src/hed/shc/arcauthzsh/ArcAuthZ.cpp src/hed/shc/arcauthzsh/ArcAuthZ.h src/hed/shc/arcpdp/ArcAlgFactory.cpp src/hed/shc/arcpdp/ArcAlgFactory.h src/hed/shc/arcpdp/ArcAttributeFactory.cpp src/hed/shc/arcpdp/ArcAttributeFactory.h src/hed/shc/arcpdp/ArcAttributeProxy.h src/hed/shc/arcpdp/ArcEvaluationCtx.cpp src/hed/shc/arcpdp/ArcEvaluationCtx.h 
src/hed/shc/arcpdp/ArcEvaluator.cpp src/hed/shc/arcpdp/ArcEvaluator.h src/hed/shc/arcpdp/ArcFnFactory.cpp src/hed/shc/arcpdp/ArcFnFactory.h src/hed/shc/arcpdp/ArcPDP.cpp src/hed/shc/arcpdp/ArcPDP.h src/hed/shc/arcpdp/ArcPolicy.cpp src/hed/shc/arcpdp/ArcPolicy.h src/hed/shc/arcpdp/ArcRequest.cpp src/hed/shc/arcpdp/ArcRequest.h src/hed/shc/arcpdp/ArcRequestItem.cpp src/hed/shc/arcpdp/ArcRequestItem.h src/hed/shc/arcpdp/ArcRule.cpp src/hed/shc/arcpdp/ArcRule.h src/hed/shc/classload_test.cpp src/hed/shc/delegationpdp/DelegationPDP.cpp src/hed/shc/delegationpdp/DelegationPDP.h src/hed/shc/delegationsh/DelegationSH.cpp src/hed/shc/delegationsh/DelegationSH.h src/hed/shc/denypdp/DenyPDP.cpp src/hed/shc/denypdp/DenyPDP.h src/hed/shc/gaclpdp/GACLEvaluator.cpp src/hed/shc/gaclpdp/GACLEvaluator.h src/hed/shc/gaclpdp/GACLPDP.cpp src/hed/shc/gaclpdp/GACLPDP.h src/hed/shc/gaclpdp/GACLPolicy.cpp src/hed/shc/gaclpdp/GACLPolicy.h src/hed/shc/gaclpdp/GACLRequest.cpp src/hed/shc/gaclpdp/GACLRequest.h src/hed/shc/legacy/ConfigParser.cpp src/hed/shc/legacy/ConfigParser.h src/hed/shc/legacy/LegacyMap.cpp src/hed/shc/legacy/LegacyMap.h src/hed/shc/legacy/LegacyPDP.cpp src/hed/shc/legacy/LegacyPDP.h src/hed/shc/legacy/LegacySecAttr.cpp src/hed/shc/legacy/LegacySecAttr.h src/hed/shc/legacy/LegacySecHandler.cpp src/hed/shc/legacy/LegacySecHandler.h src/hed/shc/legacy/arc_lcas.cpp src/hed/shc/legacy/arc_lcmaps.cpp src/hed/shc/legacy/auth.cpp src/hed/shc/legacy/auth.h src/hed/shc/legacy/auth_file.cpp src/hed/shc/legacy/auth_otokens.cpp src/hed/shc/legacy/auth_plugin.cpp src/hed/shc/legacy/auth_subject.cpp src/hed/shc/legacy/auth_voms.cpp src/hed/shc/legacy/cert_util.cpp src/hed/shc/legacy/cert_util.h src/hed/shc/legacy/plugin.cpp src/hed/shc/legacy/simplemap.cpp src/hed/shc/legacy/simplemap.h src/hed/shc/legacy/unixmap.cpp src/hed/shc/legacy/unixmap.h src/hed/shc/legacy/unixmap_lcmaps.cpp src/hed/shc/otokens/OTokensSH.cpp src/hed/shc/otokens/OTokensSH.h src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.h src/hed/shc/saml2sso_assertionconsumersh/SAML2SSO_AssertionConsumerSH.cpp src/hed/shc/saml2sso_assertionconsumersh/SAML2SSO_AssertionConsumerSH.h src/hed/shc/samltokensh/SAMLTokenSH.cpp src/hed/shc/samltokensh/SAMLTokenSH.h src/hed/shc/simplelistpdp/SimpleListPDP.cpp src/hed/shc/simplelistpdp/SimpleListPDP.h src/hed/shc/test.cpp src/hed/shc/testinterface_arc.cpp src/hed/shc/testinterface_xacml.cpp src/hed/shc/usernametokensh/UsernameTokenSH.cpp src/hed/shc/usernametokensh/UsernameTokenSH.h src/hed/shc/x509tokensh/X509TokenSH.cpp src/hed/shc/x509tokensh/X509TokenSH.h src/hed/shc/xacmlpdp/AttributeDesignator.cpp src/hed/shc/xacmlpdp/AttributeDesignator.h src/hed/shc/xacmlpdp/AttributeSelector.cpp src/hed/shc/xacmlpdp/AttributeSelector.h src/hed/shc/xacmlpdp/XACMLAlgFactory.cpp src/hed/shc/xacmlpdp/XACMLAlgFactory.h src/hed/shc/xacmlpdp/XACMLApply.cpp src/hed/shc/xacmlpdp/XACMLApply.h src/hed/shc/xacmlpdp/XACMLAttributeFactory.cpp src/hed/shc/xacmlpdp/XACMLAttributeFactory.h src/hed/shc/xacmlpdp/XACMLAttributeProxy.h src/hed/shc/xacmlpdp/XACMLCondition.cpp src/hed/shc/xacmlpdp/XACMLCondition.h src/hed/shc/xacmlpdp/XACMLEvaluationCtx.cpp src/hed/shc/xacmlpdp/XACMLEvaluationCtx.h src/hed/shc/xacmlpdp/XACMLEvaluator.cpp src/hed/shc/xacmlpdp/XACMLEvaluator.h src/hed/shc/xacmlpdp/XACMLFnFactory.cpp src/hed/shc/xacmlpdp/XACMLFnFactory.h src/hed/shc/xacmlpdp/XACMLPDP.cpp src/hed/shc/xacmlpdp/XACMLPDP.h src/hed/shc/xacmlpdp/XACMLPolicy.cpp src/hed/shc/xacmlpdp/XACMLPolicy.h 
src/hed/shc/xacmlpdp/XACMLRequest.cpp src/hed/shc/xacmlpdp/XACMLRequest.h src/hed/shc/xacmlpdp/XACMLRule.cpp src/hed/shc/xacmlpdp/XACMLRule.h src/hed/shc/xacmlpdp/XACMLTarget.cpp src/hed/shc/xacmlpdp/XACMLTarget.h src/libs/data-staging/DTR.cpp src/libs/data-staging/DTR.h src/libs/data-staging/DTRList.cpp src/libs/data-staging/DTRList.h src/libs/data-staging/DTRStatus.cpp src/libs/data-staging/DTRStatus.h src/libs/data-staging/DataDelivery.cpp src/libs/data-staging/DataDelivery.h src/libs/data-staging/DataDeliveryComm.cpp src/libs/data-staging/DataDeliveryComm.h src/libs/data-staging/DataDeliveryLocalComm.cpp src/libs/data-staging/DataDeliveryLocalComm.h src/libs/data-staging/DataDeliveryRemoteComm.cpp src/libs/data-staging/DataDeliveryRemoteComm.h src/libs/data-staging/DataStagingDelivery.cpp src/libs/data-staging/Processor.cpp src/libs/data-staging/Processor.h src/libs/data-staging/Scheduler.cpp src/libs/data-staging/Scheduler.h src/libs/data-staging/TransferShares.cpp src/libs/data-staging/TransferShares.h src/libs/data-staging/examples/Generator.cpp src/libs/data-staging/examples/Generator.h src/libs/data-staging/examples/generator-main.cpp src/libs/data-staging/test/DTRTest.cpp src/libs/data-staging/test/DeliveryTest.cpp src/libs/data-staging/test/ProcessorTest.cpp src/services/a-rex/FileChunks.cpp src/services/a-rex/FileChunks.h src/services/a-rex/PayloadFile.cpp src/services/a-rex/PayloadFile.h src/services/a-rex/SQLhelpers.h src/services/a-rex/arex.cpp src/services/a-rex/arex.h src/services/a-rex/cachecheck.cpp src/services/a-rex/change_activity_status.cpp src/services/a-rex/create_activity.cpp src/services/a-rex/delegation/DelegationStore.cpp src/services/a-rex/delegation/DelegationStore.h src/services/a-rex/delegation/DelegationStores.cpp src/services/a-rex/delegation/DelegationStores.h src/services/a-rex/delegation/FileRecord.cpp src/services/a-rex/delegation/FileRecord.h src/services/a-rex/delegation/FileRecordBDB.cpp src/services/a-rex/delegation/FileRecordBDB.h src/services/a-rex/delegation/FileRecordSQLite.cpp src/services/a-rex/delegation/FileRecordSQLite.h src/services/a-rex/delegation/uid.cpp src/services/a-rex/delegation/uid.h src/services/a-rex/faults.cpp src/services/a-rex/get.cpp src/services/a-rex/get_activity_statuses.cpp src/services/a-rex/grid-manager/GridManager.cpp src/services/a-rex/grid-manager/GridManager.h src/services/a-rex/grid-manager/accounting/AAR.cpp src/services/a-rex/grid-manager/accounting/AAR.h src/services/a-rex/grid-manager/accounting/AccountingDB.h src/services/a-rex/grid-manager/accounting/AccountingDBAsync.cpp src/services/a-rex/grid-manager/accounting/AccountingDBAsync.h src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.h src/services/a-rex/grid-manager/accounting/test_adb.cpp src/services/a-rex/grid-manager/arc_blahp_logger.cpp src/services/a-rex/grid-manager/conf/CacheConfig.cpp src/services/a-rex/grid-manager/conf/CacheConfig.h src/services/a-rex/grid-manager/conf/CoreConfig.cpp src/services/a-rex/grid-manager/conf/CoreConfig.h src/services/a-rex/grid-manager/conf/GMConfig.cpp src/services/a-rex/grid-manager/conf/GMConfig.h src/services/a-rex/grid-manager/conf/StagingConfig.cpp src/services/a-rex/grid-manager/conf/StagingConfig.h src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp src/services/a-rex/grid-manager/conf/UrlMapConfig.h src/services/a-rex/grid-manager/files/ControlFileContent.cpp src/services/a-rex/grid-manager/files/ControlFileContent.h 
src/services/a-rex/grid-manager/files/ControlFileHandling.cpp src/services/a-rex/grid-manager/files/ControlFileHandling.h src/services/a-rex/grid-manager/gm_delegations_converter.cpp src/services/a-rex/grid-manager/gm_jobs.cpp src/services/a-rex/grid-manager/gm_kick.cpp src/services/a-rex/grid-manager/inputcheck.cpp src/services/a-rex/grid-manager/jobplugin/init.cpp src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp src/services/a-rex/grid-manager/jobplugin/jobplugin.h src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp src/services/a-rex/grid-manager/jobs/CommFIFO.cpp src/services/a-rex/grid-manager/jobs/CommFIFO.h src/services/a-rex/grid-manager/jobs/ContinuationPlugins.cpp src/services/a-rex/grid-manager/jobs/ContinuationPlugins.h src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp src/services/a-rex/grid-manager/jobs/DTRGenerator.h src/services/a-rex/grid-manager/jobs/GMJob.cpp src/services/a-rex/grid-manager/jobs/GMJob.h src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.h src/services/a-rex/grid-manager/jobs/JobsList.cpp src/services/a-rex/grid-manager/jobs/JobsList.h src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp src/services/a-rex/grid-manager/log/HeartBeatMetrics.h src/services/a-rex/grid-manager/log/JobLog.cpp src/services/a-rex/grid-manager/log/JobLog.h src/services/a-rex/grid-manager/log/JobsMetrics.cpp src/services/a-rex/grid-manager/log/JobsMetrics.h src/services/a-rex/grid-manager/log/SpaceMetrics.cpp src/services/a-rex/grid-manager/log/SpaceMetrics.h src/services/a-rex/grid-manager/mail/send_mail.cpp src/services/a-rex/grid-manager/mail/send_mail.h src/services/a-rex/grid-manager/misc/proxy.cpp src/services/a-rex/grid-manager/misc/proxy.h src/services/a-rex/grid-manager/run/RunParallel.cpp src/services/a-rex/grid-manager/run/RunParallel.h src/services/a-rex/grid-manager/run/RunRedirected.cpp src/services/a-rex/grid-manager/run/RunRedirected.h src/services/a-rex/grid-manager/test_write_grami_file.cpp src/services/a-rex/information_collector.cpp src/services/a-rex/internaljobplugin/DescriptorsINTERNAL.cpp src/services/a-rex/internaljobplugin/INTERNALClient.cpp src/services/a-rex/internaljobplugin/INTERNALClient.h src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.h src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.h src/services/a-rex/internaljobplugin/JobStateINTERNAL.cpp src/services/a-rex/internaljobplugin/JobStateINTERNAL.h src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.h src/services/a-rex/internaljobplugin/TargetInformationRetrieverPluginINTERNAL.cpp src/services/a-rex/internaljobplugin/TargetInformationRetrieverPluginINTERNAL.h src/services/a-rex/job.cpp src/services/a-rex/job.h src/services/a-rex/lrms/arc/__init__.py src/services/a-rex/lrms/arc/lrms/__init__.py src/services/a-rex/lrms/arc/lrms/common/__init__.py src/services/a-rex/lrms/arc/lrms/common/cancel.py src/services/a-rex/lrms/arc/lrms/common/config.py src/services/a-rex/lrms/arc/lrms/common/files.py src/services/a-rex/lrms/arc/lrms/common/log.py src/services/a-rex/lrms/arc/lrms/common/lrmsinfo.py src/services/a-rex/lrms/arc/lrms/common/parse.py src/services/a-rex/lrms/arc/lrms/common/proc.py src/services/a-rex/lrms/arc/lrms/common/scan.py 
src/services/a-rex/lrms/arc/lrms/common/ssh.py src/services/a-rex/lrms/arc/lrms/common/submit.py src/services/a-rex/lrms/arc/lrms/pyCancel.py src/services/a-rex/lrms/arc/lrms/pyScanner.py src/services/a-rex/lrms/arc/lrms/pySubmit.py src/services/a-rex/lrms/arc/lrms/slurm.py src/services/a-rex/put.cpp src/services/a-rex/rest/rest.cpp src/services/a-rex/rest/rest.h src/services/a-rex/test_cache_check.cpp src/services/a-rex/tools.cpp src/services/a-rex/tools.h src/services/a-rex/update_credentials.cpp src/services/acix/__init__.py src/services/acix/core/__init__.py src/services/acix/core/bitvector.py src/services/acix/core/bloomfilter.py src/services/acix/core/cacheclient.py src/services/acix/core/hashes.py src/services/acix/core/indexclient.py src/services/acix/core/ssl.py src/services/acix/core/test/test_bloomfilter.py src/services/acix/indexserver/__init__.py src/services/acix/indexserver/index.py src/services/acix/indexserver/indexresource.py src/services/acix/indexserver/indexsetup.py src/services/acix/indexserver/test/test_system.py src/services/acix/scanner/__init__.py src/services/acix/scanner/cache.py src/services/acix/scanner/cacheresource.py src/services/acix/scanner/cachesetup.py src/services/acix/scanner/pscan.py src/services/acix/scanner/test/test_cacheresource.py src/services/acix/scanner/test/test_scan.py src/services/candypond/CandyPond.cpp src/services/candypond/CandyPond.h src/services/candypond/CandyPondGenerator.cpp src/services/candypond/CandyPondGenerator.h src/services/data-staging/DataDeliveryService.cpp src/services/data-staging/DataDeliveryService.h src/services/examples/echo_python/EchoService.py src/services/examples/echo_python/__init__.py src/services/examples/echo_python/test.cpp src/services/gridftpd/auth/auth.cpp src/services/gridftpd/auth/auth.h src/services/gridftpd/auth/auth_file.cpp src/services/gridftpd/auth/auth_plugin.cpp src/services/gridftpd/auth/auth_subject.cpp src/services/gridftpd/auth/auth_voms.cpp src/services/gridftpd/auth/identity.cpp src/services/gridftpd/auth/identity.h src/services/gridftpd/auth/identity_dn.cpp src/services/gridftpd/auth/identity_dn.h src/services/gridftpd/auth/identity_voms.cpp src/services/gridftpd/auth/identity_voms.h src/services/gridftpd/auth/object_access.cpp src/services/gridftpd/auth/object_access.h src/services/gridftpd/auth/permission.cpp src/services/gridftpd/auth/permission.h src/services/gridftpd/auth/simplemap.cpp src/services/gridftpd/auth/simplemap.h src/services/gridftpd/auth/unixmap.cpp src/services/gridftpd/auth/unixmap.h src/services/gridftpd/auth/unixmap_lcmaps.cpp src/services/gridftpd/commands.cpp src/services/gridftpd/commands.h src/services/gridftpd/conf.h src/services/gridftpd/conf/conf_vo.cpp src/services/gridftpd/conf/conf_vo.h src/services/gridftpd/conf/daemon.cpp src/services/gridftpd/conf/daemon.h src/services/gridftpd/config.cpp src/services/gridftpd/datalist.cpp src/services/gridftpd/dataread.cpp src/services/gridftpd/datawrite.cpp src/services/gridftpd/fileplugin/fileplugin.cpp src/services/gridftpd/fileplugin/fileplugin.h src/services/gridftpd/fileplugin/init.cpp src/services/gridftpd/fileroot.cpp src/services/gridftpd/fileroot.h src/services/gridftpd/fileroot_config.cpp src/services/gridftpd/listener.cpp src/services/gridftpd/misc.cpp src/services/gridftpd/misc.h src/services/gridftpd/misc/ldapquery.cpp src/services/gridftpd/misc/ldapquery.h src/services/gridftpd/misc/proxy.cpp src/services/gridftpd/misc/proxy.h src/services/gridftpd/names.cpp src/services/gridftpd/names.h 
src/services/gridftpd/run/run_plugin.cpp src/services/gridftpd/run/run_plugin.h src/services/gridftpd/userspec.cpp src/services/gridftpd/userspec.h src/services/wrappers/python/pythonwrapper.cpp src/services/wrappers/python/pythonwrapper.h src/tests/arcpolicy/arcpolicy.cpp src/tests/client/test_ClientInterface.cpp src/tests/client/test_ClientSAML2SSO.cpp src/tests/client/test_ClientX509Delegation_ARC.cpp src/tests/client/test_ClientX509Delegation_GridSite.cpp src/tests/count/count.cpp src/tests/count/count.h src/tests/count/test_client.cpp src/tests/count/test_service.cpp src/tests/delegation/test_client_with_delegation_sechandler.cpp src/tests/delegation/test_delegation_client.cpp src/tests/echo/echo.cpp src/tests/echo/echo.h src/tests/echo/echo_client.py src/tests/echo/echo_test4axis2c/test_client.cpp src/tests/echo/perfengine.py src/tests/echo/perftest.cpp src/tests/echo/test.cpp src/tests/echo/test_client.cpp src/tests/echo/test_clientinterface.cpp src/tests/echo/test_clientinterface.py src/tests/echo/test_service.cpp src/tests/perf/perftest.cpp src/tests/perf/perftest_cmd_duration.cpp src/tests/perf/perftest_cmd_times.cpp src/tests/perf/perftest_deleg_bydelegclient.cpp src/tests/perf/perftest_deleg_bysechandler.cpp src/tests/perf/perftest_msgsize.cpp src/tests/perf/perftest_saml2sso.cpp src/tests/perf/perftest_samlaa.cpp src/tests/perf/perftest_slcs.cpp src/tests/policy-delegation/test.cpp src/tests/translator/translator.cpp src/tests/unit/ClientsTest.cpp src/tests/unit/ClientsTest.h src/tests/unit/Test.cpp src/tests/xpath/prepare.py src/tests/xpath/query.cpp src/utils/hed/arcplugin.cpp src/utils/hed/common.cpp src/utils/hed/complextype.cpp src/utils/hed/schemaconv.cpp src/utils/hed/schemaconv.h src/utils/hed/simpletype.cpp src/utils/hed/wsdl2hed.cpp src/utils/python/arc/__init__.py src/utils/python/arc/control/Accounting.py src/utils/python/arc/control/AccountingDB.py src/utils/python/arc/control/AccountingPublishing.py src/utils/python/arc/control/Cache.py src/utils/python/arc/control/CertificateGenerator.py src/utils/python/arc/control/CommunityRTE.py src/utils/python/arc/control/Config.py src/utils/python/arc/control/ControlCommon.py src/utils/python/arc/control/DataStaging.py src/utils/python/arc/control/Jobs.py src/utils/python/arc/control/OSPackage.py src/utils/python/arc/control/OSService.py src/utils/python/arc/control/RunTimeEnvironment.py src/utils/python/arc/control/ServiceCommon.py src/utils/python/arc/control/Services.py src/utils/python/arc/control/TestCA.py src/utils/python/arc/control/ThirdPartyDeployment.py src/utils/python/arc/control/Validator.py src/utils/python/arc/control/__init__.py src/utils/python/arc/paths.py src/utils/python/arc/utils/__init__.py src/utils/python/arc/utils/config.py src/utils/python/arc/utils/reference.py nordugrid-arc-6.14.0/po/PaxHeaders.30264/Rules-quot0000644000000000000000000000013214152153401017752 xustar000000000000000030 mtime=1638455041.296689413 30 atime=1638455095.623505699 30 ctime=1638455103.886629856 nordugrid-arc-6.14.0/po/Rules-quot0000644000175000002070000000337614152153401017750 0ustar00mockbuildmock00000000000000# Special Makefile rules for English message catalogs with quotation marks. 
DISTFILES.common.extra1 = quot.sed boldquot.sed en@quot.header en@boldquot.header insert-header.sin Rules-quot .SUFFIXES: .insert-header .po-update-en en@quot.po-create: $(MAKE) en@quot.po-update en@boldquot.po-create: $(MAKE) en@boldquot.po-update en@quot.po-update: en@quot.po-update-en en@boldquot.po-update: en@boldquot.po-update-en .insert-header.po-update-en: @lang=`echo $@ | sed -e 's/\.po-update-en$$//'`; \ if test "$(PACKAGE)" = "gettext"; then PATH=`pwd`/../src:$$PATH; GETTEXTLIBDIR=`cd $(top_srcdir)/src && pwd`; export GETTEXTLIBDIR; fi; \ tmpdir=`pwd`; \ echo "$$lang:"; \ ll=`echo $$lang | sed -e 's/@.*//'`; \ LC_ALL=C; export LC_ALL; \ cd $(srcdir); \ if $(MSGINIT) -i $(DOMAIN).pot --no-translator -l $$ll -o - 2>/dev/null | sed -f $$tmpdir/$$lang.insert-header | $(MSGCONV) -t UTF-8 | $(MSGFILTER) sed -f `echo $$lang | sed -e 's/.*@//'`.sed 2>/dev/null > $$tmpdir/$$lang.new.po; then \ if cmp $$lang.po $$tmpdir/$$lang.new.po >/dev/null 2>&1; then \ rm -f $$tmpdir/$$lang.new.po; \ else \ if mv -f $$tmpdir/$$lang.new.po $$lang.po; then \ :; \ else \ echo "creation of $$lang.po failed: cannot move $$tmpdir/$$lang.new.po to $$lang.po" 1>&2; \ exit 1; \ fi; \ fi; \ else \ echo "creation of $$lang.po failed!" 1>&2; \ rm -f $$tmpdir/$$lang.new.po; \ fi en@quot.insert-header: insert-header.sin sed -e '/^#/d' -e 's/HEADER/en@quot.header/g' $(srcdir)/insert-header.sin > en@quot.insert-header en@boldquot.insert-header: insert-header.sin sed -e '/^#/d' -e 's/HEADER/en@boldquot.header/g' $(srcdir)/insert-header.sin > en@boldquot.insert-header mostlyclean: mostlyclean-quot mostlyclean-quot: rm -f *.insert-header nordugrid-arc-6.14.0/po/PaxHeaders.30264/Makefile.in.in0000644000000000000000000000013214152153401020421 xustar000000000000000030 mtime=1638455041.280689173 30 atime=1638455092.012451441 30 ctime=1638455103.880629766 nordugrid-arc-6.14.0/po/Makefile.in.in0000644000175000002070000003552414152153401020417 0ustar00mockbuildmock00000000000000# Makefile for PO directory in any package using GNU gettext. # Copyright (C) 1995-1997, 2000-2007 by Ulrich Drepper # # This file can be copied and used freely without restrictions. It can # be used in projects which are not available under the GNU General Public # License but which still want to provide support for the GNU gettext # functionality. # Please note that the actual code of GNU gettext is covered by the GNU # General Public License and is *not* in the public domain. # # Origin: gettext-0.17 GETTEXT_MACRO_VERSION = 0.17 PACKAGE = @PACKAGE@ VERSION = @VERSION@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ SHELL = /bin/sh @SET_MAKE@ srcdir = @srcdir@ top_srcdir = @top_srcdir@ VPATH = @srcdir@ prefix = @prefix@ exec_prefix = @exec_prefix@ datarootdir = @datarootdir@ datadir = @datadir@ localedir = @localedir@ gettextsrcdir = $(datadir)/gettext/po INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ # We use $(mkdir_p). # In automake <= 1.9.x, $(mkdir_p) is defined either as "mkdir -p --" or as # "$(mkinstalldirs)" or as "$(install_sh) -d". For these automake versions, # @install_sh@ does not start with $(SHELL), so we add it. # In automake >= 1.10, @mkdir_p@ is derived from ${MKDIR_P}, which is defined # either as "/path/to/mkdir -p" or ".../install-sh -c -d". For these automake # versions, $(mkinstalldirs) and $(install_sh) are unused. 
mkinstalldirs = $(SHELL) @install_sh@ -d install_sh = $(SHELL) @install_sh@ MKDIR_P = @MKDIR_P@ mkdir_p = @mkdir_p@ GMSGFMT_ = @GMSGFMT@ GMSGFMT_no = @GMSGFMT@ GMSGFMT_yes = @GMSGFMT_015@ GMSGFMT = $(GMSGFMT_$(USE_MSGCTXT)) MSGFMT_ = @MSGFMT@ MSGFMT_no = @MSGFMT@ MSGFMT_yes = @MSGFMT_015@ MSGFMT = $(MSGFMT_$(USE_MSGCTXT)) XGETTEXT_ = @XGETTEXT@ XGETTEXT_no = @XGETTEXT@ XGETTEXT_yes = @XGETTEXT_015@ XGETTEXT = $(XGETTEXT_$(USE_MSGCTXT)) MSGMERGE = msgmerge MSGMERGE_UPDATE = @MSGMERGE@ --update MSGINIT = msginit MSGCONV = msgconv MSGFILTER = msgfilter POFILES = @POFILES@ GMOFILES = @GMOFILES@ UPDATEPOFILES = @UPDATEPOFILES@ DUMMYPOFILES = @DUMMYPOFILES@ DISTFILES.common = Makefile.in.in remove-potcdate.sin \ $(DISTFILES.common.extra1) $(DISTFILES.common.extra2) $(DISTFILES.common.extra3) DISTFILES = $(DISTFILES.common) Makevars POTFILES.in \ $(POFILES) $(GMOFILES) \ $(DISTFILES.extra1) $(DISTFILES.extra2) $(DISTFILES.extra3) POTFILES = \ CATALOGS = @CATALOGS@ # Makevars gets inserted here. (Don't remove this line!) .SUFFIXES: .SUFFIXES: .po .gmo .mo .sed .sin .nop .po-create .po-update .po.mo: @echo "$(MSGFMT) -c -o $@ $<"; \ $(MSGFMT) -c -o t-$@ $< && mv t-$@ $@ .po.gmo: @lang=`echo $* | sed -e 's,.*/,,'`; \ test "$(srcdir)" = . && cdcmd="" || cdcmd="cd $(srcdir) && "; \ echo "$${cdcmd}rm -f $${lang}.gmo && $(GMSGFMT) -c --statistics -o $${lang}.gmo $${lang}.po"; \ cd $(srcdir) && rm -f $${lang}.gmo && $(GMSGFMT) -c --statistics -o t-$${lang}.gmo $${lang}.po && mv t-$${lang}.gmo $${lang}.gmo .sin.sed: sed -e '/^#/d' $< > t-$@ mv t-$@ $@ all: check-macro-version all-@USE_NLS@ all-yes: stamp-po all-no: # Ensure that the gettext macros and this Makefile.in.in are in sync. check-macro-version: @test "$(GETTEXT_MACRO_VERSION)" = "@GETTEXT_MACRO_VERSION@" \ || { echo "*** error: gettext infrastructure mismatch: using a Makefile.in.in from gettext version $(GETTEXT_MACRO_VERSION) but the autoconf macros are from gettext version @GETTEXT_MACRO_VERSION@" 1>&2; \ exit 1; \ } # $(srcdir)/$(DOMAIN).pot is only created when needed. When xgettext finds no # internationalized messages, no $(srcdir)/$(DOMAIN).pot is created (because # we don't want to bother translators with empty POT files). We assume that # LINGUAS is empty in this case, i.e. $(POFILES) and $(GMOFILES) are empty. # In this case, stamp-po is a nop (i.e. a phony target). # stamp-po is a timestamp denoting the last time at which the CATALOGS have # been loosely updated. Its purpose is that when a developer or translator # checks out the package via CVS, and the $(DOMAIN).pot file is not in CVS, # "make" will update the $(DOMAIN).pot and the $(CATALOGS), but subsequent # invocations of "make" will do nothing. This timestamp would not be necessary # if updating the $(CATALOGS) would always touch them; however, the rule for # $(POFILES) has been designed to not touch files that don't need to be # changed. stamp-po: $(srcdir)/$(DOMAIN).pot test ! -f $(srcdir)/$(DOMAIN).pot || \ test -z "$(GMOFILES)" || $(MAKE) $(GMOFILES) @test ! -f $(srcdir)/$(DOMAIN).pot || { \ echo "touch stamp-po" && \ echo timestamp > stamp-poT && \ mv stamp-poT stamp-po; \ } # Note: Target 'all' must not depend on target '$(DOMAIN).pot-update', # otherwise packages like GCC can not be built if only parts of the source # have been downloaded. # This target rebuilds $(DOMAIN).pot; it is an expensive operation. # Note that $(DOMAIN).pot is not touched if it doesn't need to be changed. 
$(DOMAIN).pot-update: $(POTFILES) $(srcdir)/POTFILES.in remove-potcdate.sed if LC_ALL=C grep 'GNU @PACKAGE@' $(top_srcdir)/* 2>/dev/null | grep -v 'libtool:' >/dev/null; then \ package_gnu='GNU '; \ else \ package_gnu=''; \ fi; \ if test -n '$(MSGID_BUGS_ADDRESS)' || test '$(PACKAGE_BUGREPORT)' = '@'PACKAGE_BUGREPORT'@'; then \ msgid_bugs_address='$(MSGID_BUGS_ADDRESS)'; \ else \ msgid_bugs_address='$(PACKAGE_BUGREPORT)'; \ fi; \ case `$(XGETTEXT) --version | sed 1q | sed -e 's,^[^0-9]*,,'` in \ '' | 0.[0-9] | 0.[0-9].* | 0.1[0-5] | 0.1[0-5].* | 0.16 | 0.16.[0-1]*) \ $(XGETTEXT) --default-domain=$(DOMAIN) --directory=$(top_srcdir) \ --add-comments=TRANSLATORS: $(XGETTEXT_OPTIONS) @XGETTEXT_EXTRA_OPTIONS@ \ --files-from=$(srcdir)/POTFILES.in \ --copyright-holder='$(COPYRIGHT_HOLDER)' \ --msgid-bugs-address="$$msgid_bugs_address" \ ;; \ *) \ $(XGETTEXT) --default-domain=$(DOMAIN) --directory=$(top_srcdir) \ --add-comments=TRANSLATORS: $(XGETTEXT_OPTIONS) @XGETTEXT_EXTRA_OPTIONS@ \ --files-from=$(srcdir)/POTFILES.in \ --copyright-holder='$(COPYRIGHT_HOLDER)' \ --package-name="$${package_gnu}@PACKAGE@" \ --package-version='@VERSION@' \ --msgid-bugs-address="$$msgid_bugs_address" \ ;; \ esac test ! -f $(DOMAIN).po || { \ if test -f $(srcdir)/$(DOMAIN).pot; then \ sed -f remove-potcdate.sed < $(srcdir)/$(DOMAIN).pot > $(DOMAIN).1po && \ sed -f remove-potcdate.sed < $(DOMAIN).po > $(DOMAIN).2po && \ if cmp $(DOMAIN).1po $(DOMAIN).2po >/dev/null 2>&1; then \ rm -f $(DOMAIN).1po $(DOMAIN).2po $(DOMAIN).po; \ else \ rm -f $(DOMAIN).1po $(DOMAIN).2po $(srcdir)/$(DOMAIN).pot && \ mv $(DOMAIN).po $(srcdir)/$(DOMAIN).pot; \ fi; \ else \ mv $(DOMAIN).po $(srcdir)/$(DOMAIN).pot; \ fi; \ } # This rule has no dependencies: we don't need to update $(DOMAIN).pot at # every "make" invocation, only create it when it is missing. # Only "make $(DOMAIN).pot-update" or "make dist" will force an update. $(srcdir)/$(DOMAIN).pot: $(MAKE) $(DOMAIN).pot-update # This target rebuilds a PO file if $(DOMAIN).pot has changed. # Note that a PO file is not touched if it doesn't need to be changed. $(POFILES): $(srcdir)/$(DOMAIN).pot @lang=`echo $@ | sed -e 's,.*/,,' -e 's/\.po$$//'`; \ if test -f "$(srcdir)/$${lang}.po"; then \ test "$(srcdir)" = . 
&& cdcmd="" || cdcmd="cd $(srcdir) && "; \ echo "$${cdcmd}$(MSGMERGE_UPDATE) $${lang}.po $(DOMAIN).pot"; \ cd $(srcdir) && $(MSGMERGE_UPDATE) $${lang}.po $(DOMAIN).pot; \ else \ $(MAKE) $${lang}.po-create; \ fi install: install-exec install-data install-exec: install-data: install-data-@USE_NLS@ if test "$(PACKAGE)" = "gettext-tools"; then \ $(mkdir_p) $(DESTDIR)$(gettextsrcdir); \ for file in $(DISTFILES.common) Makevars.template; do \ $(INSTALL_DATA) $(srcdir)/$$file \ $(DESTDIR)$(gettextsrcdir)/$$file; \ done; \ for file in Makevars; do \ rm -f $(DESTDIR)$(gettextsrcdir)/$$file; \ done; \ else \ : ; \ fi install-data-no: all install-data-yes: all $(mkdir_p) $(DESTDIR)$(datadir) @catalogs='$(CATALOGS)'; \ for cat in $$catalogs; do \ cat=`basename $$cat`; \ lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \ dir=$(localedir)/$$lang/LC_MESSAGES; \ $(mkdir_p) $(DESTDIR)$$dir; \ if test -r $$cat; then realcat=$$cat; else realcat=$(srcdir)/$$cat; fi; \ $(INSTALL_DATA) $$realcat $(DESTDIR)$$dir/$(DOMAIN).mo; \ echo "installing $$realcat as $(DESTDIR)$$dir/$(DOMAIN).mo"; \ for lc in '' $(EXTRA_LOCALE_CATEGORIES); do \ if test -n "$$lc"; then \ if (cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc 2>/dev/null) | grep ' -> ' >/dev/null; then \ link=`cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc | sed -e 's/^.* -> //'`; \ mv $(DESTDIR)$(localedir)/$$lang/$$lc $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ (cd $(DESTDIR)$(localedir)/$$lang/$$lc.old && \ for file in *; do \ if test -f $$file; then \ ln -s ../$$link/$$file $(DESTDIR)$(localedir)/$$lang/$$lc/$$file; \ fi; \ done); \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ else \ if test -d $(DESTDIR)$(localedir)/$$lang/$$lc; then \ :; \ else \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc; \ mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ fi; \ fi; \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \ ln -s ../LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo 2>/dev/null || \ ln $(DESTDIR)$(localedir)/$$lang/LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo 2>/dev/null || \ cp -p $(DESTDIR)$(localedir)/$$lang/LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \ echo "installing $$realcat link as $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo"; \ fi; \ done; \ done install-strip: install installdirs: installdirs-exec installdirs-data installdirs-exec: installdirs-data: installdirs-data-@USE_NLS@ if test "$(PACKAGE)" = "gettext-tools"; then \ $(mkdir_p) $(DESTDIR)$(gettextsrcdir); \ else \ : ; \ fi installdirs-data-no: installdirs-data-yes: $(mkdir_p) $(DESTDIR)$(datadir) @catalogs='$(CATALOGS)'; \ for cat in $$catalogs; do \ cat=`basename $$cat`; \ lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \ dir=$(localedir)/$$lang/LC_MESSAGES; \ $(mkdir_p) $(DESTDIR)$$dir; \ for lc in '' $(EXTRA_LOCALE_CATEGORIES); do \ if test -n "$$lc"; then \ if (cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc 2>/dev/null) | grep ' -> ' >/dev/null; then \ link=`cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc | sed -e 's/^.* -> //'`; \ mv $(DESTDIR)$(localedir)/$$lang/$$lc $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ (cd $(DESTDIR)$(localedir)/$$lang/$$lc.old && \ for file in *; do \ if test -f $$file; then \ ln -s ../$$link/$$file $(DESTDIR)$(localedir)/$$lang/$$lc/$$file; \ fi; \ done); \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ else \ if test -d $(DESTDIR)$(localedir)/$$lang/$$lc; then 
\ :; \ else \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc; \ mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ fi; \ fi; \ fi; \ done; \ done # Define this as empty until I found a useful application. installcheck: uninstall: uninstall-exec uninstall-data uninstall-exec: uninstall-data: uninstall-data-@USE_NLS@ if test "$(PACKAGE)" = "gettext-tools"; then \ for file in $(DISTFILES.common) Makevars.template; do \ rm -f $(DESTDIR)$(gettextsrcdir)/$$file; \ done; \ else \ : ; \ fi uninstall-data-no: uninstall-data-yes: catalogs='$(CATALOGS)'; \ for cat in $$catalogs; do \ cat=`basename $$cat`; \ lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \ for lc in LC_MESSAGES $(EXTRA_LOCALE_CATEGORIES); do \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \ done; \ done check: all info dvi ps pdf html tags TAGS ctags CTAGS ID: mostlyclean: rm -f remove-potcdate.sed rm -f stamp-poT rm -f core core.* $(DOMAIN).po $(DOMAIN).1po $(DOMAIN).2po *.new.po rm -fr *.o clean: mostlyclean distclean: clean rm -f Makefile Makefile.in POTFILES *.mo maintainer-clean: distclean @echo "This command is intended for maintainers to use;" @echo "it deletes files that may require special tools to rebuild." rm -f stamp-po $(GMOFILES) distdir = $(top_builddir)/$(PACKAGE)-$(VERSION)/$(subdir) dist distdir: $(MAKE) update-po @$(MAKE) dist2 # This is a separate target because 'update-po' must be executed before. dist2: stamp-po $(DISTFILES) dists="$(DISTFILES)"; \ if test "$(PACKAGE)" = "gettext-tools"; then \ dists="$$dists Makevars.template"; \ fi; \ if test -f $(srcdir)/$(DOMAIN).pot; then \ dists="$$dists $(DOMAIN).pot stamp-po"; \ fi; \ if test -f $(srcdir)/ChangeLog; then \ dists="$$dists ChangeLog"; \ fi; \ for i in 0 1 2 3 4 5 6 7 8 9; do \ if test -f $(srcdir)/ChangeLog.$$i; then \ dists="$$dists ChangeLog.$$i"; \ fi; \ done; \ if test -f $(srcdir)/LINGUAS; then dists="$$dists LINGUAS"; fi; \ for file in $$dists; do \ if test -f $$file; then \ cp -p $$file $(distdir) || exit 1; \ else \ cp -p $(srcdir)/$$file $(distdir) || exit 1; \ fi; \ done update-po: Makefile $(MAKE) $(DOMAIN).pot-update test -z "$(UPDATEPOFILES)" || $(MAKE) $(UPDATEPOFILES) $(MAKE) update-gmo # General rule for creating PO files. .nop.po-create: @lang=`echo $@ | sed -e 's/\.po-create$$//'`; \ echo "File $$lang.po does not exist. If you are a translator, you can create it through 'msginit'." 1>&2; \ exit 1 # General rule for updating PO files. .nop.po-update: @lang=`echo $@ | sed -e 's/\.po-update$$//'`; \ if test "$(PACKAGE)" = "gettext-tools"; then PATH=`pwd`/../src:$$PATH; fi; \ tmpdir=`pwd`; \ echo "$$lang:"; \ test "$(srcdir)" = . && cdcmd="" || cdcmd="cd $(srcdir) && "; \ echo "$${cdcmd}$(MSGMERGE) $$lang.po $(DOMAIN).pot -o $$lang.new.po"; \ cd $(srcdir); \ if $(MSGMERGE) $$lang.po $(DOMAIN).pot -o $$tmpdir/$$lang.new.po; then \ if cmp $$lang.po $$tmpdir/$$lang.new.po >/dev/null 2>&1; then \ rm -f $$tmpdir/$$lang.new.po; \ else \ if mv -f $$tmpdir/$$lang.new.po $$lang.po; then \ :; \ else \ echo "msgmerge for $$lang.po failed: cannot move $$tmpdir/$$lang.new.po to $$lang.po" 1>&2; \ exit 1; \ fi; \ fi; \ else \ echo "msgmerge for $$lang.po failed!" 1>&2; \ rm -f $$tmpdir/$$lang.new.po; \ fi $(DUMMYPOFILES): update-gmo: Makefile $(GMOFILES) @: Makefile: Makefile.in.in Makevars $(top_builddir)/config.status @POMAKEFILEDEPS@ cd $(top_builddir) \ && $(SHELL) ./config.status $(subdir)/$@.in po-directories force: # Tell versions [3.59,3.63) of GNU make not to export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT:
nordugrid-arc-6.14.0/po/sv.po
# Translation file for the Advanced Resource Connector (Arc)
msgid ""
msgstr ""
"Project-Id-Version: Arc\n"
"Report-Msgid-Bugs-To: support@nordugrid.org\n"
"POT-Creation-Date: 2021-12-02 15:25+0100\n"
"PO-Revision-Date: 2021-11-26 10:49+0100\n"
"Last-Translator: Mattias Ellert \n"
"Language-Team: Swedish\n"
"Language: sv\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=n != 1;\n"

#: src/clients/compute/arccat.cpp:35 src/clients/compute/arcclean.cpp:34
#: src/clients/compute/arcget.cpp:35 src/clients/compute/arckill.cpp:33
#: src/clients/compute/arcrenew.cpp:32 src/clients/compute/arcresub.cpp:36
#: src/clients/compute/arcresume.cpp:32 src/clients/compute/arcstat.cpp:34
msgid "[job ...]"
msgstr "[jobb ...]"

#: src/clients/compute/arccat.cpp:36
msgid ""
"The arccat command performs the cat command on the stdout, stderr or grid\n"
"manager's error log of the job."
msgstr ""
"arccat-kommandot utför cat-kommandot på jobbets stdout, stderr eller\n"
"gridmanager-fellogg."

#: src/clients/compute/arccat.cpp:43 src/clients/compute/arcclean.cpp:41
#: src/clients/compute/arcget.cpp:42 src/clients/compute/arcinfo.cpp:45
#: src/clients/compute/arckill.cpp:40 src/clients/compute/arcrenew.cpp:37
#: src/clients/compute/arcresub.cpp:41 src/clients/compute/arcresume.cpp:37
#: src/clients/compute/arcstat.cpp:42 src/clients/compute/arcsub.cpp:53
#: src/clients/compute/arcsync.cpp:147 src/clients/compute/arctest.cpp:64
#: src/clients/credentials/arcproxy.cpp:457 src/clients/data/arccp.cpp:641
#: src/clients/data/arcls.cpp:347 src/clients/data/arcmkdir.cpp:125
#: src/clients/data/arcrename.cpp:136 src/clients/data/arcrm.cpp:151
#: src/hed/daemon/unix/main_unix.cpp:341
#: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1263
#: src/hed/libs/data/DataExternalHelper.cpp:358
#, c-format
msgid "%s version %s"
msgstr "%s version %s"

#: src/clients/compute/arccat.cpp:52 src/clients/compute/arcclean.cpp:50
#: src/clients/compute/arcget.cpp:51 src/clients/compute/arcinfo.cpp:53
#: src/clients/compute/arckill.cpp:49 src/clients/compute/arcrenew.cpp:46
#: src/clients/compute/arcresub.cpp:50 src/clients/compute/arcresume.cpp:46
#: src/clients/compute/arcstat.cpp:51 src/clients/compute/arcsub.cpp:62
#: src/clients/compute/arcsync.cpp:156 src/clients/compute/arctest.cpp:86
#: src/clients/credentials/arcproxy.cpp:465 src/clients/data/arccp.cpp:648
#: src/clients/data/arcls.cpp:355 src/clients/data/arcmkdir.cpp:133
#: src/clients/data/arcrename.cpp:144 src/clients/data/arcrm.cpp:160
#: src/libs/data-staging/DataDeliveryLocalComm.cpp:174
#: src/services/a-rex/grid-manager/GridManager.cpp:110
#: src/services/a-rex/grid-manager/log/JobLog.cpp:139
#, c-format
msgid "Running command: %s"
msgstr "Kör kommando: %s"

#: src/clients/compute/arccat.cpp:63 src/clients/compute/arcclean.cpp:61
#: src/clients/compute/arcget.cpp:62 src/clients/compute/arcinfo.cpp:65
#: src/clients/compute/arckill.cpp:60 src/clients/compute/arcrenew.cpp:57
#: src/clients/compute/arcresub.cpp:54 src/clients/compute/arcresume.cpp:50
#: src/clients/compute/arcstat.cpp:62 src/clients/compute/arcsub.cpp:66
#: src/clients/compute/arcsync.cpp:167
src/clients/compute/arctest.cpp:90 #: src/clients/data/arccp.cpp:671 src/clients/data/arcls.cpp:377 #: src/clients/data/arcmkdir.cpp:155 src/clients/data/arcrename.cpp:166 #: src/clients/data/arcrm.cpp:182 msgid "Failed configuration initialization" msgstr "Misslyckades med att initiera inställningar" #: src/clients/compute/arccat.cpp:78 src/clients/compute/arcclean.cpp:76 #: src/clients/compute/arcget.cpp:90 src/clients/compute/arckill.cpp:75 #: src/clients/compute/arcrenew.cpp:70 src/clients/compute/arcresub.cpp:85 #: src/clients/compute/arcresume.cpp:72 src/clients/compute/arcstat.cpp:71 #, c-format msgid "Cannot read specified jobid file: %s" msgstr "Kan inte läsa angiven jobb-id-fil: %s" #: src/clients/compute/arccat.cpp:89 src/clients/compute/arcclean.cpp:87 #: src/clients/compute/arcget.cpp:101 src/clients/compute/arckill.cpp:86 #: src/clients/compute/arcrenew.cpp:81 src/clients/compute/arcresub.cpp:99 #: src/clients/compute/arcresume.cpp:83 src/clients/compute/arcstat.cpp:105 msgid "No jobs given" msgstr "Inga jobb angivna" #: src/clients/compute/arccat.cpp:102 src/clients/compute/arcclean.cpp:100 #: src/clients/compute/arcget.cpp:114 src/clients/compute/arckill.cpp:99 #: src/clients/compute/arcrenew.cpp:94 src/clients/compute/arcresub.cpp:109 #: src/clients/compute/arcresume.cpp:96 src/clients/compute/arcstat.cpp:117 #, c-format msgid "Job list file (%s) doesn't exist" msgstr "Jobblistfil (%s) existerar inte" #: src/clients/compute/arccat.cpp:109 src/clients/compute/arcclean.cpp:107 #: src/clients/compute/arcget.cpp:121 src/clients/compute/arckill.cpp:106 #: src/clients/compute/arcrenew.cpp:101 src/clients/compute/arcresub.cpp:116 #: src/clients/compute/arcresume.cpp:103 src/clients/compute/arcstat.cpp:124 #: src/clients/compute/arctest.cpp:335 #, c-format msgid "Unable to read job information from file (%s)" msgstr "Misslyckades med att läsa jobbinformation frÃ¥n fil (%s)" #: src/clients/compute/arccat.cpp:118 src/clients/compute/arcclean.cpp:115 #: src/clients/compute/arcget.cpp:129 src/clients/compute/arckill.cpp:114 #: src/clients/compute/arcrenew.cpp:110 src/clients/compute/arcresub.cpp:124 #: src/clients/compute/arcresume.cpp:112 src/clients/compute/arcstat.cpp:133 #, c-format msgid "Warning: Job not found in job list: %s" msgstr "Varning: Jobb finns inte i jobblista: %s" #: src/clients/compute/arccat.cpp:131 src/clients/compute/arcclean.cpp:170 #: src/clients/compute/arcget.cpp:142 src/clients/compute/arckill.cpp:126 #: src/clients/compute/arcrenew.cpp:122 src/clients/compute/arcresub.cpp:136 #: src/clients/compute/arcresume.cpp:124 msgid "No jobs" msgstr "Inga jobb" #: src/clients/compute/arccat.cpp:146 #, c-format msgid "Could not create temporary file \"%s\"" msgstr "Kunde inte skapa temporär fil \"%s\"" #: src/clients/compute/arccat.cpp:147 src/clients/compute/arccat.cpp:153 #, c-format msgid "Cannot create output of %s for any jobs" msgstr "Kan inte skapa %s-utdata för nÃ¥got jobb" #: src/clients/compute/arccat.cpp:154 #, c-format msgid "Invalid destination URL %s" msgstr "Ogiltig destinations-URL %s" #: src/clients/compute/arccat.cpp:172 #, c-format msgid "Job deleted: %s" msgstr "Jobb borttaget: %s" #: src/clients/compute/arccat.cpp:182 #, c-format msgid "Job has not started yet: %s" msgstr "Jobb har inte startat än: %s" #: src/clients/compute/arccat.cpp:223 #, c-format msgid "Cannot determine the %s location: %s" msgstr "Kan inte bestämma plats för %s: %s" #: src/clients/compute/arccat.cpp:228 #, c-format msgid "Cannot create output of %s for job (%s): Invalid 
source %s"
msgstr "Kan inte skapa %s-utdata för jobb (%s): Ogiltig källa %s"

#: src/clients/compute/arccat.cpp:241
#, c-format
msgid "Catting %s for job %s"
msgstr "Visar %s för jobb %s"

#: src/clients/compute/arcclean.cpp:35
msgid "The arcclean command removes a job from the computing resource."
msgstr "arcclean-kommandot tar bort ett jobb från en beräkningsresurs."

#: src/clients/compute/arcclean.cpp:139
msgid ""
"You are about to remove jobs from the job list for which no information "
"could be\n"
"found. NOTE: Recently submitted jobs might not have appeared in the "
"information\n"
"system, and this action will also remove such jobs."
msgstr ""
"Du är på väg att ta bort jobb från jobblistan för vilka ingen information\n"
"kunde hittas. Notera att nyligen insända jobb kan saknas i "
"informationssystemet\n"
"och att denna handling kommer att ta bort också sådana jobb."

#: src/clients/compute/arcclean.cpp:142
msgid "Are you sure you want to clean jobs missing information?"
msgstr "Är du säker på att du vill ta bort jobb för vilka information saknas?"

#: src/clients/compute/arcclean.cpp:143 src/clients/compute/arcsync.cpp:221
msgid "y"
msgstr "j"

#: src/clients/compute/arcclean.cpp:143 src/clients/compute/arcsync.cpp:221
msgid "n"
msgstr "n"

#: src/clients/compute/arcclean.cpp:148
msgid "Jobs missing information will not be cleaned!"
msgstr "Jobb som saknar information kommer inte att tas bort!"

#: src/clients/compute/arcclean.cpp:164 src/clients/compute/arcresub.cpp:177
#: src/clients/compute/arctest.cpp:339
#, c-format
msgid "Warning: Failed to write job information to file (%s)"
msgstr "Varning: Misslyckades med att skriva jobbinformation till fil (%s)"

#: src/clients/compute/arcclean.cpp:165
msgid ""
"        Run 'arcclean -s Undefined' to remove cleaned jobs from job list"
msgstr ""
"        Kör 'arcclean -s Undefined' för att ta bort borttagna jobb från "
"jobblistan"

#: src/clients/compute/arcclean.cpp:174
#, c-format
msgid "Jobs processed: %d, deleted: %d"
msgstr "Jobb behandlade: %d, borttagna: %d"

#: src/clients/compute/arcget.cpp:36
msgid "The arcget command is used for retrieving the results from a job."
msgstr "arcget-kommandot används för att hämta resultatet av ett jobb."

#: src/clients/compute/arcget.cpp:78
#, c-format
msgid "Job download directory from user configuration file: %s"
msgstr "Jobbnedladdningskatalog från användarinställningsfil: %s"

#: src/clients/compute/arcget.cpp:81
msgid "Job download directory will be created in present working directory."
msgstr "Jobbnedladdningskatalog kommer att skapas i nuvarande arbetskatalog."
#: src/clients/compute/arcget.cpp:85 #, c-format msgid "Job download directory: %s" msgstr "Jobbnedladdningskatalog: %s" #: src/clients/compute/arcget.cpp:152 #, c-format msgid "Unable to create directory for storing results (%s) - %s" msgstr "Misslyckades med att skapa katalog för att lagra resultat (%s) - %s" #: src/clients/compute/arcget.cpp:162 #, c-format msgid "Results stored at: %s" msgstr "Resultat lagrade i: %s" #: src/clients/compute/arcget.cpp:174 src/clients/compute/arckill.cpp:142 msgid "Warning: Some jobs were not removed from server" msgstr "Varning: NÃ¥gra jobb togs inte bort frÃ¥n servern" #: src/clients/compute/arcget.cpp:175 src/clients/compute/arcget.cpp:182 #: src/clients/compute/arckill.cpp:143 msgid " Use arcclean to remove retrieved jobs from job list" msgstr " Använd arcclean för att ta bort hämtade jobb frÃ¥n jobblistan" #: src/clients/compute/arcget.cpp:181 src/clients/compute/arckill.cpp:149 #: src/clients/compute/arcresub.cpp:207 #, c-format msgid "Warning: Failed removing jobs from file (%s)" msgstr "Varning: Misslyckades med att ta bort jobb frÃ¥n fil (%s)" #: src/clients/compute/arcget.cpp:186 #, c-format msgid "" "Jobs processed: %d, successfully retrieved: %d, successfully cleaned: %d" msgstr "" "Jobb behandlade: %d, framgÃ¥ngsrikt hämtade: %d, framgÃ¥ngsrikt borttagna: %d" #: src/clients/compute/arcget.cpp:190 #, c-format msgid "Jobs processed: %d, successfully retrieved: %d" msgstr "Jobb behandlade: %d, framgÃ¥ngsrikt hämtade: %d" #: src/clients/compute/arcinfo.cpp:34 msgid "[resource ...]" msgstr "[resurs ...]" #: src/clients/compute/arcinfo.cpp:35 msgid "" "The arcinfo command is used for obtaining the status of computing resources " "on the Grid." msgstr "" "arcinfo-kommandot används för att erhÃ¥lla statusen pÃ¥ beräkningsresurser pÃ¥ " "griden." #: src/clients/compute/arcinfo.cpp:142 msgid "Information endpoint" msgstr "Informationsändpunkt" #: src/clients/compute/arcinfo.cpp:153 msgid "Submission endpoint" msgstr "Insändningsändpunkt" #: src/clients/compute/arcinfo.cpp:155 msgid "status" msgstr "status" #: src/clients/compute/arcinfo.cpp:157 msgid "interface" msgstr "gränssnitt" #: src/clients/compute/arcinfo.cpp:176 msgid "ERROR: Failed to retrieve information from the following endpoints:" msgstr "Fel: Misslyckades med att hämta information frÃ¥n följande ändpunkter:" #: src/clients/compute/arcinfo.cpp:189 msgid "ERROR: Failed to retrieve information" msgstr "Fel: Misslyckades med att hämta information" #: src/clients/compute/arcinfo.cpp:191 msgid "from the following endpoints:" msgstr "frÃ¥n följande ändpunkter:" #: src/clients/compute/arckill.cpp:34 msgid "The arckill command is used to kill running jobs." msgstr "arckill-kommandot används för att avbryta körande jobb." 
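
# Editor's note (not part of the original catalog): the arcget/arckill/arcclean
# messages above describe the client-side job lifecycle. A minimal, hypothetical
# shell session illustrating that flow; the job ID is a placeholder and only the
# option quoted in these messages ('arcclean -s Undefined') is taken from them:
#
#   arcget <jobid>            # retrieve the results of a finished job
#   arckill <jobid>           # kill a job that is still running
#   arcclean -s Undefined     # remove killed/retrieved jobs from the job list
#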
#: src/clients/compute/arckill.cpp:150
msgid ""
"        Run 'arcclean -s Undefined' to remove killed jobs from job list"
msgstr ""
"        Kör 'arcclean -s Undefined' för att ta bort avbrutna jobb från "
"jobblistan"

#: src/clients/compute/arckill.cpp:153
#, c-format
msgid "Jobs processed: %d, successfully killed: %d, successfully cleaned: %d"
msgstr ""
"Jobb behandlade: %d, framgångsrikt avbrutna: %d, framgångsrikt borttagna: %d"

#: src/clients/compute/arckill.cpp:155
#, c-format
msgid "Jobs processed: %d, successfully killed: %d"
msgstr "Jobb behandlade: %d, framgångsrikt avbrutna: %d"

#: src/clients/compute/arcrenew.cpp:128
#, c-format
msgid "Jobs processed: %d, renewed: %d"
msgstr "Jobb behandlade: %d, förnyade: %d"

#: src/clients/compute/arcresub.cpp:79
msgid "--same and --not-same cannot be specified together."
msgstr "--same och --not-same kan inte anges samtidigt."

#: src/clients/compute/arcresub.cpp:153
msgid ""
"It is not possible to resubmit jobs without new target information discovery"
msgstr "Det går inte att återinsända jobb utan en ny targetinformationssökning"

#: src/clients/compute/arcresub.cpp:166
msgid "No jobs to resubmit with the specified status"
msgstr "Inga jobb att återinsända med den angivna statusen"

#: src/clients/compute/arcresub.cpp:173 src/clients/compute/submit.cpp:34
#, c-format
msgid "Job submitted with jobid: %s"
msgstr "Jobb insänt med jobb-id: %s"

#: src/clients/compute/arcresub.cpp:178
msgid "    To recover missing jobs, run arcsync"
msgstr "    För att återställa saknade jobb, kör arcsync"

#: src/clients/compute/arcresub.cpp:183
#, c-format
msgid "Cannot write jobids to file (%s)"
msgstr "Kan inte skriva jobb-id till fil (%s)"

#: src/clients/compute/arcresub.cpp:194
#, c-format
msgid ""
"Resubmission of job (%s) succeeded, but killing the job failed - it will "
"still appear in the job list"
msgstr ""
"Återinsändning av jobb (%s) lyckades, men avbrytandet av jobbet misslyckades "
"- det kommer fortfarande att synas i jobblistan"

#: src/clients/compute/arcresub.cpp:203
#, c-format
msgid ""
"Resubmission of job (%s) succeeded, but cleaning the job failed - it will "
"still appear in the job list"
msgstr ""
"Återinsändning av jobb (%s) lyckades, men borttagandet av jobbet "
"misslyckades - det kommer fortfarande att synas i jobblistan"

#: src/clients/compute/arcresub.cpp:208
msgid "    Use arcclean to remove non-existing jobs"
msgstr "    Använd arcclean för att ta bort icke existerande jobb"

#: src/clients/compute/arcresub.cpp:215
msgid "Job resubmission summary:"
msgstr "Jobbåterinsändningssammanfattning:"

#: src/clients/compute/arcresub.cpp:217
#, c-format
msgid "%d of %d jobs were resubmitted"
msgstr "%d av %d jobb återinsändes"

#: src/clients/compute/arcresub.cpp:219
#, c-format
msgid "The following %d were not resubmitted"
msgstr "Följande %d återinsändes inte"

#: src/clients/compute/arcresume.cpp:130
#, c-format
msgid "Jobs processed: %d, resumed: %d"
msgstr "Jobb behandlade: %d, återupptagna: %d"

#: src/clients/compute/arcstat.cpp:35
msgid ""
"The arcstat command is used for obtaining the status of jobs that have\n"
"been submitted to Grid enabled resources."
msgstr ""
"arcstat-kommandot används för att erhålla statusen på jobb som sänts in\n"
"till gridresurser."

#: src/clients/compute/arcstat.cpp:79
msgid "The 'sort' and 'rsort' flags cannot be specified at the same time."
msgstr "Flaggorna 'sort' och 'rsort' kan inte anges samtidigt."
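
# Editor's note (not part of the original catalog): a hypothetical resubmission
# example for the arcresub messages above. The --same / --not-same options are
# the ones quoted in the msgids (they are mutually exclusive); the job ID is a
# placeholder:
#
#   arcresub --same <jobid>       # resubmit to the same resource
#   arcresub --not-same <jobid>   # resubmit, avoiding the original resource
#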
#: src/clients/compute/arcstat.cpp:149 msgid "No jobs found, try later" msgstr "Inga jobb hittades, försök senare" #: src/clients/compute/arcstat.cpp:193 #, c-format msgid "Status of %d jobs was queried, %d jobs returned information" msgstr "FrÃ¥gade om status för %d jobb, %d jobb returnerade information" #: src/clients/compute/arcsub.cpp:45 msgid "[filename ...]" msgstr "[filnamn ...]" #: src/clients/compute/arcsub.cpp:46 msgid "" "The arcsub command is used for submitting jobs to Grid enabled computing\n" "resources." msgstr "" "arcsub-kommandot används för att sända in jobb till beräkningsresurser pÃ¥\n" "griden." #: src/clients/compute/arcsub.cpp:94 msgid "No job description input specified" msgstr "Ingen jobbeskrivning angiven" #: src/clients/compute/arcsub.cpp:107 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:582 #, c-format msgid "Can not open job description file: %s" msgstr "Kan inte öppna jobbeskrivningsfil: %s" #: src/clients/compute/arcsub.cpp:135 src/clients/compute/arcsub.cpp:163 msgid "Invalid JobDescription:" msgstr "Ogiltig jobbeskrivning:" #: src/clients/compute/arcsub.cpp:198 src/clients/compute/arctest.cpp:229 msgid "" "Cannot adapt job description to the submission target when information " "discovery is turned off" msgstr "" "Kan inte anpassa jobbeskrivning till insändnings-target när " "informationssökning är avslagen" #: src/clients/compute/arcsync.cpp:66 src/clients/compute/arcsync.cpp:174 #, c-format msgid "Warning: Unable to open job list file (%s), unknown format" msgstr "Varning: Kan inte öppna jobblistfil (%s), okänt format" #: src/clients/compute/arcsync.cpp:76 msgid "Found the following jobs:" msgstr "Hittade följande jobb:" #: src/clients/compute/arcsync.cpp:86 msgid "Total number of jobs found: " msgstr "Totalt antal hittade jobb: " #: src/clients/compute/arcsync.cpp:98 msgid "Found the following new jobs:" msgstr "Hittade följande nya jobb:" #: src/clients/compute/arcsync.cpp:108 msgid "Total number of new jobs found: " msgstr "Totalt antal hittade nya jobb: " #: src/clients/compute/arcsync.cpp:113 #, c-format msgid "ERROR: Failed to write job information to file (%s)" msgstr "Fel: Misslyckades med att skriva jobbinformation till fil (%s)" #: src/clients/compute/arcsync.cpp:140 msgid "" "The arcsync command synchronizes your local job list with the information " "at\n" "the given resources or index servers." msgstr "" "arcsync-kommandot synkroniserar din lokala jobblista med information frÃ¥n\n" "de angivna resurserna eller indexservrarna." #: src/clients/compute/arcsync.cpp:180 #, c-format msgid "Warning: Unable to read local list of jobs from file (%s)" msgstr "Varning: Kunde inte läsa lokal jobblista frÃ¥n fil (%s)" #: src/clients/compute/arcsync.cpp:185 #, c-format msgid "Warning: Unable to truncate local list of jobs in file (%s)" msgstr "Varning: Kunde inte trunkera lokal jobblista i fil (%s)" #: src/clients/compute/arcsync.cpp:191 #, c-format msgid "Warning: Unable to create job list file (%s), jobs list is destroyed" msgstr "Varning: Kunde inte skapa jobblistfil (%s), jobblista har raderats" #: src/clients/compute/arcsync.cpp:195 #, c-format msgid "" "Warning: Failed to write local list of jobs into file (%s), jobs list is " "destroyed" msgstr "" "Varning: Misslyckades med att skriva lokal jobblista till fil (%s), " "jobblista har raderats" #: src/clients/compute/arcsync.cpp:215 msgid "" "Synchronizing the local list of active jobs with the information in the\n" "information system can result in some inconsistencies. 
Very recently " "submitted\n" "jobs might not yet be present, whereas jobs very recently scheduled for\n" "deletion can still be present." msgstr "" "Att synkronisera den lokal listan med aktiva jobb med informationen i\n" "informationssystemet kan resultera i bristande överensstämmelse.\n" "Nyligen insända jobb kan ännu saknas i informationssystemet, medan jobb\n" "som nyligen schemalagts för borttagning fortfarande kan finnas kvar." #: src/clients/compute/arcsync.cpp:220 msgid "Are you sure you want to synchronize your local job list?" msgstr "Är du säker pÃ¥ att du vill synkronisera din lokala jobblista?" #: src/clients/compute/arcsync.cpp:225 msgid "Cancelling synchronization request" msgstr "Avbryter synkroniseringsbegäran" #: src/clients/compute/arcsync.cpp:243 msgid "" "No services specified. Please configure default services in the client " "configuration, or specify a cluster or index (-c or -g options, see arcsync -" "h)." msgstr "" "Inga tjänster angivna. Konfigurera förvalda tjänster i " "användarinställningarna, eller ange ett kluster eller index (alternativ -c " "eller -g, se arcsync -h)." #: src/clients/compute/arctest.cpp:57 msgid " " msgstr " " #: src/clients/compute/arctest.cpp:58 msgid "The arctest command is used for testing clusters as resources." msgstr "arctest-kommandot används för att testa kluster som resurser." #: src/clients/compute/arctest.cpp:70 msgid "" "Nothing to do:\n" "you have to either specify a test job id with -J (--job)\n" "or query information about the certificates with -E (--certificate)\n" msgstr "" "Inget att göra:\n" "du mÃ¥ste antingen ange ett test-jobb-id med -J (--job)\n" "eller frÃ¥ga om information om certifikaten med -E (--certificate)\n" #: src/clients/compute/arctest.cpp:77 msgid "" "For the 1st test job you also have to specify a runtime value with -r (--" "runtime) option." msgstr "" "För det första test-jobbet mÃ¥ste du ocksÃ¥ ange en körtid med alternativet -r " "(--runtime)." 
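
# Editor's note (not part of the original catalog): a small, hypothetical
# example of the arctest invocations the messages above refer to, using only
# the options named in the msgids (-E/--certificate, -J/--job, -r/--runtime);
# the numeric values are illustrative assumptions:
#
#   arctest -E            # print information about the installed certificates
#   arctest -J 2          # submit predefined test job number 2
#   arctest -J 1 -r 5     # the 1st test job additionally needs a runtime (-r)
#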
#: src/clients/compute/arctest.cpp:111 msgid "Certificate information:" msgstr "Certifikatinformation:" #: src/clients/compute/arctest.cpp:115 msgid "No user-certificate found" msgstr "Hittade inget användarcertifikat" #: src/clients/compute/arctest.cpp:118 #, c-format msgid "Certificate: %s" msgstr "Certifikat: %s" #: src/clients/compute/arctest.cpp:120 #, c-format msgid "Subject name: %s" msgstr "Subjekt-namn: %s" #: src/clients/compute/arctest.cpp:121 #, c-format msgid "Valid until: %s" msgstr "Giltigt till: %s" #: src/clients/compute/arctest.cpp:125 msgid "Unable to determine certificate information" msgstr "Kunde inte bestämma certifikatinformation" #: src/clients/compute/arctest.cpp:129 msgid "Proxy certificate information:" msgstr "Proxycertifikatinformation:" #: src/clients/compute/arctest.cpp:131 msgid "No proxy found" msgstr "Hittade ingen proxy" #: src/clients/compute/arctest.cpp:134 #, c-format msgid "Proxy: %s" msgstr "Proxy: %s" #: src/clients/compute/arctest.cpp:135 #, c-format msgid "Proxy-subject: %s" msgstr "Proxy-subjekt: %s" #: src/clients/compute/arctest.cpp:137 msgid "Valid for: Proxy expired" msgstr "Giltig i: Proxyns giltighetstid har gÃ¥tt ut" #: src/clients/compute/arctest.cpp:139 msgid "Valid for: Proxy not valid" msgstr "Giltig i: Proxyn är ej giltig" #: src/clients/compute/arctest.cpp:141 #, c-format msgid "Valid for: %s" msgstr "Giltig i: %s" #: src/clients/compute/arctest.cpp:146 #, c-format msgid "Certificate issuer: %s" msgstr "Certifikatutfärdare: %s" #: src/clients/compute/arctest.cpp:150 msgid "CA-certificates installed:" msgstr "Installerade CA-certifikat:" #: src/clients/compute/arctest.cpp:172 msgid "Unable to detect if issuer certificate is installed." msgstr "Kunde inte detektera om utfärdarcertifikat är installerat." #: src/clients/compute/arctest.cpp:175 msgid "Your issuer's certificate is not installed" msgstr "Din utfärdares certifikat är inte installerat" #: src/clients/compute/arctest.cpp:189 #, c-format msgid "No test-job, with ID \"%d\"" msgstr "Inget test-jobb, med ID \"%d\"" #: src/clients/compute/arctest.cpp:245 #, c-format msgid "Unable to load broker %s" msgstr "Kunde inte ladda in mäklare %s" #: src/clients/compute/arctest.cpp:248 #: src/hed/libs/compute/BrokerPlugin.cpp:106 #, c-format msgid "Broker %s loaded" msgstr "Mäklare %s har laddats in" #: src/clients/compute/arctest.cpp:270 msgid "Test aborted because no resource returned any information" msgstr "Testet avbröts eftersom ingen resurs returnerade nÃ¥gon information" #: src/clients/compute/arctest.cpp:272 src/clients/compute/submit.cpp:170 msgid "" "Unable to adapt job description to any resource, no resource information " "could be obtained." msgstr "" "Kunde inte anpassa jobbeskrivningen till nÃ¥gon resurs, ingen " "resursinformation kunde erhÃ¥llas." 
#: src/clients/compute/arctest.cpp:273 src/clients/compute/submit.cpp:171 msgid "Original job description is listed below:" msgstr "Ursprunglig jobbeskrivning visas nedan:" #: src/clients/compute/arctest.cpp:283 msgid "" "ERROR: Test aborted because no suitable resources were found for the test-job" msgstr "" "Fel: Testet avbröts eftersom inga lämpliga resurser hittades för test-jobbet" #: src/clients/compute/arctest.cpp:285 msgid "" "ERROR: Dumping job description aborted because no suitable resources were " "found for the test-job" msgstr "" "Fel: Visning av jobbeskrivning avbröts eftersom inga lämpliga resurser " "hittades för test-jobbet" #: src/clients/compute/arctest.cpp:294 #, c-format msgid "Submitting test-job %d:" msgstr "Sänder in test-jobb %d:" #: src/clients/compute/arctest.cpp:298 #, c-format msgid "Client version: nordugrid-arc-%s" msgstr "Klientversion: nordugrid-arc-%s" #: src/clients/compute/arctest.cpp:306 #, c-format msgid "Cannot write jobid (%s) to file (%s)" msgstr "Kan inte skriva jobb-id (%s) till fil (%s)" #: src/clients/compute/arctest.cpp:307 #, c-format msgid "Test submitted with jobid: %s" msgstr "Test insänt med jobb-id: %s" #: src/clients/compute/arctest.cpp:322 #, c-format msgid "Computing service: %s" msgstr "Beräkningstjänst: %s" #: src/clients/compute/arctest.cpp:328 msgid "Test failed, no more possible targets" msgstr "Test misslyckades, inga fler möjliga target" #: src/clients/compute/arctest.cpp:341 src/clients/compute/submit.cpp:49 msgid "To recover missing jobs, run arcsync" msgstr "För att Ã¥terställa saknade jobb, kör arcsync" #: src/clients/compute/arctest.cpp:354 src/clients/compute/submit.cpp:200 #, c-format msgid "" "Unable to prepare job description according to needs of the target resource " "(%s)." msgstr "" "Kunde inte förbereda jobbeskrivningen enligt target-resursens behov (%s)." #: src/clients/compute/arctest.cpp:364 src/clients/compute/submit.cpp:216 #, c-format msgid "" "An error occurred during the generation of job description to be sent to %s" msgstr "" "Ett fel inträffade under skapandet av jobbeskrivningen som ska sändas till %s" #: src/clients/compute/arctest.cpp:368 src/clients/compute/submit.cpp:220 #, c-format msgid "Job description to be sent to %s:" msgstr "Jobbeskrivning som skall sändas till: %s" #: src/clients/compute/submit.cpp:40 #, c-format msgid "Cannot write job IDs to file (%s)" msgstr "Kan inte skriva jobb-id tillfil (%s)" #: src/clients/compute/submit.cpp:45 #, c-format msgid "Unable to open job list file (%s), unknown format" msgstr "Kan inte öppna jobblistfil (%s), okänt format" #: src/clients/compute/submit.cpp:47 #, c-format msgid "Failed to write job information to database (%s)" msgstr "Misslyckades med att skriva jobbinformation till databas (%s)" #: src/clients/compute/submit.cpp:51 #, c-format msgid "Record about new job successfully added to the database (%s)" msgstr "Post om nytt jobb framgÃ¥ngsrikt tillagd till databasen (%s)" #: src/clients/compute/submit.cpp:57 msgid "Job submission summary:" msgstr "Jobbinsändningssammanfattning:" #: src/clients/compute/submit.cpp:59 #, c-format msgid "%d of %d jobs were submitted" msgstr "%d av %d jobb sändes in" #: src/clients/compute/submit.cpp:61 msgid "The following jobs were not submitted:" msgstr "Följande jobb sändes inte in:" #: src/clients/compute/submit.cpp:65 msgid "Job nr." msgstr "Jobb nr." 
#: src/clients/compute/submit.cpp:75 #, c-format msgid "ERROR: Unable to load broker %s" msgstr "Fel: Kunde inte ladda in mäklare %s" #: src/clients/compute/submit.cpp:79 msgid "" "ERROR: Job submission aborted because no resource returned any information" msgstr "" "Fel: Jobbinsändning avbröts eftersom inga resurser returnerade nÃ¥gon " "information" #: src/clients/compute/submit.cpp:83 msgid "ERROR: One or multiple job descriptions was not submitted." msgstr "Fel: En eller flera jobbeskrivningar sändes inte in." #: src/clients/compute/submit.cpp:100 #, c-format msgid "" "A computing resource using the GridFTP interface was requested, but\n" "%sthe corresponding plugin could not be loaded. Is the plugin installed?\n" "%sIf not, please install the package 'nordugrid-arc-plugins-globus'.\n" "%sDepending on your type of installation the package name might differ." msgstr "" "En beräkningsresurs som använder GridFTP-gränssnittet begärdes, men\n" "%smotsvarande plugin kunde inte laddas in. Är pluginen installerad?\n" "%sOm inte, installera paketet 'nordugrid-arc-plugins-globus'.\n" "%sBeroende pÃ¥ din installationtyp kan paketnamnet variera." #: src/clients/compute/submit.cpp:125 #, c-format msgid "Removing endpoint %s: It has an unrequested interface (%s)." msgstr "Tar bort ändpunkt %s: Den har ett icke begärt gränssnitt (%s)." #: src/clients/compute/submit.cpp:183 #, c-format msgid "Dumping job description aborted: Unable to load broker %s" msgstr "Visning av jobbeskrivning avbruten: kan inte ladda in mäklare %s" #: src/clients/compute/submit.cpp:238 msgid "" "Unable to prepare job description according to needs of the target resource." msgstr "Kan inte förbereda jobbeskrivning enligt target-resursens behov." #: src/clients/compute/submit.cpp:322 src/clients/compute/submit.cpp:352 #, c-format msgid "Service endpoint %s (type %s) added to the list for resource discovery" msgstr "Tjänsteändpunkt %s (typ %s) lagd till i listan för resurssökning" #: src/clients/compute/submit.cpp:332 msgid "" "There are no endpoints in registry that match requested info endpoint type" msgstr "" "Det finns inga ändpunkter i registret som matchar den begärda " "informationsändpunktstypen." 
#: src/clients/compute/submit.cpp:373
#, c-format
msgid "Service endpoint %s (type %s) added to the list for direct submission"
msgstr "Tjänsteändpunkt %s (typ %s) lagd till i listan för direktinsändning"

#: src/clients/compute/submit.cpp:381
msgid ""
"There are no endpoints in registry that match requested submission endpoint "
"type"
msgstr ""
"Det finns inga ändpunkter i registret som matchar den begärda "
"insändningsändpunktstypen"

#: src/clients/compute/utils.cpp:109
#, c-format
msgid "Types of execution services that %s is able to submit jobs to:"
msgstr "Typer av beräkningstjänster som %s kan sända in jobb till:"

#: src/clients/compute/utils.cpp:112
#, c-format
msgid "Types of registry services that %s is able to collect information from:"
msgstr "Typer av registertjänster som %s kan samla in information från:"

#: src/clients/compute/utils.cpp:115
#, c-format
msgid ""
"Types of local information services that %s is able to collect information "
"from:"
msgstr ""
"Typer av lokala informationstjänster som %s kan samla in information från:"

#: src/clients/compute/utils.cpp:118
#, c-format
msgid ""
"Types of local information services that %s is able to collect job "
"information from:"
msgstr ""
"Typer av lokala informationstjänster som %s kan samla in jobbinformation "
"från:"

#: src/clients/compute/utils.cpp:121
#, c-format
msgid "Types of services that %s is able to manage jobs at:"
msgstr "Typer av tjänster som %s kan hantera jobb på:"

#: src/clients/compute/utils.cpp:124
#, c-format
msgid "Job description languages supported by %s:"
msgstr "Jobbeskrivningsspråk som stöds av %s:"

#: src/clients/compute/utils.cpp:127
#, c-format
msgid "Brokers available to %s:"
msgstr "Mäklare tillgängliga för %s:"

#: src/clients/compute/utils.cpp:150
#, c-format
msgid ""
"Default broker (%s) is not available. When using %s a broker should be "
"specified explicitly (-b option)."
msgstr ""
"Förvald mäklare (%s) är inte tillgänglig. När %s används måste en mäklare "
"anges explicit (alternativ -b)."

#: src/clients/compute/utils.cpp:160
msgid "Proxy expired. Job submission aborted. Please run 'arcproxy'!"
msgstr "Proxyns livstid har gått ut. Jobbinsändning avbruten. Kör 'arcproxy'!"

#: src/clients/compute/utils.cpp:165
msgid ""
"Cannot find any proxy. This application currently cannot run without a "
"proxy.\n"
" If you have the proxy file in a non-default location,\n"
" please make sure the path is specified in the client configuration file.\n"
" If you don't have a proxy yet, please run 'arcproxy'!"
msgstr ""
"Kan inte hitta någon proxy. Detta program kan för närvarande inte köras utan "
"en proxy.\n"
" Om du har proxyfilen på en icke-förvald plats,\n"
" se till att sökvägen är angiven i klientinställningsfilen.\n"
" Om du inte har en proxy än, kör 'arcproxy'!"

#: src/clients/compute/utils.cpp:277
msgid ""
"It is impossible to mix ARC6 target selection options with legacy options. "
"All legacy options will be ignored!"
msgstr ""
"Det är omöjligt att blanda ARC6-target-valalternativ med legacy-alternativ. "
"Alla legacy-alternativ kommer att ignoreras!"

#: src/clients/compute/utils.cpp:345
#, c-format
msgid "Unsupported submission endpoint type: %s"
msgstr "Insändningsändpunktstyp stöds inte: %s"

#: src/clients/compute/utils.cpp:383
#, c-format
msgid ""
"Requested to skip resource discovery. Will try direct submission to %s and "
"%s submission endpoint types"
msgstr ""
"Begärt att hoppa över resurssökning. 
Kommer att försöka med direkt " "insändning till %s och %s insändningsändpunktstyper" #: src/clients/compute/utils.cpp:389 #, c-format msgid "Unsupported information endpoint type: %s" msgstr "Informationsänpunktstyp stöds inte: %s" #: src/clients/compute/utils.cpp:434 msgid "Other actions" msgstr "Övriga flaggor" #: src/clients/compute/utils.cpp:435 msgid "Brokering and filtering" msgstr "Resursmatchning och filtrering" #: src/clients/compute/utils.cpp:436 msgid "Output format modifiers" msgstr "Utdataformateringsmodifierare" #: src/clients/compute/utils.cpp:437 msgid "Behaviour tuning" msgstr "Beteendeinställning" #: src/clients/compute/utils.cpp:438 msgid "ARC6 submission endpoint selection" msgstr "Val av ARC6-insändningsändpunkt" #: src/clients/compute/utils.cpp:439 msgid "Legacy options set for defining targets" msgstr "Legacy-alternativ för att definiera target" #: src/clients/compute/utils.cpp:443 msgid "specify computing element hostname or a complete endpoint URL" msgstr "ange beräkningsresurs-värdnamn eller en fullständig ändpunkts-URL" #: src/clients/compute/utils.cpp:444 msgid "ce" msgstr "beräkningsresurs" #: src/clients/compute/utils.cpp:448 msgid "registry service URL with optional specification of protocol" msgstr "registertjänst-URL med frivilligt angivande av protokoll" #: src/clients/compute/utils.cpp:449 msgid "registry" msgstr "register" #: src/clients/compute/utils.cpp:455 msgid "" "require the specified endpoint type for job submission.\n" "\tAllowed values are: arcrest, emies, gridftp or gridftpjob and internal." msgstr "" "kräv den angivna ändpunktstypen för jobbinsändning.\n" "\tTillÃ¥tna värden är: arcrest, emies, gridftp eller gridftpjob och internal." #: src/clients/compute/utils.cpp:457 src/clients/compute/utils.cpp:464 msgid "type" msgstr "typ" #: src/clients/compute/utils.cpp:461 msgid "" "require information query using the specified information endpoint type.\n" "\tSpecial value 'NONE' will disable all resource information queries and the " "following brokering.\n" "\tAllowed values are: ldap.nordugrid, ldap.glue2, emies, arcrest and " "internal." msgstr "" "kräv informationsförfrÃ¥gan med den angivna informationsändpunkstypen.\n" "\tSärskilda värdet 'NONE' stänger av alla resursinformationsförfrÃ¥gningar " "och den efterföljande resursmatchningen.\n" "\tTillÃ¥tna värden är: ldap.nordugrid, ldap.glue2, emies, arcrest och " "internal." #: src/clients/compute/utils.cpp:470 msgid "" "select one or more computing elements: name can be an alias for a single CE, " "a group of CEs or a URL" msgstr "" "välj en eller flera beräkningsresurser: namn kan vara ett alias för en " "enskild beräkningsresurs, en grupp av beräkningsresurser eller en URL" #: src/clients/compute/utils.cpp:472 src/clients/compute/utils.cpp:477 #: src/clients/compute/utils.cpp:494 src/clients/compute/utils.cpp:614 msgid "name" msgstr "namn" #: src/clients/compute/utils.cpp:476 msgid "only select jobs that were submitted to this resource" msgstr "välj endast jobb som sändes in till denna resurs" #: src/clients/compute/utils.cpp:483 msgid "" "the computing element specified by URL at the command line should be queried " "using this information interface.\n" "\tAllowed values are: org.nordugrid.ldapng, org.nordugrid.ldapglue2 and org." "ogf.glue.emies.resourceinfo" msgstr "" "beräkningsresursen angiven med URL pÃ¥ kommandoraden ska frÃ¥gas med detta " "informationsgränssnitt.\n" "\tTillÃ¥tna värden är: org.nordugrid.ldapng, org.nordugrid.ldapglue2 och org." 
"ogf.glue.emies.resourceinfo" #: src/clients/compute/utils.cpp:486 msgid "interfacename" msgstr "gränssnittsnamn" #: src/clients/compute/utils.cpp:492 msgid "" "selecting a computing element for the new jobs with a URL or an alias, or " "selecting a group of computing elements with the name of the group" msgstr "" "att välja en beräkningsresurs för de nya jobben med en URL eller ett alias, " "eller att välja en grupp av beräkningselement med gruppen namn" #: src/clients/compute/utils.cpp:500 msgid "force migration, ignore kill failure" msgstr "tvÃ¥ngsmigrering, ignorera om avbrytandet av jobbet misslyckas" #: src/clients/compute/utils.cpp:506 msgid "keep the files on the server (do not clean)" msgstr "behÃ¥ll filerna pÃ¥ servern (ta inte bort)" #: src/clients/compute/utils.cpp:512 msgid "do not ask for verification" msgstr "frÃ¥ga inte efter bekräftelse" #: src/clients/compute/utils.cpp:516 msgid "truncate the joblist before synchronizing" msgstr "trunkera jobblistan för synkronisering" #: src/clients/compute/utils.cpp:520 msgid "do not collect information, only convert jobs storage format" msgstr "samla inte in information, konvertera endast lagringsformat" #: src/clients/compute/utils.cpp:526 src/clients/data/arcls.cpp:288 msgid "long format (more information)" msgstr "lÃ¥ngt format (mer information)" #: src/clients/compute/utils.cpp:532 msgid "print a list of services configured in the client.conf" msgstr "skriv ut en lista med tjänster konfigurerade i client.conf" #: src/clients/compute/utils.cpp:538 msgid "show the stdout of the job (default)" msgstr "visa jobbets stdout (förval)" #: src/clients/compute/utils.cpp:542 msgid "show the stderr of the job" msgstr "visa jobbets stderr" #: src/clients/compute/utils.cpp:546 msgid "show the CE's error log of the job" msgstr "visa jobbets beräkningsresurs-fellogg" #: src/clients/compute/utils.cpp:550 msgid "show the specified file from job's session directory" msgstr "visa den angivna filen frÃ¥n jobbets sessionskatalog" #: src/clients/compute/utils.cpp:551 msgid "filepath" msgstr "sökväg" #: src/clients/compute/utils.cpp:557 msgid "" "download directory (the job directory will be created in this directory)" msgstr "nedladdningskatalog (jobbkatalogen kommer att skapas i denna katalog)" #: src/clients/compute/utils.cpp:559 msgid "dirname" msgstr "katalognamn" #: src/clients/compute/utils.cpp:563 msgid "use the jobname instead of the short ID as the job directory name" msgstr "" "använd jobbets namn i stället för dess korta ID för jobbkatalogens namn" #: src/clients/compute/utils.cpp:568 msgid "force download (overwrite existing job directory)" msgstr "tvÃ¥ngsnedladdning (skriv över existerande jobbkatalog)" #: src/clients/compute/utils.cpp:574 msgid "instead of the status only the IDs of the selected jobs will be printed" msgstr "i stället för status skriv endast de utvalda jobbens ID" #: src/clients/compute/utils.cpp:578 msgid "sort jobs according to jobid, submissiontime or jobname" msgstr "sortera jobb efter jobb-id, insändningstid eller jobbnamn" #: src/clients/compute/utils.cpp:579 src/clients/compute/utils.cpp:582 msgid "order" msgstr "ordning" #: src/clients/compute/utils.cpp:581 msgid "reverse sorting of jobs according to jobid, submissiontime or jobname" msgstr "omvänd sortering av jobb efter jobb-id, insändningstid eller jobbnamn" #: src/clients/compute/utils.cpp:585 msgid "show jobs where status information is unavailable" msgstr "visa jobb för vilka statusinformation inte är tillgänglig" #: src/clients/compute/utils.cpp:589 msgid 
"show status information in JSON format" msgstr "visa statusinformation i JSON-format" #: src/clients/compute/utils.cpp:595 msgid "resubmit to the same resource" msgstr "sänd in till samma resurs igen" #: src/clients/compute/utils.cpp:599 msgid "do not resubmit to the same resource" msgstr "sänd inte in till samma resurs igen" #: src/clients/compute/utils.cpp:605 msgid "" "remove the job from the local list of jobs even if the job is not found in " "the infosys" msgstr "" "ta bort jobbet frÃ¥n den lokala jobblistan även om jobbet inte hittas i " "informationssystemet" #: src/clients/compute/utils.cpp:612 msgid "" "select one or more registries: name can be an alias for a single registry, a " "group of registries or a URL" msgstr "" "välj ett eller flera register: namn kan vara ett alias för ett enskilt " "register, en grupp av register eller en URL" #: src/clients/compute/utils.cpp:620 msgid "submit test job given by the number" msgstr "sänd in test-jobb givet av numret" #: src/clients/compute/utils.cpp:621 src/clients/compute/utils.cpp:625 msgid "int" msgstr "heltal" #: src/clients/compute/utils.cpp:624 msgid "test job runtime specified by the number" msgstr "test-jobbets körtid anges av numret" #: src/clients/compute/utils.cpp:631 msgid "only select jobs whose status is statusstr" msgstr "välj endast jobb vars status är statusstr" #: src/clients/compute/utils.cpp:632 msgid "statusstr" msgstr "statusstr" #: src/clients/compute/utils.cpp:638 msgid "all jobs" msgstr "alla jobb" #: src/clients/compute/utils.cpp:644 msgid "jobdescription string describing the job to be submitted" msgstr "jobbeskrivningssträng som beskriver jobbet som ska sändas in" #: src/clients/compute/utils.cpp:646 src/clients/compute/utils.cpp:652 #: src/clients/credentials/arcproxy.cpp:345 #: src/clients/credentials/arcproxy.cpp:352 #: src/clients/credentials/arcproxy.cpp:371 #: src/clients/credentials/arcproxy.cpp:378 #: src/clients/credentials/arcproxy.cpp:396 #: src/clients/credentials/arcproxy.cpp:400 #: src/clients/credentials/arcproxy.cpp:415 #: src/clients/credentials/arcproxy.cpp:425 #: src/clients/credentials/arcproxy.cpp:429 msgid "string" msgstr "sträng" #: src/clients/compute/utils.cpp:650 msgid "jobdescription file describing the job to be submitted" msgstr "jobbeskrivningsfil som beskriver jobbet som ska sändas in" #: src/clients/compute/utils.cpp:658 msgid "select broker method (list available brokers with --listplugins flag)" msgstr "" "välj resursmatchningsmetod (lista tillgängliga mäklare med --listplugins)" #: src/clients/compute/utils.cpp:659 msgid "broker" msgstr "mäklare" #: src/clients/compute/utils.cpp:662 msgid "the IDs of the submitted jobs will be appended to this file" msgstr "de insända jobbens ID kommer att läggas till i denna fil" #: src/clients/compute/utils.cpp:663 src/clients/compute/utils.cpp:685 #: src/clients/compute/utils.cpp:722 src/clients/compute/utils.cpp:730 #: src/clients/credentials/arcproxy.cpp:438 src/clients/data/arccp.cpp:627 #: src/clients/data/arcls.cpp:333 src/clients/data/arcmkdir.cpp:111 #: src/clients/data/arcrename.cpp:122 src/clients/data/arcrm.cpp:137 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:52 msgid "filename" msgstr "filnamn" #: src/clients/compute/utils.cpp:667 msgid "" "only use this interface for submitting.\n" "\tAllowed values are: org.nordugrid.gridftpjob or org.nordugrid.gridftp, org." 
"ogf.glue.emies.activitycreation and org.nordugrid.internal" msgstr "" "använd endast detta gränssnitt för insändning.\n" "\tTillÃ¥tna värden är: org.nordugrid.gridftpjob eller org.nordugrid.gridftp, " "org.ogf.glue.emies.activitycreation och org.nordugrid.internal" #: src/clients/compute/utils.cpp:669 src/clients/compute/utils.cpp:711 msgid "InterfaceName" msgstr "Gränssnittsnamn" #: src/clients/compute/utils.cpp:676 msgid "skip the service with the given URL during service discovery" msgstr "hoppa över tjänst med den angivna URLen under tjänstesökning" #: src/clients/compute/utils.cpp:677 src/clients/compute/utils.cpp:690 #: src/clients/data/arccp.cpp:607 msgid "URL" msgstr "URL" #: src/clients/compute/utils.cpp:684 msgid "a file containing a list of jobIDs" msgstr "en fil som innehÃ¥ller en lista med jobb-id" #: src/clients/compute/utils.cpp:689 msgid "skip jobs that are on a computing element with a given URL" msgstr "hoppa över jobb som är pÃ¥ en beräkningsresurs med en given URL" #: src/clients/compute/utils.cpp:695 msgid "submit jobs as dry run (no submission to batch system)" msgstr "sänd in jobb som dryrun (ingen insändning till batchsystemet)" #: src/clients/compute/utils.cpp:698 msgid "submit directly - no resource discovery or matchmaking" msgstr "direkt insändning - ingen resurssökning eller resursmatchning" #: src/clients/compute/utils.cpp:702 msgid "" "do not submit - dump job description in the language accepted by the target" msgstr "" "sänd inte in - skriv ut jobbeskrivning i ett sprÃ¥k som accepteras av target" #: src/clients/compute/utils.cpp:709 msgid "" "only get information about executon targets that support this job submission " "interface.\n" "\tAllowed values are org.nordugrid.gridftpjob or org.nordugrid.gridftp, org." "ogf.glue.emies.activitycreation and org.nordugrid.internal" msgstr "" "hämta endast information om exekverings-target som stöder detta " "jobbinsändningsgränssnitt.\n" "\tTillÃ¥tna värden är org.nordugrid.gridftpjob eller org.nordugrid.gridftp, " "org.ogf.glue.emies.activitycreation och org.nordugrid.internal" #: src/clients/compute/utils.cpp:716 msgid "prints info about installed user- and CA-certificates" msgstr "skriver ut information om installerade användar- och CA-certifikat" #: src/clients/compute/utils.cpp:721 #, c-format msgid "the file storing information about active jobs (default %s)" msgstr "filen som lagrar information om aktiva jobb (förval %s)" #: src/clients/compute/utils.cpp:729 src/clients/credentials/arcproxy.cpp:437 #: src/clients/data/arccp.cpp:626 src/clients/data/arcls.cpp:332 #: src/clients/data/arcmkdir.cpp:110 src/clients/data/arcrename.cpp:121 #: src/clients/data/arcrm.cpp:136 msgid "configuration file (default ~/.arc/client.conf)" msgstr "inställningsfil (förval ~/.arc/client.conf)" #: src/clients/compute/utils.cpp:732 src/clients/credentials/arcproxy.cpp:432 #: src/clients/data/arccp.cpp:621 src/clients/data/arcls.cpp:327 #: src/clients/data/arcmkdir.cpp:105 src/clients/data/arcrename.cpp:116 #: src/clients/data/arcrm.cpp:131 msgid "timeout in seconds (default 20)" msgstr "timeout i sekunder (förval 20)" #: src/clients/compute/utils.cpp:733 src/clients/credentials/arcproxy.cpp:433 #: src/clients/data/arccp.cpp:622 src/clients/data/arcls.cpp:328 #: src/clients/data/arcmkdir.cpp:106 src/clients/data/arcrename.cpp:117 #: src/clients/data/arcrm.cpp:132 msgid "seconds" msgstr "sekunder" #: src/clients/compute/utils.cpp:736 msgid "list the available plugins" msgstr "lista de tillgängliga pluginerna" #: 
src/clients/compute/utils.cpp:740 src/clients/credentials/arcproxy.cpp:442 #: src/clients/data/arccp.cpp:631 src/clients/data/arcls.cpp:337 #: src/clients/data/arcmkdir.cpp:115 src/clients/data/arcrename.cpp:126 #: src/clients/data/arcrm.cpp:141 #: src/hed/libs/compute/test_jobdescription.cpp:38 #: src/services/a-rex/grid-manager/gm_jobs.cpp:190 #: src/services/a-rex/grid-manager/inputcheck.cpp:81 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:66 msgid "FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG" msgstr "FATAL, ERROR, WARNING, INFO, VERBOSE eller DEBUG" #: src/clients/compute/utils.cpp:741 src/clients/credentials/arcproxy.cpp:443 #: src/clients/data/arccp.cpp:632 src/clients/data/arcls.cpp:338 #: src/clients/data/arcmkdir.cpp:116 src/clients/data/arcrename.cpp:127 #: src/clients/data/arcrm.cpp:142 #: src/hed/libs/compute/test_jobdescription.cpp:39 #: src/services/a-rex/grid-manager/gm_jobs.cpp:191 #: src/services/a-rex/grid-manager/inputcheck.cpp:82 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:67 msgid "debuglevel" msgstr "debugnivÃ¥" #: src/clients/compute/utils.cpp:743 src/clients/credentials/arcproxy.cpp:446 #: src/clients/data/arccp.cpp:635 src/clients/data/arcls.cpp:341 #: src/clients/data/arcmkdir.cpp:119 src/clients/data/arcrename.cpp:130 #: src/clients/data/arcrm.cpp:145 msgid "print version information" msgstr "skriv ut versionsinformation" #: src/clients/credentials/arcproxy.cpp:146 #: src/hed/libs/credential/ARCProxyUtil.cpp:1216 #, c-format msgid "There are %d user certificates existing in the NSS database" msgstr "Det finns %d användarcertifikat i NSS-databasen" #: src/clients/credentials/arcproxy.cpp:162 #: src/hed/libs/credential/ARCProxyUtil.cpp:1232 #, c-format msgid "Number %d is with nickname: %s%s" msgstr "Nummer %d är med smeknamn: %s%s" #: src/clients/credentials/arcproxy.cpp:171 #: src/hed/libs/credential/ARCProxyUtil.cpp:1241 #, c-format msgid " expiration time: %s " msgstr " giltig till: %s " #: src/clients/credentials/arcproxy.cpp:175 #: src/hed/libs/credential/ARCProxyUtil.cpp:1245 #, c-format msgid " certificate dn: %s" msgstr " certifikat-dn: %s" #: src/clients/credentials/arcproxy.cpp:176 #: src/hed/libs/credential/ARCProxyUtil.cpp:1246 #, c-format msgid " issuer dn: %s" msgstr " utfärdar-dn: %s" #: src/clients/credentials/arcproxy.cpp:177 #: src/hed/libs/credential/ARCProxyUtil.cpp:1247 #, c-format msgid " serial number: %d" msgstr " serie-nummer: %d" #: src/clients/credentials/arcproxy.cpp:181 #: src/hed/libs/credential/ARCProxyUtil.cpp:1251 #, c-format msgid "Please choose the one you would use (1-%d): " msgstr "Välj det som du vill använda (1-%d): " #: src/clients/credentials/arcproxy.cpp:246 msgid "" "The arcproxy command creates a proxy from a key/certificate pair which can\n" "then be used to access grid resources." msgstr "" "arcproxy-kommandot skapar en proxy frÃ¥n ett nyckel/certifikat-par som sedan\n" "kan användas för att komma Ã¥t gridresurser." #: src/clients/credentials/arcproxy.cpp:248 msgid "" "Supported constraints are:\n" " validityStart=time (e.g. 2008-05-29T10:20:30Z; if not specified, start " "from now)\n" " validityEnd=time\n" " validityPeriod=time (e.g. 43200 or 12h or 12H; if both validityPeriod and " "validityEnd\n" " not specified, the default is 12 hours for local proxy, and 168 hours " "for delegated\n" " proxy on myproxy server)\n" " vomsACvalidityPeriod=time (e.g. 
43200 or 12h or 12H; if not specified, the " "default\n" " is the minimum value of 12 hours and validityPeriod)\n" " myproxyvalidityPeriod=time (lifetime of proxies delegated by myproxy " "server,\n" " e.g. 43200 or 12h or 12H; if not specified, the default is the minimum " "value of\n" " 12 hours and validityPeriod (which is lifetime of the delegated proxy on " "myproxy server))\n" " proxyPolicy=policy content\n" " proxyPolicyFile=policy file\n" " keybits=number - length of the key to generate. Default is 2048 bits.\n" " Special value 'inherit' is to use key length of signing certificate.\n" " signingAlgorithm=name - signing algorithm to use for signing public key of " "proxy.\n" " Possible values are sha1, sha2 (alias for sha256), sha224, sha256, " "sha384, sha512\n" " and inherit (use algorithm of signing certificate). Default is inherit.\n" " With old systems, only sha1 is acceptable.\n" "\n" "Supported information item names are:\n" " subject - subject name of proxy certificate.\n" " identity - identity subject name of proxy certificate.\n" " issuer - issuer subject name of proxy certificate.\n" " ca - subject name of CA which issued initial certificate.\n" " path - file system path to file containing proxy.\n" " type - type of proxy certificate.\n" " validityStart - timestamp when proxy validity starts.\n" " validityEnd - timestamp when proxy validity ends.\n" " validityPeriod - duration of proxy validity in seconds.\n" " validityLeft - duration of proxy validity left in seconds.\n" " vomsVO - VO name represented by VOMS attribute\n" " vomsSubject - subject of certificate for which VOMS attribute is issued\n" " vomsIssuer - subject of service which issued VOMS certificate\n" " vomsACvalidityStart - timestamp when VOMS attribute validity starts.\n" " vomsACvalidityEnd - timestamp when VOMS attribute validity ends.\n" " vomsACvalidityPeriod - duration of VOMS attribute validity in seconds.\n" " vomsACvalidityLeft - duration of VOMS attribute validity left in seconds.\n" " proxyPolicy\n" " keybits - size of proxy certificate key in bits.\n" " signingAlgorithm - algorithm used to sign proxy certificate.\n" "Items are printed in requested order and are separated by newline.\n" "If item has multiple values they are printed in same line separated by |.\n" "\n" "Supported password destinations are:\n" " key - for reading private key\n" " myproxy - for accessing credentials at MyProxy service\n" " myproxynew - for creating credentials at MyProxy service\n" " all - for any purspose.\n" "\n" "Supported password sources are:\n" " quoted string (\"password\") - explicitly specified password\n" " int - interactively request password from console\n" " stdin - read password from standard input delimited by newline\n" " file:filename - read password from file named filename\n" " stream:# - read password from input stream number #.\n" " Currently only 0 (standard input) is supported.\n" msgstr "" "Dessa begränsningar stöds:\n" " validityStart=tid (t.ex. 2008-05-29T10:20:30Z; om ej angivet, börjar\n" " giltighetstiden nu)\n" " validityEnd=tid\n" " validityPeriod=tid (t.ex. 43200 eller 12h eller 12H; om varken " "validityPeriod\n" " eller validityEnd angivet, är förval 12 timmar för lokal proxy och\n" " 168 timmar för delegerad proxy pÃ¥ myproxy-server)\n" " vomsACvalidityPeriod=tid (t.ex. 43200 eller 12h eller 12H; om ej angivet, " "är\n" " förval minimum av 12 timmar och validityPeriod)\n" " myproxyvalidityPeriod=tid (livstid för proxyer som delegeras av\n" " myproxy-server, t.ex. 
43200 eller 12h eller 12H; om ej angivet, är\n" " förval minimum av 12 timmar och validityPeriod (vilket är livstiden för\n" " den delegerade proxyn på myproxy-servern))\n" " proxyPolicy=policy-text\n" " proxyPolicyFile=policy-fil\n" " keybits=nummer - längd för den genererade nyckeln. Förval är 2048 bitar.\n" " Särskilt värde 'inherit' betyder att det signerande certifikatets\n" " nyckellängd används.\n" " signingAlgorithm=namn - signeringsalgoritm att använda för att signera\n" " proxyns publika nyckel. Möjliga värden är sha1, sha2 (alias för " "sha256),\n" " sha224, sha256, sha384, sha512 och inherit (använd det signerande\n" " certifikatets algoritm). Förval är inherit.\n" " På gamla system är endast sha1 möjligt.\n" "\n" "Informationspunkter som stöds är:\n" " subject - proxycertifikatets subjektnamn.\n" " identity - proxycertifikatets identitets-subjektnamn.\n" " issuer - proxycertifikatets utfärdar-subjektnamn.\n" " ca - subjektnamn för CA som utfärdade det ursprungliga certifikatet.\n" " path - filsystem-sökväg till fil som innehåller proxyn.\n" " type - typ av proxycertifikat.\n" " validityStart - klockslag då proxyns giltighetstid börjar.\n" " validityEnd - klockslag då proxyns giltighetstid slutar.\n" " validityPeriod - längd på proxyns giltighetstid i sekunder.\n" " validityLeft - kvarvarande längd på proxyns giltighetstid i sekunder.\n" " vomsVO - VO-namn representerat av VOMS-attribut\n" " vomsSubject - subjekt för certifikat för vilket VOMS-attribut utfärdats\n" " vomsIssuer - subjekt för tjänst som utfärdat VOMS-certifikat\n" " vomsACvalidityStart - klockslag då VOMS-attributets giltighetstid börjar.\n" " vomsACvalidityEnd - klockslag då VOMS-attributets giltighetstid slutar.\n" " vomsACvalidityPeriod - längd på VOMS-attributets giltighetstid i " "sekunder.\n" " vomsACvalidityLeft - kvarvarande längd på VOMS-attributets giltighetstid\n" " i sekunder.\n" " proxyPolicy\n" " keybits - proxycertifikatets nyckellängd i bitar.\n" " signingAlgorithm - algoritm som användes för att signera " "proxycertifikatet.\n" "Informationspunkterna skrivs i begärd ordning separerade av nyrad.\n" "Om en punkt har mer än ett värde skrivs dessa på samma rad separerade av |.\n" "\n" "Lösenordsdestinationer som stöds är:\n" " key - för att läsa privat nyckel\n" " myproxy - för att komma åt referens på myproxy-tjänst\n" " myproxynew - för att skapa referens på myproxy-tjänst\n" " all - för alla användningsområden.\n" "\n" "Lösenordskällor som stöds är:\n" " quoted string (\"lösenord\") - explicit angivet lösenord\n" " int - interaktiv begäran av lösenord från konsol\n" " stdin - läs lösenord från standard input avgränsat av nyrad\n" " file:filnamn - läs lösenord från fil med namn filnamn\n" " stream:# - läs lösenord från input stream nummer #.\n" " För närvarande stöds endast 0 (standard input).\n" #: src/clients/credentials/arcproxy.cpp:308 msgid "path to the proxy file" msgstr "sökväg till proxyfilen" #: src/clients/credentials/arcproxy.cpp:309 #: src/clients/credentials/arcproxy.cpp:313 #: src/clients/credentials/arcproxy.cpp:317 #: src/clients/credentials/arcproxy.cpp:321 #: src/clients/credentials/arcproxy.cpp:325 #: src/clients/credentials/arcproxy.cpp:329 src/clients/data/arccp.cpp:584 msgid "path" msgstr "sökväg" #: src/clients/credentials/arcproxy.cpp:312 msgid "" "path to the certificate file, it can be either PEM, DER, or PKCS12 formatted" msgstr "" "sökväg till certifikatfilen, kan vara i endera PEM-, DER- eller PKCS12-format" #:
src/clients/credentials/arcproxy.cpp:316 msgid "" "path to the private key file, if the certificate is in PKCS12 format, then " "no need to give private key" msgstr "" "sökväg till privata-nyckel-filen, om certifikatet är i PKCS12-format behöver " "inte den privata nyckeln anges" #: src/clients/credentials/arcproxy.cpp:320 msgid "" "path to the trusted certificate directory, only needed for the VOMS client " "functionality" msgstr "" "sökväg till katalogen med betrodda certifikat, behövs endast för VOMS-klient-" "funktionalitet" #: src/clients/credentials/arcproxy.cpp:324 msgid "" "path to the top directory of VOMS *.lsc files, only needed for the VOMS " "client functionality" msgstr "" "sökväg till huvudkatalogen för VOMS *.lsc-filer, behövs endast för VOMS-" "klient-funktionalitet" #: src/clients/credentials/arcproxy.cpp:328 msgid "path to the VOMS server configuration file" msgstr "sökväg till VOMS-server-inställningsfilen" #: src/clients/credentials/arcproxy.cpp:332 msgid "" "voms<:command>. Specify VOMS server (More than one VOMS server \n" " can be specified like this: --voms VOa:command1 --voms VOb:" "command2). \n" " :command is optional, and is used to ask for specific " "attributes(e.g: roles)\n" " command options are:\n" " all --- put all of this DN's attributes into AC; \n" " list ---list all of the DN's attribute, will not create AC " "extension; \n" " /Role=yourRole --- specify the role, if this DN \n" " has such a role, the role will be put into " "AC; \n" " /voname/groupname/Role=yourRole --- specify the VO, group and " "role; if this DN \n" " has such a role, the role will be put into " "AC. \n" " If this option is not specified values from configuration " "files are used.\n" " To avoid anything to be used specify -S with empty value.\n" msgstr "" "voms<:kommando>. Ange VOMS-server (Mer än en VOMS-server kan\n" " anges på detta sätt: --voms VOa:kommando1 --voms VOb:" "kommando2).\n" " :kommando är valfritt, och används för att begära specifika\n" " attribut (t.ex. roller)\n" " kommandoalternativ är:\n" " all --- lägg till detta DNs alla attribut i AC;\n" " list --- lista detta DNs alla attribut, skapar inte AC-" "tillägg;\n" " /Role=dinRoll --- ange roll, om detta DN har en sådan roll,\n" " kommer rollen att läggas till i AC;\n" " /vonamn/gruppnamn/Role=dinRoll --- ange VO, grupp och roll;\n" " om detta DN har en sådan roll,\n" " kommer rollen att läggas till i AC.\n" " Om detta alternativ inte anges kommer värden från\n" " inställningsfilerna att användas.\n" " För att undvika att dessa används ange -S med tomt värde.\n" #: src/clients/credentials/arcproxy.cpp:348 msgid "" "group<:role>. Specify ordering of attributes \n" " Example: --order /knowarc.eu/coredev:Developer,/knowarc.eu/" "testers:Tester \n" " or: --order /knowarc.eu/coredev:Developer --order /knowarc.eu/" "testers:Tester \n" " Note that it does not make sense to specify the order if you have two or " "more different VOMS servers specified" msgstr "" "grupp<:roll>. Ange attributens ordning\n" " Exempel: --order /knowarc.eu/coredev:Developer,/knowarc.eu/" "testers:Tester\n" " eller: --order /knowarc.eu/coredev:Developer --order /knowarc." "eu/testers:Tester\n" " Notera att det saknar mening att ange ordningen om du har två eller fler " "olika VOMS-servrar angivna" #: src/clients/credentials/arcproxy.cpp:355 msgid "use GSI communication protocol for contacting VOMS services" msgstr "använd GSI-kommunikationsprotokollet för att kontakta VOMS-tjänster."
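# Illustrative usage note (assumptions, not part of the upstream catalogue): a minimal
# sketch of how the VOMS, ordering and constraint settings described in the entries
# above are typically combined on the arcproxy command line. Only --voms, --order and
# -S appear literally in the strings above; the -c/--constraint flag used to pass
# constraint pairs such as validityPeriod=24h is an assumption, and the VO name
# "knowarc.eu" is reused from the --order example purely as a placeholder:
#   arcproxy --voms knowarc.eu:all
#   arcproxy --voms knowarc.eu --order /knowarc.eu/coredev:Developer --order /knowarc.eu/testers:Tester
#   arcproxy -c validityPeriod=24h -c vomsACvalidityPeriod=12h -c keybits=inherit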
#: src/clients/credentials/arcproxy.cpp:358 msgid "" "use HTTP communication protocol for contacting VOMS services that provide " "RESTful access \n" " Note for RESTful access, 'list' command and multiple VOMS " "server are not supported\n" msgstr "" "använd HTTP-kommunikationsprotokollet för att kontakta VOMS-tjänster som " "erbjuder RESTful åtkomst \n" " Notera att för RESTful åtkomst stöds inte 'list'-kommandot " "och mer än en VOMS-server\n" #: src/clients/credentials/arcproxy.cpp:362 msgid "" "use old communication protocol for contacting VOMS services instead of " "RESTful access\n" msgstr "" "använd det gamla kommunikationsprotokollet för att kontakta VOMS-tjänster " "istället för RESTful åtkomst\n" #: src/clients/credentials/arcproxy.cpp:365 msgid "" "this option is not functional (old GSI proxies are not supported anymore)" msgstr "detta alternativ gör ingenting (gamla GSI-proxyer stöds inte längre)" #: src/clients/credentials/arcproxy.cpp:368 msgid "print all information about this proxy." msgstr "skriv ut all information om denna proxy." #: src/clients/credentials/arcproxy.cpp:371 msgid "print selected information about this proxy." msgstr "skriv ut utvald information om denna proxy." #: src/clients/credentials/arcproxy.cpp:374 msgid "remove proxy" msgstr "ta bort proxy" #: src/clients/credentials/arcproxy.cpp:377 msgid "" "username to MyProxy server (if missing subject of user certificate is used)" msgstr "" "användarnamn till myproxy-server (om detta saknas används subjektet från " "användarcertifikatet)" #: src/clients/credentials/arcproxy.cpp:382 msgid "" "don't prompt for a credential passphrase, when retrieve a \n" " credential from on MyProxy server. \n" " The precondition of this choice is the credential is PUT onto\n" " the MyProxy server without a passphrase by using -R (--" "retrievable_by_cert) \n" " option when being PUTing onto Myproxy server. \n" " This option is specific for the GET command when contacting " "Myproxy server." msgstr "" "fråga inte efter lösenord när en referens hämtas från\n" " en myproxy-server.\n" " En förutsättning för detta val är att referensen har satts på\n" " myproxy-server utan lösenord genom att använda alternativ -R\n" " (--retrievable_by_cert). Detta alternativ är specifikt för\n" " GET-kommandot när myproxy-servern kontaktas." #: src/clients/credentials/arcproxy.cpp:393 msgid "" "Allow specified entity to retrieve credential without passphrase.\n" " This option is specific for the PUT command when contacting " "Myproxy server." msgstr "" "tillåt angiven entitet att hämta referens utan lösenord.\n" " Detta alternativ är specifikt för PUT-kommandot när\n" " myproxy-servern kontaktas." #: src/clients/credentials/arcproxy.cpp:399 msgid "hostname[:port] of MyProxy server" msgstr "värdnamn[:port] för myproxy-server" #: src/clients/credentials/arcproxy.cpp:404 msgid "" "command to MyProxy server. The command can be PUT, GET, INFO, NEWPASS or " "DESTROY.\n" " PUT -- put a delegated credentials to the MyProxy server; \n" " GET -- get a delegated credentials from the MyProxy server; \n" " INFO -- get and present information about credentials stored " "at the MyProxy server; \n" " NEWPASS -- change password protecting credentials stored at " "the MyProxy server; \n" " DESTROY -- wipe off credentials stored at the MyProxy " "server; \n" " Local credentials (certificate and key) are not necessary " "except in case of PUT.
\n" " MyProxy functionality can be used together with VOMS " "functionality.\n" " --voms and --vomses can be used for Get command if VOMS " "attributes\n" " is required to be included in the proxy.\n" msgstr "" "kommando till myproxy-server. Kommandot kan vara PUT, GET, INFO, NEWPASS " "eller DESTROY.\n" " PUT -- lägg upp en delegerad referens på myproxy-servern;\n" " GET -- hämta en delegerad referens från myproxy-servern;\n" " INFO -- hämta och presentera information om referenser lagrade " "på myproxy-servern;\n" " NEWPASS -- ändra lösenord som skyddar referenser lagrade på " "myproxy-servern;\n" " DESTROY -- ta bort referenser lagrade på myproxy-servern;\n" " Lokala referenser (certifikat och nyckel) är inte nödvändiga " "utom vid PUT.\n" " Myproxy-funktionalitet kan användas tillsammans med VOMS-" "funktionalitet.\n" " --voms och --vomses kan användas med GET-kommandot om VOMS-" "attribut\n" " måste inkluderas i proxyn.\n" #: src/clients/credentials/arcproxy.cpp:419 msgid "" "use NSS credential database in default Mozilla profiles, \n" " including Firefox, Seamonkey and Thunderbird.\n" msgstr "" "använd NSS-referens-databas i förvalda Mozilla-profiler,\n" " inklusive Firefox, Seamonkey och Thunderbird.\n" #: src/clients/credentials/arcproxy.cpp:424 msgid "proxy constraints" msgstr "proxybegränsningar" #: src/clients/credentials/arcproxy.cpp:428 msgid "password destination=password source" msgstr "lösenordsdestination=lösenordskälla" #: src/clients/credentials/arcproxy.cpp:452 msgid "" "RESTful and old VOMS communication protocols can't be requested " "simultaneously." msgstr "" "RESTful och gammalt VOMS-kommunikationsprotokoll kan inte begäras samtidigt." #: src/clients/credentials/arcproxy.cpp:482 #: src/clients/credentials/arcproxy.cpp:1187 msgid "Failed configuration initialization." msgstr "Misslyckades med att initiera inställningar." #: src/clients/credentials/arcproxy.cpp:511 msgid "" "Failed to find certificate and/or private key or files have improper " "permissions or ownership." msgstr "" "Misslyckades med att hitta certifikat och/eller privat nyckel eller filer " "har olämpliga åtkomsträttigheter eller ägare." #: src/clients/credentials/arcproxy.cpp:512 #: src/clients/credentials/arcproxy.cpp:524 msgid "You may try to increase verbosity to get more information." msgstr "Du kan försöka att öka debugnivån för att få mer information." #: src/clients/credentials/arcproxy.cpp:520 msgid "Failed to find CA certificates" msgstr "Misslyckades med att hitta CA-certifikat" #: src/clients/credentials/arcproxy.cpp:521 msgid "" "Cannot find the CA certificates directory path, please set environment " "variable X509_CERT_DIR, or cacertificatesdirectory in a configuration file." msgstr "" "Kan inte hitta sökväg till CA-certifikat-katalogen, sätt miljövariabeln " "X509_CERT_DIR, eller cacertificatesdirectory i en inställningsfil." #: src/clients/credentials/arcproxy.cpp:525 msgid "" "The CA certificates directory is required for contacting VOMS and MyProxy " "servers." msgstr "" "CA-certifikat-katalogen behövs för att kontakta VOMS- och myproxy-servrar."
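# Illustrative usage note (assumptions, not part of the upstream catalogue): a hedged
# sketch of the MyProxy interaction described in the entries above. The command names
# PUT/GET and the -R/--retrievable_by_cert and --voms options come from those strings;
# the -M/--myproxycmd and -L/--myproxysrv flag names, the host name, the port and the
# DN are assumptions used only as placeholders:
#   arcproxy -M PUT -L myproxy.example.org:7512 -R "/DC=eu/O=Example/CN=My Name"
#   arcproxy -M GET -L myproxy.example.org:7512 --voms knowarc.eu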
#: src/clients/credentials/arcproxy.cpp:537 msgid "" "$X509_VOMS_FILE, and $X509_VOMSES are not set;\n" "User has not specified the location for vomses information;\n" "There is also not vomses location information in user's configuration file;\n" "Can not find vomses in default locations: ~/.arc/vomses, ~/.voms/vomses,\n" "$ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/" "vomses,\n" "/etc/vomses, /etc/grid-security/vomses, and the location at the " "corresponding sub-directory" msgstr "" "$X509_VOMS_FILE och $X509_VOMSES är inte tilldelade;\n" "Användaren har inte angivit sökvägen till vomses-informationen;\n" "Det finns inte heller sökväg till vomses i användarens inställningsfil;\n" "Kan inte hitta vomses på förvalda sökvägar: ~/.arc/vomses, ~/.voms/vomses,\n" "$ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/" "vomses,\n" "/etc/vomses, /etc/grid-security/vomses, och de motsvarande underkatalogerna" #: src/clients/credentials/arcproxy.cpp:582 msgid "Wrong number of arguments!" msgstr "Fel antal argument!" #: src/clients/credentials/arcproxy.cpp:590 #: src/clients/credentials/arcproxy.cpp:614 #: src/clients/credentials/arcproxy.cpp:747 msgid "" "Cannot find the path of the proxy file, please setup environment " "X509_USER_PROXY, or proxypath in a configuration file" msgstr "" "Kan inte hitta sökväg till proxyfilen, sätt miljövariabeln X509_USER_PROXY, " "eller proxypath i en inställningsfil" #: src/clients/credentials/arcproxy.cpp:597 #, c-format msgid "Cannot remove proxy file at %s" msgstr "Kan inte ta bort proxyfil på %s" #: src/clients/credentials/arcproxy.cpp:599 #, c-format msgid "Cannot remove proxy file at %s, because it's not there" msgstr "Kan inte ta bort proxyfil på %s, eftersom den inte existerar" #: src/clients/credentials/arcproxy.cpp:608 msgid "Bearer token is available. It is preferred for job submission." msgstr "Bärar-token är tillgängligt. Det föredras för jobbinsändning." #: src/clients/credentials/arcproxy.cpp:620 #: src/clients/credentials/arcproxy.cpp:753 #, c-format msgid "" "Cannot find file at %s for getting the proxy. Please make sure this file " "exists." msgstr "" "Kan inte hitta fil på %s för att hämta proxyn. Se till att denna fil " "existerar." #: src/clients/credentials/arcproxy.cpp:626 #: src/clients/credentials/arcproxy.cpp:759 #, c-format msgid "Cannot process proxy file at %s." msgstr "Kan inte behandla proxyfil på %s."
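# Illustrative note (assumptions, not part of the upstream catalogue): the vomses
# locations listed above are plain text files. A hypothetical vomses line, written in
# the conventional five quoted fields (alias, host, port, server DN, VO name), might
# look like the following, and $X509_VOMSES can point at such a file; the host, port
# and DN shown are placeholders only:
#   "knowarc.eu" "voms.example.org" "15001" "/DC=eu/O=Example/CN=voms.example.org" "knowarc.eu"
#   export X509_VOMSES=$HOME/.arc/vomses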
#: src/clients/credentials/arcproxy.cpp:629 #, c-format msgid "Subject: %s" msgstr "Subjekt: %s" #: src/clients/credentials/arcproxy.cpp:630 #, c-format msgid "Issuer: %s" msgstr "Utfärdare: %s" #: src/clients/credentials/arcproxy.cpp:631 #, c-format msgid "Identity: %s" msgstr "Identitet: %s" #: src/clients/credentials/arcproxy.cpp:633 msgid "Time left for proxy: Proxy expired" msgstr "Kvarvarande tid för proxy: Proxyns giltighetstid har gått ut" #: src/clients/credentials/arcproxy.cpp:635 msgid "Time left for proxy: Proxy not valid yet" msgstr "Kvarvarande tid för proxy: Proxyn är inte giltig än" #: src/clients/credentials/arcproxy.cpp:637 #, c-format msgid "Time left for proxy: %s" msgstr "Kvarvarande tid för proxy: %s" #: src/clients/credentials/arcproxy.cpp:638 #, c-format msgid "Proxy path: %s" msgstr "Proxysökväg: %s" #: src/clients/credentials/arcproxy.cpp:639 #, c-format msgid "Proxy type: %s" msgstr "Proxytyp: %s" #: src/clients/credentials/arcproxy.cpp:640 #, c-format msgid "Proxy key length: %i" msgstr "Proxyns nyckellängd: %i" #: src/clients/credentials/arcproxy.cpp:641 #, c-format msgid "Proxy signature: %s" msgstr "Proxysignatur: %s" #: src/clients/credentials/arcproxy.cpp:650 msgid "AC extension information for VO " msgstr "AC-tilläggsinformation för VO " #: src/clients/credentials/arcproxy.cpp:653 msgid "Error detected while parsing this AC" msgstr "Fel upptäckt när denna AC tolkades" #: src/clients/credentials/arcproxy.cpp:666 msgid "AC is invalid: " msgstr "AC är ogiltig: " #: src/clients/credentials/arcproxy.cpp:696 #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:222 #, c-format msgid "Malformed VOMS AC attribute %s" msgstr "Felaktigt VOMS-AC-attribut %s" #: src/clients/credentials/arcproxy.cpp:727 msgid "Time left for AC: AC is not valid yet" msgstr "Kvarvarande tid för AC: AC är inte giltig än" #: src/clients/credentials/arcproxy.cpp:729 msgid "Time left for AC: AC has expired" msgstr "Kvarvarande tid för AC: ACs giltighetstid har gått ut" #: src/clients/credentials/arcproxy.cpp:731 #, c-format msgid "Time left for AC: %s" msgstr "Kvarvarande tid för AC: %s" #: src/clients/credentials/arcproxy.cpp:838 #, c-format msgid "Information item '%s' is not known" msgstr "Informationspunkt '%s' är okänd" #: src/clients/credentials/arcproxy.cpp:850 msgid "" "Cannot find the user certificate path, please setup environment " "X509_USER_CERT, or certificatepath in a configuration file" msgstr "" "Kan inte hitta sökväg till användarcertifikatet, sätt upp miljövariabeln " "X509_USER_CERT, eller certificatepath i en inställningsfil" #: src/clients/credentials/arcproxy.cpp:854 msgid "" "Cannot find the user private key path, please setup environment " "X509_USER_KEY, or keypath in a configuration file" msgstr "" "Kan inte hitta sökväg till privata nyckeln, sätt miljövariabeln " "X509_USER_KEY, eller keypath i en inställningsfil" #: src/clients/credentials/arcproxy.cpp:878 #, c-format msgid "" "Cannot parse password source expression %s it must be of type=source format" msgstr "" "Kan inte tolka uttrycket för lösenordskälla %s det måste vara i formatet " "typ=källa" #: src/clients/credentials/arcproxy.cpp:895 #, c-format msgid "" "Cannot parse password type %s. Currently supported values are " "'key','myproxy','myproxynew' and 'all'." msgstr "" "Kan inte tolka lösenordstyp %s. Nuvarande giltiga värden är 'key', " "'myproxy', 'myproxynew' och 'all'."
#: src/clients/credentials/arcproxy.cpp:910 #, c-format msgid "" "Cannot parse password source %s it must be of source_type or source_type:" "data format. Supported source types are int,stdin,stream,file." msgstr "" "Kan inte tolka lösenordskälla %s det måste vara i formatet källtyp eller " "källtyp:data. Giltiga källtyper är int, stdin, stream, file." #: src/clients/credentials/arcproxy.cpp:924 msgid "Only standard input is currently supported for password source." msgstr "Endast standard input är för närvarande giltig för lösenordskälla." #: src/clients/credentials/arcproxy.cpp:929 #, c-format msgid "" "Cannot parse password source type %s. Supported source types are int,stdin," "stream,file." msgstr "" "Kan inte tolka lösenordskälltyp %s. Giltiga källtyper är int, stdin, stream, " "file." #: src/clients/credentials/arcproxy.cpp:968 msgid "The start, end and period can't be set simultaneously" msgstr "Början, slut och längd kan inte användas samtidigt" #: src/clients/credentials/arcproxy.cpp:974 #, c-format msgid "The start time that you set: %s can't be recognized." msgstr "Starttiden som du angivit: %s kan inte tolkas." #: src/clients/credentials/arcproxy.cpp:981 #, c-format msgid "The period that you set: %s can't be recognized." msgstr "Längden som du angivit: %s kan inte tolkas." #: src/clients/credentials/arcproxy.cpp:988 #, c-format msgid "The end time that you set: %s can't be recognized." msgstr "Sluttiden som du angivit: %s kan inte tolkas." #: src/clients/credentials/arcproxy.cpp:997 #, c-format msgid "The end time that you set: %s is before start time: %s." msgstr "Sluttiden du angivit: %s är före starttiden: %s." #: src/clients/credentials/arcproxy.cpp:1008 #, c-format msgid "WARNING: The start time that you set: %s is before current time: %s" msgstr "Varning: Starttiden du angivit: %s är före nuvarande tidpunkt: %s" #: src/clients/credentials/arcproxy.cpp:1011 #, c-format msgid "WARNING: The end time that you set: %s is before current time: %s" msgstr "Varning: Sluttiden du angivit: %s är före nuvarande tidpunkt: %s" #: src/clients/credentials/arcproxy.cpp:1021 #, c-format msgid "The VOMS AC period that you set: %s can't be recognized." msgstr "VOMS-AC-perioden du angivit: %s kan inte tolkas." #: src/clients/credentials/arcproxy.cpp:1039 #, c-format msgid "The MyProxy period that you set: %s can't be recognized." msgstr "Myproxy-perioden du angivit: %s kan inte tolkas." #: src/clients/credentials/arcproxy.cpp:1054 #, c-format msgid "The keybits constraint is wrong: %s." msgstr "Begränsningen av antalet bitar i nyckeln är felaktig: %s."
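# Illustrative note (assumptions, not part of the upstream catalogue): the messages
# above describe the password option value as destination=source, with destinations
# key/myproxy/myproxynew/all and sources int, stdin, stream:# or file:filename.
# Hypothetical examples of such values (the option flag carrying them is not named
# in these strings, and the file path is a placeholder):
#   key=stdin
#   myproxynew=int
#   all=file:/home/user/.arcpass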
#: src/clients/credentials/arcproxy.cpp:1068 #: src/hed/libs/credential/ARCProxyUtil.cpp:1271 msgid "The NSS database can not be detected in the Firefox profile" msgstr "NSS-databasen kan inte upptäckas i Firefox-profilen" #: src/clients/credentials/arcproxy.cpp:1077 #: src/hed/libs/credential/ARCProxyUtil.cpp:1279 #, c-format msgid "" "There are %d NSS base directories where the certificate, key, and module " "databases live" msgstr "" "Det finns %d NSS-baskataloger där certifikat, nycklar och moduldatabaser " "finns" #: src/clients/credentials/arcproxy.cpp:1079 #: src/hed/libs/credential/ARCProxyUtil.cpp:1283 #, c-format msgid "Number %d is: %s" msgstr "Nummer %d är: %s" #: src/clients/credentials/arcproxy.cpp:1081 #: src/hed/libs/credential/ARCProxyUtil.cpp:1285 #, c-format msgid "Please choose the NSS database you would like to use (1-%d): " msgstr "Välj den NSS-databas du vill använda (1-%d): " #: src/clients/credentials/arcproxy.cpp:1097 #: src/hed/libs/credential/ARCProxyUtil.cpp:1297 #, c-format msgid "NSS database to be accessed: %s\n" msgstr "NSS-databas som kommer att användas: %s\n" #: src/clients/credentials/arcproxy.cpp:1168 #: src/hed/libs/credential/ARCProxyUtil.cpp:1471 #, c-format msgid "Certificate to use is: %s" msgstr "Certifikat som kommer att användas är: %s" #: src/clients/credentials/arcproxy.cpp:1216 #: src/clients/credentials/arcproxy.cpp:1330 #: src/hed/libs/credential/ARCProxyUtil.cpp:1528 msgid "Proxy generation succeeded" msgstr "Proxygenerering lyckades" #: src/clients/credentials/arcproxy.cpp:1217 #: src/clients/credentials/arcproxy.cpp:1331 #: src/hed/libs/credential/ARCProxyUtil.cpp:1529 #, c-format msgid "Your proxy is valid until: %s" msgstr "Din proxy är giltig till: %s" #: src/clients/credentials/arcproxy.cpp:1236 msgid "" "The old GSI proxies are not supported anymore. Please do not use -O/--old " "option." msgstr "" "De gamla GSI-proxyerna stöds inte längre. Använd inte alternativ -O/--old." #: src/clients/credentials/arcproxy.cpp:1255 src/hed/mcc/tls/MCCTLS.cpp:163 #: src/hed/mcc/tls/MCCTLS.cpp:196 src/hed/mcc/tls/MCCTLS.cpp:222 msgid "VOMS attribute parsing failed" msgstr "Tolkning av VOMS-attribut misslyckades" #: src/clients/credentials/arcproxy.cpp:1257 msgid "Myproxy server did not return proxy with VOMS AC included" msgstr "Myproxy-server returnerade inte proxy med VOMS AC inkluderat" #: src/clients/credentials/arcproxy.cpp:1278 #: src/hed/libs/credential/ARCProxyUtil.cpp:337 msgid "Proxy generation failed: No valid certificate found." msgstr "Proxygenerering misslyckades: Hittade inget giltigt certifikat." #: src/clients/credentials/arcproxy.cpp:1283 #: src/hed/libs/credential/ARCProxyUtil.cpp:343 msgid "Proxy generation failed: No valid private key found." msgstr "Proxygenerering misslyckades: Hittade ingen giltig privat nyckel." #: src/clients/credentials/arcproxy.cpp:1287 #: src/hed/libs/credential/ARCProxyUtil.cpp:169 #, c-format msgid "Your identity: %s" msgstr "Din identitet: %s" #: src/clients/credentials/arcproxy.cpp:1289 #: src/hed/libs/credential/ARCProxyUtil.cpp:350 msgid "Proxy generation failed: Certificate has expired." msgstr "Proxygenerering misslyckades: Certifikatets giltighetstid har gått ut." #: src/clients/credentials/arcproxy.cpp:1293 #: src/hed/libs/credential/ARCProxyUtil.cpp:355 msgid "Proxy generation failed: Certificate is not valid yet." msgstr "Proxygenerering misslyckades: Certifikatet är inte giltigt än."
#: src/clients/credentials/arcproxy.cpp:1304 msgid "Proxy generation failed: Failed to create temporary file." msgstr "Proxygenerering misslyckades: Misslyckades med att skapa temporär fil." #: src/clients/credentials/arcproxy.cpp:1312 msgid "Proxy generation failed: Failed to retrieve VOMS information." msgstr "" "Proxygenerering misslyckades: Misslyckades med att hämta VOMS-information." #: src/clients/credentials/arcproxy_myproxy.cpp:100 #: src/hed/libs/credential/ARCProxyUtil.cpp:838 msgid "Succeeded to get info from MyProxy server" msgstr "Lyckades med att hämta information från myproxy-server" #: src/clients/credentials/arcproxy_myproxy.cpp:144 #: src/hed/libs/credential/ARCProxyUtil.cpp:894 msgid "Succeeded to change password on MyProxy server" msgstr "Lyckades med att ändra lösenord på myproxy-server" #: src/clients/credentials/arcproxy_myproxy.cpp:185 #: src/hed/libs/credential/ARCProxyUtil.cpp:943 msgid "Succeeded to destroy credential on MyProxy server" msgstr "Lyckades med att ta bort referens på myproxy-server" #: src/clients/credentials/arcproxy_myproxy.cpp:265 #: src/hed/libs/credential/ARCProxyUtil.cpp:1032 #, c-format msgid "Succeeded to get a proxy in %s from MyProxy server %s" msgstr "Lyckades med att hämta en proxy i %s från myproxy-server %s" #: src/clients/credentials/arcproxy_myproxy.cpp:318 #: src/hed/libs/credential/ARCProxyUtil.cpp:1091 msgid "Succeeded to put a proxy onto MyProxy server" msgstr "Lyckades med att lägga upp en proxy på myproxy-server" #: src/clients/credentials/arcproxy_proxy.cpp:93 #: src/hed/libs/credential/ARCProxyUtil.cpp:397 #: src/hed/libs/credential/ARCProxyUtil.cpp:1378 msgid "Failed to add VOMS AC extension. Your proxy may be incomplete." msgstr "" "Misslyckades med att lägga till VOMS-AC-tillägg. Din proxy kan vara " "ofullständig." #: src/clients/credentials/arcproxy_voms.cpp:63 msgid "" "Failed to process VOMS configuration or no suitable configuration lines " "found." msgstr "" "Misslyckades med att behandla VOMS-inställningar eller hittade inga lämpliga " "inställningsrader." #: src/clients/credentials/arcproxy_voms.cpp:75 #, c-format msgid "Failed to parse requested VOMS lifetime: %s" msgstr "Misslyckades med att tolka begärd VOMS-livstid: %s" #: src/clients/credentials/arcproxy_voms.cpp:93 #: src/hed/libs/credential/ARCProxyUtil.cpp:634 #, c-format msgid "Cannot get VOMS server address information from vomses line: \"%s\"" msgstr "Kan inte hämta VOMS-serveradressinformation från vomsesrad: \"%s\"" #: src/clients/credentials/arcproxy_voms.cpp:97 #: src/clients/credentials/arcproxy_voms.cpp:99 #: src/hed/libs/credential/ARCProxyUtil.cpp:644 #: src/hed/libs/credential/ARCProxyUtil.cpp:646 #, c-format msgid "Contacting VOMS server (named %s): %s on port: %s" msgstr "Kontaktar VOMS-server (med namn %s): %s på port: %s" #: src/clients/credentials/arcproxy_voms.cpp:105 #, c-format msgid "Failed to parse requested VOMS server port number: %s" msgstr "Misslyckades med att tolka begärt VOMS-serverportnummer: %s" #: src/clients/credentials/arcproxy_voms.cpp:122 msgid "List functionality is not supported for RESTful VOMS interface" msgstr "Listfunktionalitet stöds inte av RESTful-VOMS-gränssnittet" #: src/clients/credentials/arcproxy_voms.cpp:132 #: src/clients/credentials/arcproxy_voms.cpp:188 #, c-format msgid "" "The VOMS server with the information:\n" "\t%s\n" "can not be reached, please make sure it is available." msgstr "" "VOMS-servern med informationen:\n" "\t%s\n" "kan inte nås, säkerställ att den är tillgänglig."
#: src/clients/credentials/arcproxy_voms.cpp:133 #: src/clients/credentials/arcproxy_voms.cpp:138 #: src/clients/credentials/arcproxy_voms.cpp:189 #: src/clients/credentials/arcproxy_voms.cpp:194 #, c-format msgid "" "Collected error is:\n" "\t%s" msgstr "" "Insamlade felet är:\n" "\t%s" #: src/clients/credentials/arcproxy_voms.cpp:137 #: src/clients/credentials/arcproxy_voms.cpp:193 #, c-format msgid "No valid response from VOMS server: %s" msgstr "Inget giltigt svar från VOMS-server: %s" #: src/clients/credentials/arcproxy_voms.cpp:155 msgid "List functionality is not supported for legacy VOMS interface" msgstr "Listfunktionalitet stöds inte av legacy-VOMS-gränssnittet" #: src/clients/credentials/arcproxy_voms.cpp:167 #, c-format msgid "Failed to parse VOMS command: %s" msgstr "Misslyckades med att tolka VOMS-kommando: %s" #: src/clients/credentials/arcproxy_voms.cpp:204 #, c-format msgid "" "There are %d servers with the same name: %s in your vomses file, but none of " "them can be reached, or can return a valid message." msgstr "" "Det finns %d servrar med samma namn: %s i din vomses-fil, men ingen av dem " "kan nås eller returnera ett giltigt meddelande." #: src/clients/data/arccp.cpp:77 src/clients/data/arccp.cpp:330 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:509 #, c-format msgid "Current transfer FAILED: %s" msgstr "Nuvarande överföring MISSLYCKADES: %s" #: src/clients/data/arccp.cpp:79 src/clients/data/arccp.cpp:135 #: src/clients/data/arccp.cpp:332 src/clients/data/arcls.cpp:225 #: src/clients/data/arcmkdir.cpp:73 src/clients/data/arcrename.cpp:89 #: src/clients/data/arcrm.cpp:95 msgid "This seems like a temporary error, please try again later" msgstr "Detta verkar vara ett tillfälligt fel, försök igen senare" #: src/clients/data/arccp.cpp:87 src/clients/data/arccp.cpp:96 #, c-format msgid "Unable to copy %s" msgstr "Kunde inte kopiera %s" #: src/clients/data/arccp.cpp:88 src/clients/data/arccp.cpp:97 #: src/clients/data/arcls.cpp:150 src/clients/data/arcls.cpp:159 #: src/clients/data/arcmkdir.cpp:55 src/clients/data/arcmkdir.cpp:64 #: src/clients/data/arcrename.cpp:67 src/clients/data/arcrename.cpp:76 #: src/clients/data/arcrm.cpp:68 src/clients/data/arcrm.cpp:80 msgid "Invalid credentials, please check proxy and/or CA certificates" msgstr "Ogiltiga referenser, kontrollera proxy och/eller CA-certifikat" #: src/clients/data/arccp.cpp:94 src/clients/data/arcls.cpp:156 #: src/clients/data/arcmkdir.cpp:61 src/clients/data/arcrename.cpp:73 #: src/clients/data/arcrm.cpp:77 msgid "Proxy expired" msgstr "Proxyns livstid har gått ut" #: src/clients/data/arccp.cpp:112 src/clients/data/arccp.cpp:116 #: src/clients/data/arccp.cpp:149 src/clients/data/arccp.cpp:153 #: src/clients/data/arccp.cpp:358 src/clients/data/arccp.cpp:363 #: src/clients/data/arcls.cpp:123 src/clients/data/arcmkdir.cpp:28 #: src/clients/data/arcrename.cpp:29 src/clients/data/arcrename.cpp:33 #: src/clients/data/arcrm.cpp:36 #, c-format msgid "Invalid URL: %s" msgstr "Ogiltig URL: %s" #: src/clients/data/arccp.cpp:128 msgid "Third party transfer is not supported for these endpoints" msgstr "Tredjepartsöverföring stöds inte för dessa ändpunkter" #: src/clients/data/arccp.cpp:130 msgid "" "Protocol(s) not supported - please check that the relevant gfal2\n" " plugins are installed (gfal2-plugin-* packages)" msgstr "" "Protokoll stöds inte - kontrollera att relevanta gfal2-pluginer\n" " har installerats (gfal2-plugin-* paket)" #: src/clients/data/arccp.cpp:133 #, c-format msgid "Transfer FAILED: %s"
msgstr "Överföring MISSLYCKADES: %s" #: src/clients/data/arccp.cpp:161 src/clients/data/arccp.cpp:187 #: src/clients/data/arccp.cpp:374 src/clients/data/arccp.cpp:402 #, c-format msgid "Can't read list of sources from file %s" msgstr "Kan inte läsa lista med källor från filen %s" #: src/clients/data/arccp.cpp:166 src/clients/data/arccp.cpp:202 #: src/clients/data/arccp.cpp:379 src/clients/data/arccp.cpp:418 #, c-format msgid "Can't read list of destinations from file %s" msgstr "Kan inte läsa lista med destinationer från filen %s" #: src/clients/data/arccp.cpp:171 src/clients/data/arccp.cpp:385 msgid "Numbers of sources and destinations do not match" msgstr "Antalet källor och destinationer stämmer inte överens" #: src/clients/data/arccp.cpp:216 msgid "Fileset registration is not supported yet" msgstr "Filuppsättningsregistrering stöds inte ännu" #: src/clients/data/arccp.cpp:222 src/clients/data/arccp.cpp:295 #: src/clients/data/arccp.cpp:456 #, c-format msgid "Unsupported source url: %s" msgstr "Käll-URL stöds inte: %s" #: src/clients/data/arccp.cpp:226 src/clients/data/arccp.cpp:299 #, c-format msgid "Unsupported destination url: %s" msgstr "Destinations-URL stöds inte: %s" #: src/clients/data/arccp.cpp:233 msgid "" "For registration source must be ordinary URL and destination must be " "indexing service" msgstr "" "För registrering måste källan vara en vanlig URL och destinationen en " "indexeringstjänst" #: src/clients/data/arccp.cpp:243 #, c-format msgid "Could not obtain information about source: %s" msgstr "Kunde inte erhålla information om källa: %s" #: src/clients/data/arccp.cpp:250 msgid "" "Metadata of source does not match existing destination. Use the --force " "option to override this." msgstr "" "Källans metadata stämmer inte överens med existerande destination. Använd " "alternativet --force för att överstyra detta." #: src/clients/data/arccp.cpp:262 msgid "Failed to accept new file/destination" msgstr "Misslyckades med att acceptera ny fil/destination" #: src/clients/data/arccp.cpp:268 src/clients/data/arccp.cpp:274 #, c-format msgid "Failed to register new file/destination: %s" msgstr "Misslyckades med att registrera ny fil/destination: %s" #: src/clients/data/arccp.cpp:436 msgid "Fileset copy to single object is not supported yet" msgstr "Kopiering av filuppsättning till ett enstaka objekt stöds ej ännu" #: src/clients/data/arccp.cpp:446 msgid "Can't extract object's name from source url" msgstr "Kan ej extrahera objektets namn från käll-URL" #: src/clients/data/arccp.cpp:465 #, c-format msgid "%s. Cannot copy fileset" msgstr "%s.
Kan inte kopiera filuppsättning" #: src/clients/data/arccp.cpp:475 src/hed/libs/compute/ExecutionTarget.cpp:256 #: src/hed/libs/compute/ExecutionTarget.cpp:328 #, c-format msgid "Name: %s" msgstr "Namn: %s" #: src/clients/data/arccp.cpp:478 #, c-format msgid "Source: %s" msgstr "Källa: %s" #: src/clients/data/arccp.cpp:479 #, c-format msgid "Destination: %s" msgstr "Destination: %s" #: src/clients/data/arccp.cpp:485 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:516 msgid "Current transfer complete" msgstr "Nuvarande överföring slutförd" #: src/clients/data/arccp.cpp:488 msgid "Some transfers failed" msgstr "Några överföringar misslyckades" #: src/clients/data/arccp.cpp:498 #, c-format msgid "Directory: %s" msgstr "Katalog: %s" #: src/clients/data/arccp.cpp:518 msgid "Transfer complete" msgstr "Överföring slutförd" #: src/clients/data/arccp.cpp:537 msgid "source destination" msgstr "källa destination" #: src/clients/data/arccp.cpp:538 msgid "" "The arccp command copies files to, from and between grid storage elements." msgstr "" "arccp-kommandot kopierar filer till, från och mellan gridlagringsresurser." #: src/clients/data/arccp.cpp:543 msgid "" "use passive transfer (off by default if secure is on, on by default if " "secure is not requested)" msgstr "" "använd passiv överföring (förvalt av om säker överföring begärts, förvalt på " "om säker överföring inte begärts)" #: src/clients/data/arccp.cpp:549 msgid "do not try to force passive transfer" msgstr "försök inte tvinga fram passiv överföring" #: src/clients/data/arccp.cpp:554 msgid "" "if the destination is an indexing service and not the same as the source and " "the destination is already registered, then the copy is normally not done. " "However, if this option is specified the source is assumed to be a replica " "of the destination created in an uncontrolled way and the copy is done like " "in case of replication. Using this option also skips validation of completed " "transfers." msgstr "" "om destinationen är en indexeringstjänst och inte densamma som källan och " "destinationen redan är registrerad, så skapas kopian normalt inte. Men, om " "detta alternativ anges antas källan vara en kopia av destinationen som " "skapats på ett okontrollerat sätt och kopian skapas på samma sätt som vid " "replikering. Användning av detta alternativ hoppar också över validering av " "slutförda överföringar." #: src/clients/data/arccp.cpp:567 msgid "show progress indicator" msgstr "visa fortskridandeindikator" #: src/clients/data/arccp.cpp:572 msgid "" "do not transfer, but register source into destination. destination must be a " "meta-url." msgstr "" "gör inte överföringen, men registrera källan i destinationen.
destinationen " "måste vara en meta-url." #: src/clients/data/arccp.cpp:578 msgid "use secure transfer (insecure by default)" msgstr "använd säker överföring (osäker som förval)" #: src/clients/data/arccp.cpp:583 msgid "path to local cache (use to put file into cache)" msgstr "sökväg till lokalt cache (använd för att lägga in fil i cache)" #: src/clients/data/arccp.cpp:588 src/clients/data/arcls.cpp:301 msgid "operate recursively" msgstr "arbeta rekursivt" #: src/clients/data/arccp.cpp:593 src/clients/data/arcls.cpp:306 msgid "operate recursively up to specified level" msgstr "arbeta rekursivt upp till den angivna nivån" #: src/clients/data/arccp.cpp:594 src/clients/data/arcls.cpp:307 msgid "level" msgstr "nivå" #: src/clients/data/arccp.cpp:598 msgid "number of retries before failing file transfer" msgstr "antal försök innan överföring misslyckas" #: src/clients/data/arccp.cpp:599 msgid "number" msgstr "nummer" #: src/clients/data/arccp.cpp:603 msgid "" "physical location to write to when destination is an indexing service. Must " "be specified for indexing services which do not automatically generate " "physical locations. Can be specified multiple times - locations will be " "tried in order until one succeeds." msgstr "" "fysisk plats att skriva till när destinationen är en indexeringstjänst. " "Måste anges för indexeringstjänster som inte genererar fysiska platser " "automatiskt. Kan anges flera gånger - platser kommer att provas i angiven " "ordning tills en lyckas." #: src/clients/data/arccp.cpp:611 msgid "" "perform third party transfer, where the destination pulls from the source " "(only available with GFAL plugin)" msgstr "" "utför tredjepartsöverföring, där destinationen läser från källan (endast " "tillgänglig med GFAL-plugin)" #: src/clients/data/arccp.cpp:617 src/clients/data/arcls.cpp:323 #: src/clients/data/arcmkdir.cpp:101 src/clients/data/arcrename.cpp:112 #: src/clients/data/arcrm.cpp:127 msgid "list the available plugins (protocols supported)" msgstr "lista tillgängliga pluginer (protokoll som stöds)" #: src/clients/data/arccp.cpp:656 src/clients/data/arcls.cpp:363 #: src/clients/data/arcmkdir.cpp:141 src/clients/data/arcrename.cpp:152 #: src/clients/data/arcrm.cpp:168 msgid "Protocol plugins available:" msgstr "Tillgängliga protokollpluginer:" #: src/clients/data/arccp.cpp:681 src/clients/data/arcls.cpp:388 #: src/clients/data/arcmkdir.cpp:165 src/clients/data/arcrename.cpp:175 #: src/clients/data/arcrm.cpp:193 msgid "Wrong number of parameters specified" msgstr "Fel antal parametrar angivna" #: src/clients/data/arccp.cpp:686 msgid "Options 'p' and 'n' can't be used simultaneously" msgstr "Alternativen 'p' och 'n' kan inte användas samtidigt" #: src/clients/data/arcls.cpp:129 src/clients/data/arcmkdir.cpp:34 #: src/clients/data/arcrm.cpp:43 #, c-format msgid "Can't read list of locations from file %s" msgstr "Kan inte läsa platslista från fil %s" #: src/clients/data/arcls.cpp:144 src/clients/data/arcmkdir.cpp:49 #: src/clients/data/arcrename.cpp:61 msgid "Unsupported URL given" msgstr "Angiven URL stöds inte" #: src/clients/data/arcls.cpp:149 src/clients/data/arcls.cpp:158 #, c-format msgid "Unable to list content of %s" msgstr "Kan inte lista innehållet i %s" #: src/clients/data/arcls.cpp:228 msgid "Warning: Failed listing files but some information is obtained" msgstr "" "Varning: Misslyckades med att lista filer men viss information har erhållits" #: src/clients/data/arcls.cpp:282 src/clients/data/arcmkdir.cpp:90 msgid "url" msgstr "url" #:
src/clients/data/arcls.cpp:283 msgid "" "The arcls command is used for listing files in grid storage elements and " "file\n" "index catalogues." msgstr "" "arcls-kommandot används för att lista filer på gridlagringsresurser och i\n" "filindexkataloger." #: src/clients/data/arcls.cpp:292 msgid "show URLs of file locations" msgstr "visa URLer till filens registrerade kopior" #: src/clients/data/arcls.cpp:296 msgid "display all available metadata" msgstr "visa all tillgänglig metadata" #: src/clients/data/arcls.cpp:310 msgid "" "show only description of requested object, do not list content of directories" msgstr "" "visa endast beskrivning av begärt objekt, lista inte innehåll i kataloger" #: src/clients/data/arcls.cpp:314 msgid "treat requested object as directory and always try to list content" msgstr "behandla begärt objekt som en katalog och försök alltid lista innehåll" #: src/clients/data/arcls.cpp:318 msgid "check readability of object, does not show any information about object" msgstr "kontrollera objektets läsbarhet, visar ingen information om objektet" #: src/clients/data/arcls.cpp:393 msgid "Incompatible options --nolist and --forcelist requested" msgstr "Inkompatibla alternativ --nolist och --forcelist har begärts" #: src/clients/data/arcls.cpp:398 msgid "Requesting recursion and --nolist has no sense" msgstr "Att begära rekursion och --nolist saknar mening" #: src/clients/data/arcmkdir.cpp:54 src/clients/data/arcmkdir.cpp:63 #, c-format msgid "Unable to create directory %s" msgstr "Misslyckades med att skapa katalog %s" #: src/clients/data/arcmkdir.cpp:91 msgid "" "The arcmkdir command creates directories on grid storage elements and " "catalogs." msgstr "" "arcmkdir-kommandot skapar kataloger på gridlagringsresurser och i filkataloger." #: src/clients/data/arcmkdir.cpp:96 msgid "make parent directories as needed" msgstr "skapa föräldrakataloger efter behov" #: src/clients/data/arcrename.cpp:41 msgid "Both URLs must have the same protocol, host and port" msgstr "Båda URLerna måste ha samma protokoll, värd och port" #: src/clients/data/arcrename.cpp:51 msgid "Cannot rename to or from root directory" msgstr "Kan inte byta namn till eller från rotkatalogen" #: src/clients/data/arcrename.cpp:55 msgid "Cannot rename to the same URL" msgstr "Kan inte byta namn till samma URL" #: src/clients/data/arcrename.cpp:66 src/clients/data/arcrename.cpp:75 #, c-format msgid "Unable to rename %s" msgstr "Kan inte byta namn på %s" #: src/clients/data/arcrename.cpp:106 msgid "old_url new_url" msgstr "gammal_url ny_url" #: src/clients/data/arcrename.cpp:107 msgid "The arcrename command renames files on grid storage elements." msgstr "arcrename-kommandot byter namn på filer på gridlagringsresurser." #: src/clients/data/arcrm.cpp:58 #, c-format msgid "Unsupported URL given: %s" msgstr "Angiven URL stöds inte: %s" #: src/clients/data/arcrm.cpp:67 src/clients/data/arcrm.cpp:79 #, c-format msgid "Unable to remove file %s" msgstr "Kunde inte ta bort fil %s" #: src/clients/data/arcrm.cpp:115 msgid "url [url ...]" msgstr "url [url ...]" #: src/clients/data/arcrm.cpp:116 msgid "The arcrm command deletes files on grid storage elements." msgstr "arcrm-kommandot tar bort filer på gridlagringsresurser."
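# Illustrative usage note (assumptions, not part of the upstream catalogue): a minimal
# sketch of the data client commands documented above, using hypothetical gsiftp URLs
# as placeholders; the command names and their roles come from the strings above:
#   arccp /tmp/input.dat gsiftp://se.example.org/store/input.dat
#   arcls gsiftp://se.example.org/store/
#   arcmkdir gsiftp://se.example.org/store/newdir
#   arcrename gsiftp://se.example.org/store/a gsiftp://se.example.org/store/b
#   arcrm gsiftp://se.example.org/store/input.dat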
#: src/clients/data/arcrm.cpp:121 msgid "" "remove logical file name registration even if not all physical instances " "were removed" msgstr "" "ta bort logiska filnamnsregistreringen även om inte alla fysiska kopior " "tagits bort" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:53 msgid "Cannot initialize ARCHERY domain name for query" msgstr "Kan inte initiera ARCHERY-domännamn för förfrågan" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:60 msgid "Cannot create resolver from /etc/resolv.conf" msgstr "Kan inte skapa resolver från /etc/resolv.conf" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:68 msgid "Cannot query service endpoint TXT records from DNS" msgstr "Kan inte fråga om tjänsteändpunkt-TXT-poster från DNS" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:79 msgid "Cannot parse service endpoint TXT records." msgstr "Kan inte tolka tjänsteändpunkt-TXT-poster." #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:124 #, c-format msgid "Wrong service record field \"%s\" found in the \"%s\"" msgstr "Hittade fel tjänste-post-fält \"%s\" i \"%s\"" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:129 #, c-format msgid "Malformed ARCHERY record found (endpoint url is not defined): %s" msgstr "Hittade felaktig ARCHERY-post (ändpunkts-URL är inte definierad): %s" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:134 #, c-format msgid "Malformed ARCHERY record found (endpoint type is not defined): %s" msgstr "Hittade felaktig ARCHERY-post (ändpunktstyp är inte definierad): %s" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:138 #, c-format msgid "Found service endpoint %s (type %s)" msgstr "Hittade tjänsteändpunkt %s (typ %s)" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:157 #, c-format msgid "" "Status for service endpoint \"%s\" is set to inactive in ARCHERY. Skipping." msgstr "" "Status för tjänsteändpunkt \"%s\" är satt till inaktiv i ARCHERY. Hoppar " "över." #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:229 #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:161 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:149 #, c-format msgid "Job %s has no delegation associated. Can't renew such job." msgstr "Jobb %s har ingen associerad delegering. Kan inte förnya sådana jobb." #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:241 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:161 #, c-format msgid "Job %s failed to renew delegation %s." msgstr "Jobb %s misslyckades med att förnya delegering %s."
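# Illustrative note (assumptions, not part of the upstream catalogue): the ARCHERY
# messages above indicate that service endpoints are published as DNS TXT records
# carrying an endpoint url, type and status. A hypothetical query and record follow;
# the "_archery" label, the use of dig and the u=/t=/s= field spelling are assumptions
# rather than something stated in these strings:
#   dig +short TXT _archery.example.org
#   "u=https://ce.example.org/arex t=org.nordugrid.arcrest s=1"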
#: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:313 #, c-format msgid "Failed to process jobs - wrong response: %u" msgstr "Misslyckades med att behandla jobb - felaktigt svar: %u" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:314 #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:323 #, c-format msgid "Content: %s" msgstr "Innehåll: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:317 #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:329 #, c-format msgid "Failed to process job: %s" msgstr "Misslyckades med att behandla jobb: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:327 msgid "Failed to process jobs - failed to parse response" msgstr "Misslyckades med att behandla jobb - misslyckades med att tolka svar" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:340 #, c-format msgid "No response returned: %s" msgstr "Inget svar returnerades: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:364 #, c-format msgid "Failed to process job: %s - %s %s" msgstr "Misslyckades med att behandla jobb: %s - %s %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:431 #, c-format msgid "Failed retrieving job description for job: %s" msgstr "Misslyckades med att hämta jobbeskrivning för jobb: %s" #: src/hed/acc/ARCREST/JobListRetrieverPluginREST.cpp:29 msgid "Collecting Job (A-REX REST jobs) information." msgstr "Samlar in jobbinformation (A-REX REST-jobb)." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:50 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:84 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:124 msgid "Failed to communicate to delegation endpoint." msgstr "Misslyckades med att kommunicera med delegeringstjänst." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:55 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:89 #, c-format msgid "Unexpected response code from delegation endpoint - %u" msgstr "Oväntad svarskod från delegeringstjänst - %u" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:57 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:91 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:242 #: src/hed/dmc/gridftp/Lister.cpp:223 src/hed/dmc/gridftp/Lister.cpp:243 #: src/hed/dmc/gridftp/Lister.cpp:468 src/hed/dmc/gridftp/Lister.cpp:475 #: src/hed/dmc/gridftp/Lister.cpp:497 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:163 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:196 #, c-format msgid "Response: %s" msgstr "Svar: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:62 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:96 msgid "Missing response from delegation endpoint." msgstr "Saknat svar från delegeringstjänst." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:71 #, c-format msgid "Unexpected delegation location from delegation endpoint - %s." msgstr "Oväntad delegeringsplats från delegeringstjänst - %s." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:128 #, c-format msgid "Unexpected response code from delegation endpoint: %u, %s." msgstr "Oväntad svarskod från delegeringstjänst: %u, %s." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:177 msgid "Unable to submit jobs. Failed to delegate credentials." msgstr "Kunde inte sända in jobb. Misslyckades med att delegera referenser."
#: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:192 msgid "Failed to prepare job description" msgstr "Misslyckades med att förbereda jobbeskrivning" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:201 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:87 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:401 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:116 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:262 #, c-format msgid "Unable to submit job. Job description is not valid in the %s format: %s" msgstr "Kunde inte sända in jobb. Jobbeskrivning inte giltig i %s-formatet: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:208 msgid "Unable to submit job. Failed to assign delegation to job description." msgstr "" "Kunde inte sända in jobb. Misslyckades med att tilldela delegering till " "jobbeskrivning." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:231 msgid "Failed to submit all jobs." msgstr "Misslyckades med att sända in alla jobb." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:241 #, c-format msgid "Failed to submit all jobs: %u %s" msgstr "Misslyckades med att sända in alla jobb: %u %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:254 #, c-format msgid "Failed to submit all jobs: %s" msgstr "Misslyckades med att sända in alla jobb: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:274 #, c-format msgid "Failed to submit all jobs: %s %s" msgstr "Misslyckades med att sända in alla jobb: %s %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:289 msgid "Failed uploading local input files" msgstr "Misslyckades med att ladda upp lokala indatafiler" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:27 msgid "Querying WSRF GLUE2 computing REST endpoint." msgstr "Frågar WSRF-GLUE2-beräknings-REST-ändpunkt." #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:60 #, c-format msgid "CONTENT %u: %s" msgstr "INNEHÅLL %u: %s" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:64 msgid "Response is not XML" msgstr "Svaret är inte XML" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:69 #, c-format msgid "Parsed domains: %u" msgstr "Tolkade domäner: %u" #: src/hed/acc/Broker/DescriptorsBroker.cpp:14 msgid "Sorting according to free slots in queue" msgstr "Sorterar efter lediga slottar i kö" #: src/hed/acc/Broker/DescriptorsBroker.cpp:15 msgid "Random sorting" msgstr "Slumpvis sortering" #: src/hed/acc/Broker/DescriptorsBroker.cpp:16 msgid "Sorting according to specified benchmark (default \"specint2000\")" msgstr "Sorterar efter angivet benchmark (förval \"specint2000\")" #: src/hed/acc/Broker/DescriptorsBroker.cpp:17 msgid "Sorting according to input data availability at target" msgstr "Sorterar efter indatas tillgänglighet på target" #: src/hed/acc/Broker/DescriptorsBroker.cpp:18 msgid "Performs neither sorting nor matching" msgstr "Utför varken sortering eller matchning" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:24 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of waiting " "jobs" msgstr "" "Target %s borttaget av FastestQueueBroker, rapporterar inte antal väntande " "jobb" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:27 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of total slots" msgstr "" "Target %s borttaget av FastestQueueBroker, rapporterar inte totalt antal slottar" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:30 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of free slots" msgstr "" "Target %s
borttaget av FastestQueueBroker, rapporterar inte antal lediga " "slottar" #: src/hed/acc/EMIES/EMIESClient.cpp:81 msgid "Creating an EMI ES client" msgstr "Skapar en EMI-ES-klient" #: src/hed/acc/EMIES/EMIESClient.cpp:85 msgid "Unable to create SOAP client used by EMIESClient." msgstr "Misslyckades med att skapa SOAP-klient som används av EMIESClient." #: src/hed/acc/EMIES/EMIESClient.cpp:133 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:505 msgid "Initiating delegation procedure" msgstr "Initierar delegeringsprocess" #: src/hed/acc/EMIES/EMIESClient.cpp:163 msgid "Re-creating an EMI ES client" msgstr "Återskapar en EMI-ES-klient" #: src/hed/acc/EMIES/EMIESClient.cpp:180 #, c-format msgid "Processing a %s request" msgstr "Behandlar en %s-begäran" #: src/hed/acc/EMIES/EMIESClient.cpp:191 #, c-format msgid "%s request failed" msgstr "%s-begäran misslyckades" #: src/hed/acc/EMIES/EMIESClient.cpp:200 #, c-format msgid "No response from %s" msgstr "Inget svar från %s" #: src/hed/acc/EMIES/EMIESClient.cpp:209 #, c-format msgid "%s request to %s failed with response: %s" msgstr "%s-begäran till %s misslyckades med svar: %s" #: src/hed/acc/EMIES/EMIESClient.cpp:224 #, c-format msgid "XML response: %s" msgstr "XML-svar: %s" #: src/hed/acc/EMIES/EMIESClient.cpp:234 #, c-format msgid "%s request to %s failed. Unexpected response: %s." msgstr "%s-begäran till %s misslyckades: Oväntat svar: %s." #: src/hed/acc/EMIES/EMIESClient.cpp:248 src/hed/acc/EMIES/EMIESClient.cpp:355 #, c-format msgid "Creating and sending job submit request to %s" msgstr "Skapar och skickar en begäran att sända in ett jobb till %s" #: src/hed/acc/EMIES/EMIESClient.cpp:313 src/hed/acc/EMIES/EMIESClient.cpp:416 #, c-format msgid "Job description to be sent: %s" msgstr "Jobbeskrivning som skall sändas: %s" #: src/hed/acc/EMIES/EMIESClient.cpp:426 src/hed/acc/EMIES/EMIESClient.cpp:609 #: src/hed/acc/EMIES/EMIESClient.cpp:1098 #, c-format msgid "New limit for vector queries returned by EMI ES service: %d" msgstr "Ny gräns för vektorförfrågningar returnerad av EMI-ES-tjänst: %d" #: src/hed/acc/EMIES/EMIESClient.cpp:434 src/hed/acc/EMIES/EMIESClient.cpp:617 #: src/hed/acc/EMIES/EMIESClient.cpp:1106 #, c-format msgid "" "Error: Service returned a limit higher or equal to current limit (current: " "%d; returned: %d)" msgstr "" "Fel: Tjänsten returnerade en gräns högre än eller lika med nuvarande gräns " "(nuvarande: %d; returnerad: %d)" #: src/hed/acc/EMIES/EMIESClient.cpp:502 src/hed/acc/EMIES/EMIESClient.cpp:536 #: src/hed/acc/EMIES/EMIESClient.cpp:592 #, c-format msgid "Creating and sending job information query request to %s" msgstr "Skapar och sänder en begäran om en jobbinformationsförfrågan till %s" #: src/hed/acc/EMIES/EMIESClient.cpp:775 #, c-format msgid "Creating and sending service information request to %s" msgstr "Skapar och sänder en begäran om tjänsteinformation till %s" #: src/hed/acc/EMIES/EMIESClient.cpp:832 #, c-format msgid "Creating and sending service information query request to %s" msgstr "" "Skapar och sänder en begäran om en tjänsteinformationsförfrågan till %s" #: src/hed/acc/EMIES/EMIESClient.cpp:880 src/hed/acc/EMIES/EMIESClient.cpp:901 #, c-format msgid "Creating and sending job clean request to %s" msgstr "Skapar och skickar en begäran att ta bort ett jobb till %s" #: src/hed/acc/EMIES/EMIESClient.cpp:922 #, c-format msgid "Creating and sending job suspend request to %s" msgstr "Skapar och sänder en begäran att suspendera ett jobb till %s" #: src/hed/acc/EMIES/EMIESClient.cpp:943 #, c-format
msgid "Creating and sending job resume request to %s" msgstr "Skapar och sänder en begäran att Ã¥teruppta ett jobb till %s" #: src/hed/acc/EMIES/EMIESClient.cpp:964 #, c-format msgid "Creating and sending job restart request to %s" msgstr "Skapar och sänder en begäran att starta om ett jobb till %s" #: src/hed/acc/EMIES/EMIESClient.cpp:1021 #, c-format msgid "Creating and sending job notify request to %s" msgstr "Skapar och sänder en begäran om jobbavisering till %s" #: src/hed/acc/EMIES/EMIESClient.cpp:1076 #, c-format msgid "Creating and sending notify request to %s" msgstr "Skapar och skickar en begäran om avisering till %s" #: src/hed/acc/EMIES/EMIESClient.cpp:1166 #, c-format msgid "Creating and sending job list request to %s" msgstr "Skapar och sänder en begäran om en jobblista till %s" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:175 #, c-format msgid "Job %s failed to renew delegation %s - %s." msgstr "Jobb %s misslyckades med att förnya delegering %s - %s." #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:197 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:464 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:191 #, c-format msgid "Job %s does not report a resumable state" msgstr "Jobb %s rapporterar inte ett tillstÃ¥nd varifrÃ¥n det kan Ã¥terupptas" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:202 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:196 #, c-format msgid "Resuming job: %s at state: %s (%s)" msgstr "Ã…terupptar jobb: %s i tillstÃ¥nd: %s (%s)" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:215 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:520 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:205 msgid "Job resuming successful" msgstr "Jobbet Ã¥terupptogs framgÃ¥ngsrikt" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:248 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:251 #, c-format msgid "Failed retrieving information for job: %s" msgstr "Misslyckades med att hämta information om jobb: %s" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:330 msgid "Retrieving job description of EMI ES jobs is not supported" msgstr "Att hämta jobbeskrivning för ett EMI-ES-jobb stöds inte" #: src/hed/acc/EMIES/JobListRetrieverPluginEMIES.cpp:37 #, c-format msgid "Listing jobs succeeded, %d jobs found" msgstr "Listning av jobb lyckades, hittade %d jobb" #: src/hed/acc/EMIES/JobListRetrieverPluginEMIES.cpp:53 #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.cpp:111 #: src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp:83 #, c-format msgid "" "Skipping retrieved job (%s) because it was submitted via another interface " "(%s)." msgstr "" "Hoppar över hämtat jobb (%s) eftersom det sändes in via ett annat gränssnitt " "(%s)." 
#: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:47 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:41 msgid "" "Failed to delegate credentials to server - no delegation interface found" msgstr "" "Misslyckades med att delegera referenser till server - hittade inget " "delegeringsgränssnitt" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:54 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:48 #, c-format msgid "Failed to delegate credentials to server - %s" msgstr "Misslyckades med att delegera referenser till server - %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:77 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:87 msgid "Failed preparing job description" msgstr "Misslyckades med att förbereda jobbeskrivning" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:95 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:406 msgid "Unable to submit job. Job description is not valid XML" msgstr "Kunde inte sända in jobb. Jobbeskrivning är inte giltig XML" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:154 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:482 msgid "No valid job identifier returned by EMI ES" msgstr "Ingen giltig jobbidentifierare returnerades av EMI-ES" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:180 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:499 msgid "Job failed on service side" msgstr "Jobb misslyckades pÃ¥ tjänstesidan" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:190 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:509 msgid "Failed to obtain state of job" msgstr "Misslyckades med att erhÃ¥lla jobbets tillstÃ¥nd" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:205 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:521 msgid "Failed to wait for job to allow stage in" msgstr "Misslyckades med att vänta pÃ¥ att jobb ska tillÃ¥ta stage-in" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:228 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:541 msgid "Failed to obtain valid stagein URL for input files" msgstr "Misslyckades med att erhÃ¥lla giltig stage-in-URL för indatafiler" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:248 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:558 #, c-format msgid "Failed uploading local input files to %s" msgstr "Misslyckades med att ladda upp lokala indatafiler till %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:269 #, c-format msgid "Failed to submit job description: EMIESFault(%s , %s)" msgstr "Misslyckades med att sända in jobbeskrivning: EMI-ES-fel (%s, %s)" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:278 #, c-format msgid "Failed to submit job description: UnexpectedError(%s)" msgstr "Misslyckades med att sända in jobbeskrivning: Oväntat fel (%s)" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:315 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:574 msgid "Failed to notify service" msgstr "Misslyckades med att meddela tjänsten" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:367 msgid "Failed preparing job description to target resources" msgstr "Misslyckades med att förbereda jobbeskrivning för target-resurser" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:475 #, c-format msgid "Failed to submit job description: %s" msgstr "Misslyckades med att sända in jobbeskrivning: %s" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:30 msgid "Collecting EMI-ES GLUE2 computing info endpoint information." msgstr "Samlar in EMI-ES-GLUE2-beräkningsinformationsändpunktsinformation." 
#: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:50 msgid "Generating EMIES targets" msgstr "Genererar EMI-ES-destinationer" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:59 #, c-format msgid "Generated EMIES target: %s" msgstr "Genererad EMI-ES-target: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:75 #: src/hed/acc/EMIES/TestEMIESClient.cpp:79 #, c-format msgid "Query returned unexpected element: %s:%s" msgstr "FörfrÃ¥gan returnerade oväntat element: %s:%s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:85 #, c-format msgid "Element validation according to GLUE2 schema failed: %s" msgstr "Elementvalidering enligt GLUE2-schema misslyckades: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:114 msgid "Resource query failed" msgstr "ResursförfrÃ¥gan misslyckades" #: src/hed/acc/EMIES/TestEMIESClient.cpp:132 msgid "Submission failed" msgstr "Insändning misslyckades" #: src/hed/acc/EMIES/TestEMIESClient.cpp:143 msgid "Obtaining status failed" msgstr "Misslyckades med att erhÃ¥lla status" #: src/hed/acc/EMIES/TestEMIESClient.cpp:153 msgid "Obtaining information failed" msgstr "Misslyckades med att erhÃ¥lla information" #: src/hed/acc/EMIES/TestEMIESClient.cpp:170 msgid "Cleaning failed" msgstr "Borttagning misslyckades" #: src/hed/acc/EMIES/TestEMIESClient.cpp:177 msgid "Notify failed" msgstr "Avisering misslyckades" #: src/hed/acc/EMIES/TestEMIESClient.cpp:184 msgid "Kill failed" msgstr "Avbrytande misslyckades" #: src/hed/acc/EMIES/TestEMIESClient.cpp:190 msgid "List failed" msgstr "Listning misslyckades" #: src/hed/acc/EMIES/TestEMIESClient.cpp:201 #, c-format msgid "Fetching resource description from %s" msgstr "Hämtar resursbeskrivning frÃ¥n %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:204 #: src/hed/acc/EMIES/TestEMIESClient.cpp:272 #: src/hed/acc/EMIES/TestEMIESClient.cpp:282 #: src/hed/acc/EMIES/TestEMIESClient.cpp:293 #, c-format msgid "Failed to obtain resource description: %s" msgstr "Misslyckades med att erhÃ¥lla resursbeskrivning: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:213 #: src/hed/acc/EMIES/TestEMIESClient.cpp:217 #, c-format msgid "Resource description contains unexpected element: %s:%s" msgstr "Resursbeskrivning innehÃ¥ller oväntat element: %s:%s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:223 msgid "Resource description validation according to GLUE2 schema failed: " msgstr "Resursbeskrivningsvalidering enligt GLUE2-schema misslyckades: " #: src/hed/acc/EMIES/TestEMIESClient.cpp:228 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:560 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:819 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:517 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:706 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:129 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:169 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1214 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1248 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1430 #: src/hed/identitymap/ArgusPDPClient.cpp:444 #: src/hed/identitymap/ArgusPEPClient.cpp:98 #: src/hed/identitymap/ArgusPEPClient.cpp:345 #: src/hed/libs/common/Thread.cpp:242 src/hed/libs/common/Thread.cpp:245 #: src/hed/libs/common/Thread.cpp:248 #: src/hed/libs/credential/Credential.cpp:1048 #: src/hed/libs/data/DataPointDelegate.cpp:628 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:68 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:84 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:100 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:119 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:129 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:137 #: 
src/hed/mcc/tcp/PayloadTCPSocket.cpp:146 #: src/hed/mcc/tls/PayloadTLSMCC.cpp:82 src/hed/shc/arcpdp/ArcPDP.cpp:235 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:293 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:247 #: src/libs/data-staging/Scheduler.cpp:117 #: src/services/a-rex/delegation/DelegationStore.cpp:40 #: src/services/a-rex/delegation/DelegationStore.cpp:45 #: src/services/a-rex/delegation/DelegationStore.cpp:50 #: src/services/a-rex/delegation/DelegationStore.cpp:82 #: src/services/a-rex/delegation/DelegationStore.cpp:88 #: src/services/a-rex/grid-manager/inputcheck.cpp:33 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:480 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:551 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:576 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:587 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:598 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:609 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:617 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:623 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:628 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:633 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:643 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:652 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:660 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:671 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:678 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:736 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:743 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:783 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:787 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:790 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:859 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:872 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:889 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:901 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1174 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1179 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1208 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1221 #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:379 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:386 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:426 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:478 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:593 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:627 #, c-format msgid "%s" msgstr "%s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:248 msgid "Resource description is empty" msgstr "Resursbeskrivning är tom" #: src/hed/acc/EMIES/TestEMIESClient.cpp:255 #, c-format msgid "Resource description provides URL for interface %s: %s" msgstr "Resursbeskrivning tillhandahÃ¥ller URL för gränssnitt %s: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:260 msgid "Resource description provides no URLs for interfaces" msgstr "Resursbeskrivning tillhandahÃ¥ller inga URLer för gränssnitt" #: src/hed/acc/EMIES/TestEMIESClient.cpp:263 msgid "Resource description validation passed" msgstr "Resursbeskrivningsvalidering lyckades" #: src/hed/acc/EMIES/TestEMIESClient.cpp:265 #, c-format msgid "Requesting ComputingService elements of resource description at %s" msgstr "Begär 
ComputingService-element frÃ¥n resursbeskrivning pÃ¥ %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:270 msgid "Performing /Services/ComputingService query" msgstr "Utför /Services/ComputingService-förfrÃ¥gan" #: src/hed/acc/EMIES/TestEMIESClient.cpp:274 #: src/hed/acc/EMIES/TestEMIESClient.cpp:284 #: src/hed/acc/EMIES/TestEMIESClient.cpp:295 msgid "Query returned no elements." msgstr "FörfrÃ¥gan returnerade inga element" #: src/hed/acc/EMIES/TestEMIESClient.cpp:280 msgid "Performing /ComputingService query" msgstr "Utför /ComputingService-förfrÃ¥gan" #: src/hed/acc/EMIES/TestEMIESClient.cpp:291 msgid "Performing /* query" msgstr "Utför /*-förfrÃ¥gan" #: src/hed/acc/EMIES/TestEMIESClient.cpp:301 msgid "All queries failed" msgstr "Alla förfrÃ¥gningar misslyckades" #: src/hed/acc/EMIES/TestEMIESClient.cpp:331 #, c-format msgid "" "Number of ComputingService elements obtained from full document and XPath " "query do not match: %d != %d" msgstr "" "Antalet ComputingService-element som erhölls frÃ¥n hela dokumentet och frÃ¥n " "XPath-förfrÃ¥gan stämmer inte överens: %d != %d" #: src/hed/acc/EMIES/TestEMIESClient.cpp:334 msgid "Resource description query validation passed" msgstr "ResursbeskrivningsförfrÃ¥gansvalidering lyckades" #: src/hed/acc/EMIES/TestEMIESClient.cpp:336 #, c-format msgid "Unsupported command: %s" msgstr "Kommandot stöds inte: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:142 #, c-format msgid "Connect: Failed to init handle: %s" msgstr "Connect: Misslyckades med att initiera handtag: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:148 #, c-format msgid "Failed to enable IPv6: %s" msgstr "Misslyckades med att aktivera IPv6: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:158 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:172 #, c-format msgid "Connect: Failed to connect: %s" msgstr "Connect: Misslyckades med att koppla upp: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:165 #, c-format msgid "Connect: Connecting timed out after %d ms" msgstr "Connect: Anslutning avbröts pÃ¥ grund av timeout efter %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:185 #, c-format msgid "Connect: Failed to init auth info handle: %s" msgstr "Connect: Misslyckades med att initiera autentiseringsinfohandtag: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:196 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:210 #, c-format msgid "Connect: Failed authentication: %s" msgstr "Connect: Misslyckades med autentisering: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:203 #, c-format msgid "Connect: Authentication timed out after %d ms" msgstr "Connect: Autentisering avbröts pÃ¥ grund av timeout efter %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:224 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:256 #, c-format msgid "SendCommand: Command: %s" msgstr "SendCommand: Kommando: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:229 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:240 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:260 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:271 #, c-format msgid "SendCommand: Failed: %s" msgstr "SendCommand: Misslyckades: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:235 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:266 #, c-format msgid "SendCommand: Timed out after %d ms" msgstr "SendCommand: Avbröts pÃ¥ grund av timeout efter %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:243 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:276 #, c-format msgid "SendCommand: Response: %s" msgstr "SendCommand: Svar: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:293 msgid "FTP Job Control: Failed sending EPSV and PASV commands" msgstr "FTP-jobbkontroll: 
Misslyckades med att sända EPSV- och PASV-kommandon" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:298 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:304 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:320 #, c-format msgid "FTP Job Control: Server PASV response parsing failed: %s" msgstr "FTP-jobbkontroll: Tolkning av serverns PASV-svar misslyckades: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:330 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:336 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:343 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:350 #, c-format msgid "FTP Job Control: Server EPSV response parsing failed: %s" msgstr "FTP-jobbkontroll: Tolkning av serverns EPSV-svar misslyckades: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:357 #, c-format msgid "FTP Job Control: Server EPSV response port parsing failed: %s" msgstr "" "FTP-jobbkontroll: Tolkning av port i serverns EPSV-svar misslyckades: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:366 #, c-format msgid "FTP Job Control: Failed to apply local address to data connection: %s" msgstr "" "FTP-jobbkontroll: Misslyckades med att tillämpa lokal adress pÃ¥ " "dataförbindelse: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:372 #, c-format msgid "" "FTP Job Control: Can't parse host and/or port in response to EPSV/PASV: %s" msgstr "" "FTP-jobbkontroll: Kan inte tolka värd och/eller port i EPSV/PASV-svar: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:377 #, c-format msgid "FTP Job Control: Data channel: %d.%d.%d.%d:%d" msgstr "FTP-jobbkontroll: Datakanal: %d.%d.%d.%d:%d" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:393 #, c-format msgid "FTP Job Control: Data channel: [%s]:%d" msgstr "FTP-jobbkontroll: Datakanal: [%s]:%d" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:398 #, c-format msgid "FTP Job Control: Local port failed: %s" msgstr "FTP-jobbkontroll: Lokal port misslyckades: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:422 msgid "FTP Job Control: Failed sending DCAU command" msgstr "FTP-jobbkontroll: Misslyckades med att sända DCAU-kommando" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:427 msgid "FTP Job Control: Failed sending TYPE command" msgstr "FTP-jobbkontroll: Misslyckades med att sända TYPE-kommando" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:436 #, c-format msgid "FTP Job Control: Local type failed: %s" msgstr "FTP-jobbkontroll: Lokal typ misslyckades: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:446 #, c-format msgid "FTP Job Control: Failed sending STOR command: %s" msgstr "FTP-jobbkontroll: Misslyckades med att sända STOR-kommando: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:454 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:475 #, c-format msgid "FTP Job Control: Data connect write failed: %s" msgstr "FTP-jobbkontroll: Dataförbindelse för skrivning misslyckades: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:461 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:469 #, c-format msgid "FTP Job Control: Data connect write timed out after %d ms" msgstr "" "FTP-jobbkontroll: Dataförbindelse för skrivning avbröts pÃ¥ grund av timeout " "efter %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:487 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:507 #, c-format msgid "FTP Job Control: Data write failed: %s" msgstr "FTP-jobbkontroll: Dataskrivning misslyckades: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:493 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:501 #, c-format msgid "FTP Job Control: Data write timed out after %d ms" msgstr "" "FTP-jobbkontroll: Dataskrivning avbröts pÃ¥ grund av timeout efter %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:527 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:538 #, c-format 
msgid "Disconnect: Failed aborting - ignoring: %s" msgstr "Disconnect: Misslyckades med att avsluta - ignorerar: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:530 #, c-format msgid "Disconnect: Data close timed out after %d ms" msgstr "Disconnect: Data-nedstängning avbröts pÃ¥ grund av timeout efter %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:541 #, c-format msgid "Disconnect: Abort timed out after %d ms" msgstr "Disconnect: Avbrytande avbröts pÃ¥ grund av timeout efter %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:549 #, c-format msgid "Disconnect: Failed quitting - ignoring: %s" msgstr "Disconnect: Misslyckades med att avsluta - ignorerar: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:552 #, c-format msgid "Disconnect: Quitting timed out after %d ms" msgstr "Disconnect: Avslutande avbröts pÃ¥ grund av timeout efter %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:561 #, c-format msgid "Disconnect: Failed closing - ignoring: %s" msgstr "Disconnect: Misslyckades med att stänga ned - ignorerar: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:567 #, c-format msgid "Disconnect: Closing timed out after %d ms" msgstr "Disconnect: Nedstängning avbröts pÃ¥ grund av timeout efter %d ms" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:582 msgid "Disconnect: waiting for globus handle to settle" msgstr "Disconnect: väntar pÃ¥ att globus-handtag ska lugna ned sig" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:596 msgid "Disconnect: globus handle is stuck." msgstr "Disconnect: globus-handtag har fastnat" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:604 #, c-format msgid "Disconnect: Failed destroying handle: %s. Can't handle such situation." msgstr "" "Disconnect: Misslyckades med att förstöra handtag: %s. Kan inte hantera en " "sÃ¥dan situation." #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:607 msgid "Disconnect: handle destroyed." msgstr "Disconnect: handtag har förstörts." #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:43 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:44 msgid "" "Missing reference to factory and/or module. It is unsafe to use Globus in " "non-persistent mode - SubmitterPlugin for GRIDFTPJOB is disabled. Report to " "developers." msgstr "" "Saknar referens till fabrik och/eller modul. Det är osäkert att använda " "Globus i icke-persistent mode - insändnings-plugin för GRIDFTPJOB är " "deaktiverad. Rapportera till utvecklare." #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:58 #, c-format msgid "Unable to query job information (%s), invalid URL provided (%s)" msgstr "" "Kan inte frÃ¥ga om jobbinformation (%s), ogiltig URL tillhandahÃ¥llen (%s)" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:70 #, c-format msgid "Jobs left to query: %d" msgstr "Kvarvarande jobb att frÃ¥ga: %d" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:79 #, c-format msgid "Querying batch with %d jobs" msgstr "FrÃ¥gebatch med %d jobb" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:97 msgid "Can't create information handle - is the ARC LDAP DMC plugin available?" msgstr "" "Kan inte skapa informationshandtag - är ARCs LDAP-DMC-plugin tillgänglig?" 
#: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:130 #, c-format msgid "Job information not found in the information system: %s" msgstr "Hittade inte jobbinformation i informationssystemet: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:132 msgid "" "This job was very recently submitted and might not yet have reached the " "information system" msgstr "" "Detta jobb skickades nyligen in och har kanske inte nått " "informationssystemet ännu" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:319 #, c-format msgid "Cleaning job: %s" msgstr "Tar bort jobb: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:323 msgid "Failed to connect for job cleaning" msgstr "Misslyckades med att koppla upp för att ta bort jobb" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:335 msgid "Failed sending CWD command for job cleaning" msgstr "Misslyckades med att sända CWD-kommando för att ta bort jobb" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:342 msgid "Failed sending RMD command for job cleaning" msgstr "Misslyckades med att sända RMD-kommando för att ta bort jobb" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:349 msgid "Failed to disconnect after job cleaning" msgstr "Misslyckades med att koppla ner efter att ha tagit bort jobb" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:356 msgid "Job cleaning successful" msgstr "Borttagande av jobb lyckades" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:367 #, c-format msgid "Cancelling job: %s" msgstr "Avbryter jobb: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:371 msgid "Failed to connect for job cancelling" msgstr "Misslyckades med att koppla upp för att avbryta jobb" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:383 msgid "Failed sending CWD command for job cancelling" msgstr "Misslyckades med att skicka CWD-kommando för att avbryta jobb" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:390 msgid "Failed sending DELE command for job cancelling" msgstr "Misslyckades med att skicka DELE-kommando för att avbryta jobb" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:397 msgid "Failed to disconnect after job cancelling" msgstr "Misslyckades med att koppla ner efter att ha avbrutit jobb" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:405 msgid "Job cancelling successful" msgstr "Avbrytande av jobb lyckades" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:416 #, c-format msgid "Renewing credentials for job: %s" msgstr "Förnyar referenser för jobb: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:420 msgid "Failed to connect for credential renewal" msgstr "Misslyckades med att koppla upp för att förnya referenser" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:432 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:439 msgid "Failed sending CWD command for credentials renewal" msgstr "Misslyckades med att sända CWD-kommando för att förnya referenser" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:445 msgid "Failed to disconnect after credentials renewal" msgstr "Misslyckades med att koppla ner efter att ha förnyat referenser" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:452 msgid "Renewal of credentials was successful" msgstr "Förnyelse av referenser lyckades" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:474 #, c-format msgid "Illegal jobID specified (%s)" msgstr "Ogiltigt jobb-id angivet (%s)" #: 
src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:481 #, c-format msgid "HER: %s" msgstr "HER: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:487 #, c-format msgid "Could not create temporary file: %s" msgstr "Kunde inte skapa temporär fil: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:556 #, c-format msgid "Trying to retrieve job description of %s from computing resource" msgstr "Försöker hämta jobbeskrivning för %s frÃ¥n beräkningsresurs" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:561 #, c-format msgid "invalid jobID: %s" msgstr "ogiltigt jobb-id: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:603 msgid "clientxrsl found" msgstr "hittade klient-xrsl" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:606 msgid "could not find start of clientxrsl" msgstr "kunde inte hitta början pÃ¥ klient-xrsl" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:611 msgid "could not find end of clientxrsl" msgstr "kunde inte hitta slutet pÃ¥ klient-xrsl" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:624 #, c-format msgid "Job description: %s" msgstr "Jobbeskrivning: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:627 msgid "clientxrsl not found" msgstr "hittade inte klient-xrsl" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:633 #, c-format msgid "Invalid JobDescription: %s" msgstr "Ogiltig jobbeskrivning: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:636 msgid "Valid JobDescription found" msgstr "Hittade giltig jobbeskrivning" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:60 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:206 msgid "Submit: Failed to connect" msgstr "Submit: Misslyckades med att koppla upp" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:68 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:214 msgid "Submit: Failed sending CWD command" msgstr "Submit: Misslyckades med att sända CWD-kommando" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:79 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:225 msgid "Submit: Failed sending CWD new command" msgstr "Submit: Misslyckades med att sända CWD new-kommando" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:106 msgid "Failed to prepare job description." msgstr "Misslyckades med att förbereda jobbeskrivning." #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:123 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:269 msgid "Submit: Failed sending job description" msgstr "Submit: Misslyckades med att sända jobbeskrivning" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:138 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:284 msgid "Submit: Failed uploading local input files" msgstr "Submit: Misslyckades med att ladda upp lokala indatafiler" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:194 msgid "" "Submit: service has no suitable information interface - need org.nordugrid." "ldapng" msgstr "" "Submit: tjänsten har inga lämpliga informationsgränssnitt - behöver org." "nordugrid.ldapng" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:252 msgid "Failed to prepare job description to target resources." msgstr "Misslyckades med att förbereda jobbeskrivning för target-resurser." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:75 #, c-format msgid "[ADLParser] Unsupported EMI ES state %s." msgstr "[ADLParser] EMI-ES-tillstÃ¥nd stöds inte %s." 
#: src/hed/acc/JobDescriptionParser/ADLParser.cpp:95 #, c-format msgid "[ADLParser] Unsupported internal state %s." msgstr "[ADLParser] Internt tillstånd %s stöds inte." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:105 #, c-format msgid "[ADLParser] Optional for %s elements are not supported yet." msgstr "[ADLParser] Utelämnande av %s-element stöds inte än." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:114 #, c-format msgid "[ADLParser] %s element must be boolean." msgstr "[ADLParser] %s-element måste vara booleskt." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:126 #, c-format msgid "[ADLParser] Code in FailIfExitCodeNotEqualTo in %s is not valid number." msgstr "" "[ADLParser] Kod i FailIfExitCodeNotEqualTo i %s är inte ett giltigt nummer." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:364 msgid "[ADLParser] Root element is not ActivityDescription " msgstr "[ADLParser] Rot-element är inte ActivityDescription " #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:411 msgid "[ADLParser] priority is too large - using max value 100" msgstr "[ADLParser] prioritet är för stor - använder maxvärdet 100" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:454 #, c-format msgid "[ADLParser] Unsupported URL %s for RemoteLogging." msgstr "[ADLParser] URL %s för RemoteLogging stöds inte." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:473 #, c-format msgid "[ADLParser] Wrong time %s in ExpirationTime." msgstr "[ADLParser] Felaktig tid %s i ExpirationTime." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:503 msgid "[ADLParser] AccessControl isn't valid XML." msgstr "[ADLParser] AccessControl är inte giltig XML." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:514 msgid "[ADLParser] CredentialService must contain valid URL." msgstr "[ADLParser] CredentialService måste innehålla en giltig URL." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:543 #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:546 msgid "[ADLParser] Only email Prorocol for Notification is supported yet." msgstr "[ADLParser] Endast email-protokoll för avisering stöds än." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:604 msgid "[ADLParser] Missing or wrong value in ProcessesPerSlot." msgstr "[ADLParser] Saknat eller felaktigt värde i ProcessesPerSlot." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:609 msgid "[ADLParser] Missing or wrong value in ThreadsPerProcess." msgstr "[ADLParser] Felaktigt eller saknat värde i ThreadsPerProcess." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:615 msgid "" "[ADLParser] Missing Name element or value in ParallelEnvironment/Option " "element." msgstr "" "[ADLParser] Saknat Name-element eller -värde i ParallelEnvironment/Option-" "element." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:632 msgid "[ADLParser] NetworkInfo is not supported yet." msgstr "[ADLParser] NetworkInfo stöds inte än." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:646 #, c-format msgid "[ADLParser] NodeAccess value %s is not supported yet." msgstr "[ADLParser] NodeAccess-värde %s stöds inte än." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:654 msgid "[ADLParser] Missing or wrong value in NumberOfSlots." msgstr "[ADLParser] Saknat eller felaktigt värde i NumberOfSlots." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:661 msgid "" "[ADLParser] The NumberOfSlots element should be specified, when the value of " "useNumberOfSlots attribute of SlotsPerHost element is \"true\"." 
msgstr "" "[ADLParser] NumberOfSlots-elementet ska anges när värdet pÃ¥ useNumberOfSlots-" "attributet i SlotsPerHost-element är \"true\"." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:668 msgid "[ADLParser] Missing or wrong value in SlotsPerHost." msgstr "[ADLParser] Saknat eller felaktigt värde i SlotsPerHost." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:697 msgid "[ADLParser] Missing or wrong value in IndividualPhysicalMemory." msgstr "[ADLParser] Saknat eller felaktigt värde i IndividualPhysicalMemory." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:707 msgid "[ADLParser] Missing or wrong value in IndividualVirtualMemory." msgstr "[ADLParser] Saknat eller felaktigt värde i IndividualVirtualMemory." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:717 msgid "[ADLParser] Missing or wrong value in DiskSpaceRequirement." msgstr "[ADLParser] Saknat eller felaktigt värde i DiskSpaceRequirement." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:731 msgid "[ADLParser] Benchmark is not supported yet." msgstr "[ADLParser] Benchmark stöds inte än." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:739 msgid "[ADLParser] Missing or wrong value in IndividualCPUTime." msgstr "[ADLParser] Saknat eller felaktigt värde i IndividualCPUTime." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:747 msgid "[ADLParser] Missing or wrong value in TotalCPUTime." msgstr "[ADLParser] Saknat eller felaktigt värde i TotalCPUTime." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:756 msgid "[ADLParser] Missing or wrong value in WallTime." msgstr "[ADLParser] Saknat eller felaktigt värde i WallTime." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:776 msgid "[ADLParser] Missing or empty Name in InputFile." msgstr "[ADLParser] Saknat eller tomt Name i InputFile." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:787 #, c-format msgid "[ADLParser] Wrong URI specified in Source - %s." msgstr "[ADLParser] Felaktig URI angiven i Source - %s." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:809 msgid "[ADLParser] Missing or empty Name in OutputFile." msgstr "[ADLParser] Saknat eller tomt Name i OutputFile." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:815 #, c-format msgid "[ADLParser] Wrong URI specified in Target - %s." msgstr "[ADLParser] Felaktig URI angiven i target - %s." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:828 #, c-format msgid "Location URI for file %s is invalid" msgstr "Location URI för fil %s är ogiltig" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:853 #, c-format msgid "[ADLParser] CreationFlag value %s is not supported." msgstr "[ADLParser] CreationFlag-värde %s stöds inte." 
#: src/hed/acc/JobDescriptionParser/RSLParser.cpp:42 msgid "Left operand for RSL concatenation does not evaluate to a literal" msgstr "Vänster operand för RSL-konkatenering utvärderas inte till en sträng" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:50 msgid "Right operand for RSL concatenation does not evaluate to a literal" msgstr "Höger operand för RSL-konkatenering utvärderas inte till en sträng" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:161 msgid "Multi-request operator only allowed at top level" msgstr "Flerjobbsoperator endast tillÃ¥ten pÃ¥ toppnivÃ¥n" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:186 msgid "RSL substitution is not a sequence" msgstr "RSL-substitution är inte en sekvens" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:192 msgid "RSL substitution sequence is not of length 2" msgstr "RSL-substitutions-sekvens har inte längden 2" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:211 msgid "RSL substitution variable name does not evaluate to a literal" msgstr "RSL-substitutionsvariabelnamn utvärderas inte till en sträng" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:220 msgid "RSL substitution variable value does not evaluate to a literal" msgstr "RSL-substitutionsvariabelvärde utvärderas inte till en sträng" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:313 msgid "End of comment not found" msgstr "Hittade inte slutet pÃ¥ kommentar" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:324 msgid "Junk at end of RSL" msgstr "Skräp i slutet pÃ¥ RSL" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:424 msgid "End of single quoted string not found" msgstr "Hittade inte slutet pÃ¥ sträng i enkla citattecken" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:441 msgid "End of double quoted string not found" msgstr "Hittade inte slutet pÃ¥ sträng i dubbla citattecken" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:460 #, c-format msgid "End of user delimiter (%s) quoted string not found" msgstr "Hittade inte slutet pÃ¥ sträng i användardefinierade citattecken (%s)" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:518 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:546 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:625 msgid "')' expected" msgstr "')' förväntades" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:528 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:609 msgid "'(' expected" msgstr "'(' förväntades" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:536 msgid "Variable name expected" msgstr "Variabelnamn förväntades" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:541 #, c-format msgid "Variable name (%s) contains invalid character (%s)" msgstr "Variabelnamn (%s) innehÃ¥ller ogiltigt tecken (%s)" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:557 msgid "Broken string" msgstr "Trasig sträng" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:570 msgid "No left operand for concatenation operator" msgstr "Ingen vänsteroperand för konkateneringsoperator" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:574 msgid "No right operand for concatenation operator" msgstr "Ingen högeroperand för konkateneringsoperator" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:638 msgid "Attribute name expected" msgstr "Attributnamn förväntades" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:643 #, c-format msgid "Attribute name (%s) contains invalid character (%s)" msgstr "Attributnamn (%s) innehÃ¥ller ogiltigt tecken (%s)" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:649 msgid "Relation operator expected" msgstr "Relationsoperator 
förväntades" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:86 msgid "Error parsing the internally set executables attribute." msgstr "Fel vid tolkning av det internt tilldelade executables-attributet." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:102 #, c-format msgid "" "File '%s' in the 'executables' attribute is not present in the 'inputfiles' " "attribute" msgstr "" "Filen '%s' i 'executables'-attributet finns inte i 'inputfiles'-attributet" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:120 msgid "The value of the ftpthreads attribute must be a number from 1 to 10" msgstr "Värdet pÃ¥ ftpthreads-attributet mÃ¥ste vara ett nummer frÃ¥n 1 till 10" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:177 msgid "'stdout' attribute must be specified when 'join' attribute is specified" msgstr "'stdout'-attributet mÃ¥ste anges när 'join'-attributet anges" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:181 msgid "" "Attribute 'join' cannot be specified when both 'stdout' and 'stderr' " "attributes is specified" msgstr "" "Attributet 'join' kan inte anges när bÃ¥de 'stdout'- och 'stderr'-attributen " "anges" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:200 msgid "Attributes 'gridtime' and 'cputime' cannot be specified together" msgstr "Attributen 'gridtime' och 'cputime' kan inte anges samtidigt" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:204 msgid "Attributes 'gridtime' and 'walltime' cannot be specified together" msgstr "Attributen 'gridtime' och 'walltime' kan inte anges samtidigt" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:226 msgid "" "When specifying 'countpernode' attribute, 'count' attribute must also be " "specified" msgstr "" "När 'countpernode'-attributet anges, mÃ¥ste 'count'-attributet ocksÃ¥ anges" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:229 msgid "Value of 'countpernode' attribute must be an integer" msgstr "Värdet pÃ¥ 'countpernode'-attributet mÃ¥ste vara ett heltal" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:287 msgid "No RSL content in job description found" msgstr "Hittade inget RSL-innehÃ¥ll i jobbeskrivning" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:295 msgid "'action' attribute not allowed in user-side job description" msgstr "'action'-attribut inte tillÃ¥tet i jobbeskrivning pÃ¥ användarsidan" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:304 #, c-format msgid "String successfully parsed as %s." msgstr "Sträng framgÃ¥ngsrikt tolkad som %s." 
#: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:313 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:331 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:349 #, c-format msgid "Attribute '%s' multiply defined" msgstr "Attribut '%s' definierat mer än en gång" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:317 #, c-format msgid "Value of attribute '%s' expected to be single value" msgstr "Värdet på attributet '%s' förväntas vara ett enstaka värde" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:322 #, c-format msgid "Value of attribute '%s' expected to be a string" msgstr "Värdet på attributet '%s' förväntas vara en sträng" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:338 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:368 #, c-format msgid "Value of attribute '%s' is not a string" msgstr "Värdet på attributet '%s' är inte en sträng" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:356 #, c-format msgid "Value of attribute '%s' is not sequence" msgstr "Värdet på attributet '%s' är inte en sekvens" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:360 #, c-format msgid "" "Value of attribute '%s' has wrong sequence length: Expected %d, found %d" msgstr "" "Värdet på attributet '%s' har fel sekvenslängd: förväntad %d, hittad %d" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:492 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1375 msgid "Unexpected RSL type" msgstr "Oväntad RSL-typ" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:557 msgid "At least two values are needed for the 'inputfiles' attribute" msgstr "Minst två värden behövs för 'inputfiles'-attributet" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:562 msgid "First value of 'inputfiles' attribute (filename) cannot be empty" msgstr "Första värdet i 'inputfiles'-attributet (filnamn) kan inte vara tomt" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:587 #, c-format msgid "Invalid URL '%s' for input file '%s'" msgstr "Ogiltig URL '%s' för indatafil '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:596 #, c-format msgid "Invalid URL option syntax in option '%s' for input file '%s'" msgstr "Ogiltig URL-alternativ-syntax i alternativ '%s' för indatafil '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:606 #, c-format msgid "Invalid URL: '%s' in input file '%s'" msgstr "Ogiltig URL: '%s' i indatafil '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:677 msgid "At least two values are needed for the 'outputfiles' attribute" msgstr "Minst två värden behövs för 'outputfiles'-attributet" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:682 msgid "First value of 'outputfiles' attribute (filename) cannot be empty" msgstr "Första värdet i 'outputfiles'-attributet (filnamn) kan inte vara tomt" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:694 #, c-format msgid "Invalid URL '%s' for output file '%s'" msgstr "Ogiltig URL '%s' för utdatafil '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:704 #, c-format msgid "Invalid URL option syntax in option '%s' for output file '%s'" msgstr "Ogiltig URL-alternativ-syntax i alternativ '%s' för utdatafil '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:714 #, c-format msgid "Invalid URL: '%s' in output file '%s'" msgstr "Ogiltig URL: '%s' i utdatafil '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:746 #, c-format msgid "" "Invalid comparison operator '%s' used at 'delegationid' attribute, only \"=" "\" is allowed." 
msgstr "" "Ogiltig jämförelseoperator '%s' använd i 'delegationid'-attributet, endast " "\"=\" är tillÃ¥ten." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:764 #, c-format msgid "" "Invalid comparison operator '%s' used at 'queue' attribute in 'GRIDMANAGER' " "dialect, only \"=\" is allowed" msgstr "" "Ogiltig jämförelseoperator '%s' använd i 'queue'-attributet i 'GRIDMANAGER'-" "dialekt, endast \"=\" är tillÃ¥tet" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:770 #, c-format msgid "" "Invalid comparison operator '%s' used at 'queue' attribute, only \"!=\" or " "\"=\" are allowed." msgstr "" "Ogiltig jämförelseoperator '%s' använd i 'queue'-attributet, endast \"!=\" " "eller \"=\" är tillÃ¥tna." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:927 #, c-format msgid "Value of attribute '%s' expected not to be empty" msgstr "Värdet av attributet '%s' förväntades inte vara tomt" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1036 msgid "The value of the acl XRSL attribute isn't valid XML." msgstr "Värdet pÃ¥ XRSL-attributet acl är inte giltig XML." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1050 msgid "The cluster XRSL attribute is currently unsupported." msgstr "XRSL-attributet cluster stöds för närvarande inte." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1066 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it must contain an email " "address" msgstr "" "Syntaxfel i 'notify'-attributvärde ('%s'), det mÃ¥ste innehÃ¥lla en e-" "postadress" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1074 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it must only contain email " "addresses after state flag(s)" msgstr "" "Syntaxfel i 'notify'-attributvärde ('%s'), det fÃ¥r endast innehÃ¥lla e-" "postadresser efter tillstÃ¥ndsflagg(a/or)" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1077 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it contains unknown state " "flags" msgstr "" "Syntaxfel i 'notify'-attributvärde ('%s'), det innehÃ¥ller okända " "tillstÃ¥ndsflaggor" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1125 msgid "priority is too large - using max value 100" msgstr "prioritet är för stor - använder maxvärde 100" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1158 #, c-format msgid "Invalid nodeaccess value: %s" msgstr "Ogiltigt nodeaccess-värde: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1201 msgid "Value of 'count' attribute must be an integer" msgstr "Värdet pÃ¥ 'count'-attributet mÃ¥ste vara ett heltal" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1231 msgid "Value of 'exclusiveexecution' attribute must either be 'yes' or 'no'" msgstr "" "Värdet pÃ¥ 'exclusiveexecution'-attributet mÃ¥ste vara endera 'yes' eller 'no'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1277 #, c-format msgid "Invalid action value %s" msgstr "Ogiltigt action-värde %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1367 #, c-format msgid "The specified Globus attribute (%s) is not supported. %s ignored." msgstr "Det angivna Globus-attributet (%s) stöds inte. %s ignoreras." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1371 #, c-format msgid "Unknown XRSL attribute: %s - Ignoring it." msgstr "Okänt XRSL-attribut: %s - ignorerar det." 
#: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1385 #, c-format msgid "Wrong language requested: %s" msgstr "Felaktigt språk begärt: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1722 msgid "" "Cannot output XRSL representation: The Resources.SlotRequirement." "NumberOfSlots attribute must be specified when the Resources.SlotRequirement." "SlotsPerHost attribute is specified." msgstr "" "Kan inte skapa XRSL-representation: Resources.SlotRequirement.NumberOfSlots-" "attributet måste anges när Resources.SlotRequirement.SlotsPerHost-attributet " "anges." #: src/hed/acc/LDAP/Extractor.h:22 #, c-format msgid "Extractor[%s] (%s): %s = %s" msgstr "Extraherare[%s] (%s): %s = %s" #: src/hed/acc/LDAP/Extractor.h:113 src/hed/acc/LDAP/Extractor.h:130 #, c-format msgid "Extractor[%s] (%s): %s contains %s" msgstr "Extraherare[%s] (%s): %s innehåller %s" #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPGLUE2.cpp:54 #, c-format msgid "Adding endpoint '%s' with interface name %s" msgstr "Lägger till ändpunkt '%s' med gränssnittsnamn %s" #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.cpp:72 #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:46 #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:47 #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:57 msgid "Can't create information handle - is the ARC ldap DMC plugin available?" msgstr "" "Kan inte skapa informationshandtag - är ARCs ldap-DMC-plugin tillgänglig?" #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:79 #, c-format msgid "Unknown entry in EGIIS (%s)" msgstr "Okänd post i EGIIS (%s)" #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:87 msgid "" "Entry in EGIIS is missing one or more of the attributes 'Mds-Service-type', " "'Mds-Service-hn', 'Mds-Service-port' and/or 'Mds-Service-Ldap-suffix'" msgstr "" "Post i EGIIS saknar ett eller flera av attributen 'Mds-Service-type', 'Mds-" "Service-hn', 'Mds-Service-port' och/eller 'Mds-Service-Ldap-suffix'" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:219 msgid "" "The \"FreeSlotsWithDuration\" attribute is wrongly formatted. Ignoring it." msgstr "" "Attributet \"FreeSlotsWithDuration\" är felaktigt formaterat. Ignorerar det." #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:220 #: src/hed/libs/compute/GLUE2.cpp:248 #, c-format msgid "Wrong format of the \"FreeSlotsWithDuration\" = \"%s\" (\"%s\")" msgstr "Felaktigt format för \"FreeSlotsWithDuration\" = \"%s\" (\"%s\")" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:389 #, c-format msgid "Unable to parse the %s.%s value from execution service (%s)." msgstr "Kan inte tolka %s.%s-värdet från beräkningstjänsten (%s)." 
#: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:390 #, c-format msgid "Value of %s.%s is \"%s\"" msgstr "Värdet av %s.%s är \"%s\"" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:65 #: src/services/wrappers/python/pythonwrapper.cpp:92 msgid "Failed to initialize main Python thread" msgstr "Misslyckades med att initiera Pythons huvudtrÃ¥d" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:71 #: src/services/wrappers/python/pythonwrapper.cpp:97 msgid "Main Python thread was not initialized" msgstr "Pythons huvudtrÃ¥d initierades inte" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:81 #, c-format msgid "Loading Python broker (%i)" msgstr "Laddar in Python-mäklare (%i)" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:104 #: src/services/wrappers/python/pythonwrapper.cpp:134 msgid "Main Python thread is not initialized" msgstr "Pythons huvudtrÃ¥d är inte initierad" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:108 msgid "PythonBroker init" msgstr "Python-mäklare init" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:116 msgid "" "Invalid class name. The broker argument for the PythonBroker should be\n" " Filename.Class.args (args is optional), for example SampleBroker." "MyBroker" msgstr "" "Ogiltigt klassnamn. broker-argumentet för Python-mäklaren ska vara\n" " Filnamn.Klass.args (args är valfritt), till exempel SampleBroker." "MyBroker" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:122 #, c-format msgid "Class name: %s" msgstr "Klassnamn: %s" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:123 #, c-format msgid "Module name: %s" msgstr "Modulnamn: %s" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:132 #: src/services/wrappers/python/pythonwrapper.cpp:178 msgid "Cannot convert ARC module name to Python string" msgstr "Kan inte konvertera ARC-modulnamn till pythonsträng" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:140 #: src/services/wrappers/python/pythonwrapper.cpp:186 msgid "Cannot import ARC module" msgstr "Kan inte importera ARC-modulen" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:149 #: src/services/wrappers/python/pythonwrapper.cpp:196 #: src/services/wrappers/python/pythonwrapper.cpp:429 msgid "Cannot get dictionary of ARC module" msgstr "Kan inte hämta ordlista för ARC-modulen" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:158 msgid "Cannot find ARC UserConfig class" msgstr "Kan inte hitta ARCs UserConfig-klass" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:166 msgid "UserConfig class is not an object" msgstr "UserConfig-klass är inte ett objekt" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:173 msgid "Cannot find ARC JobDescription class" msgstr "Kan inte hitta ARCs JobDescription-klass" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:181 msgid "JobDescription class is not an object" msgstr "JobDescription-klass är inte ett objekt" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:188 msgid "Cannot find ARC ExecutionTarget class" msgstr "Kan inte hitta ARCs ExecutionTarget-klass" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:196 msgid "ExecutionTarget class is not an object" msgstr "ExecutionTarget-klass är inte ett objekt" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:207 #: src/services/wrappers/python/pythonwrapper.cpp:157 msgid "Cannot convert module name to Python string" msgstr "Kan inte konvertera modulnamn till pythonsträng" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:215 #: src/services/wrappers/python/pythonwrapper.cpp:164 msgid "Cannot import module" msgstr "Kan inte importera 
modul" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:224 msgid "Cannot get dictionary of custom broker module" msgstr "Kan inte hämta ordlista för mäklarmodul" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:233 msgid "Cannot find custom broker class" msgstr "Kan inte hitta mäklarklass" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:241 #, c-format msgid "%s class is not an object" msgstr "%s-klass är inte ett objekt" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:247 msgid "Cannot create UserConfig argument" msgstr "Kan inte skapa UserConfig-argument" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:255 msgid "Cannot convert UserConfig to Python object" msgstr "Kan inte konvertera UserConfig till pythonobjekt" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:263 #: src/services/wrappers/python/pythonwrapper.cpp:253 msgid "Cannot create argument of the constructor" msgstr "Kan inte skapa argument till konstruktorn" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:272 #: src/services/wrappers/python/pythonwrapper.cpp:261 msgid "Cannot create instance of Python class" msgstr "Kan inte skapa instans av pythonklass" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:278 #, c-format msgid "Python broker constructor called (%d)" msgstr "Python-mäklarens konstruktor anropad (%d)" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:302 #, c-format msgid "Python broker destructor called (%d)" msgstr "Python-mäklarens destruktor anropad (%d)" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:311 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:328 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:361 msgid "Cannot create ExecutionTarget argument" msgstr "Kan inte skapa ExecutionTarget-argument" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:319 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:336 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:369 #, c-format msgid "Cannot convert ExecutionTarget (%s) to python object" msgstr "Kan inte konvertera ExecutionTarget (%s) till pythonobjekt" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:393 msgid "Cannot create JobDescription argument" msgstr "Kan inte skapa JobDescription-argument" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:401 msgid "Cannot convert JobDescription to python object" msgstr "Kan inte konvertera JobDescription till pythonobjekt" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:422 msgid "Do sorting using user created python broker" msgstr "Sortera med användarskapad python-mäklare" #: src/hed/daemon/unix/daemon.cpp:84 #, c-format msgid "Daemonization fork failed: %s" msgstr "Demonisering av fork misslyckades: %s" #: src/hed/daemon/unix/daemon.cpp:95 msgid "Watchdog (re)starting application" msgstr "Vakthund (Ã¥ter)startar programmet" #: src/hed/daemon/unix/daemon.cpp:100 #, c-format msgid "Watchdog fork failed: %s" msgstr "Vakthunds-fork misslyckades: %s" #: src/hed/daemon/unix/daemon.cpp:110 msgid "Watchdog starting monitoring" msgstr "Vakthund startar monitorering" #: src/hed/daemon/unix/daemon.cpp:136 #, c-format msgid "Watchdog detected application exit due to signal %u" msgstr "Vakthund upptäckte att programmet avslutades pÃ¥ grund av signal %u" #: src/hed/daemon/unix/daemon.cpp:138 #, c-format msgid "Watchdog detected application exited with code %u" msgstr "Vakthund upptäckte att programmet avslutades med kod %u" #: src/hed/daemon/unix/daemon.cpp:140 msgid "Watchdog detected application exit" msgstr "Vakthund upptäckte att programmet avslutades" #: src/hed/daemon/unix/daemon.cpp:149 
msgid "" "Watchdog exiting because application was purposely killed or exited itself" msgstr "" "Vakthund avslutades eftersom programmet avbröts avsiktligt eller avslutade " "sig självt" #: src/hed/daemon/unix/daemon.cpp:156 msgid "Watchdog detected application timeout or error - killing process" msgstr "Vakthund upptäckte program-timeout eller -fel - avbryter process" #: src/hed/daemon/unix/daemon.cpp:167 msgid "Watchdog failed to wait till application exited - sending KILL" msgstr "" "Vakthund misslyckades med att vänta pÃ¥ programmets avslutande - sänder KILL" #: src/hed/daemon/unix/daemon.cpp:179 msgid "Watchdog failed to kill application - giving up and exiting" msgstr "" "Vakthund misslyckades med att avbryta programmet - ger upp och avslutar" #: src/hed/daemon/unix/daemon.cpp:200 msgid "Shutdown daemon" msgstr "Stänger av demon" #: src/hed/daemon/unix/main_unix.cpp:43 msgid "shutdown" msgstr "avstängning" #: src/hed/daemon/unix/main_unix.cpp:46 msgid "exit" msgstr "avslut" #: src/hed/daemon/unix/main_unix.cpp:84 msgid "No server config part of config file" msgstr "Ingen serverinställningsdel i inställningsfilen" #: src/hed/daemon/unix/main_unix.cpp:159 #, c-format msgid "Unknown log level %s" msgstr "Okänd logg-nivÃ¥ %s" #: src/hed/daemon/unix/main_unix.cpp:169 #, c-format msgid "Failed to open log file: %s" msgstr "Misslyckades med att öppna loggfil: %s" #: src/hed/daemon/unix/main_unix.cpp:201 msgid "Start foreground" msgstr "Startar i förgrunden" #: src/hed/daemon/unix/main_unix.cpp:250 #, c-format msgid "XML config file %s does not exist" msgstr "XML-inställningsfil %s existerar inte" #: src/hed/daemon/unix/main_unix.cpp:254 src/hed/daemon/unix/main_unix.cpp:269 #, c-format msgid "Failed to load service configuration from file %s" msgstr "Misslyckades med att ladda in tjänsteinställningar frÃ¥n fil %s" #: src/hed/daemon/unix/main_unix.cpp:260 #, c-format msgid "INI config file %s does not exist" msgstr "INI-inställningsfil %s existerar inte" #: src/hed/daemon/unix/main_unix.cpp:265 src/hed/daemon/unix/main_unix.cpp:287 msgid "Error evaluating profile" msgstr "Fel vid utvärdering av profil" #: src/hed/daemon/unix/main_unix.cpp:281 msgid "Error loading generated configuration" msgstr "Fel vid inladdning av genererade inställningar" #: src/hed/daemon/unix/main_unix.cpp:292 msgid "Failed to load service configuration from any default config file" msgstr "" "Misslyckades med att ladda in tjänsteinställningar frÃ¥n nÃ¥gon förvald " "inställningsfil" #: src/hed/daemon/unix/main_unix.cpp:353 msgid "Schema validation error" msgstr "Schemavalideringsfel" #: src/hed/daemon/unix/main_unix.cpp:368 msgid "Configuration root element is not " msgstr "Inställningarnas rotelement är inte " #: src/hed/daemon/unix/main_unix.cpp:384 #, c-format msgid "Cannot switch to group (%s)" msgstr "Kan inte byta till grupp (%s)" #: src/hed/daemon/unix/main_unix.cpp:394 #, c-format msgid "Cannot switch to primary group for user (%s)" msgstr "Kan inte byta till primär grupp för användare (%s)" #: src/hed/daemon/unix/main_unix.cpp:399 #, c-format msgid "Cannot switch to user (%s)" msgstr "Kan inte byta till användare (%s)" #: src/hed/daemon/unix/main_unix.cpp:417 msgid "Failed to load service side MCCs" msgstr "Misslyckades med att ladda in tjänstesidans MCCer" #: src/hed/daemon/unix/main_unix.cpp:419 src/tests/count/test_service.cpp:29 #: src/tests/echo/test.cpp:30 src/tests/echo/test_service.cpp:29 msgid "Service side MCCs are loaded" msgstr "Tjänstesidans MCCer har laddats in" #: 
src/hed/daemon/unix/main_unix.cpp:426 msgid "Unexpected arguments supplied" msgstr "Oväntade argument tillhandahållna" #: src/hed/dmc/acix/DataPointACIX.cpp:93 src/hed/dmc/acix/DataPointACIX.cpp:342 #: src/hed/dmc/rucio/DataPointRucio.cpp:220 #: src/hed/dmc/rucio/DataPointRucio.cpp:462 #, c-format msgid "No locations found for %s" msgstr "Inga platser hittade för %s" #: src/hed/dmc/acix/DataPointACIX.cpp:121 #, c-format msgid "Found none or multiple URLs (%s) in ACIX URL: %s" msgstr "Hittade ingen eller mer än en URL (%s) i ACIX-URL: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:131 #, c-format msgid "Cannot handle URL %s" msgstr "Kan inte hantera URL %s" #: src/hed/dmc/acix/DataPointACIX.cpp:138 #, c-format msgid "Could not resolve original source of %s: out of time" msgstr "Kunde inte slå upp originalkällan för %s: tiden tog slut" #: src/hed/dmc/acix/DataPointACIX.cpp:144 #, c-format msgid "Could not resolve original source of %s: %s" msgstr "Kunde inte slå upp originalkällan för %s: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:160 #, c-format msgid "Querying ACIX server at %s" msgstr "Frågar ACIX-server på %s" #: src/hed/dmc/acix/DataPointACIX.cpp:161 #, c-format msgid "Calling acix with query %s" msgstr "Anropar ACIX med förfrågan %s" #: src/hed/dmc/acix/DataPointACIX.cpp:167 #, c-format msgid "Failed to query ACIX: %s" msgstr "Misslyckades med att fråga ACIX: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:171 #: src/hed/dmc/acix/DataPointACIX.cpp:308 #, c-format msgid "Failed to parse ACIX response: %s" msgstr "Misslyckades med att tolka ACIX-svar: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:298 #, c-format msgid "ACIX returned %s" msgstr "ACIX returnerade %s" #: src/hed/dmc/acix/DataPointACIX.cpp:319 #, c-format msgid "No locations for %s" msgstr "Inga platser för %s" #: src/hed/dmc/acix/DataPointACIX.cpp:325 #, c-format msgid "%s: ACIX Location: %s" msgstr "%s: ACIX-plats: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:327 #, c-format msgid "%s: Location %s not accessible remotely, skipping" msgstr "%s: Plats %s kan inte kommas åt utifrån, hoppar över" #: src/hed/dmc/file/DataPointFile.cpp:86 #, c-format msgid "Unknown channel %s for stdio protocol" msgstr "Okänd kanal %s för stdio-protokoll" #: src/hed/dmc/file/DataPointFile.cpp:93 #, c-format msgid "Failed to open stdio channel %s" msgstr "Misslyckades med att öppna stdio-kanal %s" #: src/hed/dmc/file/DataPointFile.cpp:94 #, c-format msgid "Failed to open stdio channel %d" msgstr "Misslyckades med att öppna stdio-kanal %d" #: src/hed/dmc/file/DataPointFile.cpp:334 #, c-format msgid "fsync of file %s failed: %s" msgstr "fsync för fil %s misslyckades: %s" #: src/hed/dmc/file/DataPointFile.cpp:338 #: src/hed/dmc/file/DataPointFile.cpp:345 #, c-format msgid "closing file %s failed: %s" msgstr "stängning av fil %s misslyckades: %s" #: src/hed/dmc/file/DataPointFile.cpp:364 #, c-format msgid "File is not accessible: %s" msgstr "Filen kan inte kommas åt: %s" #: src/hed/dmc/file/DataPointFile.cpp:370 #: src/hed/dmc/file/DataPointFile.cpp:455 #, c-format msgid "Can't stat file: %s: %s" msgstr "Kan inte göra stat på filen: %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:416 #: src/hed/dmc/file/DataPointFile.cpp:422 #, c-format msgid "Can't stat stdio channel %s" msgstr "Kan inte göra stat på stdio-kanal %s" #: src/hed/dmc/file/DataPointFile.cpp:470 #, c-format msgid "%s is not a directory" msgstr "%s är inte en katalog" #: src/hed/dmc/file/DataPointFile.cpp:485 src/hed/dmc/s3/DataPointS3.cpp:440 #: src/hed/dmc/s3/DataPointS3.cpp:550 #, c-format msgid 
"Failed to read object %s: %s" msgstr "Misslyckades med att läsa objekt: %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:498 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:517 #, c-format msgid "File is not accessible %s: %s" msgstr "Filen kan inte kommas Ã¥t %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:504 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:523 #, c-format msgid "Can't delete directory %s: %s" msgstr "Kan inte ta bort katalog: %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:511 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:530 #, c-format msgid "Can't delete file %s: %s" msgstr "Kan inte ta bort fil %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:521 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:335 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:313 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1466 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:545 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:565 #, c-format msgid "Creating directory %s" msgstr "Skapar katalog %s" #: src/hed/dmc/file/DataPointFile.cpp:529 src/hed/dmc/srm/DataPointSRM.cpp:171 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:579 #, c-format msgid "Renaming %s to %s" msgstr "Byter namn pÃ¥ %s till %s" #: src/hed/dmc/file/DataPointFile.cpp:531 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:588 #, c-format msgid "Can't rename file %s: %s" msgstr "Kan inte byta namn pÃ¥ fil %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:559 #, c-format msgid "Failed to open %s for reading: %s" msgstr "Misslyckades med att öppna %s för läsning: %s" #: src/hed/dmc/file/DataPointFile.cpp:574 #: src/hed/dmc/file/DataPointFile.cpp:709 #, c-format msgid "Failed to switch user id to %d/%d" msgstr "Misslyckades med att byta användar-id till %d/%d" #: src/hed/dmc/file/DataPointFile.cpp:580 #, c-format msgid "Failed to create/open file %s: %s" msgstr "Misslyckades med att skapa/öppna fil %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:596 msgid "Failed to create thread" msgstr "Misslyckades med att skapa trÃ¥d" #: src/hed/dmc/file/DataPointFile.cpp:676 #, c-format msgid "Invalid url: %s" msgstr "Ogiltig URL: %s" #: src/hed/dmc/file/DataPointFile.cpp:685 src/hed/libs/data/FileCache.cpp:482 #, c-format msgid "Failed to create directory %s: %s" msgstr "Misslyckades med att skapa katalog %s: %s\"" #: src/hed/dmc/file/DataPointFile.cpp:698 #: src/hed/dmc/file/DataPointFile.cpp:717 #, c-format msgid "Failed to create file %s: %s" msgstr "Misslyckades med att skapa fil %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:729 #, c-format msgid "setting file %s to size %llu" msgstr "Sätter fil %s till storlek %llu" #: src/hed/dmc/file/DataPointFile.cpp:749 #, c-format msgid "Failed to preallocate space for %s" msgstr "Misslyckades med förallokera utrymme for %s" #: src/hed/dmc/file/DataPointFile.cpp:790 src/hed/libs/data/FileCache.cpp:856 #, c-format msgid "Failed to clean up file %s: %s" msgstr "Misslyckades med att ta bort fil %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:799 #, c-format msgid "Error during file validation. Can't stat file %s: %s" msgstr "Fel under filvalidering. Kan inte göra stat pÃ¥ fil %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:803 #, c-format msgid "" "Error during file validation: Local file size %llu does not match source " "file size %llu for file %s" msgstr "" "Fel under filvalidering. 
Lokal filstorlek %llu stämmer inte överens med " "källans filstorlek %llu för fil %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:53 #, c-format msgid "Using proxy %s" msgstr "Använder proxy %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:54 #, c-format msgid "Using key %s" msgstr "Använder nyckel %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:55 #, c-format msgid "Using cert %s" msgstr "Använder certifikat %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:113 msgid "Locations are missing in destination LFC URL" msgstr "Platser saknas i destinations-LFC-URL" #: src/hed/dmc/gfal/DataPointGFAL.cpp:119 #, c-format msgid "Duplicate replica found in LFC: %s" msgstr "Duplicerad replika hittad i LFC: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:121 #, c-format msgid "Adding location: %s - %s" msgstr "Lägger till plats: %s - %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:129 #: src/hed/libs/data/DataPointIndex.cpp:161 #, c-format msgid "Add location: url: %s" msgstr "Lägg till plats: url: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:130 #: src/hed/libs/data/DataPointIndex.cpp:162 #, c-format msgid "Add location: metadata: %s" msgstr "Lägg till plats: metadata: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:150 #: src/hed/dmc/gfal/DataPointGFAL.cpp:310 #, c-format msgid "gfal_open failed: %s" msgstr "gfal_open misslyckades: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:163 #: src/hed/dmc/gfal/DataPointGFAL.cpp:223 #: src/hed/dmc/gfal/DataPointGFAL.cpp:249 #: src/hed/dmc/gfal/DataPointGFAL.cpp:324 #: src/hed/dmc/gfal/DataPointGFAL.cpp:403 #: src/hed/dmc/gfal/DataPointGFAL.cpp:430 #, c-format msgid "gfal_close failed: %s" msgstr "gfal_close misslyckades: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:195 #, c-format msgid "gfal_read failed: %s" msgstr "gfal_read misslyckades: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:237 msgid "StopReading starts waiting for transfer_condition." msgstr "StopReading börjar vänta på transfer_condition." #: src/hed/dmc/gfal/DataPointGFAL.cpp:239 msgid "StopReading finished waiting for transfer_condition." msgstr "StopReading slutar vänta på transfer_condition." #: src/hed/dmc/gfal/DataPointGFAL.cpp:271 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:68 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:73 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:42 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:47 #, c-format msgid "No locations defined for %s" msgstr "Inga platser definierade för %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:278 #, c-format msgid "Failed to set LFC replicas: %s" msgstr "Misslyckades med att sätta LFC-replikor: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:304 #, c-format msgid "gfal_mkdir failed (%s), trying to write anyway" msgstr "gfal_mkdir misslyckades (%s), försöker skriva ändå" #: src/hed/dmc/gfal/DataPointGFAL.cpp:359 #, c-format msgid "DataPointGFAL::write_file got position %d and offset %d, has to seek" msgstr "" "DataPointGFAL::write_file fick position %d och offset %d, måste göra seek" #: src/hed/dmc/gfal/DataPointGFAL.cpp:388 #, c-format msgid "gfal_write failed: %s" msgstr "gfal_write misslyckades: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:418 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:388 msgid "StopWriting starts waiting for transfer_condition." msgstr "StopWriting börjar vänta på transfer_condition." #: src/hed/dmc/gfal/DataPointGFAL.cpp:420 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:390 msgid "StopWriting finished waiting for transfer_condition." msgstr "StopWriting slutar vänta på transfer_condition." 
#: src/hed/dmc/gfal/DataPointGFAL.cpp:451 #, c-format msgid "gfal_stat failed: %s" msgstr "gfal_stat misslyckades: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:496 #, c-format msgid "gfal_listxattr failed, no replica information can be obtained: %s" msgstr "" "gfal_listxattr misslyckades, ingen replika-information kan erhållas: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:537 #, c-format msgid "gfal_opendir failed: %s" msgstr "gfal_opendir misslyckades: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:549 #, c-format msgid "List will stat the URL %s" msgstr "List kommer att göra stat på URL %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:556 #, c-format msgid "gfal_closedir failed: %s" msgstr "gfal_closedir misslyckades: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:584 #, c-format msgid "gfal_rmdir failed: %s" msgstr "gfal_rmdir misslyckades: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:587 #, c-format msgid "gfal_unlink failed: %s" msgstr "gfal_unlink misslyckades: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:604 #, c-format msgid "gfal_mkdir failed: %s" msgstr "gfal_mkdir misslyckades: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:619 #, c-format msgid "gfal_rename failed: %s" msgstr "gfal_rename misslyckades: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:19 #, c-format msgid "Failed to obtain bytes transferred: %s" msgstr "Misslyckades med att erhålla antal överförda byte: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:42 #, c-format msgid "Failed to get initiate GFAL2 parameter handle: %s" msgstr "Misslyckades med att initiera GFAL2-parameter-handtag: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:49 #, c-format msgid "Failed to get initiate new GFAL2 context: %s" msgstr "Misslyckades med att initiera ny GFAL2-kontext: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:56 #, c-format msgid "Failed to set GFAL2 monitor callback: %s" msgstr "Misslyckades med att sätta GFAL2-monitor-callback: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:64 #, c-format msgid "Failed to set overwrite option in GFAL2: %s" msgstr "Misslyckades med att sätta skriv-över-option i GFAL2: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:72 #, c-format msgid "Failed to set GFAL2 transfer timeout, will use default: %s" msgstr "" "Misslyckades med att sätta GFAL2-överförings-timeout, använder förval: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:84 msgid "Transfer failed" msgstr "Överföring misslyckades" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:92 msgid "Transfer succeeded" msgstr "Överföring lyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:38 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:54 msgid "ftp_complete_callback: success" msgstr "ftp_complete_callback: OK" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:44 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:60 #, c-format msgid "ftp_complete_callback: error: %s" msgstr "ftp_complete_callback: fel: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:60 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:76 msgid "ftp_check_callback" msgstr "ftp_check_callback" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:62 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:90 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:116 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:135 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:305 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:340 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:678 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:847 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:879 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:913 #: 
src/hed/dmc/gridftp/DataPointGridFTP.cpp:1052 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1104 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1114 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1122 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1130 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1138 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1144 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:78 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:106 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:283 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:319 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:729 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:762 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:799 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:930 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:994 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1004 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1012 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1020 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1028 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1034 #: src/services/gridftpd/commands.cpp:1226 #: src/services/gridftpd/dataread.cpp:76 src/services/gridftpd/dataread.cpp:173 #: src/services/gridftpd/datawrite.cpp:59 #: src/services/gridftpd/datawrite.cpp:146 #, c-format msgid "Globus error: %s" msgstr "Globusfel: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:73 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:89 msgid "Excessive data received while checking file access" msgstr "För mycket data mottaget när filåtkomst kontrollerades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:89 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:105 msgid "Registration of Globus FTP buffer failed - cancel check" msgstr "Registrering av Globus-FTP-buffer misslyckades - avbryter kontroll" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:115 msgid "check_ftp: globus_ftp_client_size failed" msgstr "check_ftp: globus_ftp_client_size misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:119 msgid "check_ftp: timeout waiting for size" msgstr "check_ftp: timeout vid väntan på storlek" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:124 msgid "check_ftp: failed to get file's size" msgstr "check_ftp: misslyckades med att erhålla filens storlek" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:127 #, c-format msgid "check_ftp: obtained size: %lli" msgstr "check_ftp: erhållen storlek: %lli" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:134 msgid "check_ftp: globus_ftp_client_modification_time failed" msgstr "check_ftp: globus_ftp_client_modification_time misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:138 msgid "check_ftp: timeout waiting for modification_time" msgstr "check_ftp: timeout vid väntan på ändringstid" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:143 msgid "check_ftp: failed to get file's modification time" msgstr "check_ftp: misslyckades med att erhålla filens ändringstid" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:148 #, c-format msgid "check_ftp: obtained modification date: %s" msgstr "check_ftp: erhållen ändringstid: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:167 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:145 msgid "check_ftp: globus_ftp_client_get failed" msgstr "check_ftp: globus_ftp_client_get misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:174 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:152 msgid "check_ftp: globus_ftp_client_register_read" msgstr "check_ftp: globus_ftp_client_register_read" #: 
src/hed/dmc/gridftp/DataPointGridFTP.cpp:185 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:164 msgid "check_ftp: timeout waiting for partial get" msgstr "check_ftp: timeout vid väntan på partiell get" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:215 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:191 #, c-format msgid "File delete failed, attempting directory delete for %s" msgstr "" "Borttagning av fil misslyckades, försöker med borttagande av katalog för %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:225 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:202 msgid "delete_ftp: globus_ftp_client_delete failed" msgstr "delete_ftp: globus_ftp_client_delete misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:231 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:252 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:208 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:230 msgid "delete_ftp: timeout waiting for delete" msgstr "delete_ftp: timeout vid väntan på borttagande" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:246 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:224 msgid "delete_ftp: globus_ftp_client_rmdir failed" msgstr "delete_ftp: globus_ftp_client_rmdir misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:301 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:278 #, c-format msgid "mkdir_ftp: making %s" msgstr "mkdir_ftp: skapar %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:309 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:287 msgid "mkdir_ftp: timeout waiting for mkdir" msgstr "mkdir_ftp: timeout vid väntan på mkdir" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:344 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:323 msgid "Timeout waiting for mkdir" msgstr "Timeout vid väntan på mkdir" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:370 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:346 msgid "start_reading_ftp" msgstr "start_reading_ftp" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:374 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:350 msgid "start_reading_ftp: globus_ftp_client_get" msgstr "start_reading_ftp: globus_ftp_client_get" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:388 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:364 msgid "start_reading_ftp: globus_ftp_client_get failed" msgstr "start_reading_ftp: globus_ftp_client_get misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:399 msgid "start_reading_ftp: globus_thread_create failed" msgstr "start_reading_ftp: globus_thread_create misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:418 msgid "stop_reading_ftp: aborting connection" msgstr "stop_reading_ftp: avbryter förbindelse" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:425 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:647 #, c-format msgid "Failed to abort transfer of ftp file: %s" msgstr "Misslyckades med att avbryta överföring av ftp-fil: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:426 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:648 msgid "Assuming transfer is already aborted or failed." msgstr "Antar att överföring redan är avbruten eller misslyckad." 
#: src/hed/dmc/gridftp/DataPointGridFTP.cpp:433 msgid "stop_reading_ftp: waiting for transfer to finish" msgstr "stop_reading_ftp: väntar på att överföring ska avslutas" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:435 #, c-format msgid "stop_reading_ftp: exiting: %s" msgstr "stop_reading_ftp: avslutar: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:449 msgid "ftp_read_thread: get and register buffers" msgstr "ftp_read_thread: erhåll och registrera buffrar" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:455 #, c-format msgid "ftp_read_thread: for_read failed - aborting: %s" msgstr "ftp_read_thread: for_read misslyckades - avbryter: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:465 #, c-format msgid "ftp_read_thread: data callback failed - aborting: %s" msgstr "ftp_read_thread: data-callback misslyckades - avbryter: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:477 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:380 #, c-format msgid "ftp_read_thread: Globus error: %s" msgstr "ftp_read_thread: Globusfel: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:490 #, c-format msgid "ftp_read_thread: too many registration failures - abort: %s" msgstr "ftp_read_thread: för många registreringsfel - avbryter: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:495 #, c-format msgid "ftp_read_thread: failed to register Globus buffer - will try later: %s" msgstr "" "ftp_read_thread: misslyckades med att registrera globusbuffer - kommer att " "prova senare: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:508 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:396 msgid "ftp_read_thread: waiting for eof" msgstr "ftp_read_thread: väntar på filslut" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:512 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:400 msgid "ftp_read_thread: waiting for buffers released" msgstr "ftp_read_thread: väntar på att buffrar ska frigöras" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:516 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:408 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:662 msgid "ftp_read_thread: failed to release buffers - leaking" msgstr "ftp_read_thread: misslyckades med att frigöra buffrar - läcker" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:521 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:415 msgid "ftp_read_thread: exiting" msgstr "ftp_read_thread: avslutar" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:539 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:436 #, c-format msgid "ftp_read_callback: failure: %s" msgstr "ftp_read_callback: misslyckande: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:542 msgid "ftp_read_callback: success" msgstr "ftp_read_callback: OK" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:558 msgid "Failed to get ftp file" msgstr "Misslyckades med att hämta ftp-fil" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:594 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:543 msgid "start_writing_ftp: mkdir" msgstr "start_writing_ftp: mkdir" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:597 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:545 msgid "start_writing_ftp: mkdir failed - still trying to write" msgstr "start_writing_ftp: mkdir misslyckades - försöker fortfarande skriva" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:599 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:547 msgid "start_writing_ftp: put" msgstr "start_writing_ftp: put" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:613 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:561 msgid "start_writing_ftp: put failed" msgstr "start_writing_ftp: put misslyckades" #: 
src/hed/dmc/gridftp/DataPointGridFTP.cpp:623 msgid "start_writing_ftp: globus_thread_create failed" msgstr "start_writing_ftp: globus_thread_create misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:640 #: src/hed/libs/data/DataPointDelegate.cpp:307 msgid "StopWriting: aborting connection" msgstr "StopWriting: avbryter förbindelse" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:664 #: src/hed/libs/data/DataPointDelegate.cpp:321 #, c-format msgid "StopWriting: Calculated checksum %s" msgstr "StopWriting: Beräknad checksumma %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:668 #: src/hed/libs/data/DataPointDelegate.cpp:325 #, c-format msgid "StopWriting: looking for checksum of %s" msgstr "StopWriting: letar efter checksumma för %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:677 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:912 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:798 msgid "list_files_ftp: globus_ftp_client_cksm failed" msgstr "list_files_ftp: globus_ftp_client_cksm misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:681 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:916 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:802 msgid "list_files_ftp: timeout waiting for cksum" msgstr "list_files_ftp: timeout vid väntan på cksum" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:688 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:923 msgid "list_files_ftp: no checksum information possible" msgstr "list_files_ftp: information om checksumma inte möjlig" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:691 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:926 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:815 #, c-format msgid "list_files_ftp: checksum %s" msgstr "list_files_ftp: checksumma %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:694 #: src/hed/libs/data/DataPointDelegate.cpp:332 msgid "" "Checksum type returned by server is different to requested type, cannot " "compare" msgstr "" "Typ av checksumma som returnerades av servern skiljer sig från den begärda " "typen, kan inte jämföra" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:696 #: src/hed/libs/data/DataPointDelegate.cpp:334 #, c-format msgid "Calculated checksum %s matches checksum reported by server" msgstr "" "Beräknad checksumma %s stämmer överens med checksumma rapporterad av servern" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:699 #: src/hed/libs/data/DataPointDelegate.cpp:337 #, c-format msgid "" "Checksum mismatch between calculated checksum %s and checksum reported by " "server %s" msgstr "" "Beräknad checksumma %s stämmer inte överens med checksumma rapporterad av " "servern %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:721 msgid "ftp_write_thread: get and register buffers" msgstr "ftp_write_thread: erhåll och registrera buffrar" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:725 msgid "ftp_write_thread: for_write failed - aborting" msgstr "ftp_write_thread: for_write misslyckades - avbryter" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:743 msgid "ftp_write_thread: data callback failed - aborting" msgstr "ftp_write_thread: data-callback misslyckades - avbryter" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:759 msgid "ftp_write_thread: waiting for eof" msgstr "ftp_write_thread: väntar på filslut" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:763 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:660 msgid "ftp_write_thread: waiting for buffers released" msgstr "ftp_write_thread: väntar på att buffrar ska frigöras" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:771 msgid "ftp_write_thread: failed to release buffers - leaking" msgstr 
"ftp_write_thread: misslyckades med att frigöra buffrar - läcker" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:776 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:667 msgid "ftp_write_thread: exiting" msgstr "ftp_write_thread: avslutar" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:799 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:686 #, c-format msgid "ftp_write_callback: failure: %s" msgstr "ftp_write_callback: misslyckande: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:802 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:688 #, c-format msgid "ftp_write_callback: success %s" msgstr "ftp_write_callback: OK %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:817 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:704 msgid "Failed to store ftp file" msgstr "Misslyckades med att spara ftp-fil" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:825 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:709 msgid "ftp_put_complete_callback: success" msgstr "ftp_put_complete_callback: OK" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:841 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:723 #, c-format msgid "list_files_ftp: looking for size of %s" msgstr "list_files_ftp: söker efter storlek pÃ¥ %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:845 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:727 msgid "list_files_ftp: globus_ftp_client_size failed" msgstr "list_files_ftp: globus_ftp_client_size misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:851 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:852 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:733 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:734 msgid "list_files_ftp: timeout waiting for size" msgstr "list_files_ftp: timeout vid väntan pÃ¥ storlek" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:858 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:740 msgid "list_files_ftp: failed to get file's size" msgstr "list_files_ftp: misslyckades med att erhÃ¥lla filens storlek" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:870 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:753 #, c-format msgid "list_files_ftp: looking for modification time of %s" msgstr "list_files_ftp: söker efter ändringstid för %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:876 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:759 msgid "list_files_ftp: globus_ftp_client_modification_time failed" msgstr "list_files_ftp: globus_ftp_client_modification_time misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:883 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:766 msgid "list_files_ftp: timeout waiting for modification_time" msgstr "list_files_ftp: timeout vid väntan pÃ¥ ändringstid" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:891 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:774 msgid "list_files_ftp: failed to get file's modification time" msgstr "list_files_ftp: misslyckades med att erhÃ¥lla filens ändringstid" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:903 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:788 #, c-format msgid "list_files_ftp: looking for checksum of %s" msgstr "list_files_ftp: söker efter checksumma pÃ¥ %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:942 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:828 #, c-format msgid "Failed to obtain stat from FTP: %s" msgstr "Misslyckades med att erhÃ¥lla stat frÃ¥n FTP: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:948 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:833 msgid "No results returned from stat" msgstr "Inga resultat returnerade frÃ¥n stat" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:954 
#: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:839 #, c-format msgid "Wrong number of objects (%i) for stat from ftp: %s" msgstr "Fel antal objekt (%i) för stat från ftp: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:968 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:852 #, c-format msgid "Unexpected path %s returned from server" msgstr "Oväntad sökväg %s returnerad från server" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1007 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:885 #, c-format msgid "Failed to obtain listing from FTP: %s" msgstr "Misslyckades med att erhålla listning från FTP: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1050 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:928 msgid "Rename: globus_ftp_client_move failed" msgstr "Rename: globus_ftp_client_move misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1056 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:934 msgid "Rename: timeout waiting for operation to complete" msgstr "Rename: timeout vid väntan på att operationen ska slutföras" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1103 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:993 msgid "init_handle: globus_ftp_client_handleattr_init failed" msgstr "init_handle: globus_ftp_client_handleattr_init misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1112 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1002 msgid "init_handle: globus_ftp_client_handleattr_set_gridftp2 failed" msgstr "init_handle: globus_ftp_client_handleattr_set_gridftp2 misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1121 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1011 msgid "init_handle: globus_ftp_client_handle_init failed" msgstr "init_handle: globus_ftp_client_handle_init misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1128 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1018 msgid "init_handle: globus_ftp_client_operationattr_init failed" msgstr "init_handle: globus_ftp_client_operationattr_init misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1136 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1026 msgid "init_handle: globus_ftp_client_operationattr_set_allow_ipv6 failed" msgstr "" "init_handle: globus_ftp_client_operationattr_set_allow_ipv6 misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1142 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1032 msgid "init_handle: globus_ftp_client_operationattr_set_delayed_pasv failed" msgstr "" "init_handle: globus_ftp_client_operationattr_set_delayed_pasv misslyckades" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1190 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1218 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1084 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1113 #, c-format msgid "globus_ftp_client_operationattr_set_authorization: error: %s" msgstr "globus_ftp_client_operationattr_set_authorization: fel: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1217 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1112 msgid "Failed to set credentials for GridFTP transfer" msgstr "Misslyckades med att sätta referenser för GridFTP-överföring" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1223 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1118 msgid "Using secure data transfer" msgstr "Använder säker dataöverföring" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1228 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1123 msgid "Using insecure data transfer" msgstr "Använder osäker dataöverföring" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1255 #: 
src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1150 msgid "~DataPoint: destroy ftp_handle" msgstr "~DataPoint: förstör ftp_handle" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1258 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1153 msgid "~DataPoint: destroy ftp_handle failed - retrying" msgstr "~DataPoint: förstör ftp_handle misslyckades - försöker igen" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1276 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1171 msgid "~DataPoint: failed to destroy ftp_handle - leaking" msgstr "~DataPoint: misslyckades med att förstöra ftp_handle - läcker" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1296 msgid "" "Missing reference to factory and/or module. It is unsafe to use Globus in " "non-persistent mode - (Grid)FTP code is disabled. Report to developers." msgstr "" "Saknar referens till fabrik och/eller modul. Det är osäkert att använda " "Globus i icke-persistent mode - (Grid)FTP-koden är deaktiverad. Rapportera " "till utvecklare." #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:388 msgid "ftp_read_thread: failed to register buffers" msgstr "ftp_read_thread: misslyckades med att registrera buffrar" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:403 msgid "ftp_read_thread: failed to release buffers" msgstr "ftp_read_thread: misslyckades med att frigöra buffrar" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:441 #, c-format msgid "ftp_read_callback: success - offset=%u, length=%u, eof=%u, allow oof=%u" msgstr "" "ftp_read_callback: lyckades - offset=%u, längd=%u, eof=%u, tillåt oof=%u" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:455 #, c-format msgid "ftp_read_callback: delayed data chunk: %llu %llu" msgstr "ftp_read_callback: fördröjd data-chunk: %llu %llu" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:462 #, c-format msgid "ftp_read_callback: unexpected data out of order: %llu != %llu" msgstr "ftp_read_callback: oväntad data i oordning: %llu != %llu" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:469 msgid "ftp_read_callback: too many unexpected out of order chunks" msgstr "ftp_read_callback: för många oväntade chunks i oordning" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:490 #, c-format msgid "ftp_read_callback: Globus error: %s" msgstr "ftp_read_callback: Globusfel: %s" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:515 msgid "ftp_get_complete_callback: Failed to get ftp file" msgstr "ftp_get_complete_callback: Misslyckades med att hämta ftp-fil" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:520 msgid "ftp_get_complete_callback: success" msgstr "ftp_get_complete_callback: OK" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:575 msgid "start_writing_ftp: waiting for data tag" msgstr "start_writing_ftp: väntar på data-tagg" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:578 msgid "start_writing_ftp: failed to read data tag" msgstr "start_writing_ftp: misslyckades med att läsa data-tagg" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:583 msgid "start_writing_ftp: waiting for data chunk" msgstr "start_writing_ftp: väntar på data-chunk" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:585 msgid "start_writing_ftp: failed to read data chunk" msgstr "start_writing_ftp: misslyckades med att läsa data-chunk" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:596 #, c-format msgid "ftp_write_thread: data out of order in stream mode: %llu != %llu" msgstr "ftp_write_thread: data i oordning i strömningsläge: %llu != %llu" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:603 msgid "ftp_write_thread: too many out of 
order chunks in stream mode" msgstr "ftp_write_thread: för mÃ¥nga chunks i oordning i strömningsläge" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:608 #, c-format msgid "start_writing_ftp: data chunk: %llu %llu" msgstr "start_writing_ftp: data-chunk: %llu %llu" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:614 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:640 #, c-format msgid "ftp_write_thread: Globus error: %s" msgstr "ftp_write_thread: Globusfel: %s" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:633 #, c-format msgid "start_writing_ftp: delayed data chunk: %llu %llu" msgstr "start_writing_ftp: fördröjd data-chunk: %llu %llu" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:652 msgid "start_writing_ftp: waiting for some buffers sent" msgstr "start_writing_ftp: väntar pÃ¥ nÃ¥gra buffrar som skickats" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:658 msgid "ftp_write_thread: waiting for transfer complete" msgstr "ftp_write_thread: väntar pÃ¥ slutförd överföring" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:809 msgid "list_files_ftp: no checksum information supported" msgstr "list_files_ftp: information om checksumma stöds inte" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:811 msgid "list_files_ftp: no checksum information returned" msgstr "list_files_ftp: information om checksumma returnerades inte" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:906 msgid "Too many failures to obtain checksum - giving up" msgstr "För mÃ¥nga fel för att erhÃ¥lla checksumma - ger up" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1266 msgid "Expecting Command and URL provided" msgstr "Saknar kommando och URL" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1273 #: src/hed/libs/data/DataExternalHelper.cpp:376 msgid "Expecting Command among arguments" msgstr "Saknar kommando bland argumenten" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1277 #: src/hed/libs/data/DataExternalHelper.cpp:380 msgid "Expecting URL among arguments" msgstr "Saknar URL bland argumenten" #: src/hed/dmc/gridftp/Lister.cpp:221 src/hed/dmc/gridftp/Lister.cpp:289 #: src/hed/dmc/gridftp/Lister.cpp:384 src/hed/dmc/gridftp/Lister.cpp:767 #: src/hed/dmc/gridftp/Lister.cpp:812 #, c-format msgid "Failure: %s" msgstr "Misslyckande: %s" #: src/hed/dmc/gridftp/Lister.cpp:288 msgid "Error getting list of files (in list)" msgstr "Fel vid erhÃ¥llande av fillista (i list)" #: src/hed/dmc/gridftp/Lister.cpp:290 msgid "Assuming - file not found" msgstr "Antar - hittade inte filen" #: src/hed/dmc/gridftp/Lister.cpp:307 #, c-format msgid "list record: %s" msgstr "listpost: %s" #: src/hed/dmc/gridftp/Lister.cpp:362 msgid "Failed reading list of files" msgstr "Misslyckades med att läsa fillista" #: src/hed/dmc/gridftp/Lister.cpp:398 msgid "Failed reading data" msgstr "Misslyckades med att läsa data" #: src/hed/dmc/gridftp/Lister.cpp:426 #, c-format msgid "Command: %s" msgstr "Kommando: %s" #: src/hed/dmc/gridftp/Lister.cpp:430 src/hed/dmc/gridftp/Lister.cpp:471 #: src/hed/mcc/http/PayloadHTTP.cpp:990 msgid "Memory allocation error" msgstr "Minnesallokeringsfel" #: src/hed/dmc/gridftp/Lister.cpp:438 #, c-format msgid "%s failed" msgstr "%s misslyckades" #: src/hed/dmc/gridftp/Lister.cpp:442 msgid "Command is being sent" msgstr "Kommande sänds" #: src/hed/dmc/gridftp/Lister.cpp:447 msgid "Waiting for response" msgstr "Väntar pÃ¥ svar" #: src/hed/dmc/gridftp/Lister.cpp:452 msgid "Callback got failure" msgstr "Callback erhöll misslyckande" #: src/hed/dmc/gridftp/Lister.cpp:538 msgid "Failed in globus_cond_init" msgstr 
"Misslyckades i globus_cond_init" #: src/hed/dmc/gridftp/Lister.cpp:542 msgid "Failed in globus_mutex_init" msgstr "Misslyckades i globus_mutex_init" #: src/hed/dmc/gridftp/Lister.cpp:549 msgid "Failed allocating memory for handle" msgstr "Misslyckades med att allokera minne för handtag" #: src/hed/dmc/gridftp/Lister.cpp:554 msgid "Failed in globus_ftp_control_handle_init" msgstr "Misslyckades i globus_ftp_control_handle_init" #: src/hed/dmc/gridftp/Lister.cpp:562 msgid "Failed to enable IPv6" msgstr "Misslyckades med att aktivera IPv6" #: src/hed/dmc/gridftp/Lister.cpp:573 src/services/gridftpd/commands.cpp:984 msgid "Closing connection" msgstr "Stänger förbindelse" #: src/hed/dmc/gridftp/Lister.cpp:580 src/hed/dmc/gridftp/Lister.cpp:595 msgid "Timeout waiting for Globus callback - leaking connection" msgstr "Timeout vid väntan pÃ¥ Globus callback - läcker förbindelse" #: src/hed/dmc/gridftp/Lister.cpp:605 msgid "Closed successfully" msgstr "Stängdes framgÃ¥ngsrikt" #: src/hed/dmc/gridftp/Lister.cpp:607 msgid "Closing may have failed" msgstr "Stängning kan ha misslyckats" #: src/hed/dmc/gridftp/Lister.cpp:634 msgid "Waiting for globus handle to settle" msgstr "Väntar pÃ¥ att globus-handtag ska lugna ned sig" #: src/hed/dmc/gridftp/Lister.cpp:639 #, c-format msgid "Handle is not in proper state %u/%u" msgstr "Handtag är i felaktigt tillstÃ¥nd %u/%u" #: src/hed/dmc/gridftp/Lister.cpp:645 msgid "Globus handle is stuck" msgstr "Globus-handtag har fastnat" #: src/hed/dmc/gridftp/Lister.cpp:661 #, c-format msgid "Failed destroying handle: %s. Can't handle such situation." msgstr "" "Misslyckades med att förstöra handtag: %s. Kan inte hantera en sÃ¥dan " "situation." #: src/hed/dmc/gridftp/Lister.cpp:684 #, c-format msgid "EPSV failed: %s" msgstr "EPSV misslyckades: %s" #: src/hed/dmc/gridftp/Lister.cpp:688 msgid "EPSV failed" msgstr "EPSV misslyckades" #: src/hed/dmc/gridftp/Lister.cpp:695 #, c-format msgid "PASV failed: %s" msgstr "PASV misslyckades: %s" #: src/hed/dmc/gridftp/Lister.cpp:699 msgid "PASV failed" msgstr "PASV misslyckades" #: src/hed/dmc/gridftp/Lister.cpp:765 msgid "Failed to apply local address to data connection" msgstr "Misslyckades med att tillämpa lokal adress pÃ¥ dataförbindelse" #: src/hed/dmc/gridftp/Lister.cpp:783 msgid "Can't parse host and/or port in response to EPSV/PASV" msgstr "Kan inte tolka värd och/eller port i EPSV/PASV-svar" #: src/hed/dmc/gridftp/Lister.cpp:788 #, c-format msgid "Data channel: %d.%d.%d.%d:%d" msgstr "Datakanal: %d.%d.%d.%d:%d" #: src/hed/dmc/gridftp/Lister.cpp:806 #, c-format msgid "Data channel: [%s]:%d" msgstr "Datakanal: [%s]:%d" #: src/hed/dmc/gridftp/Lister.cpp:810 msgid "Obtained host and address are not acceptable" msgstr "ErhÃ¥llen värd och adress kan inte accepteras" #: src/hed/dmc/gridftp/Lister.cpp:820 msgid "Failed to open data channel" msgstr "Misslyckades med att öppna datakanal" #: src/hed/dmc/gridftp/Lister.cpp:838 #, c-format msgid "Unsupported protocol in url %s" msgstr "Protokoll i url stöds inte %s" #: src/hed/dmc/gridftp/Lister.cpp:850 msgid "Reusing connection" msgstr "Ã…teranvänder förbindelse" #: src/hed/dmc/gridftp/Lister.cpp:874 #, c-format msgid "Failed connecting to server %s:%d" msgstr "Misslyckades med att ansluta till %s:%d" #: src/hed/dmc/gridftp/Lister.cpp:880 #, c-format msgid "Failed to connect to server %s:%d" msgstr "Misslyckades med att ansluta till %s:%d" #: src/hed/dmc/gridftp/Lister.cpp:896 msgid "Missing authentication information" msgstr "Saknad autentiseringsinformation" #: 
src/hed/dmc/gridftp/Lister.cpp:905 src/hed/dmc/gridftp/Lister.cpp:919 #, c-format msgid "Bad authentication information: %s" msgstr "Felaktig autentiseringsinformation: %s" #: src/hed/dmc/gridftp/Lister.cpp:928 src/hed/dmc/gridftp/Lister.cpp:943 #, c-format msgid "Failed authenticating: %s" msgstr "Misslyckades med autentisering: %s" #: src/hed/dmc/gridftp/Lister.cpp:935 msgid "Failed authenticating" msgstr "Misslyckades med autentisering" #: src/hed/dmc/gridftp/Lister.cpp:970 src/hed/dmc/gridftp/Lister.cpp:1126 #, c-format msgid "DCAU failed: %s" msgstr "DCAU misslyckades: %s" #: src/hed/dmc/gridftp/Lister.cpp:974 src/hed/dmc/gridftp/Lister.cpp:1131 msgid "DCAU failed" msgstr "DCAU misslyckades" #: src/hed/dmc/gridftp/Lister.cpp:994 msgid "MLST is not supported - trying LIST" msgstr "MLST stöds inte - försöker med LIST" #: src/hed/dmc/gridftp/Lister.cpp:1010 #, c-format msgid "Immediate completion expected: %s" msgstr "Omedelbart slutförande förväntas: %s" #: src/hed/dmc/gridftp/Lister.cpp:1014 msgid "Immediate completion expected" msgstr "Omedelbart slutförande förväntas" #: src/hed/dmc/gridftp/Lister.cpp:1027 #, c-format msgid "Missing information in reply: %s" msgstr "Saknad information i svar: %s" #: src/hed/dmc/gridftp/Lister.cpp:1061 #, c-format msgid "Missing final reply: %s" msgstr "Saknat sista svar: %s" #: src/hed/dmc/gridftp/Lister.cpp:1085 #, c-format msgid "Unexpected immediate completion: %s" msgstr "Oväntat omedelbart slutförande: %s" #: src/hed/dmc/gridftp/Lister.cpp:1097 #, c-format msgid "LIST/MLST failed: %s" msgstr "LIST/MLST misslyckades: %s" #: src/hed/dmc/gridftp/Lister.cpp:1102 msgid "LIST/MLST failed" msgstr "LIST/MLST misslyckades" #: src/hed/dmc/gridftp/Lister.cpp:1152 msgid "MLSD is not supported - trying NLST" msgstr "MLSD stöds inte - försöker med NLST" #: src/hed/dmc/gridftp/Lister.cpp:1166 #, c-format msgid "Immediate completion: %s" msgstr "Omedelbart färdigställande: %s" #: src/hed/dmc/gridftp/Lister.cpp:1174 #, c-format msgid "NLST/MLSD failed: %s" msgstr "NLST/MLSD misslyckades: %s" #: src/hed/dmc/gridftp/Lister.cpp:1180 msgid "NLST/MLSD failed" msgstr "NLST/MLSD misslyckades" #: src/hed/dmc/gridftp/Lister.cpp:1201 #, c-format msgid "Data transfer aborted: %s" msgstr "Dataöverföring avbruten: %s" #: src/hed/dmc/gridftp/Lister.cpp:1206 msgid "Data transfer aborted" msgstr "Dataöverföring avbruten" #: src/hed/dmc/gridftp/Lister.cpp:1218 msgid "Failed to transfer data" msgstr "Misslyckades med att överföra data" #: src/hed/dmc/http/DataPointHTTP.cpp:391 #: src/hed/dmc/http/DataPointHTTP.cpp:520 #: src/hed/dmc/http/DataPointHTTP.cpp:601 #: src/hed/dmc/http/DataPointHTTP.cpp:1003 #: src/hed/dmc/http/DataPointHTTP.cpp:1147 #: src/hed/dmc/http/DataPointHTTP.cpp:1292 #, c-format msgid "Redirecting to %s" msgstr "Omdirigerar till %s" #: src/hed/dmc/http/DataPointHTTP.cpp:673 #, c-format msgid "Stat: obtained size %llu" msgstr "Stat: erhållen storlek: %llu" #: src/hed/dmc/http/DataPointHTTP.cpp:677 #, c-format msgid "Stat: obtained modification time %s" msgstr "Stat: erhållen ändringstid %s" #: src/hed/dmc/http/DataPointHTTP.cpp:906 #, c-format msgid "Check: obtained size %llu" msgstr "Check: erhållen storlek: %llu" #: src/hed/dmc/http/DataPointHTTP.cpp:908 #, c-format msgid "Check: obtained modification time %s" msgstr "Check: erhållen ändringstid %s" #: src/hed/dmc/http/DataPointHTTP.cpp:1020 #: src/hed/dmc/http/DataPointHTTP.cpp:1167 #, c-format msgid "HTTP failure %u - %s" msgstr "HTTP-fel %u - %s" #: src/hed/dmc/ldap/DataPointLDAP.cpp:36 msgid "" 
"Missing reference to factory and/or module. Currently safe unloading of LDAP " "DMC is not supported. Report to developers." msgstr "" "Saknar referens till fabrik och/eller modul. För närvarande stöds inte säker " "utladdning av LDAP-DMC. Rapportera till utvecklare." #: src/hed/dmc/ldap/LDAPQuery.cpp:151 msgid "SASL Interaction" msgstr "SASL-växelverkan" #: src/hed/dmc/ldap/LDAPQuery.cpp:199 #, c-format msgid "Challenge: %s" msgstr "Utmaning: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:203 #, c-format msgid "Default: %s" msgstr "Förval: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:279 #, c-format msgid "LDAPQuery: Initializing connection to %s:%d" msgstr "LDAPQuery: Initierar förbindelse till %s:%d" #: src/hed/dmc/ldap/LDAPQuery.cpp:283 #, c-format msgid "LDAP connection already open to %s" msgstr "LDAP-förbindelse är redan öppen till %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:297 #, c-format msgid "Could not open LDAP connection to %s" msgstr "Kunde inte öppna LDAP-förbindelse till %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:318 #, c-format msgid "Failed to create ldap bind thread (%s)" msgstr "Misslyckades med att skapa trÃ¥d för ldap bind (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:325 #, c-format msgid "Ldap bind timeout (%s)" msgstr "Ldap bind timeout (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:332 #, c-format msgid "Failed to bind to ldap server (%s)" msgstr "Misslyckades med att binda till ldap-server: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:353 #, c-format msgid "Could not set LDAP network timeout (%s)" msgstr "Kunde inte ange LDAP-nätverkstimeout (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:361 #, c-format msgid "Could not set LDAP timelimit (%s)" msgstr "Kunde inte ange LDAP-tidsgräns (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:368 #, c-format msgid "Could not set LDAP protocol version (%s)" msgstr "Kunde inte ange LDAP-protokollversion (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:436 #, c-format msgid "LDAPQuery: Querying %s" msgstr "LDAPQuery: FrÃ¥gar %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:438 #, c-format msgid " base dn: %s" msgstr " bas-dn: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:440 #, c-format msgid " filter: %s" msgstr " filter: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:442 msgid " attributes:" msgstr " attribut:" #: src/hed/dmc/ldap/LDAPQuery.cpp:445 #: src/services/gridftpd/misc/ldapquery.cpp:399 #, c-format msgid " %s" msgstr " %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:482 src/hed/dmc/ldap/LDAPQuery.cpp:548 #, c-format msgid "%s (%s)" msgstr "%s (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:506 #, c-format msgid "LDAPQuery: Getting results from %s" msgstr "LDAPQuery: ErhÃ¥ller resultat frÃ¥n %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:509 #, c-format msgid "Error: no LDAP query started to %s" msgstr "Fel: ingen LDAP-förfrÃ¥gan pÃ¥börjad till %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:543 #, c-format msgid "LDAP query timed out: %s" msgstr "LDAP-förfrÃ¥gan avbröts pÃ¥ grund av timeout: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:23 #, c-format msgid "Replacing existing token for %s in Rucio token cache" msgstr "Byter ut existerande token för %s i Rucios token-cache" #: src/hed/dmc/rucio/DataPointRucio.cpp:36 #, c-format msgid "Found existing token for %s in Rucio token cache with expiry time %s" msgstr "" "Hittade existerande token för %s i Rucios token-cache vars giltighetstid gÃ¥r " "ut %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:39 #, c-format msgid "Rucio token for %s has expired or is about to expire" msgstr "Rucios token för %s har gÃ¥tt ut eller är pÃ¥ väg att gÃ¥ ut" #: src/hed/dmc/rucio/DataPointRucio.cpp:101 #, c-format msgid 
"Extracted nickname %s from credentials to use for RUCIO_ACCOUNT" msgstr "Extraherade smeknamn %s frÃ¥n referenser att använda som Rucio-konto" #: src/hed/dmc/rucio/DataPointRucio.cpp:104 msgid "Failed to extract VOMS nickname from proxy" msgstr "Misslyckades med att extrahera VOMS-smeknamn frÃ¥n proxy" #: src/hed/dmc/rucio/DataPointRucio.cpp:106 #, c-format msgid "Using Rucio account %s" msgstr "Använder Rucio-konto %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:146 #, c-format msgid "" "Bad path for %s: Rucio supports read/write at /objectstores and read-only " "at /replicas" msgstr "" "Felaktig sökväg för %s: Rucio stöder läsning/skrivning pÃ¥ /objectstores och " "endast läsning pÃ¥ /replicas" #: src/hed/dmc/rucio/DataPointRucio.cpp:162 #: src/services/candypond/CandyPond.cpp:140 #: src/services/candypond/CandyPond.cpp:347 #, c-format msgid "Can't handle URL %s" msgstr "Kan inte hantera URL %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:310 #, c-format msgid "Acquired auth token for %s: %s" msgstr "Erhöll autentiserings-token för %s: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:367 #, c-format msgid "Rucio returned %s" msgstr "Rucio returnerade: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:393 #, c-format msgid "Failed to parse Rucio response: %s" msgstr "Misslyckades med att tolka Rucio-svar: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:399 #, c-format msgid "Filename not returned in Rucio response: %s" msgstr "Filnamn returnerades inte i Rucio-svar: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:405 #, c-format msgid "Unexpected name returned in Rucio response: %s" msgstr "Oväntat namn returnerat i Rucio-svar: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:411 #, c-format msgid "No pfns returned in Rucio response: %s" msgstr "Inga fysiska filnamn (PFN) returnerade i Rucio-svar: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:422 #, c-format msgid "Cannot determine replica type for %s" msgstr "Kan inte bestämma replika-typ för %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:424 #, c-format msgid "%s: replica type %s" msgstr "%s: replika-typ %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:427 #, c-format msgid "Skipping %s replica %s" msgstr "Hoppar över %s replika %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:447 #, c-format msgid "No filesize information returned in Rucio response for %s" msgstr "Ingen information om filstorlek returnerad i Rucio-svar för %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:450 #, c-format msgid "%s: size %llu" msgstr "%s: storlek %llu" #: src/hed/dmc/rucio/DataPointRucio.cpp:454 #, c-format msgid "No checksum information returned in Rucio response for %s" msgstr "Ingen information om checksumma returnerad i Rucio-svar för %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:457 #, c-format msgid "%s: checksum %s" msgstr "%s: checksumma %s" #: src/hed/dmc/s3/DataPointS3.cpp:621 #, c-format msgid "Failed to write object %s: %s" msgstr "Misslyckades med att skriva objekt: %s: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:59 #, c-format msgid "TURL %s cannot be handled" msgstr "TURL %s kan inte hanteras" #: src/hed/dmc/srm/DataPointSRM.cpp:86 #, c-format msgid "Check: looking for metadata: %s" msgstr "Check: letar efter metadata: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:97 #, c-format msgid "Check: obtained size: %lli" msgstr "Check: erhÃ¥llen storlek: %lli" #: src/hed/dmc/srm/DataPointSRM.cpp:103 #, c-format msgid "Check: obtained checksum: %s" msgstr "Check: erhÃ¥llen checksumma: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:107 #, c-format msgid "Check: obtained modification date: %s" msgstr "Check: erhÃ¥llen 
ändringstid: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:111 msgid "Check: obtained access latency: low (ONLINE)" msgstr "Check: erhÃ¥llen Ã¥tkomst-latency: lÃ¥g (ONLINE)" #: src/hed/dmc/srm/DataPointSRM.cpp:115 msgid "Check: obtained access latency: high (NEARLINE)" msgstr "Check: erhÃ¥llen Ã¥tkomst-latency: hög (NEARLINE)" #: src/hed/dmc/srm/DataPointSRM.cpp:134 #, c-format msgid "Remove: deleting: %s" msgstr "Remove: tar bort: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:152 #, c-format msgid "Creating directory: %s" msgstr "Skapar katalog: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:200 src/hed/dmc/srm/DataPointSRM.cpp:249 msgid "Calling PrepareReading when request was already prepared!" msgstr "Anropar PrepareReading när begäran redan förberetts!" #: src/hed/dmc/srm/DataPointSRM.cpp:220 #, c-format msgid "File %s is NEARLINE, will make request to bring online" msgstr "Filen %s är NEARLINE, kommer att göra begäran att bringa online" #: src/hed/dmc/srm/DataPointSRM.cpp:229 #, c-format msgid "Bring online request %s is still in queue, should wait" msgstr "Begäran att bringa online %s är fortfarande i kö, ska vänta" #: src/hed/dmc/srm/DataPointSRM.cpp:234 #, c-format msgid "Bring online request %s finished successfully, file is now ONLINE" msgstr "" "Begäran att bringa online %s avslutades framgÃ¥ngsrikt, filen är nu ONLINE" #: src/hed/dmc/srm/DataPointSRM.cpp:240 #, c-format msgid "" "Bad logic for %s - bringOnline returned ok but SRM request is not finished " "successfully or on going" msgstr "" "DÃ¥lig logik för %s - bringOnline returnerade OK men SRM-begäran har inte " "avslutats framgÃ¥ngsrikt eller pÃ¥gÃ¥r" #: src/hed/dmc/srm/DataPointSRM.cpp:268 src/hed/dmc/srm/DataPointSRM.cpp:411 msgid "None of the requested transfer protocols are supported" msgstr "Inget av de begärda överföringsprotokollen stöds" #: src/hed/dmc/srm/DataPointSRM.cpp:281 #, c-format msgid "Get request %s is still in queue, should wait %i seconds" msgstr "Begäran att hämta %s är fortfarande i kö, ska vänta %i sekunder" #: src/hed/dmc/srm/DataPointSRM.cpp:289 src/hed/dmc/srm/DataPointSRM.cpp:468 #, c-format msgid "Checking URL returned by SRM: %s" msgstr "Kontrollerar URS returnerad av SRM: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:304 src/hed/dmc/srm/DataPointSRM.cpp:483 #, c-format msgid "SRM returned no useful Transfer URLs: %s" msgstr "SRM returnerade inga användbara överförings-URLer: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:311 #, c-format msgid "" "Bad logic for %s - getTURLs returned ok but SRM request is not finished " "successfully or on going" msgstr "" "DÃ¥lig logik för %s - getTURLs returnerade OK men SRM-begäran har inte " "avslutats framgÃ¥ngsrikt eller pÃ¥gÃ¥r" #: src/hed/dmc/srm/DataPointSRM.cpp:319 msgid "StartReading" msgstr "StartReading" #: src/hed/dmc/srm/DataPointSRM.cpp:321 msgid "StartReading: File was not prepared properly" msgstr "StartReading: Fil förbereddes inte pÃ¥ rätt sätt" #: src/hed/dmc/srm/DataPointSRM.cpp:331 src/hed/dmc/srm/DataPointSRM.cpp:510 #, c-format msgid "Redirecting to new URL: %s" msgstr "Omdirigerar till ny URL: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:392 msgid "Calling PrepareWriting when request was already prepared!" 
msgstr "Anropar PrepareWriting när begäran redan förberetts" #: src/hed/dmc/srm/DataPointSRM.cpp:421 msgid "No space token specified" msgstr "Inget spacetoken angivet" #: src/hed/dmc/srm/DataPointSRM.cpp:427 msgid "Warning: Using SRM protocol v1 which does not support space tokens" msgstr "Varning: Använder SRM-protokoll v1 som inte stöder spacetoken" #: src/hed/dmc/srm/DataPointSRM.cpp:430 #, c-format msgid "Using space token description %s" msgstr "Använder spacetokenbeskrivning: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:436 #, c-format msgid "Error looking up space tokens matching description %s" msgstr "Fel vid uppslagning av spacetoken som matchar beskrivning %s" #: src/hed/dmc/srm/DataPointSRM.cpp:440 #, c-format msgid "No space tokens found matching description %s" msgstr "Hittade inget spacetoken som matchar beskrivning %s" #: src/hed/dmc/srm/DataPointSRM.cpp:445 #, c-format msgid "Using space token %s" msgstr "Använder spacetoken: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:460 #, c-format msgid "Put request %s is still in queue, should wait %i seconds" msgstr "Begäran att spara %s är fortfarande i kö, ska vänta %i sekunder" #: src/hed/dmc/srm/DataPointSRM.cpp:490 #, c-format msgid "" "Bad logic for %s - putTURLs returned ok but SRM request is not finished " "successfully or on going" msgstr "" "DÃ¥lig logik för %s - putTURLs returnerade OK men SRM-begäran har inte " "avslutats framgÃ¥ngsrikt eller pÃ¥gÃ¥r" #: src/hed/dmc/srm/DataPointSRM.cpp:498 msgid "StartWriting" msgstr "StartWriting" #: src/hed/dmc/srm/DataPointSRM.cpp:500 msgid "StartWriting: File was not prepared properly" msgstr "StartWriting: Fil förbereddes inte pÃ¥ rätt sätt" #: src/hed/dmc/srm/DataPointSRM.cpp:559 #, c-format msgid "FinishWriting: looking for metadata: %s" msgstr "FinishWriting: letar efter metadata: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:574 #, c-format msgid "FinishWriting: obtained checksum: %s" msgstr "FinishWriting: erhÃ¥llen checksumma: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:577 #, c-format msgid "" "Calculated/supplied transfer checksum %s matches checksum reported by SRM " "destination %s" msgstr "" "Beräknad/tillhandahÃ¥llen överföringschecksumma %s stämmer överens med " "checksumma rapporterad av SRM-destinationen %s" #: src/hed/dmc/srm/DataPointSRM.cpp:580 #, c-format msgid "" "Checksum mismatch between calculated/supplied checksum (%s) and checksum " "reported by SRM destination (%s)" msgstr "" "Beräknad/tillhandahÃ¥llen överföringschecksumma %s stämmer inte överens med " "checksumma rapporterad av SRM-destinationen (%s)" #: src/hed/dmc/srm/DataPointSRM.cpp:583 #, c-format msgid "" "Checksum type of SRM (%s) and calculated/supplied checksum (%s) differ, " "cannot compare" msgstr "" "Typ av checksumma frÃ¥n SRM (%s) och beräknad/tillhandahÃ¥llen checksumma (%s) " "är olika, kan inte jämföra" #: src/hed/dmc/srm/DataPointSRM.cpp:584 src/hed/dmc/srm/DataPointSRM.cpp:585 msgid "No checksum information from server" msgstr "Ingen information om checksumma frÃ¥n server" #: src/hed/dmc/srm/DataPointSRM.cpp:586 src/hed/dmc/srm/DataPointSRM.cpp:587 msgid "No checksum verification possible" msgstr "Ingen verifiering av checksumma möjlig" #: src/hed/dmc/srm/DataPointSRM.cpp:593 msgid "Failed to release completed request" msgstr "Misslyckades med att frigöra slutförd begäran" #: src/hed/dmc/srm/DataPointSRM.cpp:636 src/hed/dmc/srm/DataPointSRM.cpp:703 #, c-format msgid "ListFiles: looking for metadata: %s" msgstr "ListFiles: letar efter metadata: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:821 #, c-format msgid 
"plugin for transport protocol %s is not installed" msgstr "plugin för överföringsprotokoll %s är inte installerad" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:51 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:90 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:142 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:181 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:221 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:259 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:303 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:365 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:438 msgid "SRM did not return any information" msgstr "SRM returnerade inte nÃ¥gon information" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:316 #, c-format msgid "File could not be moved to Running state: %s" msgstr "Fil kunde inte flyttas till tillstÃ¥nd Running: %s" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:372 msgid "SRM did not return any useful information" msgstr "SRM returnerade inte nÃ¥gon användbar information" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:450 msgid "File could not be moved to Done state" msgstr "Fil kunde inte flyttas till tillstÃ¥nd Done" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:88 msgid "Could not determine version of server" msgstr "Kunde inte bestämma servers version" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:94 #, c-format msgid "Server SRM version: %s" msgstr "Server-SRM-version: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:99 #, c-format msgid "Server implementation: %s" msgstr "Server-implementering: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:136 #, c-format msgid "Adding space token %s" msgstr "Lägger till spacetoken: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:163 msgid "No request tokens found" msgstr "Hittade inga begäran-token" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:176 #, c-format msgid "Adding request token %s" msgstr "Lägger till begäran-token %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:237 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:642 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:828 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1385 #, c-format msgid "%s: File request %s in SRM queue. Sleeping for %i seconds" msgstr "%s: Fil-begäran %s i SRM-kö. Väntar i %i sekunder" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:275 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:327 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:698 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:764 #, c-format msgid "File is ready! TURL is %s" msgstr "Fil är klar! TURL är %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:359 #, c-format msgid "Setting userRequestDescription to %s" msgstr "Sätter userRequestDescription till %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:414 #, c-format msgid "%s: Bring online request %s in SRM queue. Sleeping for %i seconds" msgstr "%s: Begäran om att bringa online %s i SRM-kö. Väntar i %i sekunder" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:457 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1160 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1194 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1228 msgid "No request token specified!" msgstr "Inget begäran-token angivet!" 
#: src/hed/dmc/srm/srmclient/SRM22Client.cpp:524 msgid "Request is reported as ABORTED, but all files are done" msgstr "Begäran rapporteras som avbruten, men alla filer är färdiga" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:530 msgid "Request is reported as ABORTED, since it was cancelled" msgstr "Begäran rapporteras som avbruten, eftersom den avbröts" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:536 #, c-format msgid "Request is reported as ABORTED. Reason: %s" msgstr "Begäran rapporteras som avbruten. Orsak: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:673 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:745 #, c-format msgid "Path %s is invalid, creating required directories" msgstr "Sökväg %s är ogiltig, skapar nödvändiga kataloger" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:678 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:750 #, c-format msgid "Error creating required directories for %s" msgstr "Fel vid skapande av nödvändiga kataloger för %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:851 msgid "Too many files in one request - please try again with fewer files" msgstr "För många filer i en begäran - försök igen med färre filer" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:899 msgid "" "Directory size is too large to list in one call, will have to call multiple " "times" msgstr "" "Katalogstorleken är för stor för att lista i ett anrop, kommer att behöva " "anropa flera gånger" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:936 msgid "" "Failure in parsing response from server - some information may be inaccurate" msgstr "" "Misslyckades med att tolka svar från server - en del information kan vara " "felaktig" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:942 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:566 #: src/services/gridftpd/misc/ldapquery.cpp:183 #: src/services/gridftpd/misc/ldapquery.cpp:186 #: src/services/gridftpd/misc/ldapquery.cpp:392 #: src/services/gridftpd/misc/ldapquery.cpp:622 #: src/services/gridftpd/misc/ldapquery.cpp:631 #, c-format msgid "%s: %s" msgstr "%s: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:975 #, c-format msgid "" "Directory size is larger than %i files, will have to call multiple times" msgstr "" "Katalogstorleken är större än %i filer, kommer att behöva anropa flera gånger" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1185 #, c-format msgid "Files associated with request token %s released successfully" msgstr "Filer associerade med begäran-token %s frigjordes framgångsrikt" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1219 #, c-format msgid "Files associated with request token %s put done successfully" msgstr "Filer associerade med begäran-token %s sparades framgångsrikt" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1254 #, c-format msgid "Files associated with request token %s aborted successfully" msgstr "Filer associerade med begäran-token %s avbröts framgångsrikt" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1271 #, c-format msgid "" "Failed to find metadata info on %s for determining file or directory delete" msgstr "" "Misslyckades med att hitta metadatainformation för %s för att bestämma " "borttagande av fil eller katalog" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1277 msgid "Type is file, calling srmRm" msgstr "Typ är fil, anropar srmRm" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1281 msgid "Type is dir, calling srmRmDir" msgstr "Typ är katalog, anropar srmRmDir" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1285 msgid "File type is not available, attempting file delete" msgstr "Filtyp är inte tillgänglig, 
försöker med borttagande av fil" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1288 msgid "File delete failed, attempting directory delete" msgstr "Borttagande av fil misslyckades, försöker med borttagande av katalog" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1313 #, c-format msgid "File %s removed successfully" msgstr "Fil %s borttagen framgångsrikt" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1340 #, c-format msgid "Directory %s removed successfully" msgstr "Katalog %s borttagen framgångsrikt" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1455 #, c-format msgid "Checking for existence of %s" msgstr "Kontrollerar om %s finns" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1458 #, c-format msgid "File already exists: %s" msgstr "Filen finns redan: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1495 #, c-format msgid "Error creating directory %s: %s" msgstr "Fel vid skapande av katalog %s: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:82 #, c-format msgid "Attempting to contact %s on port %i" msgstr "Försöker att kontakta %s på port %i" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:88 #, c-format msgid "Storing port %i for %s" msgstr "Sparar port %i för %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:102 #, c-format msgid "No port succeeded for %s" msgstr "Ingen port lyckades för %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:112 #, c-format msgid "URL %s disagrees with stored SRM info, testing new info" msgstr "URL %s överensstämmer inte med sparad SRM-info, provar ny info" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:118 #, c-format msgid "Replacing old SRM info with new for URL %s" msgstr "Byter ut gammal SRM-info mot ny för URL %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:140 #, c-format msgid "SOAP request: %s" msgstr "SOAP-begäran: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:147 #: src/hed/dmc/srm/srmclient/SRMClient.cpp:176 #, c-format msgid "SOAP fault: %s" msgstr "SOAP-fel: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:148 msgid "Reconnecting" msgstr "Återansluter" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:158 #, c-format msgid "SRM Client status: %s" msgstr "SRM-klientstatus: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:164 msgid "No SOAP response" msgstr "Inget SOAP-svar" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:171 #: src/hed/identitymap/ArgusPDPClient.cpp:250 #, c-format msgid "SOAP response: %s" msgstr "SOAP-svar: %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:76 #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:162 #, c-format msgid "Failed to acquire lock on file %s" msgstr "Misslyckades med att erhålla lås för fil %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:81 #, c-format msgid "Error reading info from file %s:%s" msgstr "Fel vid läsning av information från fil %s:%s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:95 #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:187 #, c-format msgid "Bad or old format detected in file %s, in line %s" msgstr "Felaktigt eller gammalt format upptäckt i fil %s, på rad %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:100 #, c-format msgid "Cannot convert string %s to int in line %s" msgstr "Kan inte konvertera sträng %s till heltal på rad %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:203 #, c-format msgid "Error writing srm info file %s" msgstr "Fel vid skrivning av SRM-infofil %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:64 msgid "" "Missing reference to factory and/or module. It is unsafe to use Xrootd in " "non-persistent mode - Xrootd code is disabled. Report to developers." msgstr "" "Saknar referens till fabrik och/eller modul. 
Det är osäkert att använda " "Xrootd i icke-persistent mode - Xrootd-koden är deaktiverad. Rapportera till " "utvecklare." #: src/hed/dmc/xrootd/DataPointXrootd.cpp:103 #, c-format msgid "Could not handle checksum %s: skip checksum check" msgstr "Kunde inte hantera checksumma %s: hoppar över kontroll av checksumma" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:109 #, c-format msgid "Failed to create xrootd copy job: %s" msgstr "Misslyckades med att skapa xrootd-kopierings-jobb: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:126 #, c-format msgid "Failed to copy %s: %s" msgstr "Misslyckades med att kopiera %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:177 #, c-format msgid "Reading %u bytes from byte %llu" msgstr "Läser %u byte från byte %llu" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:179 #, c-format msgid "Read %i bytes" msgstr "Läste %i byte" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:210 #, c-format msgid "Could not open file %s for reading: %s" msgstr "Kunde inte öppna fil %s för läsning: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:225 #, c-format msgid "Unable to find file size of %s" msgstr "Kunde inte hitta filstorlek för %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:289 #, c-format msgid "DataPointXrootd::write_file got position %d and offset %d, has to seek" msgstr "" "DataPointXrootd::write_file fick position %d och offset %d, måste göra seek" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:312 #, c-format msgid "xrootd write failed: %s" msgstr "xrootd skrivning misslyckades: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:321 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:395 #, c-format msgid "xrootd close failed: %s" msgstr "xrootd stängning misslyckades: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:344 #, c-format msgid "Failed to open %s, trying to create parent directories" msgstr "Misslyckades med att öppna %s, försöker skapa föräldrakataloger" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:357 #, c-format msgid "xrootd open failed: %s" msgstr "xrootd öppning misslyckades: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:371 #, c-format msgid "close failed: %s" msgstr "stängning misslyckades: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:413 #, c-format msgid "Read access not allowed for %s: %s" msgstr "Åtkomst för läsning inte tillåten för %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:432 #, c-format msgid "Could not stat file %s: %s" msgstr "Kunde inte göra stat på filen %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:437 msgid "Not getting checksum of zip constituent" msgstr "Hämtar inte checksumma för zip-komponent" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:441 #, c-format msgid "Could not get checksum of %s: %s" msgstr "Kunde inte hämta checksumma %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:445 #, c-format msgid "Checksum %s" msgstr "Checksumma: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:483 #, c-format msgid "Failed to open directory %s: %s" msgstr "Misslyckades med att öppna katalog %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:501 #, c-format msgid "Error while reading dir %s: %s" msgstr "Fel vid läsande av katalog %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:551 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:569 #, c-format msgid "Error creating required dirs: %s" msgstr "Fel vid skapande av nödvändiga kataloger: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:158 msgid "PDPD location is missing" msgstr "PDPD-plats saknas" #: src/hed/identitymap/ArgusPDPClient.cpp:161 #, c-format msgid "PDPD location: %s" msgstr "PDPD-plats: %s" #: 
src/hed/identitymap/ArgusPDPClient.cpp:166 #: src/hed/identitymap/ArgusPEPClient.cpp:129 msgid "Conversion mode is set to SUBJECT" msgstr "Konverterings-mode är satt till SUBJECT" #: src/hed/identitymap/ArgusPDPClient.cpp:169 #: src/hed/identitymap/ArgusPEPClient.cpp:132 msgid "Conversion mode is set to CREAM" msgstr "Konverterings-mode är satt till CREAM" #: src/hed/identitymap/ArgusPDPClient.cpp:172 #: src/hed/identitymap/ArgusPEPClient.cpp:135 msgid "Conversion mode is set to EMI" msgstr "Konverterings-mode är satt till EMI" #: src/hed/identitymap/ArgusPDPClient.cpp:175 #: src/hed/identitymap/ArgusPEPClient.cpp:138 #, c-format msgid "Unknown conversion mode %s, using default" msgstr "Okänd konverterings-mode %s, använder förval" #: src/hed/identitymap/ArgusPDPClient.cpp:242 #, c-format msgid "Failed to contact PDP server: %s" msgstr "Misslyckades med att kontakta PDP-server: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:245 #, c-format msgid "There was no SOAP response return from PDP server: %s" msgstr "Inget SOAP-svar returnerades från PDP-server: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:360 #: src/hed/identitymap/ArgusPEPClient.cpp:286 #, c-format msgid "Have %i requests to process" msgstr "Har %i begäranden att processera" #: src/hed/identitymap/ArgusPDPClient.cpp:362 msgid "Creating a client to Argus PDP service" msgstr "Skapar en klient till Argus PDP-tjänst" #: src/hed/identitymap/ArgusPDPClient.cpp:375 #, c-format msgid "XACML authorisation request: %s" msgstr "XACML-auktoriseringsbegäran: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:386 #, c-format msgid "XACML authorisation response: %s" msgstr "XACML-auktoriseringssvar: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:426 #, c-format msgid "%s is not authorized to do action %s in resource %s " msgstr "%s är inte auktoriserat att utföra handling %s i resursen %s" #: src/hed/identitymap/ArgusPDPClient.cpp:429 #: src/hed/identitymap/ArgusPDPClient.cpp:434 #: src/hed/identitymap/ArgusPEPClient.cpp:336 msgid "Not authorized" msgstr "Inte auktoriserad" #: src/hed/identitymap/ArgusPDPClient.cpp:439 #: src/hed/identitymap/ArgusPEPClient.cpp:341 #: src/hed/identitymap/IdentityMap.cpp:219 src/hed/shc/legacy/LegacyMap.cpp:216 #, c-format msgid "Grid identity is mapped to local identity '%s'" msgstr "Grididentitet mappas till lokal identitet '%s'" #: src/hed/identitymap/ArgusPDPClient.cpp:566 #: src/hed/identitymap/ArgusPEPClient.cpp:655 msgid "Doing CREAM request" msgstr "Gör en CREAM-begäran" #: src/hed/identitymap/ArgusPDPClient.cpp:580 #: src/hed/identitymap/ArgusPDPClient.cpp:748 #: src/hed/identitymap/ArgusPEPClient.cpp:683 #, c-format msgid "Adding profile-id value: %s" msgstr "Lägger till profil-id-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:592 #: src/hed/identitymap/ArgusPDPClient.cpp:759 #: src/hed/identitymap/ArgusPEPClient.cpp:694 #, c-format msgid "Adding subject-id value: %s" msgstr "Lägger till subjekt-id-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:600 #: src/hed/identitymap/ArgusPDPClient.cpp:767 #: src/hed/identitymap/ArgusPEPClient.cpp:704 #, c-format msgid "Adding subject-issuer value: %s" msgstr "Lägger till subjekt-utfärdare-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:607 #: src/hed/identitymap/ArgusPEPClient.cpp:713 #, c-format msgid "Adding virtual-organization value: %s" msgstr "Lägger till virtuell-organisation-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:620 #: src/hed/identitymap/ArgusPEPClient.cpp:730 #, c-format msgid "Adding FQAN value: %s" msgstr "Lägger till 
FQAN-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:629 #: src/hed/identitymap/ArgusPEPClient.cpp:739 #, c-format msgid "Adding FQAN/primary value: %s" msgstr "Lägger till FQAN/primär-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:639 #: src/hed/identitymap/ArgusPEPClient.cpp:750 #, c-format msgid "Adding cert chain value: %s" msgstr "Lägger till certifikat-kedja-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:648 #: src/hed/identitymap/ArgusPDPClient.cpp:840 #: src/hed/identitymap/ArgusPEPClient.cpp:760 #, c-format msgid "Adding resource-id value: %s" msgstr "Lägger till resurs-id-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:662 #: src/hed/identitymap/ArgusPDPClient.cpp:863 #: src/hed/identitymap/ArgusPEPClient.cpp:775 #, c-format msgid "Adding action-id value: %s" msgstr "Lägger till handlings-id-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:666 #: src/hed/identitymap/ArgusPEPClient.cpp:786 #, c-format msgid "CREAM request generation failed: %s" msgstr "Generering av CREAM-begäran misslyckades: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:732 msgid "Doing EMI request" msgstr "Gör en EMI-begäran" #: src/hed/identitymap/ArgusPDPClient.cpp:774 #, c-format msgid "Adding Virtual Organization value: %s" msgstr "Lägger till virtuell-organisation-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:797 #, c-format msgid "Adding VOMS group value: %s" msgstr "Lägger till VOMS-grupp-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:803 #, c-format msgid "Adding VOMS primary group value: %s" msgstr "Lägger till VOMS-primär-grupp-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:822 #, c-format msgid "Adding VOMS role value: %s" msgstr "Lägger till VOMS-roll-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:829 #, c-format msgid "Adding VOMS primary role value: %s" msgstr "Lägger till VOMS-primär-roll-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:846 #, c-format msgid "Adding resource-owner value: %s" msgstr "Lägger till resurs-ägare-värde: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:867 #, c-format msgid "EMI request generation failed: %s" msgstr "Generering av EMI-begäran misslyckades: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:119 msgid "PEPD location is missing" msgstr "PEPD-plats saknas" #: src/hed/identitymap/ArgusPEPClient.cpp:122 #, c-format msgid "PEPD location: %s" msgstr "PEPD-plats: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:126 msgid "Conversion mode is set to DIRECT" msgstr "Konverterings-mode är satt till DIRECT" #: src/hed/identitymap/ArgusPEPClient.cpp:331 #, c-format msgid "" "Not authorized according to request:\n" "%s" msgstr "" "Inte auktoriserad enligt begäran:\n" "%s" #: src/hed/identitymap/ArgusPEPClient.cpp:333 #, c-format msgid "%s is not authorized to do action %s in resource %s" msgstr "%s är inte auktoriserat att utföra handling %s i resursen %s" #: src/hed/identitymap/ArgusPEPClient.cpp:361 msgid "Subject of request is null" msgstr "Begärans subjekt är null" #: src/hed/identitymap/ArgusPEPClient.cpp:366 #, c-format msgid "Can not create XACML SubjectAttribute: %s" msgstr "Kan inte skapa XACML SubjectAttribute: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:375 msgid "Can not create XACML Resource" msgstr "Kan inte skapa XACML Resource" #: src/hed/identitymap/ArgusPEPClient.cpp:381 #, c-format msgid "Can not create XACML ResourceAttribute: %s" msgstr "Kan inte skapa XACML ResourceAttribute: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:390 msgid "Can not create XACML Action" msgstr "Kan inte skapa XACML Action" #: 
src/hed/identitymap/ArgusPEPClient.cpp:397 #, c-format msgid "Can not create XACML ActionAttribute: %s" msgstr "Kan inte skapa XACML ActionAttribute: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:407 msgid "Can not create XACML request" msgstr "Kan inte skapa XACML-begäran" #: src/hed/identitymap/ArgusPEPClient.cpp:539 #, c-format msgid "Converting to CREAM action - namespace: %s, operation: %s" msgstr "Konverterar till CREAM-handling - namnrymd: %s, operation: %s" #: src/hed/identitymap/IdentityMap.cpp:196 #, c-format msgid "PDP: %s can not be loaded" msgstr "PDP: %s kan inte laddas in" #: src/hed/libs/common/ArcLocation.cpp:128 #, c-format msgid "" "Can not determine the install location. Using %s. Please set ARC_LOCATION if " "this is not correct." msgstr "" "Kan inte bestämma installationsplats. Använder %s. Ange ARC_LOCATION om " "detta inte är korrekt." #: src/hed/libs/common/DateTime.cpp:86 src/hed/libs/common/DateTime.cpp:631 #: src/hed/libs/common/StringConv.h:25 msgid "Empty string" msgstr "Tom sträng" #: src/hed/libs/common/DateTime.cpp:107 #, c-format msgid "Can not parse date: %s" msgstr "Kan inte tolka datum: %s" #: src/hed/libs/common/DateTime.cpp:130 #, c-format msgid "Can not parse time: %s" msgstr "Kan inte tolka tid: %s" #: src/hed/libs/common/DateTime.cpp:160 #, c-format msgid "Can not parse time zone offset: %s" msgstr "Kan inte tolka tidszon: %s" #: src/hed/libs/common/DateTime.cpp:180 src/hed/libs/common/DateTime.cpp:199 #: src/hed/libs/common/DateTime.cpp:252 src/hed/libs/common/DateTime.cpp:291 #, c-format msgid "Illegal time format: %s" msgstr "Ogiltigt tidsformat: %s" #: src/hed/libs/common/DateTime.cpp:230 src/hed/libs/common/DateTime.cpp:283 #, c-format msgid "Can not parse month: %s" msgstr "Kan inte tolka månad: %s" #: src/hed/libs/common/DateTime.cpp:647 src/hed/libs/common/DateTime.cpp:688 #, c-format msgid "Invalid ISO duration format: %s" msgstr "Ogiltigt ISO-tidsperiodsformat: %s" #: src/hed/libs/common/DateTime.cpp:752 #, c-format msgid "Invalid period string: %s" msgstr "Ogiltig periodsträng: %s" #: src/hed/libs/common/DateTime.cpp:874 msgid "hour" msgid_plural "hours" msgstr[0] "timme" msgstr[1] "timmar" #: src/hed/libs/common/DateTime.cpp:880 msgid "minute" msgid_plural "minutes" msgstr[0] "minut" msgstr[1] "minuter" #: src/hed/libs/common/DateTime.cpp:886 msgid "second" msgid_plural "seconds" msgstr[0] "sekund" msgstr[1] "sekunder" #: src/hed/libs/common/FileLock.cpp:43 msgid "Cannot determine hostname from gethostname()" msgstr "Kan inte bestämma värdnamn från gethostname()" #: src/hed/libs/common/FileLock.cpp:92 #, c-format msgid "EACCES Error opening lock file %s: %s" msgstr "EACCES-fel vid öppnande av låsfil %s: %s" #: src/hed/libs/common/FileLock.cpp:97 #, c-format msgid "Error opening lock file %s in initial check: %s" msgstr "Fel vid öppnande av låsfil %s i initial check: %s" #: src/hed/libs/common/FileLock.cpp:104 #, c-format msgid "Error creating temporary file %s: %s" msgstr "Fel vid skapandet av temporär fil %s: %s" #: src/hed/libs/common/FileLock.cpp:113 #, c-format msgid "Could not create link to lock file %s as it already exists" msgstr "Kunde inte skapa länk till låsfil %s eftersom den redan existerar" #: src/hed/libs/common/FileLock.cpp:124 #, c-format msgid "Could not create lock file %s as it already exists" msgstr "Kunde inte skapa låsfil %s eftersom den redan existerar" #: src/hed/libs/common/FileLock.cpp:128 #, c-format msgid "Error creating lock file %s: %s" msgstr "Fel vid skapandet av låsfil %s: %s" #: 
src/hed/libs/common/FileLock.cpp:133 #, c-format msgid "Error writing to lock file %s: %s" msgstr "Fel vid skrivning till låsfil %s: %s" #: src/hed/libs/common/FileLock.cpp:141 #, c-format msgid "Error linking tmp file %s to lock file %s: %s" msgstr "Fel vid länkning av temporär fil %s till låsfil %s: %s" #: src/hed/libs/common/FileLock.cpp:150 #, c-format msgid "Error in lock file %s, even though linking did not return an error" msgstr "Fel i låsfil %s, trots att länkning inte returnerade ett fel" #: src/hed/libs/common/FileLock.cpp:159 #, c-format msgid "%li seconds since lock file %s was created" msgstr "%li sekunder sedan låsfilen %s skapades" #: src/hed/libs/common/FileLock.cpp:162 #, c-format msgid "Timeout has expired, will remove lock file %s" msgstr "Timeout har passerat, kommer att ta bort låsfil %s" #: src/hed/libs/common/FileLock.cpp:166 #, c-format msgid "Failed to remove stale lock file %s: %s" msgstr "Misslyckades med att ta bort gammal låsfil %s: %s" #: src/hed/libs/common/FileLock.cpp:178 #, c-format msgid "This process already owns the lock on %s" msgstr "Denna process äger redan låset på %s" #: src/hed/libs/common/FileLock.cpp:183 #, c-format msgid "" "The process owning the lock on %s is no longer running, will remove lock" msgstr "Processen som äger låset på %s kör inte längre, kommer att ta bort lås" #: src/hed/libs/common/FileLock.cpp:185 #, c-format msgid "Failed to remove file %s: %s" msgstr "Misslyckades med att ta bort fil %s: %s" #: src/hed/libs/common/FileLock.cpp:193 #, c-format msgid "The file %s is currently locked with a valid lock" msgstr "Filen %s är för tillfället låst med ett giltigt lås" #: src/hed/libs/common/FileLock.cpp:208 #, c-format msgid "Failed to unlock file with lock %s: %s" msgstr "Misslyckades med att låsa upp fil med lås %s: %s" #: src/hed/libs/common/FileLock.cpp:220 #, c-format msgid "Lock file %s doesn't exist" msgstr "Låsfil %s existerar inte" #: src/hed/libs/common/FileLock.cpp:222 #, c-format msgid "Error listing lock file %s: %s" msgstr "Fel vid listning av låsfil %s: %s" #: src/hed/libs/common/FileLock.cpp:228 #, c-format msgid "Found unexpected empty lock file %s. Must go back to acquire()" msgstr "Hittade oväntad tom låsfil %s. Måste gå tillbaka till acquire()" #: src/hed/libs/common/FileLock.cpp:234 #, c-format msgid "Error reading lock file %s: %s" msgstr "Fel vid läsning av låsfil %s: %s" #: src/hed/libs/common/FileLock.cpp:238 #, c-format msgid "Error with formatting in lock file %s" msgstr "Formateringsfel i låsfil %s" #: src/hed/libs/common/FileLock.cpp:248 #, c-format msgid "Lock %s is owned by a different host (%s)" msgstr "Lås %s ägs av en annan värd (%s)" #: src/hed/libs/common/FileLock.cpp:257 #, c-format msgid "Badly formatted pid %s in lock file %s" msgstr "Felaktigt formaterat pid %s i låsfil %s" #: src/hed/libs/common/FileLock.cpp:260 #, c-format msgid "Another process (%s) owns the lock on file %s" msgstr "En annan process (%s) äger låset på fil %s" #: src/hed/libs/common/IString.cpp:32 src/hed/libs/common/IString.cpp:41 #: src/hed/libs/common/IString.cpp:42 msgid "(empty)" msgstr "(tom)" #: src/hed/libs/common/IString.cpp:32 src/hed/libs/common/IString.cpp:41 #: src/hed/libs/common/IString.cpp:42 msgid "(null)" msgstr "(null)" #: src/hed/libs/common/Logger.cpp:58 #, c-format msgid "Invalid log level. Using default %s." msgstr "Ogiltig logg-nivå. Använder förval %s." #: src/hed/libs/common/Logger.cpp:123 #, c-format msgid "Invalid old log level. Using default %s." 
msgstr "Ogiltig gammal logg-nivÃ¥. Använder förval %s." #: src/hed/libs/common/OptionParser.cpp:106 #, c-format msgid "Cannot parse integer value '%s' for -%c" msgstr "Kan inte tolka heltalsvärdet '%s' för -%c" #: src/hed/libs/common/OptionParser.cpp:309 #: src/hed/libs/common/OptionParser.cpp:446 #, c-format msgid "Options Group %s:" msgstr "Alternativgrupp %s:" #: src/hed/libs/common/OptionParser.cpp:311 #: src/hed/libs/common/OptionParser.cpp:449 #, c-format msgid "%s:" msgstr "%s:" #: src/hed/libs/common/OptionParser.cpp:313 #, c-format msgid "Show %s help options" msgstr "Visa %s hjälpalternativ" #: src/hed/libs/common/OptionParser.cpp:342 msgid "Failed to parse command line options" msgstr "Misslyckades med att tolka kommandoradsalternativ" #: src/hed/libs/common/OptionParser.cpp:352 msgid "Use -? to get usage description" msgstr "Använd -? för att fÃ¥ användningsbeskrivning" #: src/hed/libs/common/OptionParser.cpp:429 msgid "Usage:" msgstr "Användning:" #: src/hed/libs/common/OptionParser.cpp:432 msgid "OPTION..." msgstr "ALTERNATIV..." #: src/hed/libs/common/OptionParser.cpp:438 msgid "Help Options:" msgstr "Hjälpalternativ:" #: src/hed/libs/common/OptionParser.cpp:439 msgid "Show help options" msgstr "Visa hjälpalternativ" #: src/hed/libs/common/Profile.cpp:199 src/hed/libs/common/Profile.cpp:273 #: src/hed/libs/common/Profile.cpp:404 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"inisections\" " "attribute cannot be the empty string." msgstr "" "Elementet \"%s\" i profilen ignoreras: värdet pÃ¥ \"inisections\"-attributet " "kan inte vara en tom sträng." #: src/hed/libs/common/Profile.cpp:205 src/hed/libs/common/Profile.cpp:279 #: src/hed/libs/common/Profile.cpp:411 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"initag\" attribute " "cannot be the empty string." msgstr "" "Elementet \"%s\" i profilen ignoreras: värdet pÃ¥ \"initag\"-attributet kan " "inte vara en tom sträng." #: src/hed/libs/common/Profile.cpp:419 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"initype\" " "attribute cannot be the empty string." msgstr "" "Elementet \"%s\" i profilen ignoreras: värdet pÃ¥ \"initype\"-attributet kan " "inte vara en tom sträng." #: src/hed/libs/common/Profile.cpp:422 #, c-format msgid "" "Element \"%s\" in the profile ignored: the \"inidefaultvalue\" attribute " "cannot be specified when the \"inisections\" and \"initag\" attributes have " "not been specified." msgstr "" "Elementet \"%s\" i profilen ignoreras: värdet pÃ¥ \"inidefaultvalue\"-" "attributet kan inte anges när \"inisections\"- and \"initag\"-attributen " "inte angivits." #: src/hed/libs/common/Profile.cpp:497 #, c-format msgid "" "In the configuration profile the 'initype' attribute on the \"%s\" element " "has a invalid value \"%s\"." msgstr "" "In inställningsprofilen har 'initype'-attributet pÃ¥ \"%s\"-elementet ett " "ogiltigt värde \"%s\"." 
#: src/hed/libs/common/Run_unix.cpp:226 msgid "Child monitoring signal detected" msgstr "Barnprocessmonitorering signal upptäckt" #: src/hed/libs/common/Run_unix.cpp:231 #, c-format msgid "Child monitoring error: %i" msgstr "Barnprocessmonitorering fel: %i" #: src/hed/libs/common/Run_unix.cpp:244 msgid "Child monitoring kick detected" msgstr "Barnprocessmonitorering kick upptäckt" #: src/hed/libs/common/Run_unix.cpp:247 msgid "Child monitoring internal communication error" msgstr "Barnprocessmonitorering internt kommunikationsfel" #: src/hed/libs/common/Run_unix.cpp:259 msgid "Child monitoring stdout is closed" msgstr "Barnprocessmonitorering stdout är stängd" #: src/hed/libs/common/Run_unix.cpp:269 msgid "Child monitoring stderr is closed" msgstr "Barnprocessmonitorering stderr är stängd" #: src/hed/libs/common/Run_unix.cpp:279 msgid "Child monitoring stdin is closed" msgstr "Barnprocessmonitorering stdin är stängd" #: src/hed/libs/common/Run_unix.cpp:297 #, c-format msgid "Child monitoring child %d exited" msgstr "Barnprocessmonitorering barnprocess %d avslutades" #: src/hed/libs/common/Run_unix.cpp:301 #, c-format msgid "Child monitoring lost child %d (%d)" msgstr "Barnprocessmonitorering borttappad barnprocess %d (%d)" #: src/hed/libs/common/Run_unix.cpp:322 #, c-format msgid "Child monitoring drops abandoned child %d (%d)" msgstr "Barnprocessmonitorering släpper övergiven barnprocess %d (%d)" #: src/hed/libs/common/Run_unix.cpp:483 msgid "Child was already started" msgstr "Barnprocess har redan startats" #: src/hed/libs/common/Run_unix.cpp:487 msgid "No arguments are assigned for external process" msgstr "Inga argument har tilldelats extern process" #: src/hed/libs/common/Run_unix.cpp:620 src/hed/libs/common/Run_unix.cpp:625 #, c-format msgid "Excepton while trying to start external process: %s" msgstr "Undantag under försök att starta extern process: %s" #: src/hed/libs/common/StringConv.h:31 #, c-format msgid "Conversion failed: %s" msgstr "Konvertering misslyckades: %s" #: src/hed/libs/common/StringConv.h:35 #, c-format msgid "Full string not used: %s" msgstr "Hela strängen användes inte: %s" #: src/hed/libs/common/Thread.cpp:261 msgid "Maximum number of threads running - putting new request into queue" msgstr "Maximalt antal trådar kör - ställer ny begäran i kö" #: src/hed/libs/common/Thread.cpp:309 #, c-format msgid "Thread exited with Glib error: %s" msgstr "Tråd avslutades med Glib-fel: %s" #: src/hed/libs/common/Thread.cpp:311 #, c-format msgid "Thread exited with Glib exception: %s" msgstr "Tråd avslutades med Glib-undantag: %s" #: src/hed/libs/common/Thread.cpp:313 #, c-format msgid "Thread exited with generic exception: %s" msgstr "Tråd avslutades med generellt undantag: %s" #: src/hed/libs/common/URL.cpp:137 #, c-format msgid "URL is not valid: %s" msgstr "URL är inte giltig: %s" #: src/hed/libs/common/URL.cpp:188 #, c-format msgid "Illegal URL - path must be absolute: %s" msgstr "Ogiltig URL - sökväg måste vara absolut: %s" #: src/hed/libs/common/URL.cpp:193 #, c-format msgid "Illegal URL - no hostname given: %s" msgstr "Ogiltig URL - inget värdnamn angivet: %s" #: src/hed/libs/common/URL.cpp:282 #, c-format msgid "Illegal URL - path must be absolute or empty: %s" msgstr "Ogiltig URL - sökväg måste vara absolut eller tom: %s" #: src/hed/libs/common/URL.cpp:298 #, c-format msgid "Illegal URL - no closing ] for IPv6 address found: %s" msgstr "Ogiltig URL - hittade ingen avslutande ] för IPv6-adress: %s" #: src/hed/libs/common/URL.cpp:306 #, c-format msgid "" "Illegal 
URL - closing ] for IPv6 address is followed by illegal token: %s" msgstr "Ogiltig URL - avslutande ] för IPv6-adress följs av ogiltigt token: %s" #: src/hed/libs/common/URL.cpp:322 #, c-format msgid "Invalid port number in %s" msgstr "Ogiltigt portnummer i %s" #: src/hed/libs/common/URL.cpp:453 #, c-format msgid "Unknown LDAP scope %s - using base" msgstr "Okänt LDAP-scope %s - använder base" #: src/hed/libs/common/URL.cpp:616 msgid "Attempt to assign relative path to URL - making it absolute" msgstr "Försöker tilldela relativ sökväg till URL - gör den absolut" #: src/hed/libs/common/URL.cpp:715 #, c-format msgid "URL option %s does not have format name=value" msgstr "URL-alternativ %s har inte formatet namn=värde" #: src/hed/libs/common/URL.cpp:1180 #, c-format msgid "urllist %s contains invalid URL: %s" msgstr "urllistan %s innehåller ogiltig URL: %s" #: src/hed/libs/common/URL.cpp:1185 #, c-format msgid "URL protocol is not urllist: %s" msgstr "URL-protokollet är inte urllist: %s" #: src/hed/libs/common/UserConfig.cpp:32 src/hed/libs/common/UserConfig.cpp:781 #: src/hed/libs/common/UserConfig.cpp:790 #: src/hed/libs/common/UserConfig.cpp:796 #: src/hed/libs/common/UserConfig.cpp:822 #: src/hed/libs/common/UserConfig.cpp:832 #: src/hed/libs/common/UserConfig.cpp:844 #: src/hed/libs/common/UserConfig.cpp:864 #, c-format msgid "Multiple %s attributes in configuration file (%s)" msgstr "Mer än ett %s-attribut i inställningsfil (%s)" #: src/hed/libs/common/UserConfig.cpp:132 #, c-format msgid "Wrong ownership of certificate file: %s" msgstr "Fel ägare för certifikatfil: %s" #: src/hed/libs/common/UserConfig.cpp:134 #, c-format msgid "Wrong permissions of certificate file: %s" msgstr "Fel åtkomsträttigheter för certifikatfil: %s" #: src/hed/libs/common/UserConfig.cpp:136 #, c-format msgid "Can not access certificate file: %s" msgstr "Kan inte komma åt certifikatfil: %s" #: src/hed/libs/common/UserConfig.cpp:143 #, c-format msgid "Wrong ownership of key file: %s" msgstr "Fel ägare för nyckelfil: %s" #: src/hed/libs/common/UserConfig.cpp:145 #, c-format msgid "Wrong permissions of key file: %s" msgstr "Fel åtkomsträttigheter för nyckelfil: %s" #: src/hed/libs/common/UserConfig.cpp:147 #, c-format msgid "Can not access key file: %s" msgstr "Kan inte komma åt nyckelfil: %s" #: src/hed/libs/common/UserConfig.cpp:154 #, c-format msgid "Wrong ownership of proxy file: %s" msgstr "Fel ägare för proxyfil: %s" #: src/hed/libs/common/UserConfig.cpp:156 #, c-format msgid "Wrong permissions of proxy file: %s" msgstr "Fel åtkomsträttigheter för proxyfil: %s" #: src/hed/libs/common/UserConfig.cpp:158 #, c-format msgid "Can not access proxy file: %s" msgstr "Kan inte komma åt proxyfil: %s" #: src/hed/libs/common/UserConfig.cpp:169 msgid "computing" msgstr "beräkning" #: src/hed/libs/common/UserConfig.cpp:171 msgid "index" msgstr "indexering" #: src/hed/libs/common/UserConfig.cpp:270 #: src/hed/libs/common/UserConfig.cpp:274 #: src/hed/libs/common/UserConfig.cpp:321 #: src/hed/libs/common/UserConfig.cpp:325 #, c-format msgid "System configuration file (%s) contains errors." msgstr "Systeminställningsfil (%s) innehåller fel." #: src/hed/libs/common/UserConfig.cpp:278 #: src/hed/libs/common/UserConfig.cpp:329 #, c-format msgid "System configuration file (%s or %s) does not exist." msgstr "Systeminställningsfil (%s eller %s) existerar inte." #: src/hed/libs/common/UserConfig.cpp:280 #: src/hed/libs/common/UserConfig.cpp:331 #, c-format msgid "System configuration file (%s) does not exist." 
msgstr "Systeminställningsfil (%s) existerar inte." #: src/hed/libs/common/UserConfig.cpp:286 #: src/hed/libs/common/UserConfig.cpp:298 #: src/hed/libs/common/UserConfig.cpp:337 #: src/hed/libs/common/UserConfig.cpp:349 #, c-format msgid "User configuration file (%s) contains errors." msgstr "Användarinställningsfil (%s) innehÃ¥ller fel." #: src/hed/libs/common/UserConfig.cpp:291 #: src/hed/libs/common/UserConfig.cpp:342 msgid "No configuration file could be loaded." msgstr "Ingen inställningfil kunde laddas in." #: src/hed/libs/common/UserConfig.cpp:294 #: src/hed/libs/common/UserConfig.cpp:345 #, c-format msgid "User configuration file (%s) does not exist or cannot be loaded." msgstr "" "Användarinställningsfil (%s) existerar inte eller kunde inte laddas in." #: src/hed/libs/common/UserConfig.cpp:406 #, c-format msgid "" "Unable to parse the specified verbosity (%s) to one of the allowed levels" msgstr "" "Kunde inte tolka den angivna debugnivÃ¥n (%s) till en av de tillÃ¥tna nivÃ¥erna" #: src/hed/libs/common/UserConfig.cpp:418 #, c-format msgid "" "Unsupported job list type '%s', using 'BDB'. Supported types are: BDB, " "SQLITE, XML." msgstr "" "Jobblisttyp '%s' stöds inte, använder 'BDB'. Typer som stöds är: BDB, " "SQLITE, XML." #: src/hed/libs/common/UserConfig.cpp:463 msgid "Loading OToken failed - ignoring its presence" msgstr "Inladdning av OToken misslyckades - ignorerar dess närvaro" #: src/hed/libs/common/UserConfig.cpp:604 #, c-format msgid "Certificate and key ('%s' and '%s') not found in any of the paths: %s" msgstr "" "Hittade inte certifikat och nyckel ('%s' och '%s') i nÃ¥gon av sökvägarna: %s" #: src/hed/libs/common/UserConfig.cpp:606 #, c-format msgid "" "If the proxy or certificate/key does exist, you can manually specify the " "locations via environment variables '%s'/'%s' or '%s', or the '%s'/'%s' or " "'%s' attributes in the client configuration file (e.g. '%s')" msgstr "" "Om proxy eller certifikat/nyckel existerar, kan du ange deras platser " "manuellt via miljövariablerna '%s'/'%s' eller '%s', eller attributen " "'%s'/'%s' eller '%s' i klientinställningsfilen (t.ex. '%s')" #: src/hed/libs/common/UserConfig.cpp:623 #: src/hed/libs/common/UserConfig.cpp:633 #, c-format msgid "" "Can not access CA certificate directory: %s. The certificates will not be " "verified." msgstr "" "Kan inte komma Ã¥t CA-certifikatkatalog: %s. 
Certifikaten kommer inte att " "verifieras." #: src/hed/libs/common/UserConfig.cpp:659 #, c-format msgid "" "Can not find CA certificates directory in default locations:\n" "~/.arc/certificates, ~/.globus/certificates,\n" "%s/etc/certificates, %s/etc/grid-security/certificates,\n" "%s/share/certificates, /etc/grid-security/certificates.\n" "The certificate will not be verified.\n" "If the CA certificates directory does exist, please manually specify the " "locations via env\n" "X509_CERT_DIR, or the cacertificatesdirectory item in client.conf\n" msgstr "" "Kan inte hitta CA-certifikatkatalogen på förvalda platser:\n" "~/.arc/certificates, ~/.globus/certificates,\n" "%s/etc/certificates, %s/etc/grid-security/certificates,\n" "%s/share/certificates, /etc/grid-security/certificates.\n" "Certifikaten kommer inte att verifieras.\n" "Om CA-certifikatkatalogen existerar, ange dess plats manuellt " "via\n" "miljövariabeln X509_CERT_DIR, eller attributet cacertificatesdirectory i " "client.conf\n" #: src/hed/libs/common/UserConfig.cpp:680 #, c-format msgid "Using proxy file: %s" msgstr "Använder proxyfil: %s" #: src/hed/libs/common/UserConfig.cpp:683 #, c-format msgid "Using certificate file: %s" msgstr "Använder certifikatfil: %s" #: src/hed/libs/common/UserConfig.cpp:684 #, c-format msgid "Using key file: %s" msgstr "Använder nyckelfil: %s" #: src/hed/libs/common/UserConfig.cpp:688 #, c-format msgid "Using CA certificate directory: %s" msgstr "Använder CA-certifikatkatalog: %s" #: src/hed/libs/common/UserConfig.cpp:692 msgid "Using OToken" msgstr "Använder OToken" #: src/hed/libs/common/UserConfig.cpp:705 #: src/hed/libs/common/UserConfig.cpp:711 #, c-format msgid "Can not access VOMSES file/directory: %s." msgstr "Kan inte komma åt VOMSES-fil/katalog: %s." #: src/hed/libs/common/UserConfig.cpp:717 #, c-format msgid "Can not access VOMS file/directory: %s." msgstr "Kan inte komma åt VOMS-fil/katalog: %s." #: src/hed/libs/common/UserConfig.cpp:731 msgid "" "Can not find voms service configuration file (vomses) in default locations: " "~/.arc/vomses, ~/.voms/vomses, $ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/" "grid-security/vomses, $PWD/vomses, /etc/vomses, /etc/grid-security/vomses" msgstr "" "Kan inte hitta voms-tjänst-inställningsfil (vomses) på förvalda platser: ~/." 
"arc/vomses, ~/.voms/vomses, $ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-" "security/vomses, $PWD/vomses, /etc/vomses, /etc/grid-security/vomses" #: src/hed/libs/common/UserConfig.cpp:744 #, c-format msgid "Loading configuration (%s)" msgstr "Laddar in inställningar (%s)" #: src/hed/libs/common/UserConfig.cpp:778 #, c-format msgid "" "The value of the timeout attribute in the configuration file (%s) was only " "partially parsed" msgstr "" "Värdet pÃ¥ timeout-attributet i inställningsfilen (%s) tolkades endast delvis" #: src/hed/libs/common/UserConfig.cpp:803 msgid "" "The brokerarguments attribute can only be used in conjunction with the " "brokername attribute" msgstr "" "brokerarguments-attributet kan endast användas i kombination med brokername-" "attributet" #: src/hed/libs/common/UserConfig.cpp:819 #, c-format msgid "" "The value of the keysize attribute in the configuration file (%s) was only " "partially parsed" msgstr "" "Värdet pÃ¥ keysize-attributet i inställningsfilen (%s) tolkades endast delvis" #: src/hed/libs/common/UserConfig.cpp:839 #, c-format msgid "" "Could not convert the slcs attribute value (%s) to an URL instance in " "configuration file (%s)" msgstr "" "Kunde inte konvertera slcs-attributvärdet (%s) till en URL-instans i " "inställningsfilen (%s)" #: src/hed/libs/common/UserConfig.cpp:885 #, c-format msgid "Specified overlay file (%s) does not exist." msgstr "Angiven överlagringsfil (%s) existerar inte." #: src/hed/libs/common/UserConfig.cpp:889 #, c-format msgid "" "Unknown attribute %s in common section of configuration file (%s), ignoring " "it" msgstr "" "Okänt attribut %s i common-sektionen i inställningsfilen (%s), ignorerar det" #: src/hed/libs/common/UserConfig.cpp:930 #, c-format msgid "Unknown section %s, ignoring it" msgstr "Okänd sektion %s, ignorerar den" #: src/hed/libs/common/UserConfig.cpp:934 #, c-format msgid "Configuration (%s) loaded" msgstr "Inställningar (%s) har laddats in" #: src/hed/libs/common/UserConfig.cpp:937 #, c-format msgid "Could not load configuration (%s)" msgstr "Kunde inte ladda in inställningar (%s)" #: src/hed/libs/common/UserConfig.cpp:1032 #, c-format msgid "UserConfiguration saved to file (%s)" msgstr "Användarinställningar sparade till fil (%s)" #: src/hed/libs/common/UserConfig.cpp:1045 #, c-format msgid "Unable to create %s directory." msgstr "Kunde inte skapa %s katalog." #: src/hed/libs/common/UserConfig.cpp:1054 #, c-format msgid "Configuration example file created (%s)" msgstr "Exempel-inställningsfil skapades (%s)" #: src/hed/libs/common/UserConfig.cpp:1056 #, c-format msgid "Unable to copy example configuration from existing configuration (%s)" msgstr "" "Kunde inte kopiera exempelinställningar frÃ¥n existerande inställningar (%s)" #: src/hed/libs/common/UserConfig.cpp:1061 #, c-format msgid "Cannot copy example configuration (%s), it is not a regular file" msgstr "Kan inte kopiera exempelinställningar (%s), det är inte en vanlig fil" #: src/hed/libs/common/UserConfig.cpp:1066 #, c-format msgid "Example configuration (%s) not created." msgstr "Exempelinställningar (%s) skapades inte." #: src/hed/libs/common/UserConfig.cpp:1071 #, c-format msgid "The default configuration file (%s) is not a regular file." msgstr "Den förvalda inställningsfilen (%s) är inte en vanlig fil." 
#: src/hed/libs/common/UserConfig.cpp:1089 #, c-format msgid "%s directory created" msgstr "%s-katalog skapad" #: src/hed/libs/common/UserConfig.cpp:1091 #: src/hed/libs/common/UserConfig.cpp:1130 src/hed/libs/data/DataMover.cpp:679 #, c-format msgid "Failed to create directory %s" msgstr "Misslyckades med att skapa katalog %s" #: src/hed/libs/common/test/LoggerTest.cpp:58 msgid "This VERBOSE message should not be seen" msgstr "Detta VERBOSE-meddelande borde inte ses" #: src/hed/libs/common/test/LoggerTest.cpp:62 msgid "This INFO message should be seen" msgstr "Detta INFO-meddelande borde ses" #: src/hed/libs/common/test/LoggerTest.cpp:73 msgid "This VERBOSE message should now be seen" msgstr "Detta VERBOSE-meddelande borde nu ses" #: src/hed/libs/common/test/LoggerTest.cpp:79 msgid "This INFO message should also be seen" msgstr "Detta INFO-meddelande borde också ses" #: src/hed/libs/common/test/LoggerTest.cpp:93 msgid "This message goes to initial destination" msgstr "Detta meddelande går till ursprungliga destinationen" #: src/hed/libs/common/test/LoggerTest.cpp:108 msgid "This message goes to per-thread destination" msgstr "Detta meddelande går till per-tråd-destinationen" #: src/hed/libs/communication/ClientSAML2SSO.cpp:80 msgid "Request failed: No response from SPService" msgstr "Begäran misslyckades: Inget svar från SPService" #: src/hed/libs/communication/ClientSAML2SSO.cpp:84 #: src/hed/libs/communication/ClientSAML2SSO.cpp:137 msgid "Request failed: response from SPService is not as expected" msgstr "Begäran misslyckades: svar från SPService är inte som förväntat" #: src/hed/libs/communication/ClientSAML2SSO.cpp:92 #, c-format msgid "Authentication Request URL: %s" msgstr "Autentiseringsbegäran-URL: %s" #: src/hed/libs/communication/ClientSAML2SSO.cpp:133 msgid "Request failed: No response from IdP" msgstr "Begäran misslyckades: Inget svar från IdP" #: src/hed/libs/communication/ClientSAML2SSO.cpp:184 msgid "Request failed: No response from IdP when doing redirecting" msgstr "Begäran misslyckades: Inget svar från IdP när omdirigering görs" #: src/hed/libs/communication/ClientSAML2SSO.cpp:188 msgid "" "Request failed: response from IdP is not as expected when doing redirecting" msgstr "" "Begäran misslyckades: svar från IdP är inte som förväntat när omdirigering " "görs" #: src/hed/libs/communication/ClientSAML2SSO.cpp:245 msgid "Request failed: No response from IdP when doing authentication" msgstr "Begäran misslyckades: Inget svar från IdP när autentisering görs" #: src/hed/libs/communication/ClientSAML2SSO.cpp:249 msgid "" "Request failed: response from IdP is not as expected when doing " "authentication" msgstr "" "Begäran misslyckades: svar från IdP är inte som förväntat när autentisering " "görs" #: src/hed/libs/communication/ClientSAML2SSO.cpp:294 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:300 msgid "Succeeded to verify the signature under " msgstr "Lyckades verifiera signaturen under " #: src/hed/libs/communication/ClientSAML2SSO.cpp:296 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:303 msgid "Failed to verify the signature under " msgstr "Misslyckades med att verifiera signaturen under " #: src/hed/libs/communication/ClientSAML2SSO.cpp:310 msgid "" "Request failed: No response from SP Service when sending SAML assertion to SP" msgstr "" "Begäran misslyckades: Inget svar från SP-tjänsten när SAML-assertion sänds " "till SP" #: src/hed/libs/communication/ClientSAML2SSO.cpp:314 msgid "" "Request failed: response from SP Service is not as expected when sending " "SAML 
assertion to SP" msgstr "" "Begäran misslyckades: svar frÃ¥n SP-tjänsten är inte som förväntat när SAML-" "assertion sänds till SP" #: src/hed/libs/communication/ClientSAML2SSO.cpp:325 #, c-format msgid "IdP return some error message: %s" msgstr "IdP returnerar ett felmeddelande: %s" #: src/hed/libs/communication/ClientSAML2SSO.cpp:353 #: src/hed/libs/communication/ClientSAML2SSO.cpp:398 msgid "SAML2SSO process failed" msgstr "SAML2SSO-process misslyckades" #: src/hed/libs/communication/ClientX509Delegation.cpp:54 msgid "Creating delegation credential to ARC delegation service" msgstr "Skapar delegeringsreferens till ARC delegeringstjänst" #: src/hed/libs/communication/ClientX509Delegation.cpp:64 #: src/hed/libs/communication/ClientX509Delegation.cpp:267 msgid "DelegateCredentialsInit failed" msgstr "DelegateCredentialsInit misslyckades" #: src/hed/libs/communication/ClientX509Delegation.cpp:68 #: src/hed/libs/communication/ClientX509Delegation.cpp:122 #: src/hed/libs/communication/ClientX509Delegation.cpp:157 #: src/hed/libs/communication/ClientX509Delegation.cpp:212 #: src/hed/libs/communication/ClientX509Delegation.cpp:271 msgid "There is no SOAP response" msgstr "Det finns inget SOAP-svar" #: src/hed/libs/communication/ClientX509Delegation.cpp:73 msgid "There is no X509 request in the response" msgstr "Det finns ingen X509-begäran i svaret" #: src/hed/libs/communication/ClientX509Delegation.cpp:78 msgid "There is no Format request in the response" msgstr "Det finns ingen Format-begäran i svaret" #: src/hed/libs/communication/ClientX509Delegation.cpp:86 msgid "There is no Id or X509 request value in the response" msgstr "Det finns inget Id- eller X509-begäran-värde i svaret" #: src/hed/libs/communication/ClientX509Delegation.cpp:99 #: src/hed/libs/communication/ClientX509Delegation.cpp:187 msgid "DelegateProxy failed" msgstr "DelegateProxy misslyckades" #: src/hed/libs/communication/ClientX509Delegation.cpp:118 msgid "UpdateCredentials failed" msgstr "UpdateCredentials misslyckades" #: src/hed/libs/communication/ClientX509Delegation.cpp:126 msgid "There is no UpdateCredentialsResponse in response" msgstr "Det finns inget UpdateCredentialsResponse in svaret" #: src/hed/libs/communication/ClientX509Delegation.cpp:134 #: src/hed/libs/communication/ClientX509Delegation.cpp:162 #: src/hed/libs/communication/ClientX509Delegation.cpp:217 #: src/hed/libs/communication/ClientX509Delegation.cpp:302 msgid "There is no SOAP connection chain configured" msgstr "Det finns ingen SOAP-förbindelse-kedja i inställningarna" #: src/hed/libs/communication/ClientX509Delegation.cpp:140 msgid "Creating delegation to CREAM delegation service" msgstr "Skapar delegering till CREAM delegeringstjänst" #: src/hed/libs/communication/ClientX509Delegation.cpp:153 msgid "Delegation getProxyReq request failed" msgstr "Delegering-getProxyReq-begäran misslyckades" #: src/hed/libs/communication/ClientX509Delegation.cpp:173 msgid "Creating delegation to CREAM delegation service failed" msgstr "Att skapa delegering till CREAM delegeringstjänst misslyckades" #: src/hed/libs/communication/ClientX509Delegation.cpp:208 msgid "Delegation putProxy request failed" msgstr "Delegering-putProxy-begäran misslyckades" #: src/hed/libs/communication/ClientX509Delegation.cpp:222 msgid "Creating delegation to CREAM delegation failed" msgstr "Att skapa delegering till CREAM delegeringstjänst misslyckades" #: src/hed/libs/communication/ClientX509Delegation.cpp:237 msgid "Getting delegation credential from ARC delegation service" msgstr "Hämtar 
delegeringsreferens från ARC delegeringstjänst" #: src/hed/libs/communication/ClientX509Delegation.cpp:276 msgid "There is no Delegated X509 token in the response" msgstr "Det finns inget delegerat X509-token i svaret" #: src/hed/libs/communication/ClientX509Delegation.cpp:281 msgid "There is no Format delegated token in the response" msgstr "Det finns inget delegerat Format-token i svaret" #: src/hed/libs/communication/ClientX509Delegation.cpp:289 msgid "There is no Id or X509 token value in the response" msgstr "Det finns inget Id- eller X509-token-värde i svaret" #: src/hed/libs/communication/ClientX509Delegation.cpp:298 #, c-format msgid "" "Get delegated credential from delegation service: \n" " %s" msgstr "" "Hämta delegerad referens från delegeringstjänst:\n" " %s" #: src/hed/libs/compute/Broker.cpp:62 #, c-format msgid "Performing matchmaking against target (%s)." msgstr "Utför matchmaking mot target (%s)." #: src/hed/libs/compute/Broker.cpp:72 #, c-format msgid "Matchmaking, ExecutionTarget: %s matches job description" msgstr "Matchmaking, ExecutionTarget: %s matchar jobbeskrivning" #: src/hed/libs/compute/Broker.cpp:145 #, c-format msgid "" "The CA issuer (%s) of the credentials (%s) is not trusted by the target (%s)." msgstr "" "CA-utfärdaren (%s) för referenserna (%s) är inte betrodd av target (%s)." #: src/hed/libs/compute/Broker.cpp:153 src/hed/libs/compute/Broker.cpp:171 #, c-format msgid "ComputingShareName of ExecutionTarget (%s) is not defined" msgstr "ComputingShareName för ExecutionTarget (%s) är inte definierat" #: src/hed/libs/compute/Broker.cpp:157 src/hed/libs/compute/Broker.cpp:162 #, c-format msgid "ComputingShare (%s) explicitly rejected" msgstr "ComputingShare (%s) explicit avvisad" #: src/hed/libs/compute/Broker.cpp:175 src/hed/libs/compute/Broker.cpp:180 #, c-format msgid "ComputingShare (%s) does not match selected queue (%s)" msgstr "ComputingShare (%s) matchar inte vald kö (%s)" #: src/hed/libs/compute/Broker.cpp:189 #, c-format msgid "" "ProcessingStartTime (%s) specified in job description is inside the targets " "downtime period [ %s - %s ]." msgstr "" "ProcessingStartTime (%s) angiven i jobbeskrivning ligger inom targets " "driftstoppsperiod [ %s - %s ]." #: src/hed/libs/compute/Broker.cpp:194 #, c-format msgid "The downtime of the target (%s) is not published. Keeping target." msgstr "Targets (%s) driftstopp har inte publicerats. Behåller target." #: src/hed/libs/compute/Broker.cpp:200 #, c-format msgid "HealthState of ExecutionTarget (%s) is not OK (%s)" msgstr "HealthState för ExecutionTarget (%s) är inte OK (%s)" #: src/hed/libs/compute/Broker.cpp:205 #, c-format msgid "Matchmaking, ExecutionTarget: %s, HealthState is not defined" msgstr "Matchmaking, ExecutionTarget: %s, HealthState är inte definierat" #: src/hed/libs/compute/Broker.cpp:212 #, c-format msgid "" "Matchmaking, Computing endpoint requirement not satisfied. ExecutionTarget: " "%s" msgstr "" "Matchmaking, Beräkningsändpunktsvillkor inte uppfyllt. ExecutionTarget: %s" #: src/hed/libs/compute/Broker.cpp:217 #, c-format msgid "Matchmaking, ExecutionTarget: %s, ImplementationName is not defined" msgstr "" "Matchmaking, ExecutionTarget: %s, ImplementationName är inte definierat" #: src/hed/libs/compute/Broker.cpp:243 #, c-format msgid "" "Matchmaking, %s (%d) is %s than %s (%d) published by the ExecutionTarget." msgstr "Matchmaking, %s (%d) är %s än %s (%d) publicerat av ExecutionTarget."
#: src/hed/libs/compute/Broker.cpp:272 #, c-format msgid "" "Matchmaking, The %s scaled %s (%d) is %s than the %s (%d) published by the " "ExecutionTarget." msgstr "" "Matchmaking, %s skalad %s (%d) är %s än %s (%d) publicerat av " "ExecutionTarget." #: src/hed/libs/compute/Broker.cpp:284 #, c-format msgid "Matchmaking, Benchmark %s is not published by the ExecutionTarget." msgstr "Matchmaking, Benchmark %s är inte publicerat av ExecutionTarget." #: src/hed/libs/compute/Broker.cpp:299 #, c-format msgid "" "Matchmaking, MaxTotalCPUTime problem, ExecutionTarget: %d (MaxTotalCPUTime), " "JobDescription: %d (TotalCPUTime)" msgstr "" "Matchmaking, MaxTotalCPUTime-problem, ExecutionTarget: %d (MaxTotalCPUTime), " "JobDescription: %d (TotalCPUTime)" #: src/hed/libs/compute/Broker.cpp:306 #, c-format msgid "" "Matchmaking, MaxCPUTime problem, ExecutionTarget: %d (MaxCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" msgstr "" "Matchmaking, MaxCPUTime-problem, ExecutionTarget: %d (MaxCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" #: src/hed/libs/compute/Broker.cpp:311 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxTotalCPUTime or MaxCPUTime not " "defined, assuming no CPU time limit" msgstr "" "Matchmaking, ExecutionTarget: %s, varken MaxTotalCPUTime eller MaxCPUTime " "är definierad, antar ingen CPU-tidsgräns" #: src/hed/libs/compute/Broker.cpp:317 #, c-format msgid "" "Matchmaking, MinCPUTime problem, ExecutionTarget: %d (MinCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" msgstr "" "Matchmaking, MinCPUTime-problem, ExecutionTarget: %d (MinCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" #: src/hed/libs/compute/Broker.cpp:322 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MinCPUTime not defined, assuming no CPU " "time limit" msgstr "" "Matchmaking, ExecutionTarget: %s, MinCPUTime inte definierat, antar ingen " "CPU-tidsgräns" #: src/hed/libs/compute/Broker.cpp:330 #, c-format msgid "" "Matchmaking, MainMemorySize problem, ExecutionTarget: %d (MainMemorySize), " "JobDescription: %d (IndividualPhysicalMemory)" msgstr "" "Matchmaking, MainMemorySize-problem, ExecutionTarget: %d (MainMemorySize), " "JobDescription: %d (IndividualPhysicalMemory)" #: src/hed/libs/compute/Broker.cpp:336 #, c-format msgid "" "Matchmaking, MaxMainMemory problem, ExecutionTarget: %d (MaxMainMemory), " "JobDescription: %d (IndividualPhysicalMemory)" msgstr "" "Matchmaking, MaxMainMemory-problem, ExecutionTarget: %d (MaxMainMemory), " "JobDescription: %d (IndividualPhysicalMemory)" #: src/hed/libs/compute/Broker.cpp:341 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxMainMemory and MainMemorySize are not " "defined" msgstr "" "Matchmaking, ExecutionTarget: %s, MaxMainMemory och MainMemorySize är inte " "definierade" #: src/hed/libs/compute/Broker.cpp:349 #, c-format msgid "" "Matchmaking, MaxVirtualMemory problem, ExecutionTarget: %d " "(MaxVirtualMemory), JobDescription: %d (IndividualVirtualMemory)" msgstr "" "Matchmaking, MaxVirtualMemory-problem, ExecutionTarget: %d " "(MaxVirtualMemory), JobDescription: %d (IndividualVirtualMemory)" #: src/hed/libs/compute/Broker.cpp:354 #, c-format msgid "Matchmaking, ExecutionTarget: %s, MaxVirtualMemory is not defined" msgstr "Matchmaking, ExecutionTarget: %s, MaxVirtualMemory är inte definierat" #: src/hed/libs/compute/Broker.cpp:362 #, c-format msgid "" "Matchmaking, Platform problem, ExecutionTarget: %s (Platform) " "JobDescription: %s (Platform)" msgstr "" "Matchmaking, Platform-problem, 
ExecutionTarget: %s (Platform) " "JobDescription: %s (Platform)" #: src/hed/libs/compute/Broker.cpp:367 #, c-format msgid "Matchmaking, ExecutionTarget: %s, Platform is not defined" msgstr "Matchmaking, ExecutionTarget: %s, Platform är inte definierad" #: src/hed/libs/compute/Broker.cpp:375 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, OperatingSystem requirements not satisfied" msgstr "" "Matchmaking, ExecutionTarget: %s, OperatingSystem-villkor är inte uppfyllt" #: src/hed/libs/compute/Broker.cpp:380 #, c-format msgid "Matchmaking, ExecutionTarget: %s, OperatingSystem is not defined" msgstr "Matchmaking, ExecutionTarget: %s, OperatingSystem är inte definierat" #: src/hed/libs/compute/Broker.cpp:388 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, RunTimeEnvironment requirements not " "satisfied" msgstr "" "Matchmaking, ExecutionTarget: %s, RunTimeEnvironment-villkor är inte " "uppfyllt" #: src/hed/libs/compute/Broker.cpp:393 #, c-format msgid "Matchmaking, ExecutionTarget: %s, ApplicationEnvironments not defined" msgstr "" "Matchmaking, ExecutionTarget: %s, ApplicationEnvironments är inte definierade" #: src/hed/libs/compute/Broker.cpp:402 #, c-format msgid "" "Matchmaking, NetworkInfo demand not fulfilled, ExecutionTarget do not " "support %s, specified in the JobDescription." msgstr "" "Matchmaking, NetworkInfo-begäran inte uppfylld, ExecutionTarget stöder inte " "%s, angiven i jobbeskrivning." #: src/hed/libs/compute/Broker.cpp:406 #, c-format msgid "Matchmaking, ExecutionTarget: %s, NetworkInfo is not defined" msgstr "Matchmaking, ExecutionTarget: %s, NetworkInfo är inte definierad" #: src/hed/libs/compute/Broker.cpp:414 #, c-format msgid "" "Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (SessionDiskSpace)" msgstr "" "Matchmaking, MaxDiskSpace-problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (SessionDiskSpace)" #: src/hed/libs/compute/Broker.cpp:421 #, c-format msgid "" "Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (SessionDiskSpace)" msgstr "" "Matchmaking, WorkingAreaFree-problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (SessionDiskSpace)" #: src/hed/libs/compute/Broker.cpp:427 src/hed/libs/compute/Broker.cpp:448 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxDiskSpace and WorkingAreaFree are not " "defined" msgstr "" "Matchmaking, ExecutionTarget: %s, MaxDiskSpace och WorkingAreaFree är inte " "definierade" #: src/hed/libs/compute/Broker.cpp:435 #, c-format msgid "" "Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (DiskSpace)" msgstr "" "Matchmaking, MaxDiskSpace-problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (DiskSpace)" #: src/hed/libs/compute/Broker.cpp:442 #, c-format msgid "" "Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (DiskSpace)" msgstr "" "Matchmaking, WorkingAreaFree-problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (DiskSpace)" #: src/hed/libs/compute/Broker.cpp:456 #, c-format msgid "" "Matchmaking, CacheTotal problem, ExecutionTarget: %d MB (CacheTotal); " "JobDescription: %d MB (CacheDiskSpace)" msgstr "" "Matchmaking, CacheTotal-problem, ExecutionTarget: %d MB (CacheTotal); " "JobDescription: %d MB (CacheDiskSpace)" #: src/hed/libs/compute/Broker.cpp:461 #, c-format msgid "Matchmaking, ExecutionTarget: %s, 
CacheTotal is not defined" msgstr "Matchmaking, ExecutionTarget: %s, CacheTotal är inte definierat" #: src/hed/libs/compute/Broker.cpp:469 #, c-format msgid "" "Matchmaking, TotalSlots problem, ExecutionTarget: %d (TotalSlots) " "JobDescription: %d (NumberOfProcesses)" msgstr "" "Matchmaking, TotalSlots-problem, ExecutionTarget: %d (TotalSlots) " "JobDescription: %d (NumberOfProcesses)" #: src/hed/libs/compute/Broker.cpp:475 #, c-format msgid "" "Matchmaking, MaxSlotsPerJob problem, ExecutionTarget: %d (MaxSlotsPerJob) " "JobDescription: %d (NumberOfProcesses)" msgstr "" "Matchmaking, MaxSlotsPerJob-problem, ExecutionTarget: %d (MaxSlotsPerJob) " "JobDescription: %d (NumberOfProcesses)" #: src/hed/libs/compute/Broker.cpp:481 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, TotalSlots and MaxSlotsPerJob are not " "defined" msgstr "" "Matchmaking, ExecutionTarget: %s, TotalSlots och MaxSlotsPerJob är inte " "definierade" #: src/hed/libs/compute/Broker.cpp:489 #, c-format msgid "" "Matchmaking, WorkingAreaLifeTime problem, ExecutionTarget: %s " "(WorkingAreaLifeTime) JobDescription: %s (SessionLifeTime)" msgstr "" "Matchmaking, WorkingAreaLifeTime-problem, ExecutionTarget: %s " "(WorkingAreaLifeTime) JobDescription: %s (SessionLifeTime)" #: src/hed/libs/compute/Broker.cpp:494 #, c-format msgid "Matchmaking, ExecutionTarget: %s, WorkingAreaLifeTime is not defined" msgstr "" "Matchmaking, ExecutionTarget: %s, WorkingAreaLifeTime är inte definierad" #: src/hed/libs/compute/Broker.cpp:502 #, c-format msgid "" "Matchmaking, ConnectivityIn problem, ExecutionTarget: %s (ConnectivityIn) " "JobDescription: %s (InBound)" msgstr "" "Matchmaking, ConnectivityIn-problem, ExecutionTarget: %s (ConnectivityIn) " "JobDescription: %s (InBound)" #: src/hed/libs/compute/Broker.cpp:509 #, c-format msgid "" "Matchmaking, ConnectivityOut problem, ExecutionTarget: %s (ConnectivityOut) " "JobDescription: %s (OutBound)" msgstr "" "Matchmaking, ConnectivityOut-problem, ExecutionTarget: %s (ConnectivityOut) " "JobDescription: %s (OutBound)" #: src/hed/libs/compute/Broker.cpp:532 msgid "Unable to sort added jobs. The BrokerPlugin plugin has not been loaded." msgstr "" "Kan inte sortera tillagda jobb. BrokerPlugin-pluginen har inte laddats in." #: src/hed/libs/compute/Broker.cpp:549 msgid "Unable to match target, marking it as not matching. Broker not valid." msgstr "" "Kan inte matcha target, markerar det som inte matchande. Mäklare inte giltig." #: src/hed/libs/compute/Broker.cpp:585 msgid "Unable to sort ExecutionTarget objects - Invalid Broker object." msgstr "Kan inte sortera ExecutionTarget-objekt - Ogiltigt Broker-objekt." #: src/hed/libs/compute/Broker.cpp:609 msgid "" "Unable to register job submission. Can't get JobDescription object from " "Broker, Broker is invalid." msgstr "" "Kan inte registrera jobbinsändning. Kan inte få JobDescription-objekt från " "mäklare, Mäklare är ogiltig." #: src/hed/libs/compute/BrokerPlugin.cpp:89 #, c-format msgid "Broker plugin \"%s\" not found." msgstr "Hittade inte mäklar-plugin \"%s\"."
#: src/hed/libs/compute/BrokerPlugin.cpp:96 #, c-format msgid "Unable to load BrokerPlugin (%s)" msgstr "Kunde inte ladda in BrokerPlugin (%s)" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:27 #, c-format msgid "Uniq is replacing service coming from %s with service coming from %s" msgstr "Uniq byter ut tjänst som kommer från %s mot tjänst som kommer från %s" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:31 #, c-format msgid "Uniq is ignoring service coming from %s" msgstr "Uniq ignorerar tjänst som kommer från %s" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:38 #, c-format msgid "Uniq is adding service coming from %s" msgstr "Uniq lägger till tjänst som kommer från %s" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:61 #, c-format msgid "Adding endpoint (%s) to TargetInformationRetriever" msgstr "Lägger till ändpunkt (%s) till TargetInformationRetriever" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:64 #, c-format msgid "Adding endpoint (%s) to ServiceEndpointRetriever" msgstr "Lägger till ändpunkt (%s) till ServiceEndpointRetriever" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:67 #, c-format msgid "" "Adding endpoint (%s) to both ServiceEndpointRetriever and " "TargetInformationRetriever" msgstr "" "Lägger till ändpunkt (%s) till både ServiceEndpointRetriever och " "TargetInformationRetriever" #: src/hed/libs/compute/EntityRetriever.cpp:42 #, c-format msgid "The plugin %s does not support any interfaces, skipping it." msgstr "Pluginen %s stöder inte något gränssnitt, hoppar över den." #: src/hed/libs/compute/EntityRetriever.cpp:47 #, c-format msgid "" "The first supported interface of the plugin %s is an empty string, skipping " "the plugin." msgstr "" "Första gränssnittet som stöds av pluginen %s är en tom sträng, hoppar över " "pluginen." #: src/hed/libs/compute/EntityRetriever.cpp:95 #, c-format msgid "Interface on endpoint (%s) %s." msgstr "Gränssnitt på ändpunkt (%s) %s." #: src/hed/libs/compute/EntityRetriever.cpp:101 #: src/hed/libs/compute/EntityRetriever.cpp:133 #: src/hed/libs/compute/EntityRetriever.cpp:425 #, c-format msgid "Ignoring endpoint (%s), it is already registered in retriever." msgstr "Ignorerar ändpunkt (%s), den är redan registrerad i insamlare." #: src/hed/libs/compute/EntityRetriever.cpp:110 #, c-format msgid "Service Loop: Endpoint %s" msgstr "Tjänsteloop: Ändpunkt %s" #: src/hed/libs/compute/EntityRetriever.cpp:112 #, c-format msgid " This endpoint (%s) is STARTED or SUCCESSFUL" msgstr " Denna ändpunkt (%s) är STARTED eller SUCCESSFUL" #: src/hed/libs/compute/EntityRetriever.cpp:115 #, c-format msgid "" "Suspending querying of endpoint (%s) since the service at the endpoint is " "already being queried, or has been queried." msgstr "" "Suspenderar frågandet av ändpunkt (%s) eftersom tjänsten på ändpunkten redan " "håller på att frågas eller har frågats."
#: src/hed/libs/compute/EntityRetriever.cpp:122 #: src/hed/libs/compute/EntityRetriever.cpp:237 #, c-format msgid " Status of endpoint (%s) is %s" msgstr " Status för ändpunkt (%s) är %s" #: src/hed/libs/compute/EntityRetriever.cpp:126 #, c-format msgid "Setting status (STARTED) for endpoint: %s" msgstr "Sätter status (STARTED) för ändpunkt: %s" #: src/hed/libs/compute/EntityRetriever.cpp:145 #, c-format msgid "Starting thread to query the endpoint on %s" msgstr "Startar tråd för att fråga ändpunkten på %s" #: src/hed/libs/compute/EntityRetriever.cpp:147 #: src/hed/libs/compute/EntityRetriever.cpp:289 #, c-format msgid "Failed to start querying the endpoint on %s" msgstr "Misslyckades med att börja fråga ändpunkten på %s" #: src/hed/libs/compute/EntityRetriever.cpp:174 #, c-format msgid "Found a registry, will query it recursively: %s" msgstr "Hittade ett register, kommer att fråga det rekursivt: %s" #: src/hed/libs/compute/EntityRetriever.cpp:211 #, c-format msgid "Setting status (%s) for endpoint: %s" msgstr "Sätter status (%s) för ändpunkt: %s" #: src/hed/libs/compute/EntityRetriever.cpp:231 msgid "Checking for suspended endpoints which should be started." msgstr "Letar efter suspenderade ändpunkter som ska startas." #: src/hed/libs/compute/EntityRetriever.cpp:241 #, c-format msgid "Found started or successful endpoint (%s)" msgstr "Hittade STARTED eller SUCCESSFUL ändpunkt (%s)" #: src/hed/libs/compute/EntityRetriever.cpp:253 #, c-format msgid "Found suspended endpoint (%s)" msgstr "Hittade suspenderad ändpunkt (%s)" #: src/hed/libs/compute/EntityRetriever.cpp:264 #, c-format msgid "Trying to start suspended endpoint (%s)" msgstr "Försöker starta suspenderad ändpunkt (%s)" #: src/hed/libs/compute/EntityRetriever.cpp:284 #, c-format msgid "" "Starting querying of suspended endpoint (%s) - no other endpoints for this " "service is being queried or has been queried successfully." msgstr "" "Börjar fråga suspenderad ändpunkt (%s) - ingen annan ändpunkt för denna " "tjänst håller på att frågas eller har blivit frågad framgångsrikt." #: src/hed/libs/compute/EntityRetriever.cpp:351 #, c-format msgid "Calling plugin %s to query endpoint on %s" msgstr "Anropar plugin %s för att fråga ändpunkt på %s" #: src/hed/libs/compute/EntityRetriever.cpp:373 #, c-format msgid "" "The interface of this endpoint (%s) is unspecified, will try all possible " "plugins" msgstr "" "Gränssnittet för denna ändpunkt (%s) är ej angivet, kommer att prova alla " "möjliga pluginer" #: src/hed/libs/compute/EntityRetriever.cpp:389 #, c-format msgid "Problem loading plugin %s, skipping it." msgstr "Problem med att ladda in plugin %s, hoppar över den."
#: src/hed/libs/compute/EntityRetriever.cpp:393 #, c-format msgid "The endpoint (%s) is not supported by this plugin (%s)" msgstr "Ändpunkten (%s) stöds inte av denna plugin (%s)" #: src/hed/libs/compute/EntityRetriever.cpp:414 #, c-format msgid "" "New endpoint is created (%s) from the one with the unspecified interface (%s)" msgstr "Ny ändpunkt skapas (%s) från den med ej angivet gränssnitt (%s)" #: src/hed/libs/compute/EntityRetriever.cpp:432 #, c-format msgid "Starting sub-thread to query the endpoint on %s" msgstr "Startar under-tråd för att fråga ändpunkten på %s" #: src/hed/libs/compute/EntityRetriever.cpp:434 #, c-format msgid "" "Failed to start querying the endpoint on %s (unable to create sub-thread)" msgstr "" "Misslyckades med att börja fråga ändpunkten på %s (kunde inte skapa under-" "tråd)" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:32 #, c-format msgid "Found %s %s (it was loaded already)" msgstr "Hittade %s %s (den hade redan laddats in)" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:41 #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:49 #: src/hed/libs/compute/JobControllerPlugin.cpp:100 #: src/hed/libs/compute/JobControllerPlugin.cpp:109 #: src/hed/libs/compute/SubmitterPlugin.cpp:171 #: src/hed/libs/compute/SubmitterPlugin.cpp:181 #, c-format msgid "" "Unable to locate the \"%s\" plugin. Please refer to installation " "instructions and check if package providing support for \"%s\" plugin is " "installed" msgstr "" "Kunde inte hitta \"%s\"-pluginen. Referera till installationsinstruktionerna " "och kontrollera om paketet som tillhandahåller stöd för \"%s\"-pluginen är " "installerat" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:42 #, c-format msgid "%s plugin \"%s\" not found." msgstr "Hittade inte %s-plugin \"%s\"." #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:50 #, c-format msgid "%s %s could not be created." msgstr "%s %s kunde inte skapas." #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:55 #, c-format msgid "Loaded %s %s" msgstr "Laddade in %s %s" #: src/hed/libs/compute/ExecutionTarget.cpp:51 #, c-format msgid "" "Skipping ComputingEndpoint '%s', because it has '%s' interface instead of " "the requested '%s'." msgstr "" "Hoppar över beräkningsändpunkt '%s', eftersom den har '%s'-gränssnitt i " "stället för det begärda '%s'."
#: src/hed/libs/compute/ExecutionTarget.cpp:132 #, c-format msgid "" "Computing endpoint %s (type %s) added to the list for submission brokering" msgstr "" "Beräkningsändpunkt %s (typ %s) lagd till i listan för insändningsmäkling" #: src/hed/libs/compute/ExecutionTarget.cpp:239 #, c-format msgid "Address: %s" msgstr "Adress: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:240 #, c-format msgid "Place: %s" msgstr "Ort: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:241 #, c-format msgid "Country: %s" msgstr "Land: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:242 #, c-format msgid "Postal code: %s" msgstr "Postnummer: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:243 #, c-format msgid "Latitude: %f" msgstr "Latitud: %f" #: src/hed/libs/compute/ExecutionTarget.cpp:244 #, c-format msgid "Longitude: %f" msgstr "Longitud: %f" #: src/hed/libs/compute/ExecutionTarget.cpp:250 #, c-format msgid "Owner: %s" msgstr "Ägare: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:257 #, c-format msgid "ID: %s" msgstr "ID: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:258 #, c-format msgid "Type: %s" msgstr "Typ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:263 #, c-format msgid "URL: %s" msgstr "URL: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:264 #, c-format msgid "Interface: %s" msgstr "Gränssnitt: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:266 msgid "Interface versions:" msgstr "Gränssnittsversioner:" #: src/hed/libs/compute/ExecutionTarget.cpp:271 msgid "Interface extensions:" msgstr "Gränssnittstillägg:" #: src/hed/libs/compute/ExecutionTarget.cpp:276 msgid "Capabilities:" msgstr "Förmågor:" #: src/hed/libs/compute/ExecutionTarget.cpp:280 #, c-format msgid "Technology: %s" msgstr "Teknologi: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:282 msgid "Supported Profiles:" msgstr "Profiler som stöds:" #: src/hed/libs/compute/ExecutionTarget.cpp:286 #, c-format msgid "Implementor: %s" msgstr "Implementerare: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:287 #, c-format msgid "Implementation name: %s" msgstr "Implementeringsnamn: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:288 #, c-format msgid "Quality level: %s" msgstr "Kvalitetsnivå: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:289 #, c-format msgid "Health state: %s" msgstr "Hälsotillstånd: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:290 #, c-format msgid "Health state info: %s" msgstr "Hälsotillståndsinfo: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:291 #, c-format msgid "Serving state: %s" msgstr "Betjäningstillstånd: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:292 #, c-format msgid "Issuer CA: %s" msgstr "Utfärdar-CA: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:294 msgid "Trusted CAs:" msgstr "Betrodda CA:" #: src/hed/libs/compute/ExecutionTarget.cpp:298 #, c-format msgid "Downtime starts: %s" msgstr "Driftstopp börjar: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:299 #, c-format msgid "Downtime ends: %s" msgstr "Driftstopp slutar: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:300 #, c-format msgid "Staging: %s" msgstr "Laddar ned/upp: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:302 msgid "Job descriptions:" msgstr "Jobbeskrivningar:" #: src/hed/libs/compute/ExecutionTarget.cpp:314 #, c-format msgid "Scheme: %s" msgstr "Schema: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:317 #, c-format msgid "Rule: %s" msgstr "Regel: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:329 #, c-format msgid "Mapping queue: %s" msgstr "Mappar till kö: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:330 #, c-format
msgid "Max wall-time: %s" msgstr "Största klocktid: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:331 #, c-format msgid "Max total wall-time: %s" msgstr "Största totala klocktid: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:332 #, c-format msgid "Min wall-time: %s" msgstr "Minsta klocktid: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:333 #, c-format msgid "Default wall-time: %s" msgstr "Förvald klocktid: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:334 #, c-format msgid "Max CPU time: %s" msgstr "Största CPU-tid: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:335 #, c-format msgid "Min CPU time: %s" msgstr "Minsta CPU-tid: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:336 #, c-format msgid "Default CPU time: %s" msgstr "Förvald CPU-tid: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:337 #, c-format msgid "Max total jobs: %i" msgstr "Största totalt antal jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:338 #, c-format msgid "Max running jobs: %i" msgstr "Största antal körande jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:339 #, c-format msgid "Max waiting jobs: %i" msgstr "Största antal väntade jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:340 #, c-format msgid "Max pre-LRMS waiting jobs: %i" msgstr "Största antal pre-LRMS-väntade jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:341 #, c-format msgid "Max user running jobs: %i" msgstr "Största antal körande jobb för användaren: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:342 #, c-format msgid "Max slots per job: %i" msgstr "Största antal slottar per jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:343 #, c-format msgid "Max stage in streams: %i" msgstr "Största antal stage-in-strömmar: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:344 #, c-format msgid "Max stage out streams: %i" msgstr "Största antal stage-out-strömmar: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:345 #, c-format msgid "Scheduling policy: %s" msgstr "Schemaläggningsspolicy: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:346 #, c-format msgid "Max memory: %i" msgstr "Största minne: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:347 #, c-format msgid "Max virtual memory: %i" msgstr "Största virtuella minne: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:348 #, c-format msgid "Max disk space: %i" msgstr "Största diskutrymme: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:349 #, c-format msgid "Default Storage Service: %s" msgstr "Förvald lagringstjänst: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:350 msgid "Supports preemption" msgstr "Stöder preemption" #: src/hed/libs/compute/ExecutionTarget.cpp:351 msgid "Doesn't support preemption" msgstr "Stöder ej preemption" #: src/hed/libs/compute/ExecutionTarget.cpp:352 #, c-format msgid "Total jobs: %i" msgstr "Totalt antal jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:353 #, c-format msgid "Running jobs: %i" msgstr "Antal körande jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:354 #, c-format msgid "Local running jobs: %i" msgstr "Antal lokala körande jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:355 #, c-format msgid "Waiting jobs: %i" msgstr "Antal väntade jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:356 #, c-format msgid "Local waiting jobs: %i" msgstr "Antal lokala väntade jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:357 #, c-format msgid "Suspended jobs: %i" msgstr "Antal suspenderade jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:358 #, c-format msgid "Local suspended jobs: %i" msgstr "Antal lokala suspenderade jobb: 
%i" #: src/hed/libs/compute/ExecutionTarget.cpp:359 #, c-format msgid "Staging jobs: %i" msgstr "Antal jobb som laddar ned/upp: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:360 #, c-format msgid "Pre-LRMS waiting jobs: %i" msgstr "Antal pre-LRMS-väntade jobb: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:361 #, c-format msgid "Estimated average waiting time: %s" msgstr "Förväntad medelväntetid: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:362 #, c-format msgid "Estimated worst waiting time: %s" msgstr "Förväntad värsta väntetid: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:363 #, c-format msgid "Free slots: %i" msgstr "Lediga slottar: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:365 msgid "Free slots grouped according to time limits (limit: free slots):" msgstr "Lediga slottar grupperade enligt tidsgräns (gräns: lediga slottar):" #: src/hed/libs/compute/ExecutionTarget.cpp:368 #, c-format msgid " %s: %i" msgstr " %s: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:369 #, c-format msgid " unspecified: %i" msgstr " ospecificerad: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:372 #, c-format msgid "Used slots: %i" msgstr "Använda slottar: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:373 #, c-format msgid "Requested slots: %i" msgstr "Begärda slottar: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:374 #, c-format msgid "Reservation policy: %s" msgstr "Reserveringspolicy: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:381 #, c-format msgid "Resource manager: %s" msgstr "Resurshanterare: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:383 #, c-format msgid " (%s)" msgstr " (%s)" #: src/hed/libs/compute/ExecutionTarget.cpp:387 #, c-format msgid "Total physical CPUs: %i" msgstr "Totalt antal fysiska CPUer: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:388 #, c-format msgid "Total logical CPUs: %i" msgstr "Totalt antal logiska CPUer: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:389 #, c-format msgid "Total slots: %i" msgstr "Totalt antal slottar: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:390 msgid "Supports advance reservations" msgstr "Stöder framtida reservering" #: src/hed/libs/compute/ExecutionTarget.cpp:391 msgid "Doesn't support advance reservations" msgstr "Stöder ej framtida reservering" #: src/hed/libs/compute/ExecutionTarget.cpp:392 msgid "Supports bulk submission" msgstr "Stöder massinsändning" #: src/hed/libs/compute/ExecutionTarget.cpp:393 msgid "Doesn't support bulk Submission" msgstr "Stöder ej massinsändning" #: src/hed/libs/compute/ExecutionTarget.cpp:394 msgid "Homogeneous resource" msgstr "Homogen resurs" #: src/hed/libs/compute/ExecutionTarget.cpp:395 msgid "Non-homogeneous resource" msgstr "Icke-homogen resurs" #: src/hed/libs/compute/ExecutionTarget.cpp:397 msgid "Network information:" msgstr "Nätverksinformation:" #: src/hed/libs/compute/ExecutionTarget.cpp:402 msgid "Working area is shared among jobs" msgstr "Arbetsutrymme delas mellan jobb" #: src/hed/libs/compute/ExecutionTarget.cpp:403 msgid "Working area is not shared among jobs" msgstr "Arbetsutrymme delas inte mellan jobb" #: src/hed/libs/compute/ExecutionTarget.cpp:404 #, c-format msgid "Working area total size: %i GB" msgstr "Arbetsutrymme total storlek: %i GB" #: src/hed/libs/compute/ExecutionTarget.cpp:405 #, c-format msgid "Working area free size: %i GB" msgstr "Arbetsutrymme fri storlek: %i GB" #: src/hed/libs/compute/ExecutionTarget.cpp:406 #, c-format msgid "Working area life time: %s" msgstr "Arbetsutrymme livstid: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:407 #, c-format 
msgid "Cache area total size: %i GB" msgstr "Cacheutrymme total storlek: %i GB" #: src/hed/libs/compute/ExecutionTarget.cpp:408 #, c-format msgid "Cache area free size: %i GB" msgstr "Cacheutrymme fri storlek: %i GB" #: src/hed/libs/compute/ExecutionTarget.cpp:414 #, c-format msgid "Platform: %s" msgstr "Plattform: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:415 msgid "Execution environment supports inbound connections" msgstr "Exekveringsmiljö stöder inkommande förbindelser" #: src/hed/libs/compute/ExecutionTarget.cpp:416 msgid "Execution environment does not support inbound connections" msgstr "Exekveringsmiljö stöder inte inkommande förbindelser" #: src/hed/libs/compute/ExecutionTarget.cpp:417 msgid "Execution environment supports outbound connections" msgstr "Exekveringsmiljö stöder utgÃ¥ende förbindelser" #: src/hed/libs/compute/ExecutionTarget.cpp:418 msgid "Execution environment does not support outbound connections" msgstr "Exekveringsmiljö stöder inte utgÃ¥ende förbindelser" #: src/hed/libs/compute/ExecutionTarget.cpp:419 msgid "Execution environment is a virtual machine" msgstr "Exekveringsmiljö är en virtuell maskin" #: src/hed/libs/compute/ExecutionTarget.cpp:420 msgid "Execution environment is a physical machine" msgstr "Exekveringsmiljö är en fysisk maskin" #: src/hed/libs/compute/ExecutionTarget.cpp:421 #, c-format msgid "CPU vendor: %s" msgstr "CPU-tillverkare: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:422 #, c-format msgid "CPU model: %s" msgstr "CPU-modell: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:423 #, c-format msgid "CPU version: %s" msgstr "CPU-version %s" #: src/hed/libs/compute/ExecutionTarget.cpp:424 #, c-format msgid "CPU clock speed: %i" msgstr "CPU-klockhastighet: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:425 #, c-format msgid "Main memory size: %i" msgstr "Huvudminnesstorlek: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:426 #, c-format msgid "OS family: %s" msgstr "OS-familj: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:427 #, c-format msgid "OS name: %s" msgstr "OS-namn: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:428 #, c-format msgid "OS version: %s" msgstr "OS-version %s" #: src/hed/libs/compute/ExecutionTarget.cpp:435 msgid "Computing service:" msgstr "Beräkningstjänst:" #: src/hed/libs/compute/ExecutionTarget.cpp:459 #, c-format msgid "%d Endpoints" msgstr "%d Ändpunkter" #: src/hed/libs/compute/ExecutionTarget.cpp:464 msgid "Endpoint Information:" msgstr "Ändpunktsinformation:" #: src/hed/libs/compute/ExecutionTarget.cpp:476 #, c-format msgid "%d Batch Systems" msgstr "%d batchsystem" #: src/hed/libs/compute/ExecutionTarget.cpp:481 msgid "Batch System Information:" msgstr "Batchsysteminformation:" #: src/hed/libs/compute/ExecutionTarget.cpp:487 msgid "Installed application environments:" msgstr "Installerade programmiljöer:" #: src/hed/libs/compute/ExecutionTarget.cpp:500 #, c-format msgid "%d Shares" msgstr "%d andelar" #: src/hed/libs/compute/ExecutionTarget.cpp:505 msgid "Share Information:" msgstr "Andelsinformation:" #: src/hed/libs/compute/ExecutionTarget.cpp:511 #, c-format msgid "%d mapping policies" msgstr "%d mappningspolicyer" #: src/hed/libs/compute/ExecutionTarget.cpp:515 msgid "Mapping policy:" msgstr "Mappningspolicy:" #: src/hed/libs/compute/ExecutionTarget.cpp:531 #, c-format msgid "Execution Target on Computing Service: %s" msgstr "Exekveringstarget pÃ¥ beräkningstjänst: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:533 #, c-format msgid " Computing endpoint URL: %s" msgstr " Beräkningsändpunkt-URL: 
%s" #: src/hed/libs/compute/ExecutionTarget.cpp:535 #, c-format msgid " Computing endpoint interface name: %s" msgstr " Beräkningsändpunktsgränssnittsnamn: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:537 #: src/hed/libs/compute/Job.cpp:575 #, c-format msgid " Queue: %s" msgstr " Kö: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:540 #, c-format msgid " Mapping queue: %s" msgstr " Mappar till kö: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:543 #, c-format msgid " Health state: %s" msgstr " HälsotillstÃ¥nd: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:548 msgid "Service information:" msgstr "Tjänsteinformation:" #: src/hed/libs/compute/ExecutionTarget.cpp:553 msgid " Installed application environments:" msgstr " Installerade programmiljöer:" #: src/hed/libs/compute/ExecutionTarget.cpp:560 msgid "Batch system information:" msgstr "Batchsysteminformation:" #: src/hed/libs/compute/ExecutionTarget.cpp:563 msgid "Queue information:" msgstr "Köinformation:" #: src/hed/libs/compute/ExecutionTarget.cpp:570 msgid " Benchmark information:" msgstr " Benchmarkinformation:" #: src/hed/libs/compute/GLUE2.cpp:53 msgid "The Service doesn't advertise its Type." msgstr "Tjänsten tillkännager inte sin typ." #: src/hed/libs/compute/GLUE2.cpp:58 msgid "The ComputingService doesn't advertise its Quality Level." msgstr "Beräkningstjänsten tillkännager inte sin kvalitetsnivÃ¥." #: src/hed/libs/compute/GLUE2.cpp:99 msgid "The ComputingEndpoint has no URL." msgstr "Beräkningsändpunkten har ingen URL." #: src/hed/libs/compute/GLUE2.cpp:104 msgid "The Service advertises no Health State." msgstr "Tjänsten tillkännaget inte sitt hälsotillstÃ¥nd." #: src/hed/libs/compute/GLUE2.cpp:117 msgid "The ComputingEndpoint doesn't advertise its Quality Level." msgstr "Beräkningsändpunkten tillkännager inte sin kvalitetsnivÃ¥." #: src/hed/libs/compute/GLUE2.cpp:128 msgid "The ComputingService doesn't advertise its Interface." msgstr "Beräkningstjänsten tillkännager inte ditt gränssnitt." #: src/hed/libs/compute/GLUE2.cpp:160 msgid "The ComputingEndpoint doesn't advertise its Serving State." msgstr "Beräkningsändpunkten tillkännager inte sitt servicetillstÃ¥nd." #: src/hed/libs/compute/GLUE2.cpp:247 #, c-format msgid "" "The \"FreeSlotsWithDuration\" attribute published by \"%s\" is wrongly " "formatted. Ignoring it." msgstr "" "\"FreeSlotsWithDuration\"-attributet publicerat av \"%s\" är felformatterat. " "Ignorerar det." #: src/hed/libs/compute/GLUE2.cpp:420 #, c-format msgid "" "Couldn't parse benchmark XML:\n" "%s" msgstr "" "Kunde inte tolka benchmark-XML:\n" "%s" #: src/hed/libs/compute/Job.cpp:324 msgid "Unable to detect format of job record." msgstr "Kunde inte detektera format för jobbpost." 
#: src/hed/libs/compute/Job.cpp:545 #, c-format msgid "Job: %s" msgstr "Jobb: %s" #: src/hed/libs/compute/Job.cpp:547 #, c-format msgid " Name: %s" msgstr " Namn: %s" #: src/hed/libs/compute/Job.cpp:548 #, c-format msgid " State: %s" msgstr " Tillstånd: %s" #: src/hed/libs/compute/Job.cpp:551 #, c-format msgid " Specific state: %s" msgstr " Specifikt tillstånd: %s" #: src/hed/libs/compute/Job.cpp:555 src/hed/libs/compute/Job.cpp:579 #, c-format msgid " Waiting Position: %d" msgstr " Position i kö: %d" #: src/hed/libs/compute/Job.cpp:559 #, c-format msgid " Exit Code: %d" msgstr " Avslutningskod: %d" #: src/hed/libs/compute/Job.cpp:563 #, c-format msgid " Job Error: %s" msgstr " Jobbfel: %s" #: src/hed/libs/compute/Job.cpp:568 #, c-format msgid " Owner: %s" msgstr " Ägare: %s" #: src/hed/libs/compute/Job.cpp:572 #, c-format msgid " Other Messages: %s" msgstr " Övriga meddelanden: %s" #: src/hed/libs/compute/Job.cpp:577 #, c-format msgid " Requested Slots: %d" msgstr " Begärda slottar: %d" #: src/hed/libs/compute/Job.cpp:582 #, c-format msgid " Stdin: %s" msgstr " Stdin: %s" #: src/hed/libs/compute/Job.cpp:584 #, c-format msgid " Stdout: %s" msgstr " Stdout: %s" #: src/hed/libs/compute/Job.cpp:586 #, c-format msgid " Stderr: %s" msgstr " Stderr: %s" #: src/hed/libs/compute/Job.cpp:588 #, c-format msgid " Computing Service Log Directory: %s" msgstr " Beräkningstjänstens loggkatalog: %s" #: src/hed/libs/compute/Job.cpp:591 #, c-format msgid " Submitted: %s" msgstr " Insänt: %s" #: src/hed/libs/compute/Job.cpp:594 #, c-format msgid " End Time: %s" msgstr " Avslutningstid: %s" #: src/hed/libs/compute/Job.cpp:597 #, c-format msgid " Submitted from: %s" msgstr " Insänt från: %s" #: src/hed/libs/compute/Job.cpp:600 #, c-format msgid " Submitting client: %s" msgstr " Insänt med klient: %s" #: src/hed/libs/compute/Job.cpp:603 #, c-format msgid " Requested CPU Time: %s" msgstr " Begärd CPU-tid: %s" #: src/hed/libs/compute/Job.cpp:608 #, c-format msgid " Used CPU Time: %s (%s per slot)" msgstr " Använd CPU-tid: %s (%s per slot)" #: src/hed/libs/compute/Job.cpp:612 #, c-format msgid " Used CPU Time: %s" msgstr " Använd CPU-tid: %s" #: src/hed/libs/compute/Job.cpp:618 #, c-format msgid " Used Wall Time: %s (%s per slot)" msgstr " Använd klocktid: %s (%s per slot)" #: src/hed/libs/compute/Job.cpp:622 #, c-format msgid " Used Wall Time: %s" msgstr " Använd klocktid: %s" #: src/hed/libs/compute/Job.cpp:627 #, c-format msgid " Used Memory: %d" msgstr " Använt minne: %d" #: src/hed/libs/compute/Job.cpp:631 #, c-format msgid " Results were deleted: %s" msgstr " Resultaten har raderats: %s" #: src/hed/libs/compute/Job.cpp:632 #, c-format msgid " Results must be retrieved before: %s" msgstr " Resultaten måste hämtas innan: %s" #: src/hed/libs/compute/Job.cpp:636 #, c-format msgid " Proxy valid until: %s" msgstr " Proxy giltig till: %s" #: src/hed/libs/compute/Job.cpp:640 #, c-format msgid " Entry valid from: %s" msgstr " Post giltig från: %s" #: src/hed/libs/compute/Job.cpp:643 #, c-format msgid " Entry valid for: %s" msgstr " Post giltig i: %s" #: src/hed/libs/compute/Job.cpp:647 msgid " Old job IDs:" msgstr " Gamla jobb-id:" #: src/hed/libs/compute/Job.cpp:655 #, c-format msgid " ID on service: %s" msgstr " ID på tjänst: %s" #: src/hed/libs/compute/Job.cpp:656 #, c-format msgid " Service information URL: %s (%s)" msgstr " Tjänsteinformation-URL: %s (%s)" #: src/hed/libs/compute/Job.cpp:657 #, c-format msgid " Job status URL: %s (%s)" msgstr " Jobbstatus-URL: %s (%s)" #: src/hed/libs/compute/Job.cpp:658 #,
c-format msgid " Job management URL: %s (%s)" msgstr " Jobbhanterings-URL: %s (%s)" #: src/hed/libs/compute/Job.cpp:659 #, c-format msgid " Stagein directory URL: %s" msgstr " Stage-in-katalog-URL: %s" #: src/hed/libs/compute/Job.cpp:660 #, c-format msgid " Stageout directory URL: %s" msgstr " Stage-out-katalog-URL: %s" #: src/hed/libs/compute/Job.cpp:661 #, c-format msgid " Session directory URL: %s" msgstr " Sessionskatalog-URL: %s" #: src/hed/libs/compute/Job.cpp:663 msgid " Delegation IDs:" msgstr " Delegerings-ID:" #: src/hed/libs/compute/Job.cpp:845 #, c-format msgid "Unable to handle job (%s), no interface specified." msgstr "Kunde inte hantera jobb (%s), inget gränssnitt angivet." #: src/hed/libs/compute/Job.cpp:850 #, c-format msgid "" "Unable to handle job (%s), no plugin associated with the specified interface " "(%s)" msgstr "" "Kunde inte hantera jobb (%s), ingen plugin associerad med det angivna " "gränssnittet (%s)" #: src/hed/libs/compute/Job.cpp:872 #, c-format msgid "Invalid download destination path specified (%s)" msgstr "Ogiltig nedladdningsdestinationssökväg angiven (%s)" #: src/hed/libs/compute/Job.cpp:877 #, c-format msgid "" "Unable to download job (%s), no JobControllerPlugin plugin was set to handle " "the job." msgstr "" "Kunde inte ladda ner jobb (%s), ingen JobControllerPlugin-plugin har satts " "att hantera jobbet." #: src/hed/libs/compute/Job.cpp:881 #, c-format msgid "Downloading job: %s" msgstr "Laddar ner jobb: %s" #: src/hed/libs/compute/Job.cpp:885 #, c-format msgid "" "Cant retrieve job files for job (%s) - unable to determine URL of stage out " "directory" msgstr "" "Kan inte hämta jobbfiler för jobb (%s) - kan inte bestämma URL till stage-" "out-katalog" #: src/hed/libs/compute/Job.cpp:890 #, c-format msgid "Invalid stage out path specified (%s)" msgstr "Ogiltig stage-out-sökväg angiven (%s)" #: src/hed/libs/compute/Job.cpp:897 #, c-format msgid "%s directory exist! Skipping job." msgstr "%s-katalog existerar! Hoppar över jobb." #: src/hed/libs/compute/Job.cpp:903 #, c-format msgid "Unable to retrieve list of job files to download for job %s" msgstr "Kunde inte hämta lista med jobbfiler att ladda ned för jobb %s" #: src/hed/libs/compute/Job.cpp:908 #, c-format msgid "No files to retrieve for job %s" msgstr "Inga filer att hämta för jobb %s" #: src/hed/libs/compute/Job.cpp:914 #, c-format msgid "Failed to create directory %s! Skipping job." msgstr "Misslyckades med att skapa katalog %s! Hoppar över jobb." 
#: src/hed/libs/compute/Job.cpp:927 #, c-format msgid "Failed downloading %s to %s, destination already exist" msgstr "" "Misslyckades med att ladda ned %s till %s, destinationen existerar redan" #: src/hed/libs/compute/Job.cpp:933 #, c-format msgid "Failed downloading %s to %s, unable to remove existing destination" msgstr "" "Misslyckades med att ladda ned %s till %s, kunde inte ta bort existerande " "destination" #: src/hed/libs/compute/Job.cpp:939 #, c-format msgid "Failed downloading %s to %s" msgstr "Misslyckades med att ladda ned %s till %s" #: src/hed/libs/compute/Job.cpp:952 #, c-format msgid "Unable to initialize handler for %s" msgstr "Misslyckades med att initiera hanterare för %s" #: src/hed/libs/compute/Job.cpp:957 #, c-format msgid "Unable to list files at %s" msgstr "Kunde inte lista filer på %s" #: src/hed/libs/compute/Job.cpp:999 msgid "Now copying (from -> to)" msgstr "Kopierar nu (från -> till)" #: src/hed/libs/compute/Job.cpp:1000 #, c-format msgid " %s -> %s" msgstr " %s -> %s" #: src/hed/libs/compute/Job.cpp:1015 #, c-format msgid "Unable to initialise connection to source: %s" msgstr "Kunde inte initiera förbindelse till källa: %s" #: src/hed/libs/compute/Job.cpp:1026 #, c-format msgid "Unable to initialise connection to destination: %s" msgstr "Kunde inte initiera förbindelse till destination: %s" #: src/hed/libs/compute/Job.cpp:1045 #, c-format msgid "File download failed: %s" msgstr "Filnedladdning misslyckades: %s" #: src/hed/libs/compute/Job.cpp:1084 src/hed/libs/compute/Job.cpp:1113 #: src/hed/libs/compute/Job.cpp:1145 src/hed/libs/compute/Job.cpp:1178 #, c-format msgid "Waiting for lock on file %s" msgstr "Väntar på lås på fil %s" #: src/hed/libs/compute/JobControllerPlugin.cpp:101 #, c-format msgid "JobControllerPlugin plugin \"%s\" not found." msgstr "Hittade inte JobControllerPlugin-plugin \"%s\"."
#: src/hed/libs/compute/JobControllerPlugin.cpp:110 #, c-format msgid "JobControllerPlugin %s could not be created" msgstr "JobControllerPlugin %s kunde inte skapas" #: src/hed/libs/compute/JobControllerPlugin.cpp:115 #, c-format msgid "Loaded JobControllerPlugin %s" msgstr "Laddade in JobControllerPlugin %s" #: src/hed/libs/compute/JobDescription.cpp:22 #, c-format msgid ": %d" msgstr ": %d" #: src/hed/libs/compute/JobDescription.cpp:24 #, c-format msgid ": %s" msgstr ": %s" #: src/hed/libs/compute/JobDescription.cpp:138 msgid " --- DRY RUN --- " msgstr " --- TORRKÖRNING --- " #: src/hed/libs/compute/JobDescription.cpp:148 #, c-format msgid " Annotation: %s" msgstr " Annotering: %s" #: src/hed/libs/compute/JobDescription.cpp:154 #, c-format msgid " Old activity ID: %s" msgstr " Gammalt aktivitets-ID: %s" #: src/hed/libs/compute/JobDescription.cpp:160 #, c-format msgid " Argument: %s" msgstr " Argument: %s" #: src/hed/libs/compute/JobDescription.cpp:171 #, c-format msgid " RemoteLogging (optional): %s (%s)" msgstr " RemoteLogging (valfritt): %s (%s)" #: src/hed/libs/compute/JobDescription.cpp:174 #, c-format msgid " RemoteLogging: %s (%s)" msgstr " RemoteLogging: %s (%s)" #: src/hed/libs/compute/JobDescription.cpp:182 #, c-format msgid " Environment.name: %s" msgstr " Environment.name: %s" #: src/hed/libs/compute/JobDescription.cpp:183 #, c-format msgid " Environment: %s" msgstr " Environment: %s" #: src/hed/libs/compute/JobDescription.cpp:196 #, c-format msgid " PreExecutable.Argument: %s" msgstr " PreExecutable.Argument: %s" #: src/hed/libs/compute/JobDescription.cpp:199 #: src/hed/libs/compute/JobDescription.cpp:217 #, c-format msgid " Exit code for successful execution: %d" msgstr " Avslutningskod för framgångsrik exekvering: %d" #: src/hed/libs/compute/JobDescription.cpp:202 #: src/hed/libs/compute/JobDescription.cpp:220 msgid " No exit code for successful execution specified." msgstr " Ingen avslutningskod för framgångsrik exekvering angiven."
#: src/hed/libs/compute/JobDescription.cpp:214 #, c-format msgid " PostExecutable.Argument: %s" msgstr " PostExecutable.Argument: %s" #: src/hed/libs/compute/JobDescription.cpp:230 #, c-format msgid " Access control: %s" msgstr " Åtkomstkontroll: %s" #: src/hed/libs/compute/JobDescription.cpp:234 #, c-format msgid " Processing start time: %s" msgstr " Processeringsstarttid: %s" #: src/hed/libs/compute/JobDescription.cpp:237 msgid " Notify:" msgstr " Avisera:" #: src/hed/libs/compute/JobDescription.cpp:251 #, c-format msgid " Credential service: %s" msgstr " Referenstjänst: %s" #: src/hed/libs/compute/JobDescription.cpp:261 msgid " Operating system requirements:" msgstr " Operativsystem-villkor:" #: src/hed/libs/compute/JobDescription.cpp:279 msgid " Computing endpoint requirements:" msgstr " Beräkningsändpunkt-villkor:" #: src/hed/libs/compute/JobDescription.cpp:292 msgid " Node access: inbound" msgstr " Nod-åtkomst: inkommande" #: src/hed/libs/compute/JobDescription.cpp:295 msgid " Node access: outbound" msgstr " Nod-åtkomst: utgående" #: src/hed/libs/compute/JobDescription.cpp:298 msgid " Node access: inbound and outbound" msgstr " Nod-åtkomst: inkommande och utgående" #: src/hed/libs/compute/JobDescription.cpp:308 msgid " Job requires exclusive execution" msgstr " Jobb kräver exklusiv exekvering" #: src/hed/libs/compute/JobDescription.cpp:311 msgid " Job does not require exclusive execution" msgstr " Jobb kräver inte exklusiv exekvering" #: src/hed/libs/compute/JobDescription.cpp:316 msgid " Run time environment requirements:" msgstr " Runtime-miljö-villkor:" #: src/hed/libs/compute/JobDescription.cpp:328 msgid " Inputfile element:" msgstr " Indatafil-element:" #: src/hed/libs/compute/JobDescription.cpp:329 #: src/hed/libs/compute/JobDescription.cpp:351 #, c-format msgid " Name: %s" msgstr " Namn: %s" #: src/hed/libs/compute/JobDescription.cpp:331 msgid " Is executable: true" msgstr " Är exekverbar: sant" #: src/hed/libs/compute/JobDescription.cpp:335 #, c-format msgid " Sources: %s" msgstr " Sources: %s" #: src/hed/libs/compute/JobDescription.cpp:337 #, c-format msgid " Sources.DelegationID: %s" msgstr " Sources.DelegationID: %s" #: src/hed/libs/compute/JobDescription.cpp:341 #, c-format msgid " Sources.Options: %s = %s" msgstr " Sources.Options: %s = %s" #: src/hed/libs/compute/JobDescription.cpp:350 msgid " Outputfile element:" msgstr " Utdatafil-element:" #: src/hed/libs/compute/JobDescription.cpp:354 #, c-format msgid " Targets: %s" msgstr " Targets: %s" #: src/hed/libs/compute/JobDescription.cpp:356 #, c-format msgid " Targets.DelegationID: %s" msgstr " Targets.DelegationID: %s" #: src/hed/libs/compute/JobDescription.cpp:360 #, c-format msgid " Targets.Options: %s = %s" msgstr " Targets.Options: %s = %s" #: src/hed/libs/compute/JobDescription.cpp:367 #, c-format msgid " DelegationID element: %s" msgstr " Delegerings-ID-element: %s" #: src/hed/libs/compute/JobDescription.cpp:374 #, c-format msgid " Other attributes: [%s], %s" msgstr " Övriga attribut: [%s], %s" #: src/hed/libs/compute/JobDescription.cpp:440 msgid "Empty job description source string" msgstr "Tom jobbeskrivnings-källsträng" #: src/hed/libs/compute/JobDescription.cpp:473 msgid "No job description parsers available" msgstr "Inga jobbeskrivningstolkar tillgängliga" #: src/hed/libs/compute/JobDescription.cpp:475 #, c-format msgid "" "No job description parsers suitable for handling '%s' language are available" msgstr "" "Inga jobbeskrivningstolkar lämpliga att hantera '%s'-språket är tillgängliga" #:
src/hed/libs/compute/JobDescription.cpp:483 #, c-format msgid "%s parsing error" msgstr "%s-tolkningsfel" #: src/hed/libs/compute/JobDescription.cpp:499 msgid "No job description parser was able to interpret job description" msgstr "Ingen jobbeskrivningstolk kunde tolka jobbeskrivning" #: src/hed/libs/compute/JobDescription.cpp:509 msgid "" "Job description language is not specified, unable to output description." msgstr "Jobbeskrivningsspråk är inte angivet, kan inte skriva ut beskrivning." #: src/hed/libs/compute/JobDescription.cpp:521 #, c-format msgid "Generating %s job description output" msgstr "Genererar %s-jobbeskrivning" #: src/hed/libs/compute/JobDescription.cpp:537 #, c-format msgid "Language (%s) not recognized by any job description parsers." msgstr "Språk (%s) känns inte igen av någon jobbeskrivningstolk." #: src/hed/libs/compute/JobDescription.cpp:550 #, c-format msgid "Two input files have identical name '%s'." msgstr "Två indatafiler har identiska namn '%s'." #: src/hed/libs/compute/JobDescription.cpp:569 #: src/hed/libs/compute/JobDescription.cpp:582 #, c-format msgid "Cannot stat local input file '%s'" msgstr "Kan inte göra stat på lokal indatafil '%s'" #: src/hed/libs/compute/JobDescription.cpp:602 #, c-format msgid "Cannot find local input file '%s' (%s)" msgstr "Kan inte hitta lokal indatafil '%s' (%s)" #: src/hed/libs/compute/JobDescription.cpp:644 msgid "Unable to select runtime environment" msgstr "Kan inte välja runtime-miljö" #: src/hed/libs/compute/JobDescription.cpp:651 msgid "Unable to select middleware" msgstr "Kan inte välja middleware" #: src/hed/libs/compute/JobDescription.cpp:658 msgid "Unable to select operating system." msgstr "Kan inte välja operativsystem." #: src/hed/libs/compute/JobDescription.cpp:677 #, c-format msgid "No test-job with ID %d found." msgstr "Hittade inget test-jobb med ID %d." #: src/hed/libs/compute/JobDescription.cpp:689 #, c-format msgid "Test was defined with ID %d, but some error occurred during parsing it." msgstr "Test definierades med ID %d, men något fel uppstod när det tolkades." #: src/hed/libs/compute/JobDescription.cpp:693 #, c-format msgid "No jobdescription resulted at %d test" msgstr "Ingen jobbeskrivning resulterade vid %d test" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:52 #, c-format msgid "JobDescriptionParserPlugin plugin \"%s\" not found." msgstr "Hittade inte JobDescriptionParserPlugin-plugin \"%s\"."
#: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:59 #, c-format msgid "JobDescriptionParserPlugin %s could not be created" msgstr "JobDescriptionParserPlugin %s kunde inte skapas" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:64 #, c-format msgid "Loaded JobDescriptionParserPlugin %s" msgstr "Laddade in JobDescriptionParserPlugin %s" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:328 msgid "Unable to create temporary directory" msgstr "Kunde inte skapa temporär katalog" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:336 #, c-format msgid "Unable to create data base environment (%s)" msgstr "Kunde inte skapa databasmiljö (%s)" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:346 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:350 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:354 #, c-format msgid "Unable to set duplicate flags for secondary key DB (%s)" msgstr "Kunde inte sätta duplicate-flagga för sekundär-nyckel-DB (%s)" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:360 #, c-format msgid "Unable to create job database (%s)" msgstr "Kunde inte skapa jobbdatabas (%s)" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:364 #, c-format msgid "Unable to create DB for secondary name keys (%s)" msgstr "Kunde inte skapa databas för sekundära namn-nycklar (%s)" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:368 #, c-format msgid "Unable to create DB for secondary endpoint keys (%s)" msgstr "Kunde inte skapa databas för sekundära ändpunkt-nycklar (%s)" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:372 #, c-format msgid "Unable to create DB for secondary service info keys (%s)" msgstr "Kunde inte skapa databas för sekundära tjänste-info-nycklar (%s)" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:377 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:381 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:385 #, c-format msgid "Unable to associate secondary DB with primary DB (%s)" msgstr "Kunde inte associera sekundär DB med primär DB (%s)" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:388 #, c-format msgid "Job database created successfully (%s)" msgstr "Jobbdatabas skapades framgångsrikt (%s)" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:430 #, c-format msgid "Error from BDB: %s: %s" msgstr "Fel från BDB: %s: %s" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:433 #, c-format msgid "Error from BDB: %s" msgstr "Fel från BDB: %s" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:453 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:221 #: src/hed/libs/compute/JobInformationStorageXML.cpp:27 #, c-format msgid "" "Job list file cannot be created: The parent directory (%s) doesn't exist." msgstr "Jobblistfil kan inte skapas: Föräldrakatalogen (%s) existerar inte."
#: src/hed/libs/compute/JobInformationStorageBDB.cpp:457 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:225 #: src/hed/libs/compute/JobInformationStorageXML.cpp:31 #, c-format msgid "Job list file cannot be created: %s is not a directory" msgstr "Jobblistfil kan inte skapas: %s är inte en katalog" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:464 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:232 #: src/hed/libs/compute/JobInformationStorageXML.cpp:38 #, c-format msgid "Job list file (%s) is not a regular file" msgstr "Jobblistfil (%s) är inte en vanlig fil" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:502 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:561 #, c-format msgid "Unable to write key/value pair to job database (%s): Key \"%s\"" msgstr "" "Kunde inte skriva nyckel/värde-par till jobbdatabas (%s): Nyckel \"%s\"" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:728 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:622 #: src/hed/libs/compute/JobInformationStorageXML.cpp:137 #, c-format msgid "Unable to truncate job database (%s)" msgstr "Kunde inte trunkera jobbdatabas (%s)" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:759 msgid "" "ENOENT: The file or directory does not exist, Or a nonexistent re_source " "file was specified." msgstr "" "ENOENT: Filen eller katalogen existerar inte, eller en icke-existerande " "re_source-fil angavs." #: src/hed/libs/compute/JobInformationStorageBDB.cpp:762 msgid "" "DB_OLD_VERSION: The database cannot be opened without being first upgraded." msgstr "DB_OLD_VERSION: Databasen kan inte öppnas utan att först uppgraderas." #: src/hed/libs/compute/JobInformationStorageBDB.cpp:765 msgid "EEXIST: DB_CREATE and DB_EXCL were specified and the database exists." msgstr "EEXIST: DB_CREATE och DB_EXCL angavs och databasen existerar." 
#: src/hed/libs/compute/JobInformationStorageBDB.cpp:767 msgid "EINVAL" msgstr "EINVAL" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:770 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:655 #, c-format msgid "Unable to determine error (%d)" msgstr "Kunde inte bestämma fel (%d)" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:120 #, c-format msgid "Unable to create data base (%s)" msgstr "Kunde inte skapa databas (%s)" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:128 #, c-format msgid "Unable to create jobs table in data base (%s)" msgstr "Kunde inte skapa jobs-tabell i databas (%s)" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:137 #, c-format msgid "Unable to create jobs_new table in data base (%s)" msgstr "Kunde inte skapa jobs_new-tabell i databas (%s)" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:143 #, c-format msgid "Unable to transfer from jobs to jobs_new in data base (%s)" msgstr "Kunde inte överföra från jobs till jobs_new i databas (%s)" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:149 #, c-format msgid "Unable to drop jobs in data base (%s)" msgstr "Kunde inte ta bort jobs-tabell i databas (%s)" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:155 #, c-format msgid "Unable to rename jobs table in data base (%s)" msgstr "Kunde inte byta namn på jobs-tabell i databas (%s)" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:165 #, c-format msgid "Unable to create index for jobs table in data base (%s)" msgstr "Kunde inte skapa index för jobs-tabell i databas (%s)" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:173 #, c-format msgid "Failed checking database (%s)" msgstr "Misslyckades med att kontrollera databas (%s)" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:175 #, c-format msgid "Job database connection established successfully (%s)" msgstr "Jobbdatabasförbindelse etablerad framgångsrikt (%s)" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:197 #, c-format msgid "Error from SQLite: %s: %s" msgstr "Fel från SQLite: %s: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:200 #, c-format msgid "Error from SQLite: %s" msgstr "Fel från SQLite: %s" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:362 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:369 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:376 #, c-format msgid "Unable to write records into job database (%s): Id \"%s\"" msgstr "Kunde inte skriva poster till jobbdatabas (%s): Id \"%s\"" #: src/hed/libs/compute/JobInformationStorageXML.cpp:51 #: src/hed/libs/compute/JobInformationStorageXML.cpp:223 #: src/hed/libs/compute/JobInformationStorageXML.cpp:264 #, c-format msgid "Waiting for lock on job list file %s" msgstr "Väntar på lås på jobblistfil %s" #: src/hed/libs/compute/JobInformationStorageXML.cpp:162 #, c-format msgid "Will remove %s on service %s." msgstr "Kommer att ta bort %s på tjänsten %s."
#: src/hed/libs/compute/JobSupervisor.cpp:36 msgid "Ignoring job, the job ID is empty" msgstr "Ignorerar jobb, jobb-id är tomt" #: src/hed/libs/compute/JobSupervisor.cpp:41 #, c-format msgid "Ignoring job (%s), the management interface name is unknown" msgstr "Ignorerar jobb (%s), hanteringsgränssnittsnamnet är okänt" #: src/hed/libs/compute/JobSupervisor.cpp:46 #, c-format msgid "Ignoring job (%s), the job management URL is unknown" msgstr "Ignorerar jobb (%s), jobbhanterings-URL är okänd" #: src/hed/libs/compute/JobSupervisor.cpp:51 #, c-format msgid "Ignoring job (%s), the status interface name is unknown" msgstr "Ignorerar jobb (%s), statusgränssnittsnamnet är okänt" #: src/hed/libs/compute/JobSupervisor.cpp:56 #, c-format msgid "Ignoring job (%s), the job status URL is unknown" msgstr "Ignorerar jobb (%s), jobbstatus-URL är okänd" #: src/hed/libs/compute/JobSupervisor.cpp:65 #, c-format msgid "Ignoring job (%s), unable to load JobControllerPlugin for %s" msgstr "Ignorerar jobb (%s), kunde inte ladda in JobControllerPlugin för %s" #: src/hed/libs/compute/JobSupervisor.cpp:72 #, c-format msgid "" "Ignoring job (%s), already tried and were unable to load JobControllerPlugin" msgstr "" "Ignorerar jobb (%s), redan försökt och kunde inte ladda in " "JobControllerPlugin" #: src/hed/libs/compute/JobSupervisor.cpp:385 #, c-format msgid "Job resubmission failed: Unable to load broker (%s)" msgstr "Jobbåterinsändning misslyckades: Kunde inte ladda in mäklare (%s)" #: src/hed/libs/compute/JobSupervisor.cpp:400 msgid "Job resubmission aborted because no resource returned any information" msgstr "" "Jobbåterinsändning avbröts eftersom ingen resurs returnerade någon " "information" #: src/hed/libs/compute/JobSupervisor.cpp:421 #, c-format msgid "Unable to resubmit job (%s), unable to parse obtained job description" msgstr "" "Kunde inte återinsända jobb (%s), kunde inte tolka erhållen jobbeskrivning" #: src/hed/libs/compute/JobSupervisor.cpp:443 #, c-format msgid "" "Unable to resubmit job (%s), target information retrieval failed for target: " "%s" msgstr "" "Kunde inte återinsända jobb (%s), targetinformationsinhämtning misslyckades " "för target: %s" #: src/hed/libs/compute/JobSupervisor.cpp:469 #, c-format msgid "Unable to resubmit job (%s), no targets applicable for submission" msgstr "" "Kunde inte återinsända jobb (%s), inget target tillämpbart för insändning" #: src/hed/libs/compute/JobSupervisor.cpp:504 #, c-format msgid "" "Unable to migrate job (%s), job description could not be retrieved remotely" msgstr "Kunde inte migrera jobb (%s), jobbeskrivning kunde inte hämtas utifrån" #: src/hed/libs/compute/JobSupervisor.cpp:524 msgid "Job migration aborted, no resource returned any information" msgstr "Jobbmigrering avbröts, ingen resurs returnerade någon information" #: src/hed/libs/compute/JobSupervisor.cpp:536 #, c-format msgid "Job migration aborted, unable to load broker (%s)" msgstr "Jobbmigrering avbröts, kunde inte ladda in mäklare (%s)" #: src/hed/libs/compute/JobSupervisor.cpp:552 #, c-format msgid "Unable to migrate job (%s), unable to parse obtained job description" msgstr "Kunde inte migrera jobb (%s), kunde inte tolka erhållen jobbeskrivning" #: src/hed/libs/compute/JobSupervisor.cpp:573 #, c-format msgid "Unable to load submission plugin for %s interface" msgstr "Kunde inte ladda in insändningsplugin för %s-gränssnitt" #: src/hed/libs/compute/JobSupervisor.cpp:583 #, c-format msgid "Job migration failed for job (%s), no applicable targets" msgstr "Jobbmigrering
misslyckades för jobb (%s), inga tillämpbara target" #: src/hed/libs/compute/Software.cpp:65 src/hed/libs/compute/Software.cpp:94 #: src/hed/libs/compute/Software.cpp:113 #, c-format msgid "%s > %s => false" msgstr "%s > %s => falskt" #: src/hed/libs/compute/Software.cpp:70 src/hed/libs/compute/Software.cpp:83 #: src/hed/libs/compute/Software.cpp:107 #, c-format msgid "%s > %s => true" msgstr "%s > %s => sant" #: src/hed/libs/compute/Software.cpp:90 src/hed/libs/compute/Software.cpp:102 #, c-format msgid "%s > %s => false: %s contains non numbers in the version part." msgstr "%s > %s => falskt: %s innehåller icke-nummer i versionsdelen." #: src/hed/libs/compute/Software.cpp:199 src/hed/libs/compute/Software.cpp:210 #, c-format msgid "Requirement \"%s %s\" NOT satisfied." msgstr "Villkor \"%s %s\" INTE uppfyllt." #: src/hed/libs/compute/Software.cpp:205 #, c-format msgid "Requirement \"%s %s\" satisfied." msgstr "Villkor \"%s %s\" uppfyllt." #: src/hed/libs/compute/Software.cpp:214 #, c-format msgid "Requirement \"%s %s\" satisfied by \"%s\"." msgstr "Villkor \"%s %s\" uppfyllt av \"%s\"." #: src/hed/libs/compute/Software.cpp:219 msgid "All requirements satisfied." msgstr "Alla villkor uppfyllda." #: src/hed/libs/compute/Submitter.cpp:83 #, c-format msgid "Trying to submit directly to endpoint (%s)" msgstr "Försöker sända in direkt till ändpunkt (%s)" #: src/hed/libs/compute/Submitter.cpp:88 #, c-format msgid "Interface (%s) specified, submitting only to that interface" msgstr "Gränssnitt (%s) angivet, sänder endast in till detta gränssnitt" #: src/hed/libs/compute/Submitter.cpp:106 msgid "Trying all available interfaces" msgstr "Provar alla tillgängliga gränssnitt" #: src/hed/libs/compute/Submitter.cpp:112 #, c-format msgid "Trying to submit endpoint (%s) using interface (%s) with plugin (%s)." msgstr "" "Försöker sända in till ändpunkt (%s) med gränssnitt (%s) med plugin (%s)." #: src/hed/libs/compute/Submitter.cpp:116 #, c-format msgid "" "Unable to load plugin (%s) for interface (%s) when trying to submit job " "description." msgstr "" "Kunde inte ladda in plugin (%s) för gränssnitt (%s) vid försök att sända in " "jobbeskrivning." #: src/hed/libs/compute/Submitter.cpp:130 #, c-format msgid "No more interfaces to try for endpoint %s." msgstr "Inga fler gränssnitt att prova för ändpunkt %s." #: src/hed/libs/compute/Submitter.cpp:336 #, c-format msgid "Target %s does not match requested interface(s)." msgstr "Target %s matchar inte begär(t/da) gränssnitt." #: src/hed/libs/compute/SubmitterPlugin.cpp:64 msgid "No stagein URL is provided" msgstr "Ingen stage-in-URL tillhandahållen" #: src/hed/libs/compute/SubmitterPlugin.cpp:83 #, c-format msgid "Failed uploading file %s to %s: %s" msgstr "Misslyckades med att ladda upp fil %s till %s: %s" #: src/hed/libs/compute/SubmitterPlugin.cpp:116 #, c-format msgid "Trying to migrate to %s: Migration to a %s interface is not supported." msgstr "Försöker migrera till %s: Migrering till ett %s-gränssnitt stöds inte." #: src/hed/libs/compute/SubmitterPlugin.cpp:172 #, c-format msgid "SubmitterPlugin plugin \"%s\" not found." msgstr "Hittade inte SubmitterPlugin-plugin \"%s\"."
#: src/hed/libs/compute/SubmitterPlugin.cpp:182 #, c-format msgid "SubmitterPlugin %s could not be created" msgstr "SubmitterPlugin %s kunde inte skapas" #: src/hed/libs/compute/SubmitterPlugin.cpp:187 #, c-format msgid "Loaded SubmitterPlugin %s" msgstr "Laddade in SubmitterPlugin %s" #: src/hed/libs/compute/examples/basic_job_submission.cpp:28 msgid "Invalid job description" msgstr "Ogiltig jobbeskrivning" #: src/hed/libs/compute/examples/basic_job_submission.cpp:47 msgid "Failed to submit job" msgstr "Misslyckades med att sända in jobb" #: src/hed/libs/compute/examples/basic_job_submission.cpp:54 #, c-format msgid "Failed to write to local job list %s" msgstr "Misslyckades med att skriva till lokal jobblista %s" #: src/hed/libs/compute/test_jobdescription.cpp:20 msgid "[job description ...]" msgstr "[jobbeskrivning ...]" #: src/hed/libs/compute/test_jobdescription.cpp:21 msgid "" "This tiny tool can be used for testing the JobDescription's conversion " "abilities." msgstr "" "Detta lilla verktyg kan användas för att testa JobDescription-klassens " "konverteringsmöjligheter." #: src/hed/libs/compute/test_jobdescription.cpp:23 msgid "" "The job description also can be a file or a string in ADL or XRSL format." msgstr "" "Jobbeskrivningen kan också vara en fil eller en sträng i ADL- eller XRSL-" "format." #: src/hed/libs/compute/test_jobdescription.cpp:27 msgid "define the requested format (nordugrid:xrsl, emies:adl)" msgstr "definiera det begärda formatet (nordugrid:xrsl, emies:adl)" #: src/hed/libs/compute/test_jobdescription.cpp:28 msgid "format" msgstr "format" #: src/hed/libs/compute/test_jobdescription.cpp:33 msgid "show the original job description" msgstr "visa den ursprungliga jobbeskrivningen" #: src/hed/libs/compute/test_jobdescription.cpp:43 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:71 msgid "Use --help option for detailed usage information" msgstr "Använd alternativet --help för detaljerad användningsinformation" #: src/hed/libs/compute/test_jobdescription.cpp:50 msgid " [ JobDescription tester ] " msgstr " [ JobDescription testare ] " #: src/hed/libs/compute/test_jobdescription.cpp:74 msgid " [ Parsing the original text ] " msgstr " [ Tolkar den ursprungliga texten ] " #: src/hed/libs/compute/test_jobdescription.cpp:80 msgid "Unable to parse." msgstr "Kan inte tolka." #: src/hed/libs/compute/test_jobdescription.cpp:89 msgid " [ emies:adl ] " msgstr " [ emies:adl ] " #: src/hed/libs/compute/test_jobdescription.cpp:91 msgid " [ nordugrid:xrsl ] " msgstr " [ nordugrid:xrsl ] " #: src/hed/libs/credential/ARCProxyUtil.cpp:134 msgid "VOMS command is empty" msgstr "VOMS-kommando är tomt" #: src/hed/libs/credential/ARCProxyUtil.cpp:254 #, c-format msgid "OpenSSL error -- %s" msgstr "OpenSSL-fel -- %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:255 #, c-format msgid "Library : %s" msgstr "Bibliotek: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:256 #, c-format msgid "Function : %s" msgstr "Funktion : %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:257 #, c-format msgid "Reason : %s" msgstr "Anledning: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:313 msgid "User interface error" msgstr "Användargränssnittsfel" #: src/hed/libs/credential/ARCProxyUtil.cpp:319 msgid "Aborted!" msgstr "Avbruten!"
#: src/hed/libs/credential/ARCProxyUtil.cpp:418 #: src/hed/libs/credential/ARCProxyUtil.cpp:1399 msgid "Failed to sign proxy" msgstr "Misslyckades med att signera proxy" #: src/hed/libs/credential/ARCProxyUtil.cpp:437 #: src/hed/libs/credential/Credential.cpp:878 #, c-format msgid "Error: can't open policy file: %s" msgstr "Fel: Kan inte öppna policyfil: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:447 #: src/hed/libs/credential/Credential.cpp:891 #, c-format msgid "Error: policy location: %s is not a regular file" msgstr "Fel: policy-plats: %s är inte en vanlig fil" #: src/hed/libs/credential/ARCProxyUtil.cpp:546 #, c-format msgid "VOMS line contains wrong number of tokens (%u expected): \"%s\"" msgstr "VOMS-rad innehåller fel antal token (%u förväntade): \"%s\"" #: src/hed/libs/credential/ARCProxyUtil.cpp:590 #, c-format msgid "Cannot get VOMS server %s information from the vomses files" msgstr "Kan inte hämta VOMS-server %s information från vomses-filerna" #: src/hed/libs/credential/ARCProxyUtil.cpp:623 #, c-format msgid "There are %d commands to the same VOMS server %s" msgstr "Det finns %d kommandon till samma VOMS-server %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:677 #, c-format msgid "Try to get attribute from VOMS server with order: %s" msgstr "Försök att hämta attribut från VOMS-server i ordning: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:680 #, c-format msgid "Message sent to VOMS server %s is: %s" msgstr "Meddelande skickat till VOMS-server %s är: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:699 #: src/hed/libs/credential/ARCProxyUtil.cpp:720 #, c-format msgid "" "The VOMS server with the information:\n" "\t%s\n" "can not be reached, please make sure it is available" msgstr "" "VOMS-servern med informationen:\n" "\t%s\n" "kan inte nås, se till att den är tillgänglig" #: src/hed/libs/credential/ARCProxyUtil.cpp:703 msgid "No HTTP response from VOMS server" msgstr "Inget HTTP-svar från VOMS-server" #: src/hed/libs/credential/ARCProxyUtil.cpp:708 #: src/hed/libs/credential/ARCProxyUtil.cpp:734 #, c-format msgid "Returned message from VOMS server: %s" msgstr "Returnerat meddelande från VOMS-server: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:724 msgid "No stream response from VOMS server" msgstr "Inget ström-svar från VOMS-server" #: src/hed/libs/credential/ARCProxyUtil.cpp:746 #, c-format msgid "" "The validity duration of VOMS AC is shortened from %s to %s, due to the " "validity constraint on voms server side.\n" msgstr "" "Giltighetstiden för VOMS-AC har kortats från %s till %s, på grund av " "giltighetsvillkoret på VOMS-serversidan.\n" #: src/hed/libs/credential/ARCProxyUtil.cpp:749 #, c-format msgid "" "Cannot get any AC or attributes info from VOMS server: %s;\n" " Returned message from VOMS server: %s\n" msgstr "" "Kan inte få någon AC- eller attributinfo från VOMS-server: %s;\n" " Returnerat meddelande från VOMS-server: %s\n" #: src/hed/libs/credential/ARCProxyUtil.cpp:754 #, c-format msgid "Returned message from VOMS server %s is: %s\n" msgstr "Returnerat meddelande från VOMS-server %s är: %s\n" #: src/hed/libs/credential/ARCProxyUtil.cpp:776 #, c-format msgid "The attribute information from VOMS server: %s is list as following:" msgstr "Attributinformationen från VOMS-server: %s listas som följer:" #: src/hed/libs/credential/ARCProxyUtil.cpp:788 #, c-format msgid "" "There are %d servers with the same name: %s in your vomses file, but none of " "them can be reached, or can return valid message.
But proxy without VOMS AC " "extension will still be generated." msgstr "" "Det finns %d servrar med samma namn: %s i din vomses-fil, men ingen av dem " "kan nås eller returnera ett giltigt meddelande. Men proxy utan VOMS-AC-" "tillägg kommer fortfarande att genereras." #: src/hed/libs/credential/ARCProxyUtil.cpp:1321 #: src/hed/libs/credential/ARCProxyUtil.cpp:1428 msgid "Failed to generate X509 request with NSS" msgstr "Misslyckades med att generera X509-begäran med NSS" #: src/hed/libs/credential/ARCProxyUtil.cpp:1332 #: src/hed/libs/credential/ARCProxyUtil.cpp:1439 #: src/hed/libs/credential/ARCProxyUtil.cpp:1480 msgid "Failed to create X509 certificate with NSS" msgstr "Misslyckades med att skapa X509-certifikat med NSS" #: src/hed/libs/credential/ARCProxyUtil.cpp:1344 #: src/hed/libs/credential/ARCProxyUtil.cpp:1451 #: src/hed/libs/credential/ARCProxyUtil.cpp:1504 msgid "Failed to export X509 certificate from NSS DB" msgstr "Misslyckades med att exportera X509-certifikat från NSS-databas" #: src/hed/libs/credential/ARCProxyUtil.cpp:1487 msgid "Failed to import X509 certificate into NSS DB" msgstr "Misslyckades med att importera X509-certifikat till NSS-databas" #: src/hed/libs/credential/ARCProxyUtil.cpp:1496 msgid "Failed to initialize the credential configuration" msgstr "Misslyckades med att initiera referensinställningarna" #: src/hed/libs/credential/CertUtil.cpp:162 #, c-format msgid "Error number in store context: %i" msgstr "Felnummer i lager-kontext: %i" #: src/hed/libs/credential/CertUtil.cpp:163 msgid "Self-signed certificate" msgstr "Självsignerat certifikat" #: src/hed/libs/credential/CertUtil.cpp:166 #, c-format msgid "The certificate with subject %s is not valid" msgstr "Certifikatet med subjekt %s är inte giltigt" #: src/hed/libs/credential/CertUtil.cpp:169 #, c-format msgid "" "Can not find issuer certificate for the certificate with subject %s and " "hash: %lu" msgstr "" "Kan inte hitta utfärdarcertifikat för certifikatet med subjekt %s och hash: " "%lu" #: src/hed/libs/credential/CertUtil.cpp:172 #, c-format msgid "Certificate with subject %s has expired" msgstr "Giltighetstiden för certifikat med subjekt %s har gått ut" #: src/hed/libs/credential/CertUtil.cpp:175 #, c-format msgid "" "Untrusted self-signed certificate in chain with subject %s and hash: %lu" msgstr "" "Icke betrott självsignerat certifikat i kedja med subjekt %s och hash: %lu" #: src/hed/libs/credential/CertUtil.cpp:177 #, c-format msgid "Certificate verification error: %s" msgstr "Certifikatverifieringsfel: %s" #: src/hed/libs/credential/CertUtil.cpp:189 msgid "Can not get the certificate type" msgstr "Kan inte erhålla certifikattyp" #: src/hed/libs/credential/CertUtil.cpp:229 msgid "Couldn't verify availability of CRL" msgstr "Kunde inte verifiera tillgänglighet för CRL" #: src/hed/libs/credential/CertUtil.cpp:242 msgid "In the available CRL the lastUpdate field is not valid" msgstr "I den tillgängliga CRLen är lastUpdate-fältet inte giltigt" #: src/hed/libs/credential/CertUtil.cpp:249 msgid "The available CRL is not yet valid" msgstr "Den tillgängliga CRLen är inte giltig än" #: src/hed/libs/credential/CertUtil.cpp:258 msgid "In the available CRL, the nextUpdate field is not valid" msgstr "I den tillgängliga CRLen är nextUpdate-fältet inte giltigt" #: src/hed/libs/credential/CertUtil.cpp:264 msgid "The available CRL has expired" msgstr "Giltighetstiden för den tillgängliga CRLen har gått ut" #: src/hed/libs/credential/CertUtil.cpp:287 #, c-format msgid "Certificate with serial number %s and
subject \"%s\" is revoked" msgstr "Certifikat med serienummer %s och subjekt \"%s\" är revokerat" #: src/hed/libs/credential/CertUtil.cpp:305 msgid "" "Directory of trusted CAs is not specified/found; Using current path as the " "CA direcroty" msgstr "" "Katalog med betrodda CA har inte angivits eller kan inte hittas; Använder " "nuvarande katalog som CA-katalog" #: src/hed/libs/credential/CertUtil.cpp:314 msgid "Can't allocate memory for CA policy path" msgstr "Kan inte allokera minne för CA-policy-sökväg" #: src/hed/libs/credential/CertUtil.cpp:360 #, c-format msgid "Certificate has unknown extension with numeric ID %u and SN %s" msgstr "Certifikat har okänt tillägg med numeriskt ID %u och SN %s" #: src/hed/libs/credential/CertUtil.cpp:374 #: src/hed/libs/credential/Credential.cpp:1697 msgid "" "Can not convert DER encoded PROXY_CERT_INFO_EXTENSION extension to internal " "format" msgstr "" "Kan inte konvertera DER-kodat PROXY_CERT_INFO_EXTENSION-tillägg till internt " "format" #: src/hed/libs/credential/CertUtil.cpp:420 msgid "Trying to check X509 cert with check_cert_type" msgstr "Försöker kontrollera X509-certifikat med check_cert_type" #: src/hed/libs/credential/CertUtil.cpp:459 msgid "Can't convert DER encoded PROXYCERTINFO extension to internal format" msgstr "" "Kan inte konvertera DER-kodat PROXYCERTINFO-tillägg till internt format" #: src/hed/libs/credential/CertUtil.cpp:463 msgid "Can't get policy from PROXYCERTINFO extension" msgstr "Kan inte erhÃ¥lla policy frÃ¥n PROXYCERTINFO-tillägg" #: src/hed/libs/credential/CertUtil.cpp:467 msgid "Can't get policy language from PROXYCERTINFO extension" msgstr "Kan inte erhÃ¥lla policy-sprÃ¥k frÃ¥n PROXYCERTINFO-tillägg" #: src/hed/libs/credential/CertUtil.cpp:499 msgid "The subject does not match the issuer name + proxy CN entry" msgstr "Subjekt matchar inte utfärdarnamn + proxy-CN-post" #: src/hed/libs/credential/Credential.cpp:84 #, c-format msgid "OpenSSL error string: %s" msgstr "OpenSSL-felsträng: %s" #: src/hed/libs/credential/Credential.cpp:205 msgid "Can't get the first byte of input to determine its format" msgstr "Kan inte erhÃ¥lla indatas fösta byte för att bestämma dess format" #: src/hed/libs/credential/Credential.cpp:219 msgid "Can't reset the input" msgstr "Kan inte Ã¥terställa indata" #: src/hed/libs/credential/Credential.cpp:244 #: src/hed/libs/credential/Credential.cpp:280 msgid "Can't get the first byte of input BIO to get its format" msgstr "Kan inte erhÃ¥lla indata-BIOs första byte för att bestämma dess format" #: src/hed/libs/credential/Credential.cpp:256 msgid "Can not read certificate/key string" msgstr "Kan inte läsa certifikat/nyckel-sträng" #: src/hed/libs/credential/Credential.cpp:460 #, c-format msgid "Can not find certificate file: %s" msgstr "Kan inte hitta certifikatfil: %s" #: src/hed/libs/credential/Credential.cpp:465 #, c-format msgid "Can not read certificate file: %s" msgstr "Kan inte läsa certifikatfil: %s" #: src/hed/libs/credential/Credential.cpp:503 msgid "Can not read certificate string" msgstr "Kan inte läsa certifikatsträng: %s" #: src/hed/libs/credential/Credential.cpp:523 msgid "Certificate format is PEM" msgstr "Certifikatformat är PEM" #: src/hed/libs/credential/Credential.cpp:550 msgid "Certificate format is DER" msgstr "Certifikatformat är DER" #: src/hed/libs/credential/Credential.cpp:579 msgid "Certificate format is PKCS" msgstr "Certifikatformat är PKCS" #: src/hed/libs/credential/Credential.cpp:605 msgid "Certificate format is unknown" msgstr "Certifikatformat är okänt" #: 
src/hed/libs/credential/Credential.cpp:613 #, c-format msgid "Can not find key file: %s" msgstr "Kan inte hitta nyckelfil: %s" #: src/hed/libs/credential/Credential.cpp:618 #, c-format msgid "Can not open key file %s" msgstr "Kan inte öppna nyckelfil: %s" #: src/hed/libs/credential/Credential.cpp:637 msgid "Can not read key string" msgstr "Kan inte läsa nyckelsträng" #: src/hed/libs/credential/Credential.cpp:700 #: src/hed/libs/credential/VOMSUtil.cpp:244 msgid "Failed to lock arccredential library in memory" msgstr "Misslyckades med att låsa arccredential-biblioteket i minnet" #: src/hed/libs/credential/Credential.cpp:712 msgid "Certificate verification succeeded" msgstr "Certifikatverifiering lyckades" #: src/hed/libs/credential/Credential.cpp:716 msgid "Certificate verification failed" msgstr "Certifikatverifiering misslyckades" #: src/hed/libs/credential/Credential.cpp:729 #: src/hed/libs/credential/Credential.cpp:747 #: src/hed/libs/credential/Credential.cpp:765 #: src/hed/libs/credential/Credential.cpp:996 #: src/hed/libs/credential/Credential.cpp:2368 #: src/hed/libs/credential/Credential.cpp:2397 msgid "Failed to initialize extensions member for Credential" msgstr "Misslyckades med att initiera tilläggsmedlem för referens" #: src/hed/libs/credential/Credential.cpp:808 #, c-format msgid "Unsupported proxy policy language is requested - %s" msgstr "Det begärda proxypolicyspråket stöds inte - %s" #: src/hed/libs/credential/Credential.cpp:820 #, c-format msgid "Unsupported proxy version is requested - %s" msgstr "Den begärda proxyversionen stöds inte - %s" #: src/hed/libs/credential/Credential.cpp:831 msgid "If you specify a policy you also need to specify a policy language" msgstr "Om du anger en policy måste du också ange ett policyspråk" #: src/hed/libs/credential/Credential.cpp:1001 msgid "Certificate/Proxy path is empty" msgstr "Certifikat/proxy-sökväg är tom" #: src/hed/libs/credential/Credential.cpp:1059 #: src/hed/libs/credential/Credential.cpp:2905 msgid "Failed to duplicate extension" msgstr "Misslyckades med att duplicera tillägg" #: src/hed/libs/credential/Credential.cpp:1063 msgid "Failed to add extension into credential extensions" msgstr "Misslyckades med att lägga till tillägg till referenstillägg" #: src/hed/libs/credential/Credential.cpp:1074 msgid "Certificate information collection failed" msgstr "Certifikatinformationsinsamling misslyckades" #: src/hed/libs/credential/Credential.cpp:1113 #: src/hed/libs/credential/Credential.cpp:1118 msgid "Can not convert string into ASN1_OBJECT" msgstr "Kan inte konvertera sträng till ASN1_OBJECT" #: src/hed/libs/credential/Credential.cpp:1125 msgid "Can not create ASN1_OCTET_STRING" msgstr "Kan inte skapa ASN1_OCTET_STRING" #: src/hed/libs/credential/Credential.cpp:1134 msgid "Can not allocate memory for extension for proxy certificate" msgstr "Kan inte allokera minne för tillägg för proxycertifikat" #: src/hed/libs/credential/Credential.cpp:1144 msgid "Can not create extension for proxy certificate" msgstr "Kan inte skapa tillägg för proxycertifikat" #: src/hed/libs/credential/Credential.cpp:1180 #: src/hed/libs/credential/Credential.cpp:1348 msgid "BN_set_word failed" msgstr "BN_set_word misslyckades" #: src/hed/libs/credential/Credential.cpp:1189 #: src/hed/libs/credential/Credential.cpp:1357 msgid "RSA_generate_key_ex failed" msgstr "RSA_generate_key_ex misslyckades" #: src/hed/libs/credential/Credential.cpp:1198 #: src/hed/libs/credential/Credential.cpp:1365 msgid "BN_new || RSA_new failed" msgstr "BN_new || RSA_new
misslyckades" #: src/hed/libs/credential/Credential.cpp:1209 msgid "Created RSA key, proceeding with request" msgstr "Skapade RSA-nyckel, fortsätter med begäran" #: src/hed/libs/credential/Credential.cpp:1214 msgid "pkey and rsa_key exist!" msgstr "pkey och rsa_key existerar!" #: src/hed/libs/credential/Credential.cpp:1217 msgid "Generate new X509 request!" msgstr "Generera ny X509-begäran" #: src/hed/libs/credential/Credential.cpp:1222 msgid "Setting subject name!" msgstr "Sätter subjekt-namn" #: src/hed/libs/credential/Credential.cpp:1230 #: src/hed/libs/credential/Credential.cpp:1444 msgid "PEM_write_bio_X509_REQ failed" msgstr "PEM_write_bio_X509_REQ misslyckades" #: src/hed/libs/credential/Credential.cpp:1260 #: src/hed/libs/credential/Credential.cpp:1301 #: src/hed/libs/credential/Credential.cpp:1476 #: src/hed/libs/credential/Credential.cpp:1496 msgid "Can not create BIO for request" msgstr "Kan inte skapa BIO för begäran" #: src/hed/libs/credential/Credential.cpp:1278 msgid "Failed to write request into string" msgstr "Misslyckades med att skriva begäran till sträng" #: src/hed/libs/credential/Credential.cpp:1305 #: src/hed/libs/credential/Credential.cpp:1310 #: src/hed/libs/credential/Credential.cpp:1500 msgid "Can not set writable file for request BIO" msgstr "Kan inte ange skrivbar fil för begärans BIO" #: src/hed/libs/credential/Credential.cpp:1316 #: src/hed/libs/credential/Credential.cpp:1505 msgid "Wrote request into a file" msgstr "Skrev begäran till en fil" #: src/hed/libs/credential/Credential.cpp:1318 #: src/hed/libs/credential/Credential.cpp:1508 msgid "Failed to write request into a file" msgstr "Misslyckades med att skriva begäran till en fil" #: src/hed/libs/credential/Credential.cpp:1338 msgid "The credential's private key has already been initialized" msgstr "Referensens privata nyckel har redan initierats" #: src/hed/libs/credential/Credential.cpp:1386 msgid "" "Can not duplicate the subject name for the self-signing proxy certificate " "request" msgstr "" "Kan inte duplicera subjektnamnet för den självsignerande " "proxycertifikatbegäran" #: src/hed/libs/credential/Credential.cpp:1396 msgid "Can not create a new X509_NAME_ENTRY for the proxy certificate request" msgstr "Kan inte skapa en ny X509_NAME_ENTRY för proxycertifikatbegäran" #: src/hed/libs/credential/Credential.cpp:1414 #: src/hed/libs/credential/Credential.cpp:1421 #: src/hed/libs/credential/Credential.cpp:1999 #: src/hed/libs/credential/Credential.cpp:2007 msgid "" "Can not convert PROXY_CERT_INFO_EXTENSION struct from internal to DER " "encoded format" msgstr "" "Kan inte konvertera PROXY_CERT_INFO_EXTENSION-struct frÃ¥n internt till DER-" "kodat format" #: src/hed/libs/credential/Credential.cpp:1451 msgid "Can't convert X509 request from internal to DER encoded format" msgstr "Kan inte konvertera X509-begäran frÃ¥n internt till DER-kodat format" #: src/hed/libs/credential/Credential.cpp:1461 msgid "Can not generate X509 request" msgstr "Kan inte generera X509-begäran" #: src/hed/libs/credential/Credential.cpp:1463 msgid "Can not set private key" msgstr "Kan inte ange privat nyckel" #: src/hed/libs/credential/Credential.cpp:1561 msgid "Failed to get private key" msgstr "Misslyckades med att erhÃ¥lla privat nyckel" #: src/hed/libs/credential/Credential.cpp:1580 msgid "Failed to get public key from RSA object" msgstr "Misslyckades med att erhÃ¥lla publik nyckel frÃ¥n RSA-objekt" #: src/hed/libs/credential/Credential.cpp:1588 msgid "Failed to get public key from X509 object" msgstr "Misslyckades med att 
erhålla publik nyckel från X509-objekt" #: src/hed/libs/credential/Credential.cpp:1595 msgid "Failed to get public key" msgstr "Misslyckades med att erhålla publik nyckel" #: src/hed/libs/credential/Credential.cpp:1633 #, c-format msgid "Certiticate chain number %d" msgstr "Certifikatkedja nummer %d" #: src/hed/libs/credential/Credential.cpp:1661 msgid "NULL BIO passed to InquireRequest" msgstr "NULL BIO skickad till InquireRequest" #: src/hed/libs/credential/Credential.cpp:1664 msgid "PEM_read_bio_X509_REQ failed" msgstr "PEM_read_bio_X509_REQ misslyckades" #: src/hed/libs/credential/Credential.cpp:1668 msgid "d2i_X509_REQ_bio failed" msgstr "d2i_X509_REQ_bio misslyckades" #: src/hed/libs/credential/Credential.cpp:1690 msgid "Missing data in DER encoded PROXY_CERT_INFO_EXTENSION extension" msgstr "Saknar data i DER-kodat PROXY_CERT_INFO_EXTENSION-tillägg" #: src/hed/libs/credential/Credential.cpp:1702 msgid "Can not create PROXY_CERT_INFO_EXTENSION extension" msgstr "Kan inte skapa PROXY_CERT_INFO_EXTENSION-tillägg" #: src/hed/libs/credential/Credential.cpp:1712 msgid "Can not get policy from PROXY_CERT_INFO_EXTENSION extension" msgstr "Kan inte erhålla policy från PROXY_CERT_INFO_EXTENSION-tillägg" #: src/hed/libs/credential/Credential.cpp:1716 msgid "Can not get policy language from PROXY_CERT_INFO_EXTENSION extension" msgstr "Kan inte erhålla policy-språk från PROXY_CERT_INFO_EXTENSION-tillägg" #: src/hed/libs/credential/Credential.cpp:1732 #, c-format msgid "Cert Type: %d" msgstr "Certifikattyp: %d" #: src/hed/libs/credential/Credential.cpp:1745 #: src/hed/libs/credential/Credential.cpp:1764 msgid "Can not create BIO for parsing request" msgstr "Kan inte skapa BIO för att tolka begäran" #: src/hed/libs/credential/Credential.cpp:1750 msgid "Read request from a string" msgstr "Läste begäran från en sträng" #: src/hed/libs/credential/Credential.cpp:1753 msgid "Failed to read request from a string" msgstr "Misslyckades med att läsa begäran från en sträng" #: src/hed/libs/credential/Credential.cpp:1768 msgid "Can not set readable file for request BIO" msgstr "Kan inte ange läsbar fil för begärans BIO" #: src/hed/libs/credential/Credential.cpp:1773 msgid "Read request from a file" msgstr "Läste begäran från en fil" #: src/hed/libs/credential/Credential.cpp:1776 msgid "Failed to read request from a file" msgstr "Misslyckades med att läsa begäran från en fil" #: src/hed/libs/credential/Credential.cpp:1816 msgid "Can not convert private key to DER format" msgstr "Kan inte konvertera privat nyckel till DER-format" #: src/hed/libs/credential/Credential.cpp:1980 msgid "Credential is not initialized" msgstr "Referens har inte initierats" #: src/hed/libs/credential/Credential.cpp:1986 msgid "Failed to duplicate X509 structure" msgstr "Misslyckades med att duplicera X509-struktur" #: src/hed/libs/credential/Credential.cpp:1991 msgid "Failed to initialize X509 structure" msgstr "Misslyckades med att initiera X509-struktur" #: src/hed/libs/credential/Credential.cpp:2014 msgid "Can not create extension for PROXY_CERT_INFO" msgstr "Kan inte skapa tillägg för PROXY_CERT_INFO" #: src/hed/libs/credential/Credential.cpp:2018 #: src/hed/libs/credential/Credential.cpp:2066 msgid "Can not add X509 extension to proxy cert" msgstr "Kan inte lägga till X509-tillägg till proxycertifikat" #: src/hed/libs/credential/Credential.cpp:2034 msgid "Can not convert keyUsage struct from DER encoded format" msgstr "Kan inte konvertera keyUsage-struct från DER-kodat format" #:
src/hed/libs/credential/Credential.cpp:2046 #: src/hed/libs/credential/Credential.cpp:2055 msgid "Can not convert keyUsage struct from internal to DER format" msgstr "Kan inte konvertera keyUsage-struct från internt till DER-format" #: src/hed/libs/credential/Credential.cpp:2062 msgid "Can not create extension for keyUsage" msgstr "Kan inte skapa tillägg för keyUsage" #: src/hed/libs/credential/Credential.cpp:2075 msgid "Can not get extended KeyUsage extension from issuer certificate" msgstr "Kan inte erhålla utökat KeyUsage-tillägg från utfärdarcertifikatet" #: src/hed/libs/credential/Credential.cpp:2080 msgid "Can not copy extended KeyUsage extension" msgstr "Kan inte kopiera det utökade KeyUsage-tillägget" #: src/hed/libs/credential/Credential.cpp:2085 msgid "Can not add X509 extended KeyUsage extension to new proxy certificate" msgstr "" "Kan inte lägga till X509-utökat KeyUsage-tillägg till det nya " "proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2095 msgid "Can not compute digest of public key" msgstr "Kan inte beräkna digest för publik nyckel" #: src/hed/libs/credential/Credential.cpp:2106 msgid "Can not copy the subject name from issuer for proxy certificate" msgstr "Kan inte kopiera subjektnamnet från utfärdaren för proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2112 msgid "Can not create name entry CN for proxy certificate" msgstr "Kan inte skapa namnpost CN för proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2117 msgid "Can not set CN in proxy certificate" msgstr "Kan inte ange CN i proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2125 msgid "Can not set issuer's subject for proxy certificate" msgstr "Kan inte ange utfärdarens subjekt för proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2130 msgid "Can not set version number for proxy certificate" msgstr "Kan inte ange versionsnummer för proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2138 msgid "Can not set serial number for proxy certificate" msgstr "Kan inte ange serienummer för proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2144 msgid "Can not duplicate serial number for proxy certificate" msgstr "Kan inte duplicera serienummer för proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2150 msgid "Can not set the lifetime for proxy certificate" msgstr "Kan inte ange livstid för proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2154 msgid "Can not set pubkey for proxy certificate" msgstr "Kan inte ange publik nyckel för proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2170 #: src/hed/libs/credential/Credential.cpp:2795 msgid "The credential to be signed is NULL" msgstr "Referensen som skall signeras är NULL" #: src/hed/libs/credential/Credential.cpp:2174 #: src/hed/libs/credential/Credential.cpp:2799 msgid "The credential to be signed contains no request" msgstr "Referensen som skall signeras innehåller ingen begäran" #: src/hed/libs/credential/Credential.cpp:2178 #: src/hed/libs/credential/Credential.cpp:2803 msgid "The BIO for output is NULL" msgstr "Utdata-BIO är NULL" #: src/hed/libs/credential/Credential.cpp:2192 #: src/hed/libs/credential/Credential.cpp:2810 msgid "Error when extracting public key from request" msgstr "Fel när publik nyckel extraheras från begäran" #: src/hed/libs/credential/Credential.cpp:2197 #: src/hed/libs/credential/Credential.cpp:2814 msgid "Failed to verify the request" msgstr "Misslyckades med att verifiera begäran" #: src/hed/libs/credential/Credential.cpp:2201 msgid
"Failed to add issuer's extension into proxy" msgstr "Misslyckades med att lägga till utfärdarens tillägg till proxyn" #: src/hed/libs/credential/Credential.cpp:2225 msgid "Failed to find extension" msgstr "Misslyckades med att hitta tillägg" #: src/hed/libs/credential/Credential.cpp:2237 msgid "Can not get the issuer's private key" msgstr "Kan inte erhÃ¥lla utfärdarens privata nyckel" #: src/hed/libs/credential/Credential.cpp:2244 #: src/hed/libs/credential/Credential.cpp:2846 msgid "There is no digest in issuer's private key object" msgstr "Det finns inget digest i utfärdarens privata-nyckel-objekt" #: src/hed/libs/credential/Credential.cpp:2249 #: src/hed/libs/credential/Credential.cpp:2850 #, c-format msgid "%s is an unsupported digest type" msgstr "Digesttypen %s stöds inte" #: src/hed/libs/credential/Credential.cpp:2260 #, c-format msgid "" "The signing algorithm %s is not allowed,it should be SHA1 or SHA2 to sign " "certificate requests" msgstr "" "Signeringsalgoritmen %s är ej tillÃ¥ten, den skall vara SHA1 eller SHA2 för " "att signera certifikatbegärningar" #: src/hed/libs/credential/Credential.cpp:2266 msgid "Failed to sign the proxy certificate" msgstr "Misslyckades med att signera proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2268 msgid "Succeeded to sign the proxy certificate" msgstr "Lyckades signera proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2273 msgid "Failed to verify the signed certificate" msgstr "Misslyckades med att verifiera det signerade certifikatet" #: src/hed/libs/credential/Credential.cpp:2275 msgid "Succeeded to verify the signed certificate" msgstr "Lyckades verifiera det signerade certifikatet" #: src/hed/libs/credential/Credential.cpp:2280 #: src/hed/libs/credential/Credential.cpp:2289 msgid "Output the proxy certificate" msgstr "Skriv ut proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2283 msgid "Can not convert signed proxy cert into PEM format" msgstr "Kan inte konvertera det signerade proxycertifikatet till PEM-format" #: src/hed/libs/credential/Credential.cpp:2292 msgid "Can not convert signed proxy cert into DER format" msgstr "Kan inte konvertera det signerade proxycertifikatet till DER-format" #: src/hed/libs/credential/Credential.cpp:2308 #: src/hed/libs/credential/Credential.cpp:2331 msgid "Can not create BIO for signed proxy certificate" msgstr "Kan inte skapa BIO för det signerade proxycertifikatet" #: src/hed/libs/credential/Credential.cpp:2335 msgid "Can not set writable file for signed proxy certificate BIO" msgstr "Kan inte ange skrivbar fil för det signerade proxycertifikatets BIO" #: src/hed/libs/credential/Credential.cpp:2340 msgid "Wrote signed proxy certificate into a file" msgstr "Skrev det signerade proxycertifikatet till en fil" #: src/hed/libs/credential/Credential.cpp:2343 msgid "Failed to write signed proxy certificate into a file" msgstr "" "Misslyckades med att skriva det signerade proxycertifikatet till en fil" #: src/hed/libs/credential/Credential.cpp:2377 #: src/hed/libs/credential/Credential.cpp:2415 #, c-format msgid "ERROR: %s" msgstr "Fel: %s" #: src/hed/libs/credential/Credential.cpp:2423 #, c-format msgid "SSL error: %s, libs: %s, func: %s, reason: %s" msgstr "SSL-fel: %s, bibliotek: %s, funktion: %s, anledning: %s" #: src/hed/libs/credential/Credential.cpp:2468 #, c-format msgid "unable to load number from: %s" msgstr "kunde inre ladda in nummer frÃ¥n: %s" #: src/hed/libs/credential/Credential.cpp:2473 msgid "error converting number from bin to BIGNUM" msgstr "fel vid 
konvertering av nummer från bin till BIGNUM" #: src/hed/libs/credential/Credential.cpp:2500 msgid "file name too long" msgstr "filnamn för långt" #: src/hed/libs/credential/Credential.cpp:2523 msgid "error converting serial to ASN.1 format" msgstr "fel vid konvertering av serienummer till ASN.1-format" #: src/hed/libs/credential/Credential.cpp:2556 #, c-format msgid "load serial from %s failure" msgstr "hämta serienummer från %s misslyckades" #: src/hed/libs/credential/Credential.cpp:2561 msgid "add_word failure" msgstr "add_word misslyckades" #: src/hed/libs/credential/Credential.cpp:2566 #, c-format msgid "save serial to %s failure" msgstr "spara serienummer till %s misslyckades" #: src/hed/libs/credential/Credential.cpp:2586 msgid "Error initialising X509 store" msgstr "Fel vid initiering av X509-lager" #: src/hed/libs/credential/Credential.cpp:2593 msgid "Out of memory when generate random serial" msgstr "Minnet tog slut när slump-serienummer genererades" #: src/hed/libs/credential/Credential.cpp:2605 msgid "CA certificate and CA private key do not match" msgstr "CA-certifikat och CA-privat-nyckel matchar inte" #: src/hed/libs/credential/Credential.cpp:2629 #, c-format msgid "Failed to load extension section: %s" msgstr "Misslyckades med att hämta tilläggssektion: %s" #: src/hed/libs/credential/Credential.cpp:2666 msgid "malloc error" msgstr "Minnesallokeringsfel" #: src/hed/libs/credential/Credential.cpp:2670 msgid "Subject does not start with '/'" msgstr "Subjekt börjar inte med '/'" #: src/hed/libs/credential/Credential.cpp:2686 #: src/hed/libs/credential/Credential.cpp:2707 msgid "escape character at end of string" msgstr "escape-tecken vid strängens slut" #: src/hed/libs/credential/Credential.cpp:2698 #, c-format msgid "" "end of string encountered while processing type of subject name element #%d" msgstr "" "strängens slut påträffades medan typ för subjekt-namn-element #%d " "processerades" #: src/hed/libs/credential/Credential.cpp:2735 #, c-format msgid "Subject Attribute %s has no known NID, skipped" msgstr "Subjektattribut %s har ingen känd NID, hoppar över" #: src/hed/libs/credential/Credential.cpp:2739 #, c-format msgid "No value provided for Subject Attribute %s skipped" msgstr "Inget värde tillhandahållet för subjektattribut %s, hoppar över" #: src/hed/libs/credential/Credential.cpp:2780 msgid "Failed to set the pubkey for X509 object by using pubkey from X509_REQ" msgstr "" "Misslyckades med att ange publik nyckel för X509-objekt genom att använda " "publik nyckel från X509_REQ" #: src/hed/libs/credential/Credential.cpp:2790 msgid "The private key for signing is not initialized" msgstr "Privata nyckeln för signering har inte initierats" #: src/hed/libs/credential/Credential.cpp:2869 #, c-format msgid "Error when loading the extension config file: %s" msgstr "Fel vid inladdning av tilläggsinställningsfilen: %s" #: src/hed/libs/credential/Credential.cpp:2873 #, c-format msgid "Error when loading the extension config file: %s on line: %d" msgstr "Fel vid inladdning av tilläggsinställningsfilen: %s på rad: %d" #: src/hed/libs/credential/Credential.cpp:2921 msgid "Can not sign a EEC" msgstr "Kan inte signera ett EEC" #: src/hed/libs/credential/Credential.cpp:2925 msgid "Output EEC certificate" msgstr "Skriv ut EEC-certifikatet" #: src/hed/libs/credential/Credential.cpp:2928 msgid "Can not convert signed EEC cert into DER format" msgstr "Kan inte konvertera det signerade EEC-certifikatet till DER-format" #: src/hed/libs/credential/Credential.cpp:2942 #:
src/hed/libs/credential/Credential.cpp:2961 msgid "Can not create BIO for signed EEC certificate" msgstr "Kan inte skapa BIO för det signerade EEC-certifikatet" #: src/hed/libs/credential/Credential.cpp:2965 msgid "Can not set writable file for signed EEC certificate BIO" msgstr "Kan inte ange skrivbar fil för det signerade EEC-certifikatets BIO" #: src/hed/libs/credential/Credential.cpp:2970 msgid "Wrote signed EEC certificate into a file" msgstr "Skrev det signerade EEC-certifikatet till en fil" #: src/hed/libs/credential/Credential.cpp:2973 msgid "Failed to write signed EEC certificate into a file" msgstr "Misslyckades med att skriva det signerade EEC-certifikatet till en fil" #: src/hed/libs/credential/NSSUtil.cpp:143 msgid "Error writing raw certificate" msgstr "Fel vid skrivning av raw certifikat" #: src/hed/libs/credential/NSSUtil.cpp:222 msgid "Failed to add RFC proxy OID" msgstr "Misslyckades med att lägga till RFC-proxy-OID" #: src/hed/libs/credential/NSSUtil.cpp:225 #, c-format msgid "Succeeded to add RFC proxy OID, tag %d is returned" msgstr "Lyckades lägga till RFC-proxy-OID, tagg %d returnerades" #: src/hed/libs/credential/NSSUtil.cpp:231 msgid "Failed to add anyLanguage OID" msgstr "Misslyckades med att lägga till anyLanguage-OID" #: src/hed/libs/credential/NSSUtil.cpp:234 #, c-format msgid "Succeeded to add anyLanguage OID, tag %d is returned" msgstr "Lyckades lägga till anyLanguage-OID, tagg %d returnerades" #: src/hed/libs/credential/NSSUtil.cpp:240 msgid "Failed to add inheritAll OID" msgstr "Misslyckades med att lägga till inheritAll-OID" #: src/hed/libs/credential/NSSUtil.cpp:243 #, c-format msgid "Succeeded to add inheritAll OID, tag %d is returned" msgstr "Lyckades lägga till inheritAll-OID, tagg %d returnerades" #: src/hed/libs/credential/NSSUtil.cpp:249 msgid "Failed to add Independent OID" msgstr "Misslyckades med att lägga till Independent-OID" #: src/hed/libs/credential/NSSUtil.cpp:252 #, c-format msgid "Succeeded to add Independent OID, tag %d is returned" msgstr "Lyckades lägga till Independent-OID, tagg %d returnerades" #: src/hed/libs/credential/NSSUtil.cpp:258 msgid "Failed to add VOMS AC sequence OID" msgstr "Misslyckades med att lägga till VOMS-AC-sekvens-OID" #: src/hed/libs/credential/NSSUtil.cpp:261 #, c-format msgid "Succeeded to add VOMS AC sequence OID, tag %d is returned" msgstr "Lyckades lägga till VOMS-AC-sekvens-OID, tagg %d returnerades" #: src/hed/libs/credential/NSSUtil.cpp:290 #, c-format msgid "NSS initialization failed on certificate database: %s" msgstr "NSS-initiering misslyckades på certifikatdatabas: %s" #: src/hed/libs/credential/NSSUtil.cpp:301 msgid "Succeeded to initialize NSS" msgstr "Lyckades med att initiera NSS" #: src/hed/libs/credential/NSSUtil.cpp:323 #, c-format msgid "Failed to read attribute %x from private key." msgstr "Misslyckades med att läsa attribut %x från privat nyckel."
#: src/hed/libs/credential/NSSUtil.cpp:375 msgid "Succeeded to get credential" msgstr "Lyckades erhålla referens" #: src/hed/libs/credential/NSSUtil.cpp:376 msgid "Failed to get credential" msgstr "Misslyckades med att erhålla referens" #: src/hed/libs/credential/NSSUtil.cpp:438 msgid "p12 file is empty" msgstr "p12-fil är tom" #: src/hed/libs/credential/NSSUtil.cpp:448 msgid "Unable to write to p12 file" msgstr "Kunde inte skriva till p12-fil" #: src/hed/libs/credential/NSSUtil.cpp:464 msgid "Failed to open p12 file" msgstr "Misslyckades med att öppna p12-fil" #: src/hed/libs/credential/NSSUtil.cpp:492 msgid "Failed to allocate p12 context" msgstr "Misslyckades med att allokera p12-kontext" #: src/hed/libs/credential/NSSUtil.cpp:1200 msgid "Failed to find issuer certificate for proxy certificate" msgstr "Misslyckades med att hitta utfärdarcertifikat för proxycertifikat" #: src/hed/libs/credential/NSSUtil.cpp:1351 #, c-format msgid "Failed to authenticate to PKCS11 slot %s" msgstr "Misslyckades med att autentisera till PKCS11 slot %s" #: src/hed/libs/credential/NSSUtil.cpp:1357 #, c-format msgid "Failed to find certificates by nickname: %s" msgstr "Misslyckades med att hitta certifikat med smeknamn: %s" #: src/hed/libs/credential/NSSUtil.cpp:1362 #, c-format msgid "No user certificate by nickname %s found" msgstr "Hittade inget användarcertifikat med smeknamn %s" #: src/hed/libs/credential/NSSUtil.cpp:1375 #: src/hed/libs/credential/NSSUtil.cpp:1411 msgid "Certificate does not have a slot" msgstr "Certifikat har ingen slot" #: src/hed/libs/credential/NSSUtil.cpp:1381 msgid "Failed to create export context" msgstr "Misslyckades med att skapa export-kontext" #: src/hed/libs/credential/NSSUtil.cpp:1396 msgid "PKCS12 output password not provided" msgstr "PKCS12 output-lösenord inte tillhandahållet" #: src/hed/libs/credential/NSSUtil.cpp:1403 msgid "PKCS12 add password integrity failed" msgstr "PKCS12 lägg till lösenordsintegritet misslyckades" #: src/hed/libs/credential/NSSUtil.cpp:1424 msgid "Failed to create key or certificate safe" msgstr "Misslyckades med att skapa nyckel- eller certifikat-safe" #: src/hed/libs/credential/NSSUtil.cpp:1440 msgid "Failed to add certificate and key" msgstr "Misslyckades med att lägga till certifikat och nyckel" #: src/hed/libs/credential/NSSUtil.cpp:1449 #, c-format msgid "Failed to initialize PKCS12 file: %s" msgstr "Misslyckades med att initiera PKCS12-fil: %s" #: src/hed/libs/credential/NSSUtil.cpp:1454 msgid "Failed to encode PKCS12" msgstr "Misslyckades med att koda PKCS12" #: src/hed/libs/credential/NSSUtil.cpp:1457 msgid "Succeeded to export PKCS12" msgstr "Lyckades exportera PKCS12" #: src/hed/libs/credential/NSSUtil.cpp:1485 #, c-format msgid "" "There is no certificate named %s found, the certificate could be removed " "when generating CSR" msgstr "" "Det finns inget certifikat med namn %s, certifikatet kan tas bort när CSR " "genereras" #: src/hed/libs/credential/NSSUtil.cpp:1491 msgid "Failed to delete certificate" msgstr "Misslyckades med att ta bort certifikat" #: src/hed/libs/credential/NSSUtil.cpp:1505 msgid "The name of the private key to delete is empty" msgstr "Namnet på den privata nyckeln som ska tas bort är tomt" #: src/hed/libs/credential/NSSUtil.cpp:1510 #: src/hed/libs/credential/NSSUtil.cpp:2939 #: src/hed/libs/credential/NSSUtil.cpp:2956 #, c-format msgid "Failed to authenticate to token %s" msgstr "Misslyckades med att autentisera till token %s" #: src/hed/libs/credential/NSSUtil.cpp:1517 #, c-format msgid "No private key with nickname %s
exist in NSS database" msgstr "Ingen privat nyckel med smeknamn %s existerar i NSS-databasen" #: src/hed/libs/credential/NSSUtil.cpp:1550 msgid "Failed to delete private key and certificate" msgstr "Misslyckades med att ta bort privat nyckel och certifikat" #: src/hed/libs/credential/NSSUtil.cpp:1560 msgid "Failed to delete private key" msgstr "Misslyckades med att ta bort privat nyckel" #: src/hed/libs/credential/NSSUtil.cpp:1571 #, c-format msgid "Can not find key with name: %s" msgstr "Kan inte hitta nyckel med namn: %s" #: src/hed/libs/credential/NSSUtil.cpp:1599 msgid "Can not read PEM private key: probably bad password" msgstr "Kan inte läsa PEM privat nyckel: troligen fel lösenord" #: src/hed/libs/credential/NSSUtil.cpp:1601 msgid "Can not read PEM private key: failed to decrypt" msgstr "Kan inte läsa PEM privat nyckel: misslyckades med att avkoda" #: src/hed/libs/credential/NSSUtil.cpp:1603 #: src/hed/libs/credential/NSSUtil.cpp:1605 msgid "Can not read PEM private key: failed to obtain password" msgstr "Kan inte läsa PEM privat nyckel: misslyckades med att erhålla lösenord" #: src/hed/libs/credential/NSSUtil.cpp:1606 msgid "Can not read PEM private key" msgstr "Kan inte läsa PEM privat nyckel" #: src/hed/libs/credential/NSSUtil.cpp:1613 msgid "Failed to convert EVP_PKEY to PKCS8" msgstr "Misslyckades med att konvertera EVP_PKEY till PKCS8" #: src/hed/libs/credential/NSSUtil.cpp:1650 msgid "Failed to load private key" msgstr "Misslyckades med att ladda in privat nyckel" #: src/hed/libs/credential/NSSUtil.cpp:1651 msgid "Succeeded to load PrivateKeyInfo" msgstr "Lyckades ladda in PrivateKeyInfo" #: src/hed/libs/credential/NSSUtil.cpp:1654 msgid "Failed to convert PrivateKeyInfo to EVP_PKEY" msgstr "Misslyckades med att konvertera PrivateKeyInfo till EVP_PKEY" #: src/hed/libs/credential/NSSUtil.cpp:1655 msgid "Succeeded to convert PrivateKeyInfo to EVP_PKEY" msgstr "Lyckades konvertera PrivateKeyInfo till EVP_PKEY" #: src/hed/libs/credential/NSSUtil.cpp:1692 msgid "Failed to import private key" msgstr "Misslyckades med att importera privat nyckel" #: src/hed/libs/credential/NSSUtil.cpp:1695 msgid "Succeeded to import private key" msgstr "Lyckades importera privat nyckel" #: src/hed/libs/credential/NSSUtil.cpp:1708 #: src/hed/libs/credential/NSSUtil.cpp:1750 #: src/hed/libs/credential/NSSUtil.cpp:2889 msgid "Failed to authenticate to key database" msgstr "Misslyckades med att autentisera till nyckeldatabas" #: src/hed/libs/credential/NSSUtil.cpp:1717 msgid "Succeeded to generate public/private key pair" msgstr "Lyckades generera publik/privat nyckelpar" #: src/hed/libs/credential/NSSUtil.cpp:1719 msgid "Failed to generate public/private key pair" msgstr "Misslyckades med att generera publik/privat nyckelpar" #: src/hed/libs/credential/NSSUtil.cpp:1724 msgid "Failed to export private key" msgstr "Misslyckades med att exportera privat nyckel" #: src/hed/libs/credential/NSSUtil.cpp:1791 msgid "Failed to create subject name" msgstr "Misslyckades med att skapa subjektnamn" #: src/hed/libs/credential/NSSUtil.cpp:1807 msgid "Failed to create certificate request" msgstr "Misslyckades med att skapa certifikatbegäran" #: src/hed/libs/credential/NSSUtil.cpp:1820 msgid "Failed to call PORT_NewArena" msgstr "Misslyckades med att anropa PORT_NewArena" #: src/hed/libs/credential/NSSUtil.cpp:1828 msgid "Failed to encode the certificate request with DER format" msgstr "Misslyckades med att koda certifikatbegäran med DER-format" #: src/hed/libs/credential/NSSUtil.cpp:1835 msgid "Unknown key or hash
type" msgstr "Okänd nyckel- eller hashtyp" #: src/hed/libs/credential/NSSUtil.cpp:1841 msgid "Failed to sign the certificate request" msgstr "Misslyckades med att signera certifikatbegäran" #: src/hed/libs/credential/NSSUtil.cpp:1857 msgid "Failed to output the certificate request as ASCII format" msgstr "Misslyckades med att skriva ut certifikatbegäran i ASCII-format" #: src/hed/libs/credential/NSSUtil.cpp:1866 msgid "Failed to output the certificate request as DER format" msgstr "Misslyckades med att skriva ut certifikatbegäran i DER-format" #: src/hed/libs/credential/NSSUtil.cpp:1875 #, c-format msgid "Succeeded to output the certificate request into %s" msgstr "Lyckades skriva ut certifikatbegäran till %s" #: src/hed/libs/credential/NSSUtil.cpp:1914 #: src/hed/libs/credential/NSSUtil.cpp:1951 msgid "Failed to read data from input file" msgstr "Misslyckades med att läsa data frÃ¥n indatabuffer" #: src/hed/libs/credential/NSSUtil.cpp:1930 msgid "Input is without trailer\n" msgstr "Indata är utan trailer\n" #: src/hed/libs/credential/NSSUtil.cpp:1941 msgid "Failed to convert ASCII to DER" msgstr "Misslyckades med att konvertera ASCII till DER" #: src/hed/libs/credential/NSSUtil.cpp:1992 msgid "Certificate request is invalid" msgstr "Certifikatbegäran är ogiltig" #: src/hed/libs/credential/NSSUtil.cpp:2212 #, c-format msgid "The policy language: %s is not supported" msgstr "Policy-sprÃ¥ket: %s stöds inte" #: src/hed/libs/credential/NSSUtil.cpp:2220 #: src/hed/libs/credential/NSSUtil.cpp:2245 #: src/hed/libs/credential/NSSUtil.cpp:2268 #: src/hed/libs/credential/NSSUtil.cpp:2290 msgid "Failed to new arena" msgstr "Misslyckades med ny arena" #: src/hed/libs/credential/NSSUtil.cpp:2229 #: src/hed/libs/credential/NSSUtil.cpp:2254 msgid "Failed to create path length" msgstr "Misslyckades med att skapa certifikatkedjelängd" #: src/hed/libs/credential/NSSUtil.cpp:2232 #: src/hed/libs/credential/NSSUtil.cpp:2257 #: src/hed/libs/credential/NSSUtil.cpp:2277 #: src/hed/libs/credential/NSSUtil.cpp:2299 msgid "Failed to create policy language" msgstr "Misslyckades med att skapa policy-sprÃ¥k" #: src/hed/libs/credential/NSSUtil.cpp:2700 #, c-format msgid "Failed to parse certificate request from CSR file %s" msgstr "Misslyckades med att tolka certifikatbegäran frÃ¥n CSR-fil %s" #: src/hed/libs/credential/NSSUtil.cpp:2707 #, c-format msgid "Can not find certificate with name %s" msgstr "Kan inte hitta certifikat med namn %s" #: src/hed/libs/credential/NSSUtil.cpp:2739 msgid "Can not allocate memory" msgstr "Kan inte allokera minne" #: src/hed/libs/credential/NSSUtil.cpp:2747 #, c-format msgid "Proxy subject: %s" msgstr "Proxysubjekt: %s" #: src/hed/libs/credential/NSSUtil.cpp:2764 msgid "Failed to start certificate extension" msgstr "Misslyckades med att pÃ¥börja certifikattillägg" #: src/hed/libs/credential/NSSUtil.cpp:2769 msgid "Failed to add key usage extension" msgstr "Misslyckades med att lägga till nyckelanvändningstillägg" #: src/hed/libs/credential/NSSUtil.cpp:2774 msgid "Failed to add proxy certificate information extension" msgstr "Misslyckades med att lägga till proxycertifikatsinformationstillägg" #: src/hed/libs/credential/NSSUtil.cpp:2778 msgid "Failed to add voms AC extension" msgstr "Misslyckades med att lägga till VOMS-AC-tillägg" #: src/hed/libs/credential/NSSUtil.cpp:2798 msgid "Failed to retrieve private key for issuer" msgstr "Misslyckades med att hämta privat nyckel för utfärdare" #: src/hed/libs/credential/NSSUtil.cpp:2805 msgid "Unknown key or hash type of issuer" msgstr "Okänd 
nyckel- eller hashtyp för utfärdare" #: src/hed/libs/credential/NSSUtil.cpp:2811 msgid "Failed to set signature algorithm ID" msgstr "Misslyckades med att sätta signeringsalgoritm" #: src/hed/libs/credential/NSSUtil.cpp:2823 msgid "Failed to encode certificate" msgstr "Misslyckades med att koda certifikat" #: src/hed/libs/credential/NSSUtil.cpp:2829 msgid "Failed to allocate item for certificate data" msgstr "Misslyckades med att allokera minne för certifikatdata" #: src/hed/libs/credential/NSSUtil.cpp:2835 msgid "Failed to sign encoded certificate data" msgstr "Misslyckades med att signera kodad certifikatdata" #: src/hed/libs/credential/NSSUtil.cpp:2844 #, c-format msgid "Failed to open file %s" msgstr "Misslyckades med att öppna fil %s" #: src/hed/libs/credential/NSSUtil.cpp:2855 #, c-format msgid "Succeeded to output certificate to %s" msgstr "Lyckades skriva ut certifikat till %s" #: src/hed/libs/credential/NSSUtil.cpp:2896 #, c-format msgid "Failed to open input certificate file %s" msgstr "Misslyckades med att öppna indata-certifikatfil %s" #: src/hed/libs/credential/NSSUtil.cpp:2913 msgid "Failed to read input certificate file" msgstr "Misslyckades med att läsa indata-certifikatfil" #: src/hed/libs/credential/NSSUtil.cpp:2918 msgid "Failed to get certificate from certificate file" msgstr "Misslyckades med att erhÃ¥lla certifikat frÃ¥n certifikatfil" #: src/hed/libs/credential/NSSUtil.cpp:2925 msgid "Failed to allocate certificate trust" msgstr "Misslyckades med att allokera certifikat-tillit" #: src/hed/libs/credential/NSSUtil.cpp:2930 msgid "Failed to decode trust string" msgstr "Misslyckades med att avkoda tillitssträng" #: src/hed/libs/credential/NSSUtil.cpp:2944 #: src/hed/libs/credential/NSSUtil.cpp:2961 msgid "Failed to add certificate to token or database" msgstr "Misslyckades med att lägga till certifikat till token eller databas" #: src/hed/libs/credential/NSSUtil.cpp:2947 #: src/hed/libs/credential/NSSUtil.cpp:2950 msgid "Succeeded to import certificate" msgstr "Lyckades importera certifikat" #: src/hed/libs/credential/NSSUtil.cpp:2964 #: src/hed/libs/credential/NSSUtil.cpp:2967 #, c-format msgid "Succeeded to change trusts to: %s" msgstr "Lyckades ändra tillit till: %s" #: src/hed/libs/credential/NSSUtil.cpp:2994 #, c-format msgid "Failed to import private key from file: %s" msgstr "Misslyckades med att importera privat nyckel frÃ¥n fil: %s" #: src/hed/libs/credential/NSSUtil.cpp:2996 #, c-format msgid "Failed to import certificate from file: %s" msgstr "Misslyckades med att importera certifikat frÃ¥n fil: %s" #: src/hed/libs/credential/VOMSConfig.cpp:142 #, c-format msgid "" "ERROR: VOMS configuration line contains too many tokens. Expecting 5 or 6. " "Line was: %s" msgstr "" "Fel: VOMS-inställningsrad innehÃ¥ller för mÃ¥nga token. Förväntade 5 eller 6. " "Raden är: %s" #: src/hed/libs/credential/VOMSConfig.cpp:158 #, c-format msgid "" "ERROR: file tree is too deep while scanning VOMS configuration. Max allowed " "nesting is %i." msgstr "" "Fel: filträd är för djupt vid skanning av VOMS-inställningar. Max tillÃ¥ten " "nestning är %i." #: src/hed/libs/credential/VOMSConfig.cpp:176 #, c-format msgid "ERROR: failed to read file %s while scanning VOMS configuration." msgstr "" "Fel: misslyckades med att läsa fil %s vid skanning av VOMS-inställningar." #: src/hed/libs/credential/VOMSConfig.cpp:181 #, c-format msgid "" "ERROR: VOMS configuration file %s contains too many lines. Max supported " "number is %i." 
msgstr "" "Fel: VOMS-inställningsfil %s innehÃ¥ller för mÃ¥nga rader. Max antal som stöds " "är %i." #: src/hed/libs/credential/VOMSConfig.cpp:188 #, c-format msgid "" "ERROR: VOMS configuration file %s contains too long line(s). Max supported " "length is %i characters." msgstr "" "Fel: VOMS-inställningsfil %s innehÃ¥ller för lÃ¥ng(a) rad(er). Max längd som " "stöds är %i tecken." #: src/hed/libs/credential/VOMSUtil.cpp:171 #, c-format msgid "Failed to create OpenSSL object %s %s - %u %s" msgstr "Misslyckades med att skapa OpenSSL-objekt %s %s - %u %s" #: src/hed/libs/credential/VOMSUtil.cpp:179 #, c-format msgid "Failed to obtain OpenSSL identifier for %s" msgstr "Misslyckades med att erhÃ¥lla OpenSSL-identifierare för %s" #: src/hed/libs/credential/VOMSUtil.cpp:332 #, c-format msgid "VOMS: create FQAN: %s" msgstr "VOMS: skapa FQAN: %s" #: src/hed/libs/credential/VOMSUtil.cpp:370 #, c-format msgid "VOMS: create attribute: %s" msgstr "VOMS: skapa attribut: %s" #: src/hed/libs/credential/VOMSUtil.cpp:651 msgid "VOMS: Can not allocate memory for parsing AC" msgstr "VOMS: Kan inte allokera minne för att tolka AC" #: src/hed/libs/credential/VOMSUtil.cpp:659 msgid "VOMS: Can not allocate memory for storing the order of AC" msgstr "VOMS: Kan inte allokera minne för att att lagra ordningen för AC" #: src/hed/libs/credential/VOMSUtil.cpp:685 msgid "VOMS: Can not parse AC" msgstr "VOMS: Kan inte tolka AC" #: src/hed/libs/credential/VOMSUtil.cpp:715 msgid "VOMS: CA directory or CA file must be provided" msgstr "VOMS: CA-katalog för CA-fil mÃ¥ste tillhandahÃ¥llas" #: src/hed/libs/credential/VOMSUtil.cpp:779 msgid "VOMS: failed to verify AC signature" msgstr "VOMS: misslyckades med att verifiera AC-signatur" #: src/hed/libs/credential/VOMSUtil.cpp:848 #, c-format msgid "VOMS: trust chain to check: %s " msgstr "VOMS: tillitskedja att kontrollera: %s " #: src/hed/libs/credential/VOMSUtil.cpp:856 #, c-format msgid "" "VOMS: the DN in certificate: %s does not match that in trusted DN list: %s" msgstr "VOMS: DN i certifikat: %s matchar inte det i betrodda DN-listan: %s" #: src/hed/libs/credential/VOMSUtil.cpp:862 #, c-format msgid "" "VOMS: the Issuer identity in certificate: %s does not match that in trusted " "DN list: %s" msgstr "" "VOMS: utfärdaridentiteten i certifikat: %s matchar inte den i betrodda DN-" "listan: %s" #: src/hed/libs/credential/VOMSUtil.cpp:897 #, c-format msgid "VOMS: The lsc file %s does not exist" msgstr "VOMS: lsc-filen %s existerar inte" #: src/hed/libs/credential/VOMSUtil.cpp:903 #, c-format msgid "VOMS: The lsc file %s can not be open" msgstr "VOMS: lsc-filen %s kan inte öppnas" #: src/hed/libs/credential/VOMSUtil.cpp:951 msgid "" "VOMS: there is no constraints of trusted voms DNs, the certificates stack in " "AC will not be checked." msgstr "" "VOMS: det finns inga villkor pÃ¥ betrodda voms-DN, certifikatstacken i AC " "kommer inte att kontrolleras." 
#: src/hed/libs/credential/VOMSUtil.cpp:984 msgid "VOMS: unable to match certificate chain against VOMS trusted DNs" msgstr "VOMS: kunde inte matcha certifikatkedja mot VOMS betrodda DN" #: src/hed/libs/credential/VOMSUtil.cpp:1004 msgid "VOMS: AC signature verification failed" msgstr "VOMS: AC-signaturverifiering misslyckades" #: src/hed/libs/credential/VOMSUtil.cpp:1013 msgid "VOMS: unable to verify certificate chain" msgstr "VOMS: kunde inte verifiera certifikatkedja" #: src/hed/libs/credential/VOMSUtil.cpp:1019 #, c-format msgid "VOMS: cannot validate AC issuer for VO %s" msgstr "VOMS: kunde inte validera AC-utfärdare för VO %s" #: src/hed/libs/credential/VOMSUtil.cpp:1042 #, c-format msgid "VOMS: directory for trusted service certificates: %s" msgstr "VOMS: katalog för betrodda tjänstecertifikat: %s" #: src/hed/libs/credential/VOMSUtil.cpp:1068 #, c-format msgid "VOMS: Cannot find certificate of AC issuer for VO %s" msgstr "VOMS: kan inte hitta AC-utfärdarens certifikat för VO %s" #: src/hed/libs/credential/VOMSUtil.cpp:1090 msgid "VOMS: Can not find AC_ATTR with IETFATTR type" msgstr "VOMS: kan inte hitta AC_ATTR med IETFATTR-typ" #: src/hed/libs/credential/VOMSUtil.cpp:1097 msgid "VOMS: case of multiple IETFATTR attributes not supported" msgstr "VOMS: mer än ett IETFATTR-attribut stöds inte" #: src/hed/libs/credential/VOMSUtil.cpp:1107 msgid "VOMS: case of multiple policyAuthority not supported" msgstr "VOMS: mer än en policyAuthority stöds inte" #: src/hed/libs/credential/VOMSUtil.cpp:1123 msgid "VOMS: the format of policyAuthority is unsupported - expecting URI" msgstr "VOMS: formatet för policyAuthority stöds inte - förväntar URI" #: src/hed/libs/credential/VOMSUtil.cpp:1132 msgid "" "VOMS: the format of IETFATTRVAL is not supported - expecting OCTET STRING" msgstr "VOMS: formatet för IETFATTRVAL stöds inte - förväntar OCTET STRING" #: src/hed/libs/credential/VOMSUtil.cpp:1189 msgid "VOMS: the grantor attribute is empty" msgstr "VOMS: grantor-attributet är tomt" #: src/hed/libs/credential/VOMSUtil.cpp:1207 msgid "VOMS: the attribute name is empty" msgstr "VOMS: attributnamnet är tomt" #: src/hed/libs/credential/VOMSUtil.cpp:1213 #, c-format msgid "VOMS: the attribute value for %s is empty" msgstr "VOMS: attributvärdet för %s är tomt" #: src/hed/libs/credential/VOMSUtil.cpp:1218 msgid "VOMS: the attribute qualifier is empty" msgstr "VOMS: attributkvalifieraren är tom" #: src/hed/libs/credential/VOMSUtil.cpp:1250 msgid "" "VOMS: both idcenoRevAvail and authorityKeyIdentifier certificate extensions " "must be present" msgstr "" "VOMS: både certifikattilläggen idcenoRevAvail och authorityKeyIdentifier " "måste vara närvarande" #: src/hed/libs/credential/VOMSUtil.cpp:1284 #, c-format msgid "VOMS: FQDN of this host %s does not match any target in AC" msgstr "VOMS: FQDN för denna värd %s matchar inte något target i AC" #: src/hed/libs/credential/VOMSUtil.cpp:1289 msgid "VOMS: the only supported critical extension of the AC is idceTargets" msgstr "VOMS: det enda kritiska tillägget till AC som stöds är idceTargets" #: src/hed/libs/credential/VOMSUtil.cpp:1304 msgid "VOMS: failed to parse attributes from AC" msgstr "VOMS: misslyckades med att tolka attribut från AC" #: src/hed/libs/credential/VOMSUtil.cpp:1348 msgid "VOMS: authorityKey is wrong" msgstr "VOMS: authorityKey är felaktig" #: src/hed/libs/credential/VOMSUtil.cpp:1376 msgid "VOMS: missing AC parts" msgstr "VOMS: saknade AC-delar" #: src/hed/libs/credential/VOMSUtil.cpp:1393 msgid "VOMS: unsupported time format in AC - 
expecting GENERALIZED TIME" msgstr "VOMS: tidsformat i AC som inte stöds - förväntade GENERALIZED TIME" #: src/hed/libs/credential/VOMSUtil.cpp:1399 msgid "VOMS: AC is not yet valid" msgstr "VOMS: AC är inte giltig än" #: src/hed/libs/credential/VOMSUtil.cpp:1406 msgid "VOMS: AC has expired" msgstr "VOMS: giltighetstiden för AC har gått ut" #: src/hed/libs/credential/VOMSUtil.cpp:1421 msgid "VOMS: AC is not complete - missing Serial or Issuer information" msgstr "VOMS: AC är inte komplett - saknar Serial- eller Issuer-information" #: src/hed/libs/credential/VOMSUtil.cpp:1426 #, c-format msgid "VOMS: the holder serial number is: %lx" msgstr "VOMS: innehavarens serienummer är: %lx" #: src/hed/libs/credential/VOMSUtil.cpp:1427 #, c-format msgid "VOMS: the serial number in AC is: %lx" msgstr "VOMS: serienumret i AC är: %lx" #: src/hed/libs/credential/VOMSUtil.cpp:1430 #, c-format msgid "" "VOMS: the holder serial number %lx is not the same as the serial number in " "AC %lx, the holder certificate that is used to create a voms proxy could be " "a proxy certificate with a different serial number as the original EEC cert" msgstr "" "VOMS: innehavarens serienummer %lx är inte detsamma som serienumret i AC " "%lx, innehavarens certifikat som används för att skapa en vomsproxy kan vara " "ett proxycertifikat med ett annat serienummer än det ursprungliga EEC-" "certifikatet" #: src/hed/libs/credential/VOMSUtil.cpp:1439 msgid "VOMS: the holder information in AC is wrong" msgstr "VOMS: innehavarinformationen i AC är felaktig" #: src/hed/libs/credential/VOMSUtil.cpp:1461 #, c-format msgid "VOMS: DN of holder in AC: %s" msgstr "VOMS: innehavarens DN i AC: %s" #: src/hed/libs/credential/VOMSUtil.cpp:1462 #, c-format msgid "VOMS: DN of holder: %s" msgstr "VOMS: innehavarens DN: %s" #: src/hed/libs/credential/VOMSUtil.cpp:1463 #, c-format msgid "VOMS: DN of issuer: %s" msgstr "VOMS: utfärdarens DN: %s" #: src/hed/libs/credential/VOMSUtil.cpp:1470 msgid "" "VOMS: the holder name in AC is not related to the distinguished name in " "holder certificate" msgstr "" "VOMS: innehavarens namn i AC är inte relaterat till DN i innehavarens " "certifikat" #: src/hed/libs/credential/VOMSUtil.cpp:1482 #: src/hed/libs/credential/VOMSUtil.cpp:1489 msgid "VOMS: the holder issuerUID is not the same as that in AC" msgstr "VOMS: innehavarens utfärdar-UID är inte detsamma som det i AC" #: src/hed/libs/credential/VOMSUtil.cpp:1502 msgid "VOMS: the holder issuer name is not the same as that in AC" msgstr "VOMS: innehavarens utfärdarnamn är inte detsamma som det i AC" #: src/hed/libs/credential/VOMSUtil.cpp:1512 msgid "VOMS: the issuer information in AC is wrong" msgstr "VOMS: utfärdarinformationen i AC är felaktig" #: src/hed/libs/credential/VOMSUtil.cpp:1520 #, c-format msgid "VOMS: the issuer name %s is not the same as that in AC - %s" msgstr "VOMS: utfärdarnamnet %s är inte detsamma som det i AC - %s" #: src/hed/libs/credential/VOMSUtil.cpp:1528 msgid "" "VOMS: the serial number of AC INFO is too long - expecting no more than 20 " "octets" msgstr "" "VOMS: serienumret i AC INFO är för långt - förväntade inte mer än 20 bytes" #: src/hed/libs/credential/VOMSUtil.cpp:1558 #: src/hed/libs/credential/VOMSUtil.cpp:1566 #: src/hed/libs/credential/VOMSUtil.cpp:1574 #: src/hed/libs/credential/VOMSUtil.cpp:1582 #: src/hed/libs/credential/VOMSUtil.cpp:1605 msgid "VOMS: unable to extract VO name from AC" msgstr "VOMS: kunde inte extrahera VO-namn från AC" #: src/hed/libs/credential/VOMSUtil.cpp:1596 #, c-format msgid "VOMS: unable to 
determine hostname of AC from VO name: %s" msgstr "VOMS: Kunde inte bestämma värdnamn i AC från VO-namn: %s" #: src/hed/libs/credential/VOMSUtil.cpp:1615 msgid "VOMS: can not verify the signature of the AC" msgstr "VOMS: kan inte verifiera ACs signatur" #: src/hed/libs/credential/VOMSUtil.cpp:1621 msgid "VOMS: problems while parsing information in AC" msgstr "VOMS: problem vid tolkning av information i AC" #: src/hed/libs/credential/test/VOMSUtilTest.cpp:128 #, c-format msgid "Line %d.%d of the attributes returned: %s" msgstr "Rad %d.%d i attributen returnerade: %s" #: src/hed/libs/credentialstore/ClientVOMS.cpp:149 msgid "voms" msgstr "voms" #: src/hed/libs/credentialstore/CredentialStore.cpp:194 #: src/hed/libs/credentialstore/CredentialStore.cpp:245 #: src/hed/libs/credentialstore/CredentialStore.cpp:273 #: src/hed/libs/credentialstore/CredentialStore.cpp:336 #: src/hed/libs/credentialstore/CredentialStore.cpp:376 #: src/hed/libs/credentialstore/CredentialStore.cpp:406 #, c-format msgid "MyProxy failure: %s" msgstr "MyProxy-fel: %s" #: src/hed/libs/crypto/OpenSSL.cpp:71 #, c-format msgid "SSL error: %d - %s:%s:%s" msgstr "SSL-fel: %d - %s:%s:%s" #: src/hed/libs/crypto/OpenSSL.cpp:84 msgid "SSL locks not initialized" msgstr "SSL-lås inte initierade" #: src/hed/libs/crypto/OpenSSL.cpp:88 #, c-format msgid "wrong SSL lock requested: %i of %i: %i - %s" msgstr "fel SSL-lås begärt: %i av %i: %i - %s" #: src/hed/libs/crypto/OpenSSL.cpp:111 msgid "Failed to lock arccrypto library in memory" msgstr "Misslyckades med att låsa arccrypto-biblioteket i minnet" #: src/hed/libs/crypto/OpenSSL.cpp:116 src/hed/libs/crypto/OpenSSL.cpp:130 msgid "Failed to initialize OpenSSL library" msgstr "Misslyckades med att initiera OpenSSL-biblioteket" #: src/hed/libs/crypto/OpenSSL.cpp:152 msgid "Number of OpenSSL locks changed - reinitializing" msgstr "Antalet OpenSSL-lås ändrades - återinitierar" #: src/hed/libs/data/DataExternalHelper.cpp:157 msgid "failed to read data tag" msgstr "misslyckades med att läsa data-tagg" #: src/hed/libs/data/DataExternalHelper.cpp:161 msgid "waiting for data chunk" msgstr "väntar på data-chunk" #: src/hed/libs/data/DataExternalHelper.cpp:163 msgid "failed to read data chunk" msgstr "misslyckades med att läsa data-chunk" #: src/hed/libs/data/DataExternalHelper.cpp:171 #, c-format msgid "data chunk: %llu %llu" msgstr "data-chunk: %llu %llu" #: src/hed/libs/data/DataExternalHelper.cpp:242 #, c-format msgid "DataMove::Transfer: using supplied checksum %s" msgstr "DataMove::Transfer: använder tillhandahållen checksumma %s" #: src/hed/libs/data/DataExternalHelper.cpp:361 msgid "Expecting Module, Command and URL provided" msgstr "Förväntade att modul, kommando och URL tillhandahållits" #: src/hed/libs/data/DataExternalHelper.cpp:368 msgid "Expecting Command module path among arguments" msgstr "Förväntade kommando modul-sökväg bland argumenten" #: src/hed/libs/data/DataExternalHelper.cpp:372 msgid "Expecting Command module name among arguments" msgstr "Förväntade kommando modul-namn bland argumenten" #: src/hed/libs/data/DataMover.cpp:115 msgid "No locations found - probably no more physical instances" msgstr "Hittade inga platser - troligen inga fler fysiska instanser" #: src/hed/libs/data/DataMover.cpp:121 src/hed/libs/data/FileCache.cpp:552 #: src/libs/data-staging/Processor.cpp:443 #: src/libs/data-staging/Processor.cpp:457 #, c-format msgid "Removing %s" msgstr "Tar bort %s" #: src/hed/libs/data/DataMover.cpp:134 msgid "This instance was already deleted" msgstr "Denna 
instans har redan tagits bort" #: src/hed/libs/data/DataMover.cpp:140 msgid "Failed to delete physical file" msgstr "Misslyckades med att ta bort fysisk fil" #: src/hed/libs/data/DataMover.cpp:151 #, c-format msgid "Removing metadata in %s" msgstr "Tar bort metadata i %s" #: src/hed/libs/data/DataMover.cpp:155 msgid "Failed to delete meta-information" msgstr "Misslyckades med att ta bort metainformation" #: src/hed/libs/data/DataMover.cpp:169 msgid "Failed to remove all physical instances" msgstr "Misslyckades med att ta bort alla instanser" #: src/hed/libs/data/DataMover.cpp:173 #, c-format msgid "Removing logical file from metadata %s" msgstr "Tar bort logisk fil frÃ¥n metadata %s" #: src/hed/libs/data/DataMover.cpp:176 msgid "Failed to delete logical file" msgstr "Misslyckades med att ta bort logisk fil" #: src/hed/libs/data/DataMover.cpp:183 msgid "Failed to remove instance" msgstr "Misslyckades med att ta bort instans" #: src/hed/libs/data/DataMover.cpp:232 msgid "DataMover::Transfer : starting new thread" msgstr "DataMover::Transfer : startar ny trÃ¥d" #: src/hed/libs/data/DataMover.cpp:260 #, c-format msgid "Transfer from %s to %s" msgstr "Överföring frÃ¥n %s till %s" #: src/hed/libs/data/DataMover.cpp:262 msgid "Not valid source" msgstr "Ogiltig källa" #: src/hed/libs/data/DataMover.cpp:267 msgid "Not valid destination" msgstr "Ogiltig destination" #: src/hed/libs/data/DataMover.cpp:287 src/services/candypond/CandyPond.cpp:304 #, c-format msgid "Couldn't handle certificate: %s" msgstr "Kunde inte hantera certifikatfil: %s" #: src/hed/libs/data/DataMover.cpp:296 src/hed/libs/data/DataMover.cpp:590 #: src/libs/data-staging/Processor.cpp:133 #, c-format msgid "File %s is cached (%s) - checking permissions" msgstr "Fil %s är cachad (%s) - kontrollerar Ã¥tkomsträttigheter" #: src/hed/libs/data/DataMover.cpp:300 src/hed/libs/data/DataMover.cpp:609 #: src/hed/libs/data/DataMover.cpp:667 src/libs/data-staging/Processor.cpp:152 msgid "Permission checking passed" msgstr "Ã…tkomsträttighetskontroll godkänd" #: src/hed/libs/data/DataMover.cpp:301 src/hed/libs/data/DataMover.cpp:628 #: src/hed/libs/data/DataMover.cpp:1144 msgid "Linking/copying cached file" msgstr "Länkar/kopierar cachad fil" #: src/hed/libs/data/DataMover.cpp:325 #, c-format msgid "No locations for source found: %s" msgstr "Hittade inga platser för källa: %s" #: src/hed/libs/data/DataMover.cpp:329 #, c-format msgid "Failed to resolve source: %s" msgstr "Misslyckades med att slÃ¥ upp källa: %s" #: src/hed/libs/data/DataMover.cpp:341 src/hed/libs/data/DataMover.cpp:409 #, c-format msgid "No locations for destination found: %s" msgstr "Hittade inga platser för destination: %s" #: src/hed/libs/data/DataMover.cpp:346 src/hed/libs/data/DataMover.cpp:413 #, c-format msgid "Failed to resolve destination: %s" msgstr "Misslyckades med att slÃ¥ upp destination: %s" #: src/hed/libs/data/DataMover.cpp:361 #, c-format msgid "No locations for destination different from source found: %s" msgstr "Hittade inga platser för destinationen som skiljer sig frÃ¥n källan: %s" #: src/hed/libs/data/DataMover.cpp:382 #, c-format msgid "DataMover::Transfer: trying to destroy/overwrite destination: %s" msgstr "DataMover::Transfer: försöker förstöra/skriva över destination: %s" #: src/hed/libs/data/DataMover.cpp:393 #, c-format msgid "Failed to delete %s but will still try to copy" msgstr "" "Misslyckades med att ta bort %s, men kommer fortfarande att försöka kopiera" #: src/hed/libs/data/DataMover.cpp:396 #, c-format msgid "Failed to delete %s" msgstr 
"Misslyckades med att ta bort %s" #: src/hed/libs/data/DataMover.cpp:423 #, c-format msgid "Deleted but still have locations at %s" msgstr "Borttagen men har fortfarande platser pÃ¥ %s" #: src/hed/libs/data/DataMover.cpp:435 msgid "DataMover: cycle" msgstr "DataMover: nästa cykel" #: src/hed/libs/data/DataMover.cpp:437 msgid "DataMover: no retries requested - exit" msgstr "DataMover: begärt att inte försöka igen - avsluta" #: src/hed/libs/data/DataMover.cpp:442 msgid "DataMover: source out of tries - exit" msgstr "DataMover: källan har slut pÃ¥ försök - avsluta" #: src/hed/libs/data/DataMover.cpp:444 msgid "DataMover: destination out of tries - exit" msgstr "DataMover: destinationen har slut pÃ¥ försök - avsluta" #: src/hed/libs/data/DataMover.cpp:452 #, c-format msgid "Real transfer from %s to %s" msgstr "Reell överföring frÃ¥n %s till %s" #: src/hed/libs/data/DataMover.cpp:478 #, c-format msgid "Creating buffer: %lli x %i" msgstr "Skapar buffer: %lli x %i" #: src/hed/libs/data/DataMover.cpp:494 #, c-format msgid "DataMove::Transfer: no checksum calculation for %s" msgstr "DataMove::Transfer: ingen checksumma beräknad för %s" #: src/hed/libs/data/DataMover.cpp:499 #, c-format msgid "DataMove::Transfer: using supplied checksum %s:%s" msgstr "DataMove::Transfer: använder tillhandahÃ¥llen checksumma %s:%s" #: src/hed/libs/data/DataMover.cpp:523 #, c-format msgid "DataMove::Transfer: will calculate %s checksum" msgstr "DataMove::Transfer: kommer att beräkna %s-checksumma" #: src/hed/libs/data/DataMover.cpp:528 msgid "Buffer creation failed !" msgstr "Skapande av buffer misslyckades" #: src/hed/libs/data/DataMover.cpp:551 #, c-format msgid "URL is mapped to: %s" msgstr "URL mappas till: %s" #: src/hed/libs/data/DataMover.cpp:579 src/hed/libs/data/DataMover.cpp:637 #: src/libs/data-staging/Processor.cpp:88 msgid "Cached file is locked - should retry" msgstr "Cachad fil är lÃ¥st - bör försöka igen" #: src/hed/libs/data/DataMover.cpp:584 src/libs/data-staging/Processor.cpp:106 msgid "Failed to initiate cache" msgstr "Misslyckades med att initiera cache" #: src/hed/libs/data/DataMover.cpp:601 src/services/candypond/CandyPond.cpp:379 #, c-format msgid "Permission checking failed: %s" msgstr "Ã…tkomsträttighetskontroll inte godkänd: %s" #: src/hed/libs/data/DataMover.cpp:603 src/hed/libs/data/DataMover.cpp:661 #: src/hed/libs/data/DataMover.cpp:681 src/hed/libs/data/DataMover.cpp:692 msgid "source.next_location" msgstr "source.next_location" #: src/hed/libs/data/DataMover.cpp:617 src/libs/data-staging/Processor.cpp:157 #, c-format msgid "Source modification date: %s" msgstr "Källans ändringstid: %s" #: src/hed/libs/data/DataMover.cpp:618 src/libs/data-staging/Processor.cpp:158 #, c-format msgid "Cache creation date: %s" msgstr "Cache skapades: %s" #: src/hed/libs/data/DataMover.cpp:624 src/libs/data-staging/Processor.cpp:163 msgid "Cached file is outdated, will re-download" msgstr "Cachad fil är gammal, kommer att ladda ner igen" #: src/hed/libs/data/DataMover.cpp:627 src/libs/data-staging/Processor.cpp:168 msgid "Cached copy is still valid" msgstr "Cachad kopia är fortfarande giltig" #: src/hed/libs/data/DataMover.cpp:654 msgid "URL is mapped to local access - checking permissions on original URL" msgstr "" "URL är mappad till lokal Ã¥tkomst - kontrollerar Ã¥tkomsträttigheter pÃ¥ " "ursprunglig URL" #: src/hed/libs/data/DataMover.cpp:658 #, c-format msgid "Permission checking on original URL failed: %s" msgstr "Ã…tkomsträttighetskontroll pÃ¥ ursprunglig URL inte godkänd: %s" #: 
src/hed/libs/data/DataMover.cpp:669 msgid "Linking local file" msgstr "Länkar lokal fil" #: src/hed/libs/data/DataMover.cpp:689 #, c-format msgid "Failed to make symbolic link %s to %s : %s" msgstr "Misslyckades med att skapa symbolisk länk %s till %s : %s" #: src/hed/libs/data/DataMover.cpp:698 #, c-format msgid "Failed to change owner of symbolic link %s to %i" msgstr "Misslyckades med att ändra ägare av symbolisk länk %s till %i" #: src/hed/libs/data/DataMover.cpp:709 #, c-format msgid "cache file: %s" msgstr "cachefil: %s" #: src/hed/libs/data/DataMover.cpp:735 #, c-format msgid "Failed to stat source %s" msgstr "Misslyckades med att göra stat pÃ¥ källa: %s" #: src/hed/libs/data/DataMover.cpp:737 src/hed/libs/data/DataMover.cpp:750 #: src/hed/libs/data/DataMover.cpp:781 src/hed/libs/data/DataMover.cpp:800 #: src/hed/libs/data/DataMover.cpp:822 src/hed/libs/data/DataMover.cpp:839 #: src/hed/libs/data/DataMover.cpp:996 src/hed/libs/data/DataMover.cpp:1028 #: src/hed/libs/data/DataMover.cpp:1038 src/hed/libs/data/DataMover.cpp:1111 msgid "(Re)Trying next source" msgstr "Försöker med nästa källa (igen)" #: src/hed/libs/data/DataMover.cpp:748 #, c-format msgid "Meta info of source and location do not match for %s" msgstr "Metainformation för källa och plats stämmer inte överens för %s" #: src/hed/libs/data/DataMover.cpp:760 #, c-format msgid "" "Replica %s has high latency, but no more sources exist so will use this one" msgstr "" "Replika %s har hög latency, men inga fler källor existerar sÃ¥ kommer att " "använda denna" #: src/hed/libs/data/DataMover.cpp:764 #, c-format msgid "Replica %s has high latency, trying next source" msgstr "Replika %s har hög latency, prövar nästa källa" #: src/hed/libs/data/DataMover.cpp:776 src/hed/libs/data/DataMover.cpp:796 #: src/libs/data-staging/DataStagingDelivery.cpp:344 #: src/libs/data-staging/DataStagingDelivery.cpp:367 #, c-format msgid "Using internal transfer method of %s" msgstr "Använder intern överföringsmetod %s" #: src/hed/libs/data/DataMover.cpp:788 src/hed/libs/data/DataMover.cpp:805 #: src/libs/data-staging/DataStagingDelivery.cpp:360 #: src/libs/data-staging/DataStagingDelivery.cpp:381 #, c-format msgid "Internal transfer method is not supported for %s" msgstr "Intern överföringsmetod stöds inte för %s" #: src/hed/libs/data/DataMover.cpp:812 msgid "Using buffered transfer method" msgstr "Använder buffrad överföringsmetod" #: src/hed/libs/data/DataMover.cpp:816 #, c-format msgid "Failed to prepare source: %s" msgstr "Misslyckades med att förbereda källa: %s" #: src/hed/libs/data/DataMover.cpp:830 #, c-format msgid "Failed to start reading from source: %s" msgstr "Misslyckades med att pÃ¥börja läsning frÃ¥n källa: %s" #: src/hed/libs/data/DataMover.cpp:849 msgid "Metadata of source and destination are different" msgstr "Källans och destinationens metadata är olika" #: src/hed/libs/data/DataMover.cpp:868 #, c-format msgid "Failed to preregister destination: %s" msgstr "Misslyckades med att förregistrera destination: %s" #: src/hed/libs/data/DataMover.cpp:873 src/hed/libs/data/DataMover.cpp:1135 msgid "destination.next_location" msgstr "destination.next_location" #: src/hed/libs/data/DataMover.cpp:884 #, c-format msgid "Failed to prepare destination: %s" msgstr "Misslyckades med att förbereda destination: %s" #: src/hed/libs/data/DataMover.cpp:891 src/hed/libs/data/DataMover.cpp:914 #: src/hed/libs/data/DataMover.cpp:1132 #, c-format msgid "" "Failed to unregister preregistered lfn. 
You may need to unregister it " "manually: %s" msgstr "" "Misslyckades med att avregistrera förregistrerad lfn. Du kan behöva " "avregistrera det manuellt: %s" #: src/hed/libs/data/DataMover.cpp:895 src/hed/libs/data/DataMover.cpp:917 #: src/hed/libs/data/DataMover.cpp:1005 src/hed/libs/data/DataMover.cpp:1021 #: src/hed/libs/data/DataMover.cpp:1044 src/hed/libs/data/DataMover.cpp:1089 msgid "(Re)Trying next destination" msgstr "Försöker med nästa destination (igen)" #: src/hed/libs/data/DataMover.cpp:906 #, c-format msgid "Failed to start writing to destination: %s" msgstr "Misslyckades med att påbörja skrivning till destination: %s" #: src/hed/libs/data/DataMover.cpp:929 msgid "Failed to start writing to cache" msgstr "Misslyckades med att påbörja skrivning till cache" #: src/hed/libs/data/DataMover.cpp:937 src/hed/libs/data/DataMover.cpp:983 #: src/hed/libs/data/DataMover.cpp:1156 msgid "" "Failed to unregister preregistered lfn. You may need to unregister it " "manually" msgstr "" "Misslyckades med att avregistrera förregistrerad lfn. Du kan behöva " "avregistrera det manuellt" #: src/hed/libs/data/DataMover.cpp:944 msgid "Waiting for buffer" msgstr "Väntar på buffer" #: src/hed/libs/data/DataMover.cpp:951 #, c-format msgid "Failed updating timestamp on cache lock file %s for file %s: %s" msgstr "" "Misslyckades med att uppdatera klockslag på cachelåsfil %s för fil %s: %s" #: src/hed/libs/data/DataMover.cpp:956 #, c-format msgid "buffer: read EOF : %s" msgstr "buffer: läs EOF : %s" #: src/hed/libs/data/DataMover.cpp:957 #, c-format msgid "buffer: write EOF: %s" msgstr "buffer: skriv EOF: %s" #: src/hed/libs/data/DataMover.cpp:958 #, c-format msgid "buffer: error : %s, read: %s, write: %s" msgstr "buffer: fel : %s, läs: %s, skriv: %s" #: src/hed/libs/data/DataMover.cpp:959 msgid "Closing read channel" msgstr "Stänger läskanal" #: src/hed/libs/data/DataMover.cpp:966 msgid "Closing write channel" msgstr "Stänger skrivkanal" #: src/hed/libs/data/DataMover.cpp:974 msgid "Failed to complete writing to destination" msgstr "Misslyckades med att slutföra skrivning till destination" #: src/hed/libs/data/DataMover.cpp:988 msgid "Transfer cancelled successfully" msgstr "Överföring avbröts framgångsrikt" #: src/hed/libs/data/DataMover.cpp:1033 msgid "Cause of failure unclear - choosing randomly" msgstr "Anledning till misslyckande oklar - väljer slumpvis" #: src/hed/libs/data/DataMover.cpp:1076 #, c-format msgid "" "Checksum mismatch between checksum given as meta option (%s:%s) and " "calculated checksum (%s)" msgstr "" "Checksumma stämmer inte överens mellan checksumma given som metaalternativ " "(%s:%s) och beräknad checksumma (%s)" #: src/hed/libs/data/DataMover.cpp:1082 msgid "" "Failed to unregister preregistered lfn, You may need to unregister it " "manually" msgstr "" "Misslyckades med att avregistrera förregistrerad lfn, du kan behöva " "avregistrera den manuellt" #: src/hed/libs/data/DataMover.cpp:1086 msgid "Failed to delete destination, retry may fail" msgstr "Misslyckades med att ta bort destination, nytt försök kan misslyckas" #: src/hed/libs/data/DataMover.cpp:1096 msgid "Cannot compare empty checksum" msgstr "Kan inte jämföra tom checksumma" #: src/hed/libs/data/DataMover.cpp:1103 #: src/libs/data-staging/DataStagingDelivery.cpp:538 msgid "Checksum type of source and calculated checksum differ, cannot compare" msgstr "" "Typ av checksumma för källa och beräknad checksumma är olika, kan inte " "jämföra" #: src/hed/libs/data/DataMover.cpp:1105 #, c-format msgid "Checksum 
mismatch between calcuated checksum %s and source checksum %s" msgstr "" "Checksumma stämmer inte överens mellan beräknad checksumma (%s) och källans " "checksumma %s" #: src/hed/libs/data/DataMover.cpp:1116 #: src/libs/data-staging/DataStagingDelivery.cpp:554 #, c-format msgid "Calculated transfer checksum %s matches source checksum" msgstr "" "Beräknad överförings-checksumma %s stämmer överens med källans checksumma" #: src/hed/libs/data/DataMover.cpp:1122 #: src/libs/data-staging/DataStagingDelivery.cpp:557 msgid "Checksum not computed" msgstr "Checksumma ej beräknad" #: src/hed/libs/data/DataMover.cpp:1128 #, c-format msgid "Failed to postregister destination %s" msgstr "Misslyckades med att efterregistrera destination: %s" #: src/hed/libs/data/DataPoint.cpp:84 #, c-format msgid "Invalid URL option: %s" msgstr "Ogiltigt URL-alternativ: %s" #: src/hed/libs/data/DataPoint.cpp:263 #, c-format msgid "Skipping invalid URL option %s" msgstr "Hoppar över ogiltigt URL-alternativ: %s" #: src/hed/libs/data/DataPoint.cpp:278 msgid "" "Third party transfer was requested but the corresponding plugin could\n" " not be loaded. Is the GFAL plugin installed? If not, please install " "the\n" " packages 'nordugrid-arc-plugins-gfal' and 'gfal2-all'. Depending on\n" " your type of installation the package names might differ." msgstr "" "Tredjepartsöverföring begärdes men motsvarande plugin kunde inte laddas in.\n" " Är GFAL-plugin installerad? Om inte, installera paketen\n" " 'nordugrid-arc-plugins-gfal' och 'gfal2-all'. Beroende pÃ¥ din typ av\n" " installation kan paketnamnen variera." #: src/hed/libs/data/DataPoint.cpp:296 #, c-format msgid "Failed to load plugin for URL %s" msgstr "Misslyckades med att ladda in plugin för URL %s" #: src/hed/libs/data/DataPointDelegate.cpp:75 #: src/hed/libs/data/DataPointDelegate.cpp:76 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2011 #, c-format msgid "Starting helper process: %s" msgstr "Startar hjälpprocess: %s" #: src/hed/libs/data/DataPointDelegate.cpp:180 msgid "start_reading" msgstr "start_reading" #: src/hed/libs/data/DataPointDelegate.cpp:189 msgid "start_reading: helper start failed" msgstr "start_reading: start av hjälpprocess misslyckades" #: src/hed/libs/data/DataPointDelegate.cpp:197 msgid "start_reading: thread create failed" msgstr "start_reading: skapande av trÃ¥d misslyckades" #: src/hed/libs/data/DataPointDelegate.cpp:213 msgid "StopReading: aborting connection" msgstr "StopReading: avbryter förbindelse" #: src/hed/libs/data/DataPointDelegate.cpp:218 msgid "stop_reading: waiting for transfer to finish" msgstr "stop_reading: väntar pÃ¥ att överföring ska avslutas" #: src/hed/libs/data/DataPointDelegate.cpp:221 #, c-format msgid "stop_reading: exiting: %s" msgstr "stop_reading: avslutar: %s" #: src/hed/libs/data/DataPointDelegate.cpp:231 msgid "read_thread: get and register buffers" msgstr "read_thread: erhÃ¥ll och registrera buffrar" #: src/hed/libs/data/DataPointDelegate.cpp:239 #, c-format msgid "read_thread: for_read failed - aborting: %s" msgstr "read_thread: for_read misslyckades - avbryter: %s" #: src/hed/libs/data/DataPointDelegate.cpp:247 #, c-format msgid "read_thread: non-data tag '%c' from external process - leaving: %s" msgstr "read_thread: non-data-tagg '%c' frÃ¥n extern process - lämnar: %s" #: src/hed/libs/data/DataPointDelegate.cpp:256 #, c-format msgid "read_thread: data read error from external process - aborting: %s" msgstr "read_thread: dataläsningsfel frÃ¥n extern process - avbryter: %s" #: 
src/hed/libs/data/DataPointDelegate.cpp:264 msgid "read_thread: exiting" msgstr "read_thread: avslutar" #: src/hed/libs/data/DataPointDelegate.cpp:285 msgid "start_writing_ftp: helper start failed" msgstr "start_writing_ftp: start av hjälpprocess misslyckades" #: src/hed/libs/data/DataPointDelegate.cpp:293 msgid "start_writing_ftp: thread create failed" msgstr "start_writing_ftp: skapande av trÃ¥d misslyckades" #: src/hed/libs/data/DataPointDelegate.cpp:343 msgid "No checksum information possible" msgstr "Ingen information om checksumma möjlig" #: src/hed/libs/data/DataPointDelegate.cpp:359 msgid "write_thread: get and pass buffers" msgstr "write_thread: erhÃ¥ll och skicka vidare buffrar" #: src/hed/libs/data/DataPointDelegate.cpp:366 msgid "write_thread: for_write failed - aborting" msgstr "write_thread: for_write misslyckades - avbryter" #: src/hed/libs/data/DataPointDelegate.cpp:370 msgid "write_thread: for_write eof" msgstr "write_thread: for_write eof" #: src/hed/libs/data/DataPointDelegate.cpp:384 msgid "write_thread: out failed - aborting" msgstr "write_thread: out misslyckades - avbryter" #: src/hed/libs/data/DataPointDelegate.cpp:392 msgid "write_thread: exiting" msgstr "write_thread: avslutar" #: src/hed/libs/data/DataPointIndex.cpp:91 #, c-format msgid "Can't handle location %s" msgstr "Kan inte hantera plats %s" #: src/hed/libs/data/DataPointIndex.cpp:183 msgid "Sorting replicas according to URL map" msgstr "Sortera replikor enligt URL-mapp" #: src/hed/libs/data/DataPointIndex.cpp:187 #, c-format msgid "Replica %s is mapped" msgstr "Replika %s har mappats" #: src/hed/libs/data/DataPointIndex.cpp:195 #, c-format msgid "Sorting replicas according to preferred pattern %s" msgstr "Sorterar replikor enligt föredraget mönster %s" #: src/hed/libs/data/DataPointIndex.cpp:218 #: src/hed/libs/data/DataPointIndex.cpp:236 #, c-format msgid "Excluding replica %s matching pattern !%s" msgstr "Exkluderar replika %s som matchar mönster !%s" #: src/hed/libs/data/DataPointIndex.cpp:229 #, c-format msgid "Replica %s matches host pattern %s" msgstr "Replika %s matchar värd-mönster %s" #: src/hed/libs/data/DataPointIndex.cpp:247 #, c-format msgid "Replica %s matches pattern %s" msgstr "Replika %s matchar mönster %s" #: src/hed/libs/data/DataPointIndex.cpp:263 #, c-format msgid "Replica %s doesn't match preferred pattern or URL map" msgstr "Replika %s matchar inte föredraget mönster eller URL-map" #: src/hed/libs/data/DataStatus.cpp:12 msgid "Operation completed successfully" msgstr "Operation avslutades framgÃ¥ngsrikt" #: src/hed/libs/data/DataStatus.cpp:13 msgid "Source is invalid URL" msgstr "Källa är inte en giltig URL" #: src/hed/libs/data/DataStatus.cpp:14 msgid "Destination is invalid URL" msgstr "Destination är inte en giltig URL" #: src/hed/libs/data/DataStatus.cpp:15 msgid "Resolving of index service for source failed" msgstr "Uppslagning av indextjänst för källa misslyckades" #: src/hed/libs/data/DataStatus.cpp:16 msgid "Resolving of index service for destination failed" msgstr "Uppslagning av indextjänst för destination misslyckades" #: src/hed/libs/data/DataStatus.cpp:17 msgid "Can't read from source" msgstr "Kan ej läsa frÃ¥n källa" #: src/hed/libs/data/DataStatus.cpp:18 msgid "Can't write to destination" msgstr "Kan ej skriva till destination" #: src/hed/libs/data/DataStatus.cpp:19 msgid "Failed while reading from source" msgstr "Misslyckades under läsning frÃ¥n källa" #: src/hed/libs/data/DataStatus.cpp:20 msgid "Failed while writing to destination" msgstr "Misslyckades under 
skrivning till destination" #: src/hed/libs/data/DataStatus.cpp:21 msgid "Failed while transferring data" msgstr "Misslyckades under överföring av data" #: src/hed/libs/data/DataStatus.cpp:22 msgid "Failed while finishing reading from source" msgstr "Misslyckades med att avsluta läsning från källa" #: src/hed/libs/data/DataStatus.cpp:23 msgid "Failed while finishing writing to destination" msgstr "Misslyckades med att avsluta skrivning till destination" #: src/hed/libs/data/DataStatus.cpp:24 msgid "First stage of registration to index service failed" msgstr "Första steget av registrering till indextjänst misslyckades" #: src/hed/libs/data/DataStatus.cpp:25 msgid "Last stage of registration to index service failed" msgstr "Sista steget av registrering till indextjänst misslyckades" #: src/hed/libs/data/DataStatus.cpp:26 msgid "Unregistering from index service failed" msgstr "Avregistrering från indextjänst misslyckades" #: src/hed/libs/data/DataStatus.cpp:27 msgid "Error in caching procedure" msgstr "Fel i cachningsprocedur" #: src/hed/libs/data/DataStatus.cpp:28 msgid "Error due to expiration of provided credentials" msgstr "Fel eftersom den tillhandahållna referensens livstid har gått ut" #: src/hed/libs/data/DataStatus.cpp:29 msgid "Delete error" msgstr "Borttagningsfel" #: src/hed/libs/data/DataStatus.cpp:30 msgid "No valid location available" msgstr "Ingen giltig plats tillgänglig" #: src/hed/libs/data/DataStatus.cpp:31 msgid "Location already exists" msgstr "Plats existerar redan" #: src/hed/libs/data/DataStatus.cpp:32 msgid "Operation not supported for this kind of URL" msgstr "Operationen stöds inte för denna typ av URL" #: src/hed/libs/data/DataStatus.cpp:33 msgid "Feature is not implemented" msgstr "Feature ej implementerad" #: src/hed/libs/data/DataStatus.cpp:34 msgid "Already reading from source" msgstr "Läser redan från källa" #: src/hed/libs/data/DataStatus.cpp:35 msgid "Already writing to destination" msgstr "Skriver redan till destination" #: src/hed/libs/data/DataStatus.cpp:36 msgid "Read access check failed" msgstr "Åtkomstkontroll för läsning inte godkänd" #: src/hed/libs/data/DataStatus.cpp:37 msgid "Directory listing failed" msgstr "Kataloglistning misslyckades" #: src/hed/libs/data/DataStatus.cpp:38 msgid "Object is not suitable for listing" msgstr "Objekt är inte lämpligt för listning" #: src/hed/libs/data/DataStatus.cpp:39 msgid "Failed to obtain information about file" msgstr "Misslyckades med att erhålla information om fil" #: src/hed/libs/data/DataStatus.cpp:40 msgid "No such file or directory" msgstr "Ingen sådan fil eller katalog" #: src/hed/libs/data/DataStatus.cpp:41 msgid "Object not initialized (internal error)" msgstr "Objekt ej initierat (internt fel)" #: src/hed/libs/data/DataStatus.cpp:42 msgid "Operating System error" msgstr "Operativsystem-fel" #: src/hed/libs/data/DataStatus.cpp:43 msgid "Failed to stage file(s)" msgstr "Misslyckades med att staga fil(er)" #: src/hed/libs/data/DataStatus.cpp:44 msgid "Inconsistent metadata" msgstr "Inkonsistent metadata" #: src/hed/libs/data/DataStatus.cpp:45 msgid "Failed to prepare source" msgstr "Misslyckades med att förbereda källa" #: src/hed/libs/data/DataStatus.cpp:46 msgid "Should wait for source to be prepared" msgstr "Ska vänta på att källa förbereds" #: src/hed/libs/data/DataStatus.cpp:47 msgid "Failed to prepare destination" msgstr "Misslyckades med att förbereda destination" #: src/hed/libs/data/DataStatus.cpp:48 msgid "Should wait for destination to be prepared" msgstr "Ska vänta på att 
destination förbereds" #: src/hed/libs/data/DataStatus.cpp:49 msgid "Failed to finalize reading from source" msgstr "Misslyckades med att slutföra läsning frÃ¥n källa" #: src/hed/libs/data/DataStatus.cpp:50 msgid "Failed to finalize writing to destination" msgstr "Misslyckades med att slutföra skrivning till destination" #: src/hed/libs/data/DataStatus.cpp:51 msgid "Failed to create directory" msgstr "Misslyckades med att skapa katalog" #: src/hed/libs/data/DataStatus.cpp:52 msgid "Failed to rename URL" msgstr "Misslyckades med byta namn pÃ¥ URL" #: src/hed/libs/data/DataStatus.cpp:53 msgid "Data was already cached" msgstr "Data var redan cachat" #: src/hed/libs/data/DataStatus.cpp:54 msgid "Operation cancelled successfully" msgstr "Operation avbröts framgÃ¥ngsrikt" #: src/hed/libs/data/DataStatus.cpp:55 msgid "Generic error" msgstr "Generiskt fel" #: src/hed/libs/data/DataStatus.cpp:56 src/hed/libs/data/DataStatus.cpp:69 msgid "Unknown error" msgstr "Okänt fel" #: src/hed/libs/data/DataStatus.cpp:60 msgid "No error" msgstr "Inget fel" #: src/hed/libs/data/DataStatus.cpp:61 msgid "Transfer timed out" msgstr "Överföring avbröts pÃ¥ grund av timeout" #: src/hed/libs/data/DataStatus.cpp:62 msgid "Checksum mismatch" msgstr "Checksumma stämmer inte överens" #: src/hed/libs/data/DataStatus.cpp:63 msgid "Bad logic" msgstr "DÃ¥lig logik" #: src/hed/libs/data/DataStatus.cpp:64 msgid "All results obtained are invalid" msgstr "Alla erhÃ¥llna resultat är ogiltiga" #: src/hed/libs/data/DataStatus.cpp:65 msgid "Temporary service error" msgstr "Temporärt tjänstefel" #: src/hed/libs/data/DataStatus.cpp:66 msgid "Permanent service error" msgstr "Permanent tjänstefel" #: src/hed/libs/data/DataStatus.cpp:67 msgid "Error switching uid" msgstr "Fel vid byte av UID" #: src/hed/libs/data/DataStatus.cpp:68 msgid "Request timed out" msgstr "Begäran avbröts pÃ¥ grund av timeout" #: src/hed/libs/data/FileCache.cpp:111 msgid "No cache directory specified" msgstr "Ingen cachekatalog angiven" #: src/hed/libs/data/FileCache.cpp:128 msgid "No usable caches" msgstr "Inga användbara cacher" #: src/hed/libs/data/FileCache.cpp:137 msgid "No draining cache directory specified" msgstr "Ingen draining-cachekatalog angiven" #: src/hed/libs/data/FileCache.cpp:155 msgid "No read-only cache directory specified" msgstr "Ingen readonly-cachekatalog angiven" #: src/hed/libs/data/FileCache.cpp:184 #, c-format msgid "Failed to create cache directory for file %s: %s" msgstr "Misslyckades med att skapa cachekatalog för fil %s: %s" #: src/hed/libs/data/FileCache.cpp:194 #, c-format msgid "Failed to create any cache directories for %s" msgstr "Misslyckades med att skapa cachekataloger för %s" #: src/hed/libs/data/FileCache.cpp:201 #, c-format msgid "Failed to change permissions on %s: %s" msgstr "Misslyckades med att ändra Ã¥tkomsträttigheter pÃ¥ %s: %s" #: src/hed/libs/data/FileCache.cpp:213 #, c-format msgid "Failed to delete stale cache file %s: %s" msgstr "Misslyckades med att ta bort gammal cachefil %s: %s" #: src/hed/libs/data/FileCache.cpp:216 #, c-format msgid "Failed to release lock on file %s" msgstr "Misslyckades med att frigöra lÃ¥s pÃ¥ fil %s" #: src/hed/libs/data/FileCache.cpp:234 #, c-format msgid "Failed looking up attributes of cached file: %s" msgstr "Misslyckades med att slÃ¥ upp attribut för cachad fil: %s" #: src/hed/libs/data/FileCache.cpp:240 #, c-format msgid "Failed to obtain lock on cache file %s" msgstr "Misslyckades med att erhÃ¥lla lÃ¥s pÃ¥ cachefil %s" #: src/hed/libs/data/FileCache.cpp:249 
src/hed/libs/data/FileCache.cpp:309 #, c-format msgid "Error removing cache file %s: %s" msgstr "Fel vid borttagande av cachefil %s: %s" #: src/hed/libs/data/FileCache.cpp:251 src/hed/libs/data/FileCache.cpp:262 #, c-format msgid "Failed to remove lock on %s. Some manual intervention may be required" msgstr "" "Misslyckades med att ta bort lÃ¥s pÃ¥ %s. Manuell intervention kan behövas" #: src/hed/libs/data/FileCache.cpp:281 src/hed/libs/data/FileCache.cpp:315 #, c-format msgid "Failed to unlock file %s: %s. Manual intervention may be required" msgstr "" "Misslyckades med att lÃ¥sa upp fil %s: %s. Manuell intervention kan behövas" #: src/hed/libs/data/FileCache.cpp:298 #, c-format msgid "Invalid lock on file %s" msgstr "Ogiltigt lÃ¥s pÃ¥ fil %s" #: src/hed/libs/data/FileCache.cpp:304 #, c-format msgid "Failed to remove .meta file %s: %s" msgstr "Misslyckades med att ta bort .meta-fil %s: %s" #: src/hed/libs/data/FileCache.cpp:369 #, c-format msgid "Cache not found for file %s" msgstr "Hittade inte cache för fil %s" #: src/hed/libs/data/FileCache.cpp:379 #, c-format msgid "" "Cache file %s was modified in the last second, sleeping 1 second to avoid " "race condition" msgstr "" "Cachefil %s ändrades under den senaste sekunden, väntar 1 sekund för att " "undvika race condition" #: src/hed/libs/data/FileCache.cpp:384 src/hed/libs/data/FileCache.cpp:689 #, c-format msgid "Cache file %s does not exist" msgstr "Cachefil %s existerar inte" #: src/hed/libs/data/FileCache.cpp:389 src/hed/libs/data/FileCache.cpp:691 #, c-format msgid "Error accessing cache file %s: %s" msgstr "Ã…tkomstfel för cachefil %s: %s" #: src/hed/libs/data/FileCache.cpp:395 #, c-format msgid "Cannot create directory %s for per-job hard links" msgstr "Kan inte skapa katalog %s för per-jobb hÃ¥rda länkar" #: src/hed/libs/data/FileCache.cpp:400 #, c-format msgid "Cannot change permission of %s: %s " msgstr "Kan inte ändra Ã¥tkomsträttigheter för %s: %s" #: src/hed/libs/data/FileCache.cpp:404 #, c-format msgid "Cannot change owner of %s: %s " msgstr "Kan inte ändra ägare för %s: %s" #: src/hed/libs/data/FileCache.cpp:418 #, c-format msgid "Failed to remove existing hard link at %s: %s" msgstr "Misslyckades med att ta bort existerande hÃ¥rd länk pÃ¥ %s: %s" #: src/hed/libs/data/FileCache.cpp:422 src/hed/libs/data/FileCache.cpp:433 #, c-format msgid "Failed to create hard link from %s to %s: %s" msgstr "Misslyckades med att skapa hÃ¥rd länk frÃ¥n %s till %s: %s" #: src/hed/libs/data/FileCache.cpp:428 #, c-format msgid "Cache file %s not found" msgstr "Hittade inte cachefil %s" #: src/hed/libs/data/FileCache.cpp:443 #, c-format msgid "Failed to change permissions or set owner of hard link %s: %s" msgstr "" "Misslyckades med att ändra Ã¥tkomsträttigheter eller ägare för hÃ¥rd länk %s: " "%s" #: src/hed/libs/data/FileCache.cpp:451 #, c-format msgid "Failed to release lock on cache file %s" msgstr "Misslyckades med att frigöra lÃ¥s pÃ¥ cachefil %s" #: src/hed/libs/data/FileCache.cpp:462 #, c-format msgid "Cache file %s was locked during link/copy, must start again" msgstr "Cachefil %s lÃ¥stes under länkning/kopiering, mÃ¥ste börja om" #: src/hed/libs/data/FileCache.cpp:467 #, c-format msgid "Cache file %s was deleted during link/copy, must start again" msgstr "Cachefil %s togs bort under länkning/kopiering, mÃ¥ste börja om" #: src/hed/libs/data/FileCache.cpp:472 #, c-format msgid "Cache file %s was modified while linking, must start again" msgstr "Cachefil %s ändrades under länkning, mÃ¥ste börja om<" #: 
src/hed/libs/data/FileCache.cpp:490 #, c-format msgid "Failed to copy file %s to %s: %s" msgstr "Misslyckades med att kopiera fil %s till %s: %s" #: src/hed/libs/data/FileCache.cpp:496 #, c-format msgid "Failed to set executable bit on file %s" msgstr "Misslyckades med att sätta exekverbar bit pÃ¥ fil %s" #: src/hed/libs/data/FileCache.cpp:501 #, c-format msgid "Failed to set executable bit on file %s: %s" msgstr "Misslyckades med att sätta exekverbar bit pÃ¥ fil %s: %s" #: src/hed/libs/data/FileCache.cpp:515 #, c-format msgid "Failed to remove existing symbolic link at %s: %s" msgstr "Misslyckades med att ta bort existerande symbolisk länk pÃ¥ %s: %s" #: src/hed/libs/data/FileCache.cpp:519 src/hed/libs/data/FileCache.cpp:524 #, c-format msgid "Failed to create symbolic link from %s to %s: %s" msgstr "Misslyckades med att skapa symbolisk länk frÃ¥n %s till %s: %s" #: src/hed/libs/data/FileCache.cpp:554 #, c-format msgid "Failed to remove cache per-job dir %s: %s" msgstr "Misslyckades med att ta bort cache per-jobb-katalog %s: %s" #: src/hed/libs/data/FileCache.cpp:573 src/hed/libs/data/FileCache.cpp:641 #, c-format msgid "Error reading meta file %s: %s" msgstr "Fel vid läsning av metafil %s: %s" #: src/hed/libs/data/FileCache.cpp:578 src/hed/libs/data/FileCache.cpp:646 #, c-format msgid "Error opening meta file %s" msgstr "Fel vid öppnande av metafil %s" #: src/hed/libs/data/FileCache.cpp:583 src/hed/libs/data/FileCache.cpp:650 #, c-format msgid "meta file %s is empty" msgstr "metafil %s är tom" #: src/hed/libs/data/FileCache.cpp:593 #, c-format msgid "" "File %s is already cached at %s under a different URL: %s - will not add DN " "to cached list" msgstr "" "Fil %s är redan cachad pÃ¥ %s under en annan URL: %s - kommer ej att lägga " "till DN till cachad lista" #: src/hed/libs/data/FileCache.cpp:604 #, c-format msgid "Bad format detected in file %s, in line %s" msgstr "Felaktigt format upptäckt i fil %s, pÃ¥ rad %s" #: src/hed/libs/data/FileCache.cpp:620 #, c-format msgid "Could not acquire lock on meta file %s" msgstr "Kunde inte fÃ¥ lÃ¥s pÃ¥ metafil %s" #: src/hed/libs/data/FileCache.cpp:624 #, c-format msgid "Error opening meta file for writing %s" msgstr "Fel vid öppnande av metafil för skrivning: %s" #: src/hed/libs/data/FileCache.cpp:660 #, c-format msgid "DN %s is cached and is valid until %s for URL %s" msgstr "DN %s är cachat och är giltigt till %s för URL %s" #: src/hed/libs/data/FileCache.cpp:664 #, c-format msgid "DN %s is cached but has expired for URL %s" msgstr "DN %s är cachat men dess giltighetstid har gÃ¥tt ut för URL %s" #: src/hed/libs/data/FileCache.cpp:715 #, c-format msgid "Failed to acquire lock on cache meta file %s" msgstr "Misslyckades med att fÃ¥ lÃ¥s pÃ¥ cachemetafil %s" #: src/hed/libs/data/FileCache.cpp:720 #, c-format msgid "Failed to create cache meta file %s" msgstr "Misslyckades med att skapa cachemetafil %s" #: src/hed/libs/data/FileCache.cpp:735 #, c-format msgid "Failed to read cache meta file %s" msgstr "Misslyckades med att läsa cachemetafil %s" #: src/hed/libs/data/FileCache.cpp:740 #, c-format msgid "Cache meta file %s is empty, will recreate" msgstr "Cachemetafil %s är tom, kommer att Ã¥terskapa" #: src/hed/libs/data/FileCache.cpp:745 #, c-format msgid "Cache meta file %s possibly corrupted, will recreate" msgstr "Cachemetafil %s är möjligen korrupt, kommer att Ã¥terskapa" #: src/hed/libs/data/FileCache.cpp:749 #, c-format msgid "" "File %s is already cached at %s under a different URL: %s - this file will " "not be cached" msgstr "" "Fil %s är 
redan cachad pÃ¥ %s under en annan URL: %s - denna fil kommer ej " "att cachas" #: src/hed/libs/data/FileCache.cpp:759 #, c-format msgid "Error looking up attributes of cache meta file %s: %s" msgstr "Fel vid uppslagning av attribut för cachemetafil %s: %s" #: src/hed/libs/data/FileCache.cpp:830 #, c-format msgid "Using cache %s" msgstr "Använder cache %s" #: src/hed/libs/data/FileCache.cpp:844 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:79 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:112 #, c-format msgid "Error getting info from statvfs for the path %s: %s" msgstr "Fel vid hämtning av information frÃ¥n statvfs för sökväg %s: %s" #: src/hed/libs/data/FileCache.cpp:850 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:118 #, c-format msgid "Cache %s: Free space %f GB" msgstr "Cache %s: Fritt utrymme %f GB" #: src/hed/libs/data/URLMap.cpp:33 #, c-format msgid "Can't use URL %s" msgstr "Kan inte använda URL: %s" #: src/hed/libs/data/URLMap.cpp:39 #, c-format msgid "file %s is not accessible" msgstr "fil %s kan inte kommas Ã¥t" #: src/hed/libs/data/URLMap.cpp:49 #, c-format msgid "Mapping %s to %s" msgstr "Mappar %s till %s" #: src/hed/libs/data/examples/simple_copy.cpp:17 msgid "Usage: copy source destination" msgstr "Användning: kopiera källa destination" #: src/hed/libs/data/examples/simple_copy.cpp:42 #, c-format msgid "Copy failed: %s" msgstr "Kopiering misslyckades: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:41 #, c-format msgid "Failed to read proxy file: %s" msgstr "Misslyckades med att läsa proxy fil: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:49 #, c-format msgid "Failed to read certificate file: %s" msgstr "Misslyckades med att läsa certifikatfil: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:56 #, c-format msgid "Failed to read private key file: %s" msgstr "Misslyckades med att läsa privat-nyckelfil: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:82 #, c-format msgid "" "Failed to convert GSI credential to GSS credential (major: %d, minor: %d):%s:" "%s" msgstr "" "Misslyckades med att konvertera GSI-referens till GSS-referens (major: %d, " "minor: %d):%s:%s" #: src/hed/libs/globusutils/GSSCredential.cpp:94 #, c-format msgid "Failed to release GSS credential (major: %d, minor: %d):%s:%s" msgstr "Misslyckades med att frigöra GSS-referens (major: %d, minor: %d):%s:%s" #: src/hed/libs/loader/ModuleManager.cpp:28 msgid "Module Manager Init" msgstr "Modulhanterare init" #: src/hed/libs/loader/ModuleManager.cpp:71 msgid "" "Busy plugins found while unloading Module Manager. Waiting for them to be " "released." msgstr "" "Upptagna pluginer hittad när modul-hanteraren laddades ut. Väntar pÃ¥ att de " "ska frigöras." 
#: src/hed/libs/loader/ModuleManager.cpp:205 #, c-format msgid "Found %s in cache" msgstr "Hittade %s i cache" #: src/hed/libs/loader/ModuleManager.cpp:212 #, c-format msgid "Could not locate module %s in following paths:" msgstr "Kunde inte lokalisera modulen %s pÃ¥ följande sökvägar:" #: src/hed/libs/loader/ModuleManager.cpp:216 #, c-format msgid "\t%s" msgstr "\t%s" #: src/hed/libs/loader/ModuleManager.cpp:230 #, c-format msgid "Loaded %s" msgstr "Laddade in %s" #: src/hed/libs/loader/ModuleManager.cpp:274 msgid "Module Manager Init by ModuleManager::setCfg" msgstr "Modulhanterare init av ModuleManager::setCfg" #: src/hed/libs/loader/ModuleManager.cpp:310 #: src/hed/libs/loader/ModuleManager.cpp:323 #, c-format msgid "%s made persistent" msgstr "%s gjord persistent" #: src/hed/libs/loader/ModuleManager.cpp:314 #, c-format msgid "Not found %s in cache" msgstr "Hittade inte %s i cache" #: src/hed/libs/loader/ModuleManager.cpp:328 msgid "Specified module not found in cache" msgstr "Angiven modul hittades inte i cache" #: src/hed/libs/loader/Plugin.cpp:364 src/hed/libs/loader/Plugin.cpp:557 #, c-format msgid "Could not find loadable module descriptor by name %s" msgstr "Kunde inte hitta inladdningsbar modulbeskrivning med namn %s" #: src/hed/libs/loader/Plugin.cpp:372 src/hed/libs/loader/Plugin.cpp:567 #, c-format msgid "Could not find loadable module by name %s (%s)" msgstr "Kunde inte hitta inladdningsbar modul med namn %s (%s)" #: src/hed/libs/loader/Plugin.cpp:378 src/hed/libs/loader/Plugin.cpp:480 #: src/hed/libs/loader/Plugin.cpp:572 #, c-format msgid "Module %s is not an ARC plugin (%s)" msgstr "Modulen %s är inte en ARC-plugin (%s)" #: src/hed/libs/loader/Plugin.cpp:395 src/hed/libs/loader/Plugin.cpp:490 #: src/hed/libs/loader/Plugin.cpp:598 #, c-format msgid "Module %s failed to reload (%s)" msgstr "Modul %s kunde inte laddas in igen (%s)" #: src/hed/libs/loader/Plugin.cpp:417 #, c-format msgid "Module %s contains no plugin %s" msgstr "Modul %s innehÃ¥ller ingen plugin %s" #: src/hed/libs/loader/Plugin.cpp:462 #, c-format msgid "Could not find loadable module descriptor by name %s or kind %s" msgstr "" "Kunde inte hitta inladdningsbar modulbeskrivning med namn %s eller typ %s" #: src/hed/libs/loader/Plugin.cpp:467 #, c-format msgid "Loadable module %s contains no requested plugin %s of kind %s" msgstr "Inladdningsbar modul %s innehÃ¥ller inte begärd plugin %s av typen %s" #: src/hed/libs/loader/Plugin.cpp:474 #, c-format msgid "Could not find loadable module by names %s and %s (%s)" msgstr "Kunde inte hitta inladdningsbar modul med namn %s och %s (%s)" #: src/hed/libs/loader/Plugin.cpp:503 #, c-format msgid "Module %s contains no requested plugin %s of kind %s" msgstr "Modul %s innehÃ¥ller inte begärd plugin %s av typen %s" #: src/hed/libs/loader/Plugin.cpp:588 #, c-format msgid "Module %s does not contain plugin(s) of specified kind(s)" msgstr "Modul %s innehÃ¥ller inte en plugin(er) av angiven typ" #: src/hed/libs/message/MCC.cpp:76 src/hed/libs/message/Service.cpp:25 #, c-format msgid "No security processing/check requested for '%s'" msgstr "Ingen säkerhetsprocessering/kontroll begärd för '%s'" #: src/hed/libs/message/MCC.cpp:85 #, c-format msgid "Security processing/check failed: %s" msgstr "Säkerhetsprocessering/kontroll misslyckades: %s" #: src/hed/libs/message/MCC.cpp:90 msgid "Security processing/check passed" msgstr "Säkerhetsprocessering/kontroll OK" #: src/hed/libs/message/MCCLoader.cpp:16 msgid "Chain(s) configuration failed" msgstr "Inställning av kedja misslyckades" 
#: src/hed/libs/message/MCCLoader.cpp:133 msgid "SecHandler configuration is not defined" msgstr "SecHandler-inställningar är inte definierade" #: src/hed/libs/message/MCCLoader.cpp:156 msgid "SecHandler has no configuration" msgstr "Säkerhetshanterare saknar inställningar" #: src/hed/libs/message/MCCLoader.cpp:162 msgid "SecHandler has no name attribute defined" msgstr "Säkerhetshanterare har inget namnattribut definierat" #: src/hed/libs/message/MCCLoader.cpp:172 #, c-format msgid "Security Handler %s(%s) could not be created" msgstr "Säkerhetshanterare %s(%s) kunde inte skapas" #: src/hed/libs/message/MCCLoader.cpp:176 #, c-format msgid "SecHandler: %s(%s)" msgstr "SecHandler: %s(%s)" #: src/hed/libs/message/MCCLoader.cpp:188 msgid "Component has no name attribute defined" msgstr "Komponent har inget namnattribut definierat" #: src/hed/libs/message/MCCLoader.cpp:193 msgid "Component has no ID attribute defined" msgstr "Komponent har inget id-attribut definierat" #: src/hed/libs/message/MCCLoader.cpp:202 #, c-format msgid "Component %s(%s) could not be created" msgstr "Komponent %s(%s) kunde inte skapas" #: src/hed/libs/message/MCCLoader.cpp:232 #, c-format msgid "Component's %s(%s) next has no ID attribute defined" msgstr "Komponentens %s(%s) nästa har inget id-attribut definierat" #: src/hed/libs/message/MCCLoader.cpp:287 #, c-format msgid "Loaded MCC %s(%s)" msgstr "Laddade in MCC %s(%s)" #: src/hed/libs/message/MCCLoader.cpp:305 #, c-format msgid "Plexer's (%s) next has no ID attribute defined" msgstr "Plexerns (%s) nästa har inget id-attribut definierat" #: src/hed/libs/message/MCCLoader.cpp:315 #, c-format msgid "Loaded Plexer %s" msgstr "Laddade in Plexer %s" #: src/hed/libs/message/MCCLoader.cpp:323 msgid "Service has no Name attribute defined" msgstr "Tjänsten har inget namnattribut definierat" #: src/hed/libs/message/MCCLoader.cpp:329 msgid "Service has no ID attribute defined" msgstr "Tjänsten har inget id-attribut definierat" #: src/hed/libs/message/MCCLoader.cpp:338 #, c-format msgid "Service %s(%s) could not be created" msgstr "Tjänsten %s(%s) kunde inte skapas" #: src/hed/libs/message/MCCLoader.cpp:345 #, c-format msgid "Loaded Service %s(%s)" msgstr "Laddade in tjänst %s(%s)" #: src/hed/libs/message/MCCLoader.cpp:387 #, c-format msgid "Linking MCC %s(%s) to MCC (%s) under %s" msgstr "Länkar MCC %s(%s) till MCC (%s) under %s" #: src/hed/libs/message/MCCLoader.cpp:398 #, c-format msgid "Linking MCC %s(%s) to Service (%s) under %s" msgstr "Länkar MCC %s(%s) till tjänst (%s) under %s" #: src/hed/libs/message/MCCLoader.cpp:407 #, c-format msgid "Linking MCC %s(%s) to Plexer (%s) under %s" msgstr "Länkar MCC %s(%s) till Plexer (%s) under %s" #: src/hed/libs/message/MCCLoader.cpp:412 #, c-format msgid "MCC %s(%s) - next %s(%s) has no target" msgstr "MCC %s(%s) - nästa %s(%s) saknar target" #: src/hed/libs/message/MCCLoader.cpp:431 #, c-format msgid "Linking Plexer %s to MCC (%s) under %s" msgstr "Länkar Plexer %s till MCC (%s) under %s" #: src/hed/libs/message/MCCLoader.cpp:442 #, c-format msgid "Linking Plexer %s to Service (%s) under %s" msgstr "Länkar Plexer %s till tjänst (%s) under %s" #: src/hed/libs/message/MCCLoader.cpp:451 #, c-format msgid "Linking Plexer %s to Plexer (%s) under %s" msgstr "Länkar Plexer %s till Plexer (%s) under %s" #: src/hed/libs/message/MCCLoader.cpp:457 #, c-format msgid "Plexer (%s) - next %s(%s) has no target" msgstr "Plexer (%s) - nästa %s(%s) saknar target" #: src/hed/libs/message/Plexer.cpp:31 #, c-format msgid "Bad label: \"%s\"" msgstr 
"DÃ¥lig etikett: \"%s\"" #: src/hed/libs/message/Plexer.cpp:47 #, c-format msgid "Operation on path \"%s\"" msgstr "Operation pÃ¥ sökväg \"%s\"" #: src/hed/libs/message/Plexer.cpp:60 #, c-format msgid "No next MCC or Service at path \"%s\"" msgstr "Ingen nästa MCC eller tjänst pÃ¥ sökväg \"%s\"" #: src/hed/libs/message/Service.cpp:35 #, c-format msgid "Security processing/check for '%s' failed: %s" msgstr "Säkerhetsprocessering/kontroll för '%s' misslyckades: %s" #: src/hed/libs/message/Service.cpp:41 #, c-format msgid "Security processing/check for '%s' passed" msgstr "Säkerhetsprocessering/kontroll för '%s' OK" #: src/hed/libs/otokens/jwse.cpp:55 #, c-format msgid "JWSE::Input: token: %s" msgstr "JWSE::Input: token: %s" #: src/hed/libs/otokens/jwse.cpp:75 #, c-format msgid "JWSE::Input: header: %s" msgstr "JWSE::Input: header: %s" #: src/hed/libs/otokens/jwse.cpp:101 #, c-format msgid "JWSE::Input: JWS content: %s" msgstr "JWSE::Input: JWS innehÃ¥ll: %s" #: src/hed/libs/otokens/jwse.cpp:111 msgid "JWSE::Input: JWS: token too young" msgstr "JWSE::Input: JWS: token för ungt" #: src/hed/libs/otokens/jwse.cpp:120 msgid "JWSE::Input: JWS: token too old" msgstr "JWSE::Input: JWS: token för gammalt" #: src/hed/libs/otokens/jwse.cpp:131 #, c-format msgid "JWSE::Input: JWS: signature algorithm: %s" msgstr "JWSE::Input: JWS: signeringsalgoritm: %s" #: src/hed/libs/otokens/jwse.cpp:190 msgid "JWSE::Input: JWS: signature verification failed" msgstr "JWSE::Input: JWS: signaturverifiering misslyckades" #: src/hed/libs/otokens/jwse.cpp:196 msgid "JWSE::Input: JWE: not supported yet" msgstr "JWSE::Input: JWE: stöds inte än" #: src/hed/libs/otokens/jwse_keys.cpp:271 msgid "JWSE::ExtractPublicKey: x5c key" msgstr "JWSE::ExtractPublicKey: x5c-nyckel" #: src/hed/libs/otokens/jwse_keys.cpp:279 msgid "JWSE::ExtractPublicKey: jwk key" msgstr "JWSE::ExtractPublicKey: jwk-nyckel" #: src/hed/libs/otokens/jwse_keys.cpp:286 msgid "JWSE::ExtractPublicKey: external jwk key" msgstr "JWSE::ExtractPublicKey: extern jwk-nyckel" #: src/hed/libs/otokens/jwse_keys.cpp:303 #, c-format msgid "JWSE::ExtractPublicKey: fetching jwl key from %s" msgstr "JWSE::ExtractPublicKey: hämtar jwl-nyckel frÃ¥n %s" #: src/hed/libs/otokens/jwse_keys.cpp:316 msgid "JWSE::ExtractPublicKey: no supported key" msgstr "JWSE::ExtractPublicKey: inte en nyckel som stöds" #: src/hed/libs/otokens/jwse_keys.cpp:319 msgid "JWSE::ExtractPublicKey: key parsing error" msgstr "JWSE::ExtractPublicKey: nyckeltolkningsfel" #: src/hed/libs/otokens/openid_metadata.cpp:40 #: src/hed/libs/otokens/openid_metadata.cpp:45 #, c-format msgid "Input: metadata: %s" msgstr "Indata: metadata: %s" #: src/hed/libs/otokens/openid_metadata.cpp:414 #, c-format msgid "Fetch: response code: %u %s" msgstr "Fetch: svarskod: %u %s" #: src/hed/libs/otokens/openid_metadata.cpp:416 #, c-format msgid "Fetch: response body: %s" msgstr "Fetch: svars-body: %s" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:136 #, c-format msgid "Can not load ARC evaluator object: %s" msgstr "Kan inte ladda in ARC-utvärderingsobjekt: %s" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:187 #, c-format msgid "Can not load ARC request object: %s" msgstr "Kan inte ladda in ARC-begäranobjekt: %s" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:228 #, c-format msgid "Can not load policy object: %s" msgstr "Kan inte ladda in policyobjekt: %s" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:276 msgid "Can not load policy object" msgstr "Kan inte ladda in policyobjekt" #: 
src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:324 msgid "Can not load request object" msgstr "Kan inte ladda in begäranobjekt" #: src/hed/libs/security/ArcPDP/PolicyParser.cpp:119 msgid "Can not generate policy object" msgstr "Kan inte generera policyobjekt" #: src/hed/libs/security/ArcPDP/attr/RequestAttribute.cpp:37 #, c-format msgid "Id= %s,Type= %s,Issuer= %s,Value= %s" msgstr "Id= %s,Typ= %s,Utfärdare= %s,Värde= %s" #: src/hed/libs/security/ArcPDP/attr/RequestAttribute.cpp:40 #, c-format msgid "No Attribute exists, which can deal with type: %s" msgstr "Inget attribut existerar som kan hantera typen: %s" #: src/hed/mcc/http/MCCHTTP.cpp:168 #, c-format msgid "HTTP Error: %d %s" msgstr "HTTP-fel: %d %s" #: src/hed/mcc/http/MCCHTTP.cpp:241 msgid "Cannot create http payload" msgstr "Kan inte skapa http-nyttolast" #: src/hed/mcc/http/MCCHTTP.cpp:311 msgid "No next element in the chain" msgstr "Inget nästa element i kedjan" #: src/hed/mcc/http/MCCHTTP.cpp:320 #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:253 msgid "next element of the chain returned error status" msgstr "nästa element i kedjan returnerade felstatus" #: src/hed/mcc/http/MCCHTTP.cpp:329 msgid "next element of the chain returned no payload" msgstr "nästa element i kedjan returnerade ingen nyttolast" #: src/hed/mcc/http/MCCHTTP.cpp:341 msgid "next element of the chain returned invalid/unsupported payload" msgstr "nästa element i kedjan returnerade ogiltig/ej stödd nyttolast" #: src/hed/mcc/http/MCCHTTP.cpp:423 msgid "Error to flush output payload" msgstr "Fel vid utmatning av utdatanyttolast" #: src/hed/mcc/http/PayloadHTTP.cpp:305 #, c-format msgid "<< %s" msgstr "<< %s" #: src/hed/mcc/http/PayloadHTTP.cpp:354 src/hed/mcc/http/PayloadHTTP.cpp:456 #, c-format msgid "< %s" msgstr "< %s" #: src/hed/mcc/http/PayloadHTTP.cpp:575 msgid "Failed to parse HTTP header" msgstr "Misslyckades med att tolka HTTP-huvud" #: src/hed/mcc/http/PayloadHTTP.cpp:836 msgid "Invalid HTTP object can't produce result" msgstr "Ogiltigt HTTP-objekt kan inte producera resultat" #: src/hed/mcc/http/PayloadHTTP.cpp:949 #, c-format msgid "> %s" msgstr "> %s" #: src/hed/mcc/http/PayloadHTTP.cpp:974 msgid "Failed to write header to output stream" msgstr "Misslyckades med att skriva header till utdataström" #: src/hed/mcc/http/PayloadHTTP.cpp:999 src/hed/mcc/http/PayloadHTTP.cpp:1005 #: src/hed/mcc/http/PayloadHTTP.cpp:1011 src/hed/mcc/http/PayloadHTTP.cpp:1021 #: src/hed/mcc/http/PayloadHTTP.cpp:1033 src/hed/mcc/http/PayloadHTTP.cpp:1038 #: src/hed/mcc/http/PayloadHTTP.cpp:1043 src/hed/mcc/http/PayloadHTTP.cpp:1051 #: src/hed/mcc/http/PayloadHTTP.cpp:1058 msgid "Failed to write body to output stream" msgstr "Misslyckades med att skriva body till utdataström" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:31 msgid "Skipping service: no ServicePath found!" msgstr "Hoppar över tjänst: hittade ingen ServicePath!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:37 msgid "Skipping service: no SchemaPath found!" msgstr "Hoppar över tjänst: hittade ingen SchemaPath!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:89 msgid "Parser Context creation failed!" msgstr "Skapande av tolkningskontext misslyckades!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:98 msgid "Cannot parse schema!" msgstr "Kan inte tolka schema!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:110 msgid "Empty payload!" msgstr "Tom nyttolast!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:119 msgid "Could not convert payload!" msgstr "Kunde inte konvertera nyttolast!" 
#: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:125 #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:212 msgid "Could not create PayloadSOAP!" msgstr "Kunde inte skapa SOAP nyttolast!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:196 msgid "Empty input payload!" msgstr "Tom indatanyttolast!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:205 msgid "Could not convert incoming payload!" msgstr "Kunde inte konvertera inkommande nyttolast!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:232 msgid "Missing schema! Skipping validation..." msgstr "Schema saknas! Hoppar över validering..." #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:237 msgid "Could not validate message!" msgstr "Kunde inte validera meddelande!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:245 #: src/hed/mcc/soap/MCCSOAP.cpp:222 src/hed/mcc/soap/MCCSOAP.cpp:236 #: src/hed/mcc/soap/MCCSOAP.cpp:266 msgid "empty next chain element" msgstr "tomt nästa kedjeelement" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:257 #: src/hed/mcc/soap/MCCSOAP.cpp:282 msgid "next element of the chain returned empty payload" msgstr "nästa element i kedjan returnerade tom nyttolast" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:265 msgid "next element of the chain returned invalid payload" msgstr "nästa element i kedjan returnerade ogiltig nyttolast" #: src/hed/mcc/soap/MCCSOAP.cpp:207 msgid "empty input payload" msgstr "tom indatanyttolast" #: src/hed/mcc/soap/MCCSOAP.cpp:217 #, c-format msgid "MIME is not suitable for SOAP: %s" msgstr "MIME är inte lämplig för SOAP: %s" #: src/hed/mcc/soap/MCCSOAP.cpp:231 msgid "incoming message is not SOAP" msgstr "inkommande meddelande är inte SOAP" #: src/hed/mcc/soap/MCCSOAP.cpp:258 #, c-format msgid "Security check failed in SOAP MCC for incoming message: %s" msgstr "" "Säkerhetskontroll misslyckades i SOAP-MCC för inkommande meddelande: %s" #: src/hed/mcc/soap/MCCSOAP.cpp:274 #, c-format msgid "next element of the chain returned error status: %s" msgstr "nästa element i kedjan returnerade felstatus: %s" #: src/hed/mcc/soap/MCCSOAP.cpp:293 msgid "next element of the chain returned unknown payload - passing through" msgstr "nästa element i kedjan returnerade okänd nyttolast - passerar igenom" #: src/hed/mcc/soap/MCCSOAP.cpp:298 src/hed/mcc/soap/MCCSOAP.cpp:314 #, c-format msgid "Security check failed in SOAP MCC for outgoing message: %s" msgstr "Säkerhetskontroll misslyckades i SOAP-MCC för utgående meddelande: %s" #: src/hed/mcc/soap/MCCSOAP.cpp:368 msgid "Security check failed in SOAP MCC for outgoing message" msgstr "Säkerhetskontroll misslyckades i SOAP-MCC för utgående meddelande" #: src/hed/mcc/soap/MCCSOAP.cpp:421 msgid "Security check failed in SOAP MCC for incoming message" msgstr "Säkerhetskontroll misslyckades i SOAP-MCC för inkommande meddelande" #: src/hed/mcc/tcp/MCCTCP.cpp:82 msgid "Missing Port in Listen element" msgstr "Port saknas i Listen-element" #: src/hed/mcc/tcp/MCCTCP.cpp:91 msgid "Version in Listen element can't be recognized" msgstr "Version i Listen-element kan ej kännas igen" #: src/hed/mcc/tcp/MCCTCP.cpp:100 #, c-format msgid "Failed to obtain local address for port %s - %s" msgstr "Misslyckades med att erhålla lokal adress för port %s - %s" #: src/hed/mcc/tcp/MCCTCP.cpp:102 #, c-format msgid "Failed to obtain local address for %s:%s - %s" msgstr "Misslyckades med att erhålla lokal adress för %s:%s - %s" #: src/hed/mcc/tcp/MCCTCP.cpp:109 #, c-format msgid "Trying to listen on TCP port %s(%s)" msgstr "Försöker lyssna på TCP-port %s(%s)" #:
src/hed/mcc/tcp/MCCTCP.cpp:111 #, c-format msgid "Trying to listen on %s:%s(%s)" msgstr "Försöker lyssna på %s:%s(%s)" #: src/hed/mcc/tcp/MCCTCP.cpp:117 #, c-format msgid "Failed to create socket for listening at TCP port %s(%s): %s" msgstr "" "Misslyckades med att skapa socket för att lyssna på TCP-port %s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:119 #, c-format msgid "Failed to create socket for listening at %s:%s(%s): %s" msgstr "Misslyckades med att skapa socket för att lyssna på %s:%s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:134 #, c-format msgid "" "Failed to limit socket to IPv6 at TCP port %s - may cause errors for IPv4 at " "same port" msgstr "" "Misslyckades med att begränsa socket till IPv6 på TCP-port %s - kan orsaka " "fel för IPv4 på samma port" #: src/hed/mcc/tcp/MCCTCP.cpp:136 #, c-format msgid "" "Failed to limit socket to IPv6 at %s:%s - may cause errors for IPv4 at same " "port" msgstr "" "Misslyckades med att begränsa socket till IPv6 på %s:%s - kan orsaka fel för " "IPv4 på samma port" #: src/hed/mcc/tcp/MCCTCP.cpp:144 #, c-format msgid "Failed to bind socket for TCP port %s(%s): %s" msgstr "Misslyckades med att binda socket för TCP-port %s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:146 #, c-format msgid "Failed to bind socket for %s:%s(%s): %s" msgstr "Misslyckades med att binda socket för %s:%s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:161 #, c-format msgid "Failed to listen at TCP port %s(%s): %s" msgstr "Misslyckades med att lyssna på TCP-port %s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:163 #, c-format msgid "Failed to listen at %s:%s(%s): %s" msgstr "Misslyckades med att lyssna på %s:%s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:180 #, c-format msgid "Listening on TCP port %s(%s)" msgstr "Lyssnar på TCP-port %s(%s)" #: src/hed/mcc/tcp/MCCTCP.cpp:182 #, c-format msgid "Listening on %s:%s(%s)" msgstr "Lyssnar på %s:%s(%s)" #: src/hed/mcc/tcp/MCCTCP.cpp:189 #, c-format msgid "Failed to start listening on any address for %s:%s" msgstr "Misslyckades med att börja lyssna på någon adress för %s:%s" #: src/hed/mcc/tcp/MCCTCP.cpp:191 #, c-format msgid "Failed to start listening on any address for %s:%s(IPv%s)" msgstr "Misslyckades med att börja lyssna på någon adress för %s:%s(IPv%s)" #: src/hed/mcc/tcp/MCCTCP.cpp:197 msgid "No listening ports initiated" msgstr "Inga lyssnande portar initierade" #: src/hed/mcc/tcp/MCCTCP.cpp:208 msgid "dropped" msgstr "tappas" #: src/hed/mcc/tcp/MCCTCP.cpp:208 msgid "put on hold" msgstr "ställas i kö" #: src/hed/mcc/tcp/MCCTCP.cpp:208 #, c-format msgid "Setting connections limit to %i, connections over limit will be %s" msgstr "" "Sätter förbindelsegräns till %i, förbindelser över gränsen kommer att %s" #: src/hed/mcc/tcp/MCCTCP.cpp:212 msgid "Failed to start thread for listening" msgstr "Misslyckades med att starta tråd för att lyssna" #: src/hed/mcc/tcp/MCCTCP.cpp:245 msgid "Failed to start thread for communication" msgstr "Misslyckades med att starta tråd för kommunikation" #: src/hed/mcc/tcp/MCCTCP.cpp:271 msgid "Failed while waiting for connection request" msgstr "Misslyckades under väntan på förbindelsebegäran" #: src/hed/mcc/tcp/MCCTCP.cpp:293 msgid "Failed to accept connection request" msgstr "Misslyckades med att acceptera förbindelsebegäran" #: src/hed/mcc/tcp/MCCTCP.cpp:302 msgid "Too many connections - dropping new one" msgstr "För många förbindelser - tappar en ny" #: src/hed/mcc/tcp/MCCTCP.cpp:309 msgid "Too many connections - waiting for old to close" msgstr "För många förbindelser - ställer ny i kö" #:
src/hed/mcc/tcp/MCCTCP.cpp:533 msgid "next chain element called" msgstr "nästa kedjeelement anropat" #: src/hed/mcc/tcp/MCCTCP.cpp:548 msgid "Only Raw Buffer payload is supported for output" msgstr "Endast raw-buffer-nyttolast stöds för utmatning" #: src/hed/mcc/tcp/MCCTCP.cpp:556 src/hed/mcc/tcp/MCCTCP.cpp:655 #: src/hed/mcc/tls/MCCTLS.cpp:542 msgid "Failed to send content of buffer" msgstr "Misslyckades med att skicka innehåll till buffer" #: src/hed/mcc/tcp/MCCTCP.cpp:568 msgid "TCP executor is removed" msgstr "TCP-exekverare tas bort" #: src/hed/mcc/tcp/MCCTCP.cpp:570 #, c-format msgid "Sockets do not match on exit %i != %i" msgstr "Socketar passar inte ihop vid avslut %i != %i" #: src/hed/mcc/tcp/MCCTCP.cpp:591 msgid "No Connect element specified" msgstr "Inget Connect-element angivet" #: src/hed/mcc/tcp/MCCTCP.cpp:597 msgid "Missing Port in Connect element" msgstr "Port saknas i Connect-element" #: src/hed/mcc/tcp/MCCTCP.cpp:603 msgid "Missing Host in Connect element" msgstr "Värd saknas i Connect-element" #: src/hed/mcc/tcp/MCCTCP.cpp:631 msgid "TCP client process called" msgstr "TCP-klientprocess anropad" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:67 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:83 #, c-format msgid "Failed to resolve %s (%s)" msgstr "Misslyckades med att slå upp %s (%s)" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:93 #, c-format msgid "Trying to connect %s(%s):%d" msgstr "Försöker koppla upp %s(%s):%d" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:97 #, c-format msgid "Failed to create socket for connecting to %s(%s):%d - %s" msgstr "Misslyckades med att skapa socket för förbindelse till %s(%s):%d - %s" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:109 #, c-format msgid "" "Failed to get TCP socket options for connection to %s(%s):%d - timeout won't " "work - %s" msgstr "" "Misslyckades med att erhålla TCP-socket-alternativ för förbindelse till " "%s(%s):%d - timeout kommer inte att fungera - %s" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:116 #, c-format msgid "Failed to connect to %s(%s):%i - %s" msgstr "Misslyckades med att koppla upp mot %s(%s):%i - %s" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:126 #, c-format msgid "Timeout connecting to %s(%s):%i - %i s" msgstr "Timeout vid uppkoppling till %s(%s):%i - %i s" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:134 #, c-format msgid "Failed while waiting for connection to %s(%s):%i - %s" msgstr "Misslyckades under väntande på uppkoppling till %s(%s):%i - %s" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:144 #, c-format msgid "Failed to connect to %s(%s):%i" msgstr "Misslyckades med att koppla upp mot %s(%s):%i" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:200 msgid "" "Received message out-of-band (not critical, ERROR level is just for " "debugging purposes)" msgstr "" "Mottog meddelande out-of-band (inte kritiskt, ERROR-nivå är bara för " "debuggningsändamål)" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:293 #, c-format msgid "Using DH parameters from file: %s" msgstr "Använder DH-parametrar från fil: %s" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:296 msgid "Failed to open file with DH parameters for reading" msgstr "Misslyckades med att öppna fil med DH-parametrar för läsning" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:301 msgid "Failed to read file with DH parameters" msgstr "Misslyckades med att läsa fil med DH-parametrar" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:304 msgid "Failed to apply DH parameters" msgstr "Misslyckades med att tillämpa DH-parametrar" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:306 msgid "DH parameters applied" msgstr "DH-parametrar tillämpade" #:
src/hed/mcc/tls/ConfigTLSMCC.cpp:321 #, c-format msgid "Using curve with NID: %u" msgstr "Använder kurva med NID: %u" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:324 msgid "Failed to generate EC key" msgstr "Misslyckades med att generera EC-nyckel" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:327 msgid "Failed to apply ECDH parameters" msgstr "Misslyckades med att tillämpa ECDH-parametrar" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:329 msgid "ECDH parameters applied" msgstr "ECDH-parametrar tillämpade" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:336 #, c-format msgid "Using cipher list: %s" msgstr "Använder chifferlista: %s" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:352 #, c-format msgid "Using protocol options: 0x%x" msgstr "Använder protokollalternativ: 0x%x" #: src/hed/mcc/tls/DelegationCollector.cpp:39 msgid "Independent proxy - no rights granted" msgstr "Oberoende proxy - inga rättigheter beviljade" #: src/hed/mcc/tls/DelegationCollector.cpp:43 msgid "Proxy with all rights inherited" msgstr "Proxy med alla rättigheter ärvda" #: src/hed/mcc/tls/DelegationCollector.cpp:51 msgid "Proxy with empty policy - fail on unrecognized policy" msgstr "Proxy med tom policy - misslyckades på grund av okänd policy" #: src/hed/mcc/tls/DelegationCollector.cpp:56 #, c-format msgid "Proxy with specific policy: %s" msgstr "Proxy med specifik policy: %s" #: src/hed/mcc/tls/DelegationCollector.cpp:60 msgid "Proxy with ARC Policy" msgstr "Proxy med ARC-policy" #: src/hed/mcc/tls/DelegationCollector.cpp:62 msgid "Proxy with unknown policy - fail on unrecognized policy" msgstr "Proxy med okänd policy - misslyckades på grund av okänd policy" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:77 #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:128 #, c-format msgid "Was expecting %s at the beginning of \"%s\"" msgstr "Förväntade %s i början av \"%s\"" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:82 #, c-format msgid "We only support CAs in Globus signing policy - %s is not supported" msgstr "Vi stöder endast CA i Globus signeringspolicy - %s stöds inte" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:87 #, c-format msgid "We only support X509 CAs in Globus signing policy - %s is not supported" msgstr "Vi stöder endast X509-CA i Globus signeringspolicy - %s stöds inte" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:92 msgid "Missing CA subject in Globus signing policy" msgstr "Saknat CA-subjekt i Globus signeringspolicy" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:102 msgid "Negative rights are not supported in Globus signing policy" msgstr "Negativa rättigheter stöds inte i Globus signeringspolicy" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:106 #, c-format msgid "Unknown rights in Globus signing policy - %s" msgstr "Okända rättigheter i Globus signeringspolicy - %s" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:111 #, c-format msgid "" "Only globus rights are supported in Globus signing policy - %s is not " "supported" msgstr "" "Endast globusrättigheter stöds i Globus signeringspolicy - %s stöds inte" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:116 #, c-format msgid "" "Only signing rights are supported in Globus signing policy - %s is not " "supported" msgstr "" "Endast signeringsrättigheter stöds i Globus signeringspolicy - %s stöds inte" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:133 #, c-format msgid "" "We only support subjects conditions in Globus signing policy - %s is not " "supported" msgstr "" "Vi stöder endast subjektvillkor i Globus signeringspolicy - %s stöds inte" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:138 #, c-format msgid "" "We only support globus
conditions in Globus signing policy - %s is not " "supported" msgstr "" "Vi stöder endast globusvillkor i Globus signeringspolicy - %s stöds inte" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:144 msgid "Missing condition subjects in Globus signing policy" msgstr "Saknade villkorssubjekt i Globus signeringspolicy" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:220 msgid "Unknown element in Globus signing policy" msgstr "Okänt element i Globus signeringspolicy" #: src/hed/mcc/tls/MCCTLS.cpp:218 msgid "Critical VOMS attribute processing failed" msgstr "Behandling av kritiskt VOMS-attribut misslyckades" #: src/hed/mcc/tls/MCCTLS.cpp:226 msgid "VOMS attribute validation failed" msgstr "VOMS-attributvalidering misslyckades" #: src/hed/mcc/tls/MCCTLS.cpp:228 msgid "VOMS attribute is ignored due to processing/validation error" msgstr "VOMS-attribut ignoreras på grund av processerings-/valideringsfel" #: src/hed/mcc/tls/MCCTLS.cpp:420 src/hed/mcc/tls/MCCTLS.cpp:559 #: src/hed/mcc/tls/MCCTLS.cpp:578 #, c-format msgid "Failed to establish connection: %s" msgstr "Misslyckades med att etablera förbindelse: %s" #: src/hed/mcc/tls/MCCTLS.cpp:439 src/hed/mcc/tls/MCCTLS.cpp:521 #, c-format msgid "Peer name: %s" msgstr "Peer-namn: %s" #: src/hed/mcc/tls/MCCTLS.cpp:441 src/hed/mcc/tls/MCCTLS.cpp:523 #, c-format msgid "Identity name: %s" msgstr "Identitetsnamn: %s" #: src/hed/mcc/tls/MCCTLS.cpp:443 src/hed/mcc/tls/MCCTLS.cpp:525 #, c-format msgid "CA name: %s" msgstr "CA-namn: %s" #: src/hed/mcc/tls/MCCTLS.cpp:450 msgid "Failed to process security attributes in TLS MCC for incoming message" msgstr "" "Misslyckades med att processera säkerhetsattribut i TLS-MCC för inkommande " "meddelande" #: src/hed/mcc/tls/MCCTLS.cpp:458 msgid "Security check failed in TLS MCC for incoming message" msgstr "Säkerhetskontroll misslyckades i TLS-MCC för inkommande meddelande" #: src/hed/mcc/tls/MCCTLS.cpp:531 msgid "Security check failed for outgoing TLS message" msgstr "Säkerhetskontroll misslyckades för utgående TLS-meddelande" #: src/hed/mcc/tls/MCCTLS.cpp:563 msgid "Security check failed for incoming TLS message" msgstr "Säkerhetskontroll misslyckades för inkommande TLS-meddelande" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:93 msgid "" "Failed to allocate memory for certificate subject while matching policy." msgstr "" "Misslyckades med att allokera minne för certifikatsubjekt vid " "policymatchning." #: src/hed/mcc/tls/PayloadTLSMCC.cpp:98 msgid "" "Failed to retrieve link to TLS stream. Additional policy matching is skipped." msgstr "" "Misslyckades med att hämta länk till TLS-ström. Ytterligare policymatchning " "hoppas över."
#: src/hed/mcc/tls/PayloadTLSMCC.cpp:128 #, c-format msgid "Certificate %s already expired" msgstr "Giltighetstiden för certifikat %s har redan gått ut" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:136 #, c-format msgid "Certificate %s will expire in %s" msgstr "Giltighetstiden för certifikat %s kommer att gå ut om %s" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:157 msgid "Failed to store application data" msgstr "Misslyckades med att lagra tillämpningsdata" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:185 msgid "Failed to retrieve application data from OpenSSL" msgstr "Misslyckades med att hämta tillämpningsdata från OpenSSL" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:257 src/hed/mcc/tls/PayloadTLSMCC.cpp:356 msgid "Can not create the SSL Context object" msgstr "Kan inte skapa SSL-kontextobjekt" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:270 src/hed/mcc/tls/PayloadTLSMCC.cpp:376 msgid "Can't set OpenSSL verify flags" msgstr "Kan inte ange OpenSSL verifikationsflaggor" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:289 src/hed/mcc/tls/PayloadTLSMCC.cpp:390 msgid "Can not create the SSL object" msgstr "Kan inte skapa SSL-objektet" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:299 msgid "Faile to assign hostname extension" msgstr "Misslyckades med att tilldela värdnamnstillägg" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:313 msgid "Failed to establish SSL connection" msgstr "Misslyckades med att etablera SSL-förbindelse" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:316 src/hed/mcc/tls/PayloadTLSMCC.cpp:405 #, c-format msgid "Using cipher: %s" msgstr "Använder chiffer: %s" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:402 msgid "Failed to accept SSL connection" msgstr "Misslyckades med att acceptera SSL-förbindelse" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:455 #, c-format msgid "Failed to shut down SSL: %s" msgstr "Misslyckades med att stänga av SSL: %s" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:47 msgid "" "ArcAuthZ: failed to initiate all PDPs - this instance will be non-functional" msgstr "" "ArcAuthZ: misslyckades med att initiera alla PDPer - denna instans kommer " "inte att fungera" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:70 msgid "PDP: missing name attribute" msgstr "PDP: namnattribut saknas" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:74 #, c-format msgid "PDP: %s (%s)" msgstr "PDP: %s (%s)" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:79 #, c-format msgid "PDP: %s (%s) can not be loaded" msgstr "PDP: %s (%s) kan inte laddas in" #: src/hed/shc/arcpdp/ArcEvaluationCtx.cpp:251 #, c-format msgid "There are %d RequestItems" msgstr "Det finns %d RequestItem" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:60 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:55 msgid "Can not parse classname for FunctionFactory from configuration" msgstr "Kan inte tolka klassnamn för FunctionFactory från konfigurationen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:68 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:63 msgid "Can not parse classname for AttributeFactory from configuration" msgstr "Kan inte tolka klassnamn för AttributeFactory från konfigurationen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:76 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:71 msgid "" "Can not parse classname for CombiningAlgorithmFactory from configuration" msgstr "" "Kan inte tolka klassnamn för CombiningAlgorithmFactory från konfigurationen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:84 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:79 msgid "Can not parse classname for Request from configuration" msgstr "Kan inte tolka klassnamn för Request från konfigurationen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:93 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:88
msgid "Can not parse classname for Policy from configuration" msgstr "Kan inte tolka klassnamn för Policy frÃ¥n konfigurationen" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:105 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:100 msgid "Can not dynamically produce AttributeFactory" msgstr "Kan inte skapa AttributeFactory dynamiskt" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:110 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:105 msgid "Can not dynamically produce FnFactory" msgstr "Kan inte skapa FnFactory dynamiskt" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:115 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:110 msgid "Can not dynamically produce AlgFacroty" msgstr "Kan inte skapa AlgFactory dynamiskt" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:126 #: src/hed/shc/gaclpdp/GACLEvaluator.cpp:31 #: src/hed/shc/gaclpdp/GACLEvaluator.cpp:37 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:121 msgid "Can not create PolicyStore object" msgstr "Kan inte skapa PolicyStore-objekt" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:177 src/hed/shc/test.cpp:183 #: src/hed/shc/testinterface_arc.cpp:102 src/hed/shc/testinterface_xacml.cpp:54 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:172 msgid "Can not dynamically produce Request" msgstr "Kan inte skapa Request dynamiskt" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:261 #, c-format msgid "Result value (0=Permit, 1=Deny, 2=Indeterminate, 3=Not_Applicable): %d" msgstr "Resultatvärde (0=TillÃ¥t, 1=Vägra, 2=Obestämd, 3=Ej applicerbar): %d" #: src/hed/shc/arcpdp/ArcPDP.cpp:110 msgid "Can not find ArcPDPContext" msgstr "Kan inte hitta ArcPDPContext" #: src/hed/shc/arcpdp/ArcPDP.cpp:139 src/hed/shc/xacmlpdp/XACMLPDP.cpp:117 msgid "Evaluator does not support loadable Combining Algorithms" msgstr "Utvärderare stöder inte laddningsbara kombinerande algoritmer" #: src/hed/shc/arcpdp/ArcPDP.cpp:143 src/hed/shc/xacmlpdp/XACMLPDP.cpp:121 #, c-format msgid "Evaluator does not support specified Combining Algorithm - %s" msgstr "Utvärderare stöder inte den angivna kombinerande algoritmen - %s" #: src/hed/shc/arcpdp/ArcPDP.cpp:155 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:84 #: src/hed/shc/gaclpdp/GACLPDP.cpp:118 src/hed/shc/test.cpp:94 #: src/hed/shc/testinterface_arc.cpp:37 src/hed/shc/testinterface_xacml.cpp:37 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:133 msgid "Can not dynamically produce Evaluator" msgstr "Kan inte skapa utvärderare dynamiskt" #: src/hed/shc/arcpdp/ArcPDP.cpp:158 msgid "Evaluator for ArcPDP was not loaded" msgstr "Utvärderare för Arc-PDP laddades inte" #: src/hed/shc/arcpdp/ArcPDP.cpp:165 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:57 #: src/hed/shc/gaclpdp/GACLPDP.cpp:128 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:87 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:143 src/tests/echo/echo.cpp:108 msgid "Missing security object in message" msgstr "Säkerhetsobjekt saknas i meddelande" #: src/hed/shc/arcpdp/ArcPDP.cpp:173 src/hed/shc/arcpdp/ArcPDP.cpp:181 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:137 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:143 #: src/hed/shc/gaclpdp/GACLPDP.cpp:136 src/hed/shc/gaclpdp/GACLPDP.cpp:144 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:95 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:103 #: src/tests/echo/echo.cpp:116 src/tests/echo/echo.cpp:123 msgid "Failed to convert security information to ARC request" msgstr "Misslyckades med att konvertera säkerhetsinformation till ARC-begäran" #: src/hed/shc/arcpdp/ArcPDP.cpp:189 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:150 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:111 #, c-format msgid 
"ARC Auth. request: %s" msgstr "ARC-auktoriseringsbegäran: %s" #: src/hed/shc/arcpdp/ArcPDP.cpp:192 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:153 #: src/hed/shc/gaclpdp/GACLPDP.cpp:155 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:114 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:170 msgid "No requested security information was collected" msgstr "Ingen begärd säkerhetsinformation samlades in" #: src/hed/shc/arcpdp/ArcPDP.cpp:199 msgid "Not authorized by arc.pdp - failed to get response from Evaluator" msgstr "Inte auktoriserad av arc.pdp - kunde inte fÃ¥ svar frÃ¥n utvärderare" #: src/hed/shc/arcpdp/ArcPDP.cpp:245 msgid "Authorized by arc.pdp" msgstr "Auktoriserad av arc.pdp" #: src/hed/shc/arcpdp/ArcPDP.cpp:246 msgid "" "Not authorized by arc.pdp - some of the RequestItem elements do not satisfy " "Policy" msgstr "" "Ej auktoriserad av arc.pdp - nÃ¥gra av RequestItem-elementen uppfyller inte " "policy" #: src/hed/shc/arcpdp/ArcPolicy.cpp:56 src/hed/shc/arcpdp/ArcPolicy.cpp:70 #: src/hed/shc/gaclpdp/GACLPolicy.cpp:46 src/hed/shc/gaclpdp/GACLPolicy.cpp:59 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:48 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:64 msgid "Policy is empty" msgstr "Policy är tom" #: src/hed/shc/arcpdp/ArcPolicy.cpp:114 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:115 #, c-format msgid "PolicyId: %s Alg inside this policy is:-- %s" msgstr "PolicyId: %s Alg inuti denna policy är:-- %s" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:75 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:129 msgid "No delegation policies in this context and message - passing through" msgstr "" "Inga delegeringspolicyer i denna kontext och meddelande - passerar igenom" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:95 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:109 msgid "Failed to convert security information to ARC policy" msgstr "Misslyckades med att konvertera säkerhetsinformation till ARC-policy" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:116 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:123 #, c-format msgid "ARC delegation policy: %s" msgstr "ARC delegeringspolicy: %s" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:161 msgid "No authorization response was returned" msgstr "Inget auktoriseringssvar returnerades" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:164 #, c-format msgid "There are %d requests, which satisfy at least one policy" msgstr "Det finns %d begärningar som uppfyller Ã¥tminstone en policy" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:183 msgid "Delegation authorization passed" msgstr "Delegeringsauktorisering lyckades" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:185 msgid "Delegation authorization failed" msgstr "Delegeringsauktorisering misslyckades" #: src/hed/shc/delegationsh/DelegationSH.cpp:63 msgid "" "Missing CertificatePath element or ProxyPath element, or " " is missing" msgstr "" "Saknat CertificatePath-element eller ProxyPath-element, eller " " saknas" #: src/hed/shc/delegationsh/DelegationSH.cpp:68 msgid "" "Missing or empty KeyPath element, or is missing" msgstr "" "Saknat eller tomt KeyPath-element, eller saknas" #: src/hed/shc/delegationsh/DelegationSH.cpp:74 msgid "Missing or empty CertificatePath or CACertificatesDir element" msgstr "Saknat eller tomt CertificatePath- eller CACertificatesDir-element" #: src/hed/shc/delegationsh/DelegationSH.cpp:81 #, c-format msgid "Delegation role not supported: %s" msgstr "Delegeringsroll stöds inte: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:90 #, c-format msgid "Delegation type not supported: %s" msgstr 
"Delegeringstyp stöds inte: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:115 msgid "Failed to acquire delegation context" msgstr "Misslyckades med att erhÃ¥lla delegeringskontext" #: src/hed/shc/delegationsh/DelegationSH.cpp:143 #: src/hed/shc/delegationsh/DelegationSH.cpp:254 msgid "Can't create delegation context" msgstr "Kan inte skapa delegeringskontext" #: src/hed/shc/delegationsh/DelegationSH.cpp:149 msgid "Delegation handler with delegatee role starts to process" msgstr "Delegeringshanterare med delegeringsmottagarroll börjar behandla" #: src/hed/shc/delegationsh/DelegationSH.cpp:152 #: src/services/a-rex/arex.cpp:592 src/services/candypond/CandyPond.cpp:526 #: src/services/data-staging/DataDeliveryService.cpp:624 msgid "process: POST" msgstr "process: POST" #: src/hed/shc/delegationsh/DelegationSH.cpp:159 #: src/services/a-rex/arex.cpp:599 src/services/candypond/CandyPond.cpp:535 #: src/services/data-staging/DataDeliveryService.cpp:633 #: src/services/wrappers/python/pythonwrapper.cpp:416 msgid "input is not SOAP" msgstr "indata är inte SOAP" #: src/hed/shc/delegationsh/DelegationSH.cpp:166 #, c-format msgid "Delegation service: %s" msgstr "Delegeringstjänst: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:181 #: src/hed/shc/delegationsh/DelegationSH.cpp:188 #: src/tests/client/test_ClientX509Delegation_ARC.cpp:55 #, c-format msgid "Can not get the delegation credential: %s from delegation service: %s" msgstr "Kan inte erhÃ¥lla delegeringsreferens: %s frÃ¥n delegeringstjänst: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:204 #: src/hed/shc/delegationsh/DelegationSH.cpp:268 #, c-format msgid "Delegated credential identity: %s" msgstr "Delegerad referens-identitet: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:205 #, c-format msgid "" "The delegated credential got from delegation service is stored into path: %s" msgstr "" "Delegerade referensen erhÃ¥llen frÃ¥n delegeringstjänsten lagras till sökväg: " "%s" #: src/hed/shc/delegationsh/DelegationSH.cpp:218 msgid "The endpoint of delegation service should be configured" msgstr "Delegeringstjänstens ändpunkt ska ställas in" #: src/hed/shc/delegationsh/DelegationSH.cpp:228 #: src/hed/shc/delegationsh/DelegationSH.cpp:340 msgid "Delegation handler with delegatee role ends" msgstr "Delegeringshanteraren med delegeringsmottagarroll slutar" #: src/hed/shc/delegationsh/DelegationSH.cpp:260 msgid "Delegation handler with delegator role starts to process" msgstr "Delegeringshanteraren med delegeringssändarroll börjar behandla" #: src/hed/shc/delegationsh/DelegationSH.cpp:269 #, c-format msgid "The delegated credential got from path: %s" msgstr "Delegerade referensen erhÃ¥llen frÃ¥n sökväg: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:290 #, c-format msgid "Can not create delegation crendential to delegation service: %s" msgstr "Kan inte skapa delegeringsreferens för delegeringstjänsten: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:328 msgid "output is not SOAP" msgstr "utdata är inte SOAP" #: src/hed/shc/delegationsh/DelegationSH.cpp:339 #, c-format msgid "" "Succeeded to send DelegationService: %s and DelegationID: %s info to peer " "service" msgstr "" "Lyckades sända DelegationService: %s och DelegationID: %s info till peer-" "tjänst" #: src/hed/shc/delegationsh/DelegationSH.cpp:345 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:220 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:101 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:94 msgid "Incoming Message is not SOAP" msgstr "Inkommande meddelande är inte SOAP" #: 
src/hed/shc/delegationsh/DelegationSH.cpp:352 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:341 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:123 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:108 msgid "Outgoing Message is not SOAP" msgstr "Utgående meddelande är inte SOAP" #: src/hed/shc/delegationsh/DelegationSH.cpp:356 msgid "Delegation handler is not configured" msgstr "Delegeringshanteraren har ej ställts in" #: src/hed/shc/gaclpdp/GACLPDP.cpp:121 msgid "Evaluator for GACLPDP was not loaded" msgstr "Utvärderare för GACL-PDP laddades inte" #: src/hed/shc/gaclpdp/GACLPDP.cpp:152 #, c-format msgid "GACL Auth. request: %s" msgstr "GACL-auktoriseringsbegäran: %s" #: src/hed/shc/gaclpdp/GACLPolicy.cpp:50 src/hed/shc/gaclpdp/GACLPolicy.cpp:63 msgid "Policy is not gacl" msgstr "Policy är inte gacl" #: src/hed/shc/legacy/ConfigParser.cpp:13 msgid "Configuration file not specified" msgstr "Inställningsfil inte angiven" #: src/hed/shc/legacy/ConfigParser.cpp:18 #: src/hed/shc/legacy/ConfigParser.cpp:28 #: src/hed/shc/legacy/ConfigParser.cpp:33 msgid "Configuration file can not be read" msgstr "Inställningsfil kan inte läsas" #: src/hed/shc/legacy/ConfigParser.cpp:43 #, c-format msgid "Configuration file is broken - block name is too short: %s" msgstr "Inställningsfil är trasig - blocknamn är för kort: %s" #: src/hed/shc/legacy/ConfigParser.cpp:47 #, c-format msgid "Configuration file is broken - block name does not end with ]: %s" msgstr "Inställningsfil är trasig - blocknamn slutar inte med ]: %s" #: src/hed/shc/legacy/LegacyMap.cpp:39 src/hed/shc/legacy/LegacyPDP.cpp:119 msgid "Configuration file not specified in ConfigBlock" msgstr "Inställningsfil inte angiven i ConfigBlock" #: src/hed/shc/legacy/LegacyMap.cpp:48 src/hed/shc/legacy/LegacyPDP.cpp:128 msgid "BlockName is empty" msgstr "Blocknamn är tomt" #: src/hed/shc/legacy/LegacyMap.cpp:108 #, c-format msgid "Failed processing user mapping command: %s %s" msgstr "Misslyckades med att behandla användarmappningskommando: %s %s" #: src/hed/shc/legacy/LegacyMap.cpp:114 #: src/services/gridftpd/fileroot_config.cpp:320 #, c-format msgid "Failed to change mapping stack processing policy in: %s = %s" msgstr "Misslyckades med att ändra mappningsstack-behandlingspolicy i: %s = %s" #: src/hed/shc/legacy/LegacyMap.cpp:174 msgid "LegacyMap: no configurations blocks defined" msgstr "LegacyMap: inga inställningsblock definierade" #: src/hed/shc/legacy/LegacyMap.cpp:196 src/hed/shc/legacy/LegacyPDP.cpp:239 #, c-format msgid "" "LegacyPDP: there is no %s Sec Attribute defined. Probably ARC Legacy Sec " "Handler is not configured or failed." msgstr "" "LegacyPDP: Det finns inga %s-säkerhetsattribut definierade. ARC Legacy Sec " "Handler troligen inte inställd eller har misslyckats." #: src/hed/shc/legacy/LegacyMap.cpp:201 src/hed/shc/legacy/LegacyPDP.cpp:244 msgid "LegacyPDP: ARC Legacy Sec Attribute not recognized." msgstr "LegacyPDP: ARC Legacy Sec Attribute känns inte igen."
#: src/hed/shc/legacy/LegacyPDP.cpp:138 #, c-format msgid "Failed to parse configuration file %s" msgstr "Misslyckades med att tolka inställningsfil %s" #: src/hed/shc/legacy/LegacyPDP.cpp:144 #, c-format msgid "Block %s not found in configuration file %s" msgstr "Hittade inte block %s i inställningsfil %s" #: src/hed/shc/legacy/LegacySecHandler.cpp:40 #: src/hed/shc/legacy/LegacySecHandler.cpp:118 msgid "LegacySecHandler: configuration file not specified" msgstr "LegacySecHandler: inställningsfil inte angiven" #: src/hed/shc/legacy/arc_lcas.cpp:146 src/hed/shc/legacy/arc_lcmaps.cpp:161 #, c-format msgid "" "Failed to convert GSI credential to GSS credential (major: %d, minor: %d)" msgstr "" "Misslyckades med att konvertera GSI-referens till GSS-referens (major: %d, " "minor: %d)" #: src/hed/shc/legacy/arc_lcas.cpp:171 src/hed/shc/legacy/arc_lcmaps.cpp:186 msgid "Missing subject name" msgstr "Subjektnamn saknas" #: src/hed/shc/legacy/arc_lcas.cpp:176 src/hed/shc/legacy/arc_lcmaps.cpp:191 msgid "Missing path of credentials file" msgstr "Sökväg till referensfil saknas" #: src/hed/shc/legacy/arc_lcas.cpp:182 msgid "Missing name of LCAS library" msgstr "Namn på LCAS-biblioteket saknas" #: src/hed/shc/legacy/arc_lcas.cpp:199 #, c-format msgid "Can't load LCAS library %s: %s" msgstr "Kan inte ladda in LCAS-bibliotek %s: %s" #: src/hed/shc/legacy/arc_lcas.cpp:209 #, c-format msgid "Can't find LCAS functions in a library %s" msgstr "Kan inte hitta LCAS-funktioner i bibliotek: %s" #: src/hed/shc/legacy/arc_lcas.cpp:219 msgid "Failed to initialize LCAS" msgstr "Misslyckades med att initiera LCAS" #: src/hed/shc/legacy/arc_lcas.cpp:234 msgid "Failed to terminate LCAS" msgstr "Misslyckades med att avsluta LCAS" #: src/hed/shc/legacy/arc_lcmaps.cpp:197 msgid "Missing name of LCMAPS library" msgstr "Namn på LCMAPS-bibliotek saknas" #: src/hed/shc/legacy/arc_lcmaps.cpp:211 msgid "Can't read policy names" msgstr "Kan inte läsa policynamn" #: src/hed/shc/legacy/arc_lcmaps.cpp:222 #, c-format msgid "Can't load LCMAPS library %s: %s" msgstr "Kan inte ladda in LCMAPS-bibliotek %s: %s" #: src/hed/shc/legacy/arc_lcmaps.cpp:234 #, c-format msgid "Can't find LCMAPS functions in a library %s" msgstr "Kan inte hitta LCMAPS-funktioner i bibliotek %s" #: src/hed/shc/legacy/arc_lcmaps.cpp:246 msgid "LCMAPS has lcmaps_run" msgstr "LCMAPS har lcmaps_run" #: src/hed/shc/legacy/arc_lcmaps.cpp:247 msgid "LCMAPS has getCredentialData" msgstr "LCMAPS har getCredentialData" #: src/hed/shc/legacy/arc_lcmaps.cpp:251 msgid "Failed to initialize LCMAPS" msgstr "Misslyckades med att initiera LCMAPS" #: src/hed/shc/legacy/arc_lcmaps.cpp:291 #, c-format msgid "LCMAPS returned invalid GID: %u" msgstr "LCMAPS returnerade ogiltig GID: %u" #: src/hed/shc/legacy/arc_lcmaps.cpp:294 msgid "LCMAPS did not return any GID" msgstr "LCMAPS returnerade ingen GID" #: src/hed/shc/legacy/arc_lcmaps.cpp:297 #, c-format msgid "LCMAPS returned UID which has no username: %u" msgstr "LCMAPS returnerade UID som saknar användarnamn: %u" #: src/hed/shc/legacy/arc_lcmaps.cpp:300 #, c-format msgid "LCMAPS returned invalid UID: %u" msgstr "LCMAPS returnerade ogiltig UID: %u" #: src/hed/shc/legacy/arc_lcmaps.cpp:303 msgid "LCMAPS did not return any UID" msgstr "LCMAPS returnerade ingen UID" #: src/hed/shc/legacy/arc_lcmaps.cpp:312 msgid "Failed to terminate LCMAPS" msgstr "Misslyckades med att avsluta LCMAPS" #: src/hed/shc/legacy/auth.cpp:35 src/services/gridftpd/auth/auth.cpp:35 #, c-format msgid "Unexpected argument for 'all' rule - %s" msgstr "Oväntat
argument för 'all'-regel - %s" #: src/hed/shc/legacy/auth.cpp:337 #, c-format msgid "Credentials stored in temporary file %s" msgstr "Referenser lagrade i temporär fil %s" #: src/hed/shc/legacy/auth.cpp:346 #, c-format msgid "Assigned to authorization group %s" msgstr "Tilldelad till auktoriseringsgrupp %s" #: src/hed/shc/legacy/auth.cpp:351 #, c-format msgid "Assigned to userlist %s" msgstr "Tilldelad till användarlista %s" #: src/hed/shc/legacy/auth_file.cpp:22 #: src/services/gridftpd/auth/auth_file.cpp:22 #, c-format msgid "Failed to read file %s" msgstr "Misslyckades med att läsa fil %s" #: src/hed/shc/legacy/auth_otokens.cpp:30 msgid "Missing subject in configuration" msgstr "Subjekt saknas i inställningar" #: src/hed/shc/legacy/auth_otokens.cpp:35 msgid "Missing issuer in configuration" msgstr "Utfärdare saknas i inställningar" #: src/hed/shc/legacy/auth_otokens.cpp:40 msgid "Missing audience in configuration" msgstr "Publik saknas i inställningar" #: src/hed/shc/legacy/auth_otokens.cpp:45 msgid "Missing scope in configuration" msgstr "Scope saknas i inställningar" #: src/hed/shc/legacy/auth_otokens.cpp:50 src/hed/shc/legacy/auth_voms.cpp:47 #: src/services/gridftpd/auth/auth_voms.cpp:51 msgid "Missing group in configuration" msgstr "Grupp saknas i inställningar" #: src/hed/shc/legacy/auth_otokens.cpp:53 #, c-format msgid "Rule: subject: %s" msgstr "Regel: subjekt: %s" #: src/hed/shc/legacy/auth_otokens.cpp:54 #, c-format msgid "Rule: issuer: %s" msgstr "Regel: utfärdare: %s" #: src/hed/shc/legacy/auth_otokens.cpp:55 #, c-format msgid "Rule: audience: %s" msgstr "Regel: publik: %s" #: src/hed/shc/legacy/auth_otokens.cpp:56 #, c-format msgid "Rule: scope: %s" msgstr "Regel: scope: %s" #: src/hed/shc/legacy/auth_otokens.cpp:57 src/hed/shc/legacy/auth_voms.cpp:66 #: src/services/gridftpd/auth/auth_voms.cpp:68 #, c-format msgid "Rule: group: %s" msgstr "Regel: grupp: %s" #: src/hed/shc/legacy/auth_otokens.cpp:60 #, c-format msgid "Match issuer: %s" msgstr "Matcha utfärdare: %s" #: src/hed/shc/legacy/auth_otokens.cpp:66 #, c-format msgid "Matched: %s %s %s" msgstr "Matchad: %s %s %s" #: src/hed/shc/legacy/auth_otokens.cpp:80 src/hed/shc/legacy/auth_voms.cpp:93 #: src/services/gridftpd/auth/auth_voms.cpp:98 msgid "Matched nothing" msgstr "Matchade ingenting" #: src/hed/shc/legacy/auth_plugin.cpp:45 src/hed/shc/legacy/unixmap.cpp:215 #: src/services/gridftpd/auth/auth_plugin.cpp:70 #: src/services/gridftpd/auth/unixmap.cpp:214 #, c-format msgid "Plugin %s returned: %u" msgstr "Plugin %s returnerade: %u" #: src/hed/shc/legacy/auth_plugin.cpp:49 src/hed/shc/legacy/unixmap.cpp:219 #, c-format msgid "Plugin %s timeout after %u seconds" msgstr "Plugin %s timeout efter %u sekunder" #: src/hed/shc/legacy/auth_plugin.cpp:52 src/hed/shc/legacy/unixmap.cpp:222 #, c-format msgid "Plugin %s failed to start" msgstr "Plugin %s misslyckades med att starta" #: src/hed/shc/legacy/auth_plugin.cpp:54 src/hed/shc/legacy/unixmap.cpp:224 #, c-format msgid "Plugin %s printed: %s" msgstr "Plugin %s skrev ut: %s" #: src/hed/shc/legacy/auth_plugin.cpp:55 src/hed/shc/legacy/unixmap.cpp:212 #: src/hed/shc/legacy/unixmap.cpp:225 #, c-format msgid "Plugin %s error: %s" msgstr "Plugin %s fel: %s" #: src/hed/shc/legacy/auth_voms.cpp:42 #: src/services/gridftpd/auth/auth_voms.cpp:45 msgid "Missing VO in configuration" msgstr "VO saknas i inställningar" #: src/hed/shc/legacy/auth_voms.cpp:52 #: src/services/gridftpd/auth/auth_voms.cpp:57 msgid "Missing role in configuration" msgstr "Roll saknas i inställningar" #:
src/hed/shc/legacy/auth_voms.cpp:57 #: src/services/gridftpd/auth/auth_voms.cpp:63 msgid "Missing capabilities in configuration" msgstr "Förmåga saknas i inställningar" #: src/hed/shc/legacy/auth_voms.cpp:62 msgid "Too many arguments in configuration" msgstr "För många argument i inställningar" #: src/hed/shc/legacy/auth_voms.cpp:65 #: src/services/gridftpd/auth/auth_voms.cpp:67 #, c-format msgid "Rule: vo: %s" msgstr "Regel: vo: %s" #: src/hed/shc/legacy/auth_voms.cpp:67 #: src/services/gridftpd/auth/auth_voms.cpp:69 #, c-format msgid "Rule: role: %s" msgstr "Regel: roll: %s" #: src/hed/shc/legacy/auth_voms.cpp:68 #: src/services/gridftpd/auth/auth_voms.cpp:70 #, c-format msgid "Rule: capabilities: %s" msgstr "Regel: förmåga: %s" #: src/hed/shc/legacy/auth_voms.cpp:71 #: src/services/gridftpd/auth/auth_voms.cpp:77 #, c-format msgid "Match vo: %s" msgstr "Matcha vo: %s" #: src/hed/shc/legacy/auth_voms.cpp:78 #, c-format msgid "Matched: %s %s %s %s" msgstr "Matchad: %s %s %s %s" #: src/hed/shc/legacy/simplemap.cpp:70 #: src/services/gridftpd/auth/simplemap.cpp:68 #, c-format msgid "SimpleMap: acquired new unmap time of %u seconds" msgstr "SimpleMap: fick ny unmap-tid på %u sekunder" #: src/hed/shc/legacy/simplemap.cpp:72 #: src/services/gridftpd/auth/simplemap.cpp:70 msgid "SimpleMap: wrong number in unmaptime command" msgstr "SimpleMap: felaktigt nummer i unmaptime-kommando" #: src/hed/shc/legacy/simplemap.cpp:85 src/hed/shc/legacy/simplemap.cpp:90 #: src/services/gridftpd/auth/simplemap.cpp:83 #: src/services/gridftpd/auth/simplemap.cpp:88 #, c-format msgid "SimpleMap: %s" msgstr "SimpleMap: %s" #: src/hed/shc/legacy/unixmap.cpp:65 src/hed/shc/legacy/unixmap.cpp:70 #: src/services/gridftpd/auth/unixmap.cpp:63 #: src/services/gridftpd/auth/unixmap.cpp:68 msgid "Mapping policy option has empty value" msgstr "Mappningspolicyalternativ har tomt värde" #: src/hed/shc/legacy/unixmap.cpp:80 src/services/gridftpd/auth/unixmap.cpp:78 #, c-format msgid "Unsupported mapping policy action: %s" msgstr "Mappningspolicyhandling stöds inte: %s" #: src/hed/shc/legacy/unixmap.cpp:91 src/services/gridftpd/auth/unixmap.cpp:89 #, c-format msgid "Unsupported mapping policy option: %s" msgstr "Mappningspolicyalternativ stöds inte: %s" #: src/hed/shc/legacy/unixmap.cpp:103 src/hed/shc/legacy/unixmap.cpp:108 #: src/services/gridftpd/auth/unixmap.cpp:100 #: src/services/gridftpd/auth/unixmap.cpp:105 msgid "User name mapping command is empty" msgstr "Användarnamnsmappningskommando är tomt" #: src/hed/shc/legacy/unixmap.cpp:116 #: src/services/gridftpd/auth/unixmap.cpp:113 #, c-format msgid "User name mapping has empty authgroup: %s" msgstr "Användarnamnsmappning har tom auktoriseringsgrupp: %s"
#: src/hed/shc/legacy/unixmap.cpp:147 #: src/services/gridftpd/auth/unixmap.cpp:147 #, c-format msgid "Unknown user name mapping rule %s" msgstr "Okänd användarnamnsmappningsregel %s" #: src/hed/shc/legacy/unixmap.cpp:156 src/hed/shc/legacy/unixmap.cpp:161 #: src/hed/shc/legacy/unixmap.cpp:177 src/hed/shc/legacy/unixmap.cpp:183 #: src/services/gridftpd/auth/unixmap.cpp:175 #: src/services/gridftpd/auth/unixmap.cpp:180 #: src/services/gridftpd/auth/unixmap.cpp:196 msgid "Plugin (user mapping) command is empty" msgstr "Plugin (användarmappning) kommando är tomt" #: src/hed/shc/legacy/unixmap.cpp:167 #: src/services/gridftpd/auth/unixmap.cpp:186 #, c-format msgid "Plugin (user mapping) timeout is not a number: %s" msgstr "Plugin (användarmappning) timeout är inte ett nummer: %s" #: src/hed/shc/legacy/unixmap.cpp:171 #: src/services/gridftpd/auth/unixmap.cpp:190 #, c-format msgid "Plugin (user mapping) timeout is wrong number: %s" msgstr "Plugin (användarmappning) timeout är felaktigt nummer: %s" #: src/hed/shc/legacy/unixmap.cpp:203 #, c-format msgid "Plugin %s returned no username" msgstr "Plugin %s returnerade inget användarnamn" #: src/hed/shc/legacy/unixmap.cpp:208 #: src/services/gridftpd/auth/unixmap.cpp:211 #, c-format msgid "Plugin %s returned too much: %s" msgstr "Plugin %s returnerade för mycket: %s" #: src/hed/shc/legacy/unixmap.cpp:211 #, c-format msgid "Plugin %s returned no mapping" msgstr "Plugin %s returnerade ingen mappning" #: src/hed/shc/legacy/unixmap.cpp:234 msgid "User subject match is missing user subject." msgstr "Användarsubjektmatchning saknar användarsubjekt." #: src/hed/shc/legacy/unixmap.cpp:238 #: src/services/gridftpd/auth/unixmap.cpp:230 #, c-format msgid "Mapfile at %s can't be opened." msgstr "Mappningsfil på %s kan inte öppnas." #: src/hed/shc/legacy/unixmap.cpp:262 #: src/services/gridftpd/auth/unixmap.cpp:255 msgid "User pool mapping is missing user subject." msgstr "Användarpoolmappning saknar användarsubjekt." #: src/hed/shc/legacy/unixmap.cpp:267 #: src/services/gridftpd/auth/unixmap.cpp:260 #, c-format msgid "User pool at %s can't be opened." msgstr "Användarpool på %s kan inte öppnas." #: src/hed/shc/legacy/unixmap.cpp:272 #: src/services/gridftpd/auth/unixmap.cpp:265 #, c-format msgid "User pool at %s failed to perform user mapping." msgstr "Användarpool på %s misslyckades med att utföra användarmappning." #: src/hed/shc/legacy/unixmap.cpp:290 #: src/services/gridftpd/auth/unixmap.cpp:283 #, c-format msgid "User name direct mapping is missing user name: %s." msgstr "Direkt användarnamnsmappning saknar användarnamn: %s."
#: src/hed/shc/otokens/OTokensSH.cpp:63
msgid "OTokens: Attr: message"
msgstr "OTokens: Attr: meddelande"

#: src/hed/shc/otokens/OTokensSH.cpp:68
#, c-format
msgid "OTokens: Attr: %s = %s"
msgstr "OTokens: Attr: %s = %s"

#: src/hed/shc/otokens/OTokensSH.cpp:73
#, c-format
msgid "OTokens: Attr: token: %s"
msgstr "OTokens: Attr: token: %s"

#: src/hed/shc/otokens/OTokensSH.cpp:76
#, c-format
msgid "OTokens: Attr: token: bearer: %s"
msgstr "OTokens: Attr: token: bärare: %s"

#: src/hed/shc/otokens/OTokensSH.cpp:146
msgid "OTokens: Handle"
msgstr "OTokens: Handtag"

#: src/hed/shc/otokens/OTokensSH.cpp:148
msgid "OTokens: Handle: message"
msgstr "OTokens: Handtag: meddelande"

#: src/hed/shc/otokens/OTokensSH.cpp:151
msgid "Failed to create OTokens security attributes"
msgstr "Misslyckades med att skapa OTokens säkerhetsattribut"

#: src/hed/shc/otokens/OTokensSH.cpp:155
#, c-format
msgid "OTokens: Handle: attributes created: subject = %s"
msgstr "OTokens: Handtag: attribut skapade: subjekt = %s"

#: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:48
msgid "Creating a pdpservice client"
msgstr "Skapar en pdp-tjänste-klient"

#: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:80
msgid "Arc policy can not been carried by SAML2.0 profile of XACML"
msgstr "Arc-policy kan inte överföras av XACMLs SAML2.0-profil"

#: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:152
#: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:185
msgid "Policy Decision Service invocation failed"
msgstr "Policy-besluts-tjänst-anrop misslyckades"

#: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:155
#: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:188
#: src/tests/client/test_ClientInterface.cpp:88
#: src/tests/client/test_ClientSAML2SSO.cpp:81
#: src/tests/delegation/test_client_with_delegation_sechandler.cpp:100
#: src/tests/echo/test_clientinterface.cpp:82
#: src/tests/echo/test_clientinterface.cpp:149
#: src/tests/echo/test_clientinterface.py:32
msgid "There was no SOAP response"
msgstr "Det fanns inget SOAP-svar"

#: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:170
#: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:204
msgid "Authorized from remote pdp service"
msgstr "Auktoriserad av fjärr-pdp-tjänst"

#: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:171
#: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:205
msgid "Unauthorized from remote pdp service"
msgstr "Oauktoriserad av fjärr-pdp-tjänst"

#: src/hed/shc/saml2sso_assertionconsumersh/SAML2SSO_AssertionConsumerSH.cpp:69
msgid "Can not get SAMLAssertion SecAttr from message context"
msgstr "Kan inte erhålla SAMLAssertion SecAttr från meddelandekontext"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:152
#: src/hed/shc/x509tokensh/X509TokenSH.cpp:44
msgid "Missing or empty CertificatePath element"
msgstr "CertificatePath-element saknas eller är tomt"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:157
#: src/hed/shc/x509tokensh/X509TokenSH.cpp:49
msgid "Missing or empty KeyPath element"
msgstr "KeyPath-element saknas eller är tomt"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:163
msgid ""
"Both of CACertificatePath and CACertificatesDir elements missing or empty"
msgstr ""
"Både CACertificatePath- och CACertificatesDir-elementen saknas eller är tomma"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:175
#: src/hed/shc/x509tokensh/X509TokenSH.cpp:61
msgid ""
"Missing or empty CertificatePath or CACertificatesDir element; will only "
"check the signature, will not do message authentication"
msgstr ""
"CertificatePath- eller "
"CACertificatesDir-element saknas eller är tomt; "
"kommer endast att kontrollera signatur, kommer ej att göra "
"meddelandeautentisering"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:179
#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:65
#: src/hed/shc/x509tokensh/X509TokenSH.cpp:65
#, c-format
msgid "Processing type not supported: %s"
msgstr "Processeringstyp stöds inte: %s"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:199
msgid "Failed to parse SAML Token from incoming SOAP"
msgstr "Misslyckades med att tolka SAML-token från inkommande SOAP"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:209
msgid "Failed to authenticate SAML Token inside the incoming SOAP"
msgstr "Misslyckades med att autentisera SAML-token inuti inkommande SOAP"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:212
msgid "Succeeded to authenticate SAMLToken"
msgstr "Lyckades med att autentisera SAML-token"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:275
#, c-format
msgid "No response from AA service %s"
msgstr "Inget svar från AA-tjänst %s"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:279
#, c-format
msgid "SOAP Request to AA service %s failed"
msgstr "SOAP-begäran till AA-tjänst %s misslyckades"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:287
msgid "Cannot find content under response soap message"
msgstr "Kan inte hitta innehåll under svar-soap-meddelande"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:291
msgid "Cannot find under response soap message:"
msgstr "Kan inte hitta under svar-soap-meddelande:"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:308
msgid "The Response is not going to this end"
msgstr "Svaret kommer inte till denna ände"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:315
msgid "The StatusCode is Success"
msgstr "Statuskoden är Success"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:321
msgid "Succeeded to verify the signature under "
msgstr "Lyckades verifiera signaturen under "

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:324
msgid "Failed to verify the signature under "
msgstr "Misslyckades med att verifiera signaturen under "

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:335
msgid "Failed to generate SAML Token for outgoing SOAP"
msgstr "Misslyckades med att generera SAML-token för utgående SOAP"

#: src/hed/shc/samltokensh/SAMLTokenSH.cpp:345
msgid "SAML Token handler is not configured"
msgstr "SAML-tokenhanteraren har ej ställts in"

#: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:29
#, c-format
msgid "Access list location: %s"
msgstr "Åtkomstlista: %s"

#: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:39
msgid ""
"No policy file or DNs specified for simplelist.pdp, please set location "
"attribute or at least one DN element for simplelist PDP node in "
"configuration."
msgstr ""
"Ingen policyfil eller DN angiven för simplelist.pdp, ange ett location-"
"attribut eller åtminstone ett DN-element i simplelist-PDP-noden i "
"inställningarna."
#: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:42
#, c-format
msgid "Subject to match: %s"
msgstr "Subjekt att matcha: %s"

#: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:45
#, c-format
msgid "Policy subject: %s"
msgstr "Policy-subjekt: %s"

#: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:47
#: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:73
#, c-format
msgid "Authorized from simplelist.pdp: %s"
msgstr "Auktoriserad av simplelist.pdp: %s"

#: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:54
msgid ""
"The policy file setup for simplelist.pdp does not exist, please check "
"location attribute for simplelist PDP node in service configuration"
msgstr ""
"Policyfilen angiven för simplelist.pdp existerar inte, kontrollera location-"
"attributet i simplelist-PDP-noden i tjänsteinställningarna"

#: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:61
#, c-format
msgid "Policy line: %s"
msgstr "Policyrad: %s"

#: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:79
#, c-format
msgid "Not authorized from simplelist.pdp: %s"
msgstr "Ej auktoriserad av simplelist.pdp: %s"

#: src/hed/shc/test.cpp:27 src/hed/shc/testinterface_arc.cpp:26
#: src/hed/shc/testinterface_xacml.cpp:26
msgid "Start test"
msgstr "Påbörja test"

#: src/hed/shc/test.cpp:101
msgid "Input request from a file: Request.xml"
msgstr "Mata in begäran från en fil: Request.xml"

#: src/hed/shc/test.cpp:107 src/hed/shc/test.cpp:197
#: src/hed/shc/testinterface_arc.cpp:124
#, c-format
msgid "There is %d subjects, which satisfy at least one policy"
msgstr "Det finns %d subjekt som uppfyller åtminstone en policy"

#: src/hed/shc/test.cpp:121
#, c-format
msgid "Attribute Value (1): %s"
msgstr "Attributvärde (1): %s"

#: src/hed/shc/test.cpp:132
msgid "Input request from code"
msgstr "Mata in begäran från kod"

#: src/hed/shc/test.cpp:211
#, c-format
msgid "Attribute Value (2): %s"
msgstr "Attributvärde (2): %s"

#: src/hed/shc/testinterface_arc.cpp:75 src/hed/shc/testinterface_xacml.cpp:46
msgid "Can not dynamically produce Policy"
msgstr "Kan inte skapa policy dynamiskt"

#: src/hed/shc/testinterface_arc.cpp:138
#, c-format
msgid "Attribute Value inside Subject: %s"
msgstr "Attributvärde inuti Subject: %s"

#: src/hed/shc/testinterface_arc.cpp:148
msgid "The request has passed the policy evaluation"
msgstr "Begäran har passerat policyutvärderingen"

#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:43
msgid "Missing or empty PasswordSource element"
msgstr "PasswordSource-element saknas eller är tomt"

#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:54
#, c-format
msgid "Password encoding type not supported: %s"
msgstr "Lösenordskodningstyp stöds inte: %s"

#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:59
msgid "Missing or empty Username element"
msgstr "Username-element saknas eller är tomt"

#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:79
msgid "The payload of incoming message is empty"
msgstr "Nyttolasten i inkommande meddelande är tom"

#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:84
msgid "Failed to cast PayloadSOAP from incoming payload"
msgstr ""
"Misslyckades med att konvertera till PayloadSOAP från inkommande nyttolast"

#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:89
msgid "Failed to parse Username Token from incoming SOAP"
msgstr "Misslyckades med att tolka användarnamnstoken från inkommande SOAP"

#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:95
msgid "Failed to authenticate Username Token inside the incoming SOAP"
msgstr ""
"Misslyckades med att autentisera användarnamnstoken inuti inkommande SOAP"
#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:98
msgid "Succeeded to authenticate UsernameToken"
msgstr "Lyckades med att autentisera användarnamnstoken"

#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:108
msgid "The payload of outgoing message is empty"
msgstr "Nyttolasten i utgående meddelande är tom"

#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:113
msgid "Failed to cast PayloadSOAP from outgoing payload"
msgstr ""
"Misslyckades med att konvertera till PayloadSOAP från utgående nyttolast"

#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:119
msgid "Failed to generate Username Token for outgoing SOAP"
msgstr "Misslyckades med att skapa användarnamnstoken för utgående SOAP"

#: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:127
msgid "Username Token handler is not configured"
msgstr "Användarnamnstokenhanteraren har ej ställts in"

#: src/hed/shc/x509tokensh/X509TokenSH.cpp:81
msgid "Failed to parse X509 Token from incoming SOAP"
msgstr "Misslyckades med att tolka X509-token från inkommande SOAP"

#: src/hed/shc/x509tokensh/X509TokenSH.cpp:85
msgid "Failed to verify X509 Token inside the incoming SOAP"
msgstr "Misslyckades med att verifiera X509-token inuti inkommande SOAP"

#: src/hed/shc/x509tokensh/X509TokenSH.cpp:89
msgid "Failed to authenticate X509 Token inside the incoming SOAP"
msgstr "Misslyckades med att autentisera X509-token inuti inkommande SOAP"

#: src/hed/shc/x509tokensh/X509TokenSH.cpp:92
msgid "Succeeded to authenticate X509Token"
msgstr "Lyckades med att autentisera X509-token"

#: src/hed/shc/x509tokensh/X509TokenSH.cpp:102
msgid "Failed to generate X509 Token for outgoing SOAP"
msgstr "Misslyckades med att skapa X509-token för utgående SOAP"

#: src/hed/shc/x509tokensh/X509TokenSH.cpp:112
msgid "X509 Token handler is not configured"
msgstr "X509-tokenhanteraren har ej ställts in"

#: src/hed/shc/xacmlpdp/XACMLApply.cpp:29
msgid "Can not create function: FunctionId does not exist"
msgstr "Kan inte skapa funktion: Funktions-id existerar inte"

#: src/hed/shc/xacmlpdp/XACMLApply.cpp:33
#: src/hed/shc/xacmlpdp/XACMLTarget.cpp:40
#, c-format
msgid "Can not create function %s"
msgstr "Kan inte skapa funktion %s"

#: src/hed/shc/xacmlpdp/XACMLPDP.cpp:88
msgid "Can not find XACMLPDPContext"
msgstr "Kan inte hitta XACMLPDPContext"

#: src/hed/shc/xacmlpdp/XACMLPDP.cpp:136
msgid "Evaluator for XACMLPDP was not loaded"
msgstr "Utvärderare för XACML-PDP laddades inte"

#: src/hed/shc/xacmlpdp/XACMLPDP.cpp:151 src/hed/shc/xacmlpdp/XACMLPDP.cpp:159
msgid "Failed to convert security information to XACML request"
msgstr ""
"Misslyckades med att konvertera säkerhetsinformation till XACML-begäran"

#: src/hed/shc/xacmlpdp/XACMLPDP.cpp:167
#, c-format
msgid "XACML request: %s"
msgstr "XACML-begäran: %s"

#: src/hed/shc/xacmlpdp/XACMLPDP.cpp:179
msgid "Authorized from xacml.pdp"
msgstr "Auktoriserad av xacml.pdp"

#: src/hed/shc/xacmlpdp/XACMLPDP.cpp:180
msgid "UnAuthorized from xacml.pdp"
msgstr "Oauktoriserad av xacml.pdp"

#: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:55
msgid "Can not find element with proper namespace"
msgstr "Kan inte hitta -element med rätt namnrymd"

#: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:132
msgid "No target available inside the policy"
msgstr "Inget target tillgängligt inuti policyn"

#: src/hed/shc/xacmlpdp/XACMLRequest.cpp:34
msgid "Request is empty"
msgstr "Begäran är tom"

#: src/hed/shc/xacmlpdp/XACMLRequest.cpp:39
msgid "Can not find element with proper namespace"
msgstr "Kan inte hitta -element med rätt namnrymd"

#: src/hed/shc/xacmlpdp/XACMLRule.cpp:35
"Invalid Effect" msgstr "Ogiltig effekt" #: src/hed/shc/xacmlpdp/XACMLRule.cpp:48 msgid "No target available inside the rule" msgstr "Inget target tillgängligt inuti regeln" #: src/libs/data-staging/DTR.cpp:82 src/libs/data-staging/DTR.cpp:86 #, c-format msgid "Could not handle endpoint %s" msgstr "Kan inte hantera ändpunkt %s" #: src/libs/data-staging/DTR.cpp:96 msgid "Source is the same as destination" msgstr "källan är densamma som destinationen" #: src/libs/data-staging/DTR.cpp:175 #, c-format msgid "Invalid ID: %s" msgstr "Ogiltig ID: %s" #: src/libs/data-staging/DTR.cpp:212 #, c-format msgid "%s->%s" msgstr "%s->%s" #: src/libs/data-staging/DTR.cpp:320 #, c-format msgid "No callback for %s defined" msgstr "Ingen callback för %s definierad" #: src/libs/data-staging/DTR.cpp:335 #, c-format msgid "NULL callback for %s" msgstr "NULL-callback för %s" #: src/libs/data-staging/DTR.cpp:338 #, c-format msgid "Request to push to unknown owner - %u" msgstr "Begäran att pusha till okänd ägare - %u" #: src/libs/data-staging/DTRList.cpp:216 #, c-format msgid "Boosting priority from %i to %i due to incoming higher priority DTR" msgstr "" "Boostar prioritet frÃ¥n %i till %i pÃ¥ grund av inkommande DTR med högre " "prioritet" #: src/libs/data-staging/DataDelivery.cpp:48 #: src/libs/data-staging/DataDelivery.cpp:72 msgid "Received invalid DTR" msgstr "Mottog ogiltig DTR" #: src/libs/data-staging/DataDelivery.cpp:54 #, c-format msgid "Delivery received new DTR %s with source: %s, destination: %s" msgstr "Leverans mottog ny DTR %s med källa: %s, destination: %s" #: src/libs/data-staging/DataDelivery.cpp:68 msgid "Received no DTR" msgstr "Mottog ingen DTR" #: src/libs/data-staging/DataDelivery.cpp:80 #, c-format msgid "Cancelling DTR %s with source: %s, destination: %s" msgstr "Avbryter DTR %s med källa: %s, destination: %s" #: src/libs/data-staging/DataDelivery.cpp:91 #, c-format msgid "DTR %s requested cancel but no active transfer" msgstr "DTR %s begärd att avbrytas men ingen aktiv överföring" #: src/libs/data-staging/DataDelivery.cpp:147 #, c-format msgid "Cleaning up after failure: deleting %s" msgstr "Rensar upp efter misslyckande: tar bort %s" #: src/libs/data-staging/DataDelivery.cpp:188 #: src/libs/data-staging/DataDelivery.cpp:263 #: src/libs/data-staging/DataDelivery.cpp:303 #: src/libs/data-staging/DataDelivery.cpp:323 msgid "Failed to delete delivery object or deletion timed out" msgstr "" "Misslyckades med att ta bort leverans-objekt eller borttagning avbröts pÃ¥ " "grund av timeout" #: src/libs/data-staging/DataDelivery.cpp:254 #, c-format msgid "Transfer finished: %llu bytes transferred %s" msgstr "Överföring avslutad: %llu byteer överförda %s" #: src/libs/data-staging/DataDelivery.cpp:329 msgid "Data delivery loop exited" msgstr "Dataleveransloop avslutades" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:45 msgid "No source defined" msgstr "Ingen källa definierad" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:49 msgid "No destination defined" msgstr "Ingen destination definierad" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:139 #, c-format msgid "Bad checksum format %s" msgstr "Felaktigt format för checksumma %s" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:178 #, c-format msgid "Failed to run command: %s" msgstr "Misslyckades med att köra kommando: %s" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:213 #, c-format msgid "DataDelivery: %s" msgstr "Dataleverans: %s" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:225 #, c-format msgid "DataStagingDelivery 
msgid "DataStagingDelivery exited with code %i"
msgstr "DataStagingDelivery avslutades med kod %i"

#: src/libs/data-staging/DataDeliveryLocalComm.cpp:244
#, c-format
msgid "Transfer killed after %i seconds without communication"
msgstr "Överföring avbröts efter %i sekunder utan kommunikation"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:67
#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:315
#, c-format
msgid "Connecting to Delivery service at %s"
msgstr "Kopplar upp mot leveranstjänst på %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:94
#, c-format
msgid "Failed to set up credential delegation with %s"
msgstr "Misslyckades med att sätta upp referensdelegering med %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:100
#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:174
#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:240
#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:326
#, c-format
msgid ""
"Request:\n"
"%s"
msgstr ""
"Begäran:\n"
"%s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:106
#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:332
#, c-format
msgid "Could not connect to service %s: %s"
msgstr "Misslyckades med att koppla upp mot tjänst %s: %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:114
#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:340
#, c-format
msgid "No SOAP response from Delivery service %s"
msgstr "Inget SOAP-svar från leveranstjänst %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:119
#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:193
#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:267
#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:346
#, c-format
msgid ""
"Response:\n"
"%s"
msgstr ""
"Svar:\n"
"%s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:128
#, c-format
msgid "Failed to start transfer request: %s"
msgstr "Misslyckades med att starta överföringsbegäran: %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:135
#, c-format
msgid "Bad format in XML response from service at %s: %s"
msgstr "Felaktigt format i XML-svar från tjänst på %s: %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:143
#, c-format
msgid "Could not make new transfer request: %s: %s"
msgstr "Kunde inte göra ny överföringsbegäran: %s: %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:148
#, c-format
msgid "Started remote Delivery at %s"
msgstr "Startade fjärrleverans på %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:181
#, c-format
msgid "Failed to send cancel request: %s"
msgstr "Misslyckades med att sända begäran att avbryta: %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:188
msgid "Failed to cancel: No SOAP response"
msgstr "Misslyckades med att avbryta: inget SOAP-svar"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:202
#, c-format
msgid "Failed to cancel transfer request: %s"
msgstr "Misslyckades med att avbryta överföringsbegäran: %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:209
#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:290
#, c-format
msgid "Bad format in XML response: %s"
msgstr "Felaktigt format i XML-svar: %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:216
#, c-format
msgid "Failed to cancel: %s"
msgstr "Misslyckades med att avbryta: %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:260
msgid "No SOAP response from delivery service"
msgstr "Inget SOAP-svar från leveranstjänst"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:281
#, c-format
msgid "Failed to query state: %s"
msgstr "Misslyckades med att fråga om tillstånd: %s"
#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:355
#, c-format
msgid "SOAP fault from delivery service at %s: %s"
msgstr "SOAP-fel från leveranstjänst på %s: %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:363
#, c-format
msgid "Bad format in XML response from delivery service at %s: %s"
msgstr "Felaktigt format i XML-svar från leveranstjänst på %s: %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:371
#, c-format
msgid "Error pinging delivery service at %s: %s: %s"
msgstr "Fel vid pingning av leveranstjänst på %s: %s: %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:379
#, c-format
msgid "Dir %s allowed at service %s"
msgstr "Katalog %s tillåten på tjänst %s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:473
#, c-format
msgid ""
"DataDelivery log tail:\n"
"%s"
msgstr ""
"Dataleverans-logg-svans:\n"
"%s"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:486
msgid "Failed locating credentials"
msgstr "Misslyckades med att lokalisera referenser"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:491
msgid "Failed to initiate client connection"
msgstr "Misslyckades med att initiera klientförbindelse"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:497
msgid "Client connection has no entry point"
msgstr "Klientförbindelsen har ingen ingångspunkt"

#: src/libs/data-staging/DataDeliveryRemoteComm.cpp:507
msgid "Failed to initiate delegation credentials"
msgstr "Misslyckades med att initiera delegeringsreferenser"

#: src/libs/data-staging/DataStagingDelivery.cpp:97
#, c-format
msgid "%5u s: %10.1f kB %8.1f kB/s"
msgstr "%5u s: %10.1f kB %8.1f kB/s"

#: src/libs/data-staging/DataStagingDelivery.cpp:152
msgid "Unexpected arguments"
msgstr "Oväntade argument"

#: src/libs/data-staging/DataStagingDelivery.cpp:155
msgid "Source URL missing"
msgstr "Käll-URL saknas"

#: src/libs/data-staging/DataStagingDelivery.cpp:158
msgid "Destination URL missing"
msgstr "Destinations-URL saknas"

#: src/libs/data-staging/DataStagingDelivery.cpp:162
#, c-format
msgid "Source URL not valid: %s"
msgstr "Käll-URL är inte giltig: %s"

#: src/libs/data-staging/DataStagingDelivery.cpp:166
#, c-format
msgid "Destination URL not valid: %s"
msgstr "Destinations-URL är inte giltig: %s"

#: src/libs/data-staging/DataStagingDelivery.cpp:223
#, c-format
msgid "Unknown transfer option: %s"
msgstr "Okänt överföringsalternativ: %s"

#: src/libs/data-staging/DataStagingDelivery.cpp:248
#, c-format
msgid "Source URL not supported: %s"
msgstr "Käll-URL stöds inte: %s"

#: src/libs/data-staging/DataStagingDelivery.cpp:253
#: src/libs/data-staging/DataStagingDelivery.cpp:272
msgid "No credentials supplied"
msgstr "Inga referenser tillhandahållna"

#: src/libs/data-staging/DataStagingDelivery.cpp:267
#, c-format
msgid "Destination URL not supported: %s"
msgstr "Destinations-URL stöds inte: %s"

#: src/libs/data-staging/DataStagingDelivery.cpp:316
#, c-format
msgid "Will calculate %s checksum"
msgstr "Kommer att beräkna %s-checksumma"

#: src/libs/data-staging/DataStagingDelivery.cpp:327
msgid "Cannot use supplied --size option"
msgstr "Kan inte använda tillhandahållet --size-alternativ"

#: src/libs/data-staging/DataStagingDelivery.cpp:540
#, c-format
msgid "Checksum mismatch between calculated checksum %s and source checksum %s"
msgstr ""
"Checksumma stämmer inte överens mellan beräknad checksumma %s och källans "
"checksumma %s"

#: src/libs/data-staging/DataStagingDelivery.cpp:550
#, c-format
msgid "Failed cleaning up destination %s"
msgstr "Misslyckades med att rensa upp destination %s"
#: src/libs/data-staging/Processor.cpp:59
#: src/services/candypond/CandyPond.cpp:117
msgid "Error creating cache"
msgstr "Fel vid skapande av cache"

#: src/libs/data-staging/Processor.cpp:83
#, c-format
msgid "Forcing re-download of file %s"
msgstr "Framtvingar åternedladdning av fil %s"

#: src/libs/data-staging/Processor.cpp:100
#, c-format
msgid "Will wait around %is"
msgstr "Kommer att vänta omkring %i s"

#: src/libs/data-staging/Processor.cpp:119
#, c-format
msgid "Force-checking source of cache file %s"
msgstr "Tvångskontrollerar källa för cachad fil %s"

#: src/libs/data-staging/Processor.cpp:122
#, c-format
msgid "Source check requested but failed: %s"
msgstr "Kontroll av källa begärd men misslyckades: %s"

#: src/libs/data-staging/Processor.cpp:142
msgid "Permission checking failed, will try downloading without using cache"
msgstr ""
"Åtkomsträttighetskontroll inte godkänd, kommer att försöka ladda ned utan "
"att använda cache"

#: src/libs/data-staging/Processor.cpp:172
#, c-format
msgid "Will download to cache file %s"
msgstr "Kommer att ladda ned till cachefil %s"

#: src/libs/data-staging/Processor.cpp:193
msgid "Looking up source replicas"
msgstr "Slår upp källreplikor"

#: src/libs/data-staging/Processor.cpp:210
#: src/libs/data-staging/Processor.cpp:317
#, c-format
msgid "Skipping replica on local host %s"
msgstr "Hoppar över replika på lokal värd %s"

#: src/libs/data-staging/Processor.cpp:218
#: src/libs/data-staging/Processor.cpp:325
#, c-format
msgid "No locations left for %s"
msgstr "Inga platser kvar för %s"

#: src/libs/data-staging/Processor.cpp:239
#: src/libs/data-staging/Processor.cpp:481
msgid "Resolving destination replicas"
msgstr "Slår upp destinationsreplikor"

#: src/libs/data-staging/Processor.cpp:256
msgid "No locations for destination different from source found"
msgstr "Hittade inga platser för destinationen som skiljer sig från källan"

#: src/libs/data-staging/Processor.cpp:267
msgid "Pre-registering destination in index service"
msgstr "Förregistrerar destination i indextjänst"

#: src/libs/data-staging/Processor.cpp:293
msgid "Resolving source replicas in bulk"
msgstr "Massuppslagning av källreplikor"

#: src/libs/data-staging/Processor.cpp:307
#, c-format
msgid "No replicas found for %s"
msgstr "Hittade inga replikor för %s"

#: src/libs/data-staging/Processor.cpp:348
#, c-format
msgid "Checking %s"
msgstr "Kontrollerar %s"

#: src/libs/data-staging/Processor.cpp:357
#: src/libs/data-staging/Processor.cpp:415
msgid "Metadata of replica and index service differ"
msgstr "Metadata för replika och indextjänst skiljer sig åt"

#: src/libs/data-staging/Processor.cpp:365
#, c-format
msgid "Failed checking source replica %s: %s"
msgstr "Misslyckades med att kontrollera källreplika %s: %s"

#: src/libs/data-staging/Processor.cpp:391
msgid "Querying source replicas in bulk"
msgstr "Massfrågar källreplikor"

#: src/libs/data-staging/Processor.cpp:403
#, c-format
msgid "Failed checking source replica: %s"
msgstr "Misslyckades med att kontrollera källreplika: %s"

#: src/libs/data-staging/Processor.cpp:409
msgid "Failed checking source replica"
msgstr "Misslyckades med att kontrollera källreplika"

#: src/libs/data-staging/Processor.cpp:449
msgid "Finding existing destination replicas"
msgstr "Hittar existerande destinationsreplikor"

#: src/libs/data-staging/Processor.cpp:461
#, c-format
msgid "Failed to delete replica %s: %s"
msgstr "Misslyckades med att ta bort replika %s: %s"

#: src/libs/data-staging/Processor.cpp:475
#, c-format
msgid "Unregistering %s"
"Avregistrerar %s" #: src/libs/data-staging/Processor.cpp:486 msgid "Pre-registering destination" msgstr "Förregisterar destination" #: src/libs/data-staging/Processor.cpp:492 #, c-format msgid "Failed to pre-clean destination: %s" msgstr "Misslyckades med att förregistrera destination: %s" #: src/libs/data-staging/Processor.cpp:515 msgid "Preparing to stage source" msgstr "Förberedar att staga källa" #: src/libs/data-staging/Processor.cpp:528 #, c-format msgid "Source is not ready, will wait %u seconds" msgstr "Källa är inte redo, kommer att vänta %u sekunder" #: src/libs/data-staging/Processor.cpp:534 msgid "No physical files found for source" msgstr "Hittade inga platser för destination" #: src/libs/data-staging/Processor.cpp:552 msgid "Preparing to stage destination" msgstr "Förbereder att staga destination" #: src/libs/data-staging/Processor.cpp:565 #, c-format msgid "Destination is not ready, will wait %u seconds" msgstr "Destination är inte redo, kommer att vänta %u sekunder" #: src/libs/data-staging/Processor.cpp:571 msgid "No physical files found for destination" msgstr "Hittade inga fysiska filer för destination" #: src/libs/data-staging/Processor.cpp:597 msgid "Releasing source" msgstr "Frigör källa" #: src/libs/data-staging/Processor.cpp:601 #, c-format msgid "There was a problem during post-transfer source handling: %s" msgstr "Det uppstod ett problem under källans efter-överförings-hantering: %s" #: src/libs/data-staging/Processor.cpp:606 msgid "Releasing destination" msgstr "Frigör destination" #: src/libs/data-staging/Processor.cpp:610 #, c-format msgid "" "There was a problem during post-transfer destination handling after error: %s" msgstr "" "Det uppstod ett problem under destinationens efter-överförings-hantering " "efter felet: %s" #: src/libs/data-staging/Processor.cpp:614 #, c-format msgid "Error with post-transfer destination handling: %s" msgstr "Fel under destinationens efter-överförings-hantering: %s" #: src/libs/data-staging/Processor.cpp:640 msgid "Removing pre-registered destination in index service" msgstr "Tar bort förregistrerad destination i indextjänst" #: src/libs/data-staging/Processor.cpp:643 #, c-format msgid "" "Failed to unregister pre-registered destination %s: %s. You may need to " "unregister it manually" msgstr "" "Misslyckades med att avregistrera förregistrerad destination %s: %s. Du kan " "behöva avregistrera den manuellt" #: src/libs/data-staging/Processor.cpp:649 msgid "Registering destination replica" msgstr "Registrerar destinationsreplika" #: src/libs/data-staging/Processor.cpp:652 #, c-format msgid "Failed to register destination replica: %s" msgstr "Misslyckades med att registrera destinationsreplika: %s" #: src/libs/data-staging/Processor.cpp:655 #, c-format msgid "" "Failed to unregister pre-registered destination %s. You may need to " "unregister it manually" msgstr "" "Misslyckades med att avregistrera förregistrerad destination %s. Du kan " "behöva avregistrera den manuellt" #: src/libs/data-staging/Processor.cpp:685 msgid "Error creating cache. Stale locks may remain." msgstr "Fel vid skapande av cache. Gamle lÃ¥s kan finnas kvar." #: src/libs/data-staging/Processor.cpp:718 #, c-format msgid "Linking/copying cached file to %s" msgstr "Länkar/kopierar cachad fil till %s" #: src/libs/data-staging/Processor.cpp:739 #, c-format msgid "Failed linking cache file to %s" msgstr "Misslyckades med att länka cachefil till %s" #: src/libs/data-staging/Processor.cpp:743 #, c-format msgid "Error linking cache file to %s." 
msgstr "Fel vid länkning av cachefil till %s." #: src/libs/data-staging/Processor.cpp:764 #: src/libs/data-staging/Processor.cpp:771 msgid "Adding to bulk request" msgstr "Lägger till till massbegäran" #: src/libs/data-staging/Scheduler.cpp:174 #: src/libs/data-staging/Scheduler.cpp:181 msgid "source" msgstr "käll" #: src/libs/data-staging/Scheduler.cpp:174 #: src/libs/data-staging/Scheduler.cpp:181 msgid "destination" msgstr "destinations" #: src/libs/data-staging/Scheduler.cpp:174 #, c-format msgid "Using next %s replica" msgstr "Använder nästa %s-replika" #: src/libs/data-staging/Scheduler.cpp:181 #, c-format msgid "No more %s replicas" msgstr "Inga fler %s-replikor" #: src/libs/data-staging/Scheduler.cpp:183 msgid "Will clean up pre-registered destination" msgstr "Kommer att rensa upp förregistrerad destination" #: src/libs/data-staging/Scheduler.cpp:187 msgid "Will release cache locks" msgstr "Kommer att frigöra cachelÃ¥s" #: src/libs/data-staging/Scheduler.cpp:190 msgid "Moving to end of data staging" msgstr "Hoppar till slutet av datastaging" #: src/libs/data-staging/Scheduler.cpp:199 #, c-format msgid "Source is mapped to %s" msgstr "Källa mappas till %s" #: src/libs/data-staging/Scheduler.cpp:203 msgid "Cannot link to source which can be modified, will copy instead" msgstr "" "Kan inte länka till källa som kan modifieras, kommer att kopiera istället" #: src/libs/data-staging/Scheduler.cpp:212 msgid "Cannot link to a remote destination. Will not use mapped URL" msgstr "" "Kan inte länka till fjärrdestination. Kommer inte att använda mappad URL" #: src/libs/data-staging/Scheduler.cpp:215 msgid "Linking mapped file" msgstr "Länkar mappad fil" #: src/libs/data-staging/Scheduler.cpp:222 #, c-format msgid "Failed to create link: %s. Will not use mapped URL" msgstr "" "Misslyckades med att skapa länk: %s. 
"Kommer inte att använda mappad URL"

#: src/libs/data-staging/Scheduler.cpp:247
#, c-format
msgid ""
"Scheduler received new DTR %s with source: %s, destination: %s, assigned to "
"transfer share %s with priority %d"
msgstr ""
"Schemaläggaren mottog en ny DTR %s med källa: %s och destination: %s, "
"tilldelad överföringsandel %s med prioritet %d"

#: src/libs/data-staging/Scheduler.cpp:255
msgid ""
"File is not cacheable, was requested not to be cached or no cache available, "
"skipping cache check"
msgstr ""
"Fil kan inte cachas, begärdes att inte cachas eller inget cache "
"tillgängligt, hoppar över cachekontroll"

#: src/libs/data-staging/Scheduler.cpp:261
msgid "File is cacheable, will check cache"
msgstr "Fil kan cachas, kommer att kontrollera cache"

#: src/libs/data-staging/Scheduler.cpp:264
#: src/libs/data-staging/Scheduler.cpp:289
#, c-format
msgid "File is currently being cached, will wait %is"
msgstr "Fil håller på att cachas, kommer att vänta %i s"

#: src/libs/data-staging/Scheduler.cpp:283
msgid "Timed out while waiting for cache lock"
msgstr "Avbröts på grund av timeout under väntan på cachelås"

#: src/libs/data-staging/Scheduler.cpp:293
msgid "Checking cache again"
msgstr "Kontrollerar cache igen"

#: src/libs/data-staging/Scheduler.cpp:313
msgid "Destination file is in cache"
msgstr "Destinationsfil finns i cache"

#: src/libs/data-staging/Scheduler.cpp:317
msgid "Source and/or destination is index service, will resolve replicas"
msgstr ""
"Källa och/eller destination är indextjänst, kommer att slå upp replikor"

#: src/libs/data-staging/Scheduler.cpp:320
msgid ""
"Neither source nor destination are index services, will skip resolving "
"replicas"
msgstr ""
"Varken källa eller destination är indextjänster, kommer att hoppa över "
"uppslagning av replikor"

#: src/libs/data-staging/Scheduler.cpp:331
msgid "Problem with index service, will release cache lock"
msgstr "Problem med indextjänst, kommer att frigöra cachelås"

#: src/libs/data-staging/Scheduler.cpp:335
msgid "Problem with index service, will proceed to end of data staging"
msgstr "Problem med indextjänst, kommer att hoppa till slutet av datastaging"

#: src/libs/data-staging/Scheduler.cpp:345
msgid "Checking source file is present"
msgstr "Kontrollerar att källfil är närvarande"

#: src/libs/data-staging/Scheduler.cpp:353
msgid "Error with source file, moving to next replica"
msgstr "Fel med källfil, hoppar till nästa replika"

#: src/libs/data-staging/Scheduler.cpp:375
#, c-format
msgid "Replica %s has long latency, trying next replica"
msgstr "Replika %s har lång latens, provar nästa replika"

#: src/libs/data-staging/Scheduler.cpp:377
#, c-format
msgid "No more replicas, will use %s"
msgstr "Inga fler replikor, kommer att använda %s"

#: src/libs/data-staging/Scheduler.cpp:380
#, c-format
msgid "Checking replica %s"
msgstr "Kontrollerar replika %s"

#: src/libs/data-staging/Scheduler.cpp:390
msgid "Overwrite requested - will pre-clean destination"
msgstr "Överskrivning begärd - kommer att för-städa destination"

#: src/libs/data-staging/Scheduler.cpp:393
msgid "No overwrite requested or allowed, skipping pre-cleaning"
msgstr "Ingen överskrivning begärd eller tillåten, hoppar över förstädning"

#: src/libs/data-staging/Scheduler.cpp:401
msgid "Pre-clean failed, will still try to copy"
msgstr "Förstädning misslyckades, kommer fortfarande att försöka kopiera"

#: src/libs/data-staging/Scheduler.cpp:408
msgid "Source or destination requires staging"
msgstr "Källa eller destination kräver staging"
#: src/libs/data-staging/Scheduler.cpp:412
msgid "No need to stage source or destination, skipping staging"
msgstr "Behöver inte staga källa eller destination, hoppar över staging"

#: src/libs/data-staging/Scheduler.cpp:442
msgid "Staging request timed out, will release request"
msgstr "Stagingbegäran avbröts på grund av timeout, kommer att frigöra begäran"

#: src/libs/data-staging/Scheduler.cpp:446
msgid "Querying status of staging request"
msgstr "Frågar efter status för stagingbegäran"

#: src/libs/data-staging/Scheduler.cpp:455
msgid "Releasing requests"
msgstr "Frigör begäranden"

#: src/libs/data-staging/Scheduler.cpp:472
msgid "DTR is ready for transfer, moving to delivery queue"
msgstr "DTR är redo att överföra, flyttar till leveranskö"

#: src/libs/data-staging/Scheduler.cpp:487
#, c-format
msgid "Transfer failed: %s"
msgstr "Överföring misslyckades: %s"

#: src/libs/data-staging/Scheduler.cpp:497
msgid "Releasing request(s) made during staging"
msgstr "Frigör begäranden som gjordes under staging"

#: src/libs/data-staging/Scheduler.cpp:500
msgid "Neither source nor destination were staged, skipping releasing requests"
msgstr ""
"Varken källa eller destination stagades, hoppar över frigörande av "
"begäranden"

#: src/libs/data-staging/Scheduler.cpp:512
msgid "Trying next replica"
msgstr "Försöker med nästa replika"

#: src/libs/data-staging/Scheduler.cpp:517
msgid "unregister"
msgstr "avregistrera"

#: src/libs/data-staging/Scheduler.cpp:517
msgid "register"
msgstr "registrera"

#: src/libs/data-staging/Scheduler.cpp:516
#, c-format
msgid "Will %s in destination index service"
msgstr "Kommer att %s i destinationsindextjänsten"

#: src/libs/data-staging/Scheduler.cpp:520
msgid "Destination is not index service, skipping replica registration"
msgstr "Destination är inte indextjänst, hoppar över replikaregistrering"

#: src/libs/data-staging/Scheduler.cpp:533
msgid "Error registering replica, moving to end of data staging"
msgstr "Fel vid registrering av replika, hoppar till slutet av datastaging"

#: src/libs/data-staging/Scheduler.cpp:542
msgid "Will process cache"
msgstr "Kommer att processera cache"

#: src/libs/data-staging/Scheduler.cpp:546
msgid "File is not cacheable, skipping cache processing"
msgstr "Filen kan inte cachas, hoppar över cacheprocessering"

#: src/libs/data-staging/Scheduler.cpp:560
msgid "Cancellation complete"
msgstr "Avbrytande slutfört"

#: src/libs/data-staging/Scheduler.cpp:574
msgid "Will wait 10s"
msgstr "Kommer att vänta 10 s"

#: src/libs/data-staging/Scheduler.cpp:580
msgid "Error in cache processing, will retry without caching"
msgstr "Fel vid cacheprocessering, kommer att försöka igen utan cachning"

#: src/libs/data-staging/Scheduler.cpp:589
msgid "Will retry without caching"
msgstr "Kommer att försöka igen utan cachning"

#: src/libs/data-staging/Scheduler.cpp:607
msgid "Proxy has expired"
msgstr "Proxyns livstid har gått ut"

#: src/libs/data-staging/Scheduler.cpp:618
#, c-format
msgid "%i retries left, will wait until %s before next attempt"
msgstr "%i försök kvar, kommer att vänta till %s innan nästa försök"

#: src/libs/data-staging/Scheduler.cpp:634
msgid "Out of retries"
msgstr "Slut på försök"

#: src/libs/data-staging/Scheduler.cpp:636
msgid "Permanent failure"
msgstr "Permanent fel"

#: src/libs/data-staging/Scheduler.cpp:642
msgid "Finished successfully"
msgstr "Avslutades framgångsrikt"

#: src/libs/data-staging/Scheduler.cpp:652
msgid "Returning to generator"
msgstr "Återvänder till generator"

#: src/libs/data-staging/Scheduler.cpp:818
#, c-format
"File is smaller than %llu bytes, will use local delivery" msgstr "Fil är mindre än %llu bytes, kommer att använda lokal leverans" #: src/libs/data-staging/Scheduler.cpp:872 #, c-format msgid "Delivery service at %s can copy to %s" msgstr "Leveranstjänst pÃ¥ %s kan kopiera till %s" #: src/libs/data-staging/Scheduler.cpp:880 #, c-format msgid "Delivery service at %s can copy from %s" msgstr "Leveranstjänst pÃ¥ %s kan kopiera frÃ¥n %s" #: src/libs/data-staging/Scheduler.cpp:893 msgid "Could not find any useable delivery service, forcing local transfer" msgstr "Kunde inte hitta en lämplig leveranstjänst, tvingar lokal överföring" #: src/libs/data-staging/Scheduler.cpp:909 #, c-format msgid "Not using delivery service at %s because it is full" msgstr "Använder inte leveranstjänst pÃ¥ %s eftersom den är full" #: src/libs/data-staging/Scheduler.cpp:936 #, c-format msgid "Not using delivery service %s due to previous failure" msgstr "Använder inte leveranstjänst %s pÃ¥ grund av tidigare fel" #: src/libs/data-staging/Scheduler.cpp:946 msgid "No remote delivery services are useable, forcing local delivery" msgstr "Inga fjärrleveranstjänster kan användas, tvingar lokal leverans" #: src/libs/data-staging/Scheduler.cpp:1150 msgid "Cancelling active transfer" msgstr "Avbryter aktiv överföring" #: src/libs/data-staging/Scheduler.cpp:1160 msgid "Processing thread timed out. Restarting DTR" msgstr "ProcesseringstrÃ¥d avbröts pÃ¥ grund av timeout. Startar om DTR" #: src/libs/data-staging/Scheduler.cpp:1228 msgid "Will use bulk request" msgstr "Kommer att använda massbegäran" #: src/libs/data-staging/Scheduler.cpp:1250 msgid "No delivery endpoints available, will try later" msgstr "Ingen leveransändpunkt tillgänglig, kommer försöka senare" #: src/libs/data-staging/Scheduler.cpp:1269 msgid "Scheduler received NULL DTR" msgstr "Schemalägger mottog NULL-DTR" #: src/libs/data-staging/Scheduler.cpp:1279 msgid "Scheduler received invalid DTR" msgstr "Schemaläggare mottog ogiltig DTR" #: src/libs/data-staging/Scheduler.cpp:1368 msgid "Scheduler starting up" msgstr "Schemaläggare startar" #: src/libs/data-staging/Scheduler.cpp:1369 msgid "Scheduler configuration:" msgstr "Schemaläggarinställningar:" #: src/libs/data-staging/Scheduler.cpp:1370 #, c-format msgid " Pre-processor slots: %u" msgstr " Förprocesserings-slottar: %u" #: src/libs/data-staging/Scheduler.cpp:1371 #, c-format msgid " Delivery slots: %u" msgstr " Leverans-slottar: %u" #: src/libs/data-staging/Scheduler.cpp:1372 #, c-format msgid " Post-processor slots: %u" msgstr " Efterprocesserings-slottar: %u" #: src/libs/data-staging/Scheduler.cpp:1373 #, c-format msgid " Emergency slots: %u" msgstr " Akutslottar: %u" #: src/libs/data-staging/Scheduler.cpp:1374 #, c-format msgid " Prepared slots: %u" msgstr " Förberedda slottar: %u" #: src/libs/data-staging/Scheduler.cpp:1375 #, c-format msgid "" " Shares configuration:\n" "%s" msgstr "" " Andelsinställningar:\n" "%s" #: src/libs/data-staging/Scheduler.cpp:1378 msgid " Delivery service: LOCAL" msgstr " Leveranstjänst: LOKAL" #: src/libs/data-staging/Scheduler.cpp:1379 #, c-format msgid " Delivery service: %s" msgstr " Leveranstjänst: %s" #: src/libs/data-staging/Scheduler.cpp:1384 msgid "Failed to create DTR dump thread" msgstr "Misslyckades med att skapa DTR-dumpningstrÃ¥d" #: src/libs/data-staging/Scheduler.cpp:1401 #: src/services/data-staging/DataDeliveryService.cpp:507 #, c-format msgid "DTR %s cancelled" msgstr "DTR %s avbröts" #: src/libs/data-staging/examples/Generator.cpp:15 msgid "Shutting down 
scheduler" msgstr "Stänger ner schemaläggare" #: src/libs/data-staging/examples/Generator.cpp:17 msgid "Scheduler stopped, exiting" msgstr "Schemaläggare stoppar, avstutar" #: src/libs/data-staging/examples/Generator.cpp:23 #, c-format msgid "Received DTR %s back from scheduler in state %s" msgstr "Fick tillbaka DTR %s frÃ¥n schemaläggare i tillstÃ¥nd %s" #: src/libs/data-staging/examples/Generator.cpp:30 msgid "Generator started" msgstr "Generator startad" #: src/libs/data-staging/examples/Generator.cpp:31 msgid "Starting DTR threads" msgstr "Startar DTR-trÃ¥dar" #: src/libs/data-staging/examples/Generator.cpp:44 msgid "No valid credentials found, exiting" msgstr "Hittade inga giltiga referenser, avslutar" #: src/libs/data-staging/examples/Generator.cpp:55 #, c-format msgid "Problem creating dtr (source %s, destination %s)" msgstr "Problem med att skapa katalog (källa %s, destination %s)" #: src/services/a-rex/arex.cpp:452 src/services/candypond/CandyPond.cpp:569 #: src/services/data-staging/DataDeliveryService.cpp:681 #, c-format msgid "SOAP operation is not supported: %s" msgstr "SOAP-process stöds inte: %s" #: src/services/a-rex/arex.cpp:471 src/services/a-rex/arex.cpp:517 #, c-format msgid "Security Handlers processing failed: %s" msgstr "Säkerhetshanterarprocessering misslyckades: %s" #: src/services/a-rex/arex.cpp:485 msgid "Can't obtain configuration. Public information is disabled." msgstr "Kan inte erhÃ¥lla inställningar. Publik information har stängts av." #: src/services/a-rex/arex.cpp:495 msgid "" "Can't obtain configuration. Public information is disallowed for this user." msgstr "" "Kan inte erhÃ¥lla inställningar. Publik information är inte tillÃ¥ten för " "denna användare." #: src/services/a-rex/arex.cpp:502 msgid "Can't obtain configuration. Only public information is provided." msgstr "" "Kan inte erhÃ¥lla inställningar. Endast publik information tillhandahÃ¥lls." 
#: src/services/a-rex/arex.cpp:530 src/services/a-rex/rest/rest.cpp:674
#, c-format
msgid "Connection from %s: %s"
msgstr "Förbindelse från %s: %s"

#: src/services/a-rex/arex.cpp:533 src/services/a-rex/rest/rest.cpp:678
#, c-format
msgid "process: method: %s"
msgstr "process: metod: %s"

#: src/services/a-rex/arex.cpp:534 src/services/a-rex/rest/rest.cpp:679
#, c-format
msgid "process: endpoint: %s"
msgstr "process: ändpunkt: %s"

#: src/services/a-rex/arex.cpp:559
#, c-format
msgid "process: id: %s"
msgstr "process: id: %s"

#: src/services/a-rex/arex.cpp:560
#, c-format
msgid "process: subop: %s"
msgstr "process: subop: %s"

#: src/services/a-rex/arex.cpp:567
#, c-format
msgid "process: subpath: %s"
msgstr "process: subsökväg: %s"

#: src/services/a-rex/arex.cpp:605 src/services/candypond/CandyPond.cpp:543
#: src/services/data-staging/DataDeliveryService.cpp:641
#: src/tests/echo/echo.cpp:98
#, c-format
msgid "process: request=%s"
msgstr "process: begäran=%s"

#: src/services/a-rex/arex.cpp:610 src/services/candypond/CandyPond.cpp:548
#: src/services/data-staging/DataDeliveryService.cpp:646
#: src/tests/count/count.cpp:69
msgid "input does not define operation"
msgstr "indata definierar ej operation"

#: src/services/a-rex/arex.cpp:613 src/services/candypond/CandyPond.cpp:551
#: src/services/data-staging/DataDeliveryService.cpp:649
#: src/tests/count/count.cpp:72
#, c-format
msgid "process: operation: %s"
msgstr "process: operation: %s"

#: src/services/a-rex/arex.cpp:640
msgid "POST request on special path is not supported"
msgstr "POST-begäran på specialsökväg stöds inte"

#: src/services/a-rex/arex.cpp:645
msgid "process: factory endpoint"
msgstr "process: factoryändpunkt"

#: src/services/a-rex/arex.cpp:788 src/services/candypond/CandyPond.cpp:580
#: src/services/data-staging/DataDeliveryService.cpp:692
#: src/tests/echo/echo.cpp:158
#, c-format
msgid "process: response=%s"
msgstr "process: svar=%s"

#: src/services/a-rex/arex.cpp:794
msgid "Per-job POST/SOAP requests are not supported"
msgstr "Per-jobb POST/SOAP-begäran stöds inte"

#: src/services/a-rex/arex.cpp:803
msgid "process: GET"
msgstr "process: GET"

#: src/services/a-rex/arex.cpp:804
#, c-format
msgid "GET: id %s path %s"
msgstr "GET: id %s sökväg %s"

#: src/services/a-rex/arex.cpp:837
msgid "process: HEAD"
msgstr "process: HEAD"

#: src/services/a-rex/arex.cpp:838
#, c-format
msgid "HEAD: id %s path %s"
msgstr "HEAD: id %s sökväg %s"

#: src/services/a-rex/arex.cpp:871
msgid "process: PUT"
msgstr "process: PUT"

#: src/services/a-rex/arex.cpp:904
msgid "process: DELETE"
msgstr "process: DELETE"

#: src/services/a-rex/arex.cpp:937
#, c-format
msgid "process: method %s is not supported"
msgstr "process: metod %s stöds inte"

#: src/services/a-rex/arex.cpp:940
msgid "process: method is not defined"
msgstr "process: metod är inte definierad"

#: src/services/a-rex/arex.cpp:1050
msgid "Failed to run Grid Manager thread"
msgstr "Misslyckades med att köra Grid-Manager-tråd"

#: src/services/a-rex/arex.cpp:1109
#, c-format
msgid "Failed to process configuration in %s"
msgstr "Misslyckades med att processera inställningar i %s"

#: src/services/a-rex/arex.cpp:1114
msgid "No control directory set in configuration"
msgstr "Ingen kontrollkatalog satt i inställningarna"

#: src/services/a-rex/arex.cpp:1118
msgid "No session directory set in configuration"
msgstr "Ingen sessionskatalog satt i inställningarna"

#: src/services/a-rex/arex.cpp:1122
msgid "No LRMS set in configuration"
msgstr "Inget LRMS satt i inställningarna"
#: src/services/a-rex/arex.cpp:1127
#, c-format
msgid "Failed to create control directory %s"
msgstr "Misslyckades med att skapa kontrollkatalog %s"

#: src/services/a-rex/cachecheck.cpp:37
#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:658
#, c-format
msgid "Error with cache configuration: %s"
msgstr "Fel med cacheinställningar: %s"

#: src/services/a-rex/cachecheck.cpp:53
#: src/services/candypond/CandyPond.cpp:318
msgid "Error with cache configuration"
msgstr "Fel med cacheinställningar"

#: src/services/a-rex/cachecheck.cpp:78
#: src/services/candypond/CandyPond.cpp:146
#: src/services/candypond/CandyPond.cpp:343
#, c-format
msgid "Looking up URL %s"
msgstr "Slår upp URL %s"

#: src/services/a-rex/cachecheck.cpp:80
#: src/services/candypond/CandyPond.cpp:155
#, c-format
msgid "Cache file is %s"
msgstr "Cachefil är %s"

#: src/services/a-rex/change_activity_status.cpp:55
#: src/services/a-rex/change_activity_status.cpp:59
#, c-format
msgid "EMIES:PauseActivity: job %s - %s"
msgstr "EMIES:PauseActivity: jobb %s - %s"

#: src/services/a-rex/change_activity_status.cpp:104
#: src/services/a-rex/change_activity_status.cpp:108
#, c-format
msgid "EMIES:ResumeActivity: job %s - %s"
msgstr "EMIES:ResumeActivity: jobb %s - %s"

#: src/services/a-rex/change_activity_status.cpp:153
#: src/services/a-rex/change_activity_status.cpp:158
#, c-format
msgid "EMIES:CancelActivity: job %s - %s"
msgstr "EMIES:CancelActivity: jobb %s - %s"

#: src/services/a-rex/change_activity_status.cpp:166
#, c-format
msgid "job %s cancelled successfully"
msgstr "jobb %s avbröts framgångsrikt"

#: src/services/a-rex/change_activity_status.cpp:212
#: src/services/a-rex/change_activity_status.cpp:227
#, c-format
msgid "EMIES:WipeActivity: job %s - %s"
msgstr "EMIES:WipeActivity: jobb %s - %s"

#: src/services/a-rex/change_activity_status.cpp:231
#, c-format
msgid "job %s (will be) cleaned successfully"
msgstr "jobb %s (kommer att bli) bortstädat framgångsrikt"

#: src/services/a-rex/change_activity_status.cpp:277
#: src/services/a-rex/change_activity_status.cpp:282
#, c-format
msgid "EMIES:RestartActivity: job %s - %s"
msgstr "EMIES:RestartActivity: jobb %s - %s"

#: src/services/a-rex/change_activity_status.cpp:286
#, c-format
msgid "job %s restarted successfully"
msgstr "jobb %s startades om framgångsrikt"

#: src/services/a-rex/change_activity_status.cpp:301
#: src/services/a-rex/put.cpp:163 src/services/a-rex/put.cpp:204
#, c-format
msgid "%s: there is no such job: %s"
msgstr "%s: det finns inget jobb: %s"

#: src/services/a-rex/change_activity_status.cpp:309
#, c-format
msgid "%s: put log %s: there is no payload"
msgstr "%s: put logg %s: det finns ingen nyttolast"

#: src/services/a-rex/change_activity_status.cpp:315
#, c-format
msgid "%s: put log %s: unrecognized payload"
msgstr "%s: put logg %s: okänd nyttolast"

#: src/services/a-rex/change_activity_status.cpp:354
msgid "A-REX REST: Failed to resume job"
msgstr "A-REX REST: Misslyckades med att återuppta jobb"

#: src/services/a-rex/change_activity_status.cpp:358
#, c-format
msgid "A-REX REST: State change not allowed: from %s to %s"
msgstr "A-REX REST: Tillståndsändring inte tillåten: från %s till %s"

#: src/services/a-rex/create_activity.cpp:52
#, c-format
msgid ""
"EMIES:CreateActivity: request = \n"
"%s"
msgstr ""
"EMIES:CreateActivity: begäran = \n"
"%s"

#: src/services/a-rex/create_activity.cpp:58
msgid "EMIES:CreateActivity: too many activity descriptions"
msgstr "EMIES:CreateActivity: för många aktivitetsbeskrivningar"

#: src/services/a-rex/create_activity.cpp:68
msgid "EMIES:CreateActivity: no job description found" msgstr "EMIES:CreateActivity: hittade ingen jobbeskrivning" #: src/services/a-rex/create_activity.cpp:75 msgid "EMIES:CreateActivity: max jobs total limit reached" msgstr "EMIES:CreateActivity: maxgräns för totalt antal jobb nÃ¥dd" #: src/services/a-rex/create_activity.cpp:101 #, c-format msgid "ES:CreateActivity: Failed to create new job: %s" msgstr "ES:CreateActivity: Misslyckades med att skapa nytt jobb: %s" #: src/services/a-rex/create_activity.cpp:117 msgid "EMIES:CreateActivity finished successfully" msgstr "EMIES:CreateActivity avslutades framgÃ¥ngsrikt" #: src/services/a-rex/create_activity.cpp:118 #, c-format msgid "New job accepted with id %s" msgstr "Nytt jobb accepterat med id %s" #: src/services/a-rex/create_activity.cpp:122 #, c-format msgid "" "EMIES:CreateActivity: response = \n" "%s" msgstr "" "EMIES:CreateActivity: svar = \n" "%s" #: src/services/a-rex/create_activity.cpp:137 msgid "NEW: put new job: there is no payload" msgstr "NYTT: put nytt jobb: det finns inge nyttolast" #: src/services/a-rex/create_activity.cpp:141 msgid "NEW: put new job: max jobs total limit reached" msgstr "NYTT: put nytt jobb: gränsen för max totalt antal jobb nÃ¥dd" #: src/services/a-rex/delegation/DelegationStore.cpp:51 msgid "Wiping and re-creating whole storage" msgstr "Raderar och Ã¥terskapar hela lagret" #: src/services/a-rex/delegation/DelegationStore.cpp:214 #: src/services/a-rex/delegation/DelegationStore.cpp:316 #, c-format msgid "DelegationStore: TouchConsumer failed to create file %s" msgstr "DelegationStore: TouchConsumer misslyckades med att skapa fil %s" #: src/services/a-rex/delegation/DelegationStore.cpp:276 msgid "DelegationStore: PeriodicCheckConsumers failed to resume iterator" msgstr "" "DelegationStore: PeriodicCheckConsumers misslyckades med att Ã¥teruppta " "iterator" #: src/services/a-rex/delegation/DelegationStore.cpp:296 #, c-format msgid "" "DelegationStore: PeriodicCheckConsumers failed to remove old delegation %s - " "%s" msgstr "" "DelegationStore: PeriodicCheckConsumers misslyckades med att ta bort gammal " "delegering %s - %s" #: src/services/a-rex/get.cpp:174 src/services/a-rex/get.cpp:229 #: src/services/a-rex/get.cpp:313 #, c-format msgid "Get: there is no job %s - %s" msgstr "Get: det finns inget jobb %s - %s" #: src/services/a-rex/get.cpp:380 #, c-format msgid "Head: there is no job %s - %s" msgstr "Head: det finns inget jobb %s - %s" #: src/services/a-rex/get.cpp:436 msgid "Failed to extract credential information" msgstr "Misslyckades med att extrahera referensinformation" #: src/services/a-rex/get.cpp:439 #, c-format msgid "Checking cache permissions: DN: %s" msgstr "Kontrollerar cacheÃ¥tkomsträttigheter: DN: %s" #: src/services/a-rex/get.cpp:440 #, c-format msgid "Checking cache permissions: VO: %s" msgstr "Kontrollerar cacheÃ¥tkomsträttigheter: VO: %s" #: src/services/a-rex/get.cpp:442 #, c-format msgid "Checking cache permissions: VOMS attr: %s" msgstr "Kontrollerar cacheÃ¥tkomsträttigheter: VOMS attr: %s" #: src/services/a-rex/get.cpp:452 #, c-format msgid "Cache access allowed to %s by DN %s" msgstr "Ã…tkomst till cache tillÃ¥tet för %s av DN %s" #: src/services/a-rex/get.cpp:455 #, c-format msgid "DN %s doesn't match %s" msgstr "DN %s matchar inte %s" #: src/services/a-rex/get.cpp:458 #, c-format msgid "Cache access allowed to %s by VO %s" msgstr "Ã…tkomst till cache tillÃ¥tet för %s av VO %s" #: src/services/a-rex/get.cpp:461 #, c-format msgid "VO %s doesn't match %s" msgstr "VO %s matchar inte 
%s" #: src/services/a-rex/get.cpp:467 src/services/a-rex/get.cpp:486 #, c-format msgid "Bad credential value %s in cache access rules" msgstr "Felaktigt referensvärde %s i cacheÃ¥tkomstregler" #: src/services/a-rex/get.cpp:475 src/services/a-rex/get.cpp:494 #, c-format msgid "VOMS attr %s matches %s" msgstr "VOMS attr %s matchar %s" #: src/services/a-rex/get.cpp:476 #, c-format msgid "Cache access allowed to %s by VO %s and role %s" msgstr "Ã…tkomst till cache tillÃ¥tet för %s av VO %s och roll %s" #: src/services/a-rex/get.cpp:479 src/services/a-rex/get.cpp:498 #, c-format msgid "VOMS attr %s doesn't match %s" msgstr "VOMS attr %s matchar inte %s" #: src/services/a-rex/get.cpp:495 #, c-format msgid "Cache access allowed to %s by VO %s and group %s" msgstr "Ã…tkomst till cache tillÃ¥tet för %s av VO %s och grupp %s" #: src/services/a-rex/get.cpp:501 #, c-format msgid "Unknown credential type %s for URL pattern %s" msgstr "Okänd referenstyp %s för URL-mönster %s" #: src/services/a-rex/get.cpp:507 #, c-format msgid "No match found in cache access rules for %s" msgstr "Hittade ingen match i cacheÃ¥tkomstregler för %s" #: src/services/a-rex/get.cpp:517 #, c-format msgid "Get from cache: Looking in cache for %s" msgstr "Hämta frÃ¥n cache: Söker i cache efter %s" #: src/services/a-rex/get.cpp:520 #, c-format msgid "Get from cache: Invalid URL %s" msgstr "Hämta frÃ¥n cache: Ogiltig URL: %s" #: src/services/a-rex/get.cpp:537 msgid "Get from cache: Error in cache configuration" msgstr "Hämta frÃ¥n cache: Fel med cacheinställningar" #: src/services/a-rex/get.cpp:546 msgid "Get from cache: File not in cache" msgstr "Hämta frÃ¥n cache: Fil inte i cache" #: src/services/a-rex/get.cpp:549 #, c-format msgid "Get from cache: could not access cached file: %s" msgstr "Hämta frÃ¥n cache: kunde inte komma Ã¥t cachad fil: %s" #: src/services/a-rex/get.cpp:559 msgid "Get from cache: Cached file is locked" msgstr "Hämta frÃ¥n cache: Cachad fil är lÃ¥st" #: src/services/a-rex/get_activity_statuses.cpp:214 #: src/services/a-rex/get_activity_statuses.cpp:320 #, c-format msgid "EMIES:GetActivityStatus: job %s - %s" msgstr "EMIES:GetActivityStatus: jobb %s - %s" #: src/services/a-rex/get_activity_statuses.cpp:455 #, c-format msgid "EMIES:GetActivityInfo: job %s - failed to retrieve GLUE2 information" msgstr "" "EMIES:GetActivityInfo: jobb %s - misslyckades med att hämta GLUE2-information" #: src/services/a-rex/get_activity_statuses.cpp:507 #: src/services/a-rex/get_activity_statuses.cpp:514 #, c-format msgid "EMIES:NotifyService: job %s - %s" msgstr "EMIES:NotifyService: jobb %s - %s" #: src/services/a-rex/grid-manager/GridManager.cpp:98 #, c-format msgid "" "Cannot create directories for log file %s. Messages will be logged to this " "log" msgstr "" "Kan inte skapa kataloger för loggfil %s. Meddelanden kommer att loggas i " "denna logg" #: src/services/a-rex/grid-manager/GridManager.cpp:104 #, c-format msgid "" "Cannot open cache log file %s: %s. Cache cleaning messages will be logged to " "this log" msgstr "" "Kan inte öppna cacheloggfil %s: %s. 
Cacherensningsmeddelanden kommer att " "loggas till denna logg" #: src/services/a-rex/grid-manager/GridManager.cpp:114 msgid "Failed to start cache clean script" msgstr "Misslyckades med att starta cacherensningsskript" #: src/services/a-rex/grid-manager/GridManager.cpp:115 msgid "Cache cleaning script failed" msgstr "Cacherensningsskript misslyckades" #: src/services/a-rex/grid-manager/GridManager.cpp:183 #, c-format msgid "External request for attention %s" msgstr "Extern begäran om uppmärksamhet %s" #: src/services/a-rex/grid-manager/GridManager.cpp:201 #, c-format msgid "Failed to open heartbeat file %s" msgstr "Misslyckades med att öppna hjärtslagsfil %s" #: src/services/a-rex/grid-manager/GridManager.cpp:223 msgid "Starting jobs processing thread" msgstr "Startar jobbprocesserings-tråd" #: src/services/a-rex/grid-manager/GridManager.cpp:224 #, c-format msgid "Used configuration file %s" msgstr "Använd inställningsfil %s" #: src/services/a-rex/grid-manager/GridManager.cpp:232 #, c-format msgid "" "Error initiating delegation database in %s. Maybe permissions are not " "suitable. Returned error is: %s." msgstr "" "Fel vid initiering av delegeringsdatabas i %s. Kanske åtkomsträttigheter " "inte är lämpliga. Returnerat fel är: %s." #: src/services/a-rex/grid-manager/GridManager.cpp:244 msgid "Failed to start new thread: cache won't be cleaned" msgstr "Misslyckades med att starta ny tråd: cache kommer ej att rensas" #: src/services/a-rex/grid-manager/GridManager.cpp:251 msgid "Failed to activate Jobs Processing object, exiting Grid Manager thread" msgstr "" "Misslyckades med att aktivera jobbprocesseringsobjekt, avslutar grid-manager-" "tråd" #: src/services/a-rex/grid-manager/GridManager.cpp:260 #, c-format msgid "" "Error adding communication interface in %s. Maybe another instance of A-REX " "is already running." msgstr "" "Fel när kommunikationsgränssnitt lades till i %s. Kanske kör redan en annan " "instans av A-REX." #: src/services/a-rex/grid-manager/GridManager.cpp:263 #, c-format msgid "" "Error adding communication interface in %s. Maybe permissions are not " "suitable." msgstr "" "Fel när kommunikationsgränssnitt lades till i %s. Kanske är " "åtkomsträttigheter inte lämpliga." #: src/services/a-rex/grid-manager/GridManager.cpp:270 msgid "Failed to start new thread for monitoring job requests" msgstr "Misslyckades med att starta ny tråd för monitorering av jobbegärningar" #: src/services/a-rex/grid-manager/GridManager.cpp:276 msgid "Picking up left jobs" msgstr "Plockar upp lämnade jobb" #: src/services/a-rex/grid-manager/GridManager.cpp:279 msgid "Starting data staging threads" msgstr "Startar datastaging-trådar" #: src/services/a-rex/grid-manager/GridManager.cpp:283 msgid "Starting jobs' monitoring" msgstr "Startar jobbmonitorering" #: src/services/a-rex/grid-manager/GridManager.cpp:291 #, c-format msgid "" "SSHFS mount point of session directory (%s) is broken - waiting for " "reconnect ..." msgstr "" "SSHFS-monteringspunkt för sessionskatalogen (%s) är trasig - väntar på " "återuppkoppling ..." #: src/services/a-rex/grid-manager/GridManager.cpp:295 #, c-format msgid "" "SSHFS mount point of runtime directory (%s) is broken - waiting for " "reconnect ..." msgstr "" "SSHFS-monteringspunkt för runtimekatalogen (%s) är trasig - väntar på " "återuppkoppling ..." #: src/services/a-rex/grid-manager/GridManager.cpp:300 #, c-format msgid "" "SSHFS mount point of cache directory (%s) is broken - waiting for " "reconnect ..." 
msgstr "" "SSHFS-monteringspunkt för cachekatalogen (%s) är trasig - väntar pÃ¥ " "Ã¥teruppkoppling ...<" #: src/services/a-rex/grid-manager/GridManager.cpp:349 #, c-format msgid "Orphan delegation lock detected (%s) - cleaning" msgstr "Föräldralöst delegeringslÃ¥s detekterat (%s) - städar" #: src/services/a-rex/grid-manager/GridManager.cpp:354 msgid "Failed to obtain delegation locks for cleaning orphaned locks" msgstr "" "Misslyckades med att erhÃ¥lla delegeringslÃ¥s för att ta bort föräldralösa lÃ¥s" #: src/services/a-rex/grid-manager/GridManager.cpp:368 msgid "Waking up" msgstr "Vaknar upp" #: src/services/a-rex/grid-manager/GridManager.cpp:371 msgid "Stopping jobs processing thread" msgstr "Stoppar jobbprocesseringstrÃ¥d" #: src/services/a-rex/grid-manager/GridManager.cpp:373 msgid "Exiting jobs processing thread" msgstr "Avslutar jobbprocesseringstrÃ¥d" #: src/services/a-rex/grid-manager/GridManager.cpp:391 msgid "Requesting to stop job processing" msgstr "Begär att avsluta jobbprocessering" #: src/services/a-rex/grid-manager/GridManager.cpp:399 msgid "Waiting for main job processing thread to exit" msgstr "Väntar pÃ¥ att huvud-jobbprocesseringstrÃ¥den avslutas" #: src/services/a-rex/grid-manager/GridManager.cpp:401 msgid "Stopped job processing" msgstr "Avslutade jobbprocessering" #: src/services/a-rex/grid-manager/accounting/AAR.cpp:73 msgid "Cannot find information abouto job submission endpoint" msgstr "Kan inte hitta information om jobbinsändningsändpunkt" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:53 #, c-format msgid "Failed to read database schema file at %s" msgstr "Misslyckades med att läsa databasschemafil pÃ¥ %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:63 msgid "Accounting database initialized succesfully" msgstr "Bokföringsdatabas initierad" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:65 msgid "Accounting database connection has been established" msgstr "Bokföringsdatabasförbindelse har etablerats" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:75 #, c-format msgid "%s. SQLite database error: %s" msgstr "%s. SQLite-databasfel: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:77 #, c-format msgid "SQLite database error: %s" msgstr "SQLite-databasfel: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:105 #, c-format msgid "Directory %s to store accounting database has been created." msgstr "Katalog %s som ska lagra bokföringsdatabasen har skapats." #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:107 #, c-format msgid "" "Accounting database cannot be created. Faile to create parent directory %s." msgstr "" "Bokföringsdatabasen kan inte skapas. Misslyckades med att skapa " "föräldrakatalog %s." #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:111 #, c-format msgid "Accounting database cannot be created: %s is not a directory" msgstr "Bokföringsdatabasen kan inte skapas. 
%s är inte en katalog" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:118 msgid "Failed to initialize accounting database" msgstr "Misslyckades med att initiera bokföringsdatabas" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:125 #, c-format msgid "Accounting database file (%s) is not a regular file" msgstr "Bokföringsdatabasfil (%s) är inte en vanlig fil" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:131 msgid "Error opening accounting database" msgstr "Fel vid öppnande av bokföringsdatabas" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:149 msgid "Closing connection to SQLite accounting database" msgstr "Stänger förbindelse till SQLite-bokföringsdatabas" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:238 #, c-format msgid "Failed to fetch data from %s accounting database table" msgstr "Misslyckades med att hämta data frÃ¥n %s bokföringsdatabastabell" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:255 #, c-format msgid "Failed to add '%s' into the accounting database %s table" msgstr "" "Misslyckades med att lägga till '%s' till bokföringsdatabasen %s-tabell" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:314 msgid "Failed to fetch data from accounting database Endpoints table" msgstr "" "Misslyckades med att hämta data frÃ¥n bokföringsdatabasens Endpoints-tabell" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:331 #, c-format msgid "" "Failed to add '%s' URL (interface type %s) into the accounting database " "Endpoints table" msgstr "" "Misslyckades med att lägga till '%s' URL (gränssnittstyp %s) till " "bokföringsdatabasens Endpoints-tabell" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:357 #, c-format msgid "Failed to query AAR database ID for job %s" msgstr "Misslyckades med att frÃ¥ga efter AAR-databas-ID för jobb %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:412 #, c-format msgid "Failed to insert AAR into the database for job %s" msgstr "Misslyckades med att sätta in AAR i databasen för jobb %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:413 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:460 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:491 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:507 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:523 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:544 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:560 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:575 #, c-format msgid "SQL statement used: %s" msgstr "Använd SQL-sats: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:418 #, c-format msgid "Failed to write authtoken attributes for job %s" msgstr "Misslyckades med att skriva auktoriserings-token-attribut för jobb %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:422 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:477 #, c-format msgid "Failed to write event records for job %s" msgstr "Misslyckades med att skriva händelseposter för jobb %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:433 #, c-format msgid "" "Cannot to update AAR. Cannot find registered AAR for job %s in accounting " "database." msgstr "" "Kan inte uppdatera AAR. 
Kan inte hitta registrerad AAR för jobb %s i " "bokföringsdatabasen." #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:459 #, c-format msgid "Failed to update AAR in the database for job %s" msgstr "Misslyckades med att uppdatera AAR i databasen för jobb %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:465 #, c-format msgid "Failed to write RTEs information for the job %s" msgstr "Misslyckades med att skriva RTE-information för jobb %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:469 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:473 #, c-format msgid "Failed to write data transfers information for the job %s" msgstr "Misslyckades med att skriva dataöverföringsinformation för jobb %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:569 #, c-format msgid "Unable to add event: cannot find AAR for job %s in accounting database." msgstr "" "Kan inte lägga till händelse: kan inte hitta AAR för jobb %s i " "bokföringsdatabasen." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:73 #, c-format msgid "Unknown option %s" msgstr "Okänt alternativ %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:80 msgid "Job ID argument is required." msgstr "Jobb-id-argument är obligatoriskt." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:86 msgid "Path to user's proxy file should be specified." msgstr "Sökväg till användarens proxyfil ska anges." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:92 msgid "User name should be specified." msgstr "Användarnamn ska anges." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:98 msgid "Path to .local job status file is required." msgstr "Sökväg till .local jobbstatusfil är obligatorisk." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:106 msgid "Generating ceID prefix from hostname automatically" msgstr "Genererar ceID-prefix från värdnamn automatiskt" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:109 msgid "" "Cannot determine hostname from gethostname() to generate ceID automatically." msgstr "" "Kan inte bestämma värdnamn från gethostname() för att generera ceID " "automatiskt." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:118 #, c-format msgid "ceID prefix is set to %s" msgstr "ceID-prefix är satt till %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:126 #, c-format msgid "Getting currect timestamp for BLAH parser log: %s" msgstr "Hämtar nuvarande klockslag för BLAH-tolk-logg: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:135 msgid "Parsing .local file to obtain job-specific identifiers and info" msgstr "" "Tolkar .local-fil för att erhålla jobb-specifika identifierare och info" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:145 #, c-format msgid "globalid is set to %s" msgstr "globalid är satt till %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:148 #, c-format msgid "headnode is set to %s" msgstr "headnode är satt till %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:151 #, c-format msgid "interface is set to %s" msgstr "gränssnitt är satt till %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:155 msgid "There is no local LRMS ID. Message will not be written to BLAH log." msgstr "" "Det finns inget lokalt LRMS-ID. Meddelande kommer inte att skrivas till BLAH-" "logg." 
#: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:158 #, c-format msgid "localid is set to %s" msgstr "localid är satt till %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:161 #, c-format msgid "queue name is set to %s" msgstr "könamn är satt till %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:164 #, c-format msgid "owner subject is set to %s" msgstr "ägarsubjekt är satt till %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:166 msgid "" "Job did not finished successfully. Message will not be written to BLAH log." msgstr "" "Jobb avslutades inte framgångsrikt. Meddelande kommer inte att skrivas till " "BLAH-logg." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:174 #, c-format msgid "Job timestamp successfully parsed as %s" msgstr "Jobbets klockslag tolkades framgångsrikt som %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:178 msgid "Can not read information from the local job status file" msgstr "Kan inte läsa information från den lokala statusfilen" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:194 #, c-format msgid "" "Unsupported submission interface %s. Seems arc-blahp-logger need to be " "updated accordingly. Please submit the bug to bugzilla." msgstr "" "Insändningsgränssnitt som inte stöds %s. Det ser ut som om arc-blahp-logger " "måste uppdateras. Sänd in buggen till bugzilla." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:204 msgid "Parsing VOMS AC to get FQANs information" msgstr "Tolkar VOMS-AC för att få FQAN-information" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:217 #, c-format msgid "Found VOMS AC attribute: %s" msgstr "Hittade VOMS-AC-attribut: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:230 msgid "VOMS AC attribute is a tag" msgstr "VOMS-AC-attribut är en tagg" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:237 msgid "Skipping policyAuthority VOMS AC attribute" msgstr "Hoppar över policyAuthority VOMS-AC-attribut" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:241 msgid "VOMS AC attribute is the FQAN" msgstr "VOMS-AC-attribut är FQAN" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:249 msgid "No FQAN found. Using None as userFQAN value" msgstr "Hittade inget FQAN. 
Använde None som användar-FQAN-värde" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:263 #, c-format msgid "Assembling BLAH parser log entry: %s" msgstr "Sätter samman BLAH-parser-logg-post: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:268 #, c-format msgid "Writing the info to the BLAH parser log: %s" msgstr "Skriver informationen till BLAH-tolk-loggen: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:276 #, c-format msgid "Cannot open BLAH log file '%s'" msgstr "Kan inte öppna BLAH-loggfil '%s'" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:34 #, c-format msgid "Missing cancel-%s-job - job cancellation may not work" msgstr "Saknar cancel-%s-job - avbrytande av jobb kanske inte fungerar" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:38 #, c-format msgid "Missing submit-%s-job - job submission to LRMS may not work" msgstr "" "Saknar submit-%s-job - insändning av jobb till LRMS kanske inte fungerar" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:42 #, c-format msgid "Missing scan-%s-job - may miss when job finished executing" msgstr "Saknar scan-%s-job - kan missa när jobb har slutat exekvera" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:56 #, c-format msgid "Wrong option in %s" msgstr "Felaktigt alternativ i %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:67 #, c-format msgid "Can't read configuration file at %s" msgstr "Kan inte läsa inställningsfil pÃ¥ %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:77 #, c-format msgid "Can't recognize type of configuration file at %s" msgstr "Känner inte igen typ av inställningsfil pÃ¥ %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:80 msgid "Could not determine configuration type or configuration is empty" msgstr "Kunde inte bestämma typ av inställningar eller inställningar är tom" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:161 msgid "lrms is empty" msgstr "lrms är tom" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:194 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:203 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:212 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:221 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:230 msgid "Missing number in maxjobs" msgstr "Saknar nummer i maxjobs" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:197 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:206 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:215 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:224 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:233 #, c-format msgid "Wrong number in maxjobs: %s" msgstr "Felaktigt nummer i maxjobs: %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:243 #, c-format msgid "Wrong number in wakeupperiod: %s" msgstr "Felaktigt nummer i wakeupperiod: %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:249 msgid "mail parameter is empty" msgstr "mail-parametern är tom" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:255 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:259 msgid "Wrong number in defaultttl command" msgstr "Felaktigt nummer i defaultttl-kommando" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:265 msgid "Wrong number in maxrerun command" msgstr "Felaktigt nummer i maxrerun-kommando" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:272 msgid "State name for plugin is missing" msgstr "TillstÃ¥ndsnamn för plugin saknas" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:276 msgid "Options 
for plugin are missing" msgstr "Alternativ för plugin saknas" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:279 #, c-format msgid "Failed to register plugin for state %s" msgstr "Misslyckades med att registrera plugin för tillstÃ¥nd %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:285 msgid "Session root directory is missing" msgstr "Sessions-rotkatalog saknas" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:288 msgid "Junk in sessiondir command" msgstr "Skräp i sessiondir-kommando" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:300 msgid "Missing directory in controldir command" msgstr "Saknar katalog i controldir-kommando" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:305 msgid "" "'control' configuration option is no longer supported, please use " "'controldir' instead" msgstr "" "'control'-inställningsalternativet stöds inte längre, använd 'controldir' " "istället" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:310 msgid "User for helper program is missing" msgstr "Användare för hjälpprogram saknas" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:313 msgid "Only user '.' for helper program is supported" msgstr "Endast användare '.' för hjälpprogram stöds" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:316 msgid "Helper program is missing" msgstr "Hjälpprogram saknas" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:337 msgid "Wrong option in fixdirectories" msgstr "Felaktigt alternativ i fixdirectories" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:364 msgid "Wrong option in delegationdb" msgstr "Felaktigt alternativ i delegationdb" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:370 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:556 msgid "forcedefaultvoms parameter is empty" msgstr "forcedefaultvoms-parametern är tom" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:445 msgid "Wrong number in maxjobdesc command" msgstr "Felaktigt nummer i maxjobdesc-kommando" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:495 msgid "Missing file name in [arex/jura] logfile" msgstr "Saknat filnamn i [arex/jura] loggfil" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:506 #, c-format msgid "Wrong number in urdelivery_frequency: %s" msgstr "Felaktigt nummer i urdelivery_frequency: %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:549 msgid "No queue name given in queue block name" msgstr "Inget könamn givet i queue-blocknamn" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:565 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:600 msgid "advertisedvo parameter is empty" msgstr "advertisedvo-parametern är tom" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:117 #, c-format msgid "\tSession root dir : %s" msgstr "\tSessionsrotkat : %s" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:118 #, c-format msgid "\tControl dir : %s" msgstr "\tKontrollkatalog : %s" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:119 #, c-format msgid "\tdefault LRMS : %s" msgstr "\tförvalt LRMS : %s" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:120 #, c-format msgid "\tdefault queue : %s" msgstr "\tförvald kö : %s" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:121 #, c-format msgid "\tdefault ttl : %u" msgstr "\tförvald ttl : %u" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:126 msgid "No valid caches found in configuration, caching is disabled" msgstr "Hittade inga giltiga cachar i inställningar, cachning är avstängd" #: 
src/services/a-rex/grid-manager/conf/GMConfig.cpp:131 #, c-format msgid "\tCache : %s" msgstr "\tCache : %s" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:133 #, c-format msgid "\tCache link dir : %s" msgstr "\tCachelänkkatalog : %s" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:136 #, c-format msgid "\tCache (read-only): %s" msgstr "\tCache (read-only): %s" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:138 msgid "\tCache cleaning enabled" msgstr "\tCacherensning påslagen" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:139 msgid "\tCache cleaning disabled" msgstr "\tCacherensning avstängd" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:327 msgid "" "Globus location variable substitution is not supported anymore. Please " "specify path directly." msgstr "" "Globus platsvariabelsubstitution stöds inte längre. Ange sökväg direkt." #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:35 msgid "Can't read configuration file" msgstr "Kan inte läsa inställningsfil" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:41 #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:29 msgid "Can't recognize type of configuration file" msgstr "Känner inte igen typ av inställningsfil" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:47 msgid "Configuration error" msgstr "Inställningsfel" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:77 msgid "Bad number in maxdelivery" msgstr "Felaktigt nummer i maxdelivery" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:83 msgid "Bad number in maxemergency" msgstr "Felaktigt nummer i maxemergency" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:89 msgid "Bad number in maxprocessor" msgstr "Felaktigt nummer i maxprocessor" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:95 msgid "Bad number in maxprepared" msgstr "Felaktigt nummer i maxprepared" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:101 msgid "Bad number in maxtransfertries" msgstr "Felaktigt nummer i maxtransfertries" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:112 msgid "Bad number in speedcontrol" msgstr "Felaktigt nummer i speedcontrol" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:123 #, c-format msgid "Bad number in definedshare %s" msgstr "Felaktigt nummer i definedshare %s" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:132 #, c-format msgid "Bad URL in deliveryservice: %s" msgstr "Felaktig URL i leveranstjänsten: %s" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:143 msgid "Bad number in remotesizelimit" msgstr "Felaktigt nummer i remotesizelimit" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:168 msgid "Bad value for loglevel" msgstr "Felaktigt värde för loglevel" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:182 msgid "Bad URL in acix_endpoint" msgstr "Felaktig URL i acix_endpoint" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:24 msgid "Can't open configuration file" msgstr "Kan inte öppna inställningsfil" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:45 msgid "Not enough parameters in copyurl" msgstr "Ej tillräckligt antal parametrar i copyurl" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:54 msgid "Not enough parameters in linkurl" msgstr "Ej tillräckligt antal parametrar i linkurl" #: src/services/a-rex/grid-manager/files/ControlFileContent.cpp:179 #, c-format msgid "Wrong directory in %s" msgstr "Fel katalog i %s" #: src/services/a-rex/grid-manager/files/ControlFileHandling.cpp:102 #, 
c-format msgid "Failed setting file owner: %s" msgstr "Misslyckades med ange filägare: %s" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:33 msgid "gm-delegations-converter changes format of delegation database." msgstr "gm-delegations-converter ändrar format pÃ¥ delegeringsdatabasen." #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:38 #: src/services/a-rex/grid-manager/gm_jobs.cpp:110 #: src/services/a-rex/grid-manager/gm_kick.cpp:24 msgid "use specified configuration file" msgstr "använd särskild inställningsfil" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:39 #: src/services/a-rex/grid-manager/gm_jobs.cpp:111 #: src/services/a-rex/grid-manager/gm_kick.cpp:25 msgid "file" msgstr "fil" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:43 #: src/services/a-rex/grid-manager/gm_jobs.cpp:115 msgid "read information from specified control directory" msgstr "läs information frÃ¥n angiven kontrollkatalog" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:44 #: src/services/a-rex/grid-manager/gm_jobs.cpp:116 msgid "dir" msgstr "katalog" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:48 msgid "convert from specified input database format [bdb|sqlite]" msgstr "konvertera frÃ¥n angivet databasformat [bdb|sqlite]" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:49 #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:54 msgid "database format" msgstr "databasformat" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:53 msgid "convert into specified output database format [bdb|sqlite]" msgstr "konvertera till angivet databasformat [bdb|sqlite]" #: src/services/a-rex/grid-manager/gm_jobs.cpp:36 #, c-format msgid "Could not read data staging configuration from %s" msgstr "Kunde inte läsa datastaginginställningar frÃ¥n %s" #: src/services/a-rex/grid-manager/gm_jobs.cpp:44 #, c-format msgid "Can't read transfer states from %s. Perhaps A-REX is not running?" msgstr "Kan inte läsa överföringstillstÃ¥nd frÃ¥n %s. Kanske kör inte A-REX?" #: src/services/a-rex/grid-manager/gm_jobs.cpp:100 msgid "gm-jobs displays information on current jobs in the system." msgstr "gm-jobs visar information om nuvarande jobb i systemet." 
#: src/services/a-rex/grid-manager/gm_jobs.cpp:105 msgid "display more information on each job" msgstr "visa mer information om varje jobb" #: src/services/a-rex/grid-manager/gm_jobs.cpp:120 msgid "print summary of jobs in each transfer share" msgstr "skriv ut sammanfattning av jobb i varje överföringsandel" #: src/services/a-rex/grid-manager/gm_jobs.cpp:125 msgid "do not print list of jobs" msgstr "skriv inte ut jobblista" #: src/services/a-rex/grid-manager/gm_jobs.cpp:130 msgid "do not print number of jobs in each state" msgstr "skriv inte ut antal jobb i varje tillstånd" #: src/services/a-rex/grid-manager/gm_jobs.cpp:135 msgid "print state of the service" msgstr "skriv ut tjänstens tillstånd" #: src/services/a-rex/grid-manager/gm_jobs.cpp:140 msgid "show only jobs of user(s) with specified subject name(s)" msgstr "visa endast jobb som ägs av användare med angiv(et/na) subjektnamn" #: src/services/a-rex/grid-manager/gm_jobs.cpp:141 #: src/services/a-rex/grid-manager/gm_jobs.cpp:151 #: src/services/a-rex/grid-manager/gm_jobs.cpp:161 msgid "dn" msgstr "dn" #: src/services/a-rex/grid-manager/gm_jobs.cpp:145 msgid "request to cancel job(s) with specified ID(s)" msgstr "begär att avbryta jobb med angiv(et/na) ID" #: src/services/a-rex/grid-manager/gm_jobs.cpp:146 #: src/services/a-rex/grid-manager/gm_jobs.cpp:156 #: src/services/a-rex/grid-manager/gm_jobs.cpp:166 #: src/services/a-rex/grid-manager/gm_jobs.cpp:176 #: src/services/a-rex/grid-manager/gm_kick.cpp:30 msgid "id" msgstr "id" #: src/services/a-rex/grid-manager/gm_jobs.cpp:150 msgid "" "request to cancel jobs belonging to user(s) with specified subject name(s)" msgstr "" "begär att avbryta jobb som ägs av användare med angiv(et/na) subjektnamn" #: src/services/a-rex/grid-manager/gm_jobs.cpp:155 msgid "request to clean job(s) with specified ID(s)" msgstr "begär att ta bort jobb med angiv(et/na) ID" #: src/services/a-rex/grid-manager/gm_jobs.cpp:160 msgid "" "request to clean jobs belonging to user(s) with specified subject name(s)" msgstr "" "begär att ta bort jobb som ägs av användare med angiv(et/na) subjektnamn" #: src/services/a-rex/grid-manager/gm_jobs.cpp:165 msgid "show only jobs with specified ID(s)" msgstr "visa endast jobb med angiv(et/na) ID" #: src/services/a-rex/grid-manager/gm_jobs.cpp:170 msgid "print list of available delegation IDs" msgstr "skriv ut lista med tillgängliga delegerings-ID" #: src/services/a-rex/grid-manager/gm_jobs.cpp:175 msgid "print delegation token of specified ID(s)" msgstr "skriv ut delegeringstoken med angiv(et/na) ID" #: src/services/a-rex/grid-manager/gm_jobs.cpp:180 msgid "print main delegation token of specified Job ID(s)" msgstr "skriv ut huvuddelegeringstoken för angiv(et/na) jobb-id" #: src/services/a-rex/grid-manager/gm_jobs.cpp:181 msgid "job id" msgstr "jobb-id" #: src/services/a-rex/grid-manager/gm_jobs.cpp:185 msgid "" "output requested elements (jobs list, delegation ids and tokens) to file" msgstr "" "skriv ut begärda element (jobblista, delegerings-id och token) till fil" #: src/services/a-rex/grid-manager/gm_jobs.cpp:186 msgid "file name" msgstr "filnamn" #: src/services/a-rex/grid-manager/gm_jobs.cpp:209 #, c-format msgid "Using configuration at %s" msgstr "Använder inställningar på %s" #: src/services/a-rex/grid-manager/gm_jobs.cpp:232 #, c-format msgid "Failed to open output file '%s'" msgstr "Misslyckades med att öppna utdatafil '%s'" #: src/services/a-rex/grid-manager/gm_jobs.cpp:241 msgid "Looking for current jobs" msgstr "Letar efter nuvarande jobb" #: 
src/services/a-rex/grid-manager/gm_jobs.cpp:278 #, c-format msgid "Job: %s : ERROR : Unrecognizable state" msgstr "Jobb: %s : Fel : Okänt tillstånd" #: src/services/a-rex/grid-manager/gm_jobs.cpp:287 #, c-format msgid "Job: %s : ERROR : No local information." msgstr "Jobb: %s : Fel : Ingen lokal information." #: src/services/a-rex/grid-manager/gm_jobs.cpp:461 #, c-format msgid "Job: %s : ERROR : Failed to put cancel mark" msgstr "Jobb: %s : Fel : Misslyckades med att sätta avbrytsmarkering" #: src/services/a-rex/grid-manager/gm_jobs.cpp:465 #, c-format msgid "Job: %s : Cancel request put but failed to communicate to service" msgstr "" "Jobb: %s : Begäran att avbryta satt men misslyckades att meddela tjänsten" #: src/services/a-rex/grid-manager/gm_jobs.cpp:467 #, c-format msgid "Job: %s : Cancel request put and communicated to service" msgstr "Jobb: %s : Begäran att avbryta satt och meddelad till tjänsten" #: src/services/a-rex/grid-manager/gm_jobs.cpp:478 #, c-format msgid "Job: %s : ERROR : Failed to put clean mark" msgstr "Jobb: %s : Fel : Misslyckades med att sätta borttagningsmarkering" #: src/services/a-rex/grid-manager/gm_jobs.cpp:482 #, c-format msgid "Job: %s : Clean request put but failed to communicate to service" msgstr "" "Jobb: %s : Begäran om borttagning satt men misslyckades att meddela tjänsten" #: src/services/a-rex/grid-manager/gm_jobs.cpp:484 #, c-format msgid "Job: %s : Clean request put and communicated to service" msgstr "Jobb: %s : Begäran om borttagning satt och meddelad till tjänsten" #: src/services/a-rex/grid-manager/gm_kick.cpp:18 msgid "" "gm-kick wakes up the A-REX corresponding to the given control file. If no " "file is given it uses the control directory found in the configuration file." msgstr "" "gm-kick väcker den A-REX som motsvarar den angivna kontrollfilen. Om ingen " "fil anges används kontrollkatalogen som hittas i inställningsfilen." #: src/services/a-rex/grid-manager/gm_kick.cpp:29 msgid "inform about changes in particular job (can be used multiple times)" msgstr "informera om ändringar i enstaka jobb (kan användas mer än en gång)" #: src/services/a-rex/grid-manager/inputcheck.cpp:39 #, c-format msgid "Failed to acquire source: %s" msgstr "Misslyckades med att erhålla källa: %s" #: src/services/a-rex/grid-manager/inputcheck.cpp:44 #, c-format msgid "Failed to resolve %s" msgstr "Misslyckades med att slå upp %s" #: src/services/a-rex/grid-manager/inputcheck.cpp:61 #, c-format msgid "Failed to check %s" msgstr "Misslyckades med att kontrollera %s" #: src/services/a-rex/grid-manager/inputcheck.cpp:75 msgid "job_description_file [proxy_file]" msgstr "jobbeskrivningsfil [proxyfil]" #: src/services/a-rex/grid-manager/inputcheck.cpp:76 msgid "" "inputcheck checks that input files specified in the job description are " "available and accessible using the credentials in the given proxy file." msgstr "" "inputcheck kontrollerar att indatafiler som angivits i jobbeskrivningen är " "tillgängliga och åtkomliga när referenserna i den givna proxyfilen används." 
#: src/services/a-rex/grid-manager/inputcheck.cpp:88 msgid "Wrong number of arguments given" msgstr "Fel antal argument angivna" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:143 #, c-format msgid "Unsupported value for allownew: %s" msgstr "Värde för allownew stöds inte: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:154 msgid "Wrong number in maxjobdesc" msgstr "Felaktigt nummer i maxjobdesc" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:162 #: src/services/gridftpd/fileplugin/fileplugin.cpp:186 #, c-format msgid "Unsupported configuration command: %s" msgstr "Inställningskommando stöds inte: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:171 #, c-format msgid "Mapped user:group (%s:%s) not found" msgstr "Hittade inte mappad användare:grupp (%s:%s)" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:174 msgid "Job submission user can't be root" msgstr "Jobbinsändningsanvändare kan inte vara root" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:177 msgid "Failed processing A-REX configuration" msgstr "Misslyckades med att processera A-REX-inställningar" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:201 msgid "This user is denied to submit new jobs." msgstr "Denna användare nekas att sända in nya jobb." #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:206 msgid "No control or session directories defined in configuration" msgstr "Inga kontroll- eller sessionskataloger definierade i inställningar" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:210 #, c-format msgid "Job submission user: %s (%i:%i)" msgstr "Jobbinsändningsanvändare: %s (%i:%i)" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:213 msgid "Job plugin was not initialised" msgstr "Jobbplugin har inte initierats" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:231 msgid "No delegated credentials were passed" msgstr "Inga delegerade referenser skickades med" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:305 #, c-format msgid "Cancelling job %s" msgstr "Avbryter jobb %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:360 #, c-format msgid "Cleaning job %s" msgstr "Tar bort jobb %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:400 msgid "Request to open file with storing in progress" msgstr "Begäran att öppna fil medan den håller på att tas emot" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:434 #: src/services/gridftpd/fileplugin/fileplugin.cpp:344 #, c-format msgid "Retrieving file %s" msgstr "Hämtar fil %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:484 #, c-format msgid "Accepting submission of new job or modification request: %s" msgstr "Accepterar insändning av nytt jobb eller modifieringsbegäran: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:506 #: src/services/gridftpd/fileplugin/fileplugin.cpp:384 #: src/services/gridftpd/fileplugin/fileplugin.cpp:421 #, c-format msgid "Storing file %s" msgstr "Lagrar fil %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:527 #, c-format msgid "Unknown open mode %i" msgstr "Okänd öppningsmode %i" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:653 #, c-format msgid "action(%s) != request" msgstr "handling(%s) != begäran" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:704 msgid "Failed writing job description" msgstr "Misslyckades med att skriva jobbeskrivning" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:920 msgid "Failed 
writing local description" msgstr "Misslyckades med att skriva lokal beskrivning" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:930 msgid "Failed writing ACL" msgstr "Misslyckades med att skriva ACL" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:946 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:953 #: src/services/a-rex/job.cpp:819 #, c-format msgid "Failed to run external plugin: %s" msgstr "Misslyckades med att köra extern plugin: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:957 #: src/services/a-rex/job.cpp:823 #, c-format msgid "Plugin response: %s" msgstr "Pluginsvar: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:959 msgid "Failed to run external plugin" msgstr "Misslyckades med att köra extern plugin" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:969 #, c-format msgid "Failed to create session directory %s" msgstr "Misslyckades med att skapa sessionskatalog %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:979 msgid "Failed writing status" msgstr "Misslyckades med att skriva status" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:993 #, c-format msgid "Failed to lock delegated credentials: %s" msgstr "Misslyckades med att lÃ¥sa delegerade referenser: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1235 #, c-format msgid "Renewing proxy for job %s" msgstr "Förnyas proxy för jobb %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1251 #, c-format msgid "New proxy expires at %s" msgstr "Livstid för ny proxy gÃ¥r ut %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1254 msgid "Failed to write 'local' information" msgstr "Misslyckades med att skriva lokal information" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1257 msgid "Failed to renew proxy" msgstr "Misslyckades med att förnya proxy" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1260 msgid "New proxy expiry time is not later than old proxy, not renewing proxy" msgstr "" "Den nya proxyns livslängd är inte längre än den gamla proxyns, förnyar inte " "proxy" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1302 #, c-format msgid "Checking file %s" msgstr "Kontrollerar fil %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1349 msgid "ID contains forbidden characters" msgstr "ID innehÃ¥ller förbjudet tecken" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1383 #: src/services/a-rex/job.cpp:1023 #, c-format msgid "Failed to create file in %s" msgstr "Misslyckades med att skapa fil i %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1393 msgid "Out of tries while allocating new job ID" msgstr "Slut pÃ¥ försök vid allokering av nytt jobb-id" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1473 #, c-format msgid "Failed to read job's local description for job %s from %s" msgstr "" "Misslyckades med att läsa jobbets lokala beskrivning för jobb %s frÃ¥n %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1562 msgid "No non-draining session directories available" msgstr "Ingen non-draining sessionskatalog tillgänglig" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1568 #, c-format msgid "Using control directory %s" msgstr "Använder kontrollkatalog %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1569 #, c-format msgid "Using session directory %s" msgstr "Använder sessionskatalog %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:26 #, c-format msgid "Failed to 
read job's ACL for job %s from %s" msgstr "Misslyckades med att läsa jobbets ACL för jobb %s frÃ¥n %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:70 #, c-format msgid "Failed to parse user policy for job %s" msgstr "Misslyckades med att tolka användarpolicy för jobb %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:75 #, c-format msgid "Failed to load policy evaluator for policy of job %s" msgstr "Misslyckades med ladda in policyutvärderare för policy för jobb %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:129 #, c-format msgid "Unknown ACL policy %s for job %s" msgstr "Okänd ACL-policy %s för jobb %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:73 #, c-format msgid "" "DTR Generator waiting to process: %d jobs to cancel, %d DTRs, %d new jobs" msgstr "" "DTR-generator väntat pÃ¥ att processera: %d jobb att avbryta, %d DTRer, %d " "nya jobb" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:87 #, c-format msgid "%s: Job cancel request from DTR generator to scheduler" msgstr "%s: Begäran att avbryta jobb frÃ¥n DTR-generator till schemaläggare" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:92 #, c-format msgid "%s: Returning canceled job from DTR generator" msgstr "%s: Returnerar avbrutet jobb frÃ¥n DTR-generator" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:133 #, c-format msgid "%s: Re-requesting attention from DTR generator" msgstr "%s: Begär uppmärksamhet frÃ¥n DTR-generator igen" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:143 #, c-format msgid "DTR Generator processed: %d jobs to cancel, %d DTRs, %d new jobs" msgstr "DTR-generator processerade: %d jobb att avbryta, %d DTRer, %d nya jobb" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:162 msgid "Exiting Generator thread" msgstr "Avslutar generator-trÃ¥d" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:234 msgid "Shutting down data staging threads" msgstr "Stänger ner datastaging-trÃ¥dar" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:244 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:257 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:285 msgid "DTRGenerator is not running!" 
msgstr "DTR-generator kör inte" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:247 #, c-format msgid "Received DTR %s during Generator shutdown - may not be processed" msgstr "Mottog DTR %s under generatoravstängning - kan inte processeras" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:261 msgid "DTRGenerator was sent null job" msgstr "DTR-generator blev tillsänd null-jobb" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:270 #, c-format msgid "%s: Received job in DTR generator" msgstr "%s: Mottog jobb i DTR-generator" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:273 #, c-format msgid "%s: Failed to receive job in DTR generator" msgstr "%s: Misslyckades med att motta jobb i DTR-generator" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:280 msgid "DTRGenerator got request to cancel null job" msgstr "DTR-generator fick begäran att avsluta null-jobb" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:295 msgid "DTRGenerator is queried about null job" msgstr "DTR-generator fick frÃ¥ga om null-jobb" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:325 msgid "DTRGenerator is asked about null job" msgstr "DTR-generator fick frÃ¥ga om null-jobb" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:353 msgid "DTRGenerator is requested to remove null job" msgstr "DTR-generator fick begäran att ta bort null-jobb" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:360 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:368 #, c-format msgid "%s: Trying to remove job from data staging which is still active" msgstr "" "%s: Försöker att ta bort jobb frÃ¥n datastaging som fortfarande är aktivt" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:376 #, c-format msgid "%s: Trying remove job from data staging which does not exist" msgstr "%s: Försöker att ta bort jobb frÃ¥n datastaging som inte existerar" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:387 #, c-format msgid "%s: Invalid DTR" msgstr "%s: Ogiltig DTR" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:404 #, c-format msgid "%s: Received DTR %s to copy file %s in state %s" msgstr "%s: Mottog DTR %s att kopiera fil %s i tillstÃ¥nd %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:408 #, c-format msgid "%s: Received DTR belongs to inactive job" msgstr "%s: Mottagen DTR tillhör inaktivt jobb" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:425 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1067 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:459 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:517 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:631 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:841 #, c-format msgid "%s: Failed reading local information" msgstr "%s: Misslyckades med att läsa lokal information" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:434 #, c-format msgid "%s: DTR %s to copy file %s failed" msgstr "%s: DTR %s att kopiera fil %s misslyckades" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:440 #, c-format msgid "%s: Cancelling other DTRs" msgstr "%s: Avbryter övriga DTRer" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:450 #, c-format msgid "%s: DTR %s to copy to %s failed but is not mandatory" msgstr "%s: DTR %s att kopiera till %s misslyckades men är inte obligatorisk" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:460 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:722 #, c-format msgid "%s: Failed to read list of output files" 
msgstr "%s: Misslyckades med att läsa lista med utdatafiler" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:474 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:615 #, c-format msgid "%s: Failed to read dynamic output files in %s" msgstr "%s: Misslyckades med att läsa dynamiska utdatafiler i %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:476 #, c-format msgid "%s: Going through files in list %s" msgstr "%s: GÃ¥r igenom filer i lista %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:480 #, c-format msgid "%s: Removing %s from dynamic output file %s" msgstr "%s: Tar bort %s frÃ¥n dynamisk utdatafil %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:484 #, c-format msgid "%s: Failed to write back dynamic output files in %s" msgstr "%s: Misslyckades med att skriva tillbaka dynamiska utdatafiler i %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:500 #, c-format msgid "%s: Failed to write list of output files" msgstr "%s: Misslyckades med att skriva lista med utdatafiler." #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:504 #, c-format msgid "%s: Failed to write list of output status files" msgstr "%s: Misslyckades med att skriva lista med utdatastatusfiler." #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:516 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:734 #, c-format msgid "%s: Failed to read list of input files" msgstr "%s: Misslyckades med att läsa lista med indatafiler" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:535 #, c-format msgid "%s: Failed to write list of input files" msgstr "%s: Misslyckades med att skriva lista med indatafiler" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:547 #, c-format msgid "%s: Received DTR with two remote endpoints!" msgstr "%s: Mottog DTR med tvÃ¥ fjärrändpunkter!" 
#: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:559 #: src/services/candypond/CandyPondGenerator.cpp:105 #, c-format msgid "No active job id %s" msgstr "Inget aktivt jobb-id: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:603 #, c-format msgid "%s: Failed to read list of output files, can't clean up session dir" msgstr "" "%s: Misslyckades med att läsa lista med utdatafiler, kan inte rensa upp " "sessionskatalog" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:629 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:648 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:772 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:895 #, c-format msgid "%s: Failed to clean up session dir" msgstr "%s: Misslyckades med att rensa upp sessionskatalog" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:639 #, c-format msgid "%s: Failed to read list of input files, can't clean up session dir" msgstr "" "%s: Misslyckades med att läsa lista med indatafiler, kan inte rensa upp " "sessionskatalog" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:661 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:665 msgid "uploads" msgstr "uppladdningar" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:661 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:665 msgid "downloads" msgstr "nedladdningar" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:662 msgid "cancelled" msgstr "avbröts" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:662 msgid "finished" msgstr "avslutade" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:660 #, c-format msgid "%s: All %s %s successfully" msgstr "%s: Alla %s %s framgångsrikt" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:664 #, c-format msgid "%s: Some %s failed" msgstr "%s: Några %s misslyckades" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:668 #, c-format msgid "%s: Requesting attention from DTR generator" msgstr "%s: Begär uppmärksamhet från DTR-generator" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:679 msgid "DTRGenerator is requested to process null job" msgstr "DTR-generator fick begäran att processera null-jobb" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:685 msgid "download" msgstr "ladda ner" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:685 msgid "upload" msgstr "ladda upp" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:684 #, c-format msgid "%s: Received data staging request to %s files" msgstr "%s: Mottog datastagingbegäran att %s filer" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:743 #, c-format msgid "%s: Duplicate file in list of input files: %s" msgstr "%s: Duplikatfil i lista med indatafiler: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:792 #, c-format msgid "%s: Reading output files from user generated list in %s" msgstr "%s: Läser utdatafiler från användargenererad lista i %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:794 #, c-format msgid "%s: Error reading user generated output file list in %s" msgstr "%s: Fel vid läsning av användargenererad lista med utdatafiler i %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:821 #, c-format msgid "%s: Failed to list output directory %s: %s" msgstr "%s: Misslyckades med att lista utdatakatalog %s: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:839 #, c-format msgid "%s: Adding new output file %s: %s" msgstr "%s: Lägger till ny utdatafil %s: %s" #: 
src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:861 #, c-format msgid "%s: Two identical output destinations: %s" msgstr "%s: Två identiska utdatadestinationer: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:874 #, c-format msgid "%s: Cannot upload two different files %s and %s to same LFN: %s" msgstr "%s: Kan inte ladda upp två olika filer %s och %s till samma LFN: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:906 #, c-format msgid "%s: Received job in a bad state: %s" msgstr "%s: Mottog jobb i ett dåligt tillstånd: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:914 #, c-format msgid "%s: Session directory processing takes too long - %u.%06u seconds" msgstr "%s: Sessionskatalogsprocessering tar för lång tid - %u.%06u sekunder" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:981 #, c-format msgid "" "%s: Destination file %s was possibly left unfinished from previous A-REX " "run, will overwrite" msgstr "" "%s: Destinationsfil %s lämnades möjligen oavslutad från tidigare A-REX-" "körning, kommer att skriva över" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1073 #, c-format msgid "%s: Failed writing local information" msgstr "%s: Misslyckades med att skriva lokal information" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1091 #, c-format msgid "%s: Cancelling active DTRs" msgstr "%s: Avbryter aktiva DTRer" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1098 msgid "DTRGenerator is asked to check files for null job" msgstr "DTR-generator fick förfrågan att kontrollera filer för null-jobb" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1118 #, c-format msgid "%s: Can't read list of input files" msgstr "%s: Kan inte läsa lista med indatafiler" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1133 #, c-format msgid "%s: Checking user uploadable file: %s" msgstr "%s: Kontrollerar användaruppladdningsbar fil: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1138 #, c-format msgid "%s: User has uploaded file %s" msgstr "%s: Användare har laddat upp fil %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1145 #, c-format msgid "%s: Failed writing changed input file." msgstr "%s: Misslyckades med att skriva ändrad indatafil." 
#: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1149 #, c-format msgid "%s: Critical error for uploadable file %s" msgstr "%s: Kritiskt fel för uppladdningsbar fil %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1155 #, c-format msgid "%s: User has NOT uploaded file %s" msgstr "%s: Användare har INTE laddat upp fil %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1167 #, c-format msgid "%s: Uploadable files timed out" msgstr "%s: Uppladdningsbara filer avbröts på grund av timeout" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1223 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1249 #, c-format msgid "%s: Can't convert checksum %s to int for %s" msgstr "%s: Kan inte konvertera checksumma %s till heltal för %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1230 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1244 #, c-format msgid "%s: Can't convert filesize %s to int for %s" msgstr "%s: Kan inte konvertera filstorlek %s till heltal för %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1239 #, c-format msgid "%s: Invalid size/checksum information (%s) for %s" msgstr "%s: Ogiltig storlek/checksumma information (%s) för %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1261 #, c-format msgid "%s: Invalid file: %s is too big." msgstr "%s: Ogiltig fil: %s är för stor." #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1277 #, c-format msgid "%s: Failed to switch user ID to %d/%d to read file %s" msgstr "" "%s: Misslyckades med att byta användar-id till %d/%d för att läsa fil %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1283 #, c-format msgid "%s: Failed to open file %s for reading" msgstr "%s: Misslyckades med att öppna fil %s för läsning" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1291 #, c-format msgid "%s: Error accessing file %s" msgstr "%s: Fel vid åtkomst för fil %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1303 #, c-format msgid "%s: Error reading file %s" msgstr "%s: Fel vid läsning av fil %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1318 #, c-format msgid "%s: File %s has wrong checksum: %llu. Expected %lli" msgstr "%s: Fil %s har felaktig checksumma: %llu. Förväntade %lli" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1324 #, c-format msgid "%s: Checksum %llu verified for %s" msgstr "%s: Checksumma %llu verifierad för %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1336 msgid "" "Found unfinished DTR transfers. It is possible the previous A-REX process " "did not shut down normally" msgstr "" "Hittade oavslutade DTR-överföringar. 
Det är möjligt att en tidigare A-REX-" "process inte stängde ned på normalt sätt" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1343 #, c-format msgid "Found DTR %s for file %s left in transferring state from previous run" msgstr "" "Hittade DTR %s för fil %s kvarlämnad i överförande tillstånd från tidigare " "körning" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1352 msgid "DTRGenerator is requested to clean links for null job" msgstr "DTR-generator fick begäran att ta bort länkar för null-jobb" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1368 #, c-format msgid "%s: Cache cleaning takes too long - %u.%06u seconds" msgstr "%s: Cacherensning tar för lång tid - %u.%06u sekunder" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:108 #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:190 #, c-format msgid "%s: Job monitoring counter is broken" msgstr "%s: Jobbmonitoreringsräknare är trasig" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:115 #, c-format msgid "%s: Job monitoring is unintentionally lost" msgstr "%s: Jobbmonitorering har oavsiktligt förlorats" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:124 #, c-format msgid "%s: Job monitoring stop success" msgstr "%s: Jobbmonitorering avslutades" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:129 #, c-format msgid "" "%s: Job monitoring stop requested with %u active references and %s queue " "associated" msgstr "" "%s: Avslutande av jobbmonitorering begärd med %u aktiva referenser och kön " "%s associerad" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:131 #, c-format msgid "%s: Job monitoring stop requested with %u active references" msgstr "%s: Avslutande av jobbmonitorering begärd med %u aktiva referenser" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:195 #, c-format msgid "%s: Job monitoring is lost due to removal from queue" msgstr "%s: Jobbmonitorering förlorad på grund av borttagande från kö" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:278 #, c-format msgid "%s: PushSorted failed to find job where expected" msgstr "%s: PushSorted misslyckades med att hitta jobb där det förväntades" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:132 #, c-format msgid "Replacing queue '%s' with '%s'" msgstr "Byter ut kö '%s' mot '%s'" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:226 #, c-format msgid "Bad name for stdout: %s" msgstr "Felaktigt namn för stdout: %s" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:234 #, c-format msgid "Bad name for stderr: %s" msgstr "Felaktigt namn för stderr: %s" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:297 #, c-format msgid "Bad name for runtime environment: %s" msgstr "Felaktigt namn för runtime-miljö: %s" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:342 msgid "Job description file could not be read." msgstr "Jobbeskrivningsfil kunde inte läsas." 
#: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:393 #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:407 #, c-format msgid "Bad name for executable: %s" msgstr "Felaktigt namn för executable: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:89 msgid "Failed to start data staging threads" msgstr "Misslyckades med att starta datastaging-trådar" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:190 #, c-format msgid "" "%s: Failed reading .local and changing state, job and A-REX may be left in " "an inconsistent state" msgstr "" "%s: Misslyckades med att läsa .local och att ändra tillstånd, jobb och A-REX " "kan lämnas i ett motsägande tillstånd" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:195 #, c-format msgid "%s: unexpected failed job add request: %s" msgstr "%s: oväntad begäran att lägga till misslyckat jobb: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:206 #, c-format msgid "%s: unexpected job add request: %s" msgstr "%s: oväntad begäran att lägga till jobb: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:259 #, c-format msgid "%s: job for attention" msgstr "%s: jobb för uppmärksamhet" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:269 msgid "all for attention" msgstr "alla för uppmärksamhet" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:286 #, c-format msgid "%s: job found while scanning" msgstr "%s: jobb hittat vid skanning" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:314 #, c-format msgid "%s: job will wait for external process" msgstr "%s: jobb kommer att vänta på extern process" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:331 #, c-format msgid "%s: job assigned for slow polling" msgstr "%s: jobb tilldelat för långsam utfrågning" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:349 #, c-format msgid "%s: job being processed" msgstr "%s: jobb processeras" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:384 #, c-format msgid "Current jobs in system (PREPARING to FINISHING) per-DN (%i entries)" msgstr "" "Nuvarande jobb i systemet (PREPARING till FINISHING) per-DN (%i poster)" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:386 #, c-format msgid "%s: %i" msgstr "%s: %i" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:398 #, c-format msgid "%s: Failed storing failure reason: %s" msgstr "%s: Misslyckades med att lagra felorsak: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:404 #, c-format msgid "%s: Failed reading job description: %s" msgstr "%s: Misslyckades med att läsa jobbeskrivning: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:416 #, c-format msgid "%s: Failed parsing job request." msgstr "%s: Misslyckades med att tolka jobbegäran." 
#: src/services/a-rex/grid-manager/jobs/JobsList.cpp:451 #, c-format msgid "%s: Failed writing list of output files: %s" msgstr "%s: Misslyckades med att skriva lista med utdatafiler: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:477 #, c-format msgid "%s: Failed obtaining lrms id" msgstr "%s: Misslyckades med att erhålla LRMS-id" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:491 #, c-format msgid "%s: Failed writing local information: %s" msgstr "%s: Misslyckades med att skriva lokal information: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:523 #, c-format msgid "%s: Failed creating grami file" msgstr "%s: Misslyckades med att skapa grami-fil" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:527 #, c-format msgid "%s: Failed setting executable permissions" msgstr "%s: Misslyckades med att sätta körbar åtkomsträttighet" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:535 #, c-format msgid "%s: state SUBMIT: starting child: %s" msgstr "%s: tillstånd SUBMIT: startar barnprocess: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:542 #, c-format msgid "%s: Failed running submission process" msgstr "%s: Misslyckades med att köra insändningsprocess" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:547 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:654 #, c-format msgid "%s: LRMS scripts limit of %u is reached - suspending submit/cancel" msgstr "" "%s: LRMS-skriptets gräns på %u är nådd - suspenderar insändning/avbrytande" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:563 #, c-format msgid "" "%s: Job submission to LRMS takes too long, but ID is already obtained. " "Pretending submission is done." msgstr "" "%s: Jobbinsändning till LRMS tar för lång tid, men ID har redan erhållits. " "Låtsas att insändning gjorts." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:570 #, c-format msgid "%s: Job submission to LRMS takes too long. Failing." msgstr "%s: Jobbinsändning till LRMS tar för lång tid. Misslyckas." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:579 #, c-format msgid "%s: state SUBMIT: child exited with code %i" msgstr "%s: tillstånd SUBMIT: barnprocess avslutades med kod %i" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:584 #, c-format msgid "%s: Job submission to LRMS failed" msgstr "%s: Jobbinsändning till LRMS misslyckades" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:605 #, c-format msgid "%s: state CANCELING: timeout waiting for cancellation" msgstr "%s: tillstånd CANCELING: timeout vid väntan på avbrytande" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:611 #, c-format msgid "%s: state CANCELING: job diagnostics collected" msgstr "%s: tillstånd CANCELING: jobbdiagnostik insamlad" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:639 #, c-format msgid "%s: state CANCELING: starting child: %s" msgstr "%s: tillstånd CANCELING: startar barnprocess: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:641 #, c-format msgid "%s: Job has completed already. No action taken to cancel" msgstr "%s: Jobb har redan slutförts. Ingen åtgärd vidtagen för att avbryta" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:649 #, c-format msgid "%s: Failed running cancellation process" msgstr "%s: Misslyckades med att köra avbrytningsprocess" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:668 #, c-format msgid "" "%s: Job cancellation takes too long, but diagnostic collection seems to be " "done. Pretending cancellation succeeded." 
msgstr "" "%s: Avbrytande av jobb tar för lÃ¥ng tid, men diagnostikinsamling verkar ha " "gjorts. LÃ¥tsas att avbrytande lyckades." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:674 #, c-format msgid "%s: Job cancellation takes too long. Failing." msgstr "%s: Avbrytande av jobb tar för lÃ¥ng tid. Misslyckas." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:684 #, c-format msgid "%s: state CANCELING: child exited with code %i" msgstr "%s: tillstÃ¥nd CANCELING: barnprocess avslutades med kod %i" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:690 #, c-format msgid "%s: Failed to cancel running job" msgstr "%s: Misslyckades med att avbryta körande jobb" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:709 #, c-format msgid "%s: State: %s: data staging finished" msgstr "%s: tillstÃ¥nd: %s: datastaging avslutad" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:744 #, c-format msgid "%s: State: %s: still in data staging" msgstr "%s: tillstÃ¥nd: %s: fortfarande i datastaging" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:757 #, c-format msgid "%s: Job is not allowed to be rerun anymore" msgstr "%s: Jobb har inte rätt att startas om längre" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:767 #, c-format msgid "%s: Job failed in unknown state. Won't rerun." msgstr "%s: Jobbet misslyckades i okänt tillstÃ¥nd. Kommer ej att starta om." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:788 #, c-format msgid "%s: Reprocessing job description failed" msgstr "%s: Omprocessering av jobbeskrivning misslyckades." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:795 #, c-format msgid "%s: Failed to read reprocessed list of output files" msgstr "%s: Misslyckades med att läsa omprocesserad lista med utdatafiler" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:799 #, c-format msgid "%s: Failed to read reprocessed list of input files" msgstr "%s: Misslyckades med att läsa omprocesserad lista med indatafiler" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:883 #, c-format msgid "%s: Reading status of new job failed" msgstr "%s: Läsandet av det nya jobbets status misslyckades" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:896 #, c-format msgid "%s: State: ACCEPTED: parsing job description" msgstr "%s: TillstÃ¥nd: ACCEPTED: tolkar jobbeskrivning" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:898 #, c-format msgid "%s: Processing job description failed" msgstr "%s: Processering av jobbeskrivning misslyckades" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:936 #, c-format msgid "%s: new job is accepted" msgstr "%s: nytt jobb har accepterats" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:948 #, c-format msgid "%s: %s: New job belongs to %i/%i" msgstr "%s: %s: Nytt jobb tillhör %i/%i" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:953 #, c-format msgid "%s: old job is accepted" msgstr "%s: gammalt jobb har accepterats<" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:964 #, c-format msgid "%s: State: ACCEPTED" msgstr "%s: TillstÃ¥nd: ACCEPTED" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:970 #, c-format msgid "%s: State: ACCEPTED: dryrun" msgstr "%s: TillstÃ¥nd: ACCEPTED: dryrun" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:993 #, c-format msgid "%s: State: ACCEPTED: has process time %s" msgstr "%s: TillstÃ¥nd: ACCEPTED: har process-tid %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:999 #, c-format msgid "%s: State: ACCEPTED: moving to PREPARING" msgstr "%s: TillstÃ¥nd: ACCEPTED: flyttar till PREPARING" 
#: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1015 #, c-format msgid "%s: State: PREPARING" msgstr "%s: Tillstånd: PREPARING" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1022 #, c-format msgid "%s: Failed obtaining local job information." msgstr "%s: Misslyckades med att erhålla lokal jobbinformation." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1075 #, c-format msgid "%s: State: SUBMIT" msgstr "%s: Tillstånd: SUBMIT" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1095 #, c-format msgid "%s: State: CANCELING" msgstr "%s: Tillstånd: CANCELING" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1115 #, c-format msgid "%s: State: INLRMS" msgstr "%s: Tillstånd: INLRMS" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1120 #, c-format msgid "%s: State: INLRMS - checking for pending(%u) and mark" msgstr "%s: Tillstånd: INLRMS - letar efter pending(%u) och markerade" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1122 #, c-format msgid "%s: State: INLRMS - checking for not pending" msgstr "%s: Tillstånd: INLRMS - letar efter inte pending" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1124 #, c-format msgid "%s: Job finished" msgstr "%s: Jobbet avslutat" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1128 #, c-format msgid "%s: State: INLRMS: exit message is %i %s" msgstr "%s: Tillstånd: INLRMS: avslutningsmeddelande är %i %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1141 #, c-format msgid "%s: State: INLRMS - no mark found" msgstr "%s: Tillstånd: INLRMS - hittade ingen markering" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1153 #, c-format msgid "%s: State: FINISHING" msgstr "%s: Tillstånd: FINISHING" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1174 #, c-format msgid "%s: Job is requested to clean - deleting" msgstr "%s: Jobbet har fått begäran om att tas bort - tar bort" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1191 #, c-format msgid "%s: restarted PREPARING job" msgstr "%s: startade om PREPARING jobb" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1207 #, c-format msgid "%s: restarted INLRMS job" msgstr "%s: startade om INLRMS jobb" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1216 #, c-format msgid "%s: restarted FINISHING job" msgstr "%s: startade om FINISHING jobb" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1221 #, c-format msgid "%s: Can't rerun on request" msgstr "%s: Kan inte starta om på begäran" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1223 #, c-format msgid "%s: Can't rerun on request - not a suitable state" msgstr "%s: Kan inte starta om på begäran - inte ett lämpligt tillstånd" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1234 #, c-format msgid "%s: Job is too old - deleting" msgstr "%s: Jobbet är för gammalt - raderar" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1279 #, c-format msgid "%s: Job is ancient - delete rest of information" msgstr "%s: Jobbet är antikt - raderar resterande information" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1297 #, c-format msgid "%s: Canceling job because of user request" msgstr "%s: Avbryter jobb på grund av användarbegäran" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1311 #, c-format msgid "%s: Failed to turn job into failed during cancel processing." 
msgstr "" "%s: Misslyckades med sätta jobbet som misslyckat under " "avbrytningsprocessering" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1343 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1351 #, c-format msgid "%s: Plugin at state %s : %s" msgstr "%s: Plugin vid tillstÃ¥nd %s : %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1357 #, c-format msgid "%s: Plugin execution failed" msgstr "%s: Pluginexekvering misslyckades" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1464 #, c-format msgid "%s: State: %s from %s" msgstr "%s: TillstÃ¥nd: %s frÃ¥n %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1515 #, c-format msgid "Failed to get DN information from .local file for job %s" msgstr "Misslyckades med att fÃ¥ DN-information frÃ¥n .local-fil för jobb %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1542 #, c-format msgid "%s: Delete request due to internal problems" msgstr "%s: Radera begäran pÃ¥ grund av interna problem" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1577 #, c-format msgid "%s: Job failure detected" msgstr "%s: Jobbfel detekterat" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1630 #, c-format msgid "Failed to move file %s to %s" msgstr "Misslyckades med att flytta fil %s till %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1638 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1748 #, c-format msgid "Failed reading control directory: %s" msgstr "Misslyckades med att läsa kontrollkatalog: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1708 #, c-format msgid "Failed reading control directory: %s: %s" msgstr "Misslyckades med att läsa kontrollkatalog: %s: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2022 #, c-format msgid "Helper process start failed: %s" msgstr "Hjälpprocesstart misslyckades: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2029 #, c-format msgid "Stopping helper process %s" msgstr "Stoppar hjälpprocess: %s" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:61 #, c-format msgid "Error with hearbeatfile: %s" msgstr "Fel med hjärtslagsfil: %s" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:73 #: src/services/a-rex/grid-manager/log/JobsMetrics.cpp:139 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:136 #, c-format msgid ": Metrics tool returned error code %i: %s" msgstr ": Metrikverktyg returnerade felkod %i: %s" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:107 #: src/services/a-rex/grid-manager/log/JobsMetrics.cpp:186 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:178 msgid "" "gmetric_bin_path empty in arc.conf (should never happen the default value " "should be used)" msgstr "" "gmetric_bin_path tom i arc.conf (ska aldrig hända, det förvalda värdet ska " "användas)" #: src/services/a-rex/grid-manager/log/JobLog.cpp:114 msgid ": Accounting records reporter tool is not specified" msgstr ": Bokföringspostrapporteringsverktyg är inte angivet" #: src/services/a-rex/grid-manager/log/JobLog.cpp:130 msgid ": Failure creating slot for accounting reporter child process" msgstr ": Misslyckades med skapa slot för bokföringsrapporterings-barnprocess" #: src/services/a-rex/grid-manager/log/JobLog.cpp:143 msgid ": Failure starting accounting reporter child process" msgstr ": Misslyckande med att starta bokföringsrapporterings-barnprocess" #: src/services/a-rex/grid-manager/log/JobLog.cpp:176 msgid ": Failure creating accounting database connection" msgstr ": Misslyckades med att skapa bokföringsdatabasförbindelse" #: 
src/services/a-rex/grid-manager/log/JobLog.cpp:202 #, c-format msgid ": writing accounting record took %llu ms" msgstr ": skrivning av bokföringspost tog %llu ms" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:74 #, c-format msgid "Session dir '%s' contains user specific substitutions - skipping it" msgstr "" "Sessionskatalog '%s' innehåller användarspecifika substitutioner - hoppar " "över" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:86 #, c-format msgid "Sessiondir %s: Free space %f GB" msgstr "Sessionskatalog %s: Fritt utrymme %f GB" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:94 msgid "No session directories found in configuration." msgstr "Hittade ingen sessionskatalog i inställningar." #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:125 msgid "No cachedirs found/configured for calculation of free space." msgstr "" "Inga cachekataloger hittade/konfigurerade för beräkning av fritt utrymme." #: src/services/a-rex/grid-manager/mail/send_mail.cpp:29 msgid "Failed reading local information" msgstr "Misslyckades med att läsa lokal information" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:79 #, c-format msgid "Running mailer command (%s)" msgstr "Kör e-postsändar-kommando (%s)" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:81 msgid "Failed running mailer" msgstr "Misslyckades med att köra e-postsändare" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:33 #, c-format msgid "%s: Job's helper exited" msgstr "%s: Jobbets hjälpprogram avslutades" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:70 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:24 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:56 #, c-format msgid "%s: Failure creating slot for child process" msgstr "%s: Misslyckades med att skapa slot för barnprocess" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:119 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:41 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:73 #, c-format msgid "%s: Failure starting child process" msgstr "%s: Misslyckande med att starta barnprocess" #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:30 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:62 #, c-format msgid "%s: Failure creating data storage for child process" msgstr "%s: Misslyckades med att skapa datalagring för barnprocess" #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:46 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:78 #, c-format msgid "%s: Failure waiting for child process to finish" msgstr "%s: Misslyckande med att vänta på att barnprocess skall avslutas" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:46 msgid "[job description input]" msgstr "[jobbeskrivningsinput]" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:47 msgid "" "Tool for writing the grami file representation of a job description file." msgstr "" "Verktyg för att skriva grami-filrepresentationen av en jobbeskrivningsfil." 
#: src/services/a-rex/grid-manager/test_write_grami_file.cpp:51 msgid "Name of grami file" msgstr "Namn på grami-fil" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:56 msgid "Configuration file to load" msgstr "Inställningsfil att ladda in" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:57 msgid "arc.conf" msgstr "arc.conf" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:61 msgid "Session directory to use" msgstr "Sessionskatalog att använda" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:62 msgid "directory" msgstr "katalog" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:78 msgid "No job description file name provided." msgstr "Ingen jobbeskrivningsfil tillhandahölls." #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:84 #, c-format msgid "Unable to parse job description input: %s" msgstr "Kunde inte tolka jobbeskrivningsinput: %s" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:90 msgid "Unable to load ARC configuration file." msgstr "Kunde inte ladda in ARC-inställningsfil." #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:107 #, c-format msgid "Unable to write grami file: %s" msgstr "Kunde inte skriva grami-fil: %s" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:112 #, c-format msgid "Unable to write 'output' file: %s" msgstr "Kunde inte skriva 'output'-fil: %s" #: src/services/a-rex/information_collector.cpp:53 #, c-format msgid "Resource information provider: %s" msgstr "Resursinformationstillhandahållare: %s" #: src/services/a-rex/information_collector.cpp:56 msgid "Resource information provider failed to start" msgstr "Resursinformationstillhandahållare misslyckades med att starta" #: src/services/a-rex/information_collector.cpp:59 msgid "Resource information provider failed to run" msgstr "Resursinformationstillhandahållare misslyckades med att köra" #: src/services/a-rex/information_collector.cpp:63 #, c-format msgid "" "Resource information provider failed with exit status: %i\n" "%s" msgstr "" "Resursinformationstillhandahållare misslyckades med avslutningsstatus: %i\n" "%s" #: src/services/a-rex/information_collector.cpp:65 #, c-format msgid "" "Resource information provider log:\n" "%s" msgstr "" "Resursinformationstillhandahållarlogg:\n" "%s" #: src/services/a-rex/information_collector.cpp:71 msgid "No new informational document assigned" msgstr "Inget nytt informationsdokument tilldelat" #: src/services/a-rex/information_collector.cpp:73 #, c-format msgid "Obtained XML: %s" msgstr "Erhållen XML: %s" #: src/services/a-rex/information_collector.cpp:87 msgid "Informational document is empty" msgstr "Informationsdokument är tomt" #: src/services/a-rex/information_collector.cpp:212 msgid "OptimizedInformationContainer failed to create temporary file" msgstr "OptimizedInformationContainer misslyckades med att skapa temporär fil" #: src/services/a-rex/information_collector.cpp:215 #, c-format msgid "OptimizedInformationContainer created temporary file: %s" msgstr "OptimizedInformationContainer skapade temporär fil: %s" #: src/services/a-rex/information_collector.cpp:221 msgid "" "OptimizedInformationContainer failed to store XML document to temporary file" msgstr "" "OptimizedInformationContainer misslyckades med att lagra XML-dokument till " "temporär fil" #: src/services/a-rex/information_collector.cpp:230 msgid "OptimizedInformationContainer failed to parse XML" msgstr "OptimizedInformationContainer misslyckades med att tolka XML" #: 
src/services/a-rex/information_collector.cpp:242 msgid "OptimizedInformationContainer failed to rename temprary file" msgstr "" "OptimizedInformationContainer misslyckades med att byta namn på temporär fil" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:36 msgid "Default INTERNAL client contructor" msgstr "Förvald INTERNAL klient-konstruktor" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:39 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:59 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:81 msgid "Failed to load grid-manager configfile" msgstr "Misslyckades med att ladda in grid-managerns inställningsfil" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:44 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:64 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:86 msgid "Failed to set INTERNAL endpoint" msgstr "Misslyckades med att sätta INTERNAL ändpunkt" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:129 msgid "Failed to identify grid-manager config file" msgstr "Misslyckades med att identifiera grid-managerns inställningsfil" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:148 #, c-format msgid "Failed to run configuration parser at %s." msgstr "Misslyckades med att köra inställningstolk på %s." #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:152 #, c-format msgid "Parser failed with error code %i." msgstr "Tolk misslyckades med felkod %i." #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:158 #, c-format msgid "No pid file is found at '%s'. Probably A-REX is not running." msgstr "Hittade ingen pid-fil på '%s'. Troligen kör inte A-REX." #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:173 #, c-format msgid "Failed to load grid-manager config file from %s" msgstr "Misslyckades med att ladda in grid-managerns inställningsfil från %s" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:257 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:363 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:396 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:442 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:496 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:548 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:566 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:616 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:646 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:664 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:682 msgid "INTERNALClient is not initialized" msgstr "INTERNALClient är inte initierad" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:447 msgid "Submitting job " msgstr "Sänder in jobb " #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:508 #, c-format msgid "Failed to copy input file: %s to path: %s" msgstr "Misslyckades med att kopiera indatafil: %s till sökväg: %s" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:514 #, c-format msgid "Failed to set permissions on: %s" msgstr "Misslyckades med att sätta åtkomsträttigheter på: %s" #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:51 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:92 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:119 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:145 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:184 #: 
src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:246 msgid "Failed to load grid-manager config file" msgstr "Misslyckades med att ladda in grid-managerns inställningsfil" #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:324 msgid "Retrieving job description of INTERNAL jobs is not supported" msgstr "Att hämta jobbeskrivning för INTERNAL jobb stöds inte" #: src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp:67 #, c-format msgid "Listing localjobs succeeded, %d localjobs found" msgstr "Listning av lokala jobb lyckades, hittade %d lokala jobb" #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:130 msgid "Failed submitting job description" msgstr "Misslyckades med att skicka in jobbeskrivning" #: src/services/a-rex/job.cpp:66 #, c-format msgid "Using cached local account '%s'" msgstr "Använder cachat lokalt konto '%s'" #: src/services/a-rex/job.cpp:77 msgid "Will not map to 'root' account by default" msgstr "Kommer ej att mappa till 'root'-konto som förval" #: src/services/a-rex/job.cpp:90 msgid "No local account name specified" msgstr "Inget lokalt kontonamn angivet" #: src/services/a-rex/job.cpp:93 #, c-format msgid "Using local account '%s'" msgstr "Använder lokalt konto '%s'" #: src/services/a-rex/job.cpp:97 msgid "TLS provides no identity, going for OTokens" msgstr "TLS tillhandahåller ingen identitet, försöker med OTokens" #: src/services/a-rex/job.cpp:155 msgid "Failed to acquire A-REX's configuration" msgstr "Misslyckades med att förvärva A-REX inställningar" #: src/services/a-rex/job.cpp:227 #, c-format msgid "Cannot handle local user %s" msgstr "Kan inte hantera lokal användare %s" #: src/services/a-rex/job.cpp:275 #, c-format msgid "%s: Failed to parse user policy" msgstr "%s: Misslyckades med att tolka användarpolicy" #: src/services/a-rex/job.cpp:280 #, c-format msgid "%s: Failed to load evaluator for user policy " msgstr "%s: Misslyckades med att ladda in utvärderare för användarpolicy " #: src/services/a-rex/job.cpp:385 #, c-format msgid "%s: Unknown user policy '%s'" msgstr "%s: Okänd användarpolicy '%s'" #: src/services/a-rex/job.cpp:707 src/services/a-rex/job.cpp:731 #, c-format msgid "Credential expires at %s" msgstr "Referensens livslängd går ut %s" #: src/services/a-rex/job.cpp:709 src/services/a-rex/job.cpp:733 #, c-format msgid "Credential handling exception: %s" msgstr "Referenshanteringsundantag: %s" #: src/services/a-rex/job.cpp:1031 #, c-format msgid "Out of tries while allocating new job ID in %s" msgstr "Slut på försök vid allokering av nytt jobb-id i %s" #: src/services/a-rex/job.cpp:1270 msgid "No non-draining session dirs available" msgstr "Inga non-draining sessionskataloger tillgängliga" #: src/services/a-rex/put.cpp:150 #, c-format msgid "%s: put file %s: there is no payload" msgstr "%s: put fil %s: det finns ingen nyttolast" #: src/services/a-rex/put.cpp:156 #, c-format msgid "%s: put file %s: unrecognized payload" msgstr "%s: put fil %s: okänd nyttolast" #: src/services/a-rex/put.cpp:172 src/services/a-rex/rest/rest.cpp:1595 #, c-format msgid "%s: put file %s: failed to create file: %s" msgstr "%s: put fil %s: misslyckades med att skapa fil: %s" #: src/services/a-rex/put.cpp:188 #, c-format msgid "%s: put file %s: %s" msgstr "%s: put fil %s: %s" #: src/services/a-rex/put.cpp:210 #, c-format msgid "%s: delete file %s: failed to obtain file path: %s" msgstr "%s: ta bort fil %s: misslyckades med att erhålla filsökväg: %s" #: src/services/a-rex/put.cpp:221 #, c-format msgid "%s: delete file 
%s: failed to open file/dir: %s" msgstr "%s: ta bort fil %s: misslyckades med att öppna fil/katalog: %s" #: src/services/a-rex/rest/rest.cpp:683 #, c-format msgid "REST: process %s at %s" msgstr "REST: process %s på %s" #: src/services/a-rex/rest/rest.cpp:726 src/services/a-rex/rest/rest.cpp:742 #: src/services/a-rex/rest/rest.cpp:797 src/services/a-rex/rest/rest.cpp:876 #: src/services/a-rex/rest/rest.cpp:1104 src/services/a-rex/rest/rest.cpp:1696 #, c-format msgid "process: method %s is not supported for subpath %s" msgstr "process: metod %s stöds inte för subsökväg %s" #: src/services/a-rex/rest/rest.cpp:748 #, c-format msgid "process: schema %s is not supported for subpath %s" msgstr "process: schema %s stöds inte för subsökväg %s" #: src/services/a-rex/rest/rest.cpp:873 src/services/a-rex/rest/rest.cpp:1101 #, c-format msgid "process: action %s is not supported for subpath %s" msgstr "process: handling %s stöds inte för subsökväg %s" #: src/services/a-rex/rest/rest.cpp:1113 src/services/a-rex/rest/rest.cpp:1182 #: src/services/a-rex/rest/rest.cpp:1542 src/services/a-rex/rest/rest.cpp:1685 #, c-format msgid "REST:GET job %s - %s" msgstr "REST:GET jobb %s - %s" #: src/services/a-rex/rest/rest.cpp:1229 src/services/a-rex/rest/rest.cpp:1237 #, c-format msgid "REST:KILL job %s - %s" msgstr "REST:KILL jobb %s - %s" #: src/services/a-rex/rest/rest.cpp:1254 src/services/a-rex/rest/rest.cpp:1262 #, c-format msgid "REST:CLEAN job %s - %s" msgstr "REST:CLEAN jobb %s - %s" #: src/services/a-rex/rest/rest.cpp:1279 src/services/a-rex/rest/rest.cpp:1287 #: src/services/a-rex/rest/rest.cpp:1304 #, c-format msgid "REST:RESTART job %s - %s" msgstr "REST:RESTART jobb %s - %s" #: src/services/a-rex/rest/rest.cpp:1588 #, c-format msgid "REST:PUT job %s: file %s: there is no payload" msgstr "REST:PUT jobb %s: fil %s: det finns ingen nyttolast" #: src/services/a-rex/rest/rest.cpp:1608 #, c-format msgid "HTTP:PUT %s: put file %s: %s" msgstr "HTTP:PUT %s: put fil %s: %s" #: src/services/a-rex/test_cache_check.cpp:24 #: src/tests/count/test_client.cpp:20 #: src/tests/echo/echo_test4axis2c/test_client.cpp:20 #: src/tests/echo/test_client.cpp:21 msgid "Creating client side chain" msgstr "Skapar klientsidokedjan" #: src/services/a-rex/update_credentials.cpp:29 #, c-format msgid "" "UpdateCredentials: request = \n" "%s" msgstr "" "UpdateCredentials: begäran = \n" "%s" #: src/services/a-rex/update_credentials.cpp:35 msgid "UpdateCredentials: missing Reference" msgstr "UpdateCredentials: saknar Reference" #: src/services/a-rex/update_credentials.cpp:43 msgid "UpdateCredentials: wrong number of Reference" msgstr "UpdateCredentials: fel antal Reference" #: src/services/a-rex/update_credentials.cpp:51 msgid "UpdateCredentials: wrong number of elements inside Reference" msgstr "UpdateCredentials: fel antal element inuti Reference" #: src/services/a-rex/update_credentials.cpp:60 msgid "UpdateCredentials: EPR contains no JobID" msgstr "UpdateCredentials: EPR innehåller inget jobb-id" #: src/services/a-rex/update_credentials.cpp:70 #, c-format msgid "UpdateCredentials: no job found: %s" msgstr "UpdateCredentials: hittade inget jobb: %s" #: src/services/a-rex/update_credentials.cpp:77 msgid "UpdateCredentials: failed to update credentials" msgstr "UpdateCredentials: misslyckades med att uppdatera referenser" #: src/services/a-rex/update_credentials.cpp:85 #, c-format msgid "" "UpdateCredentials: response = \n" "%s" msgstr "" "UpdateCredentials: svar = \n" "%s" #: src/services/candypond/CandyPond.cpp:52 msgid "No A-REX config 
file found in candypond configuration" msgstr "Hittade ingen A-REX-inställningsfil i candypond-inställningarna" #: src/services/candypond/CandyPond.cpp:56 #, c-format msgid "Using A-REX config file %s" msgstr "Använder A-REX-inställningsfil %s" #: src/services/candypond/CandyPond.cpp:60 #, c-format msgid "Failed to process A-REX configuration in %s" msgstr "Misslyckades med att processera A-REX-inställningarna i %s" #: src/services/candypond/CandyPond.cpp:65 msgid "No caches defined in configuration" msgstr "Inga cacher definierade i inställningar" #: src/services/candypond/CandyPond.cpp:150 msgid "Empty filename returned from FileCache" msgstr "Tomt filnamn returnerat från FileCache" #: src/services/candypond/CandyPond.cpp:162 #, c-format msgid "Problem accessing cache file %s: %s" msgstr "Problem att komma åt cachefil %s: %s" #: src/services/candypond/CandyPond.cpp:210 #: src/services/candypond/CandyPond.cpp:474 msgid "No job ID supplied" msgstr "Inget jobb-id tillhandahållet" #: src/services/candypond/CandyPond.cpp:219 #, c-format msgid "Bad number in priority element: %s" msgstr "Felaktigt nummer i priority-element: %s" #: src/services/candypond/CandyPond.cpp:228 msgid "No username supplied" msgstr "Inget användarnamn tillhandahållet" #: src/services/candypond/CandyPond.cpp:235 #, c-format msgid "Supplied username %s does not match mapped username %s" msgstr "" "Det tillhandahållna användarnamnet %s matchar inte det mappade " "användarnamnet %s" #: src/services/candypond/CandyPond.cpp:249 msgid "No session directory found" msgstr "Hittade ingen sessionskatalog" #: src/services/candypond/CandyPond.cpp:253 #, c-format msgid "Using session dir %s" msgstr "Använder sessionskatalog %s" #: src/services/candypond/CandyPond.cpp:257 #, c-format msgid "Failed to stat session dir %s" msgstr "Misslyckades med att göra stat på sessionskatalog %s" #: src/services/candypond/CandyPond.cpp:262 #, c-format msgid "Session dir %s is owned by %i, but current mapped user is %i" msgstr "Sessionskatalog %s ägs av %i, men nuvarande mappade användare är %i" #: src/services/candypond/CandyPond.cpp:289 #, c-format msgid "Failed to access proxy of given job id %s at %s" msgstr "Misslyckades med att komma åt proxy för givet jobb-id %s på %s" #: src/services/candypond/CandyPond.cpp:307 #, c-format msgid "DN is %s" msgstr "DN är %s" #: src/services/candypond/CandyPond.cpp:385 #, c-format msgid "Permission checking passed for url %s" msgstr "Åtkomsträttighetskontroll godkänd för URL %s" #: src/services/candypond/CandyPond.cpp:410 #: src/services/candypond/CandyPondGenerator.cpp:135 #, c-format msgid "Failed to move %s to %s: %s" msgstr "Misslyckades med att flytta %s till %s: %s" #: src/services/candypond/CandyPond.cpp:441 #, c-format msgid "Starting new DTR for %s" msgstr "Startar ny DTR för %s" #: src/services/candypond/CandyPond.cpp:443 #, c-format msgid "Failed to start new DTR for %s" msgstr "Misslyckades med att starta ny DTR för %s" #: src/services/candypond/CandyPond.cpp:487 #, c-format msgid "Job %s: all files downloaded successfully" msgstr "Jobb %s: alla filer nerladdade framgångsrikt" #: src/services/candypond/CandyPond.cpp:494 #, c-format msgid "Job %s: Some downloads failed" msgstr "Jobb %s: några nerladdningar misslyckades" #: src/services/candypond/CandyPond.cpp:499 #, c-format msgid "Job %s: files still downloading" msgstr "Jobb %s: filer laddas fortfarande ner" #: src/services/candypond/CandyPond.cpp:511 msgid "CandyPond: Unauthorized" msgstr "CandyPond: Oauktoriserad" #: 
src/services/candypond/CandyPond.cpp:520 msgid "No local user mapping found" msgstr "Hittade ingen lokal användarmappning" #: src/services/candypond/CandyPond.cpp:527 #: src/services/data-staging/DataDeliveryService.cpp:625 #, c-format msgid "Identity is %s" msgstr "Identitet är %s" #: src/services/candypond/CandyPond.cpp:585 #: src/services/data-staging/DataDeliveryService.cpp:697 msgid "Security Handlers processing failed" msgstr "Säkerhetshanterarprocessering misslyckades" #: src/services/candypond/CandyPond.cpp:592 msgid "Only POST is supported in CandyPond" msgstr "Endast POST stöds i CandyPond" #: src/services/candypond/CandyPondGenerator.cpp:88 #, c-format msgid "DTR %s finished with state %s" msgstr "DTR %s avslutades med tillstånd %s" #: src/services/candypond/CandyPondGenerator.cpp:124 #, c-format msgid "Could not determine session directory from filename %s" msgstr "Kunde inte bestämma sessionskatalog från filnamn %s" #: src/services/candypond/CandyPondGenerator.cpp:164 #, c-format msgid "Invalid DTR for source %s, destination %s" msgstr "Ogiltig DTR för källa %s, destination %s" #: src/services/candypond/CandyPondGenerator.cpp:206 #, c-format msgid "DTRs still running for job %s" msgstr "DTRer kör fortfarande för jobb %s" #: src/services/candypond/CandyPondGenerator.cpp:215 #, c-format msgid "All DTRs finished for job %s" msgstr "Alla DTRer avslutade för jobb %s" #: src/services/candypond/CandyPondGenerator.cpp:222 #, c-format msgid "Job %s not found" msgstr "Hittade inte jobb %s" #: src/services/data-staging/DataDeliveryService.cpp:58 #, c-format msgid "Archiving DTR %s, state ERROR" msgstr "Arkiverar DTR %s, tillstånd ERROR" #: src/services/data-staging/DataDeliveryService.cpp:62 #, c-format msgid "Archiving DTR %s, state %s" msgstr "Arkiverar DTR %s, tillstånd %s" #: src/services/data-staging/DataDeliveryService.cpp:164 msgid "No delegation token in request" msgstr "Inget delegeringstoken i begäran" #: src/services/data-staging/DataDeliveryService.cpp:172 msgid "Failed to accept delegation" msgstr "Misslyckades med att acceptera delegering" #: src/services/data-staging/DataDeliveryService.cpp:201 #: src/services/data-staging/DataDeliveryService.cpp:208 msgid "ErrorDescription" msgstr "ErrorDescription" #: src/services/data-staging/DataDeliveryService.cpp:213 #, c-format msgid "All %u process slots used" msgstr "Alla %u processeringsslottar används" #: src/services/data-staging/DataDeliveryService.cpp:228 #, c-format msgid "Received retry for DTR %s still in transfer" msgstr "Mottog försök igen för DTR %s som fortfarande överför" #: src/services/data-staging/DataDeliveryService.cpp:235 #, c-format msgid "Replacing DTR %s in state %s with new request" msgstr "Byter ut DTR %s i tillstånd %s mot ny begäran" #: src/services/data-staging/DataDeliveryService.cpp:245 #, c-format msgid "Storing temp proxy at %s" msgstr "Lagrar temporär proxy på %s" #: src/services/data-staging/DataDeliveryService.cpp:253 #, c-format msgid "Failed to create temp proxy at %s: %s" msgstr "Misslyckades med att skapa temporär proxy på %s: %s" #: src/services/data-staging/DataDeliveryService.cpp:260 #, c-format msgid "Failed to change owner of temp proxy at %s to %i:%i: %s" msgstr "" "Misslyckades med att ändra ägare på temporär proxy på %s till %i:%i: %s" #: src/services/data-staging/DataDeliveryService.cpp:285 msgid "Invalid DTR" msgstr "Ogiltig DTR" #: src/services/data-staging/DataDeliveryService.cpp:289 #, c-format msgid "Failed to remove temporary proxy %s: %s" msgstr "Misslyckades med att 
ta bort temporär proxy %s: %s" #: src/services/data-staging/DataDeliveryService.cpp:390 #, c-format msgid "No such DTR %s" msgstr "Ingen sådan DTR %s" #: src/services/data-staging/DataDeliveryService.cpp:401 #, c-format msgid "DTR %s failed: %s" msgstr "DTR %s misslyckades: %s" #: src/services/data-staging/DataDeliveryService.cpp:412 #, c-format msgid "DTR %s finished successfully" msgstr "DTR %s avslutades framgångsrikt" #: src/services/data-staging/DataDeliveryService.cpp:422 #, c-format msgid "DTR %s still in progress (%lluB transferred)" msgstr "DTR %s pågår fortfarande (%lluB överförda)" #: src/services/data-staging/DataDeliveryService.cpp:482 #, c-format msgid "No active DTR %s" msgstr "Ingen aktiv DTR %s" #: src/services/data-staging/DataDeliveryService.cpp:492 #, c-format msgid "DTR %s was already cancelled" msgstr "DTR %s har redan avbrutits" #: src/services/data-staging/DataDeliveryService.cpp:501 #, c-format msgid "DTR %s could not be cancelled" msgstr "DTR %s kunde inte avbrytas" #: src/services/data-staging/DataDeliveryService.cpp:545 #, c-format msgid "Failed to get load average: %s" msgstr "Misslyckades med att få medellast: %s" #: src/services/data-staging/DataDeliveryService.cpp:569 msgid "Invalid configuration - no allowed IP address specified" msgstr "Ogiltiga inställningar - ingen tillåten IP-adress angiven" #: src/services/data-staging/DataDeliveryService.cpp:573 msgid "Invalid configuration - no transfer dirs specified" msgstr "Ogiltiga inställningar - inga överföringskataloger angivna" #: src/services/data-staging/DataDeliveryService.cpp:584 msgid "Failed to start archival thread" msgstr "Misslyckades med att starta arkiveringstråd" #: src/services/data-staging/DataDeliveryService.cpp:609 msgid "Shutting down data delivery service" msgstr "Stänger ner dataleveranstjänst" #: src/services/data-staging/DataDeliveryService.cpp:618 msgid "Unauthorized" msgstr "Oauktoriserad" #: src/services/data-staging/DataDeliveryService.cpp:704 msgid "Only POST is supported in DataDeliveryService" msgstr "Endast POST stöds i dataleveranstjänst" #: src/services/examples/echo_python/EchoService.py:12 msgid "EchoService (python) constructor called" msgstr "EchoService (python) konstruktor anropad" #: src/services/examples/echo_python/EchoService.py:17 #, python-format msgid "EchoService (python) has prefix %(prefix)s and suffix %(suffix)s" msgstr "EchoService (python) har prefix %(prefix)s och suffix %(suffix)s" #: src/services/examples/echo_python/EchoService.py:24 msgid "EchoService (python) destructor called" msgstr "EchoService (python) destruktor anropad" #: src/services/examples/echo_python/EchoService.py:54 msgid "EchoService (python) thread test starting" msgstr "EchoService (python) trådtest startar" #: src/services/examples/echo_python/EchoService.py:65 #, python-format msgid "EchoService (python) thread test, iteration %(iteration)s %(status)s" msgstr "EchoService (python) trådtest, iteration %(iteration)s %(status)s" #: src/services/examples/echo_python/EchoService.py:82 msgid "EchoService (python) 'Process' called" msgstr "EchoService (python) 'Process' anropad" #: src/services/examples/echo_python/EchoService.py:86 #, python-format msgid "inmsg.Auth().Export(arc.SecAttr.ARCAuth) = %s" msgstr "inmsg.Auth().Export(arc.SecAttr.ARCAuth) = %s" #: src/services/examples/echo_python/EchoService.py:87 #, python-format msgid "inmsg.Attributes().getAll() = %s " msgstr "inmsg.Attributes().getAll() = %s " #: src/services/examples/echo_python/EchoService.py:88 #, python-format 
msgid "EchoService (python) got: %s " msgstr "EchoService (python) fick: %s " #: src/services/examples/echo_python/EchoService.py:93 #, python-format msgid "EchoService (python) request_namespace: %s" msgstr "EchoService (python) request_namespace: %s" #: src/services/examples/echo_python/EchoService.py:99 #: src/services/examples/echo_python/EchoService.py:171 #, python-format msgid "outpayload %s" msgstr "utnyttolast %s" #: src/services/examples/echo_python/EchoService.py:128 msgid "Calling https://localhost:60000/Echo using ClientSOAP" msgstr "Anropar https://localhost:60000/Echo med ClientSOAP" #: src/services/examples/echo_python/EchoService.py:131 msgid "Calling http://localhost:60000/Echo using ClientSOAP" msgstr "Anropar http://localhost:60000/Echo med ClientSOAP" #: src/services/examples/echo_python/EchoService.py:137 #: src/services/examples/echo_python/EchoService.py:155 #, python-format msgid "new_payload %s" msgstr "ny nyttplast %s" #: src/services/examples/echo_python/EchoService.py:149 msgid "Calling http://localhost:60000/Echo using httplib" msgstr "Anropar http://localhost:60000/Echo med httplib" #: src/services/examples/echo_python/EchoService.py:165 msgid "Start waiting 10 sec..." msgstr "Börja vänta 10 sek..." #: src/services/examples/echo_python/EchoService.py:167 msgid "Waiting ends." msgstr "Väntan slutar" #: src/services/gridftpd/auth/auth.cpp:328 #, c-format msgid "Unknown authorization command %s" msgstr "Okänt authorization-kommando %s" #: src/services/gridftpd/auth/auth.cpp:347 #, c-format msgid "" "The [vo] section labeled '%s' has no file associated and can't be used for " "matching" msgstr "" "[vo]-sektionen märkt '%s' har inga associerade filer och kan inte användas " "för matchning" #: src/services/gridftpd/auth/auth_plugin.cpp:73 #: src/services/gridftpd/auth/unixmap.cpp:217 #, c-format msgid "Plugin %s failed to run" msgstr "Plugin %s misslyckades med att köra" #: src/services/gridftpd/auth/auth_plugin.cpp:75 #: src/services/gridftpd/auth/unixmap.cpp:219 #, c-format msgid "Plugin %s printed: %u" msgstr "Plugin %s skrev ut: %u" #: src/services/gridftpd/auth/auth_plugin.cpp:76 #: src/services/gridftpd/auth/unixmap.cpp:220 #, c-format msgid "Plugin %s error: %u" msgstr "Plugin %s fel: %u" #: src/services/gridftpd/auth/auth_voms.cpp:28 #, c-format msgid "VOMS proxy processing returns: %i - %s" msgstr "VOMS-proxyprocessering returner: %i - %s" #: src/services/gridftpd/auth/auth_voms.cpp:120 #, c-format msgid "VOMS trust chains: %s" msgstr "VOMS-förtroendekedjor: %s" #: src/services/gridftpd/auth/unixmap.cpp:126 msgid "User name mapping has empty command" msgstr "Användarnamnsmappning har tomt kommando" #: src/services/gridftpd/auth/unixmap.cpp:154 #, c-format msgid "User name mapping has empty name: %s" msgstr "Användarnamnsmappning har tomt namn: %s" #: src/services/gridftpd/commands.cpp:46 #, c-format msgid "response: %s" msgstr "svar: %s" #: src/services/gridftpd/commands.cpp:50 #, c-format msgid "Send response failed: %s" msgstr "Sändning av svar misslyckades: %s" #: src/services/gridftpd/commands.cpp:80 msgid "Response sending error" msgstr "Svar sände fel" #: src/services/gridftpd/commands.cpp:93 msgid "Closed connection" msgstr "Stängd förbindelse" #: src/services/gridftpd/commands.cpp:131 #, c-format msgid "Socket conversion failed: %s" msgstr "Socketkonvertering misslyckas: %s" #: src/services/gridftpd/commands.cpp:141 #, c-format msgid "Failed to obtain own address: %s" msgstr "Misslyckades med att erhÃ¥lla egen adress: %s" #: 
src/services/gridftpd/commands.cpp:149 #, c-format msgid "Failed to recognize own address type (IPv4 or IPv6) - %u" msgstr "Misslyckades med att känna igen egen adresstyp (IPv4 eller IPv6) - %u" #: src/services/gridftpd/commands.cpp:159 #, c-format msgid "Accepted connection on [%s]:%u" msgstr "Accepterade förbindelse på [%s]:%u" #: src/services/gridftpd/commands.cpp:161 #, c-format msgid "Accepted connection on %u.%u.%u.%u:%u" msgstr "Accepterade förbindelse på %u.%u.%u.%u:%u" #: src/services/gridftpd/commands.cpp:196 msgid "Accept failed" msgstr "Accept misslyckades" #: src/services/gridftpd/commands.cpp:204 #: src/services/gridftpd/listener.cpp:415 #, c-format msgid "Accept failed: %s" msgstr "Accept misslyckades: %s" #: src/services/gridftpd/commands.cpp:219 #, c-format msgid "Accepted connection from [%s]:%u" msgstr "Accepterade förbindelse från [%s]:%u" #: src/services/gridftpd/commands.cpp:221 #, c-format msgid "Accepted connection from %u.%u.%u.%u:%u" msgstr "Accepterade förbindelse från %u.%u.%u.%u:%u" #: src/services/gridftpd/commands.cpp:230 msgid "Authenticate in commands failed" msgstr "Autentisering i kommandon misslyckades" #: src/services/gridftpd/commands.cpp:239 msgid "Authentication failure" msgstr "Misslyckades med autentisering" #: src/services/gridftpd/commands.cpp:247 #, c-format msgid "User subject: %s" msgstr "Användarsubjekt: %s" #: src/services/gridftpd/commands.cpp:248 #, c-format msgid "Encrypted: %s" msgstr "Krypterad: %s" #: src/services/gridftpd/commands.cpp:254 msgid "User has no proper configuration associated" msgstr "Användare har inga associerade lämpliga inställningar" #: src/services/gridftpd/commands.cpp:262 msgid "" "User has empty virtual directory tree.\n" "Either user has no authorised plugins or there are no plugins configured at " "all." msgstr "" "Användare har tomt virtuellt katalogträd.\n" "Endera har användaren inga auktoriserade pluginer eller det finns " "överhuvudtaget inga inställda pluginer." 
#: src/services/gridftpd/commands.cpp:279 msgid "Read commands in authenticate failed" msgstr "Läs-kommandon i authenticate misslyckades" #: src/services/gridftpd/commands.cpp:411 msgid "Control connection (probably) closed" msgstr "Kontrollförbindelse (troligen) stängd" #: src/services/gridftpd/commands.cpp:445 #: src/services/gridftpd/commands.cpp:724 msgid "Command EPRT" msgstr "Kommando EPRT" #: src/services/gridftpd/commands.cpp:446 #, c-format msgid "Failed to parse remote address %s" msgstr "Misslyckades med att tolka fjärradress %s" #: src/services/gridftpd/commands.cpp:468 #, c-format msgid "Command USER %s" msgstr "Kommando USER %s" #: src/services/gridftpd/commands.cpp:475 msgid "Command CDUP" msgstr "Kommando CDUP" #: src/services/gridftpd/commands.cpp:481 #, c-format msgid "Command CWD %s" msgstr "Kommando CWD %s" #: src/services/gridftpd/commands.cpp:497 #, c-format msgid "Command MKD %s" msgstr "Kommando MKD %s" #: src/services/gridftpd/commands.cpp:517 #, c-format msgid "Command SIZE %s" msgstr "Kommando SIZE %s" #: src/services/gridftpd/commands.cpp:532 #, c-format msgid "Command SBUF: %i" msgstr "Kommando SBUF: %i" #: src/services/gridftpd/commands.cpp:553 #, c-format msgid "Command MLST %s" msgstr "Kommando MLST %s" #: src/services/gridftpd/commands.cpp:576 #, c-format msgid "Command DELE %s" msgstr "Kommando DELE %s" #: src/services/gridftpd/commands.cpp:591 #, c-format msgid "Command RMD %s" msgstr "Kommando RMD %s" #: src/services/gridftpd/commands.cpp:605 #, c-format msgid "Command TYPE %c" msgstr "Kommando TYPE %c" #: src/services/gridftpd/commands.cpp:616 #, c-format msgid "Command MODE %c" msgstr "Kommando MODE %c" #: src/services/gridftpd/commands.cpp:628 msgid "Command ABOR" msgstr "Kommando ABOR" #: src/services/gridftpd/commands.cpp:641 #, c-format msgid "Command REST %s" msgstr "Kommando REST %s" #: src/services/gridftpd/commands.cpp:654 #, c-format msgid "Command EPSV %s" msgstr "Kommando EPSV %s" #: src/services/gridftpd/commands.cpp:656 msgid "Command SPAS" msgstr "Kommando SPAS" #: src/services/gridftpd/commands.cpp:658 msgid "Command PASV" msgstr "Kommando PASV" #: src/services/gridftpd/commands.cpp:679 msgid "local_pasv failed" msgstr "local_pasv misslyckades" #: src/services/gridftpd/commands.cpp:703 msgid "local_spas failed" msgstr "local_spas misslyckades" #: src/services/gridftpd/commands.cpp:726 msgid "Command PORT" msgstr "Kommando PORT" #: src/services/gridftpd/commands.cpp:729 msgid "active_data is disabled" msgstr "active_data är avstängt" #: src/services/gridftpd/commands.cpp:738 msgid "local_port failed" msgstr "local_port misslyckades" #: src/services/gridftpd/commands.cpp:751 #, c-format msgid "Command MLSD %s" msgstr "Kommando MLSD %s" #: src/services/gridftpd/commands.cpp:753 #, c-format msgid "Command NLST %s" msgstr "Kommando NLST %s" #: src/services/gridftpd/commands.cpp:755 #, c-format msgid "Command LIST %s" msgstr "Kommando LIST %s" #: src/services/gridftpd/commands.cpp:806 #, c-format msgid "Command ERET %s" msgstr "Kommando ERET %s" #: src/services/gridftpd/commands.cpp:836 #, c-format msgid "Command RETR %s" msgstr "Kommando RETR %s" #: src/services/gridftpd/commands.cpp:865 #, c-format msgid "Command STOR %s" msgstr "Kommando STOR %s" #: src/services/gridftpd/commands.cpp:893 #, c-format msgid "Command ALLO %i" msgstr "Kommando ALLO %i" #: src/services/gridftpd/commands.cpp:916 msgid "Command OPTS" msgstr "Kommando OPTS" #: src/services/gridftpd/commands.cpp:919 msgid "Command OPTS RETR" msgstr "Kommando OPTS RETR" #: 
src/services/gridftpd/commands.cpp:929 #, c-format msgid "Option: %s" msgstr "Alternativ: %s" #: src/services/gridftpd/commands.cpp:973 msgid "Command NOOP" msgstr "Kommando NOOP" #: src/services/gridftpd/commands.cpp:977 msgid "Command QUIT" msgstr "Kommando QUIT" #: src/services/gridftpd/commands.cpp:987 msgid "Failed to close, deleting client" msgstr "Misslyckades med att stänga, tar bort klient" #: src/services/gridftpd/commands.cpp:1001 #, c-format msgid "Command DCAU: %i '%s'" msgstr "Kommando DCAU: %i '%s'" #: src/services/gridftpd/commands.cpp:1029 #, c-format msgid "Command PBZS: %s" msgstr "Kommando PBZS: %s" #: src/services/gridftpd/commands.cpp:1037 #, c-format msgid "Setting pbsz to %lu" msgstr "Sätter pbsz till %lu" #: src/services/gridftpd/commands.cpp:1053 #, c-format msgid "Command PROT: %s" msgstr "Kommando PROT: %s" #: src/services/gridftpd/commands.cpp:1078 #, c-format msgid "Command MDTM %s" msgstr "Kommando MDTM %s" #: src/services/gridftpd/commands.cpp:1100 #, c-format msgid "Raw command: %s" msgstr "Rå-kommando: %s" #: src/services/gridftpd/commands.cpp:1148 msgid "Failed to allocate memory for buffer" msgstr "Misslyckades med att allokera minne för buffer" #: src/services/gridftpd/commands.cpp:1155 #, c-format msgid "Allocated %u buffers %llu bytes each." msgstr "Allokerade %u buffrar %llu bytes vardera." #: src/services/gridftpd/commands.cpp:1162 msgid "abort_callback: start" msgstr "abort_callback: start" #: src/services/gridftpd/commands.cpp:1165 #, c-format msgid "abort_callback: Globus error: %s" msgstr "abort_callback: Globusfel: %s" #: src/services/gridftpd/commands.cpp:1179 msgid "make_abort: start" msgstr "make_abort: start" #: src/services/gridftpd/commands.cpp:1191 msgid "Failed to abort data connection - ignoring and recovering" msgstr "" "Misslyckades med att avbryta dataförbindelsen - ignorerar och återställer" #: src/services/gridftpd/commands.cpp:1199 msgid "make_abort: wait for abort flag to be reset" msgstr "make_abort: vänta på att abort-flaggan återställs" #: src/services/gridftpd/commands.cpp:1209 msgid "make_abort: leaving" msgstr "make_abort: lämnar" #: src/services/gridftpd/commands.cpp:1224 msgid "check_abort: have Globus error" msgstr "check_abort: har Globusfel" #: src/services/gridftpd/commands.cpp:1225 msgid "Abort request caused by transfer error" msgstr "Begäran att avbryta orsakades av överföringsfel" #: src/services/gridftpd/commands.cpp:1228 msgid "check_abort: sending 426" msgstr "check_abort: sänder 426" #: src/services/gridftpd/commands.cpp:1249 msgid "Abort request caused by error in transfer function" msgstr "Begäran att avbryta orsakades av fel i överföringsfunktion" #: src/services/gridftpd/commands.cpp:1331 msgid "Failed to start timer thread - timeout won't work" msgstr "" "Misslyckades med att starta tidtagar-tråd - timeout kommer inte att fungera" #: src/services/gridftpd/commands.cpp:1383 msgid "Killing connection due to timeout" msgstr "Dödar förbindelse på grund av timeout" #: src/services/gridftpd/conf/conf_vo.cpp:22 #: src/services/gridftpd/conf/conf_vo.cpp:48 msgid "Configuration section [userlist] is missing name." msgstr "Inställningssektion [userlist] saknar namn." 
#: src/services/gridftpd/conf/daemon.cpp:58 #: src/services/gridftpd/conf/daemon.cpp:138 #, c-format msgid "No such user: %s" msgstr "Ingen sådan användare: %s" #: src/services/gridftpd/conf/daemon.cpp:70 #: src/services/gridftpd/conf/daemon.cpp:150 #, c-format msgid "No such group: %s" msgstr "Ingen sådan grupp: %s" #: src/services/gridftpd/conf/daemon.cpp:83 #: src/services/gridftpd/conf/daemon.cpp:163 #, c-format msgid "Improper debug level '%s'" msgstr "Ogiltig debugnivå '%s'" #: src/services/gridftpd/conf/daemon.cpp:120 msgid "Missing option for command logreopen" msgstr "Saknat alternativ för kommandot logreopen" #: src/services/gridftpd/conf/daemon.cpp:125 msgid "Wrong option in logreopen" msgstr "Felaktigt alternativ i logreopen" #: src/services/gridftpd/conf/daemon.cpp:209 #, c-format msgid "Failed to open log file %s" msgstr "Misslyckades med att öppna loggfil %s" #: src/services/gridftpd/datalist.cpp:101 msgid "Closing channel (list)" msgstr "Stänger kanal (lista)" #: src/services/gridftpd/datalist.cpp:157 msgid "Data channel connected (list)" msgstr "Datakanal uppkopplad (lista)" #: src/services/gridftpd/dataread.cpp:24 msgid "data_connect_retrieve_callback" msgstr "data_connect_retrieve_callback" #: src/services/gridftpd/dataread.cpp:30 msgid "Data channel connected (retrieve)" msgstr "Datakanal uppkopplad (hämta)" #: src/services/gridftpd/dataread.cpp:37 msgid "data_connect_retrieve_callback: allocate_data_buffer" msgstr "data_connect_retrieve_callback: allocate_data_buffer" #: src/services/gridftpd/dataread.cpp:40 msgid "data_connect_retrieve_callback: allocate_data_buffer failed" msgstr "data_connect_retrieve_callback: allocate_data_buffer misslyckades" #: src/services/gridftpd/dataread.cpp:48 #, c-format msgid "data_connect_retrieve_callback: check for buffer %u" msgstr "data_connect_retrieve_callback: kontrollera buffer %u" #: src/services/gridftpd/dataread.cpp:61 src/services/gridftpd/dataread.cpp:158 #, c-format msgid "Closing channel (retrieve) due to local read error: %s" msgstr "Stänger kanal (hämta) på grund av lokalt läsfel: %s" #: src/services/gridftpd/dataread.cpp:75 src/services/gridftpd/dataread.cpp:172 msgid "Buffer registration failed" msgstr "Bufferregistrering misslyckades" #: src/services/gridftpd/dataread.cpp:88 msgid "data_retrieve_callback" msgstr "data_retrieve_callback" #: src/services/gridftpd/dataread.cpp:96 #, c-format msgid "Data channel (retrieve) %i %i %i" msgstr "Datakanal (hämta) %i %i %i" #: src/services/gridftpd/dataread.cpp:104 msgid "Closing channel (retrieve)" msgstr "Stänger kanal (hämta)" #: src/services/gridftpd/dataread.cpp:110 #: src/services/gridftpd/datawrite.cpp:128 #, c-format msgid "Time spent waiting for network: %.3f ms" msgstr "Tid spenderad i väntan på nätverk: %.3f ms" #: src/services/gridftpd/dataread.cpp:111 #: src/services/gridftpd/datawrite.cpp:129 #, c-format msgid "Time spent waiting for disc: %.3f ms" msgstr "Tid spenderad i väntan på disk: %.3f ms" #: src/services/gridftpd/dataread.cpp:122 msgid "data_retrieve_callback: lost buffer" msgstr "data_retrieve_callback: förlorad buffer" #: src/services/gridftpd/datawrite.cpp:24 msgid "data_connect_store_callback" msgstr "data_connect_store_callback" #: src/services/gridftpd/datawrite.cpp:30 msgid "Data channel connected (store)" msgstr "Datakanal uppkopplad (lagra)" #: src/services/gridftpd/datawrite.cpp:57 msgid "Failed to register any buffer" msgstr "Misslyckades med att registrera buffrar" #: src/services/gridftpd/datawrite.cpp:76 #, c-format msgid "Data channel 
(store) %i %i %i" msgstr "Datakanal (lägra) %i %i %i" #: src/services/gridftpd/datawrite.cpp:89 msgid "data_store_callback: lost buffer" msgstr "data_store_callback: förlorad buffer" #: src/services/gridftpd/datawrite.cpp:105 #, c-format msgid "Closing channel (store) due to error: %s" msgstr "Stänger kanal (lagra) pÃ¥ grund av fel: %s" #: src/services/gridftpd/datawrite.cpp:115 msgid "Closing channel (store)" msgstr "Stänger kanal (lagra)" #: src/services/gridftpd/fileplugin/fileplugin.cpp:55 msgid "Can't parse access rights in configuration line" msgstr "Kan inte tolka Ã¥tkomsträttigheterna pÃ¥ inställningsrad" #: src/services/gridftpd/fileplugin/fileplugin.cpp:61 msgid "Can't parse user:group in configuration line" msgstr "Kan inte tolka användare:grupp pÃ¥ inställningsrad" #: src/services/gridftpd/fileplugin/fileplugin.cpp:68 msgid "Can't recognize user in configuration line" msgstr "Kan inte känna igen användare pÃ¥ inställningsrad" #: src/services/gridftpd/fileplugin/fileplugin.cpp:77 msgid "Can't recognize group in configuration line" msgstr "Kan inte känna igen grupp pÃ¥ inställningsrad" #: src/services/gridftpd/fileplugin/fileplugin.cpp:84 #: src/services/gridftpd/fileplugin/fileplugin.cpp:89 msgid "Can't parse or:and in configuration line" msgstr "Kan inte tolka or:and pÃ¥ inställningsrad" #: src/services/gridftpd/fileplugin/fileplugin.cpp:116 msgid "Can't parse configuration line" msgstr "Kan inte tolka inställningsrad" #: src/services/gridftpd/fileplugin/fileplugin.cpp:120 #, c-format msgid "Bad directory name: %s" msgstr "Felaktigt katalognamn: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:137 msgid "Can't parse create arguments in configuration line" msgstr "Kan inte tolka create-argument pÃ¥ inställningsrad" #: src/services/gridftpd/fileplugin/fileplugin.cpp:146 msgid "Can't parse mkdir arguments in configuration line" msgstr "Kan inte tolka mkdir-argument pÃ¥ inställningsrad" #: src/services/gridftpd/fileplugin/fileplugin.cpp:163 #, c-format msgid "Bad subcommand in configuration line: %s" msgstr "Felaktigt subkommando pÃ¥ inställningsrad: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:175 msgid "Bad mount directory specified" msgstr "Felaktig monteringskatalog angiven" #: src/services/gridftpd/fileplugin/fileplugin.cpp:177 #, c-format msgid "Mount point %s" msgstr "Monteringspunkt: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:215 #: src/services/gridftpd/fileplugin/fileplugin.cpp:274 #, c-format msgid "mkdir failed: %s" msgstr "mkdir misslyckades: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:226 #, c-format msgid "Warning: mount point %s creation failed." msgstr "varning: misslyckades med att skapa monteringspunkt %s." 
#: src/services/gridftpd/fileplugin/fileplugin.cpp:330 #, c-format msgid "plugin: open: %s" msgstr "plugin: öppnad: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:378 #: src/services/gridftpd/fileplugin/fileplugin.cpp:415 msgid "Not enough space to store file" msgstr "Inte tillräckligt med utrymme för att lagra fil" #: src/services/gridftpd/fileplugin/fileplugin.cpp:430 #, c-format msgid "open: changing owner for %s, %i, %i" msgstr "open: ändrar ägare för %s, %i, %i" #: src/services/gridftpd/fileplugin/fileplugin.cpp:437 #, c-format msgid "open: owner: %i %i" msgstr "open: ägare: %i %i" #: src/services/gridftpd/fileplugin/fileplugin.cpp:446 #: src/services/gridftpd/fileplugin/fileplugin.cpp:486 #, c-format msgid "Unknown open mode %s" msgstr "Okänd open-mode %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:451 msgid "plugin: close" msgstr "plugin: stängd" #: src/services/gridftpd/fileplugin/fileplugin.cpp:492 msgid "plugin: read" msgstr "plugin: läser" #: src/services/gridftpd/fileplugin/fileplugin.cpp:498 msgid "Error while reading file" msgstr "Fel vid läsning av fil" #: src/services/gridftpd/fileplugin/fileplugin.cpp:508 msgid "plugin: write" msgstr "plugin: skriver" #: src/services/gridftpd/fileplugin/fileplugin.cpp:519 msgid "Zero bytes written to file" msgstr "Noll bytes skrivna till fil" #: src/services/gridftpd/fileplugin/fileplugin.cpp:727 #, c-format msgid "plugin: checkdir: %s" msgstr "plugin: kontrollerar katalog: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:730 #, c-format msgid "plugin: checkdir: access: %s" msgstr "plugin: kontrollerar katalog: åtkomst: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:739 #, c-format msgid "plugin: checkdir: access: allowed: %s" msgstr "plugin: kontrollerar katalog: åtkomst: tillåten: %s" #: src/services/gridftpd/fileroot.cpp:14 #, c-format msgid "No plugin is configured or authorised for requested path %s" msgstr "" "Ingen plugin är inställd eller auktoriserad för den begärda sökvägen %s" #: src/services/gridftpd/fileroot.cpp:19 msgid "FilePlugin: more unload than load" msgstr "FilePlugin: mer ladda ut än ladda in" #: src/services/gridftpd/fileroot.cpp:34 #, c-format msgid "Can't load plugin %s for access point %s" msgstr "Kan inte ladda in plugin %s för åtkomstpunkt %s" #: src/services/gridftpd/fileroot.cpp:39 src/services/gridftpd/fileroot.cpp:43 #, c-format msgid "Plugin %s for access point %s is broken." msgstr "Plugin %s för åtkomstpunkt %s är trasig." #: src/services/gridftpd/fileroot.cpp:47 #, c-format msgid "Plugin %s for access point %s acquire failed (should never happen)." msgstr "Plugin %s för åtkomstpunkt %s acquire misslyckades (ska aldrig hända)." 
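The fileroot.cpp messages around this point ("Can't load plugin %s for access point %s" above, "Destructor with dlclose (%s)" just below) refer to file-access plugins that are loaded as shared objects and released again with dlclose. The sketch below illustrates that general loading pattern only; the factory symbol name, the plugin file name and the mount point are assumptions, and the actual ARC FilePlugin/FileNode interface is not reproduced here.

// Rough sketch of shared-object plugin loading of the kind these messages
// describe.  The "init" factory symbol, the plugin path and the mount point
// are illustrative assumptions, not the real ARC plugin interface.
#include <dlfcn.h>
#include <cstdio>

typedef void* (*plugin_factory_t)(const char* mount_point);

int main() {
    const char* path = "./libfileplugin.so";      // assumed plugin file name
    void* handle = dlopen(path, RTLD_NOW);
    if (!handle) {
        std::fprintf(stderr, "Can't load plugin %s for access point %s: %s\n",
                     path, "/", dlerror());
        return 1;
    }
    plugin_factory_t factory = (plugin_factory_t)dlsym(handle, "init");
    if (!factory) {
        std::fprintf(stderr, "Plugin %s is broken: %s\n", path, dlerror());
        dlclose(handle);                          // cf. "Destructor with dlclose"
        return 1;
    }
    void* instance = factory("/");                // hand the mount point to the plugin
    (void)instance;
    dlclose(handle);
    return 0;
}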
#: src/services/gridftpd/fileroot.cpp:54 #, c-format msgid "Destructor with dlclose (%s)" msgstr "Destruktor med dlclose (%s)" #: src/services/gridftpd/fileroot.cpp:77 #, c-format msgid "FileNode: operator= (%s <- %s) %lu <- %lu" msgstr "FileNode: operator= (%s <- %s) %lu <- %lu" #: src/services/gridftpd/fileroot.cpp:80 msgid "Copying with dlclose" msgstr "Kopierar med dlclose" #: src/services/gridftpd/fileroot_config.cpp:31 #: src/services/gridftpd/fileroot_config.cpp:405 msgid "configuration file not found" msgstr "hittade inte inställningsfil" #: src/services/gridftpd/fileroot_config.cpp:54 msgid "Wrong port number in configuration" msgstr "Felaktigt portnummer i inställningar" #: src/services/gridftpd/fileroot_config.cpp:63 msgid "Wrong maxconnections number in configuration" msgstr "Felaktigt maxconnections-nummer i inställningar" #: src/services/gridftpd/fileroot_config.cpp:72 msgid "Wrong defaultbuffer number in configuration" msgstr "Felaktigt defaultbuffer-nummer i inställningar" #: src/services/gridftpd/fileroot_config.cpp:81 msgid "Wrong maxbuffer number in configuration" msgstr "Felaktigt maxbuffer-nummer i inställningar" #: src/services/gridftpd/fileroot_config.cpp:113 #: src/services/gridftpd/fileroot_config.cpp:121 #, c-format msgid "Can't resolve host %s" msgstr "Kan inte slå upp värd %s" #: src/services/gridftpd/fileroot_config.cpp:173 msgid "Could not determine hostname from gethostname()" msgstr "Kunde inte bestämma värdnamn från gethostname()" #: src/services/gridftpd/fileroot_config.cpp:190 msgid "unnamed group" msgstr "grupp utan namn" #: src/services/gridftpd/fileroot_config.cpp:199 msgid "undefined plugin name" msgstr "odefinierat pluginnamn" #: src/services/gridftpd/fileroot_config.cpp:203 msgid "undefined virtual plugin path" msgstr "odefinierad virtuell pluginsökväg" #: src/services/gridftpd/fileroot_config.cpp:208 #, c-format msgid "bad directory for plugin: %s" msgstr "Felaktig katalog för plugin: %s" #: src/services/gridftpd/fileroot_config.cpp:220 #, c-format msgid "Already have directory: %s" msgstr "Har redan katalog: %s" #: src/services/gridftpd/fileroot_config.cpp:223 #, c-format msgid "Registering directory: %s with plugin: %s" msgstr "Registrerar katalog: %s med plugin: %s" #: src/services/gridftpd/fileroot_config.cpp:236 #, c-format msgid "file node creation failed: %s" msgstr "skapande av filnod misslyckades: %s" #: src/services/gridftpd/fileroot_config.cpp:286 #, c-format msgid "improper attribute for allowencryption command: %s" msgstr "olämpligt attribut för allowencryption-kommando: %s" #: src/services/gridftpd/fileroot_config.cpp:300 #, c-format msgid "improper attribute for allowactvedata command: %s" msgstr "olämpligt attribut för allowactvedata-kommando: %s" #: src/services/gridftpd/fileroot_config.cpp:314 #, c-format msgid "failed while processing configuration command: %s %s" msgstr "Misslyckades under processering av inställningskommando: %s %s" #: src/services/gridftpd/fileroot_config.cpp:339 #, c-format msgid "Failed processing authorization group %s" msgstr "Misslyckades med att processera auktoriseringsgrupp %s" #: src/services/gridftpd/fileroot_config.cpp:352 msgid "Missing authgroup name in allowaccess" msgstr "Saknar authgroup-namn i allowaccess" #: src/services/gridftpd/fileroot_config.cpp:369 msgid "Missing authgroup name in denyaccess" msgstr "Saknar authgroup-namn i denyaccess" #: src/services/gridftpd/fileroot_config.cpp:419 msgid "failed to process client identification" msgstr "Misslyckades med att processera 
klientidentifikation" #: src/services/gridftpd/fileroot_config.cpp:426 msgid "failed to identify plugins path" msgstr "Misslyckades med att identifiera pluginsökväg" #: src/services/gridftpd/fileroot_config.cpp:453 #, c-format msgid "Registering dummy directory: %s" msgstr "Registrerar dummy-katalog: %s" #: src/services/gridftpd/listener.cpp:57 src/services/gridftpd/listener.cpp:466 msgid "Activation failed" msgstr "Aktivering misslyckades" #: src/services/gridftpd/listener.cpp:66 src/services/gridftpd/listener.cpp:172 msgid "Child exited" msgstr "Barn avslutades" #: src/services/gridftpd/listener.cpp:78 msgid "Globus connection error" msgstr "Globusförbindelsefel" #: src/services/gridftpd/listener.cpp:80 src/services/gridftpd/listener.cpp:424 msgid "New connection" msgstr "Ny förbindelse" #: src/services/gridftpd/listener.cpp:87 msgid "Server stopped" msgstr "Server stoppad" #: src/services/gridftpd/listener.cpp:157 msgid "Error: failed to set handler for SIGTERM" msgstr "Fel: misslyckades med att sätta hanterare for SIGTERM" #: src/services/gridftpd/listener.cpp:161 msgid "Starting controlled process" msgstr "Startar kontrollerad process" #: src/services/gridftpd/listener.cpp:164 msgid "fork failed" msgstr "fork misslyckades" #: src/services/gridftpd/listener.cpp:169 msgid "wait failed - killing child" msgstr "väntan misslyckades - dödar barn" #: src/services/gridftpd/listener.cpp:174 msgid "Killed with signal: " msgstr "Dödar med signal: " #: src/services/gridftpd/listener.cpp:176 msgid "Restarting after segmentation violation." msgstr "Startar om efter segmenteringsfel." #: src/services/gridftpd/listener.cpp:177 msgid "Waiting 1 minute" msgstr "Väntar 1 minut" #: src/services/gridftpd/listener.cpp:239 msgid "Error: failed to set handler for SIGCHLD" msgstr "Fel: misslyckades att sätta hanterare för SIGCHLD" #: src/services/gridftpd/listener.cpp:256 msgid "Missing argument" msgstr "Saknat argument" #: src/services/gridftpd/listener.cpp:257 msgid "Unknown option" msgstr "Okänt alternativ" #: src/services/gridftpd/listener.cpp:264 msgid "Wrong port number" msgstr "Felaktigt portnummer" #: src/services/gridftpd/listener.cpp:274 msgid "Wrong number of connections" msgstr "Fel antal förbindelser" #: src/services/gridftpd/listener.cpp:281 msgid "Wrong buffer size" msgstr "Felaktig bufferstorlek" #: src/services/gridftpd/listener.cpp:288 msgid "Wrong maximal buffer size" msgstr "Felaktig maximal bufferstorlek" #: src/services/gridftpd/listener.cpp:300 msgid "Failed reading configuration" msgstr "Misslyckades med att läsa inställningar" #: src/services/gridftpd/listener.cpp:331 #, c-format msgid "Failed to obtain local address: %s" msgstr "Misslyckades med att erhÃ¥lla lokal adress: %s" #: src/services/gridftpd/listener.cpp:338 #, c-format msgid "Failed to create socket(%s): %s" msgstr "Misslyckades med att skapa socket(%s): %s" #: src/services/gridftpd/listener.cpp:352 #, c-format msgid "Failed to limit socket to IPv6: %s" msgstr "Misslyckades med att begränsa socket till IPv6: %s" #: src/services/gridftpd/listener.cpp:359 #, c-format msgid "Failed to bind socket(%s): %s" msgstr "Misslyckades med att binda socket(%s): %s" #: src/services/gridftpd/listener.cpp:364 #, c-format msgid "Failed to listen on socket(%s): %s" msgstr "Misslyckades med att lyssna pÃ¥ socket(%s): %s" #: src/services/gridftpd/listener.cpp:371 msgid "Not listening to anything" msgstr "Lyssnar inte pÃ¥ nÃ¥gonting" #: src/services/gridftpd/listener.cpp:374 #, c-format msgid "Some addresses failed. Listening on %u of %u." 
msgstr "NÃ¥gra adresser misslyckades. Lyssnar pÃ¥ %u av %u." #: src/services/gridftpd/listener.cpp:382 #: src/services/gridftpd/listener.cpp:477 msgid "Listen started" msgstr "Började lyssna" #: src/services/gridftpd/listener.cpp:395 msgid "No valid handles left for listening" msgstr "Inget giltigt handtag kvar att lyssna pÃ¥" #: src/services/gridftpd/listener.cpp:401 #, c-format msgid "Select failed: %s" msgstr "Select misslyckades: %s" #: src/services/gridftpd/listener.cpp:422 #, c-format msgid "Have connections: %i, max: %i" msgstr "Har förbindelses: %i, max: %i" #: src/services/gridftpd/listener.cpp:427 #, c-format msgid "Fork failed: %s" msgstr "Fork misslyckades: %s" #: src/services/gridftpd/listener.cpp:445 msgid "Refusing connection: Connection limit exceeded" msgstr "Vägrar förbindelse: Förbindelsegräns nÃ¥dd" #: src/services/gridftpd/listener.cpp:471 msgid "Init failed" msgstr "Init misslyckades" #: src/services/gridftpd/listener.cpp:474 msgid "Listen failed" msgstr "Lyssnande misslyckades" #: src/services/gridftpd/listener.cpp:488 msgid "Listen finished" msgstr "Lyssnande avslutad" #: src/services/gridftpd/listener.cpp:493 msgid "Stopping server" msgstr "Stoppar server" #: src/services/gridftpd/listener.cpp:497 msgid "Destroying handle" msgstr "Förstör handtag" #: src/services/gridftpd/listener.cpp:500 msgid "Deactivating modules" msgstr "Deaktiverar moduler" #: src/services/gridftpd/listener.cpp:508 msgid "Exiting" msgstr "Avslutar" #: src/services/gridftpd/misc/ldapquery.cpp:253 #, c-format msgid "%s: %s:%i" msgstr "%s: %s:%i" #: src/services/gridftpd/misc/ldapquery.cpp:390 #: src/services/gridftpd/misc/ldapquery.cpp:467 #, c-format msgid "%s %s" msgstr "%s %s" #: src/services/gridftpd/misc/ldapquery.cpp:394 #, c-format msgid " %s: %s" msgstr " %s: %s" #: src/services/gridftpd/misc/ldapquery.cpp:396 #, c-format msgid " %s:" msgstr " %s:" #: src/services/gridftpd/userspec.cpp:83 src/services/gridftpd/userspec.cpp:133 msgid "No proxy provided" msgstr "Ingen proxy tillhandahÃ¥llen" #: src/services/gridftpd/userspec.cpp:85 #, c-format msgid "Proxy/credentials stored at %s" msgstr "Proxy/referenser lagrade pÃ¥ %s" #: src/services/gridftpd/userspec.cpp:91 src/services/gridftpd/userspec.cpp:141 msgid "Running user has no name" msgstr "Körande användare har inget namn" #: src/services/gridftpd/userspec.cpp:94 src/services/gridftpd/userspec.cpp:144 #, c-format msgid "Mapped to running user: %s" msgstr "Mappad till körande användare: %s" #: src/services/gridftpd/userspec.cpp:104 #: src/services/gridftpd/userspec.cpp:154 #, c-format msgid "Mapped to local id: %i" msgstr "Mappad till lokalt id: %i" #: src/services/gridftpd/userspec.cpp:109 #: src/services/gridftpd/userspec.cpp:159 #, c-format msgid "No group %i for mapped user" msgstr "Ingen grupp %i för mappad användare" #: src/services/gridftpd/userspec.cpp:113 #: src/services/gridftpd/userspec.cpp:163 #, c-format msgid "Mapped to local group id: %i" msgstr "Mappad till lokalt grupp-id: %i" #: src/services/gridftpd/userspec.cpp:114 #: src/services/gridftpd/userspec.cpp:164 #, c-format msgid "Mapped to local group name: %s" msgstr "Mappad till lokalt gruppnamn: %s" #: src/services/gridftpd/userspec.cpp:115 #: src/services/gridftpd/userspec.cpp:165 #, c-format msgid "Mapped user's home: %s" msgstr "Mappad användares hem: %s" #: src/services/gridftpd/userspec.cpp:135 #, c-format msgid "Proxy stored at %s" msgstr "Proxy lagrad pÃ¥ %s" #: src/services/gridftpd/userspec.cpp:195 #, c-format msgid "Undefined control sequence: %%%s" msgstr 
"Odefinierad kontrollsekvens: %%%s" #: src/services/gridftpd/userspec.cpp:218 #, c-format msgid "Local user %s does not exist" msgstr "Lokal användare %s existerar inte" #: src/services/gridftpd/userspec.cpp:227 #, c-format msgid "Local group %s does not exist" msgstr "Lokal grupp %s existerar inte" #: src/services/gridftpd/userspec.cpp:232 #, c-format msgid "Remapped to local user: %s" msgstr "Ommappad till lokal användare: %s" #: src/services/gridftpd/userspec.cpp:233 #, c-format msgid "Remapped to local id: %i" msgstr "Ommappad till lokalt id: %i" #: src/services/gridftpd/userspec.cpp:234 #, c-format msgid "Remapped to local group id: %i" msgstr "Ommappad till lokal grupp-id: %i" #: src/services/gridftpd/userspec.cpp:235 #, c-format msgid "Remapped to local group name: %s" msgstr "Ommappad till lokalt gruppnamn: %s" #: src/services/gridftpd/userspec.cpp:236 #, c-format msgid "Remapped user's home: %s" msgstr "Ommappad användares hem: %s" #: src/services/wrappers/python/pythonwrapper.cpp:103 #, c-format msgid "Loading %u-th Python service" msgstr "Laddar %ue Python-tjänsten" #: src/services/wrappers/python/pythonwrapper.cpp:107 #, c-format msgid "Initialized %u-th Python service" msgstr "Initierade %ue Python-tjänsten" #: src/services/wrappers/python/pythonwrapper.cpp:142 msgid "Invalid class name" msgstr "Ogiltigt klassnamn" #: src/services/wrappers/python/pythonwrapper.cpp:147 #, c-format msgid "class name: %s" msgstr "klassnamn: %s" #: src/services/wrappers/python/pythonwrapper.cpp:148 #, c-format msgid "module name: %s" msgstr "modulnamn: %s" #: src/services/wrappers/python/pythonwrapper.cpp:205 msgid "Cannot find ARC Config class" msgstr "Kan inte hitta ARCs inställningsklass" #: src/services/wrappers/python/pythonwrapper.cpp:212 msgid "Config class is not an object" msgstr "Inställnings-klass är inte ett objekt" #: src/services/wrappers/python/pythonwrapper.cpp:220 msgid "Cannot get dictionary of module" msgstr "Kan inte erhÃ¥lla ordlista för modulen" #: src/services/wrappers/python/pythonwrapper.cpp:229 msgid "Cannot find service class" msgstr "Kan inte hitta tjänsteklass" #: src/services/wrappers/python/pythonwrapper.cpp:238 msgid "Cannot create config argument" msgstr "Kan inte skapa inställningsargument" #: src/services/wrappers/python/pythonwrapper.cpp:245 msgid "Cannot convert config to Python object" msgstr "Kan inte konvertera inställningar till pythonobjekt" #: src/services/wrappers/python/pythonwrapper.cpp:268 #, c-format msgid "%s is not an object" msgstr "%s är inte ett objekt" #: src/services/wrappers/python/pythonwrapper.cpp:274 msgid "Message class is not an object" msgstr "Meddelande-klass är inte ett objekt" #: src/services/wrappers/python/pythonwrapper.cpp:280 msgid "Python Wrapper constructor succeeded" msgstr "Python-wrapper-konstruktor anropad" #: src/services/wrappers/python/pythonwrapper.cpp:295 #, c-format msgid "Python Wrapper destructor (%d)" msgstr "Python-wrapper-destruktor (%d)" #: src/services/wrappers/python/pythonwrapper.cpp:328 msgid "Python interpreter locked" msgstr "Pythontolkare lÃ¥st" #: src/services/wrappers/python/pythonwrapper.cpp:332 msgid "Python interpreter released" msgstr "Pythontolkare frigjord" #: src/services/wrappers/python/pythonwrapper.cpp:403 msgid "Python wrapper process called" msgstr "Python-wrapper-process anropad" #: src/services/wrappers/python/pythonwrapper.cpp:412 msgid "Failed to create input SOAP container" msgstr "Misslyckades med att skapa indata-SOAP-behÃ¥llare" #: src/services/wrappers/python/pythonwrapper.cpp:422 
msgid "Cannot create inmsg argument" msgstr "Kan inte skapa inmsg-argument" #: src/services/wrappers/python/pythonwrapper.cpp:436 msgid "Cannot find ARC Message class" msgstr "Kan inte hitta arcmeddelandeklass" #: src/services/wrappers/python/pythonwrapper.cpp:442 msgid "Cannot convert inmsg to Python object" msgstr "Kan inte konvertera inmsg till pythonobjekt" #: src/services/wrappers/python/pythonwrapper.cpp:451 msgid "Failed to create SOAP containers" msgstr "Misslyckades med att skapa SOAP-behÃ¥llare" #: src/services/wrappers/python/pythonwrapper.cpp:457 msgid "Cannot create outmsg argument" msgstr "Kan inte skapa outmsg-argument" #: src/services/wrappers/python/pythonwrapper.cpp:463 msgid "Cannot convert outmsg to Python object" msgstr "Kan inte konvertera outmsg till pythonobjekt" #: src/tests/client/test_ClientInterface.cpp:36 #: src/tests/client/test_ClientSAML2SSO.cpp:68 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:78 #: src/tests/echo/test_clientinterface.cpp:41 #: src/tests/echo/test_clientinterface.cpp:132 #: src/tests/echo/test_clientinterface.py:12 msgid "Creating a soap client" msgstr "Skapar en echo-klient" #: src/tests/client/test_ClientInterface.cpp:73 #: src/tests/client/test_ClientSAML2SSO.cpp:47 #: src/tests/client/test_ClientSAML2SSO.cpp:71 #: src/tests/count/test_client.cpp:61 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:85 #: src/tests/echo/echo_test4axis2c/test_client.cpp:56 #: src/tests/echo/test.cpp:62 src/tests/echo/test_client.cpp:72 #: src/tests/echo/test_clientinterface.cpp:67 #: src/tests/echo/test_clientinterface.cpp:107 #: src/tests/echo/test_clientinterface.cpp:136 #: src/tests/echo/test_clientinterface.py:22 msgid "Creating and sending request" msgstr "Skapar och skickar begäran" #: src/tests/client/test_ClientInterface.cpp:84 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:97 #: src/tests/echo/test_clientinterface.cpp:78 #: src/tests/echo/test_clientinterface.py:30 msgid "SOAP invocation failed" msgstr "SOAP-anrop misslyckades" #: src/tests/client/test_ClientSAML2SSO.cpp:44 #: src/tests/echo/test_clientinterface.cpp:100 msgid "Creating a http client" msgstr "Skapar en http-klient" #: src/tests/client/test_ClientSAML2SSO.cpp:55 #: src/tests/echo/test_clientinterface.cpp:117 msgid "HTTP with SAML2SSO invocation failed" msgstr "HTTP med SAML2SSO-anrop misslyckades" #: src/tests/client/test_ClientSAML2SSO.cpp:59 #: src/tests/echo/test_clientinterface.cpp:121 msgid "There was no HTTP response" msgstr "Det fanns inget HTTP-svar" #: src/tests/client/test_ClientSAML2SSO.cpp:77 #: src/tests/echo/test_clientinterface.cpp:145 msgid "SOAP with SAML2SSO invocation failed" msgstr "SOAP med SAML2SSO-anrop misslyckades" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:37 #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:38 #: src/tests/delegation/test_delegation_client.cpp:45 #: src/tests/delegation/test_delegation_client.cpp:76 #: src/tests/echo/test_clientinterface.cpp:172 #: src/tests/echo/test_clientinterface.cpp:194 msgid "Creating a delegation soap client" msgstr "Skapar en delegerings-SOAP-klient" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:46 #: src/tests/delegation/test_delegation_client.cpp:51 #: src/tests/echo/test_clientinterface.cpp:178 msgid "Delegation to ARC delegation service failed" msgstr "Delegering till ARCs delegeringstjänst misslyckades" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:50 #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:49 #: 
src/tests/delegation/test_delegation_client.cpp:56 #: src/tests/delegation/test_delegation_client.cpp:88 #: src/tests/echo/test_clientinterface.cpp:182 #: src/tests/echo/test_clientinterface.cpp:205 #, c-format msgid "Delegation ID: %s" msgstr "Delegerings-ID: %s" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:58 #, c-format msgid "Delegated credential from delegation service: %s" msgstr "Delegerad referens från delegeringstjänst: %s" #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:45 #: src/tests/delegation/test_delegation_client.cpp:83 #: src/tests/echo/test_clientinterface.cpp:201 msgid "Delegation to gridsite delegation service failed" msgstr "Delegering till gridsites delegeringstjänst misslyckades" #: src/tests/count/count.cpp:58 msgid "Input is not SOAP" msgstr "Indata är inte SOAP" #: src/tests/count/count.cpp:89 src/tests/echo/echo.cpp:83 msgid "echo: Unauthorized" msgstr "echo: Oauktoriserad" #: src/tests/count/count.cpp:98 src/tests/count/count.cpp:104 #, c-format msgid "Request is not supported - %s" msgstr "Begäran stöds inte - %s" #: src/tests/count/test_client.cpp:50 #: src/tests/echo/echo_test4axis2c/test_client.cpp:43 #: src/tests/echo/test_client.cpp:59 msgid "Failed to load client configuration" msgstr "Misslyckades med att ladda in klientinställningar" #: src/tests/count/test_client.cpp:54 #: src/tests/echo/echo_test4axis2c/test_client.cpp:47 #: src/tests/echo/test.cpp:58 src/tests/echo/test_client.cpp:63 msgid "Client side MCCs are loaded" msgstr "Klientsidans MCCer har laddats in" #: src/tests/count/test_client.cpp:57 #: src/tests/echo/echo_test4axis2c/test_client.cpp:50 #: src/tests/echo/test_client.cpp:66 msgid "Client chain does not have entry point" msgstr "Klientkedjan har ingen ingångspunkt" #: src/tests/count/test_client.cpp:84 #: src/tests/echo/echo_test4axis2c/test_client.cpp:74 #: src/tests/echo/test.cpp:74 src/tests/echo/test_client.cpp:90 msgid "Request failed" msgstr "Begäran misslyckades" #: src/tests/count/test_client.cpp:90 #: src/tests/echo/echo_test4axis2c/test_client.cpp:80 #: src/tests/echo/test.cpp:79 src/tests/echo/test_client.cpp:96 msgid "There is no response" msgstr "Det finns inget svar" #: src/tests/count/test_client.cpp:97 #: src/tests/echo/echo_test4axis2c/test_client.cpp:87 #: src/tests/echo/test_client.cpp:103 msgid "Response is not SOAP" msgstr "Svaret är inte SOAP" #: src/tests/count/test_service.cpp:22 src/tests/echo/test.cpp:23 #: src/tests/echo/test_service.cpp:22 msgid "Creating service side chain" msgstr "Skapar tjänstesidokedjan" #: src/tests/count/test_service.cpp:25 src/tests/echo/test.cpp:26 #: src/tests/echo/test_service.cpp:25 msgid "Failed to load service configuration" msgstr "Misslyckades med att ladda in tjänsteinställningar" #: src/tests/count/test_service.cpp:30 src/tests/echo/test_service.cpp:30 msgid "Service is waiting for requests" msgstr "Tjänsten väntar på begärningar" #: src/tests/echo/test.cpp:32 msgid "Creating client interface" msgstr "Skapar klientgränssnitt" #: src/tests/echo/test.cpp:82 msgid "Request succeed!!!" msgstr "Begäran lyckades!!!" nordugrid-arc-6.14.0/po/PaxHeaders.30264/hu.gmo0000644000000000000000000000013214152153477017104 xustar000000000000000030 mtime=1638455103.840629165 30 atime=1638455103.840629165 30 ctime=1638455103.899630051 nordugrid-arc-6.14.0/po/hu.gmo0000644000175000002070000001754314152153477017073 0ustar00mockbuildmock00000000000000
[binary content of po/hu.gmo omitted: compiled GNU gettext catalog carrying the Hungarian translations of the nordugrid-arc message strings; not reproducible as plain text]
nordugrid-arc-6.14.0/po/PaxHeaders.30264/nordugrid-arc.pot0000644000000000000000000000013214152153476021247 xustar000000000000000030 mtime=1638455102.404607588 30 atime=1638455102.514609241 30 ctime=1638455103.901630082 nordugrid-arc-6.14.0/po/nordugrid-arc.pot0000644000175000002070000204755614152153476021247 0ustar00mockbuildmock00000000000000# SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR NorduGrid collaboration # This file is distributed under the same license as the nordugrid-arc package. # FIRST AUTHOR , YEAR. 
# #, fuzzy msgid "" msgstr "" "Project-Id-Version: nordugrid-arc 6.14.0\n" "Report-Msgid-Bugs-To: support@nordugrid.org\n" "POT-Creation-Date: 2021-12-02 15:25+0100\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "Language: \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=CHARSET\n" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\n" #: src/clients/compute/arccat.cpp:35 src/clients/compute/arcclean.cpp:34 #: src/clients/compute/arcget.cpp:35 src/clients/compute/arckill.cpp:33 #: src/clients/compute/arcrenew.cpp:32 src/clients/compute/arcresub.cpp:36 #: src/clients/compute/arcresume.cpp:32 src/clients/compute/arcstat.cpp:34 msgid "[job ...]" msgstr "" #: src/clients/compute/arccat.cpp:36 msgid "" "The arccat command performs the cat command on the stdout, stderr or grid\n" "manager's error log of the job." msgstr "" #: src/clients/compute/arccat.cpp:43 src/clients/compute/arcclean.cpp:41 #: src/clients/compute/arcget.cpp:42 src/clients/compute/arcinfo.cpp:45 #: src/clients/compute/arckill.cpp:40 src/clients/compute/arcrenew.cpp:37 #: src/clients/compute/arcresub.cpp:41 src/clients/compute/arcresume.cpp:37 #: src/clients/compute/arcstat.cpp:42 src/clients/compute/arcsub.cpp:53 #: src/clients/compute/arcsync.cpp:147 src/clients/compute/arctest.cpp:64 #: src/clients/credentials/arcproxy.cpp:457 src/clients/data/arccp.cpp:641 #: src/clients/data/arcls.cpp:347 src/clients/data/arcmkdir.cpp:125 #: src/clients/data/arcrename.cpp:136 src/clients/data/arcrm.cpp:151 #: src/hed/daemon/unix/main_unix.cpp:341 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1263 #: src/hed/libs/data/DataExternalHelper.cpp:358 #, c-format msgid "%s version %s" msgstr "" #: src/clients/compute/arccat.cpp:52 src/clients/compute/arcclean.cpp:50 #: src/clients/compute/arcget.cpp:51 src/clients/compute/arcinfo.cpp:53 #: src/clients/compute/arckill.cpp:49 src/clients/compute/arcrenew.cpp:46 #: src/clients/compute/arcresub.cpp:50 src/clients/compute/arcresume.cpp:46 #: src/clients/compute/arcstat.cpp:51 src/clients/compute/arcsub.cpp:62 #: src/clients/compute/arcsync.cpp:156 src/clients/compute/arctest.cpp:86 #: src/clients/credentials/arcproxy.cpp:465 src/clients/data/arccp.cpp:648 #: src/clients/data/arcls.cpp:355 src/clients/data/arcmkdir.cpp:133 #: src/clients/data/arcrename.cpp:144 src/clients/data/arcrm.cpp:160 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:174 #: src/services/a-rex/grid-manager/GridManager.cpp:110 #: src/services/a-rex/grid-manager/log/JobLog.cpp:139 #, c-format msgid "Running command: %s" msgstr "" #: src/clients/compute/arccat.cpp:63 src/clients/compute/arcclean.cpp:61 #: src/clients/compute/arcget.cpp:62 src/clients/compute/arcinfo.cpp:65 #: src/clients/compute/arckill.cpp:60 src/clients/compute/arcrenew.cpp:57 #: src/clients/compute/arcresub.cpp:54 src/clients/compute/arcresume.cpp:50 #: src/clients/compute/arcstat.cpp:62 src/clients/compute/arcsub.cpp:66 #: src/clients/compute/arcsync.cpp:167 src/clients/compute/arctest.cpp:90 #: src/clients/data/arccp.cpp:671 src/clients/data/arcls.cpp:377 #: src/clients/data/arcmkdir.cpp:155 src/clients/data/arcrename.cpp:166 #: src/clients/data/arcrm.cpp:182 msgid "Failed configuration initialization" msgstr "" #: src/clients/compute/arccat.cpp:78 src/clients/compute/arcclean.cpp:76 #: src/clients/compute/arcget.cpp:90 src/clients/compute/arckill.cpp:75 #: src/clients/compute/arcrenew.cpp:70 src/clients/compute/arcresub.cpp:85 #: 
src/clients/compute/arcresume.cpp:72 src/clients/compute/arcstat.cpp:71 #, c-format msgid "Cannot read specified jobid file: %s" msgstr "" #: src/clients/compute/arccat.cpp:89 src/clients/compute/arcclean.cpp:87 #: src/clients/compute/arcget.cpp:101 src/clients/compute/arckill.cpp:86 #: src/clients/compute/arcrenew.cpp:81 src/clients/compute/arcresub.cpp:99 #: src/clients/compute/arcresume.cpp:83 src/clients/compute/arcstat.cpp:105 msgid "No jobs given" msgstr "" #: src/clients/compute/arccat.cpp:102 src/clients/compute/arcclean.cpp:100 #: src/clients/compute/arcget.cpp:114 src/clients/compute/arckill.cpp:99 #: src/clients/compute/arcrenew.cpp:94 src/clients/compute/arcresub.cpp:109 #: src/clients/compute/arcresume.cpp:96 src/clients/compute/arcstat.cpp:117 #, c-format msgid "Job list file (%s) doesn't exist" msgstr "" #: src/clients/compute/arccat.cpp:109 src/clients/compute/arcclean.cpp:107 #: src/clients/compute/arcget.cpp:121 src/clients/compute/arckill.cpp:106 #: src/clients/compute/arcrenew.cpp:101 src/clients/compute/arcresub.cpp:116 #: src/clients/compute/arcresume.cpp:103 src/clients/compute/arcstat.cpp:124 #: src/clients/compute/arctest.cpp:335 #, c-format msgid "Unable to read job information from file (%s)" msgstr "" #: src/clients/compute/arccat.cpp:118 src/clients/compute/arcclean.cpp:115 #: src/clients/compute/arcget.cpp:129 src/clients/compute/arckill.cpp:114 #: src/clients/compute/arcrenew.cpp:110 src/clients/compute/arcresub.cpp:124 #: src/clients/compute/arcresume.cpp:112 src/clients/compute/arcstat.cpp:133 #, c-format msgid "Warning: Job not found in job list: %s" msgstr "" #: src/clients/compute/arccat.cpp:131 src/clients/compute/arcclean.cpp:170 #: src/clients/compute/arcget.cpp:142 src/clients/compute/arckill.cpp:126 #: src/clients/compute/arcrenew.cpp:122 src/clients/compute/arcresub.cpp:136 #: src/clients/compute/arcresume.cpp:124 msgid "No jobs" msgstr "" #: src/clients/compute/arccat.cpp:146 #, c-format msgid "Could not create temporary file \"%s\"" msgstr "" #: src/clients/compute/arccat.cpp:147 src/clients/compute/arccat.cpp:153 #, c-format msgid "Cannot create output of %s for any jobs" msgstr "" #: src/clients/compute/arccat.cpp:154 #, c-format msgid "Invalid destination URL %s" msgstr "" #: src/clients/compute/arccat.cpp:172 #, c-format msgid "Job deleted: %s" msgstr "" #: src/clients/compute/arccat.cpp:182 #, c-format msgid "Job has not started yet: %s" msgstr "" #: src/clients/compute/arccat.cpp:223 #, c-format msgid "Cannot determine the %s location: %s" msgstr "" #: src/clients/compute/arccat.cpp:228 #, c-format msgid "Cannot create output of %s for job (%s): Invalid source %s" msgstr "" #: src/clients/compute/arccat.cpp:241 #, c-format msgid "Catting %s for job %s" msgstr "" #: src/clients/compute/arcclean.cpp:35 msgid "The arcclean command removes a job from the computing resource." msgstr "" #: src/clients/compute/arcclean.cpp:139 msgid "" "You are about to remove jobs from the job list for which no information " "could be\n" "found. NOTE: Recently submitted jobs might not have appeared in the " "information\n" "system, and this action will also remove such jobs." msgstr "" #: src/clients/compute/arcclean.cpp:142 msgid "Are you sure you want to clean jobs missing information?" 
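The template header above leaves Plural-Forms as "nplurals=INTEGER; plural=EXPRESSION;" for each language team to fill in (the compiled Hungarian catalog, for instance, declares two plural forms). The entries in this template are plain msgid/msgstr pairs; the hypothetical sketch below only illustrates how a plural entry would be selected through ngettext() once that header is set, and its msgid/msgid_plural pair is made up for illustration rather than taken from this catalog.

// Hypothetical sketch: how a catalog's Plural-Forms header drives plural
// selection through ngettext().  The msgid/msgid_plural pair below is made
// up for illustration and is not part of the nordugrid-arc template.
#include <libintl.h>
#include <cstdio>

static void report_submitted(unsigned long n) {
    // ngettext() picks msgstr[k] where k is computed from the catalog's
    // "plural=EXPRESSION"; without an installed catalog it falls back to the
    // English singular/plural strings passed here.
    std::printf(ngettext("%lu job was submitted\n",
                         "%lu jobs were submitted\n", n), n);
}

int main() {
    report_submitted(1);
    report_submitted(5);
    return 0;
}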
msgstr "" #: src/clients/compute/arcclean.cpp:143 src/clients/compute/arcsync.cpp:221 msgid "y" msgstr "" #: src/clients/compute/arcclean.cpp:143 src/clients/compute/arcsync.cpp:221 msgid "n" msgstr "" #: src/clients/compute/arcclean.cpp:148 msgid "Jobs missing information will not be cleaned!" msgstr "" #: src/clients/compute/arcclean.cpp:164 src/clients/compute/arcresub.cpp:177 #: src/clients/compute/arctest.cpp:339 #, c-format msgid "Warning: Failed to write job information to file (%s)" msgstr "" #: src/clients/compute/arcclean.cpp:165 msgid "" " Run 'arcclean -s Undefined' to remove cleaned jobs from job list" msgstr "" #: src/clients/compute/arcclean.cpp:174 #, c-format msgid "Jobs processed: %d, deleted: %d" msgstr "" #: src/clients/compute/arcget.cpp:36 msgid "The arcget command is used for retrieving the results from a job." msgstr "" #: src/clients/compute/arcget.cpp:78 #, c-format msgid "Job download directory from user configuration file: %s" msgstr "" #: src/clients/compute/arcget.cpp:81 msgid "Job download directory will be created in present working directory." msgstr "" #: src/clients/compute/arcget.cpp:85 #, c-format msgid "Job download directory: %s" msgstr "" #: src/clients/compute/arcget.cpp:152 #, c-format msgid "Unable to create directory for storing results (%s) - %s" msgstr "" #: src/clients/compute/arcget.cpp:162 #, c-format msgid "Results stored at: %s" msgstr "" #: src/clients/compute/arcget.cpp:174 src/clients/compute/arckill.cpp:142 msgid "Warning: Some jobs were not removed from server" msgstr "" #: src/clients/compute/arcget.cpp:175 src/clients/compute/arcget.cpp:182 #: src/clients/compute/arckill.cpp:143 msgid " Use arcclean to remove retrieved jobs from job list" msgstr "" #: src/clients/compute/arcget.cpp:181 src/clients/compute/arckill.cpp:149 #: src/clients/compute/arcresub.cpp:207 #, c-format msgid "Warning: Failed removing jobs from file (%s)" msgstr "" #: src/clients/compute/arcget.cpp:186 #, c-format msgid "" "Jobs processed: %d, successfully retrieved: %d, successfully cleaned: %d" msgstr "" #: src/clients/compute/arcget.cpp:190 #, c-format msgid "Jobs processed: %d, successfully retrieved: %d" msgstr "" #: src/clients/compute/arcinfo.cpp:34 msgid "[resource ...]" msgstr "" #: src/clients/compute/arcinfo.cpp:35 msgid "" "The arcinfo command is used for obtaining the status of computing resources " "on the Grid." msgstr "" #: src/clients/compute/arcinfo.cpp:142 msgid "Information endpoint" msgstr "" #: src/clients/compute/arcinfo.cpp:153 msgid "Submission endpoint" msgstr "" #: src/clients/compute/arcinfo.cpp:155 msgid "status" msgstr "" #: src/clients/compute/arcinfo.cpp:157 msgid "interface" msgstr "" #: src/clients/compute/arcinfo.cpp:176 msgid "ERROR: Failed to retrieve information from the following endpoints:" msgstr "" #: src/clients/compute/arcinfo.cpp:189 msgid "ERROR: Failed to retrieve information" msgstr "" #: src/clients/compute/arcinfo.cpp:191 msgid "from the following endpoints:" msgstr "" #: src/clients/compute/arckill.cpp:34 msgid "The arckill command is used to kill running jobs." 
msgstr "" #: src/clients/compute/arckill.cpp:150 msgid "" " Run 'arcclean -s Undefined' to remove killed jobs from job list" msgstr "" #: src/clients/compute/arckill.cpp:153 #, c-format msgid "Jobs processed: %d, successfully killed: %d, successfully cleaned: %d" msgstr "" #: src/clients/compute/arckill.cpp:155 #, c-format msgid "Jobs processed: %d, successfully killed: %d" msgstr "" #: src/clients/compute/arcrenew.cpp:128 #, c-format msgid "Jobs processed: %d, renewed: %d" msgstr "" #: src/clients/compute/arcresub.cpp:79 msgid "--same and --not-same cannot be specified together." msgstr "" #: src/clients/compute/arcresub.cpp:153 msgid "" "It is not possible to resubmit jobs without new target information discovery" msgstr "" #: src/clients/compute/arcresub.cpp:166 msgid "No jobs to resubmit with the specified status" msgstr "" #: src/clients/compute/arcresub.cpp:173 src/clients/compute/submit.cpp:34 #, c-format msgid "Job submitted with jobid: %s" msgstr "" #: src/clients/compute/arcresub.cpp:178 msgid " To recover missing jobs, run arcsync" msgstr "" #: src/clients/compute/arcresub.cpp:183 #, c-format msgid "Cannot write jobids to file (%s)" msgstr "" #: src/clients/compute/arcresub.cpp:194 #, c-format msgid "" "Resubmission of job (%s) succeeded, but killing the job failed - it will " "still appear in the job list" msgstr "" #: src/clients/compute/arcresub.cpp:203 #, c-format msgid "" "Resubmission of job (%s) succeeded, but cleaning the job failed - it will " "still appear in the job list" msgstr "" #: src/clients/compute/arcresub.cpp:208 msgid " Use arcclean to remove non-existing jobs" msgstr "" #: src/clients/compute/arcresub.cpp:215 msgid "Job resubmission summary:" msgstr "" #: src/clients/compute/arcresub.cpp:217 #, c-format msgid "%d of %d jobs were resubmitted" msgstr "" #: src/clients/compute/arcresub.cpp:219 #, c-format msgid "The following %d were not resubmitted" msgstr "" #: src/clients/compute/arcresume.cpp:130 #, c-format msgid "Jobs processed: %d, resumed: %d" msgstr "" #: src/clients/compute/arcstat.cpp:35 msgid "" "The arcstat command is used for obtaining the status of jobs that have\n" "been submitted to Grid enabled resources." msgstr "" #: src/clients/compute/arcstat.cpp:79 msgid "The 'sort' and 'rsort' flags cannot be specified at the same time." msgstr "" #: src/clients/compute/arcstat.cpp:149 msgid "No jobs found, try later" msgstr "" #: src/clients/compute/arcstat.cpp:193 #, c-format msgid "Status of %d jobs was queried, %d jobs returned information" msgstr "" #: src/clients/compute/arcsub.cpp:45 msgid "[filename ...]" msgstr "" #: src/clients/compute/arcsub.cpp:46 msgid "" "The arcsub command is used for submitting jobs to Grid enabled computing\n" "resources." 
msgstr "" #: src/clients/compute/arcsub.cpp:94 msgid "No job description input specified" msgstr "" #: src/clients/compute/arcsub.cpp:107 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:582 #, c-format msgid "Can not open job description file: %s" msgstr "" #: src/clients/compute/arcsub.cpp:135 src/clients/compute/arcsub.cpp:163 msgid "Invalid JobDescription:" msgstr "" #: src/clients/compute/arcsub.cpp:198 src/clients/compute/arctest.cpp:229 msgid "" "Cannot adapt job description to the submission target when information " "discovery is turned off" msgstr "" #: src/clients/compute/arcsync.cpp:66 src/clients/compute/arcsync.cpp:174 #, c-format msgid "Warning: Unable to open job list file (%s), unknown format" msgstr "" #: src/clients/compute/arcsync.cpp:76 msgid "Found the following jobs:" msgstr "" #: src/clients/compute/arcsync.cpp:86 msgid "Total number of jobs found: " msgstr "" #: src/clients/compute/arcsync.cpp:98 msgid "Found the following new jobs:" msgstr "" #: src/clients/compute/arcsync.cpp:108 msgid "Total number of new jobs found: " msgstr "" #: src/clients/compute/arcsync.cpp:113 #, c-format msgid "ERROR: Failed to write job information to file (%s)" msgstr "" #: src/clients/compute/arcsync.cpp:140 msgid "" "The arcsync command synchronizes your local job list with the information " "at\n" "the given resources or index servers." msgstr "" #: src/clients/compute/arcsync.cpp:180 #, c-format msgid "Warning: Unable to read local list of jobs from file (%s)" msgstr "" #: src/clients/compute/arcsync.cpp:185 #, c-format msgid "Warning: Unable to truncate local list of jobs in file (%s)" msgstr "" #: src/clients/compute/arcsync.cpp:191 #, c-format msgid "Warning: Unable to create job list file (%s), jobs list is destroyed" msgstr "" #: src/clients/compute/arcsync.cpp:195 #, c-format msgid "" "Warning: Failed to write local list of jobs into file (%s), jobs list is " "destroyed" msgstr "" #: src/clients/compute/arcsync.cpp:215 msgid "" "Synchronizing the local list of active jobs with the information in the\n" "information system can result in some inconsistencies. Very recently " "submitted\n" "jobs might not yet be present, whereas jobs very recently scheduled for\n" "deletion can still be present." msgstr "" #: src/clients/compute/arcsync.cpp:220 msgid "Are you sure you want to synchronize your local job list?" msgstr "" #: src/clients/compute/arcsync.cpp:225 msgid "Cancelling synchronization request" msgstr "" #: src/clients/compute/arcsync.cpp:243 msgid "" "No services specified. Please configure default services in the client " "configuration, or specify a cluster or index (-c or -g options, see arcsync -" "h)." msgstr "" #: src/clients/compute/arctest.cpp:57 msgid " " msgstr "" #: src/clients/compute/arctest.cpp:58 msgid "The arctest command is used for testing clusters as resources." msgstr "" #: src/clients/compute/arctest.cpp:70 msgid "" "Nothing to do:\n" "you have to either specify a test job id with -J (--job)\n" "or query information about the certificates with -E (--certificate)\n" msgstr "" #: src/clients/compute/arctest.cpp:77 msgid "" "For the 1st test job you also have to specify a runtime value with -r (--" "runtime) option." 
msgstr "" #: src/clients/compute/arctest.cpp:111 msgid "Certificate information:" msgstr "" #: src/clients/compute/arctest.cpp:115 msgid "No user-certificate found" msgstr "" #: src/clients/compute/arctest.cpp:118 #, c-format msgid "Certificate: %s" msgstr "" #: src/clients/compute/arctest.cpp:120 #, c-format msgid "Subject name: %s" msgstr "" #: src/clients/compute/arctest.cpp:121 #, c-format msgid "Valid until: %s" msgstr "" #: src/clients/compute/arctest.cpp:125 msgid "Unable to determine certificate information" msgstr "" #: src/clients/compute/arctest.cpp:129 msgid "Proxy certificate information:" msgstr "" #: src/clients/compute/arctest.cpp:131 msgid "No proxy found" msgstr "" #: src/clients/compute/arctest.cpp:134 #, c-format msgid "Proxy: %s" msgstr "" #: src/clients/compute/arctest.cpp:135 #, c-format msgid "Proxy-subject: %s" msgstr "" #: src/clients/compute/arctest.cpp:137 msgid "Valid for: Proxy expired" msgstr "" #: src/clients/compute/arctest.cpp:139 msgid "Valid for: Proxy not valid" msgstr "" #: src/clients/compute/arctest.cpp:141 #, c-format msgid "Valid for: %s" msgstr "" #: src/clients/compute/arctest.cpp:146 #, c-format msgid "Certificate issuer: %s" msgstr "" #: src/clients/compute/arctest.cpp:150 msgid "CA-certificates installed:" msgstr "" #: src/clients/compute/arctest.cpp:172 msgid "Unable to detect if issuer certificate is installed." msgstr "" #: src/clients/compute/arctest.cpp:175 msgid "Your issuer's certificate is not installed" msgstr "" #: src/clients/compute/arctest.cpp:189 #, c-format msgid "No test-job, with ID \"%d\"" msgstr "" #: src/clients/compute/arctest.cpp:245 #, c-format msgid "Unable to load broker %s" msgstr "" #: src/clients/compute/arctest.cpp:248 #: src/hed/libs/compute/BrokerPlugin.cpp:106 #, c-format msgid "Broker %s loaded" msgstr "" #: src/clients/compute/arctest.cpp:270 msgid "Test aborted because no resource returned any information" msgstr "" #: src/clients/compute/arctest.cpp:272 src/clients/compute/submit.cpp:170 msgid "" "Unable to adapt job description to any resource, no resource information " "could be obtained." msgstr "" #: src/clients/compute/arctest.cpp:273 src/clients/compute/submit.cpp:171 msgid "Original job description is listed below:" msgstr "" #: src/clients/compute/arctest.cpp:283 msgid "" "ERROR: Test aborted because no suitable resources were found for the test-job" msgstr "" #: src/clients/compute/arctest.cpp:285 msgid "" "ERROR: Dumping job description aborted because no suitable resources were " "found for the test-job" msgstr "" #: src/clients/compute/arctest.cpp:294 #, c-format msgid "Submitting test-job %d:" msgstr "" #: src/clients/compute/arctest.cpp:298 #, c-format msgid "Client version: nordugrid-arc-%s" msgstr "" #: src/clients/compute/arctest.cpp:306 #, c-format msgid "Cannot write jobid (%s) to file (%s)" msgstr "" #: src/clients/compute/arctest.cpp:307 #, c-format msgid "Test submitted with jobid: %s" msgstr "" #: src/clients/compute/arctest.cpp:322 #, c-format msgid "Computing service: %s" msgstr "" #: src/clients/compute/arctest.cpp:328 msgid "Test failed, no more possible targets" msgstr "" #: src/clients/compute/arctest.cpp:341 src/clients/compute/submit.cpp:49 msgid "To recover missing jobs, run arcsync" msgstr "" #: src/clients/compute/arctest.cpp:354 src/clients/compute/submit.cpp:200 #, c-format msgid "" "Unable to prepare job description according to needs of the target resource " "(%s)." 
msgstr "" #: src/clients/compute/arctest.cpp:364 src/clients/compute/submit.cpp:216 #, c-format msgid "" "An error occurred during the generation of job description to be sent to %s" msgstr "" #: src/clients/compute/arctest.cpp:368 src/clients/compute/submit.cpp:220 #, c-format msgid "Job description to be sent to %s:" msgstr "" #: src/clients/compute/submit.cpp:40 #, c-format msgid "Cannot write job IDs to file (%s)" msgstr "" #: src/clients/compute/submit.cpp:45 #, c-format msgid "Unable to open job list file (%s), unknown format" msgstr "" #: src/clients/compute/submit.cpp:47 #, c-format msgid "Failed to write job information to database (%s)" msgstr "" #: src/clients/compute/submit.cpp:51 #, c-format msgid "Record about new job successfully added to the database (%s)" msgstr "" #: src/clients/compute/submit.cpp:57 msgid "Job submission summary:" msgstr "" #: src/clients/compute/submit.cpp:59 #, c-format msgid "%d of %d jobs were submitted" msgstr "" #: src/clients/compute/submit.cpp:61 msgid "The following jobs were not submitted:" msgstr "" #: src/clients/compute/submit.cpp:65 msgid "Job nr." msgstr "" #: src/clients/compute/submit.cpp:75 #, c-format msgid "ERROR: Unable to load broker %s" msgstr "" #: src/clients/compute/submit.cpp:79 msgid "" "ERROR: Job submission aborted because no resource returned any information" msgstr "" #: src/clients/compute/submit.cpp:83 msgid "ERROR: One or multiple job descriptions was not submitted." msgstr "" #: src/clients/compute/submit.cpp:100 #, c-format msgid "" "A computing resource using the GridFTP interface was requested, but\n" "%sthe corresponding plugin could not be loaded. Is the plugin installed?\n" "%sIf not, please install the package 'nordugrid-arc-plugins-globus'.\n" "%sDepending on your type of installation the package name might differ." msgstr "" #: src/clients/compute/submit.cpp:125 #, c-format msgid "Removing endpoint %s: It has an unrequested interface (%s)." msgstr "" #: src/clients/compute/submit.cpp:183 #, c-format msgid "Dumping job description aborted: Unable to load broker %s" msgstr "" #: src/clients/compute/submit.cpp:238 msgid "" "Unable to prepare job description according to needs of the target resource." 
msgstr "" #: src/clients/compute/submit.cpp:322 src/clients/compute/submit.cpp:352 #, c-format msgid "Service endpoint %s (type %s) added to the list for resource discovery" msgstr "" #: src/clients/compute/submit.cpp:332 msgid "" "There are no endpoints in registry that match requested info endpoint type" msgstr "" #: src/clients/compute/submit.cpp:373 #, c-format msgid "Service endpoint %s (type %s) added to the list for direct submission" msgstr "" #: src/clients/compute/submit.cpp:381 msgid "" "There are no endpoints in registry that match requested submission endpoint " "type" msgstr "" #: src/clients/compute/utils.cpp:109 #, c-format msgid "Types of execution services that %s is able to submit jobs to:" msgstr "" #: src/clients/compute/utils.cpp:112 #, c-format msgid "Types of registry services that %s is able to collect information from:" msgstr "" #: src/clients/compute/utils.cpp:115 #, c-format msgid "" "Types of local information services that %s is able to collect information " "from:" msgstr "" #: src/clients/compute/utils.cpp:118 #, c-format msgid "" "Types of local information services that %s is able to collect job " "information from:" msgstr "" #: src/clients/compute/utils.cpp:121 #, c-format msgid "Types of services that %s is able to manage jobs at:" msgstr "" #: src/clients/compute/utils.cpp:124 #, c-format msgid "Job description languages supported by %s:" msgstr "" #: src/clients/compute/utils.cpp:127 #, c-format msgid "Brokers available to %s:" msgstr "" #: src/clients/compute/utils.cpp:150 #, c-format msgid "" "Default broker (%s) is not available. When using %s a broker should be " "specified explicitly (-b option)." msgstr "" #: src/clients/compute/utils.cpp:160 msgid "Proxy expired. Job submission aborted. Please run 'arcproxy'!" msgstr "" #: src/clients/compute/utils.cpp:165 msgid "" "Cannot find any proxy. This application currently cannot run without a " "proxy.\n" " If you have the proxy file in a non-default location,\n" " please make sure the path is specified in the client configuration file.\n" " If you don't have a proxy yet, please run 'arcproxy'!" msgstr "" #: src/clients/compute/utils.cpp:277 msgid "" "It is impossible to mix ARC6 target selection options with legacy options. " "All legacy options will be ignored!" msgstr "" #: src/clients/compute/utils.cpp:345 #, c-format msgid "Unsupported submission endpoint type: %s" msgstr "" #: src/clients/compute/utils.cpp:383 #, c-format msgid "" "Requested to skip resource discovery. 
Will try direct submission to %s and " "%s submission endpoint types" msgstr "" #: src/clients/compute/utils.cpp:389 #, c-format msgid "Unsupported information endpoint type: %s" msgstr "" #: src/clients/compute/utils.cpp:434 msgid "Other actions" msgstr "" #: src/clients/compute/utils.cpp:435 msgid "Brokering and filtering" msgstr "" #: src/clients/compute/utils.cpp:436 msgid "Output format modifiers" msgstr "" #: src/clients/compute/utils.cpp:437 msgid "Behaviour tuning" msgstr "" #: src/clients/compute/utils.cpp:438 msgid "ARC6 submission endpoint selection" msgstr "" #: src/clients/compute/utils.cpp:439 msgid "Legacy options set for defining targets" msgstr "" #: src/clients/compute/utils.cpp:443 msgid "specify computing element hostname or a complete endpoint URL" msgstr "" #: src/clients/compute/utils.cpp:444 msgid "ce" msgstr "" #: src/clients/compute/utils.cpp:448 msgid "registry service URL with optional specification of protocol" msgstr "" #: src/clients/compute/utils.cpp:449 msgid "registry" msgstr "" #: src/clients/compute/utils.cpp:455 msgid "" "require the specified endpoint type for job submission.\n" "\tAllowed values are: arcrest, emies, gridftp or gridftpjob and internal." msgstr "" #: src/clients/compute/utils.cpp:457 src/clients/compute/utils.cpp:464 msgid "type" msgstr "" #: src/clients/compute/utils.cpp:461 msgid "" "require information query using the specified information endpoint type.\n" "\tSpecial value 'NONE' will disable all resource information queries and the " "following brokering.\n" "\tAllowed values are: ldap.nordugrid, ldap.glue2, emies, arcrest and " "internal." msgstr "" #: src/clients/compute/utils.cpp:470 msgid "" "select one or more computing elements: name can be an alias for a single CE, " "a group of CEs or a URL" msgstr "" #: src/clients/compute/utils.cpp:472 src/clients/compute/utils.cpp:477 #: src/clients/compute/utils.cpp:494 src/clients/compute/utils.cpp:614 msgid "name" msgstr "" #: src/clients/compute/utils.cpp:476 msgid "only select jobs that were submitted to this resource" msgstr "" #: src/clients/compute/utils.cpp:483 msgid "" "the computing element specified by URL at the command line should be queried " "using this information interface.\n" "\tAllowed values are: org.nordugrid.ldapng, org.nordugrid.ldapglue2 and org." 
"ogf.glue.emies.resourceinfo" msgstr "" #: src/clients/compute/utils.cpp:486 msgid "interfacename" msgstr "" #: src/clients/compute/utils.cpp:492 msgid "" "selecting a computing element for the new jobs with a URL or an alias, or " "selecting a group of computing elements with the name of the group" msgstr "" #: src/clients/compute/utils.cpp:500 msgid "force migration, ignore kill failure" msgstr "" #: src/clients/compute/utils.cpp:506 msgid "keep the files on the server (do not clean)" msgstr "" #: src/clients/compute/utils.cpp:512 msgid "do not ask for verification" msgstr "" #: src/clients/compute/utils.cpp:516 msgid "truncate the joblist before synchronizing" msgstr "" #: src/clients/compute/utils.cpp:520 msgid "do not collect information, only convert jobs storage format" msgstr "" #: src/clients/compute/utils.cpp:526 src/clients/data/arcls.cpp:288 msgid "long format (more information)" msgstr "" #: src/clients/compute/utils.cpp:532 msgid "print a list of services configured in the client.conf" msgstr "" #: src/clients/compute/utils.cpp:538 msgid "show the stdout of the job (default)" msgstr "" #: src/clients/compute/utils.cpp:542 msgid "show the stderr of the job" msgstr "" #: src/clients/compute/utils.cpp:546 msgid "show the CE's error log of the job" msgstr "" #: src/clients/compute/utils.cpp:550 msgid "show the specified file from job's session directory" msgstr "" #: src/clients/compute/utils.cpp:551 msgid "filepath" msgstr "" #: src/clients/compute/utils.cpp:557 msgid "" "download directory (the job directory will be created in this directory)" msgstr "" #: src/clients/compute/utils.cpp:559 msgid "dirname" msgstr "" #: src/clients/compute/utils.cpp:563 msgid "use the jobname instead of the short ID as the job directory name" msgstr "" #: src/clients/compute/utils.cpp:568 msgid "force download (overwrite existing job directory)" msgstr "" #: src/clients/compute/utils.cpp:574 msgid "instead of the status only the IDs of the selected jobs will be printed" msgstr "" #: src/clients/compute/utils.cpp:578 msgid "sort jobs according to jobid, submissiontime or jobname" msgstr "" #: src/clients/compute/utils.cpp:579 src/clients/compute/utils.cpp:582 msgid "order" msgstr "" #: src/clients/compute/utils.cpp:581 msgid "reverse sorting of jobs according to jobid, submissiontime or jobname" msgstr "" #: src/clients/compute/utils.cpp:585 msgid "show jobs where status information is unavailable" msgstr "" #: src/clients/compute/utils.cpp:589 msgid "show status information in JSON format" msgstr "" #: src/clients/compute/utils.cpp:595 msgid "resubmit to the same resource" msgstr "" #: src/clients/compute/utils.cpp:599 msgid "do not resubmit to the same resource" msgstr "" #: src/clients/compute/utils.cpp:605 msgid "" "remove the job from the local list of jobs even if the job is not found in " "the infosys" msgstr "" #: src/clients/compute/utils.cpp:612 msgid "" "select one or more registries: name can be an alias for a single registry, a " "group of registries or a URL" msgstr "" #: src/clients/compute/utils.cpp:620 msgid "submit test job given by the number" msgstr "" #: src/clients/compute/utils.cpp:621 src/clients/compute/utils.cpp:625 msgid "int" msgstr "" #: src/clients/compute/utils.cpp:624 msgid "test job runtime specified by the number" msgstr "" #: src/clients/compute/utils.cpp:631 msgid "only select jobs whose status is statusstr" msgstr "" #: src/clients/compute/utils.cpp:632 msgid "statusstr" msgstr "" #: src/clients/compute/utils.cpp:638 msgid "all jobs" msgstr "" #: 
src/clients/compute/utils.cpp:644 msgid "jobdescription string describing the job to be submitted" msgstr "" #: src/clients/compute/utils.cpp:646 src/clients/compute/utils.cpp:652 #: src/clients/credentials/arcproxy.cpp:345 #: src/clients/credentials/arcproxy.cpp:352 #: src/clients/credentials/arcproxy.cpp:371 #: src/clients/credentials/arcproxy.cpp:378 #: src/clients/credentials/arcproxy.cpp:396 #: src/clients/credentials/arcproxy.cpp:400 #: src/clients/credentials/arcproxy.cpp:415 #: src/clients/credentials/arcproxy.cpp:425 #: src/clients/credentials/arcproxy.cpp:429 msgid "string" msgstr "" #: src/clients/compute/utils.cpp:650 msgid "jobdescription file describing the job to be submitted" msgstr "" #: src/clients/compute/utils.cpp:658 msgid "select broker method (list available brokers with --listplugins flag)" msgstr "" #: src/clients/compute/utils.cpp:659 msgid "broker" msgstr "" #: src/clients/compute/utils.cpp:662 msgid "the IDs of the submitted jobs will be appended to this file" msgstr "" #: src/clients/compute/utils.cpp:663 src/clients/compute/utils.cpp:685 #: src/clients/compute/utils.cpp:722 src/clients/compute/utils.cpp:730 #: src/clients/credentials/arcproxy.cpp:438 src/clients/data/arccp.cpp:627 #: src/clients/data/arcls.cpp:333 src/clients/data/arcmkdir.cpp:111 #: src/clients/data/arcrename.cpp:122 src/clients/data/arcrm.cpp:137 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:52 msgid "filename" msgstr "" #: src/clients/compute/utils.cpp:667 msgid "" "only use this interface for submitting.\n" "\tAllowed values are: org.nordugrid.gridftpjob or org.nordugrid.gridftp, org." "ogf.glue.emies.activitycreation and org.nordugrid.internal" msgstr "" #: src/clients/compute/utils.cpp:669 src/clients/compute/utils.cpp:711 msgid "InterfaceName" msgstr "" #: src/clients/compute/utils.cpp:676 msgid "skip the service with the given URL during service discovery" msgstr "" #: src/clients/compute/utils.cpp:677 src/clients/compute/utils.cpp:690 #: src/clients/data/arccp.cpp:607 msgid "URL" msgstr "" #: src/clients/compute/utils.cpp:684 msgid "a file containing a list of jobIDs" msgstr "" #: src/clients/compute/utils.cpp:689 msgid "skip jobs that are on a computing element with a given URL" msgstr "" #: src/clients/compute/utils.cpp:695 msgid "submit jobs as dry run (no submission to batch system)" msgstr "" #: src/clients/compute/utils.cpp:698 msgid "submit directly - no resource discovery or matchmaking" msgstr "" #: src/clients/compute/utils.cpp:702 msgid "" "do not submit - dump job description in the language accepted by the target" msgstr "" #: src/clients/compute/utils.cpp:709 msgid "" "only get information about execution targets that support this job submission " "interface.\n" "\tAllowed values are org.nordugrid.gridftpjob or org.nordugrid.gridftp, org."
"ogf.glue.emies.activitycreation and org.nordugrid.internal" msgstr "" #: src/clients/compute/utils.cpp:716 msgid "prints info about installed user- and CA-certificates" msgstr "" #: src/clients/compute/utils.cpp:721 #, c-format msgid "the file storing information about active jobs (default %s)" msgstr "" #: src/clients/compute/utils.cpp:729 src/clients/credentials/arcproxy.cpp:437 #: src/clients/data/arccp.cpp:626 src/clients/data/arcls.cpp:332 #: src/clients/data/arcmkdir.cpp:110 src/clients/data/arcrename.cpp:121 #: src/clients/data/arcrm.cpp:136 msgid "configuration file (default ~/.arc/client.conf)" msgstr "" #: src/clients/compute/utils.cpp:732 src/clients/credentials/arcproxy.cpp:432 #: src/clients/data/arccp.cpp:621 src/clients/data/arcls.cpp:327 #: src/clients/data/arcmkdir.cpp:105 src/clients/data/arcrename.cpp:116 #: src/clients/data/arcrm.cpp:131 msgid "timeout in seconds (default 20)" msgstr "" #: src/clients/compute/utils.cpp:733 src/clients/credentials/arcproxy.cpp:433 #: src/clients/data/arccp.cpp:622 src/clients/data/arcls.cpp:328 #: src/clients/data/arcmkdir.cpp:106 src/clients/data/arcrename.cpp:117 #: src/clients/data/arcrm.cpp:132 msgid "seconds" msgstr "" #: src/clients/compute/utils.cpp:736 msgid "list the available plugins" msgstr "" #: src/clients/compute/utils.cpp:740 src/clients/credentials/arcproxy.cpp:442 #: src/clients/data/arccp.cpp:631 src/clients/data/arcls.cpp:337 #: src/clients/data/arcmkdir.cpp:115 src/clients/data/arcrename.cpp:126 #: src/clients/data/arcrm.cpp:141 #: src/hed/libs/compute/test_jobdescription.cpp:38 #: src/services/a-rex/grid-manager/gm_jobs.cpp:190 #: src/services/a-rex/grid-manager/inputcheck.cpp:81 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:66 msgid "FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG" msgstr "" #: src/clients/compute/utils.cpp:741 src/clients/credentials/arcproxy.cpp:443 #: src/clients/data/arccp.cpp:632 src/clients/data/arcls.cpp:338 #: src/clients/data/arcmkdir.cpp:116 src/clients/data/arcrename.cpp:127 #: src/clients/data/arcrm.cpp:142 #: src/hed/libs/compute/test_jobdescription.cpp:39 #: src/services/a-rex/grid-manager/gm_jobs.cpp:191 #: src/services/a-rex/grid-manager/inputcheck.cpp:82 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:67 msgid "debuglevel" msgstr "" #: src/clients/compute/utils.cpp:743 src/clients/credentials/arcproxy.cpp:446 #: src/clients/data/arccp.cpp:635 src/clients/data/arcls.cpp:341 #: src/clients/data/arcmkdir.cpp:119 src/clients/data/arcrename.cpp:130 #: src/clients/data/arcrm.cpp:145 msgid "print version information" msgstr "" #: src/clients/credentials/arcproxy.cpp:146 #: src/hed/libs/credential/ARCProxyUtil.cpp:1216 #, c-format msgid "There are %d user certificates existing in the NSS database" msgstr "" #: src/clients/credentials/arcproxy.cpp:162 #: src/hed/libs/credential/ARCProxyUtil.cpp:1232 #, c-format msgid "Number %d is with nickname: %s%s" msgstr "" #: src/clients/credentials/arcproxy.cpp:171 #: src/hed/libs/credential/ARCProxyUtil.cpp:1241 #, c-format msgid " expiration time: %s " msgstr "" #: src/clients/credentials/arcproxy.cpp:175 #: src/hed/libs/credential/ARCProxyUtil.cpp:1245 #, c-format msgid " certificate dn: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:176 #: src/hed/libs/credential/ARCProxyUtil.cpp:1246 #, c-format msgid " issuer dn: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:177 #: src/hed/libs/credential/ARCProxyUtil.cpp:1247 #, c-format msgid " serial number: %d" msgstr "" #: src/clients/credentials/arcproxy.cpp:181 #: 
src/hed/libs/credential/ARCProxyUtil.cpp:1251 #, c-format msgid "Please choose the one you would use (1-%d): " msgstr "" #: src/clients/credentials/arcproxy.cpp:246 msgid "" "The arcproxy command creates a proxy from a key/certificate pair which can\n" "then be used to access grid resources." msgstr "" #: src/clients/credentials/arcproxy.cpp:248 msgid "" "Supported constraints are:\n" " validityStart=time (e.g. 2008-05-29T10:20:30Z; if not specified, start " "from now)\n" " validityEnd=time\n" " validityPeriod=time (e.g. 43200 or 12h or 12H; if both validityPeriod and " "validityEnd\n" " not specified, the default is 12 hours for local proxy, and 168 hours " "for delegated\n" " proxy on myproxy server)\n" " vomsACvalidityPeriod=time (e.g. 43200 or 12h or 12H; if not specified, the " "default\n" " is the minimum value of 12 hours and validityPeriod)\n" " myproxyvalidityPeriod=time (lifetime of proxies delegated by myproxy " "server,\n" " e.g. 43200 or 12h or 12H; if not specified, the default is the minimum " "value of\n" " 12 hours and validityPeriod (which is lifetime of the delegated proxy on " "myproxy server))\n" " proxyPolicy=policy content\n" " proxyPolicyFile=policy file\n" " keybits=number - length of the key to generate. Default is 2048 bits.\n" " Special value 'inherit' is to use key length of signing certificate.\n" " signingAlgorithm=name - signing algorithm to use for signing public key of " "proxy.\n" " Possible values are sha1, sha2 (alias for sha256), sha224, sha256, " "sha384, sha512\n" " and inherit (use algorithm of signing certificate). Default is inherit.\n" " With old systems, only sha1 is acceptable.\n" "\n" "Supported information item names are:\n" " subject - subject name of proxy certificate.\n" " identity - identity subject name of proxy certificate.\n" " issuer - issuer subject name of proxy certificate.\n" " ca - subject name of CA which issued initial certificate.\n" " path - file system path to file containing proxy.\n" " type - type of proxy certificate.\n" " validityStart - timestamp when proxy validity starts.\n" " validityEnd - timestamp when proxy validity ends.\n" " validityPeriod - duration of proxy validity in seconds.\n" " validityLeft - duration of proxy validity left in seconds.\n" " vomsVO - VO name represented by VOMS attribute\n" " vomsSubject - subject of certificate for which VOMS attribute is issued\n" " vomsIssuer - subject of service which issued VOMS certificate\n" " vomsACvalidityStart - timestamp when VOMS attribute validity starts.\n" " vomsACvalidityEnd - timestamp when VOMS attribute validity ends.\n" " vomsACvalidityPeriod - duration of VOMS attribute validity in seconds.\n" " vomsACvalidityLeft - duration of VOMS attribute validity left in seconds.\n" " proxyPolicy\n" " keybits - size of proxy certificate key in bits.\n" " signingAlgorithm - algorithm used to sign proxy certificate.\n" "Items are printed in requested order and are separated by newline.\n" "If an item has multiple values they are printed on the same line separated by |.\n" "\n" "Supported password destinations are:\n" " key - for reading private key\n" " myproxy - for accessing credentials at MyProxy service\n" " myproxynew - for creating credentials at MyProxy service\n" " all - for any purpose.\n" "\n" "Supported password sources are:\n" " quoted string (\"password\") - explicitly specified password\n" " int - interactively request password from console\n" " stdin - read password from standard input delimited by newline\n" " file:filename - read password from file named
filename\n" " stream:# - read password from input stream number #.\n" " Currently only 0 (standard input) is supported.\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:308 msgid "path to the proxy file" msgstr "" #: src/clients/credentials/arcproxy.cpp:309 #: src/clients/credentials/arcproxy.cpp:313 #: src/clients/credentials/arcproxy.cpp:317 #: src/clients/credentials/arcproxy.cpp:321 #: src/clients/credentials/arcproxy.cpp:325 #: src/clients/credentials/arcproxy.cpp:329 src/clients/data/arccp.cpp:584 msgid "path" msgstr "" #: src/clients/credentials/arcproxy.cpp:312 msgid "" "path to the certificate file, it can be either PEM, DER, or PKCS12 formatted" msgstr "" #: src/clients/credentials/arcproxy.cpp:316 msgid "" "path to the private key file, if the certificate is in PKCS12 format, then " "no need to give private key" msgstr "" #: src/clients/credentials/arcproxy.cpp:320 msgid "" "path to the trusted certificate directory, only needed for the VOMS client " "functionality" msgstr "" #: src/clients/credentials/arcproxy.cpp:324 msgid "" "path to the top directory of VOMS *.lsc files, only needed for the VOMS " "client functionality" msgstr "" #: src/clients/credentials/arcproxy.cpp:328 msgid "path to the VOMS server configuration file" msgstr "" #: src/clients/credentials/arcproxy.cpp:332 msgid "" "voms<:command>. Specify VOMS server (More than one VOMS server \n" " can be specified like this: --voms VOa:command1 --voms VOb:" "command2). \n" " :command is optional, and is used to ask for specific " "attributes (e.g. roles)\n" " command options are:\n" " all --- put all of this DN's attributes into AC; \n" " list --- list all of the DN's attributes, will not create AC " "extension; \n" " /Role=yourRole --- specify the role, if this DN \n" " has such a role, the role will be put into " "AC; \n" " /voname/groupname/Role=yourRole --- specify the VO, group and " "role; if this DN \n" " has such a role, the role will be put into " "AC. \n" " If this option is not specified, values from configuration " "files are used.\n" " To avoid anything being used, specify -S with an empty value.\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:348 msgid "" "group<:role>. Specify ordering of attributes \n" " Example: --order /knowarc.eu/coredev:Developer,/knowarc.eu/" "testers:Tester \n" " or: --order /knowarc.eu/coredev:Developer --order /knowarc.eu/" "testers:Tester \n" " Note that it does not make sense to specify the order if you have two or " "more different VOMS servers specified" msgstr "" #: src/clients/credentials/arcproxy.cpp:355 msgid "use GSI communication protocol for contacting VOMS services" msgstr "" #: src/clients/credentials/arcproxy.cpp:358 msgid "" "use HTTP communication protocol for contacting VOMS services that provide " "RESTful access \n" " Note for RESTful access, 'list' command and multiple VOMS " "servers are not supported\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:362 msgid "" "use old communication protocol for contacting VOMS services instead of " "RESTful access\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:365 msgid "" "this option is not functional (old GSI proxies are not supported anymore)" msgstr "" #: src/clients/credentials/arcproxy.cpp:368 msgid "print all information about this proxy." msgstr "" #: src/clients/credentials/arcproxy.cpp:371 msgid "print selected information about this proxy."
msgstr "" #: src/clients/credentials/arcproxy.cpp:374 msgid "remove proxy" msgstr "" #: src/clients/credentials/arcproxy.cpp:377 msgid "" "username to MyProxy server (if missing, subject of user certificate is used)" msgstr "" #: src/clients/credentials/arcproxy.cpp:382 msgid "" "don't prompt for a credential passphrase when retrieving a \n" " credential from the MyProxy server. \n" " The precondition of this choice is that the credential was PUT onto\n" " the MyProxy server without a passphrase by using the -R (--" "retrievable_by_cert) \n" " option when it was being PUT onto the MyProxy server. \n" " This option is specific to the GET command when contacting the " "MyProxy server." msgstr "" #: src/clients/credentials/arcproxy.cpp:393 msgid "" "Allow specified entity to retrieve credential without passphrase.\n" " This option is specific to the PUT command when contacting the " "MyProxy server." msgstr "" #: src/clients/credentials/arcproxy.cpp:399 msgid "hostname[:port] of MyProxy server" msgstr "" #: src/clients/credentials/arcproxy.cpp:404 msgid "" "command to MyProxy server. The command can be PUT, GET, INFO, NEWPASS or " "DESTROY.\n" " PUT -- put delegated credentials to the MyProxy server; \n" " GET -- get delegated credentials from the MyProxy server; \n" " INFO -- get and present information about credentials stored " "at the MyProxy server; \n" " NEWPASS -- change password protecting credentials stored at " "the MyProxy server; \n" " DESTROY -- wipe off credentials stored at the MyProxy " "server; \n" " Local credentials (certificate and key) are not necessary " "except in case of PUT. \n" " MyProxy functionality can be used together with VOMS " "functionality.\n" " --voms and --vomses can be used with the GET command if VOMS " "attributes\n" " are required to be included in the proxy.\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:419 msgid "" "use NSS credential database in default Mozilla profiles, \n" " including Firefox, Seamonkey and Thunderbird.\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:424 msgid "proxy constraints" msgstr "" #: src/clients/credentials/arcproxy.cpp:428 msgid "password destination=password source" msgstr "" #: src/clients/credentials/arcproxy.cpp:452 msgid "" "RESTful and old VOMS communication protocols can't be requested " "simultaneously." msgstr "" #: src/clients/credentials/arcproxy.cpp:482 #: src/clients/credentials/arcproxy.cpp:1187 msgid "Failed configuration initialization." msgstr "" #: src/clients/credentials/arcproxy.cpp:511 msgid "" "Failed to find certificate and/or private key or files have improper " "permissions or ownership." msgstr "" #: src/clients/credentials/arcproxy.cpp:512 #: src/clients/credentials/arcproxy.cpp:524 msgid "You may try to increase verbosity to get more information." msgstr "" #: src/clients/credentials/arcproxy.cpp:520 msgid "Failed to find CA certificates" msgstr "" #: src/clients/credentials/arcproxy.cpp:521 msgid "" "Cannot find the CA certificates directory path, please set environment " "variable X509_CERT_DIR, or cacertificatesdirectory in a configuration file." msgstr "" #: src/clients/credentials/arcproxy.cpp:525 msgid "" "The CA certificates directory is required for contacting VOMS and MyProxy " "servers."
msgstr "" #: src/clients/credentials/arcproxy.cpp:537 msgid "" "$X509_VOMS_FILE, and $X509_VOMSES are not set;\n" "User has not specified the location for vomses information;\n" "There is also no vomses location information in the user's configuration file;\n" "Cannot find vomses in default locations: ~/.arc/vomses, ~/.voms/vomses,\n" "$ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/" "vomses,\n" "/etc/vomses, /etc/grid-security/vomses, and the location at the " "corresponding sub-directory" msgstr "" #: src/clients/credentials/arcproxy.cpp:582 msgid "Wrong number of arguments!" msgstr "" #: src/clients/credentials/arcproxy.cpp:590 #: src/clients/credentials/arcproxy.cpp:614 #: src/clients/credentials/arcproxy.cpp:747 msgid "" "Cannot find the path of the proxy file, please setup environment " "X509_USER_PROXY, or proxypath in a configuration file" msgstr "" #: src/clients/credentials/arcproxy.cpp:597 #, c-format msgid "Cannot remove proxy file at %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:599 #, c-format msgid "Cannot remove proxy file at %s, because it's not there" msgstr "" #: src/clients/credentials/arcproxy.cpp:608 msgid "Bearer token is available. It is preferred for job submission." msgstr "" #: src/clients/credentials/arcproxy.cpp:620 #: src/clients/credentials/arcproxy.cpp:753 #, c-format msgid "" "Cannot find file at %s for getting the proxy. Please make sure this file " "exists." msgstr "" #: src/clients/credentials/arcproxy.cpp:626 #: src/clients/credentials/arcproxy.cpp:759 #, c-format msgid "Cannot process proxy file at %s." msgstr "" #: src/clients/credentials/arcproxy.cpp:629 #, c-format msgid "Subject: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:630 #, c-format msgid "Issuer: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:631 #, c-format msgid "Identity: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:633 msgid "Time left for proxy: Proxy expired" msgstr "" #: src/clients/credentials/arcproxy.cpp:635 msgid "Time left for proxy: Proxy not valid yet" msgstr "" #: src/clients/credentials/arcproxy.cpp:637 #, c-format msgid "Time left for proxy: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:638 #, c-format msgid "Proxy path: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:639 #, c-format msgid "Proxy type: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:640 #, c-format msgid "Proxy key length: %i" msgstr "" #: src/clients/credentials/arcproxy.cpp:641 #, c-format msgid "Proxy signature: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:650 msgid "AC extension information for VO " msgstr "" #: src/clients/credentials/arcproxy.cpp:653 msgid "Error detected while parsing this AC" msgstr "" #: src/clients/credentials/arcproxy.cpp:666 msgid "AC is invalid: " msgstr "" #: src/clients/credentials/arcproxy.cpp:696 #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:222 #, c-format msgid "Malformed VOMS AC attribute %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:727 msgid "Time left for AC: AC is not valid yet" msgstr "" #: src/clients/credentials/arcproxy.cpp:729 msgid "Time left for AC: AC has expired" msgstr "" #: src/clients/credentials/arcproxy.cpp:731 #, c-format msgid "Time left for AC: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:838 #, c-format msgid "Information item '%s' is not known" msgstr "" #: src/clients/credentials/arcproxy.cpp:850 msgid "" "Cannot find the user certificate path, please setup environment " "X509_USER_CERT, or certificatepath in a configuration file" msgstr
"" #: src/clients/credentials/arcproxy.cpp:854 msgid "" "Cannot find the user private key path, please setup environment " "X509_USER_KEY, or keypath in a configuration file" msgstr "" #: src/clients/credentials/arcproxy.cpp:878 #, c-format msgid "" "Cannot parse password source expression %s it must be of type=source format" msgstr "" #: src/clients/credentials/arcproxy.cpp:895 #, c-format msgid "" "Cannot parse password type %s. Currently supported values are " "'key','myproxy','myproxynew' and 'all'." msgstr "" #: src/clients/credentials/arcproxy.cpp:910 #, c-format msgid "" "Cannot parse password source %s it must be of source_type or source_type:" "data format. Supported source types are int,stdin,stream,file." msgstr "" #: src/clients/credentials/arcproxy.cpp:924 msgid "Only standard input is currently supported for password source." msgstr "" #: src/clients/credentials/arcproxy.cpp:929 #, c-format msgid "" "Cannot parse password source type %s. Supported source types are int,stdin," "stream,file." msgstr "" #: src/clients/credentials/arcproxy.cpp:968 msgid "The start, end and period can't be set simultaneously" msgstr "" #: src/clients/credentials/arcproxy.cpp:974 #, c-format msgid "The start time that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:981 #, c-format msgid "The period that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:988 #, c-format msgid "The end time that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:997 #, c-format msgid "The end time that you set: %s is before start time: %s." msgstr "" #: src/clients/credentials/arcproxy.cpp:1008 #, c-format msgid "WARNING: The start time that you set: %s is before current time: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1011 #, c-format msgid "WARNING: The end time that you set: %s is before current time: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1021 #, c-format msgid "The VOMS AC period that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:1039 #, c-format msgid "The MyProxy period that you set: %s can't be recognized." msgstr "" #: src/clients/credentials/arcproxy.cpp:1054 #, c-format msgid "The keybits constraint is wrong: %s." 
msgstr "" #: src/clients/credentials/arcproxy.cpp:1068 #: src/hed/libs/credential/ARCProxyUtil.cpp:1271 msgid "The NSS database can not be detected in the Firefox profile" msgstr "" #: src/clients/credentials/arcproxy.cpp:1077 #: src/hed/libs/credential/ARCProxyUtil.cpp:1279 #, c-format msgid "" "There are %d NSS base directories where the certificate, key, and module " "databases live" msgstr "" #: src/clients/credentials/arcproxy.cpp:1079 #: src/hed/libs/credential/ARCProxyUtil.cpp:1283 #, c-format msgid "Number %d is: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1081 #: src/hed/libs/credential/ARCProxyUtil.cpp:1285 #, c-format msgid "Please choose the NSS database you would like to use (1-%d): " msgstr "" #: src/clients/credentials/arcproxy.cpp:1097 #: src/hed/libs/credential/ARCProxyUtil.cpp:1297 #, c-format msgid "NSS database to be accessed: %s\n" msgstr "" #: src/clients/credentials/arcproxy.cpp:1168 #: src/hed/libs/credential/ARCProxyUtil.cpp:1471 #, c-format msgid "Certificate to use is: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1216 #: src/clients/credentials/arcproxy.cpp:1330 #: src/hed/libs/credential/ARCProxyUtil.cpp:1528 msgid "Proxy generation succeeded" msgstr "" #: src/clients/credentials/arcproxy.cpp:1217 #: src/clients/credentials/arcproxy.cpp:1331 #: src/hed/libs/credential/ARCProxyUtil.cpp:1529 #, c-format msgid "Your proxy is valid until: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1236 msgid "" "The old GSI proxies are not supported anymore. Please do not use -O/--old " "option." msgstr "" #: src/clients/credentials/arcproxy.cpp:1255 src/hed/mcc/tls/MCCTLS.cpp:163 #: src/hed/mcc/tls/MCCTLS.cpp:196 src/hed/mcc/tls/MCCTLS.cpp:222 msgid "VOMS attribute parsing failed" msgstr "" #: src/clients/credentials/arcproxy.cpp:1257 msgid "Myproxy server did not return proxy with VOMS AC included" msgstr "" #: src/clients/credentials/arcproxy.cpp:1278 #: src/hed/libs/credential/ARCProxyUtil.cpp:337 msgid "Proxy generation failed: No valid certificate found." msgstr "" #: src/clients/credentials/arcproxy.cpp:1283 #: src/hed/libs/credential/ARCProxyUtil.cpp:343 msgid "Proxy generation failed: No valid private key found." msgstr "" #: src/clients/credentials/arcproxy.cpp:1287 #: src/hed/libs/credential/ARCProxyUtil.cpp:169 #, c-format msgid "Your identity: %s" msgstr "" #: src/clients/credentials/arcproxy.cpp:1289 #: src/hed/libs/credential/ARCProxyUtil.cpp:350 msgid "Proxy generation failed: Certificate has expired." msgstr "" #: src/clients/credentials/arcproxy.cpp:1293 #: src/hed/libs/credential/ARCProxyUtil.cpp:355 msgid "Proxy generation failed: Certificate is not valid yet." msgstr "" #: src/clients/credentials/arcproxy.cpp:1304 msgid "Proxy generation failed: Failed to create temporary file." msgstr "" #: src/clients/credentials/arcproxy.cpp:1312 msgid "Proxy generation failed: Failed to retrieve VOMS information." 
msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:100 #: src/hed/libs/credential/ARCProxyUtil.cpp:838 msgid "Succeeded to get info from MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:144 #: src/hed/libs/credential/ARCProxyUtil.cpp:894 msgid "Succeeded to change password on MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:185 #: src/hed/libs/credential/ARCProxyUtil.cpp:943 msgid "Succeeded to destroy credential on MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:265 #: src/hed/libs/credential/ARCProxyUtil.cpp:1032 #, c-format msgid "Succeeded to get a proxy in %s from MyProxy server %s" msgstr "" #: src/clients/credentials/arcproxy_myproxy.cpp:318 #: src/hed/libs/credential/ARCProxyUtil.cpp:1091 msgid "Succeeded to put a proxy onto MyProxy server" msgstr "" #: src/clients/credentials/arcproxy_proxy.cpp:93 #: src/hed/libs/credential/ARCProxyUtil.cpp:397 #: src/hed/libs/credential/ARCProxyUtil.cpp:1378 msgid "Failed to add VOMS AC extension. Your proxy may be incomplete." msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:63 msgid "" "Failed to process VOMS configuration or no suitable configuration lines " "found." msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:75 #, c-format msgid "Failed to parse requested VOMS lifetime: %s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:93 #: src/hed/libs/credential/ARCProxyUtil.cpp:634 #, c-format msgid "Cannot get VOMS server address information from vomses line: \"%s\"" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:97 #: src/clients/credentials/arcproxy_voms.cpp:99 #: src/hed/libs/credential/ARCProxyUtil.cpp:644 #: src/hed/libs/credential/ARCProxyUtil.cpp:646 #, c-format msgid "Contacting VOMS server (named %s): %s on port: %s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:105 #, c-format msgid "Failed to parse requested VOMS server port number: %s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:122 msgid "List functionality is not supported for RESTful VOMS interface" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:132 #: src/clients/credentials/arcproxy_voms.cpp:188 #, c-format msgid "" "The VOMS server with the information:\n" "\t%s\n" "can not be reached, please make sure it is available." msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:133 #: src/clients/credentials/arcproxy_voms.cpp:138 #: src/clients/credentials/arcproxy_voms.cpp:189 #: src/clients/credentials/arcproxy_voms.cpp:194 #, c-format msgid "" "Collected error is:\n" "\t%s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:137 #: src/clients/credentials/arcproxy_voms.cpp:193 #, c-format msgid "No valid response from VOMS server: %s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:155 msgid "List functionality is not supported for legacy VOMS interface" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:167 #, c-format msgid "Failed to parse VOMS command: %s" msgstr "" #: src/clients/credentials/arcproxy_voms.cpp:204 #, c-format msgid "" "There are %d servers with the same name: %s in your vomses file, but none of " "them can be reached, or can return a valid message." 
msgstr "" #: src/clients/data/arccp.cpp:77 src/clients/data/arccp.cpp:330 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:509 #, c-format msgid "Current transfer FAILED: %s" msgstr "" #: src/clients/data/arccp.cpp:79 src/clients/data/arccp.cpp:135 #: src/clients/data/arccp.cpp:332 src/clients/data/arcls.cpp:225 #: src/clients/data/arcmkdir.cpp:73 src/clients/data/arcrename.cpp:89 #: src/clients/data/arcrm.cpp:95 msgid "This seems like a temporary error, please try again later" msgstr "" #: src/clients/data/arccp.cpp:87 src/clients/data/arccp.cpp:96 #, c-format msgid "Unable to copy %s" msgstr "" #: src/clients/data/arccp.cpp:88 src/clients/data/arccp.cpp:97 #: src/clients/data/arcls.cpp:150 src/clients/data/arcls.cpp:159 #: src/clients/data/arcmkdir.cpp:55 src/clients/data/arcmkdir.cpp:64 #: src/clients/data/arcrename.cpp:67 src/clients/data/arcrename.cpp:76 #: src/clients/data/arcrm.cpp:68 src/clients/data/arcrm.cpp:80 msgid "Invalid credentials, please check proxy and/or CA certificates" msgstr "" #: src/clients/data/arccp.cpp:94 src/clients/data/arcls.cpp:156 #: src/clients/data/arcmkdir.cpp:61 src/clients/data/arcrename.cpp:73 #: src/clients/data/arcrm.cpp:77 msgid "Proxy expired" msgstr "" #: src/clients/data/arccp.cpp:112 src/clients/data/arccp.cpp:116 #: src/clients/data/arccp.cpp:149 src/clients/data/arccp.cpp:153 #: src/clients/data/arccp.cpp:358 src/clients/data/arccp.cpp:363 #: src/clients/data/arcls.cpp:123 src/clients/data/arcmkdir.cpp:28 #: src/clients/data/arcrename.cpp:29 src/clients/data/arcrename.cpp:33 #: src/clients/data/arcrm.cpp:36 #, c-format msgid "Invalid URL: %s" msgstr "" #: src/clients/data/arccp.cpp:128 msgid "Third party transfer is not supported for these endpoints" msgstr "" #: src/clients/data/arccp.cpp:130 msgid "" "Protocol(s) not supported - please check that the relevant gfal2\n" " plugins are installed (gfal2-plugin-* packages)" msgstr "" #: src/clients/data/arccp.cpp:133 #, c-format msgid "Transfer FAILED: %s" msgstr "" #: src/clients/data/arccp.cpp:161 src/clients/data/arccp.cpp:187 #: src/clients/data/arccp.cpp:374 src/clients/data/arccp.cpp:402 #, c-format msgid "Can't read list of sources from file %s" msgstr "" #: src/clients/data/arccp.cpp:166 src/clients/data/arccp.cpp:202 #: src/clients/data/arccp.cpp:379 src/clients/data/arccp.cpp:418 #, c-format msgid "Can't read list of destinations from file %s" msgstr "" #: src/clients/data/arccp.cpp:171 src/clients/data/arccp.cpp:385 msgid "Numbers of sources and destinations do not match" msgstr "" #: src/clients/data/arccp.cpp:216 msgid "Fileset registration is not supported yet" msgstr "" #: src/clients/data/arccp.cpp:222 src/clients/data/arccp.cpp:295 #: src/clients/data/arccp.cpp:456 #, c-format msgid "Unsupported source url: %s" msgstr "" #: src/clients/data/arccp.cpp:226 src/clients/data/arccp.cpp:299 #, c-format msgid "Unsupported destination url: %s" msgstr "" #: src/clients/data/arccp.cpp:233 msgid "" "For registration source must be ordinary URL and destination must be " "indexing service" msgstr "" #: src/clients/data/arccp.cpp:243 #, c-format msgid "Could not obtain information about source: %s" msgstr "" #: src/clients/data/arccp.cpp:250 msgid "" "Metadata of source does not match existing destination. Use the --force " "option to override this." 
msgstr "" #: src/clients/data/arccp.cpp:262 msgid "Failed to accept new file/destination" msgstr "" #: src/clients/data/arccp.cpp:268 src/clients/data/arccp.cpp:274 #, c-format msgid "Failed to register new file/destination: %s" msgstr "" #: src/clients/data/arccp.cpp:436 msgid "Fileset copy to single object is not supported yet" msgstr "" #: src/clients/data/arccp.cpp:446 msgid "Can't extract object's name from source url" msgstr "" #: src/clients/data/arccp.cpp:465 #, c-format msgid "%s. Cannot copy fileset" msgstr "" #: src/clients/data/arccp.cpp:475 src/hed/libs/compute/ExecutionTarget.cpp:256 #: src/hed/libs/compute/ExecutionTarget.cpp:328 #, c-format msgid "Name: %s" msgstr "" #: src/clients/data/arccp.cpp:478 #, c-format msgid "Source: %s" msgstr "" #: src/clients/data/arccp.cpp:479 #, c-format msgid "Destination: %s" msgstr "" #: src/clients/data/arccp.cpp:485 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:516 msgid "Current transfer complete" msgstr "" #: src/clients/data/arccp.cpp:488 msgid "Some transfers failed" msgstr "" #: src/clients/data/arccp.cpp:498 #, c-format msgid "Directory: %s" msgstr "" #: src/clients/data/arccp.cpp:518 msgid "Transfer complete" msgstr "" #: src/clients/data/arccp.cpp:537 msgid "source destination" msgstr "" #: src/clients/data/arccp.cpp:538 msgid "" "The arccp command copies files to, from and between grid storage elements." msgstr "" #: src/clients/data/arccp.cpp:543 msgid "" "use passive transfer (off by default if secure is on, on by default if " "secure is not requested)" msgstr "" #: src/clients/data/arccp.cpp:549 msgid "do not try to force passive transfer" msgstr "" #: src/clients/data/arccp.cpp:554 msgid "" "if the destination is an indexing service and not the same as the source and " "the destination is already registered, then the copy is normally not done. " "However, if this option is specified the source is assumed to be a replica " "of the destination created in an uncontrolled way and the copy is done like " "in case of replication. Using this option also skips validation of completed " "transfers." msgstr "" #: src/clients/data/arccp.cpp:567 msgid "show progress indicator" msgstr "" #: src/clients/data/arccp.cpp:572 msgid "" "do not transfer, but register source into destination. destination must be a " "meta-url." msgstr "" #: src/clients/data/arccp.cpp:578 msgid "use secure transfer (insecure by default)" msgstr "" #: src/clients/data/arccp.cpp:583 msgid "path to local cache (use to put file into cache)" msgstr "" #: src/clients/data/arccp.cpp:588 src/clients/data/arcls.cpp:301 msgid "operate recursively" msgstr "" #: src/clients/data/arccp.cpp:593 src/clients/data/arcls.cpp:306 msgid "operate recursively up to specified level" msgstr "" #: src/clients/data/arccp.cpp:594 src/clients/data/arcls.cpp:307 msgid "level" msgstr "" #: src/clients/data/arccp.cpp:598 msgid "number of retries before failing file transfer" msgstr "" #: src/clients/data/arccp.cpp:599 msgid "number" msgstr "" #: src/clients/data/arccp.cpp:603 msgid "" "physical location to write to when destination is an indexing service. Must " "be specified for indexing services which do not automatically generate " "physical locations. Can be specified multiple times - locations will be " "tried in order until one succeeds." 
msgstr "" #: src/clients/data/arccp.cpp:611 msgid "" "perform third party transfer, where the destination pulls from the source " "(only available with GFAL plugin)" msgstr "" #: src/clients/data/arccp.cpp:617 src/clients/data/arcls.cpp:323 #: src/clients/data/arcmkdir.cpp:101 src/clients/data/arcrename.cpp:112 #: src/clients/data/arcrm.cpp:127 msgid "list the available plugins (protocols supported)" msgstr "" #: src/clients/data/arccp.cpp:656 src/clients/data/arcls.cpp:363 #: src/clients/data/arcmkdir.cpp:141 src/clients/data/arcrename.cpp:152 #: src/clients/data/arcrm.cpp:168 msgid "Protocol plugins available:" msgstr "" #: src/clients/data/arccp.cpp:681 src/clients/data/arcls.cpp:388 #: src/clients/data/arcmkdir.cpp:165 src/clients/data/arcrename.cpp:175 #: src/clients/data/arcrm.cpp:193 msgid "Wrong number of parameters specified" msgstr "" #: src/clients/data/arccp.cpp:686 msgid "Options 'p' and 'n' can't be used simultaneously" msgstr "" #: src/clients/data/arcls.cpp:129 src/clients/data/arcmkdir.cpp:34 #: src/clients/data/arcrm.cpp:43 #, c-format msgid "Can't read list of locations from file %s" msgstr "" #: src/clients/data/arcls.cpp:144 src/clients/data/arcmkdir.cpp:49 #: src/clients/data/arcrename.cpp:61 msgid "Unsupported URL given" msgstr "" #: src/clients/data/arcls.cpp:149 src/clients/data/arcls.cpp:158 #, c-format msgid "Unable to list content of %s" msgstr "" #: src/clients/data/arcls.cpp:228 msgid "Warning: Failed listing files but some information is obtained" msgstr "" #: src/clients/data/arcls.cpp:282 src/clients/data/arcmkdir.cpp:90 msgid "url" msgstr "" #: src/clients/data/arcls.cpp:283 msgid "" "The arcls command is used for listing files in grid storage elements and " "file\n" "index catalogues." msgstr "" #: src/clients/data/arcls.cpp:292 msgid "show URLs of file locations" msgstr "" #: src/clients/data/arcls.cpp:296 msgid "display all available metadata" msgstr "" #: src/clients/data/arcls.cpp:310 msgid "" "show only description of requested object, do not list content of directories" msgstr "" #: src/clients/data/arcls.cpp:314 msgid "treat requested object as directory and always try to list content" msgstr "" #: src/clients/data/arcls.cpp:318 msgid "check readability of object, does not show any information about object" msgstr "" #: src/clients/data/arcls.cpp:393 msgid "Incompatible options --nolist and --forcelist requested" msgstr "" #: src/clients/data/arcls.cpp:398 msgid "Requesting recursion and --nolist has no sense" msgstr "" #: src/clients/data/arcmkdir.cpp:54 src/clients/data/arcmkdir.cpp:63 #, c-format msgid "Unable to create directory %s" msgstr "" #: src/clients/data/arcmkdir.cpp:91 msgid "" "The arcmkdir command creates directories on grid storage elements and " "catalogs." msgstr "" #: src/clients/data/arcmkdir.cpp:96 msgid "make parent directories as needed" msgstr "" #: src/clients/data/arcrename.cpp:41 msgid "Both URLs must have the same protocol, host and port" msgstr "" #: src/clients/data/arcrename.cpp:51 msgid "Cannot rename to or from root directory" msgstr "" #: src/clients/data/arcrename.cpp:55 msgid "Cannot rename to the same URL" msgstr "" #: src/clients/data/arcrename.cpp:66 src/clients/data/arcrename.cpp:75 #, c-format msgid "Unable to rename %s" msgstr "" #: src/clients/data/arcrename.cpp:106 msgid "old_url new_url" msgstr "" #: src/clients/data/arcrename.cpp:107 msgid "The arcrename command renames files on grid storage elements." 
msgstr "" #: src/clients/data/arcrm.cpp:58 #, c-format msgid "Unsupported URL given: %s" msgstr "" #: src/clients/data/arcrm.cpp:67 src/clients/data/arcrm.cpp:79 #, c-format msgid "Unable to remove file %s" msgstr "" #: src/clients/data/arcrm.cpp:115 msgid "url [url ...]" msgstr "" #: src/clients/data/arcrm.cpp:116 msgid "The arcrm command deletes files on grid storage elements." msgstr "" #: src/clients/data/arcrm.cpp:121 msgid "" "remove logical file name registration even if not all physical instances " "were removed" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:53 msgid "Cannot initialize ARCHERY domain name for query" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:60 msgid "Cannot create resolver from /etc/resolv.conf" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:68 msgid "Cannot query service endpoint TXT records from DNS" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:79 msgid "Cannot parse service endpoint TXT records." msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:124 #, c-format msgid "Wrong service record field \"%s\" found in the \"%s\"" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:129 #, c-format msgid "Malformed ARCHERY record found (endpoint url is not defined): %s" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:134 #, c-format msgid "Malformed ARCHERY record found (endpoint type is not defined): %s" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:138 #, c-format msgid "Found service endpoint %s (type %s)" msgstr "" #: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:157 #, c-format msgid "" "Status for service endpoint \"%s\" is set to inactive in ARCHERY. Skipping." msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:229 #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:161 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:149 #, c-format msgid "Job %s has no delegation associated. Can't renew such job." msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:241 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:161 #, c-format msgid "Job %s failed to renew delegation %s." msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:313 #, c-format msgid "Failed to process jobs - wrong response: %u" msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:314 #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:323 #, c-format msgid "Content: %s" msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:317 #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:329 #, c-format msgid "Failed to process job: %s" msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:327 msgid "Failed to process jobs - failed to parse response" msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:340 #, c-format msgid "No response returned: %s" msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:364 #, c-format msgid "Failed to process job: %s - %s %s" msgstr "" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:431 #, c-format msgid "Failed retrieving job description for job: %s" msgstr "" #: src/hed/acc/ARCREST/JobListRetrieverPluginREST.cpp:29 msgid "Collecting Job (A-REX REST jobs) information." 
msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:50 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:84 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:124 msgid "Failed to communicate to delegation endpoint." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:55 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:89 #, c-format msgid "Unexpected response code from delegation endpoint - %u" msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:57 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:91 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:242 #: src/hed/dmc/gridftp/Lister.cpp:223 src/hed/dmc/gridftp/Lister.cpp:243 #: src/hed/dmc/gridftp/Lister.cpp:468 src/hed/dmc/gridftp/Lister.cpp:475 #: src/hed/dmc/gridftp/Lister.cpp:497 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:163 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:196 #, c-format msgid "Response: %s" msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:62 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:96 msgid "Missing response from delegation endpoint." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:71 #, c-format msgid "Unexpected delegation location from delegation endpoint - %s." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:128 #, c-format msgid "Unexpected response code from delegation endpoint: %u, %s." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:177 msgid "Unable to submit jobs. Failed to delegate credentials." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:192 msgid "Failed to prepare job description" msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:201 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:87 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:401 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:116 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:262 #, c-format msgid "Unable to submit job. Job description is not valid in the %s format: %s" msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:208 msgid "Unable to submit job. Failed to assign delegation to job description." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:231 msgid "Failed to submit all jobs." msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:241 #, c-format msgid "Failed to submit all jobs: %u %s" msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:254 #, c-format msgid "Failed to submit all jobs: %s" msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:274 #, c-format msgid "Failed to submit all jobs: %s %s" msgstr "" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:289 msgid "Failed uploading local input files" msgstr "" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:27 msgid "Querying WSRF GLUE2 computing REST endpoint." 
msgstr "" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:60 #, c-format msgid "CONTENT %u: %s" msgstr "" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:64 msgid "Response is not XML" msgstr "" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:69 #, c-format msgid "Parsed domains: %u" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:14 msgid "Sorting according to free slots in queue" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:15 msgid "Random sorting" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:16 msgid "Sorting according to specified benchmark (default \"specint2000\")" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:17 msgid "Sorting according to input data availability at target" msgstr "" #: src/hed/acc/Broker/DescriptorsBroker.cpp:18 msgid "Performs neither sorting nor matching" msgstr "" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:24 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of waiting " "jobs" msgstr "" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:27 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of total slots" msgstr "" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:30 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of free slots" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:81 msgid "Creating an EMI ES client" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:85 msgid "Unable to create SOAP client used by EMIESClient." msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:133 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:505 msgid "Initiating delegation procedure" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:163 msgid "Re-creating an EMI ES client" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:180 #, c-format msgid "Processing a %s request" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:191 #, c-format msgid "%s request failed" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:200 #, c-format msgid "No response from %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:209 #, c-format msgid "%s request to %s failed with response: %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:224 #, c-format msgid "XML response: %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:234 #, c-format msgid "%s request to %s failed. Unexpected response: %s." 
msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:248 src/hed/acc/EMIES/EMIESClient.cpp:355 #, c-format msgid "Creating and sending job submit request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:313 src/hed/acc/EMIES/EMIESClient.cpp:416 #, c-format msgid "Job description to be sent: %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:426 src/hed/acc/EMIES/EMIESClient.cpp:609 #: src/hed/acc/EMIES/EMIESClient.cpp:1098 #, c-format msgid "New limit for vector queries returned by EMI ES service: %d" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:434 src/hed/acc/EMIES/EMIESClient.cpp:617 #: src/hed/acc/EMIES/EMIESClient.cpp:1106 #, c-format msgid "" "Error: Service returned a limit higher or equal to current limit (current: " "%d; returned: %d)" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:502 src/hed/acc/EMIES/EMIESClient.cpp:536 #: src/hed/acc/EMIES/EMIESClient.cpp:592 #, c-format msgid "Creating and sending job information query request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:775 #, c-format msgid "Creating and sending service information request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:832 #, c-format msgid "Creating and sending service information query request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:880 src/hed/acc/EMIES/EMIESClient.cpp:901 #, c-format msgid "Creating and sending job clean request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:922 #, c-format msgid "Creating and sending job suspend request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:943 #, c-format msgid "Creating and sending job resume request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:964 #, c-format msgid "Creating and sending job restart request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:1021 #, c-format msgid "Creating and sending job notify request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:1076 #, c-format msgid "Creating and sending notify request to %s" msgstr "" #: src/hed/acc/EMIES/EMIESClient.cpp:1166 #, c-format msgid "Creating and sending job list request to %s" msgstr "" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:175 #, c-format msgid "Job %s failed to renew delegation %s - %s." 
msgstr "" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:197 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:464 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:191 #, c-format msgid "Job %s does not report a resumable state" msgstr "" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:202 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:196 #, c-format msgid "Resuming job: %s at state: %s (%s)" msgstr "" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:215 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:520 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:205 msgid "Job resuming successful" msgstr "" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:248 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:251 #, c-format msgid "Failed retrieving information for job: %s" msgstr "" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:330 msgid "Retrieving job description of EMI ES jobs is not supported" msgstr "" #: src/hed/acc/EMIES/JobListRetrieverPluginEMIES.cpp:37 #, c-format msgid "Listing jobs succeeded, %d jobs found" msgstr "" #: src/hed/acc/EMIES/JobListRetrieverPluginEMIES.cpp:53 #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.cpp:111 #: src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp:83 #, c-format msgid "" "Skipping retrieved job (%s) because it was submitted via another interface " "(%s)." msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:47 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:41 msgid "" "Failed to delegate credentials to server - no delegation interface found" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:54 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:48 #, c-format msgid "Failed to delegate credentials to server - %s" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:77 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:87 msgid "Failed preparing job description" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:95 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:406 msgid "Unable to submit job. 
Job description is not valid XML" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:154 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:482 msgid "No valid job identifier returned by EMI ES" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:180 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:499 msgid "Job failed on service side" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:190 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:509 msgid "Failed to obtain state of job" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:205 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:521 msgid "Failed to wait for job to allow stage in" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:228 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:541 msgid "Failed to obtain valid stagein URL for input files" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:248 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:558 #, c-format msgid "Failed uploading local input files to %s" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:269 #, c-format msgid "Failed to submit job description: EMIESFault(%s , %s)" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:278 #, c-format msgid "Failed to submit job description: UnexpectedError(%s)" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:315 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:574 msgid "Failed to notify service" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:367 msgid "Failed preparing job description to target resources" msgstr "" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:475 #, c-format msgid "Failed to submit job description: %s" msgstr "" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:30 msgid "Collecting EMI-ES GLUE2 computing info endpoint information." 
msgstr "" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:50 msgid "Generating EMIES targets" msgstr "" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:59 #, c-format msgid "Generated EMIES target: %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:75 #: src/hed/acc/EMIES/TestEMIESClient.cpp:79 #, c-format msgid "Query returned unexpected element: %s:%s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:85 #, c-format msgid "Element validation according to GLUE2 schema failed: %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:114 msgid "Resource query failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:132 msgid "Submission failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:143 msgid "Obtaining status failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:153 msgid "Obtaining information failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:170 msgid "Cleaning failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:177 msgid "Notify failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:184 msgid "Kill failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:190 msgid "List failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:201 #, c-format msgid "Fetching resource description from %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:204 #: src/hed/acc/EMIES/TestEMIESClient.cpp:272 #: src/hed/acc/EMIES/TestEMIESClient.cpp:282 #: src/hed/acc/EMIES/TestEMIESClient.cpp:293 #, c-format msgid "Failed to obtain resource description: %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:213 #: src/hed/acc/EMIES/TestEMIESClient.cpp:217 #, c-format msgid "Resource description contains unexpected element: %s:%s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:223 msgid "Resource description validation according to GLUE2 schema failed: " msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:228 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:560 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:819 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:517 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:706 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:129 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:169 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1214 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1248 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1430 #: src/hed/identitymap/ArgusPDPClient.cpp:444 #: src/hed/identitymap/ArgusPEPClient.cpp:98 #: src/hed/identitymap/ArgusPEPClient.cpp:345 #: src/hed/libs/common/Thread.cpp:242 src/hed/libs/common/Thread.cpp:245 #: src/hed/libs/common/Thread.cpp:248 #: src/hed/libs/credential/Credential.cpp:1048 #: src/hed/libs/data/DataPointDelegate.cpp:628 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:68 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:84 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:100 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:119 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:129 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:137 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:146 #: src/hed/mcc/tls/PayloadTLSMCC.cpp:82 src/hed/shc/arcpdp/ArcPDP.cpp:235 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:293 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:247 #: src/libs/data-staging/Scheduler.cpp:117 #: src/services/a-rex/delegation/DelegationStore.cpp:40 #: src/services/a-rex/delegation/DelegationStore.cpp:45 #: src/services/a-rex/delegation/DelegationStore.cpp:50 #: src/services/a-rex/delegation/DelegationStore.cpp:82 #: src/services/a-rex/delegation/DelegationStore.cpp:88 #: src/services/a-rex/grid-manager/inputcheck.cpp:33 #: 
src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:480 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:551 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:576 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:587 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:598 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:609 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:617 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:623 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:628 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:633 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:643 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:652 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:660 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:671 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:678 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:736 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:743 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:783 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:787 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:790 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:859 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:872 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:889 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:901 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1174 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1179 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1208 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1221 #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:379 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:386 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:426 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:478 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:593 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:627 #, c-format msgid "%s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:248 msgid "Resource description is empty" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:255 #, c-format msgid "Resource description provides URL for interface %s: %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:260 msgid "Resource description provides no URLs for interfaces" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:263 msgid "Resource description validation passed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:265 #, c-format msgid "Requesting ComputingService elements of resource description at %s" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:270 msgid "Performing /Services/ComputingService query" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:274 #: src/hed/acc/EMIES/TestEMIESClient.cpp:284 #: src/hed/acc/EMIES/TestEMIESClient.cpp:295 msgid "Query returned no elements." 
msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:280 msgid "Performing /ComputingService query" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:291 msgid "Performing /* query" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:301 msgid "All queries failed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:331 #, c-format msgid "" "Number of ComputingService elements obtained from full document and XPath " "query do not match: %d != %d" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:334 msgid "Resource description query validation passed" msgstr "" #: src/hed/acc/EMIES/TestEMIESClient.cpp:336 #, c-format msgid "Unsupported command: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:142 #, c-format msgid "Connect: Failed to init handle: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:148 #, c-format msgid "Failed to enable IPv6: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:158 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:172 #, c-format msgid "Connect: Failed to connect: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:165 #, c-format msgid "Connect: Connecting timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:185 #, c-format msgid "Connect: Failed to init auth info handle: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:196 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:210 #, c-format msgid "Connect: Failed authentication: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:203 #, c-format msgid "Connect: Authentication timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:224 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:256 #, c-format msgid "SendCommand: Command: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:229 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:240 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:260 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:271 #, c-format msgid "SendCommand: Failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:235 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:266 #, c-format msgid "SendCommand: Timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:243 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:276 #, c-format msgid "SendCommand: Response: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:293 msgid "FTP Job Control: Failed sending EPSV and PASV commands" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:298 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:304 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:320 #, c-format msgid "FTP Job Control: Server PASV response parsing failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:330 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:336 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:343 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:350 #, c-format msgid "FTP Job Control: Server EPSV response parsing failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:357 #, c-format msgid "FTP Job Control: Server EPSV response port parsing failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:366 #, c-format msgid "FTP Job Control: Failed to apply local address to data connection: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:372 #, c-format msgid "" "FTP Job Control: Can't parse host and/or port in response to EPSV/PASV: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:377 #, c-format msgid "FTP Job Control: Data channel: %d.%d.%d.%d:%d" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:393 #, c-format msgid "FTP Job Control: Data channel: [%s]:%d" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:398 #, c-format 
msgid "FTP Job Control: Local port failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:422 msgid "FTP Job Control: Failed sending DCAU command" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:427 msgid "FTP Job Control: Failed sending TYPE command" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:436 #, c-format msgid "FTP Job Control: Local type failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:446 #, c-format msgid "FTP Job Control: Failed sending STOR command: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:454 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:475 #, c-format msgid "FTP Job Control: Data connect write failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:461 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:469 #, c-format msgid "FTP Job Control: Data connect write timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:487 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:507 #, c-format msgid "FTP Job Control: Data write failed: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:493 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:501 #, c-format msgid "FTP Job Control: Data write timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:527 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:538 #, c-format msgid "Disconnect: Failed aborting - ignoring: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:530 #, c-format msgid "Disconnect: Data close timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:541 #, c-format msgid "Disconnect: Abort timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:549 #, c-format msgid "Disconnect: Failed quitting - ignoring: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:552 #, c-format msgid "Disconnect: Quitting timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:561 #, c-format msgid "Disconnect: Failed closing - ignoring: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:567 #, c-format msgid "Disconnect: Closing timed out after %d ms" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:582 msgid "Disconnect: waiting for globus handle to settle" msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:596 msgid "Disconnect: globus handle is stuck." msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:604 #, c-format msgid "Disconnect: Failed destroying handle: %s. Can't handle such situation." msgstr "" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:607 msgid "Disconnect: handle destroyed." msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:43 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:44 msgid "" "Missing reference to factory and/or module. It is unsafe to use Globus in " "non-persistent mode - SubmitterPlugin for GRIDFTPJOB is disabled. Report to " "developers." msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:58 #, c-format msgid "Unable to query job information (%s), invalid URL provided (%s)" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:70 #, c-format msgid "Jobs left to query: %d" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:79 #, c-format msgid "Querying batch with %d jobs" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:97 msgid "Can't create information handle - is the ARC LDAP DMC plugin available?" 
msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:130 #, c-format msgid "Job information not found in the information system: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:132 msgid "" "This job was very recently submitted and might not yet have reached the " "information system" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:319 #, c-format msgid "Cleaning job: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:323 msgid "Failed to connect for job cleaning" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:335 msgid "Failed sending CWD command for job cleaning" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:342 msgid "Failed sending RMD command for job cleaning" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:349 msgid "Failed to disconnect after job cleaning" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:356 msgid "Job cleaning successful" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:367 #, c-format msgid "Cancelling job: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:371 msgid "Failed to connect for job cancelling" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:383 msgid "Failed sending CWD command for job cancelling" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:390 msgid "Failed sending DELE command for job cancelling" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:397 msgid "Failed to disconnect after job cancelling" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:405 msgid "Job cancelling successful" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:416 #, c-format msgid "Renewing credentials for job: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:420 msgid "Failed to connect for credential renewal" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:432 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:439 msgid "Failed sending CWD command for credentials renewal" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:445 msgid "Failed to disconnect after credentials renewal" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:452 msgid "Renewal of credentials was successful" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:474 #, c-format msgid "Illegal jobID specified (%s)" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:481 #, c-format msgid "HER: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:487 #, c-format msgid "Could not create temporary file: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:556 #, c-format msgid "Trying to retrieve job description of %s from computing resource" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:561 #, c-format msgid "invalid jobID: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:603 msgid "clientxrsl found" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:606 msgid "could not find start of clientxrsl" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:611 msgid "could not find end of clientxrsl" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:624 #, c-format msgid "Job description: %s" msgstr "" #: 
src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:627 msgid "clientxrsl not found" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:633 #, c-format msgid "Invalid JobDescription: %s" msgstr "" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:636 msgid "Valid JobDescription found" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:60 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:206 msgid "Submit: Failed to connect" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:68 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:214 msgid "Submit: Failed sending CWD command" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:79 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:225 msgid "Submit: Failed sending CWD new command" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:106 msgid "Failed to prepare job description." msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:123 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:269 msgid "Submit: Failed sending job description" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:138 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:284 msgid "Submit: Failed uploading local input files" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:194 msgid "" "Submit: service has no suitable information interface - need org.nordugrid." "ldapng" msgstr "" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:252 msgid "Failed to prepare job description to target resources." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:75 #, c-format msgid "[ADLParser] Unsupported EMI ES state %s." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:95 #, c-format msgid "[ADLParser] Unsupported internal state %s." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:105 #, c-format msgid "[ADLParser] Optional for %s elements are not supported yet." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:114 #, c-format msgid "[ADLParser] %s element must be boolean." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:126 #, c-format msgid "[ADLParser] Code in FailIfExitCodeNotEqualTo in %s is not valid number." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:364 msgid "[ADLParser] Root element is not ActivityDescription " msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:411 msgid "[ADLParser] priority is too large - using max value 100" msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:454 #, c-format msgid "[ADLParser] Unsupported URL %s for RemoteLogging." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:473 #, c-format msgid "[ADLParser] Wrong time %s in ExpirationTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:503 msgid "[ADLParser] AccessControl isn't valid XML." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:514 msgid "[ADLParser] CredentialService must contain valid URL." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:543 #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:546 msgid "[ADLParser] Only email Prorocol for Notification is supported yet." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:604 msgid "[ADLParser] Missing or wrong value in ProcessesPerSlot." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:609 msgid "[ADLParser] Missing or wrong value in ThreadsPerProcess." 
msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:615 msgid "" "[ADLParser] Missing Name element or value in ParallelEnvironment/Option " "element." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:632 msgid "[ADLParser] NetworkInfo is not supported yet." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:646 #, c-format msgid "[ADLParser] NodeAccess value %s is not supported yet." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:654 msgid "[ADLParser] Missing or wrong value in NumberOfSlots." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:661 msgid "" "[ADLParser] The NumberOfSlots element should be specified, when the value of " "useNumberOfSlots attribute of SlotsPerHost element is \"true\"." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:668 msgid "[ADLParser] Missing or wrong value in SlotsPerHost." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:697 msgid "[ADLParser] Missing or wrong value in IndividualPhysicalMemory." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:707 msgid "[ADLParser] Missing or wrong value in IndividualVirtualMemory." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:717 msgid "[ADLParser] Missing or wrong value in DiskSpaceRequirement." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:731 msgid "[ADLParser] Benchmark is not supported yet." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:739 msgid "[ADLParser] Missing or wrong value in IndividualCPUTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:747 msgid "[ADLParser] Missing or wrong value in TotalCPUTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:756 msgid "[ADLParser] Missing or wrong value in WallTime." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:776 msgid "[ADLParser] Missing or empty Name in InputFile." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:787 #, c-format msgid "[ADLParser] Wrong URI specified in Source - %s." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:809 msgid "[ADLParser] Missing or empty Name in OutputFile." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:815 #, c-format msgid "[ADLParser] Wrong URI specified in Target - %s." msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:828 #, c-format msgid "Location URI for file %s is invalid" msgstr "" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:853 #, c-format msgid "[ADLParser] CreationFlag value %s is not supported." 
msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:42 msgid "Left operand for RSL concatenation does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:50 msgid "Right operand for RSL concatenation does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:161 msgid "Multi-request operator only allowed at top level" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:186 msgid "RSL substitution is not a sequence" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:192 msgid "RSL substitution sequence is not of length 2" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:211 msgid "RSL substitution variable name does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:220 msgid "RSL substitution variable value does not evaluate to a literal" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:313 msgid "End of comment not found" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:324 msgid "Junk at end of RSL" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:424 msgid "End of single quoted string not found" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:441 msgid "End of double quoted string not found" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:460 #, c-format msgid "End of user delimiter (%s) quoted string not found" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:518 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:546 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:625 msgid "')' expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:528 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:609 msgid "'(' expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:536 msgid "Variable name expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:541 #, c-format msgid "Variable name (%s) contains invalid character (%s)" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:557 msgid "Broken string" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:570 msgid "No left operand for concatenation operator" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:574 msgid "No right operand for concatenation operator" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:638 msgid "Attribute name expected" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:643 #, c-format msgid "Attribute name (%s) contains invalid character (%s)" msgstr "" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:649 msgid "Relation operator expected" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:86 msgid "Error parsing the internally set executables attribute." 
msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:102 #, c-format msgid "" "File '%s' in the 'executables' attribute is not present in the 'inputfiles' " "attribute" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:120 msgid "The value of the ftpthreads attribute must be a number from 1 to 10" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:177 msgid "'stdout' attribute must be specified when 'join' attribute is specified" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:181 msgid "" "Attribute 'join' cannot be specified when both 'stdout' and 'stderr' " "attributes is specified" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:200 msgid "Attributes 'gridtime' and 'cputime' cannot be specified together" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:204 msgid "Attributes 'gridtime' and 'walltime' cannot be specified together" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:226 msgid "" "When specifying 'countpernode' attribute, 'count' attribute must also be " "specified" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:229 msgid "Value of 'countpernode' attribute must be an integer" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:287 msgid "No RSL content in job description found" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:295 msgid "'action' attribute not allowed in user-side job description" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:304 #, c-format msgid "String successfully parsed as %s." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:313 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:331 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:349 #, c-format msgid "Attribute '%s' multiply defined" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:317 #, c-format msgid "Value of attribute '%s' expected to be single value" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:322 #, c-format msgid "Value of attribute '%s' expected to be a string" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:338 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:368 #, c-format msgid "Value of attribute '%s' is not a string" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:356 #, c-format msgid "Value of attribute '%s' is not sequence" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:360 #, c-format msgid "" "Value of attribute '%s' has wrong sequence length: Expected %d, found %d" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:492 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1375 msgid "Unexpected RSL type" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:557 msgid "At least two values are needed for the 'inputfiles' attribute" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:562 msgid "First value of 'inputfiles' attribute (filename) cannot be empty" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:587 #, c-format msgid "Invalid URL '%s' for input file '%s'" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:596 #, c-format msgid "Invalid URL option syntax in option '%s' for input file '%s'" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:606 #, c-format msgid "Invalid URL: '%s' in input file '%s'" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:677 msgid "At least two values are needed for the 'outputfiles' attribute" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:682 msgid "First 
value of 'outputfiles' attribute (filename) cannot be empty" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:694 #, c-format msgid "Invalid URL '%s' for output file '%s'" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:704 #, c-format msgid "Invalid URL option syntax in option '%s' for output file '%s'" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:714 #, c-format msgid "Invalid URL: '%s' in output file '%s'" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:746 #, c-format msgid "" "Invalid comparison operator '%s' used at 'delegationid' attribute, only \"=" "\" is allowed." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:764 #, c-format msgid "" "Invalid comparison operator '%s' used at 'queue' attribute in 'GRIDMANAGER' " "dialect, only \"=\" is allowed" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:770 #, c-format msgid "" "Invalid comparison operator '%s' used at 'queue' attribute, only \"!=\" or " "\"=\" are allowed." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:927 #, c-format msgid "Value of attribute '%s' expected not to be empty" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1036 msgid "The value of the acl XRSL attribute isn't valid XML." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1050 msgid "The cluster XRSL attribute is currently unsupported." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1066 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it must contain an email " "address" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1074 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it must only contain email " "addresses after state flag(s)" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1077 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it contains unknown state " "flags" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1125 msgid "priority is too large - using max value 100" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1158 #, c-format msgid "Invalid nodeaccess value: %s" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1201 msgid "Value of 'count' attribute must be an integer" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1231 msgid "Value of 'exclusiveexecution' attribute must either be 'yes' or 'no'" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1277 #, c-format msgid "Invalid action value %s" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1367 #, c-format msgid "The specified Globus attribute (%s) is not supported. %s ignored." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1371 #, c-format msgid "Unknown XRSL attribute: %s - Ignoring it." msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1385 #, c-format msgid "Wrong language requested: %s" msgstr "" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1722 msgid "" "Cannot output XRSL representation: The Resources.SlotRequirement." "NumberOfSlots attribute must be specified when the Resources.SlotRequirement." "SlotsPerHost attribute is specified." 
msgstr "" #: src/hed/acc/LDAP/Extractor.h:22 #, c-format msgid "Extractor[%s] (%s): %s = %s" msgstr "" #: src/hed/acc/LDAP/Extractor.h:113 src/hed/acc/LDAP/Extractor.h:130 #, c-format msgid "Extractor[%s] (%s): %s contains %s" msgstr "" #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPGLUE2.cpp:54 #, c-format msgid "Adding endpoint '%s' with interface name %s" msgstr "" #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.cpp:72 #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:46 #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:47 #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:57 msgid "Can't create information handle - is the ARC ldap DMC plugin available?" msgstr "" #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:79 #, c-format msgid "Unknown entry in EGIIS (%s)" msgstr "" #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:87 msgid "" "Entry in EGIIS is missing one or more of the attributes 'Mds-Service-type', " "'Mds-Service-hn', 'Mds-Service-port' and/or 'Mds-Service-Ldap-suffix'" msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:219 msgid "" "The \"FreeSlotsWithDuration\" attribute is wrongly formatted. Ignoring it." msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:220 #: src/hed/libs/compute/GLUE2.cpp:248 #, c-format msgid "Wrong format of the \"FreeSlotsWithDuration\" = \"%s\" (\"%s\")" msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:389 #, c-format msgid "Unable to parse the %s.%s value from execution service (%s)." msgstr "" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:390 #, c-format msgid "Value of %s.%s is \"%s\"" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:65 #: src/services/wrappers/python/pythonwrapper.cpp:92 msgid "Failed to initialize main Python thread" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:71 #: src/services/wrappers/python/pythonwrapper.cpp:97 msgid "Main Python thread was not initialized" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:81 #, c-format msgid "Loading Python broker (%i)" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:104 #: src/services/wrappers/python/pythonwrapper.cpp:134 msgid "Main Python thread is not initialized" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:108 msgid "PythonBroker init" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:116 msgid "" "Invalid class name. The broker argument for the PythonBroker should be\n" " Filename.Class.args (args is optional), for example SampleBroker." 
"MyBroker" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:122 #, c-format msgid "Class name: %s" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:123 #, c-format msgid "Module name: %s" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:132 #: src/services/wrappers/python/pythonwrapper.cpp:178 msgid "Cannot convert ARC module name to Python string" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:140 #: src/services/wrappers/python/pythonwrapper.cpp:186 msgid "Cannot import ARC module" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:149 #: src/services/wrappers/python/pythonwrapper.cpp:196 #: src/services/wrappers/python/pythonwrapper.cpp:429 msgid "Cannot get dictionary of ARC module" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:158 msgid "Cannot find ARC UserConfig class" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:166 msgid "UserConfig class is not an object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:173 msgid "Cannot find ARC JobDescription class" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:181 msgid "JobDescription class is not an object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:188 msgid "Cannot find ARC ExecutionTarget class" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:196 msgid "ExecutionTarget class is not an object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:207 #: src/services/wrappers/python/pythonwrapper.cpp:157 msgid "Cannot convert module name to Python string" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:215 #: src/services/wrappers/python/pythonwrapper.cpp:164 msgid "Cannot import module" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:224 msgid "Cannot get dictionary of custom broker module" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:233 msgid "Cannot find custom broker class" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:241 #, c-format msgid "%s class is not an object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:247 msgid "Cannot create UserConfig argument" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:255 msgid "Cannot convert UserConfig to Python object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:263 #: src/services/wrappers/python/pythonwrapper.cpp:253 msgid "Cannot create argument of the constructor" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:272 #: src/services/wrappers/python/pythonwrapper.cpp:261 msgid "Cannot create instance of Python class" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:278 #, c-format msgid "Python broker constructor called (%d)" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:302 #, c-format msgid "Python broker destructor called (%d)" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:311 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:328 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:361 msgid "Cannot create ExecutionTarget argument" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:319 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:336 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:369 #, c-format msgid "Cannot convert ExecutionTarget (%s) to python object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:393 msgid "Cannot create JobDescription argument" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:401 msgid "Cannot convert JobDescription 
to python object" msgstr "" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:422 msgid "Do sorting using user created python broker" msgstr "" #: src/hed/daemon/unix/daemon.cpp:84 #, c-format msgid "Daemonization fork failed: %s" msgstr "" #: src/hed/daemon/unix/daemon.cpp:95 msgid "Watchdog (re)starting application" msgstr "" #: src/hed/daemon/unix/daemon.cpp:100 #, c-format msgid "Watchdog fork failed: %s" msgstr "" #: src/hed/daemon/unix/daemon.cpp:110 msgid "Watchdog starting monitoring" msgstr "" #: src/hed/daemon/unix/daemon.cpp:136 #, c-format msgid "Watchdog detected application exit due to signal %u" msgstr "" #: src/hed/daemon/unix/daemon.cpp:138 #, c-format msgid "Watchdog detected application exited with code %u" msgstr "" #: src/hed/daemon/unix/daemon.cpp:140 msgid "Watchdog detected application exit" msgstr "" #: src/hed/daemon/unix/daemon.cpp:149 msgid "" "Watchdog exiting because application was purposely killed or exited itself" msgstr "" #: src/hed/daemon/unix/daemon.cpp:156 msgid "Watchdog detected application timeout or error - killing process" msgstr "" #: src/hed/daemon/unix/daemon.cpp:167 msgid "Watchdog failed to wait till application exited - sending KILL" msgstr "" #: src/hed/daemon/unix/daemon.cpp:179 msgid "Watchdog failed to kill application - giving up and exiting" msgstr "" #: src/hed/daemon/unix/daemon.cpp:200 msgid "Shutdown daemon" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:43 msgid "shutdown" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:46 msgid "exit" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:84 msgid "No server config part of config file" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:159 #, c-format msgid "Unknown log level %s" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:169 #, c-format msgid "Failed to open log file: %s" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:201 msgid "Start foreground" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:250 #, c-format msgid "XML config file %s does not exist" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:254 src/hed/daemon/unix/main_unix.cpp:269 #, c-format msgid "Failed to load service configuration from file %s" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:260 #, c-format msgid "INI config file %s does not exist" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:265 src/hed/daemon/unix/main_unix.cpp:287 msgid "Error evaluating profile" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:281 msgid "Error loading generated configuration" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:292 msgid "Failed to load service configuration from any default config file" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:353 msgid "Schema validation error" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:368 msgid "Configuration root element is not " msgstr "" #: src/hed/daemon/unix/main_unix.cpp:384 #, c-format msgid "Cannot switch to group (%s)" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:394 #, c-format msgid "Cannot switch to primary group for user (%s)" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:399 #, c-format msgid "Cannot switch to user (%s)" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:417 msgid "Failed to load service side MCCs" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:419 src/tests/count/test_service.cpp:29 #: src/tests/echo/test.cpp:30 src/tests/echo/test_service.cpp:29 msgid "Service side MCCs are loaded" msgstr "" #: src/hed/daemon/unix/main_unix.cpp:426 msgid "Unexpected arguments supplied" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:93 src/hed/dmc/acix/DataPointACIX.cpp:342 #: 
src/hed/dmc/rucio/DataPointRucio.cpp:220 #: src/hed/dmc/rucio/DataPointRucio.cpp:462 #, c-format msgid "No locations found for %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:121 #, c-format msgid "Found none or multiple URLs (%s) in ACIX URL: %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:131 #, c-format msgid "Cannot handle URL %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:138 #, c-format msgid "Could not resolve original source of %s: out of time" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:144 #, c-format msgid "Could not resolve original source of %s: %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:160 #, c-format msgid "Querying ACIX server at %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:161 #, c-format msgid "Calling acix with query %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:167 #, c-format msgid "Failed to query ACIX: %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:171 #: src/hed/dmc/acix/DataPointACIX.cpp:308 #, c-format msgid "Failed to parse ACIX response: %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:298 #, c-format msgid "ACIX returned %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:319 #, c-format msgid "No locations for %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:325 #, c-format msgid "%s: ACIX Location: %s" msgstr "" #: src/hed/dmc/acix/DataPointACIX.cpp:327 #, c-format msgid "%s: Location %s not accessible remotely, skipping" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:86 #, c-format msgid "Unknown channel %s for stdio protocol" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:93 #, c-format msgid "Failed to open stdio channel %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:94 #, c-format msgid "Failed to open stdio channel %d" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:334 #, c-format msgid "fsync of file %s failed: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:338 #: src/hed/dmc/file/DataPointFile.cpp:345 #, c-format msgid "closing file %s failed: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:364 #, c-format msgid "File is not accessible: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:370 #: src/hed/dmc/file/DataPointFile.cpp:455 #, c-format msgid "Can't stat file: %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:416 #: src/hed/dmc/file/DataPointFile.cpp:422 #, c-format msgid "Can't stat stdio channel %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:470 #, c-format msgid "%s is not a directory" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:485 src/hed/dmc/s3/DataPointS3.cpp:440 #: src/hed/dmc/s3/DataPointS3.cpp:550 #, c-format msgid "Failed to read object %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:498 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:517 #, c-format msgid "File is not accessible %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:504 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:523 #, c-format msgid "Can't delete directory %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:511 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:530 #, c-format msgid "Can't delete file %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:521 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:335 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:313 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1466 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:545 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:565 #, c-format msgid "Creating directory %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:529 src/hed/dmc/srm/DataPointSRM.cpp:171 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:579 #, c-format 
msgid "Renaming %s to %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:531 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:588 #, c-format msgid "Can't rename file %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:559 #, c-format msgid "Failed to open %s for reading: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:574 #: src/hed/dmc/file/DataPointFile.cpp:709 #, c-format msgid "Failed to switch user id to %d/%d" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:580 #, c-format msgid "Failed to create/open file %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:596 msgid "Failed to create thread" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:676 #, c-format msgid "Invalid url: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:685 src/hed/libs/data/FileCache.cpp:482 #, c-format msgid "Failed to create directory %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:698 #: src/hed/dmc/file/DataPointFile.cpp:717 #, c-format msgid "Failed to create file %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:729 #, c-format msgid "setting file %s to size %llu" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:749 #, c-format msgid "Failed to preallocate space for %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:790 src/hed/libs/data/FileCache.cpp:856 #, c-format msgid "Failed to clean up file %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:799 #, c-format msgid "Error during file validation. Can't stat file %s: %s" msgstr "" #: src/hed/dmc/file/DataPointFile.cpp:803 #, c-format msgid "" "Error during file validation: Local file size %llu does not match source " "file size %llu for file %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:53 #, c-format msgid "Using proxy %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:54 #, c-format msgid "Using key %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:55 #, c-format msgid "Using cert %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:113 msgid "Locations are missing in destination LFC URL" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:119 #, c-format msgid "Duplicate replica found in LFC: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:121 #, c-format msgid "Adding location: %s - %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:129 #: src/hed/libs/data/DataPointIndex.cpp:161 #, c-format msgid "Add location: url: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:130 #: src/hed/libs/data/DataPointIndex.cpp:162 #, c-format msgid "Add location: metadata: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:150 #: src/hed/dmc/gfal/DataPointGFAL.cpp:310 #, c-format msgid "gfal_open failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:163 #: src/hed/dmc/gfal/DataPointGFAL.cpp:223 #: src/hed/dmc/gfal/DataPointGFAL.cpp:249 #: src/hed/dmc/gfal/DataPointGFAL.cpp:324 #: src/hed/dmc/gfal/DataPointGFAL.cpp:403 #: src/hed/dmc/gfal/DataPointGFAL.cpp:430 #, c-format msgid "gfal_close failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:195 #, c-format msgid "gfal_read failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:237 msgid "StopReading starts waiting for transfer_condition." msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:239 msgid "StopReading finished waiting for transfer_condition." 
msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:271 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:68 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:73 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:42 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:47 #, c-format msgid "No locations defined for %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:278 #, c-format msgid "Failed to set LFC replicas: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:304 #, c-format msgid "gfal_mkdir failed (%s), trying to write anyway" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:359 #, c-format msgid "DataPointGFAL::write_file got position %d and offset %d, has to seek" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:388 #, c-format msgid "gfal_write failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:418 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:388 msgid "StopWriting starts waiting for transfer_condition." msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:420 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:390 msgid "StopWriting finished waiting for transfer_condition." msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:451 #, c-format msgid "gfal_stat failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:496 #, c-format msgid "gfal_listxattr failed, no replica information can be obtained: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:537 #, c-format msgid "gfal_opendir failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:549 #, c-format msgid "List will stat the URL %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:556 #, c-format msgid "gfal_closedir failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:584 #, c-format msgid "gfal_rmdir failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:587 #, c-format msgid "gfal_unlink failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:604 #, c-format msgid "gfal_mkdir failed: %s" msgstr "" #: src/hed/dmc/gfal/DataPointGFAL.cpp:619 #, c-format msgid "gfal_rename failed: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:19 #, c-format msgid "Failed to obtain bytes transferred: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:42 #, c-format msgid "Failed to get initiate GFAL2 parameter handle: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:49 #, c-format msgid "Failed to get initiate new GFAL2 context: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:56 #, c-format msgid "Failed to set GFAL2 monitor callback: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:64 #, c-format msgid "Failed to set overwrite option in GFAL2: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:72 #, c-format msgid "Failed to set GFAL2 transfer timeout, will use default: %s" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:84 msgid "Transfer failed" msgstr "" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:92 msgid "Transfer succeeded" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:38 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:54 msgid "ftp_complete_callback: success" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:44 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:60 #, c-format msgid "ftp_complete_callback: error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:60 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:76 msgid "ftp_check_callback" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:62 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:90 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:116 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:135 
#: src/hed/dmc/gridftp/DataPointGridFTP.cpp:305 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:340 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:678 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:847 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:879 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:913 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1052 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1104 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1114 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1122 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1130 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1138 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1144 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:78 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:106 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:283 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:319 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:729 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:762 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:799 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:930 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:994 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1004 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1012 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1020 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1028 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1034 #: src/services/gridftpd/commands.cpp:1226 #: src/services/gridftpd/dataread.cpp:76 src/services/gridftpd/dataread.cpp:173 #: src/services/gridftpd/datawrite.cpp:59 #: src/services/gridftpd/datawrite.cpp:146 #, c-format msgid "Globus error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:73 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:89 msgid "Excessive data received while checking file access" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:89 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:105 msgid "Registration of Globus FTP buffer failed - cancel check" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:115 msgid "check_ftp: globus_ftp_client_size failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:119 msgid "check_ftp: timeout waiting for size" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:124 msgid "check_ftp: failed to get file's size" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:127 #, c-format msgid "check_ftp: obtained size: %lli" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:134 msgid "check_ftp: globus_ftp_client_modification_time failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:138 msgid "check_ftp: timeout waiting for modification_time" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:143 msgid "check_ftp: failed to get file's modification time" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:148 #, c-format msgid "check_ftp: obtained modification date: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:167 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:145 msgid "check_ftp: globus_ftp_client_get failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:174 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:152 msgid "check_ftp: globus_ftp_client_register_read" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:185 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:164 msgid "check_ftp: timeout waiting for partial get" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:215 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:191 #, c-format msgid "File delete failed, attempting directory delete for %s" msgstr "" 
#: src/hed/dmc/gridftp/DataPointGridFTP.cpp:225 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:202 msgid "delete_ftp: globus_ftp_client_delete failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:231 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:252 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:208 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:230 msgid "delete_ftp: timeout waiting for delete" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:246 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:224 msgid "delete_ftp: globus_ftp_client_rmdir failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:301 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:278 #, c-format msgid "mkdir_ftp: making %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:309 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:287 msgid "mkdir_ftp: timeout waiting for mkdir" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:344 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:323 msgid "Timeout waiting for mkdir" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:370 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:346 msgid "start_reading_ftp" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:374 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:350 msgid "start_reading_ftp: globus_ftp_client_get" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:388 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:364 msgid "start_reading_ftp: globus_ftp_client_get failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:399 msgid "start_reading_ftp: globus_thread_create failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:418 msgid "stop_reading_ftp: aborting connection" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:425 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:647 #, c-format msgid "Failed to abort transfer of ftp file: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:426 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:648 msgid "Assuming transfer is already aborted or failed." 
msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:433 msgid "stop_reading_ftp: waiting for transfer to finish" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:435 #, c-format msgid "stop_reading_ftp: exiting: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:449 msgid "ftp_read_thread: get and register buffers" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:455 #, c-format msgid "ftp_read_thread: for_read failed - aborting: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:465 #, c-format msgid "ftp_read_thread: data callback failed - aborting: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:477 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:380 #, c-format msgid "ftp_read_thread: Globus error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:490 #, c-format msgid "ftp_read_thread: too many registration failures - abort: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:495 #, c-format msgid "ftp_read_thread: failed to register Globus buffer - will try later: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:508 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:396 msgid "ftp_read_thread: waiting for eof" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:512 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:400 msgid "ftp_read_thread: waiting for buffers released" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:516 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:408 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:662 msgid "ftp_read_thread: failed to release buffers - leaking" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:521 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:415 msgid "ftp_read_thread: exiting" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:539 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:436 #, c-format msgid "ftp_read_callback: failure: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:542 msgid "ftp_read_callback: success" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:558 msgid "Failed to get ftp file" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:594 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:543 msgid "start_writing_ftp: mkdir" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:597 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:545 msgid "start_writing_ftp: mkdir failed - still trying to write" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:599 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:547 msgid "start_writing_ftp: put" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:613 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:561 msgid "start_writing_ftp: put failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:623 msgid "start_writing_ftp: globus_thread_create failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:640 #: src/hed/libs/data/DataPointDelegate.cpp:307 msgid "StopWriting: aborting connection" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:664 #: src/hed/libs/data/DataPointDelegate.cpp:321 #, c-format msgid "StopWriting: Calculated checksum %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:668 #: src/hed/libs/data/DataPointDelegate.cpp:325 #, c-format msgid "StopWriting: looking for checksum of %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:677 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:912 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:798 msgid "list_files_ftp: globus_ftp_client_cksm failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:681 #: 
src/hed/dmc/gridftp/DataPointGridFTP.cpp:916 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:802 msgid "list_files_ftp: timeout waiting for cksum" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:688 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:923 msgid "list_files_ftp: no checksum information possible" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:691 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:926 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:815 #, c-format msgid "list_files_ftp: checksum %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:694 #: src/hed/libs/data/DataPointDelegate.cpp:332 msgid "" "Checksum type returned by server is different to requested type, cannot " "compare" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:696 #: src/hed/libs/data/DataPointDelegate.cpp:334 #, c-format msgid "Calculated checksum %s matches checksum reported by server" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:699 #: src/hed/libs/data/DataPointDelegate.cpp:337 #, c-format msgid "" "Checksum mismatch between calculated checksum %s and checksum reported by " "server %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:721 msgid "ftp_write_thread: get and register buffers" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:725 msgid "ftp_write_thread: for_write failed - aborting" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:743 msgid "ftp_write_thread: data callback failed - aborting" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:759 msgid "ftp_write_thread: waiting for eof" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:763 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:660 msgid "ftp_write_thread: waiting for buffers released" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:771 msgid "ftp_write_thread: failed to release buffers - leaking" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:776 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:667 msgid "ftp_write_thread: exiting" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:799 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:686 #, c-format msgid "ftp_write_callback: failure: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:802 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:688 #, c-format msgid "ftp_write_callback: success %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:817 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:704 msgid "Failed to store ftp file" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:825 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:709 msgid "ftp_put_complete_callback: success" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:841 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:723 #, c-format msgid "list_files_ftp: looking for size of %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:845 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:727 msgid "list_files_ftp: globus_ftp_client_size failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:851 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:852 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:733 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:734 msgid "list_files_ftp: timeout waiting for size" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:858 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:740 msgid "list_files_ftp: failed to get file's size" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:870 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:753 #, c-format msgid "list_files_ftp: looking for modification time 
of %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:876 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:759 msgid "list_files_ftp: globus_ftp_client_modification_time failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:883 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:766 msgid "list_files_ftp: timeout waiting for modification_time" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:891 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:774 msgid "list_files_ftp: failed to get file's modification time" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:903 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:788 #, c-format msgid "list_files_ftp: looking for checksum of %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:942 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:828 #, c-format msgid "Failed to obtain stat from FTP: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:948 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:833 msgid "No results returned from stat" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:954 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:839 #, c-format msgid "Wrong number of objects (%i) for stat from ftp: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:968 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:852 #, c-format msgid "Unexpected path %s returned from server" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1007 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:885 #, c-format msgid "Failed to obtain listing from FTP: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1050 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:928 msgid "Rename: globus_ftp_client_move failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1056 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:934 msgid "Rename: timeout waiting for operation to complete" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1103 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:993 msgid "init_handle: globus_ftp_client_handleattr_init failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1112 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1002 msgid "init_handle: globus_ftp_client_handleattr_set_gridftp2 failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1121 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1011 msgid "init_handle: globus_ftp_client_handle_init failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1128 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1018 msgid "init_handle: globus_ftp_client_operationattr_init failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1136 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1026 msgid "init_handle: globus_ftp_client_operationattr_set_allow_ipv6 failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1142 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1032 msgid "init_handle: globus_ftp_client_operationattr_set_delayed_pasv failed" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1190 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1218 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1084 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1113 #, c-format msgid "globus_ftp_client_operationattr_set_authorization: error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1217 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1112 msgid "Failed to set credentials for GridFTP transfer" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1223 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1118 
msgid "Using secure data transfer" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1228 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1123 msgid "Using insecure data transfer" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1255 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1150 msgid "~DataPoint: destroy ftp_handle" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1258 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1153 msgid "~DataPoint: destroy ftp_handle failed - retrying" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1276 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1171 msgid "~DataPoint: failed to destroy ftp_handle - leaking" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1296 msgid "" "Missing reference to factory and/or module. It is unsafe to use Globus in " "non-persistent mode - (Grid)FTP code is disabled. Report to developers." msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:388 msgid "ftp_read_thread: failed to register buffers" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:403 msgid "ftp_read_thread: failed to release buffers" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:441 #, c-format msgid "ftp_read_callback: success - offset=%u, length=%u, eof=%u, allow oof=%u" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:455 #, c-format msgid "ftp_read_callback: delayed data chunk: %llu %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:462 #, c-format msgid "ftp_read_callback: unexpected data out of order: %llu != %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:469 msgid "ftp_read_callback: too many unexpected out of order chunks" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:490 #, c-format msgid "ftp_read_callback: Globus error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:515 msgid "ftp_get_complete_callback: Failed to get ftp file" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:520 msgid "ftp_get_complete_callback: success" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:575 msgid "start_writing_ftp: waiting for data tag" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:578 msgid "start_writing_ftp: failed to read data tag" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:583 msgid "start_writing_ftp: waiting for data chunk" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:585 msgid "start_writing_ftp: failed to read data chunk" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:596 #, c-format msgid "ftp_write_thread: data out of order in stream mode: %llu != %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:603 msgid "ftp_write_thread: too many out of order chunks in stream mode" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:608 #, c-format msgid "start_writing_ftp: data chunk: %llu %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:614 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:640 #, c-format msgid "ftp_write_thread: Globus error: %s" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:633 #, c-format msgid "start_writing_ftp: delayed data chunk: %llu %llu" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:652 msgid "start_writing_ftp: waiting for some buffers sent" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:658 msgid "ftp_write_thread: waiting for transfer complete" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:809 msgid "list_files_ftp: no 
checksum information supported" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:811 msgid "list_files_ftp: no checksum information returned" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:906 msgid "Too many failures to obtain checksum - giving up" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1266 msgid "Expecting Command and URL provided" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1273 #: src/hed/libs/data/DataExternalHelper.cpp:376 msgid "Expecting Command among arguments" msgstr "" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1277 #: src/hed/libs/data/DataExternalHelper.cpp:380 msgid "Expecting URL among arguments" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:221 src/hed/dmc/gridftp/Lister.cpp:289 #: src/hed/dmc/gridftp/Lister.cpp:384 src/hed/dmc/gridftp/Lister.cpp:767 #: src/hed/dmc/gridftp/Lister.cpp:812 #, c-format msgid "Failure: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:288 msgid "Error getting list of files (in list)" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:290 msgid "Assuming - file not found" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:307 #, c-format msgid "list record: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:362 msgid "Failed reading list of files" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:398 msgid "Failed reading data" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:426 #, c-format msgid "Command: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:430 src/hed/dmc/gridftp/Lister.cpp:471 #: src/hed/mcc/http/PayloadHTTP.cpp:990 msgid "Memory allocation error" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:438 #, c-format msgid "%s failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:442 msgid "Command is being sent" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:447 msgid "Waiting for response" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:452 msgid "Callback got failure" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:538 msgid "Failed in globus_cond_init" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:542 msgid "Failed in globus_mutex_init" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:549 msgid "Failed allocating memory for handle" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:554 msgid "Failed in globus_ftp_control_handle_init" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:562 msgid "Failed to enable IPv6" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:573 src/services/gridftpd/commands.cpp:984 msgid "Closing connection" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:580 src/hed/dmc/gridftp/Lister.cpp:595 msgid "Timeout waiting for Globus callback - leaking connection" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:605 msgid "Closed successfully" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:607 msgid "Closing may have failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:634 msgid "Waiting for globus handle to settle" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:639 #, c-format msgid "Handle is not in proper state %u/%u" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:645 msgid "Globus handle is stuck" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:661 #, c-format msgid "Failed destroying handle: %s. Can't handle such situation." 
msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:684 #, c-format msgid "EPSV failed: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:688 msgid "EPSV failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:695 #, c-format msgid "PASV failed: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:699 msgid "PASV failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:765 msgid "Failed to apply local address to data connection" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:783 msgid "Can't parse host and/or port in response to EPSV/PASV" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:788 #, c-format msgid "Data channel: %d.%d.%d.%d:%d" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:806 #, c-format msgid "Data channel: [%s]:%d" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:810 msgid "Obtained host and address are not acceptable" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:820 msgid "Failed to open data channel" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:838 #, c-format msgid "Unsupported protocol in url %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:850 msgid "Reusing connection" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:874 #, c-format msgid "Failed connecting to server %s:%d" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:880 #, c-format msgid "Failed to connect to server %s:%d" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:896 msgid "Missing authentication information" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:905 src/hed/dmc/gridftp/Lister.cpp:919 #, c-format msgid "Bad authentication information: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:928 src/hed/dmc/gridftp/Lister.cpp:943 #, c-format msgid "Failed authenticating: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:935 msgid "Failed authenticating" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:970 src/hed/dmc/gridftp/Lister.cpp:1126 #, c-format msgid "DCAU failed: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:974 src/hed/dmc/gridftp/Lister.cpp:1131 msgid "DCAU failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:994 msgid "MLST is not supported - trying LIST" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1010 #, c-format msgid "Immediate completion expected: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1014 msgid "Immediate completion expected" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1027 #, c-format msgid "Missing information in reply: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1061 #, c-format msgid "Missing final reply: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1085 #, c-format msgid "Unexpected immediate completion: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1097 #, c-format msgid "LIST/MLST failed: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1102 msgid "LIST/MLST failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1152 msgid "MLSD is not supported - trying NLST" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1166 #, c-format msgid "Immediate completion: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1174 #, c-format msgid "NLST/MLSD failed: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1180 msgid "NLST/MLSD failed" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1201 #, c-format msgid "Data transfer aborted: %s" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1206 msgid "Data transfer aborted" msgstr "" #: src/hed/dmc/gridftp/Lister.cpp:1218 msgid "Failed to transfer data" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:391 #: src/hed/dmc/http/DataPointHTTP.cpp:520 #: src/hed/dmc/http/DataPointHTTP.cpp:601 #: src/hed/dmc/http/DataPointHTTP.cpp:1003 #: src/hed/dmc/http/DataPointHTTP.cpp:1147 #: src/hed/dmc/http/DataPointHTTP.cpp:1292 #, 
c-format msgid "Redirecting to %s" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:673 #, c-format msgid "Stat: obtained size %llu" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:677 #, c-format msgid "Stat: obtained modification time %s" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:906 #, c-format msgid "Check: obtained size %llu" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:908 #, c-format msgid "Check: obtained modification time %s" msgstr "" #: src/hed/dmc/http/DataPointHTTP.cpp:1020 #: src/hed/dmc/http/DataPointHTTP.cpp:1167 #, c-format msgid "HTTP failure %u - %s" msgstr "" #: src/hed/dmc/ldap/DataPointLDAP.cpp:36 msgid "" "Missing reference to factory and/or module. Currently safe unloading of LDAP " "DMC is not supported. Report to developers." msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:151 msgid "SASL Interaction" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:199 #, c-format msgid "Challenge: %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:203 #, c-format msgid "Default: %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:279 #, c-format msgid "LDAPQuery: Initializing connection to %s:%d" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:283 #, c-format msgid "LDAP connection already open to %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:297 #, c-format msgid "Could not open LDAP connection to %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:318 #, c-format msgid "Failed to create ldap bind thread (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:325 #, c-format msgid "Ldap bind timeout (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:332 #, c-format msgid "Failed to bind to ldap server (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:353 #, c-format msgid "Could not set LDAP network timeout (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:361 #, c-format msgid "Could not set LDAP timelimit (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:368 #, c-format msgid "Could not set LDAP protocol version (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:436 #, c-format msgid "LDAPQuery: Querying %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:438 #, c-format msgid " base dn: %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:440 #, c-format msgid " filter: %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:442 msgid " attributes:" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:445 #: src/services/gridftpd/misc/ldapquery.cpp:399 #, c-format msgid " %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:482 src/hed/dmc/ldap/LDAPQuery.cpp:548 #, c-format msgid "%s (%s)" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:506 #, c-format msgid "LDAPQuery: Getting results from %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:509 #, c-format msgid "Error: no LDAP query started to %s" msgstr "" #: src/hed/dmc/ldap/LDAPQuery.cpp:543 #, c-format msgid "LDAP query timed out: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:23 #, c-format msgid "Replacing existing token for %s in Rucio token cache" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:36 #, c-format msgid "Found existing token for %s in Rucio token cache with expiry time %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:39 #, c-format msgid "Rucio token for %s has expired or is about to expire" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:101 #, c-format msgid "Extracted nickname %s from credentials to use for RUCIO_ACCOUNT" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:104 msgid "Failed to extract VOMS nickname from proxy" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:106 #, c-format msgid "Using Rucio account %s" msgstr "" #: 
src/hed/dmc/rucio/DataPointRucio.cpp:146 #, c-format msgid "" "Bad path for %s: Rucio supports read/write at /objectstores and read-only " "at /replicas" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:162 #: src/services/candypond/CandyPond.cpp:140 #: src/services/candypond/CandyPond.cpp:347 #, c-format msgid "Can't handle URL %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:310 #, c-format msgid "Acquired auth token for %s: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:367 #, c-format msgid "Rucio returned %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:393 #, c-format msgid "Failed to parse Rucio response: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:399 #, c-format msgid "Filename not returned in Rucio response: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:405 #, c-format msgid "Unexpected name returned in Rucio response: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:411 #, c-format msgid "No pfns returned in Rucio response: %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:422 #, c-format msgid "Cannot determine replica type for %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:424 #, c-format msgid "%s: replica type %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:427 #, c-format msgid "Skipping %s replica %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:447 #, c-format msgid "No filesize information returned in Rucio response for %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:450 #, c-format msgid "%s: size %llu" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:454 #, c-format msgid "No checksum information returned in Rucio response for %s" msgstr "" #: src/hed/dmc/rucio/DataPointRucio.cpp:457 #, c-format msgid "%s: checksum %s" msgstr "" #: src/hed/dmc/s3/DataPointS3.cpp:621 #, c-format msgid "Failed to write object %s: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:59 #, c-format msgid "TURL %s cannot be handled" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:86 #, c-format msgid "Check: looking for metadata: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:97 #, c-format msgid "Check: obtained size: %lli" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:103 #, c-format msgid "Check: obtained checksum: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:107 #, c-format msgid "Check: obtained modification date: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:111 msgid "Check: obtained access latency: low (ONLINE)" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:115 msgid "Check: obtained access latency: high (NEARLINE)" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:134 #, c-format msgid "Remove: deleting: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:152 #, c-format msgid "Creating directory: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:200 src/hed/dmc/srm/DataPointSRM.cpp:249 msgid "Calling PrepareReading when request was already prepared!" 
msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:220 #, c-format msgid "File %s is NEARLINE, will make request to bring online" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:229 #, c-format msgid "Bring online request %s is still in queue, should wait" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:234 #, c-format msgid "Bring online request %s finished successfully, file is now ONLINE" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:240 #, c-format msgid "" "Bad logic for %s - bringOnline returned ok but SRM request is not finished " "successfully or on going" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:268 src/hed/dmc/srm/DataPointSRM.cpp:411 msgid "None of the requested transfer protocols are supported" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:281 #, c-format msgid "Get request %s is still in queue, should wait %i seconds" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:289 src/hed/dmc/srm/DataPointSRM.cpp:468 #, c-format msgid "Checking URL returned by SRM: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:304 src/hed/dmc/srm/DataPointSRM.cpp:483 #, c-format msgid "SRM returned no useful Transfer URLs: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:311 #, c-format msgid "" "Bad logic for %s - getTURLs returned ok but SRM request is not finished " "successfully or on going" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:319 msgid "StartReading" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:321 msgid "StartReading: File was not prepared properly" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:331 src/hed/dmc/srm/DataPointSRM.cpp:510 #, c-format msgid "Redirecting to new URL: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:392 msgid "Calling PrepareWriting when request was already prepared!" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:421 msgid "No space token specified" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:427 msgid "Warning: Using SRM protocol v1 which does not support space tokens" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:430 #, c-format msgid "Using space token description %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:436 #, c-format msgid "Error looking up space tokens matching description %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:440 #, c-format msgid "No space tokens found matching description %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:445 #, c-format msgid "Using space token %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:460 #, c-format msgid "Put request %s is still in queue, should wait %i seconds" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:490 #, c-format msgid "" "Bad logic for %s - putTURLs returned ok but SRM request is not finished " "successfully or on going" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:498 msgid "StartWriting" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:500 msgid "StartWriting: File was not prepared properly" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:559 #, c-format msgid "FinishWriting: looking for metadata: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:574 #, c-format msgid "FinishWriting: obtained checksum: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:577 #, c-format msgid "" "Calculated/supplied transfer checksum %s matches checksum reported by SRM " "destination %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:580 #, c-format msgid "" "Checksum mismatch between calculated/supplied checksum (%s) and checksum " "reported by SRM destination (%s)" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:583 #, c-format msgid "" "Checksum type of SRM (%s) and calculated/supplied checksum (%s) differ, " 
"cannot compare" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:584 src/hed/dmc/srm/DataPointSRM.cpp:585 msgid "No checksum information from server" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:586 src/hed/dmc/srm/DataPointSRM.cpp:587 msgid "No checksum verification possible" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:593 msgid "Failed to release completed request" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:636 src/hed/dmc/srm/DataPointSRM.cpp:703 #, c-format msgid "ListFiles: looking for metadata: %s" msgstr "" #: src/hed/dmc/srm/DataPointSRM.cpp:821 #, c-format msgid "plugin for transport protocol %s is not installed" msgstr "" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:51 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:90 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:142 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:181 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:221 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:259 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:303 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:365 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:438 msgid "SRM did not return any information" msgstr "" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:316 #, c-format msgid "File could not be moved to Running state: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:372 msgid "SRM did not return any useful information" msgstr "" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:450 msgid "File could not be moved to Done state" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:88 msgid "Could not determine version of server" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:94 #, c-format msgid "Server SRM version: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:99 #, c-format msgid "Server implementation: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:136 #, c-format msgid "Adding space token %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:163 msgid "No request tokens found" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:176 #, c-format msgid "Adding request token %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:237 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:642 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:828 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1385 #, c-format msgid "%s: File request %s in SRM queue. Sleeping for %i seconds" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:275 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:327 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:698 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:764 #, c-format msgid "File is ready! TURL is %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:359 #, c-format msgid "Setting userRequestDescription to %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:414 #, c-format msgid "%s: Bring online request %s in SRM queue. Sleeping for %i seconds" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:457 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1160 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1194 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1228 msgid "No request token specified!" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:524 msgid "Request is reported as ABORTED, but all files are done" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:530 msgid "Request is reported as ABORTED, since it was cancelled" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:536 #, c-format msgid "Request is reported as ABORTED. 
Reason: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:673 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:745 #, c-format msgid "Path %s is invalid, creating required directories" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:678 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:750 #, c-format msgid "Error creating required directories for %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:851 msgid "Too many files in one request - please try again with fewer files" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:899 msgid "" "Directory size is too large to list in one call, will have to call multiple " "times" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:936 msgid "" "Failure in parsing response from server - some information may be inaccurate" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:942 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:566 #: src/services/gridftpd/misc/ldapquery.cpp:183 #: src/services/gridftpd/misc/ldapquery.cpp:186 #: src/services/gridftpd/misc/ldapquery.cpp:392 #: src/services/gridftpd/misc/ldapquery.cpp:622 #: src/services/gridftpd/misc/ldapquery.cpp:631 #, c-format msgid "%s: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:975 #, c-format msgid "" "Directory size is larger than %i files, will have to call multiple times" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1185 #, c-format msgid "Files associated with request token %s released successfully" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1219 #, c-format msgid "Files associated with request token %s put done successfully" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1254 #, c-format msgid "Files associated with request token %s aborted successfully" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1271 #, c-format msgid "" "Failed to find metadata info on %s for determining file or directory delete" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1277 msgid "Type is file, calling srmRm" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1281 msgid "Type is dir, calling srmRmDir" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1285 msgid "File type is not available, attempting file delete" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1288 msgid "File delete failed, attempting directory delete" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1313 #, c-format msgid "File %s removed successfully" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1340 #, c-format msgid "Directory %s removed successfully" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1455 #, c-format msgid "Checking for existence of %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1458 #, c-format msgid "File already exists: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1495 #, c-format msgid "Error creating directory %s: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:82 #, c-format msgid "Attempting to contact %s on port %i" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:88 #, c-format msgid "Storing port %i for %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:102 #, c-format msgid "No port succeeded for %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:112 #, c-format msgid "URL %s disagrees with stored SRM info, testing new info" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:118 #, c-format msgid "Replacing old SRM info with new for URL %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:140 #, c-format msgid "SOAP request: 
%s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:147 #: src/hed/dmc/srm/srmclient/SRMClient.cpp:176 #, c-format msgid "SOAP fault: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:148 msgid "Reconnecting" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:158 #, c-format msgid "SRM Client status: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:164 msgid "No SOAP response" msgstr "" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:171 #: src/hed/identitymap/ArgusPDPClient.cpp:250 #, c-format msgid "SOAP response: %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:76 #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:162 #, c-format msgid "Failed to acquire lock on file %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:81 #, c-format msgid "Error reading info from file %s:%s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:95 #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:187 #, c-format msgid "Bad or old format detected in file %s, in line %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:100 #, c-format msgid "Cannot convert string %s to int in line %s" msgstr "" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:203 #, c-format msgid "Error writing srm info file %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:64 msgid "" "Missing reference to factory and/or module. It is unsafe to use Xrootd in " "non-persistent mode - Xrootd code is disabled. Report to developers." msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:103 #, c-format msgid "Could not handle checksum %s: skip checksum check" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:109 #, c-format msgid "Failed to create xrootd copy job: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:126 #, c-format msgid "Failed to copy %s: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:177 #, c-format msgid "Reading %u bytes from byte %llu" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:179 #, c-format msgid "Read %i bytes" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:210 #, c-format msgid "Could not open file %s for reading: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:225 #, c-format msgid "Unable to find file size of %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:289 #, c-format msgid "DataPointXrootd::write_file got position %d and offset %d, has to seek" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:312 #, c-format msgid "xrootd write failed: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:321 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:395 #, c-format msgid "xrootd close failed: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:344 #, c-format msgid "Failed to open %s, trying to create parent directories" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:357 #, c-format msgid "xrootd open failed: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:371 #, c-format msgid "close failed: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:413 #, c-format msgid "Read access not allowed for %s: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:432 #, c-format msgid "Could not stat file %s: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:437 msgid "Not getting checksum of zip constituent" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:441 #, c-format msgid "Could not get checksum of %s: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:445 #, c-format msgid "Checksum %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:483 #, c-format msgid "Failed to open directory %s: %s" msgstr "" #: 
src/hed/dmc/xrootd/DataPointXrootd.cpp:501 #, c-format msgid "Error while reading dir %s: %s" msgstr "" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:551 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:569 #, c-format msgid "Error creating required dirs: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:158 msgid "PDPD location is missing" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:161 #, c-format msgid "PDPD location: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:166 #: src/hed/identitymap/ArgusPEPClient.cpp:129 msgid "Conversion mode is set to SUBJECT" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:169 #: src/hed/identitymap/ArgusPEPClient.cpp:132 msgid "Conversion mode is set to CREAM" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:172 #: src/hed/identitymap/ArgusPEPClient.cpp:135 msgid "Conversion mode is set to EMI" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:175 #: src/hed/identitymap/ArgusPEPClient.cpp:138 #, c-format msgid "Unknown conversion mode %s, using default" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:242 #, c-format msgid "Failed to contact PDP server: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:245 #, c-format msgid "There was no SOAP response return from PDP server: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:360 #: src/hed/identitymap/ArgusPEPClient.cpp:286 #, c-format msgid "Have %i requests to process" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:362 msgid "Creating a client to Argus PDP service" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:375 #, c-format msgid "XACML authorisation request: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:386 #, c-format msgid "XACML authorisation response: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:426 #, c-format msgid "%s is not authorized to do action %s in resource %s " msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:429 #: src/hed/identitymap/ArgusPDPClient.cpp:434 #: src/hed/identitymap/ArgusPEPClient.cpp:336 msgid "Not authorized" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:439 #: src/hed/identitymap/ArgusPEPClient.cpp:341 #: src/hed/identitymap/IdentityMap.cpp:219 src/hed/shc/legacy/LegacyMap.cpp:216 #, c-format msgid "Grid identity is mapped to local identity '%s'" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:566 #: src/hed/identitymap/ArgusPEPClient.cpp:655 msgid "Doing CREAM request" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:580 #: src/hed/identitymap/ArgusPDPClient.cpp:748 #: src/hed/identitymap/ArgusPEPClient.cpp:683 #, c-format msgid "Adding profile-id value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:592 #: src/hed/identitymap/ArgusPDPClient.cpp:759 #: src/hed/identitymap/ArgusPEPClient.cpp:694 #, c-format msgid "Adding subject-id value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:600 #: src/hed/identitymap/ArgusPDPClient.cpp:767 #: src/hed/identitymap/ArgusPEPClient.cpp:704 #, c-format msgid "Adding subject-issuer value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:607 #: src/hed/identitymap/ArgusPEPClient.cpp:713 #, c-format msgid "Adding virtual-organization value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:620 #: src/hed/identitymap/ArgusPEPClient.cpp:730 #, c-format msgid "Adding FQAN value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:629 #: src/hed/identitymap/ArgusPEPClient.cpp:739 #, c-format msgid "Adding FQAN/primary value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:639 #: 
src/hed/identitymap/ArgusPEPClient.cpp:750 #, c-format msgid "Adding cert chain value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:648 #: src/hed/identitymap/ArgusPDPClient.cpp:840 #: src/hed/identitymap/ArgusPEPClient.cpp:760 #, c-format msgid "Adding resource-id value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:662 #: src/hed/identitymap/ArgusPDPClient.cpp:863 #: src/hed/identitymap/ArgusPEPClient.cpp:775 #, c-format msgid "Adding action-id value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:666 #: src/hed/identitymap/ArgusPEPClient.cpp:786 #, c-format msgid "CREAM request generation failed: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:732 msgid "Doing EMI request" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:774 #, c-format msgid "Adding Virtual Organization value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:797 #, c-format msgid "Adding VOMS group value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:803 #, c-format msgid "Adding VOMS primary group value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:822 #, c-format msgid "Adding VOMS role value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:829 #, c-format msgid "Adding VOMS primary role value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:846 #, c-format msgid "Adding resource-owner value: %s" msgstr "" #: src/hed/identitymap/ArgusPDPClient.cpp:867 #, c-format msgid "EMI request generation failed: %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:119 msgid "PEPD location is missing" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:122 #, c-format msgid "PEPD location: %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:126 msgid "Conversion mode is set to DIRECT" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:331 #, c-format msgid "" "Not authorized according to request:\n" "%s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:333 #, c-format msgid "%s is not authorized to do action %s in resource %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:361 msgid "Subject of request is null" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:366 #, c-format msgid "Can not create XACML SubjectAttribute: %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:375 msgid "Can not create XACML Resource" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:381 #, c-format msgid "Can not create XACML ResourceAttribute: %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:390 msgid "Can not create XACML Action" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:397 #, c-format msgid "Can not create XACML ActionAttribute: %s" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:407 msgid "Can not create XACML request" msgstr "" #: src/hed/identitymap/ArgusPEPClient.cpp:539 #, c-format msgid "Converting to CREAM action - namespace: %s, operation: %s" msgstr "" #: src/hed/identitymap/IdentityMap.cpp:196 #, c-format msgid "PDP: %s can not be loaded" msgstr "" #: src/hed/libs/common/ArcLocation.cpp:128 #, c-format msgid "" "Can not determine the install location. Using %s. Please set ARC_LOCATION if " "this is not correct." 
msgstr "" #: src/hed/libs/common/DateTime.cpp:86 src/hed/libs/common/DateTime.cpp:631 #: src/hed/libs/common/StringConv.h:25 msgid "Empty string" msgstr "" #: src/hed/libs/common/DateTime.cpp:107 #, c-format msgid "Can not parse date: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:130 #, c-format msgid "Can not parse time: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:160 #, c-format msgid "Can not parse time zone offset: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:180 src/hed/libs/common/DateTime.cpp:199 #: src/hed/libs/common/DateTime.cpp:252 src/hed/libs/common/DateTime.cpp:291 #, c-format msgid "Illegal time format: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:230 src/hed/libs/common/DateTime.cpp:283 #, c-format msgid "Can not parse month: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:647 src/hed/libs/common/DateTime.cpp:688 #, c-format msgid "Invalid ISO duration format: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:752 #, c-format msgid "Invalid period string: %s" msgstr "" #: src/hed/libs/common/DateTime.cpp:874 msgid "hour" msgid_plural "hours" msgstr[0] "" msgstr[1] "" #: src/hed/libs/common/DateTime.cpp:880 msgid "minute" msgid_plural "minutes" msgstr[0] "" msgstr[1] "" #: src/hed/libs/common/DateTime.cpp:886 msgid "second" msgid_plural "seconds" msgstr[0] "" msgstr[1] "" #: src/hed/libs/common/FileLock.cpp:43 msgid "Cannot determine hostname from gethostname()" msgstr "" #: src/hed/libs/common/FileLock.cpp:92 #, c-format msgid "EACCES Error opening lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:97 #, c-format msgid "Error opening lock file %s in initial check: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:104 #, c-format msgid "Error creating temporary file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:113 #, c-format msgid "Could not create link to lock file %s as it already exists" msgstr "" #: src/hed/libs/common/FileLock.cpp:124 #, c-format msgid "Could not create lock file %s as it already exists" msgstr "" #: src/hed/libs/common/FileLock.cpp:128 #, c-format msgid "Error creating lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:133 #, c-format msgid "Error writing to lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:141 #, c-format msgid "Error linking tmp file %s to lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:150 #, c-format msgid "Error in lock file %s, even though linking did not return an error" msgstr "" #: src/hed/libs/common/FileLock.cpp:159 #, c-format msgid "%li seconds since lock file %s was created" msgstr "" #: src/hed/libs/common/FileLock.cpp:162 #, c-format msgid "Timeout has expired, will remove lock file %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:166 #, c-format msgid "Failed to remove stale lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:178 #, c-format msgid "This process already owns the lock on %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:183 #, c-format msgid "" "The process owning the lock on %s is no longer running, will remove lock" msgstr "" #: src/hed/libs/common/FileLock.cpp:185 #, c-format msgid "Failed to remove file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:193 #, c-format msgid "The file %s is currently locked with a valid lock" msgstr "" #: src/hed/libs/common/FileLock.cpp:208 #, c-format msgid "Failed to unlock file with lock %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:220 #, c-format msgid "Lock file %s doesn't exist" msgstr "" #: src/hed/libs/common/FileLock.cpp:222 #, c-format msgid 
"Error listing lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:228 #, c-format msgid "Found unexpected empty lock file %s. Must go back to acquire()" msgstr "" #: src/hed/libs/common/FileLock.cpp:234 #, c-format msgid "Error reading lock file %s: %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:238 #, c-format msgid "Error with formatting in lock file %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:248 #, c-format msgid "Lock %s is owned by a different host (%s)" msgstr "" #: src/hed/libs/common/FileLock.cpp:257 #, c-format msgid "Badly formatted pid %s in lock file %s" msgstr "" #: src/hed/libs/common/FileLock.cpp:260 #, c-format msgid "Another process (%s) owns the lock on file %s" msgstr "" #: src/hed/libs/common/IString.cpp:32 src/hed/libs/common/IString.cpp:41 #: src/hed/libs/common/IString.cpp:42 msgid "(empty)" msgstr "" #: src/hed/libs/common/IString.cpp:32 src/hed/libs/common/IString.cpp:41 #: src/hed/libs/common/IString.cpp:42 msgid "(null)" msgstr "" #: src/hed/libs/common/Logger.cpp:58 #, c-format msgid "Invalid log level. Using default %s." msgstr "" #: src/hed/libs/common/Logger.cpp:123 #, c-format msgid "Invalid old log level. Using default %s." msgstr "" #: src/hed/libs/common/OptionParser.cpp:106 #, c-format msgid "Cannot parse integer value '%s' for -%c" msgstr "" #: src/hed/libs/common/OptionParser.cpp:309 #: src/hed/libs/common/OptionParser.cpp:446 #, c-format msgid "Options Group %s:" msgstr "" #: src/hed/libs/common/OptionParser.cpp:311 #: src/hed/libs/common/OptionParser.cpp:449 #, c-format msgid "%s:" msgstr "" #: src/hed/libs/common/OptionParser.cpp:313 #, c-format msgid "Show %s help options" msgstr "" #: src/hed/libs/common/OptionParser.cpp:342 msgid "Failed to parse command line options" msgstr "" #: src/hed/libs/common/OptionParser.cpp:352 msgid "Use -? to get usage description" msgstr "" #: src/hed/libs/common/OptionParser.cpp:429 msgid "Usage:" msgstr "" #: src/hed/libs/common/OptionParser.cpp:432 msgid "OPTION..." msgstr "" #: src/hed/libs/common/OptionParser.cpp:438 msgid "Help Options:" msgstr "" #: src/hed/libs/common/OptionParser.cpp:439 msgid "Show help options" msgstr "" #: src/hed/libs/common/Profile.cpp:199 src/hed/libs/common/Profile.cpp:273 #: src/hed/libs/common/Profile.cpp:404 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"inisections\" " "attribute cannot be the empty string." msgstr "" #: src/hed/libs/common/Profile.cpp:205 src/hed/libs/common/Profile.cpp:279 #: src/hed/libs/common/Profile.cpp:411 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"initag\" attribute " "cannot be the empty string." msgstr "" #: src/hed/libs/common/Profile.cpp:419 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"initype\" " "attribute cannot be the empty string." msgstr "" #: src/hed/libs/common/Profile.cpp:422 #, c-format msgid "" "Element \"%s\" in the profile ignored: the \"inidefaultvalue\" attribute " "cannot be specified when the \"inisections\" and \"initag\" attributes have " "not been specified." msgstr "" #: src/hed/libs/common/Profile.cpp:497 #, c-format msgid "" "In the configuration profile the 'initype' attribute on the \"%s\" element " "has a invalid value \"%s\"." 
msgstr "" #: src/hed/libs/common/Run_unix.cpp:226 msgid "Child monitoring signal detected" msgstr "" #: src/hed/libs/common/Run_unix.cpp:231 #, c-format msgid "Child monitoring error: %i" msgstr "" #: src/hed/libs/common/Run_unix.cpp:244 msgid "Child monitoring kick detected" msgstr "" #: src/hed/libs/common/Run_unix.cpp:247 msgid "Child monitoring internal communication error" msgstr "" #: src/hed/libs/common/Run_unix.cpp:259 msgid "Child monitoring stdout is closed" msgstr "" #: src/hed/libs/common/Run_unix.cpp:269 msgid "Child monitoring stderr is closed" msgstr "" #: src/hed/libs/common/Run_unix.cpp:279 msgid "Child monitoring stdin is closed" msgstr "" #: src/hed/libs/common/Run_unix.cpp:297 #, c-format msgid "Child monitoring child %d exited" msgstr "" #: src/hed/libs/common/Run_unix.cpp:301 #, c-format msgid "Child monitoring lost child %d (%d)" msgstr "" #: src/hed/libs/common/Run_unix.cpp:322 #, c-format msgid "Child monitoring drops abandoned child %d (%d)" msgstr "" #: src/hed/libs/common/Run_unix.cpp:483 msgid "Child was already started" msgstr "" #: src/hed/libs/common/Run_unix.cpp:487 msgid "No arguments are assigned for external process" msgstr "" #: src/hed/libs/common/Run_unix.cpp:620 src/hed/libs/common/Run_unix.cpp:625 #, c-format msgid "Excepton while trying to start external process: %s" msgstr "" #: src/hed/libs/common/StringConv.h:31 #, c-format msgid "Conversion failed: %s" msgstr "" #: src/hed/libs/common/StringConv.h:35 #, c-format msgid "Full string not used: %s" msgstr "" #: src/hed/libs/common/Thread.cpp:261 msgid "Maximum number of threads running - putting new request into queue" msgstr "" #: src/hed/libs/common/Thread.cpp:309 #, c-format msgid "Thread exited with Glib error: %s" msgstr "" #: src/hed/libs/common/Thread.cpp:311 #, c-format msgid "Thread exited with Glib exception: %s" msgstr "" #: src/hed/libs/common/Thread.cpp:313 #, c-format msgid "Thread exited with generic exception: %s" msgstr "" #: src/hed/libs/common/URL.cpp:137 #, c-format msgid "URL is not valid: %s" msgstr "" #: src/hed/libs/common/URL.cpp:188 #, c-format msgid "Illegal URL - path must be absolute: %s" msgstr "" #: src/hed/libs/common/URL.cpp:193 #, c-format msgid "Illegal URL - no hostname given: %s" msgstr "" #: src/hed/libs/common/URL.cpp:282 #, c-format msgid "Illegal URL - path must be absolute or empty: %s" msgstr "" #: src/hed/libs/common/URL.cpp:298 #, c-format msgid "Illegal URL - no closing ] for IPv6 address found: %s" msgstr "" #: src/hed/libs/common/URL.cpp:306 #, c-format msgid "" "Illegal URL - closing ] for IPv6 address is followed by illegal token: %s" msgstr "" #: src/hed/libs/common/URL.cpp:322 #, c-format msgid "Invalid port number in %s" msgstr "" #: src/hed/libs/common/URL.cpp:453 #, c-format msgid "Unknown LDAP scope %s - using base" msgstr "" #: src/hed/libs/common/URL.cpp:616 msgid "Attempt to assign relative path to URL - making it absolute" msgstr "" #: src/hed/libs/common/URL.cpp:715 #, c-format msgid "URL option %s does not have format name=value" msgstr "" #: src/hed/libs/common/URL.cpp:1180 #, c-format msgid "urllist %s contains invalid URL: %s" msgstr "" #: src/hed/libs/common/URL.cpp:1185 #, c-format msgid "URL protocol is not urllist: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:32 src/hed/libs/common/UserConfig.cpp:781 #: src/hed/libs/common/UserConfig.cpp:790 #: src/hed/libs/common/UserConfig.cpp:796 #: src/hed/libs/common/UserConfig.cpp:822 #: src/hed/libs/common/UserConfig.cpp:832 #: src/hed/libs/common/UserConfig.cpp:844 #: 
src/hed/libs/common/UserConfig.cpp:864 #, c-format msgid "Multiple %s attributes in configuration file (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:132 #, c-format msgid "Wrong ownership of certificate file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:134 #, c-format msgid "Wrong permissions of certificate file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:136 #, c-format msgid "Can not access certificate file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:143 #, c-format msgid "Wrong ownership of key file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:145 #, c-format msgid "Wrong permissions of key file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:147 #, c-format msgid "Can not access key file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:154 #, c-format msgid "Wrong ownership of proxy file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:156 #, c-format msgid "Wrong permissions of proxy file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:158 #, c-format msgid "Can not access proxy file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:169 msgid "computing" msgstr "" #: src/hed/libs/common/UserConfig.cpp:171 msgid "index" msgstr "" #: src/hed/libs/common/UserConfig.cpp:270 #: src/hed/libs/common/UserConfig.cpp:274 #: src/hed/libs/common/UserConfig.cpp:321 #: src/hed/libs/common/UserConfig.cpp:325 #, c-format msgid "System configuration file (%s) contains errors." msgstr "" #: src/hed/libs/common/UserConfig.cpp:278 #: src/hed/libs/common/UserConfig.cpp:329 #, c-format msgid "System configuration file (%s or %s) does not exist." msgstr "" #: src/hed/libs/common/UserConfig.cpp:280 #: src/hed/libs/common/UserConfig.cpp:331 #, c-format msgid "System configuration file (%s) does not exist." msgstr "" #: src/hed/libs/common/UserConfig.cpp:286 #: src/hed/libs/common/UserConfig.cpp:298 #: src/hed/libs/common/UserConfig.cpp:337 #: src/hed/libs/common/UserConfig.cpp:349 #, c-format msgid "User configuration file (%s) contains errors." msgstr "" #: src/hed/libs/common/UserConfig.cpp:291 #: src/hed/libs/common/UserConfig.cpp:342 msgid "No configuration file could be loaded." msgstr "" #: src/hed/libs/common/UserConfig.cpp:294 #: src/hed/libs/common/UserConfig.cpp:345 #, c-format msgid "User configuration file (%s) does not exist or cannot be loaded." msgstr "" #: src/hed/libs/common/UserConfig.cpp:406 #, c-format msgid "" "Unable to parse the specified verbosity (%s) to one of the allowed levels" msgstr "" #: src/hed/libs/common/UserConfig.cpp:418 #, c-format msgid "" "Unsupported job list type '%s', using 'BDB'. Supported types are: BDB, " "SQLITE, XML." msgstr "" #: src/hed/libs/common/UserConfig.cpp:463 msgid "Loading OToken failed - ignoring its presence" msgstr "" #: src/hed/libs/common/UserConfig.cpp:604 #, c-format msgid "Certificate and key ('%s' and '%s') not found in any of the paths: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:606 #, c-format msgid "" "If the proxy or certificate/key does exist, you can manually specify the " "locations via environment variables '%s'/'%s' or '%s', or the '%s'/'%s' or " "'%s' attributes in the client configuration file (e.g. '%s')" msgstr "" #: src/hed/libs/common/UserConfig.cpp:623 #: src/hed/libs/common/UserConfig.cpp:633 #, c-format msgid "" "Can not access CA certificate directory: %s. The certificates will not be " "verified." 
msgstr "" #: src/hed/libs/common/UserConfig.cpp:659 #, c-format msgid "" "Can not find CA certificates directory in default locations:\n" "~/.arc/certificates, ~/.globus/certificates,\n" "%s/etc/certificates, %s/etc/grid-security/certificates,\n" "%s/share/certificates, /etc/grid-security/certificates.\n" "The certificate will not be verified.\n" "If the CA certificates directory does exist, please manually specify the " "locations via env\n" "X509_CERT_DIR, or the cacertificatesdirectory item in client.conf\n" msgstr "" #: src/hed/libs/common/UserConfig.cpp:680 #, c-format msgid "Using proxy file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:683 #, c-format msgid "Using certificate file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:684 #, c-format msgid "Using key file: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:688 #, c-format msgid "Using CA certificate directory: %s" msgstr "" #: src/hed/libs/common/UserConfig.cpp:692 msgid "Using OToken" msgstr "" #: src/hed/libs/common/UserConfig.cpp:705 #: src/hed/libs/common/UserConfig.cpp:711 #, c-format msgid "Can not access VOMSES file/directory: %s." msgstr "" #: src/hed/libs/common/UserConfig.cpp:717 #, c-format msgid "Can not access VOMS file/directory: %s." msgstr "" #: src/hed/libs/common/UserConfig.cpp:731 msgid "" "Can not find voms service configuration file (vomses) in default locations: " "~/.arc/vomses, ~/.voms/vomses, $ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/" "grid-security/vomses, $PWD/vomses, /etc/vomses, /etc/grid-security/vomses" msgstr "" #: src/hed/libs/common/UserConfig.cpp:744 #, c-format msgid "Loading configuration (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:778 #, c-format msgid "" "The value of the timeout attribute in the configuration file (%s) was only " "partially parsed" msgstr "" #: src/hed/libs/common/UserConfig.cpp:803 msgid "" "The brokerarguments attribute can only be used in conjunction with the " "brokername attribute" msgstr "" #: src/hed/libs/common/UserConfig.cpp:819 #, c-format msgid "" "The value of the keysize attribute in the configuration file (%s) was only " "partially parsed" msgstr "" #: src/hed/libs/common/UserConfig.cpp:839 #, c-format msgid "" "Could not convert the slcs attribute value (%s) to an URL instance in " "configuration file (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:885 #, c-format msgid "Specified overlay file (%s) does not exist." msgstr "" #: src/hed/libs/common/UserConfig.cpp:889 #, c-format msgid "" "Unknown attribute %s in common section of configuration file (%s), ignoring " "it" msgstr "" #: src/hed/libs/common/UserConfig.cpp:930 #, c-format msgid "Unknown section %s, ignoring it" msgstr "" #: src/hed/libs/common/UserConfig.cpp:934 #, c-format msgid "Configuration (%s) loaded" msgstr "" #: src/hed/libs/common/UserConfig.cpp:937 #, c-format msgid "Could not load configuration (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1032 #, c-format msgid "UserConfiguration saved to file (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1045 #, c-format msgid "Unable to create %s directory." 
msgstr "" #: src/hed/libs/common/UserConfig.cpp:1054 #, c-format msgid "Configuration example file created (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1056 #, c-format msgid "Unable to copy example configuration from existing configuration (%s)" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1061 #, c-format msgid "Cannot copy example configuration (%s), it is not a regular file" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1066 #, c-format msgid "Example configuration (%s) not created." msgstr "" #: src/hed/libs/common/UserConfig.cpp:1071 #, c-format msgid "The default configuration file (%s) is not a regular file." msgstr "" #: src/hed/libs/common/UserConfig.cpp:1089 #, c-format msgid "%s directory created" msgstr "" #: src/hed/libs/common/UserConfig.cpp:1091 #: src/hed/libs/common/UserConfig.cpp:1130 src/hed/libs/data/DataMover.cpp:679 #, c-format msgid "Failed to create directory %s" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:58 msgid "This VERBOSE message should not be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:62 msgid "This INFO message should be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:73 msgid "This VERBOSE message should now be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:79 msgid "This INFO message should also be seen" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:93 msgid "This message goes to initial destination" msgstr "" #: src/hed/libs/common/test/LoggerTest.cpp:108 msgid "This message goes to per-thread destination" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:80 msgid "Request failed: No response from SPService" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:84 #: src/hed/libs/communication/ClientSAML2SSO.cpp:137 msgid "Request failed: response from SPService is not as expected" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:92 #, c-format msgid "Authentication Request URL: %s" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:133 msgid "Request failed: No response from IdP" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:184 msgid "Request failed: No response from IdP when doing redirecting" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:188 msgid "" "Request failed: response from IdP is not as expected when doing redirecting" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:245 msgid "Request failed: No response from IdP when doing authentication" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:249 msgid "" "Request failed: response from IdP is not as expected when doing " "authentication" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:294 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:300 msgid "Succeeded to verify the signature under " msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:296 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:303 msgid "Failed to verify the signature under " msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:310 msgid "" "Request failed: No response from SP Service when sending SAML assertion to SP" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:314 msgid "" "Request failed: response from SP Service is not as expected when sending " "SAML assertion to SP" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:325 #, c-format msgid "IdP return some error message: %s" msgstr "" #: src/hed/libs/communication/ClientSAML2SSO.cpp:353 #: src/hed/libs/communication/ClientSAML2SSO.cpp:398 msgid "SAML2SSO process failed" msgstr "" #: 
src/hed/libs/communication/ClientX509Delegation.cpp:54 msgid "Creating delegation credential to ARC delegation service" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:64 #: src/hed/libs/communication/ClientX509Delegation.cpp:267 msgid "DelegateCredentialsInit failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:68 #: src/hed/libs/communication/ClientX509Delegation.cpp:122 #: src/hed/libs/communication/ClientX509Delegation.cpp:157 #: src/hed/libs/communication/ClientX509Delegation.cpp:212 #: src/hed/libs/communication/ClientX509Delegation.cpp:271 msgid "There is no SOAP response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:73 msgid "There is no X509 request in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:78 msgid "There is no Format request in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:86 msgid "There is no Id or X509 request value in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:99 #: src/hed/libs/communication/ClientX509Delegation.cpp:187 msgid "DelegateProxy failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:118 msgid "UpdateCredentials failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:126 msgid "There is no UpdateCredentialsResponse in response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:134 #: src/hed/libs/communication/ClientX509Delegation.cpp:162 #: src/hed/libs/communication/ClientX509Delegation.cpp:217 #: src/hed/libs/communication/ClientX509Delegation.cpp:302 msgid "There is no SOAP connection chain configured" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:140 msgid "Creating delegation to CREAM delegation service" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:153 msgid "Delegation getProxyReq request failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:173 msgid "Creating delegation to CREAM delegation service failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:208 msgid "Delegation putProxy request failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:222 msgid "Creating delegation to CREAM delegation failed" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:237 msgid "Getting delegation credential from ARC delegation service" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:276 msgid "There is no Delegated X509 token in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:281 msgid "There is no Format delegated token in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:289 msgid "There is no Id or X509 token value in the response" msgstr "" #: src/hed/libs/communication/ClientX509Delegation.cpp:298 #, c-format msgid "" "Get delegated credential from delegation service: \n" " %s" msgstr "" #: src/hed/libs/compute/Broker.cpp:62 #, c-format msgid "Performing matchmaking against target (%s)." msgstr "" #: src/hed/libs/compute/Broker.cpp:72 #, c-format msgid "Matchmaking, ExecutionTarget: %s matches job description" msgstr "" #: src/hed/libs/compute/Broker.cpp:145 #, c-format msgid "" "The CA issuer (%s) of the credentials (%s) is not trusted by the target (%s)." 
msgstr "" #: src/hed/libs/compute/Broker.cpp:153 src/hed/libs/compute/Broker.cpp:171 #, c-format msgid "ComputingShareName of ExecutionTarget (%s) is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:157 src/hed/libs/compute/Broker.cpp:162 #, c-format msgid "ComputingShare (%s) explicitly rejected" msgstr "" #: src/hed/libs/compute/Broker.cpp:175 src/hed/libs/compute/Broker.cpp:180 #, c-format msgid "ComputingShare (%s) does not match selected queue (%s)" msgstr "" #: src/hed/libs/compute/Broker.cpp:189 #, c-format msgid "" "ProcessingStartTime (%s) specified in job description is inside the targets " "downtime period [ %s - %s ]." msgstr "" #: src/hed/libs/compute/Broker.cpp:194 #, c-format msgid "The downtime of the target (%s) is not published. Keeping target." msgstr "" #: src/hed/libs/compute/Broker.cpp:200 #, c-format msgid "HealthState of ExecutionTarget (%s) is not OK (%s)" msgstr "" #: src/hed/libs/compute/Broker.cpp:205 #, c-format msgid "Matchmaking, ExecutionTarget: %s, HealthState is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:212 #, c-format msgid "" "Matchmaking, Computing endpoint requirement not satisfied. ExecutionTarget: " "%s" msgstr "" #: src/hed/libs/compute/Broker.cpp:217 #, c-format msgid "Matchmaking, ExecutionTarget: %s, ImplementationName is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:243 #, c-format msgid "" "Matchmaking, %s (%d) is %s than %s (%d) published by the ExecutionTarget." msgstr "" #: src/hed/libs/compute/Broker.cpp:272 #, c-format msgid "" "Matchmaking, The %s scaled %s (%d) is %s than the %s (%d) published by the " "ExecutionTarget." msgstr "" #: src/hed/libs/compute/Broker.cpp:284 #, c-format msgid "Matchmaking, Benchmark %s is not published by the ExecutionTarget." msgstr "" #: src/hed/libs/compute/Broker.cpp:299 #, c-format msgid "" "Matchmaking, MaxTotalCPUTime problem, ExecutionTarget: %d (MaxTotalCPUTime), " "JobDescription: %d (TotalCPUTime)" msgstr "" #: src/hed/libs/compute/Broker.cpp:306 #, c-format msgid "" "Matchmaking, MaxCPUTime problem, ExecutionTarget: %d (MaxCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" msgstr "" #: src/hed/libs/compute/Broker.cpp:311 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxTotalCPUTime or MaxCPUTime not " "defined, assuming no CPU time limit" msgstr "" #: src/hed/libs/compute/Broker.cpp:317 #, c-format msgid "" "Matchmaking, MinCPUTime problem, ExecutionTarget: %d (MinCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" msgstr "" #: src/hed/libs/compute/Broker.cpp:322 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MinCPUTime not defined, assuming no CPU " "time limit" msgstr "" #: src/hed/libs/compute/Broker.cpp:330 #, c-format msgid "" "Matchmaking, MainMemorySize problem, ExecutionTarget: %d (MainMemorySize), " "JobDescription: %d (IndividualPhysicalMemory)" msgstr "" #: src/hed/libs/compute/Broker.cpp:336 #, c-format msgid "" "Matchmaking, MaxMainMemory problem, ExecutionTarget: %d (MaxMainMemory), " "JobDescription: %d (IndividualPhysicalMemory)" msgstr "" #: src/hed/libs/compute/Broker.cpp:341 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxMainMemory and MainMemorySize are not " "defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:349 #, c-format msgid "" "Matchmaking, MaxVirtualMemory problem, ExecutionTarget: %d " "(MaxVirtualMemory), JobDescription: %d (IndividualVirtualMemory)" msgstr "" #: src/hed/libs/compute/Broker.cpp:354 #, c-format msgid "Matchmaking, ExecutionTarget: %s, MaxVirtualMemory is not 
defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:362 #, c-format msgid "" "Matchmaking, Platform problem, ExecutionTarget: %s (Platform) " "JobDescription: %s (Platform)" msgstr "" #: src/hed/libs/compute/Broker.cpp:367 #, c-format msgid "Matchmaking, ExecutionTarget: %s, Platform is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:375 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, OperatingSystem requirements not satisfied" msgstr "" #: src/hed/libs/compute/Broker.cpp:380 #, c-format msgid "Matchmaking, ExecutionTarget: %s, OperatingSystem is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:388 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, RunTimeEnvironment requirements not " "satisfied" msgstr "" #: src/hed/libs/compute/Broker.cpp:393 #, c-format msgid "Matchmaking, ExecutionTarget: %s, ApplicationEnvironments not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:402 #, c-format msgid "" "Matchmaking, NetworkInfo demand not fulfilled, ExecutionTarget do not " "support %s, specified in the JobDescription." msgstr "" #: src/hed/libs/compute/Broker.cpp:406 #, c-format msgid "Matchmaking, ExecutionTarget: %s, NetworkInfo is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:414 #, c-format msgid "" "Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (SessionDiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:421 #, c-format msgid "" "Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (SessionDiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:427 src/hed/libs/compute/Broker.cpp:448 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxDiskSpace and WorkingAreaFree are not " "defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:435 #, c-format msgid "" "Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (DiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:442 #, c-format msgid "" "Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (DiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:456 #, c-format msgid "" "Matchmaking, CacheTotal problem, ExecutionTarget: %d MB (CacheTotal); " "JobDescription: %d MB (CacheDiskSpace)" msgstr "" #: src/hed/libs/compute/Broker.cpp:461 #, c-format msgid "Matchmaking, ExecutionTarget: %s, CacheTotal is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:469 #, c-format msgid "" "Matchmaking, TotalSlots problem, ExecutionTarget: %d (TotalSlots) " "JobDescription: %d (NumberOfProcesses)" msgstr "" #: src/hed/libs/compute/Broker.cpp:475 #, c-format msgid "" "Matchmaking, MaxSlotsPerJob problem, ExecutionTarget: %d (MaxSlotsPerJob) " "JobDescription: %d (NumberOfProcesses)" msgstr "" #: src/hed/libs/compute/Broker.cpp:481 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, TotalSlots and MaxSlotsPerJob are not " "defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:489 #, c-format msgid "" "Matchmaking, WorkingAreaLifeTime problem, ExecutionTarget: %s " "(WorkingAreaLifeTime) JobDescription: %s (SessionLifeTime)" msgstr "" #: src/hed/libs/compute/Broker.cpp:494 #, c-format msgid "Matchmaking, ExecutionTarget: %s, WorkingAreaLifeTime is not defined" msgstr "" #: src/hed/libs/compute/Broker.cpp:502 #, c-format msgid "" "Matchmaking, ConnectivityIn problem, ExecutionTarget: %s (ConnectivityIn) " "JobDescription: %s (InBound)" msgstr "" #: src/hed/libs/compute/Broker.cpp:509 #, 
c-format msgid "" "Matchmaking, ConnectivityOut problem, ExecutionTarget: %s (ConnectivityOut) " "JobDescription: %s (OutBound)" msgstr "" #: src/hed/libs/compute/Broker.cpp:532 msgid "Unable to sort added jobs. The BrokerPlugin plugin has not been loaded." msgstr "" #: src/hed/libs/compute/Broker.cpp:549 msgid "Unable to match target, marking it as not matching. Broker not valid." msgstr "" #: src/hed/libs/compute/Broker.cpp:585 msgid "Unable to sort ExecutionTarget objects - Invalid Broker object." msgstr "" #: src/hed/libs/compute/Broker.cpp:609 msgid "" "Unable to register job submission. Can't get JobDescription object from " "Broker, Broker is invalid." msgstr "" #: src/hed/libs/compute/BrokerPlugin.cpp:89 #, c-format msgid "Broker plugin \"%s\" not found." msgstr "" #: src/hed/libs/compute/BrokerPlugin.cpp:96 #, c-format msgid "Unable to load BrokerPlugin (%s)" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:27 #, c-format msgid "Uniq is replacing service coming from %s with service coming from %s" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:31 #, c-format msgid "Uniq is ignoring service coming from %s" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:38 #, c-format msgid "Uniq is adding service coming from %s" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:61 #, c-format msgid "Adding endpoint (%s) to TargetInformationRetriever" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:64 #, c-format msgid "Adding endpoint (%s) to ServiceEndpointRetriever" msgstr "" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:67 #, c-format msgid "" "Adding endpoint (%s) to both ServiceEndpointRetriever and " "TargetInformationRetriever" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:42 #, c-format msgid "The plugin %s does not support any interfaces, skipping it." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:47 #, c-format msgid "" "The first supported interface of the plugin %s is an empty string, skipping " "the plugin." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:95 #, c-format msgid "Interface on endpoint (%s) %s." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:101 #: src/hed/libs/compute/EntityRetriever.cpp:133 #: src/hed/libs/compute/EntityRetriever.cpp:425 #, c-format msgid "Ignoring endpoint (%s), it is already registered in retriever." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:110 #, c-format msgid "Service Loop: Endpoint %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:112 #, c-format msgid " This endpoint (%s) is STARTED or SUCCESSFUL" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:115 #, c-format msgid "" "Suspending querying of endpoint (%s) since the service at the endpoint is " "already being queried, or has been queried." 
msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:122 #: src/hed/libs/compute/EntityRetriever.cpp:237 #, c-format msgid " Status of endpoint (%s) is %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:126 #, c-format msgid "Setting status (STARTED) for endpoint: %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:145 #, c-format msgid "Starting thread to query the endpoint on %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:147 #: src/hed/libs/compute/EntityRetriever.cpp:289 #, c-format msgid "Failed to start querying the endpoint on %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:174 #, c-format msgid "Found a registry, will query it recursively: %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:211 #, c-format msgid "Setting status (%s) for endpoint: %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:231 msgid "Checking for suspended endpoints which should be started." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:241 #, c-format msgid "Found started or successful endpoint (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:253 #, c-format msgid "Found suspended endpoint (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:264 #, c-format msgid "Trying to start suspended endpoint (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:284 #, c-format msgid "" "Starting querying of suspended endpoint (%s) - no other endpoints for this " "service is being queried or has been queried successfully." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:351 #, c-format msgid "Calling plugin %s to query endpoint on %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:373 #, c-format msgid "" "The interface of this endpoint (%s) is unspecified, will try all possible " "plugins" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:389 #, c-format msgid "Problem loading plugin %s, skipping it." msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:393 #, c-format msgid "The endpoint (%s) is not supported by this plugin (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:414 #, c-format msgid "" "New endpoint is created (%s) from the one with the unspecified interface (%s)" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:432 #, c-format msgid "Starting sub-thread to query the endpoint on %s" msgstr "" #: src/hed/libs/compute/EntityRetriever.cpp:434 #, c-format msgid "" "Failed to start querying the endpoint on %s (unable to create sub-thread)" msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:32 #, c-format msgid "Found %s %s (it was loaded already)" msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:41 #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:49 #: src/hed/libs/compute/JobControllerPlugin.cpp:100 #: src/hed/libs/compute/JobControllerPlugin.cpp:109 #: src/hed/libs/compute/SubmitterPlugin.cpp:171 #: src/hed/libs/compute/SubmitterPlugin.cpp:181 #, c-format msgid "" "Unable to locate the \"%s\" plugin. Please refer to installation " "instructions and check if package providing support for \"%s\" plugin is " "installed" msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:42 #, c-format msgid "%s plugin \"%s\" not found." msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:50 #, c-format msgid "%s %s could not be created." 
msgstr "" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:55 #, c-format msgid "Loaded %s %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:51 #, c-format msgid "" "Skipping ComputingEndpoint '%s', because it has '%s' interface instead of " "the requested '%s'." msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:132 #, c-format msgid "" "Computing endpoint %s (type %s) added to the list for submission brokering" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:239 #, c-format msgid "Address: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:240 #, c-format msgid "Place: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:241 #, c-format msgid "Country: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:242 #, c-format msgid "Postal code: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:243 #, c-format msgid "Latitude: %f" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:244 #, c-format msgid "Longitude: %f" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:250 #, c-format msgid "Owner: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:257 #, c-format msgid "ID: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:258 #, c-format msgid "Type: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:263 #, c-format msgid "URL: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:264 #, c-format msgid "Interface: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:266 msgid "Interface versions:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:271 msgid "Interface extensions:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:276 msgid "Capabilities:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:280 #, c-format msgid "Technology: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:282 msgid "Supported Profiles:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:286 #, c-format msgid "Implementor: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:287 #, c-format msgid "Implementation name: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:288 #, c-format msgid "Quality level: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:289 #, c-format msgid "Health state: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:290 #, c-format msgid "Health state info: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:291 #, c-format msgid "Serving state: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:292 #, c-format msgid "Issuer CA: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:294 msgid "Trusted CAs:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:298 #, c-format msgid "Downtime starts: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:299 #, c-format msgid "Downtime ends: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:300 #, c-format msgid "Staging: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:302 msgid "Job descriptions:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:314 #, c-format msgid "Scheme: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:317 #, c-format msgid "Rule: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:329 #, c-format msgid "Mapping queue: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:330 #, c-format msgid "Max wall-time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:331 #, c-format msgid "Max total wall-time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:332 #, c-format msgid "Min wall-time: %s" msgstr "" #: 
src/hed/libs/compute/ExecutionTarget.cpp:333 #, c-format msgid "Default wall-time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:334 #, c-format msgid "Max CPU time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:335 #, c-format msgid "Min CPU time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:336 #, c-format msgid "Default CPU time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:337 #, c-format msgid "Max total jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:338 #, c-format msgid "Max running jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:339 #, c-format msgid "Max waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:340 #, c-format msgid "Max pre-LRMS waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:341 #, c-format msgid "Max user running jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:342 #, c-format msgid "Max slots per job: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:343 #, c-format msgid "Max stage in streams: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:344 #, c-format msgid "Max stage out streams: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:345 #, c-format msgid "Scheduling policy: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:346 #, c-format msgid "Max memory: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:347 #, c-format msgid "Max virtual memory: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:348 #, c-format msgid "Max disk space: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:349 #, c-format msgid "Default Storage Service: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:350 msgid "Supports preemption" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:351 msgid "Doesn't support preemption" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:352 #, c-format msgid "Total jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:353 #, c-format msgid "Running jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:354 #, c-format msgid "Local running jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:355 #, c-format msgid "Waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:356 #, c-format msgid "Local waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:357 #, c-format msgid "Suspended jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:358 #, c-format msgid "Local suspended jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:359 #, c-format msgid "Staging jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:360 #, c-format msgid "Pre-LRMS waiting jobs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:361 #, c-format msgid "Estimated average waiting time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:362 #, c-format msgid "Estimated worst waiting time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:363 #, c-format msgid "Free slots: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:365 msgid "Free slots grouped according to time limits (limit: free slots):" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:368 #, c-format msgid " %s: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:369 #, c-format msgid " unspecified: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:372 #, c-format msgid "Used slots: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:373 #, c-format msgid 
"Requested slots: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:374 #, c-format msgid "Reservation policy: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:381 #, c-format msgid "Resource manager: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:383 #, c-format msgid " (%s)" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:387 #, c-format msgid "Total physical CPUs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:388 #, c-format msgid "Total logical CPUs: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:389 #, c-format msgid "Total slots: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:390 msgid "Supports advance reservations" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:391 msgid "Doesn't support advance reservations" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:392 msgid "Supports bulk submission" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:393 msgid "Doesn't support bulk Submission" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:394 msgid "Homogeneous resource" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:395 msgid "Non-homogeneous resource" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:397 msgid "Network information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:402 msgid "Working area is shared among jobs" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:403 msgid "Working area is not shared among jobs" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:404 #, c-format msgid "Working area total size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:405 #, c-format msgid "Working area free size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:406 #, c-format msgid "Working area life time: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:407 #, c-format msgid "Cache area total size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:408 #, c-format msgid "Cache area free size: %i GB" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:414 #, c-format msgid "Platform: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:415 msgid "Execution environment supports inbound connections" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:416 msgid "Execution environment does not support inbound connections" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:417 msgid "Execution environment supports outbound connections" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:418 msgid "Execution environment does not support outbound connections" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:419 msgid "Execution environment is a virtual machine" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:420 msgid "Execution environment is a physical machine" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:421 #, c-format msgid "CPU vendor: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:422 #, c-format msgid "CPU model: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:423 #, c-format msgid "CPU version: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:424 #, c-format msgid "CPU clock speed: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:425 #, c-format msgid "Main memory size: %i" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:426 #, c-format msgid "OS family: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:427 #, c-format msgid "OS name: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:428 #, c-format msgid "OS version: %s" msgstr "" #: 
src/hed/libs/compute/ExecutionTarget.cpp:435 msgid "Computing service:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:459 #, c-format msgid "%d Endpoints" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:464 msgid "Endpoint Information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:476 #, c-format msgid "%d Batch Systems" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:481 msgid "Batch System Information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:487 msgid "Installed application environments:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:500 #, c-format msgid "%d Shares" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:505 msgid "Share Information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:511 #, c-format msgid "%d mapping policies" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:515 msgid "Mapping policy:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:531 #, c-format msgid "Execution Target on Computing Service: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:533 #, c-format msgid " Computing endpoint URL: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:535 #, c-format msgid " Computing endpoint interface name: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:537 #: src/hed/libs/compute/Job.cpp:575 #, c-format msgid " Queue: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:540 #, c-format msgid " Mapping queue: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:543 #, c-format msgid " Health state: %s" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:548 msgid "Service information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:553 msgid " Installed application environments:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:560 msgid "Batch system information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:563 msgid "Queue information:" msgstr "" #: src/hed/libs/compute/ExecutionTarget.cpp:570 msgid " Benchmark information:" msgstr "" #: src/hed/libs/compute/GLUE2.cpp:53 msgid "The Service doesn't advertise its Type." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:58 msgid "The ComputingService doesn't advertise its Quality Level." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:99 msgid "The ComputingEndpoint has no URL." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:104 msgid "The Service advertises no Health State." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:117 msgid "The ComputingEndpoint doesn't advertise its Quality Level." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:128 msgid "The ComputingService doesn't advertise its Interface." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:160 msgid "The ComputingEndpoint doesn't advertise its Serving State." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:247 #, c-format msgid "" "The \"FreeSlotsWithDuration\" attribute published by \"%s\" is wrongly " "formatted. Ignoring it." msgstr "" #: src/hed/libs/compute/GLUE2.cpp:420 #, c-format msgid "" "Couldn't parse benchmark XML:\n" "%s" msgstr "" #: src/hed/libs/compute/Job.cpp:324 msgid "Unable to detect format of job record." 
msgstr "" #: src/hed/libs/compute/Job.cpp:545 #, c-format msgid "Job: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:547 #, c-format msgid " Name: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:548 #, c-format msgid " State: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:551 #, c-format msgid " Specific state: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:555 src/hed/libs/compute/Job.cpp:579 #, c-format msgid " Waiting Position: %d" msgstr "" #: src/hed/libs/compute/Job.cpp:559 #, c-format msgid " Exit Code: %d" msgstr "" #: src/hed/libs/compute/Job.cpp:563 #, c-format msgid " Job Error: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:568 #, c-format msgid " Owner: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:572 #, c-format msgid " Other Messages: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:577 #, c-format msgid " Requested Slots: %d" msgstr "" #: src/hed/libs/compute/Job.cpp:582 #, c-format msgid " Stdin: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:584 #, c-format msgid " Stdout: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:586 #, c-format msgid " Stderr: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:588 #, c-format msgid " Computing Service Log Directory: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:591 #, c-format msgid " Submitted: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:594 #, c-format msgid " End Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:597 #, c-format msgid " Submitted from: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:600 #, c-format msgid " Submitting client: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:603 #, c-format msgid " Requested CPU Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:608 #, c-format msgid " Used CPU Time: %s (%s per slot)" msgstr "" #: src/hed/libs/compute/Job.cpp:612 #, c-format msgid " Used CPU Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:618 #, c-format msgid " Used Wall Time: %s (%s per slot)" msgstr "" #: src/hed/libs/compute/Job.cpp:622 #, c-format msgid " Used Wall Time: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:627 #, c-format msgid " Used Memory: %d" msgstr "" #: src/hed/libs/compute/Job.cpp:631 #, c-format msgid " Results were deleted: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:632 #, c-format msgid " Results must be retrieved before: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:636 #, c-format msgid " Proxy valid until: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:640 #, c-format msgid " Entry valid from: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:643 #, c-format msgid " Entry valid for: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:647 msgid " Old job IDs:" msgstr "" #: src/hed/libs/compute/Job.cpp:655 #, c-format msgid " ID on service: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:656 #, c-format msgid " Service information URL: %s (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:657 #, c-format msgid " Job status URL: %s (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:658 #, c-format msgid " Job management URL: %s (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:659 #, c-format msgid " Stagein directory URL: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:660 #, c-format msgid " Stageout directory URL: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:661 #, c-format msgid " Session directory URL: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:663 msgid " Delegation IDs:" msgstr "" #: src/hed/libs/compute/Job.cpp:845 #, c-format msgid "Unable to handle job (%s), no interface specified." 
msgstr "" #: src/hed/libs/compute/Job.cpp:850 #, c-format msgid "" "Unable to handle job (%s), no plugin associated with the specified interface " "(%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:872 #, c-format msgid "Invalid download destination path specified (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:877 #, c-format msgid "" "Unable to download job (%s), no JobControllerPlugin plugin was set to handle " "the job." msgstr "" #: src/hed/libs/compute/Job.cpp:881 #, c-format msgid "Downloading job: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:885 #, c-format msgid "" "Cant retrieve job files for job (%s) - unable to determine URL of stage out " "directory" msgstr "" #: src/hed/libs/compute/Job.cpp:890 #, c-format msgid "Invalid stage out path specified (%s)" msgstr "" #: src/hed/libs/compute/Job.cpp:897 #, c-format msgid "%s directory exist! Skipping job." msgstr "" #: src/hed/libs/compute/Job.cpp:903 #, c-format msgid "Unable to retrieve list of job files to download for job %s" msgstr "" #: src/hed/libs/compute/Job.cpp:908 #, c-format msgid "No files to retrieve for job %s" msgstr "" #: src/hed/libs/compute/Job.cpp:914 #, c-format msgid "Failed to create directory %s! Skipping job." msgstr "" #: src/hed/libs/compute/Job.cpp:927 #, c-format msgid "Failed downloading %s to %s, destination already exist" msgstr "" #: src/hed/libs/compute/Job.cpp:933 #, c-format msgid "Failed downloading %s to %s, unable to remove existing destination" msgstr "" #: src/hed/libs/compute/Job.cpp:939 #, c-format msgid "Failed downloading %s to %s" msgstr "" #: src/hed/libs/compute/Job.cpp:952 #, c-format msgid "Unable to initialize handler for %s" msgstr "" #: src/hed/libs/compute/Job.cpp:957 #, c-format msgid "Unable to list files at %s" msgstr "" #: src/hed/libs/compute/Job.cpp:999 msgid "Now copying (from -> to)" msgstr "" #: src/hed/libs/compute/Job.cpp:1000 #, c-format msgid " %s -> %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1015 #, c-format msgid "Unable to initialise connection to source: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1026 #, c-format msgid "Unable to initialise connection to destination: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1045 #, c-format msgid "File download failed: %s" msgstr "" #: src/hed/libs/compute/Job.cpp:1084 src/hed/libs/compute/Job.cpp:1113 #: src/hed/libs/compute/Job.cpp:1145 src/hed/libs/compute/Job.cpp:1178 #, c-format msgid "Waiting for lock on file %s" msgstr "" #: src/hed/libs/compute/JobControllerPlugin.cpp:101 #, c-format msgid "JobControllerPlugin plugin \"%s\" not found." 
msgstr "" #: src/hed/libs/compute/JobControllerPlugin.cpp:110 #, c-format msgid "JobControllerPlugin %s could not be created" msgstr "" #: src/hed/libs/compute/JobControllerPlugin.cpp:115 #, c-format msgid "Loaded JobControllerPlugin %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:22 #, c-format msgid ": %d" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:24 #, c-format msgid ": %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:138 msgid " --- DRY RUN --- " msgstr "" #: src/hed/libs/compute/JobDescription.cpp:148 #, c-format msgid " Annotation: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:154 #, c-format msgid " Old activity ID: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:160 #, c-format msgid " Argument: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:171 #, c-format msgid " RemoteLogging (optional): %s (%s)" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:174 #, c-format msgid " RemoteLogging: %s (%s)" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:182 #, c-format msgid " Environment.name: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:183 #, c-format msgid " Environment: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:196 #, c-format msgid " PreExecutable.Argument: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:199 #: src/hed/libs/compute/JobDescription.cpp:217 #, c-format msgid " Exit code for successful execution: %d" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:202 #: src/hed/libs/compute/JobDescription.cpp:220 msgid " No exit code for successful execution specified." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:214 #, c-format msgid " PostExecutable.Argument: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:230 #, c-format msgid " Access control: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:234 #, c-format msgid " Processing start time: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:237 msgid " Notify:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:251 #, c-format msgid " Credential service: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:261 msgid " Operating system requirements:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:279 msgid " Computing endpoint requirements:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:292 msgid " Node access: inbound" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:295 msgid " Node access: outbound" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:298 msgid " Node access: inbound and outbound" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:308 msgid " Job requires exclusive execution" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:311 msgid " Job does not require exclusive execution" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:316 msgid " Run time environment requirements:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:328 msgid " Inputfile element:" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:329 #: src/hed/libs/compute/JobDescription.cpp:351 #, c-format msgid " Name: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:331 msgid " Is executable: true" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:335 #, c-format msgid " Sources: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:337 #, c-format msgid " Sources.DelegationID: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:341 #, c-format msgid " Sources.Options: %s = %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:350 msgid " Outputfile element:" 
msgstr "" #: src/hed/libs/compute/JobDescription.cpp:354 #, c-format msgid " Targets: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:356 #, c-format msgid " Targets.DelegationID: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:360 #, c-format msgid " Targets.Options: %s = %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:367 #, c-format msgid " DelegationID element: %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:374 #, c-format msgid " Other attributes: [%s], %s" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:440 msgid "Empty job description source string" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:473 msgid "No job description parsers available" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:475 #, c-format msgid "" "No job description parsers suitable for handling '%s' language are available" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:483 #, c-format msgid "%s parsing error" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:499 msgid "No job description parser was able to interpret job description" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:509 msgid "" "Job description language is not specified, unable to output description." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:521 #, c-format msgid "Generating %s job description output" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:537 #, c-format msgid "Language (%s) not recognized by any job description parsers." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:550 #, c-format msgid "Two input files have identical name '%s'." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:569 #: src/hed/libs/compute/JobDescription.cpp:582 #, c-format msgid "Cannot stat local input file '%s'" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:602 #, c-format msgid "Cannot find local input file '%s' (%s)" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:644 msgid "Unable to select runtime environment" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:651 msgid "Unable to select middleware" msgstr "" #: src/hed/libs/compute/JobDescription.cpp:658 msgid "Unable to select operating system." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:677 #, c-format msgid "No test-job with ID %d found." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:689 #, c-format msgid "Test was defined with ID %d, but some error occurred during parsing it." msgstr "" #: src/hed/libs/compute/JobDescription.cpp:693 #, c-format msgid "No jobdescription resulted at %d test" msgstr "" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:52 #, c-format msgid "JobDescriptionParserPlugin plugin \"%s\" not found." 
msgstr "" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:59 #, c-format msgid "JobDescriptionParserPlugin %s could not be created" msgstr "" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:64 #, c-format msgid "Loaded JobDescriptionParserPlugin %s" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:328 msgid "Unable to create temporary directory" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:336 #, c-format msgid "Unable to create data base environment (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:346 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:350 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:354 #, c-format msgid "Unable to set duplicate flags for secondary key DB (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:360 #, c-format msgid "Unable to create job database (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:364 #, c-format msgid "Unable to create DB for secondary name keys (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:368 #, c-format msgid "Unable to create DB for secondary endpoint keys (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:372 #, c-format msgid "Unable to create DB for secondary service info keys (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:377 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:381 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:385 #, c-format msgid "Unable to associate secondary DB with primary DB (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:388 #, c-format msgid "Job database created successfully (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:430 #, c-format msgid "Error from BDB: %s: %s" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:433 #, c-format msgid "Error from BDB: %s" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:453 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:221 #: src/hed/libs/compute/JobInformationStorageXML.cpp:27 #, c-format msgid "" "Job list file cannot be created: The parent directory (%s) doesn't exist." msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:457 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:225 #: src/hed/libs/compute/JobInformationStorageXML.cpp:31 #, c-format msgid "Job list file cannot be created: %s is not a directory" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:464 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:232 #: src/hed/libs/compute/JobInformationStorageXML.cpp:38 #, c-format msgid "Job list file (%s) is not a regular file" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:502 #: src/hed/libs/compute/JobInformationStorageBDB.cpp:561 #, c-format msgid "Unable to write key/value pair to job database (%s): Key \"%s\"" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:728 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:622 #: src/hed/libs/compute/JobInformationStorageXML.cpp:137 #, c-format msgid "Unable to truncate job database (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:759 msgid "" "ENOENT: The file or directory does not exist, Or a nonexistent re_source " "file was specified." msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:762 msgid "" "DB_OLD_VERSION: The database cannot be opened without being first upgraded." 
msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:765 msgid "EEXIST: DB_CREATE and DB_EXCL were specified and the database exists." msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:767 msgid "EINVAL" msgstr "" #: src/hed/libs/compute/JobInformationStorageBDB.cpp:770 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:655 #, c-format msgid "Unable to determine error (%d)" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:120 #, c-format msgid "Unable to create data base (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:128 #, c-format msgid "Unable to create jobs table in data base (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:137 #, c-format msgid "Unable to create jobs_new table in data base (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:143 #, c-format msgid "Unable to transfer from jobs to jobs_new in data base (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:149 #, c-format msgid "Unable to drop jobs in data base (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:155 #, c-format msgid "Unable to rename jobs table in data base (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:165 #, c-format msgid "Unable to create index for jobs table in data base (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:173 #, c-format msgid "Failed checking database (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:175 #, c-format msgid "Job database connection established successfully (%s)" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:197 #, c-format msgid "Error from SQLite: %s: %s" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:200 #, c-format msgid "Error from SQLite: %s" msgstr "" #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:362 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:369 #: src/hed/libs/compute/JobInformationStorageSQLite.cpp:376 #, c-format msgid "Unable to write records into job database (%s): Id \"%s\"" msgstr "" #: src/hed/libs/compute/JobInformationStorageXML.cpp:51 #: src/hed/libs/compute/JobInformationStorageXML.cpp:223 #: src/hed/libs/compute/JobInformationStorageXML.cpp:264 #, c-format msgid "Waiting for lock on job list file %s" msgstr "" #: src/hed/libs/compute/JobInformationStorageXML.cpp:162 #, c-format msgid "Will remove %s on service %s." 
msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:36 msgid "Ignoring job, the job ID is empty" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:41 #, c-format msgid "Ignoring job (%s), the management interface name is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:46 #, c-format msgid "Ignoring job (%s), the job management URL is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:51 #, c-format msgid "Ignoring job (%s), the status interface name is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:56 #, c-format msgid "Ignoring job (%s), the job status URL is unknown" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:65 #, c-format msgid "Ignoring job (%s), unable to load JobControllerPlugin for %s" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:72 #, c-format msgid "" "Ignoring job (%s), already tried and were unable to load JobControllerPlugin" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:385 #, c-format msgid "Job resubmission failed: Unable to load broker (%s)" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:400 msgid "Job resubmission aborted because no resource returned any information" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:421 #, c-format msgid "Unable to resubmit job (%s), unable to parse obtained job description" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:443 #, c-format msgid "" "Unable to resubmit job (%s), target information retrieval failed for target: " "%s" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:469 #, c-format msgid "Unable to resubmit job (%s), no targets applicable for submission" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:504 #, c-format msgid "" "Unable to migrate job (%s), job description could not be retrieved remotely" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:524 msgid "Job migration aborted, no resource returned any information" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:536 #, c-format msgid "Job migration aborted, unable to load broker (%s)" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:552 #, c-format msgid "Unable to migrate job (%s), unable to parse obtained job description" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:573 #, c-format msgid "Unable to load submission plugin for %s interface" msgstr "" #: src/hed/libs/compute/JobSupervisor.cpp:583 #, c-format msgid "Job migration failed for job (%s), no applicable targets" msgstr "" #: src/hed/libs/compute/Software.cpp:65 src/hed/libs/compute/Software.cpp:94 #: src/hed/libs/compute/Software.cpp:113 #, c-format msgid "%s > %s => false" msgstr "" #: src/hed/libs/compute/Software.cpp:70 src/hed/libs/compute/Software.cpp:83 #: src/hed/libs/compute/Software.cpp:107 #, c-format msgid "%s > %s => true" msgstr "" #: src/hed/libs/compute/Software.cpp:90 src/hed/libs/compute/Software.cpp:102 #, c-format msgid "%s > %s => false: %s contains non numbers in the version part." msgstr "" #: src/hed/libs/compute/Software.cpp:199 src/hed/libs/compute/Software.cpp:210 #, c-format msgid "Requirement \"%s %s\" NOT satisfied." msgstr "" #: src/hed/libs/compute/Software.cpp:205 #, c-format msgid "Requirement \"%s %s\" satisfied." msgstr "" #: src/hed/libs/compute/Software.cpp:214 #, c-format msgid "Requirement \"%s %s\" satisfied by \"%s\"." msgstr "" #: src/hed/libs/compute/Software.cpp:219 msgid "All requirements satisfied." 
msgstr "" #: src/hed/libs/compute/Submitter.cpp:83 #, c-format msgid "Trying to submit directly to endpoint (%s)" msgstr "" #: src/hed/libs/compute/Submitter.cpp:88 #, c-format msgid "Interface (%s) specified, submitting only to that interface" msgstr "" #: src/hed/libs/compute/Submitter.cpp:106 msgid "Trying all available interfaces" msgstr "" #: src/hed/libs/compute/Submitter.cpp:112 #, c-format msgid "Trying to submit endpoint (%s) using interface (%s) with plugin (%s)." msgstr "" #: src/hed/libs/compute/Submitter.cpp:116 #, c-format msgid "" "Unable to load plugin (%s) for interface (%s) when trying to submit job " "description." msgstr "" #: src/hed/libs/compute/Submitter.cpp:130 #, c-format msgid "No more interfaces to try for endpoint %s." msgstr "" #: src/hed/libs/compute/Submitter.cpp:336 #, c-format msgid "Target %s does not match requested interface(s)." msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:64 msgid "No stagein URL is provided" msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:83 #, c-format msgid "Failed uploading file %s to %s: %s" msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:116 #, c-format msgid "Trying to migrate to %s: Migration to a %s interface is not supported." msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:172 #, c-format msgid "SubmitterPlugin plugin \"%s\" not found." msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:182 #, c-format msgid "SubmitterPlugin %s could not be created" msgstr "" #: src/hed/libs/compute/SubmitterPlugin.cpp:187 #, c-format msgid "Loaded SubmitterPlugin %s" msgstr "" #: src/hed/libs/compute/examples/basic_job_submission.cpp:28 msgid "Invalid job description" msgstr "" #: src/hed/libs/compute/examples/basic_job_submission.cpp:47 msgid "Failed to submit job" msgstr "" #: src/hed/libs/compute/examples/basic_job_submission.cpp:54 #, c-format msgid "Failed to write to local job list %s" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:20 msgid "[job description ...]" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:21 msgid "" "This tiny tool can be used for testing the JobDescription's conversion " "abilities." msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:23 msgid "" "The job description also can be a file or a string in ADL or XRSL format." msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:27 msgid "define the requested format (nordugrid:xrsl, emies:adl)" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:28 msgid "format" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:33 msgid "show the original job description" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:43 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:71 msgid "Use --help option for detailed usage information" msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:50 msgid " [ JobDescription tester ] " msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:74 msgid " [ Parsing the original text ] " msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:80 msgid "Unable to parse." 
msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:89 msgid " [ emies:adl ] " msgstr "" #: src/hed/libs/compute/test_jobdescription.cpp:91 msgid " [ nordugrid:xrsl ] " msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:134 msgid "VOMS command is empty" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:254 #, c-format msgid "OpenSSL error -- %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:255 #, c-format msgid "Library : %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:256 #, c-format msgid "Function : %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:257 #, c-format msgid "Reason : %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:313 msgid "User interface error" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:319 msgid "Aborted!" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:418 #: src/hed/libs/credential/ARCProxyUtil.cpp:1399 msgid "Failed to sign proxy" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:437 #: src/hed/libs/credential/Credential.cpp:878 #, c-format msgid "Error: can't open policy file: %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:447 #: src/hed/libs/credential/Credential.cpp:891 #, c-format msgid "Error: policy location: %s is not a regular file" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:546 #, c-format msgid "VOMS line contains wrong number of tokens (%u expected): \"%s\"" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:590 #, c-format msgid "Cannot get VOMS server %s information from the vomses files" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:623 #, c-format msgid "There are %d commands to the same VOMS server %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:677 #, c-format msgid "Try to get attribute from VOMS server with order: %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:680 #, c-format msgid "Message sent to VOMS server %s is: %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:699 #: src/hed/libs/credential/ARCProxyUtil.cpp:720 #, c-format msgid "" "The VOMS server with the information:\n" "\t%s\n" "can not be reached, please make sure it is available" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:703 msgid "No HTTP response from VOMS server" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:708 #: src/hed/libs/credential/ARCProxyUtil.cpp:734 #, c-format msgid "Returned message from VOMS server: %s" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:724 msgid "No stream response from VOMS server" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:746 #, c-format msgid "" "The validity duration of VOMS AC is shortened from %s to %s, due to the " "validity constraint on voms server side.\n" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:749 #, c-format msgid "" "Cannot get any AC or attributes info from VOMS server: %s;\n" " Returned message from VOMS server: %s\n" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:754 #, c-format msgid "Returned message from VOMS server %s is: %s\n" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:776 #, c-format msgid "The attribute information from VOMS server: %s is list as following:" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:788 #, c-format msgid "" "There are %d servers with the same name: %s in your vomses file, but none of " "them can be reached, or can return valid message. But proxy without VOMS AC " "extension will still be generated." 
msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:1321 #: src/hed/libs/credential/ARCProxyUtil.cpp:1428 msgid "Failed to generate X509 request with NSS" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:1332 #: src/hed/libs/credential/ARCProxyUtil.cpp:1439 #: src/hed/libs/credential/ARCProxyUtil.cpp:1480 msgid "Failed to create X509 certificate with NSS" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:1344 #: src/hed/libs/credential/ARCProxyUtil.cpp:1451 #: src/hed/libs/credential/ARCProxyUtil.cpp:1504 msgid "Failed to export X509 certificate from NSS DB" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:1487 msgid "Failed to import X509 certificate into NSS DB" msgstr "" #: src/hed/libs/credential/ARCProxyUtil.cpp:1496 msgid "Failed to initialize the credential configuration" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:162 #, c-format msgid "Error number in store context: %i" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:163 msgid "Self-signed certificate" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:166 #, c-format msgid "The certificate with subject %s is not valid" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:169 #, c-format msgid "" "Can not find issuer certificate for the certificate with subject %s and " "hash: %lu" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:172 #, c-format msgid "Certificate with subject %s has expired" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:175 #, c-format msgid "" "Untrusted self-signed certificate in chain with subject %s and hash: %lu" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:177 #, c-format msgid "Certificate verification error: %s" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:189 msgid "Can not get the certificate type" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:229 msgid "Couldn't verify availability of CRL" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:242 msgid "In the available CRL the lastUpdate field is not valid" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:249 msgid "The available CRL is not yet valid" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:258 msgid "In the available CRL, the nextUpdate field is not valid" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:264 msgid "The available CRL has expired" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:287 #, c-format msgid "Certificate with serial number %s and subject \"%s\" is revoked" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:305 msgid "" "Directory of trusted CAs is not specified/found; Using current path as the " "CA direcroty" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:314 msgid "Can't allocate memory for CA policy path" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:360 #, c-format msgid "Certificate has unknown extension with numeric ID %u and SN %s" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:374 #: src/hed/libs/credential/Credential.cpp:1697 msgid "" "Can not convert DER encoded PROXY_CERT_INFO_EXTENSION extension to internal " "format" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:420 msgid "Trying to check X509 cert with check_cert_type" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:459 msgid "Can't convert DER encoded PROXYCERTINFO extension to internal format" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:463 msgid "Can't get policy from PROXYCERTINFO extension" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:467 msgid "Can't get policy language from PROXYCERTINFO extension" msgstr "" #: src/hed/libs/credential/CertUtil.cpp:499 msgid "The subject does not 
match the issuer name + proxy CN entry" msgstr "" #: src/hed/libs/credential/Credential.cpp:84 #, c-format msgid "OpenSSL error string: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:205 msgid "Can't get the first byte of input to determine its format" msgstr "" #: src/hed/libs/credential/Credential.cpp:219 msgid "Can't reset the input" msgstr "" #: src/hed/libs/credential/Credential.cpp:244 #: src/hed/libs/credential/Credential.cpp:280 msgid "Can't get the first byte of input BIO to get its format" msgstr "" #: src/hed/libs/credential/Credential.cpp:256 msgid "Can not read certificate/key string" msgstr "" #: src/hed/libs/credential/Credential.cpp:460 #, c-format msgid "Can not find certificate file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:465 #, c-format msgid "Can not read certificate file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:503 msgid "Can not read certificate string" msgstr "" #: src/hed/libs/credential/Credential.cpp:523 msgid "Certificate format is PEM" msgstr "" #: src/hed/libs/credential/Credential.cpp:550 msgid "Certificate format is DER" msgstr "" #: src/hed/libs/credential/Credential.cpp:579 msgid "Certificate format is PKCS" msgstr "" #: src/hed/libs/credential/Credential.cpp:605 msgid "Certificate format is unknown" msgstr "" #: src/hed/libs/credential/Credential.cpp:613 #, c-format msgid "Can not find key file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:618 #, c-format msgid "Can not open key file %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:637 msgid "Can not read key string" msgstr "" #: src/hed/libs/credential/Credential.cpp:700 #: src/hed/libs/credential/VOMSUtil.cpp:244 msgid "Failed to lock arccredential library in memory" msgstr "" #: src/hed/libs/credential/Credential.cpp:712 msgid "Certificate verification succeeded" msgstr "" #: src/hed/libs/credential/Credential.cpp:716 msgid "Certificate verification failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:729 #: src/hed/libs/credential/Credential.cpp:747 #: src/hed/libs/credential/Credential.cpp:765 #: src/hed/libs/credential/Credential.cpp:996 #: src/hed/libs/credential/Credential.cpp:2368 #: src/hed/libs/credential/Credential.cpp:2397 msgid "Failed to initialize extensions member for Credential" msgstr "" #: src/hed/libs/credential/Credential.cpp:808 #, c-format msgid "Unsupported proxy policy language is requested - %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:820 #, c-format msgid "Unsupported proxy version is requested - %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:831 msgid "If you specify a policy you also need to specify a policy language" msgstr "" #: src/hed/libs/credential/Credential.cpp:1001 msgid "Certificate/Proxy path is empty" msgstr "" #: src/hed/libs/credential/Credential.cpp:1059 #: src/hed/libs/credential/Credential.cpp:2905 msgid "Failed to duplicate extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1063 msgid "Failed to add extension into credential extensions" msgstr "" #: src/hed/libs/credential/Credential.cpp:1074 msgid "Certificate information collection failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1113 #: src/hed/libs/credential/Credential.cpp:1118 msgid "Can not convert string into ASN1_OBJECT" msgstr "" #: src/hed/libs/credential/Credential.cpp:1125 msgid "Can not create ASN1_OCTET_STRING" msgstr "" #: src/hed/libs/credential/Credential.cpp:1134 msgid "Can not allocate memory for extension for proxy certificate" msgstr "" #: 
src/hed/libs/credential/Credential.cpp:1144 msgid "Can not create extension for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:1180 #: src/hed/libs/credential/Credential.cpp:1348 msgid "BN_set_word failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1189 #: src/hed/libs/credential/Credential.cpp:1357 msgid "RSA_generate_key_ex failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1198 #: src/hed/libs/credential/Credential.cpp:1365 msgid "BN_new || RSA_new failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1209 msgid "Created RSA key, proceeding with request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1214 msgid "pkey and rsa_key exist!" msgstr "" #: src/hed/libs/credential/Credential.cpp:1217 msgid "Generate new X509 request!" msgstr "" #: src/hed/libs/credential/Credential.cpp:1222 msgid "Setting subject name!" msgstr "" #: src/hed/libs/credential/Credential.cpp:1230 #: src/hed/libs/credential/Credential.cpp:1444 msgid "PEM_write_bio_X509_REQ failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1260 #: src/hed/libs/credential/Credential.cpp:1301 #: src/hed/libs/credential/Credential.cpp:1476 #: src/hed/libs/credential/Credential.cpp:1496 msgid "Can not create BIO for request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1278 msgid "Failed to write request into string" msgstr "" #: src/hed/libs/credential/Credential.cpp:1305 #: src/hed/libs/credential/Credential.cpp:1310 #: src/hed/libs/credential/Credential.cpp:1500 msgid "Can not set writable file for request BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:1316 #: src/hed/libs/credential/Credential.cpp:1505 msgid "Wrote request into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1318 #: src/hed/libs/credential/Credential.cpp:1508 msgid "Failed to write request into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1338 msgid "The credential's private key has already been initialized" msgstr "" #: src/hed/libs/credential/Credential.cpp:1386 msgid "" "Can not duplicate the subject name for the self-signing proxy certificate " "request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1396 msgid "Can not create a new X509_NAME_ENTRY for the proxy certificate request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1414 #: src/hed/libs/credential/Credential.cpp:1421 #: src/hed/libs/credential/Credential.cpp:1999 #: src/hed/libs/credential/Credential.cpp:2007 msgid "" "Can not convert PROXY_CERT_INFO_EXTENSION struct from internal to DER " "encoded format" msgstr "" #: src/hed/libs/credential/Credential.cpp:1451 msgid "Can't convert X509 request from internal to DER encoded format" msgstr "" #: src/hed/libs/credential/Credential.cpp:1461 msgid "Can not generate X509 request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1463 msgid "Can not set private key" msgstr "" #: src/hed/libs/credential/Credential.cpp:1561 msgid "Failed to get private key" msgstr "" #: src/hed/libs/credential/Credential.cpp:1580 msgid "Failed to get public key from RSA object" msgstr "" #: src/hed/libs/credential/Credential.cpp:1588 msgid "Failed to get public key from X509 object" msgstr "" #: src/hed/libs/credential/Credential.cpp:1595 msgid "Failed to get public key" msgstr "" #: src/hed/libs/credential/Credential.cpp:1633 #, c-format msgid "Certiticate chain number %d" msgstr "" #: src/hed/libs/credential/Credential.cpp:1661 msgid "NULL BIO passed to InquireRequest" msgstr "" #: src/hed/libs/credential/Credential.cpp:1664 msgid "PEM_read_bio_X509_REQ 
failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1668 msgid "d2i_X509_REQ_bio failed" msgstr "" #: src/hed/libs/credential/Credential.cpp:1690 msgid "Missing data in DER encoded PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1702 msgid "Can not create PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1712 msgid "Can not get policy from PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1716 msgid "Can not get policy language from PROXY_CERT_INFO_EXTENSION extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:1732 #, c-format msgid "Cert Type: %d" msgstr "" #: src/hed/libs/credential/Credential.cpp:1745 #: src/hed/libs/credential/Credential.cpp:1764 msgid "Can not create BIO for parsing request" msgstr "" #: src/hed/libs/credential/Credential.cpp:1750 msgid "Read request from a string" msgstr "" #: src/hed/libs/credential/Credential.cpp:1753 msgid "Failed to read request from a string" msgstr "" #: src/hed/libs/credential/Credential.cpp:1768 msgid "Can not set readable file for request BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:1773 msgid "Read request from a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1776 msgid "Failed to read request from a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:1816 msgid "Can not convert private key to DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:1980 msgid "Credential is not initialized" msgstr "" #: src/hed/libs/credential/Credential.cpp:1986 msgid "Failed to duplicate X509 structure" msgstr "" #: src/hed/libs/credential/Credential.cpp:1991 msgid "Failed to initialize X509 structure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2014 msgid "Can not create extension for PROXY_CERT_INFO" msgstr "" #: src/hed/libs/credential/Credential.cpp:2018 #: src/hed/libs/credential/Credential.cpp:2066 msgid "Can not add X509 extension to proxy cert" msgstr "" #: src/hed/libs/credential/Credential.cpp:2034 msgid "Can not convert keyUsage struct from DER encoded format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2046 #: src/hed/libs/credential/Credential.cpp:2055 msgid "Can not convert keyUsage struct from internal to DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2062 msgid "Can not create extension for keyUsage" msgstr "" #: src/hed/libs/credential/Credential.cpp:2075 msgid "Can not get extended KeyUsage extension from issuer certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2080 msgid "Can not copy extended KeyUsage extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:2085 msgid "Can not add X509 extended KeyUsage extension to new proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2095 msgid "Can not compute digest of public key" msgstr "" #: src/hed/libs/credential/Credential.cpp:2106 msgid "Can not copy the subject name from issuer for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2112 msgid "Can not create name entry CN for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2117 msgid "Can not set CN in proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2125 msgid "Can not set issuer's subject for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2130 msgid "Can not set version number for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2138 msgid "Can not set serial number for proxy 
certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2144 msgid "Can not duplicate serial number for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2150 msgid "Can not set the lifetime for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2154 msgid "Can not set pubkey for proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2170 #: src/hed/libs/credential/Credential.cpp:2795 msgid "The credential to be signed is NULL" msgstr "" #: src/hed/libs/credential/Credential.cpp:2174 #: src/hed/libs/credential/Credential.cpp:2799 msgid "The credential to be signed contains no request" msgstr "" #: src/hed/libs/credential/Credential.cpp:2178 #: src/hed/libs/credential/Credential.cpp:2803 msgid "The BIO for output is NULL" msgstr "" #: src/hed/libs/credential/Credential.cpp:2192 #: src/hed/libs/credential/Credential.cpp:2810 msgid "Error when extracting public key from request" msgstr "" #: src/hed/libs/credential/Credential.cpp:2197 #: src/hed/libs/credential/Credential.cpp:2814 msgid "Failed to verify the request" msgstr "" #: src/hed/libs/credential/Credential.cpp:2201 msgid "Failed to add issuer's extension into proxy" msgstr "" #: src/hed/libs/credential/Credential.cpp:2225 msgid "Failed to find extension" msgstr "" #: src/hed/libs/credential/Credential.cpp:2237 msgid "Can not get the issuer's private key" msgstr "" #: src/hed/libs/credential/Credential.cpp:2244 #: src/hed/libs/credential/Credential.cpp:2846 msgid "There is no digest in issuer's private key object" msgstr "" #: src/hed/libs/credential/Credential.cpp:2249 #: src/hed/libs/credential/Credential.cpp:2850 #, c-format msgid "%s is an unsupported digest type" msgstr "" #: src/hed/libs/credential/Credential.cpp:2260 #, c-format msgid "" "The signing algorithm %s is not allowed,it should be SHA1 or SHA2 to sign " "certificate requests" msgstr "" #: src/hed/libs/credential/Credential.cpp:2266 msgid "Failed to sign the proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2268 msgid "Succeeded to sign the proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2273 msgid "Failed to verify the signed certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2275 msgid "Succeeded to verify the signed certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2280 #: src/hed/libs/credential/Credential.cpp:2289 msgid "Output the proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2283 msgid "Can not convert signed proxy cert into PEM format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2292 msgid "Can not convert signed proxy cert into DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2308 #: src/hed/libs/credential/Credential.cpp:2331 msgid "Can not create BIO for signed proxy certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2335 msgid "Can not set writable file for signed proxy certificate BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:2340 msgid "Wrote signed proxy certificate into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:2343 msgid "Failed to write signed proxy certificate into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:2377 #: src/hed/libs/credential/Credential.cpp:2415 #, c-format msgid "ERROR: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2423 #, c-format msgid "SSL error: %s, libs: %s, func: %s, reason: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2468 #, c-format msgid "unable to load number 
from: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2473 msgid "error converting number from bin to BIGNUM" msgstr "" #: src/hed/libs/credential/Credential.cpp:2500 msgid "file name too long" msgstr "" #: src/hed/libs/credential/Credential.cpp:2523 msgid "error converting serial to ASN.1 format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2556 #, c-format msgid "load serial from %s failure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2561 msgid "add_word failure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2566 #, c-format msgid "save serial to %s failure" msgstr "" #: src/hed/libs/credential/Credential.cpp:2586 msgid "Error initialising X509 store" msgstr "" #: src/hed/libs/credential/Credential.cpp:2593 msgid "Out of memory when generate random serial" msgstr "" #: src/hed/libs/credential/Credential.cpp:2605 msgid "CA certificate and CA private key do not match" msgstr "" #: src/hed/libs/credential/Credential.cpp:2629 #, c-format msgid "Failed to load extension section: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2666 msgid "malloc error" msgstr "" #: src/hed/libs/credential/Credential.cpp:2670 msgid "Subject does not start with '/'" msgstr "" #: src/hed/libs/credential/Credential.cpp:2686 #: src/hed/libs/credential/Credential.cpp:2707 msgid "escape character at end of string" msgstr "" #: src/hed/libs/credential/Credential.cpp:2698 #, c-format msgid "" "end of string encountered while processing type of subject name element #%d" msgstr "" #: src/hed/libs/credential/Credential.cpp:2735 #, c-format msgid "Subject Attribute %s has no known NID, skipped" msgstr "" #: src/hed/libs/credential/Credential.cpp:2739 #, c-format msgid "No value provided for Subject Attribute %s skipped" msgstr "" #: src/hed/libs/credential/Credential.cpp:2780 msgid "Failed to set the pubkey for X509 object by using pubkey from X509_REQ" msgstr "" #: src/hed/libs/credential/Credential.cpp:2790 msgid "The private key for signing is not initialized" msgstr "" #: src/hed/libs/credential/Credential.cpp:2869 #, c-format msgid "Error when loading the extension config file: %s" msgstr "" #: src/hed/libs/credential/Credential.cpp:2873 #, c-format msgid "Error when loading the extension config file: %s on line: %d" msgstr "" #: src/hed/libs/credential/Credential.cpp:2921 msgid "Can not sign a EEC" msgstr "" #: src/hed/libs/credential/Credential.cpp:2925 msgid "Output EEC certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2928 msgid "Can not convert signed EEC cert into DER format" msgstr "" #: src/hed/libs/credential/Credential.cpp:2942 #: src/hed/libs/credential/Credential.cpp:2961 msgid "Can not create BIO for signed EEC certificate" msgstr "" #: src/hed/libs/credential/Credential.cpp:2965 msgid "Can not set writable file for signed EEC certificate BIO" msgstr "" #: src/hed/libs/credential/Credential.cpp:2970 msgid "Wrote signed EEC certificate into a file" msgstr "" #: src/hed/libs/credential/Credential.cpp:2973 msgid "Failed to write signed EEC certificate into a file" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:143 msgid "Error writing raw certificate" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:222 msgid "Failed to add RFC proxy OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:225 #, c-format msgid "Succeeded to add RFC proxy OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:231 msgid "Failed to add anyLanguage OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:234 #, c-format msgid "Succeeded to add anyLanguage OID, 
tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:240 msgid "Failed to add inheritAll OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:243 #, c-format msgid "Succeeded to add inheritAll OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:249 msgid "Failed to add Independent OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:252 #, c-format msgid "Succeeded to add Independent OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:258 msgid "Failed to add VOMS AC sequence OID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:261 #, c-format msgid "Succeeded to add VOMS AC sequence OID, tag %d is returned" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:290 #, c-format msgid "NSS initialization failed on certificate database: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:301 msgid "Succeeded to initialize NSS" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:323 #, c-format msgid "Failed to read attribute %x from private key." msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:375 msgid "Succeeded to get credential" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:376 msgid "Failed to get credential" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:438 msgid "p12 file is empty" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:448 msgid "Unable to write to p12 file" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:464 msgid "Failed to open p12 file" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:492 msgid "Failed to allocate p12 context" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1200 msgid "Failed to find issuer certificate for proxy certificate" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1351 #, c-format msgid "Failed to authenticate to PKCS11 slot %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1357 #, c-format msgid "Failed to find certificates by nickname: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1362 #, c-format msgid "No user certificate by nickname %s found" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1375 #: src/hed/libs/credential/NSSUtil.cpp:1411 msgid "Certificate does not have a slot" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1381 msgid "Failed to create export context" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1396 msgid "PKCS12 output password not provided" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1403 msgid "PKCS12 add password integrity failed" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1424 msgid "Failed to create key or certificate safe" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1440 msgid "Failed to add certificate and key" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1449 #, c-format msgid "Failed to initialize PKCS12 file: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1454 msgid "Failed to encode PKCS12" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1457 msgid "Succeeded to export PKCS12" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1485 #, c-format msgid "" "There is no certificate named %s found, the certificate could be removed " "when generating CSR" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1491 msgid "Failed to delete certificate" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1505 msgid "The name of the private key to delete is empty" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1510 #: src/hed/libs/credential/NSSUtil.cpp:2939 #: src/hed/libs/credential/NSSUtil.cpp:2956 #, c-format msgid "Failed to authenticate to token %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1517 #, c-format 
msgid "No private key with nickname %s exist in NSS database" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1550 msgid "Failed to delete private key and certificate" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1560 msgid "Failed to delete private key" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1571 #, c-format msgid "Can not find key with name: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1599 msgid "Can not read PEM private key: probably bad password" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1601 msgid "Can not read PEM private key: failed to decrypt" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1603 #: src/hed/libs/credential/NSSUtil.cpp:1605 msgid "Can not read PEM private key: failed to obtain password" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1606 msgid "Can not read PEM private key" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1613 msgid "Failed to convert EVP_PKEY to PKCS8" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1650 msgid "Failed to load private key" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1651 msgid "Succeeded to load PrivateKeyInfo" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1654 msgid "Failed to convert PrivateKeyInfo to EVP_PKEY" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1655 msgid "Succeeded to convert PrivateKeyInfo to EVP_PKEY" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1692 msgid "Failed to import private key" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1695 msgid "Succeeded to import private key" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1708 #: src/hed/libs/credential/NSSUtil.cpp:1750 #: src/hed/libs/credential/NSSUtil.cpp:2889 msgid "Failed to authenticate to key database" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1717 msgid "Succeeded to generate public/private key pair" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1719 msgid "Failed to generate public/private key pair" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1724 msgid "Failed to export private key" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1791 msgid "Failed to create subject name" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1807 msgid "Failed to create certificate request" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1820 msgid "Failed to call PORT_NewArena" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1828 msgid "Failed to encode the certificate request with DER format" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1835 msgid "Unknown key or hash type" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1841 msgid "Failed to sign the certificate request" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1857 msgid "Failed to output the certificate request as ASCII format" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1866 msgid "Failed to output the certificate request as DER format" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1875 #, c-format msgid "Succeeded to output the certificate request into %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1914 #: src/hed/libs/credential/NSSUtil.cpp:1951 msgid "Failed to read data from input file" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1930 msgid "Input is without trailer\n" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1941 msgid "Failed to convert ASCII to DER" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:1992 msgid "Certificate request is invalid" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2212 #, c-format msgid "The policy language: %s is not supported" msgstr "" #: 
src/hed/libs/credential/NSSUtil.cpp:2220 #: src/hed/libs/credential/NSSUtil.cpp:2245 #: src/hed/libs/credential/NSSUtil.cpp:2268 #: src/hed/libs/credential/NSSUtil.cpp:2290 msgid "Failed to new arena" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2229 #: src/hed/libs/credential/NSSUtil.cpp:2254 msgid "Failed to create path length" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2232 #: src/hed/libs/credential/NSSUtil.cpp:2257 #: src/hed/libs/credential/NSSUtil.cpp:2277 #: src/hed/libs/credential/NSSUtil.cpp:2299 msgid "Failed to create policy language" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2700 #, c-format msgid "Failed to parse certificate request from CSR file %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2707 #, c-format msgid "Can not find certificate with name %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2739 msgid "Can not allocate memory" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2747 #, c-format msgid "Proxy subject: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2764 msgid "Failed to start certificate extension" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2769 msgid "Failed to add key usage extension" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2774 msgid "Failed to add proxy certificate information extension" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2778 msgid "Failed to add voms AC extension" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2798 msgid "Failed to retrieve private key for issuer" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2805 msgid "Unknown key or hash type of issuer" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2811 msgid "Failed to set signature algorithm ID" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2823 msgid "Failed to encode certificate" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2829 msgid "Failed to allocate item for certificate data" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2835 msgid "Failed to sign encoded certificate data" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2844 #, c-format msgid "Failed to open file %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2855 #, c-format msgid "Succeeded to output certificate to %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2896 #, c-format msgid "Failed to open input certificate file %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2913 msgid "Failed to read input certificate file" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2918 msgid "Failed to get certificate from certificate file" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2925 msgid "Failed to allocate certificate trust" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2930 msgid "Failed to decode trust string" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2944 #: src/hed/libs/credential/NSSUtil.cpp:2961 msgid "Failed to add certificate to token or database" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2947 #: src/hed/libs/credential/NSSUtil.cpp:2950 msgid "Succeeded to import certificate" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2964 #: src/hed/libs/credential/NSSUtil.cpp:2967 #, c-format msgid "Succeeded to change trusts to: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2994 #, c-format msgid "Failed to import private key from file: %s" msgstr "" #: src/hed/libs/credential/NSSUtil.cpp:2996 #, c-format msgid "Failed to import certificate from file: %s" msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:142 #, c-format msgid "" "ERROR: VOMS configuration line contains too many tokens. Expecting 5 or 6. 
" "Line was: %s" msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:158 #, c-format msgid "" "ERROR: file tree is too deep while scanning VOMS configuration. Max allowed " "nesting is %i." msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:176 #, c-format msgid "ERROR: failed to read file %s while scanning VOMS configuration." msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:181 #, c-format msgid "" "ERROR: VOMS configuration file %s contains too many lines. Max supported " "number is %i." msgstr "" #: src/hed/libs/credential/VOMSConfig.cpp:188 #, c-format msgid "" "ERROR: VOMS configuration file %s contains too long line(s). Max supported " "length is %i characters." msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:171 #, c-format msgid "Failed to create OpenSSL object %s %s - %u %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:179 #, c-format msgid "Failed to obtain OpenSSL identifier for %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:332 #, c-format msgid "VOMS: create FQAN: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:370 #, c-format msgid "VOMS: create attribute: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:651 msgid "VOMS: Can not allocate memory for parsing AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:659 msgid "VOMS: Can not allocate memory for storing the order of AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:685 msgid "VOMS: Can not parse AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:715 msgid "VOMS: CA directory or CA file must be provided" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:779 msgid "VOMS: failed to verify AC signature" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:848 #, c-format msgid "VOMS: trust chain to check: %s " msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:856 #, c-format msgid "" "VOMS: the DN in certificate: %s does not match that in trusted DN list: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:862 #, c-format msgid "" "VOMS: the Issuer identity in certificate: %s does not match that in trusted " "DN list: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:897 #, c-format msgid "VOMS: The lsc file %s does not exist" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:903 #, c-format msgid "VOMS: The lsc file %s can not be open" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:951 msgid "" "VOMS: there is no constraints of trusted voms DNs, the certificates stack in " "AC will not be checked." 
msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:984 msgid "VOMS: unable to match certificate chain against VOMS trusted DNs" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1004 msgid "VOMS: AC signature verification failed" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1013 msgid "VOMS: unable to verify certificate chain" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1019 #, c-format msgid "VOMS: cannot validate AC issuer for VO %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1042 #, c-format msgid "VOMS: directory for trusted service certificates: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1068 #, c-format msgid "VOMS: Cannot find certificate of AC issuer for VO %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1090 msgid "VOMS: Can not find AC_ATTR with IETFATTR type" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1097 msgid "VOMS: case of multiple IETFATTR attributes not supported" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1107 msgid "VOMS: case of multiple policyAuthority not supported" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1123 msgid "VOMS: the format of policyAuthority is unsupported - expecting URI" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1132 msgid "" "VOMS: the format of IETFATTRVAL is not supported - expecting OCTET STRING" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1189 msgid "VOMS: the grantor attribute is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1207 msgid "VOMS: the attribute name is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1213 #, c-format msgid "VOMS: the attribute value for %s is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1218 msgid "VOMS: the attribute qualifier is empty" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1250 msgid "" "VOMS: both idcenoRevAvail and authorityKeyIdentifier certificate extensions " "must be present" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1284 #, c-format msgid "VOMS: FQDN of this host %s does not match any target in AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1289 msgid "VOMS: the only supported critical extension of the AC is idceTargets" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1304 msgid "VOMS: failed to parse attributes from AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1348 msgid "VOMS: authorityKey is wrong" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1376 msgid "VOMS: missing AC parts" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1393 msgid "VOMS: unsupported time format in AC - expecting GENERALIZED TIME" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1399 msgid "VOMS: AC is not yet valid" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1406 msgid "VOMS: AC has expired" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1421 msgid "VOMS: AC is not complete - missing Serial or Issuer information" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1426 #, c-format msgid "VOMS: the holder serial number is: %lx" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1427 #, c-format msgid "VOMS: the serial number in AC is: %lx" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1430 #, c-format msgid "" "VOMS: the holder serial number %lx is not the same as the serial number in " "AC %lx, the holder certificate that is used to create a voms proxy could be " "a proxy certificate with a different serial number as the original EEC cert" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1439 msgid "VOMS: the holder information in AC is wrong" msgstr "" #: 
src/hed/libs/credential/VOMSUtil.cpp:1461 #, c-format msgid "VOMS: DN of holder in AC: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1462 #, c-format msgid "VOMS: DN of holder: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1463 #, c-format msgid "VOMS: DN of issuer: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1470 msgid "" "VOMS: the holder name in AC is not related to the distinguished name in " "holder certificate" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1482 #: src/hed/libs/credential/VOMSUtil.cpp:1489 msgid "VOMS: the holder issuerUID is not the same as that in AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1502 msgid "VOMS: the holder issuer name is not the same as that in AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1512 msgid "VOMS: the issuer information in AC is wrong" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1520 #, c-format msgid "VOMS: the issuer name %s is not the same as that in AC - %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1528 msgid "" "VOMS: the serial number of AC INFO is too long - expecting no more than 20 " "octets" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1558 #: src/hed/libs/credential/VOMSUtil.cpp:1566 #: src/hed/libs/credential/VOMSUtil.cpp:1574 #: src/hed/libs/credential/VOMSUtil.cpp:1582 #: src/hed/libs/credential/VOMSUtil.cpp:1605 msgid "VOMS: unable to extract VO name from AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1596 #, c-format msgid "VOMS: unable to determine hostname of AC from VO name: %s" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1615 msgid "VOMS: can not verify the signature of the AC" msgstr "" #: src/hed/libs/credential/VOMSUtil.cpp:1621 msgid "VOMS: problems while parsing information in AC" msgstr "" #: src/hed/libs/credential/test/VOMSUtilTest.cpp:128 #, c-format msgid "Line %d.%d of the attributes returned: %s" msgstr "" #: src/hed/libs/credentialstore/ClientVOMS.cpp:149 msgid "voms" msgstr "" #: src/hed/libs/credentialstore/CredentialStore.cpp:194 #: src/hed/libs/credentialstore/CredentialStore.cpp:245 #: src/hed/libs/credentialstore/CredentialStore.cpp:273 #: src/hed/libs/credentialstore/CredentialStore.cpp:336 #: src/hed/libs/credentialstore/CredentialStore.cpp:376 #: src/hed/libs/credentialstore/CredentialStore.cpp:406 #, c-format msgid "MyProxy failure: %s" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:71 #, c-format msgid "SSL error: %d - %s:%s:%s" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:84 msgid "SSL locks not initialized" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:88 #, c-format msgid "wrong SSL lock requested: %i of %i: %i - %s" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:111 msgid "Failed to lock arccrypto library in memory" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:116 src/hed/libs/crypto/OpenSSL.cpp:130 msgid "Failed to initialize OpenSSL library" msgstr "" #: src/hed/libs/crypto/OpenSSL.cpp:152 msgid "Number of OpenSSL locks changed - reinitializing" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:157 msgid "failed to read data tag" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:161 msgid "waiting for data chunk" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:163 msgid "failed to read data chunk" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:171 #, c-format msgid "data chunk: %llu %llu" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:242 #, c-format msgid "DataMove::Transfer: using supplied checksum %s" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:361 msgid "Expecting Module, 
Command and URL provided" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:368 msgid "Expecting Command module path among arguments" msgstr "" #: src/hed/libs/data/DataExternalHelper.cpp:372 msgid "Expecting Command module name among arguments" msgstr "" #: src/hed/libs/data/DataMover.cpp:115 msgid "No locations found - probably no more physical instances" msgstr "" #: src/hed/libs/data/DataMover.cpp:121 src/hed/libs/data/FileCache.cpp:552 #: src/libs/data-staging/Processor.cpp:443 #: src/libs/data-staging/Processor.cpp:457 #, c-format msgid "Removing %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:134 msgid "This instance was already deleted" msgstr "" #: src/hed/libs/data/DataMover.cpp:140 msgid "Failed to delete physical file" msgstr "" #: src/hed/libs/data/DataMover.cpp:151 #, c-format msgid "Removing metadata in %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:155 msgid "Failed to delete meta-information" msgstr "" #: src/hed/libs/data/DataMover.cpp:169 msgid "Failed to remove all physical instances" msgstr "" #: src/hed/libs/data/DataMover.cpp:173 #, c-format msgid "Removing logical file from metadata %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:176 msgid "Failed to delete logical file" msgstr "" #: src/hed/libs/data/DataMover.cpp:183 msgid "Failed to remove instance" msgstr "" #: src/hed/libs/data/DataMover.cpp:232 msgid "DataMover::Transfer : starting new thread" msgstr "" #: src/hed/libs/data/DataMover.cpp:260 #, c-format msgid "Transfer from %s to %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:262 msgid "Not valid source" msgstr "" #: src/hed/libs/data/DataMover.cpp:267 msgid "Not valid destination" msgstr "" #: src/hed/libs/data/DataMover.cpp:287 src/services/candypond/CandyPond.cpp:304 #, c-format msgid "Couldn't handle certificate: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:296 src/hed/libs/data/DataMover.cpp:590 #: src/libs/data-staging/Processor.cpp:133 #, c-format msgid "File %s is cached (%s) - checking permissions" msgstr "" #: src/hed/libs/data/DataMover.cpp:300 src/hed/libs/data/DataMover.cpp:609 #: src/hed/libs/data/DataMover.cpp:667 src/libs/data-staging/Processor.cpp:152 msgid "Permission checking passed" msgstr "" #: src/hed/libs/data/DataMover.cpp:301 src/hed/libs/data/DataMover.cpp:628 #: src/hed/libs/data/DataMover.cpp:1144 msgid "Linking/copying cached file" msgstr "" #: src/hed/libs/data/DataMover.cpp:325 #, c-format msgid "No locations for source found: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:329 #, c-format msgid "Failed to resolve source: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:341 src/hed/libs/data/DataMover.cpp:409 #, c-format msgid "No locations for destination found: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:346 src/hed/libs/data/DataMover.cpp:413 #, c-format msgid "Failed to resolve destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:361 #, c-format msgid "No locations for destination different from source found: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:382 #, c-format msgid "DataMover::Transfer: trying to destroy/overwrite destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:393 #, c-format msgid "Failed to delete %s but will still try to copy" msgstr "" #: src/hed/libs/data/DataMover.cpp:396 #, c-format msgid "Failed to delete %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:423 #, c-format msgid "Deleted but still have locations at %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:435 msgid "DataMover: cycle" msgstr "" #: src/hed/libs/data/DataMover.cpp:437 msgid "DataMover: no 
retries requested - exit" msgstr "" #: src/hed/libs/data/DataMover.cpp:442 msgid "DataMover: source out of tries - exit" msgstr "" #: src/hed/libs/data/DataMover.cpp:444 msgid "DataMover: destination out of tries - exit" msgstr "" #: src/hed/libs/data/DataMover.cpp:452 #, c-format msgid "Real transfer from %s to %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:478 #, c-format msgid "Creating buffer: %lli x %i" msgstr "" #: src/hed/libs/data/DataMover.cpp:494 #, c-format msgid "DataMove::Transfer: no checksum calculation for %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:499 #, c-format msgid "DataMove::Transfer: using supplied checksum %s:%s" msgstr "" #: src/hed/libs/data/DataMover.cpp:523 #, c-format msgid "DataMove::Transfer: will calculate %s checksum" msgstr "" #: src/hed/libs/data/DataMover.cpp:528 msgid "Buffer creation failed !" msgstr "" #: src/hed/libs/data/DataMover.cpp:551 #, c-format msgid "URL is mapped to: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:579 src/hed/libs/data/DataMover.cpp:637 #: src/libs/data-staging/Processor.cpp:88 msgid "Cached file is locked - should retry" msgstr "" #: src/hed/libs/data/DataMover.cpp:584 src/libs/data-staging/Processor.cpp:106 msgid "Failed to initiate cache" msgstr "" #: src/hed/libs/data/DataMover.cpp:601 src/services/candypond/CandyPond.cpp:379 #, c-format msgid "Permission checking failed: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:603 src/hed/libs/data/DataMover.cpp:661 #: src/hed/libs/data/DataMover.cpp:681 src/hed/libs/data/DataMover.cpp:692 msgid "source.next_location" msgstr "" #: src/hed/libs/data/DataMover.cpp:617 src/libs/data-staging/Processor.cpp:157 #, c-format msgid "Source modification date: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:618 src/libs/data-staging/Processor.cpp:158 #, c-format msgid "Cache creation date: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:624 src/libs/data-staging/Processor.cpp:163 msgid "Cached file is outdated, will re-download" msgstr "" #: src/hed/libs/data/DataMover.cpp:627 src/libs/data-staging/Processor.cpp:168 msgid "Cached copy is still valid" msgstr "" #: src/hed/libs/data/DataMover.cpp:654 msgid "URL is mapped to local access - checking permissions on original URL" msgstr "" #: src/hed/libs/data/DataMover.cpp:658 #, c-format msgid "Permission checking on original URL failed: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:669 msgid "Linking local file" msgstr "" #: src/hed/libs/data/DataMover.cpp:689 #, c-format msgid "Failed to make symbolic link %s to %s : %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:698 #, c-format msgid "Failed to change owner of symbolic link %s to %i" msgstr "" #: src/hed/libs/data/DataMover.cpp:709 #, c-format msgid "cache file: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:735 #, c-format msgid "Failed to stat source %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:737 src/hed/libs/data/DataMover.cpp:750 #: src/hed/libs/data/DataMover.cpp:781 src/hed/libs/data/DataMover.cpp:800 #: src/hed/libs/data/DataMover.cpp:822 src/hed/libs/data/DataMover.cpp:839 #: src/hed/libs/data/DataMover.cpp:996 src/hed/libs/data/DataMover.cpp:1028 #: src/hed/libs/data/DataMover.cpp:1038 src/hed/libs/data/DataMover.cpp:1111 msgid "(Re)Trying next source" msgstr "" #: src/hed/libs/data/DataMover.cpp:748 #, c-format msgid "Meta info of source and location do not match for %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:760 #, c-format msgid "" "Replica %s has high latency, but no more sources exist so will use this one" msgstr "" #: 
src/hed/libs/data/DataMover.cpp:764 #, c-format msgid "Replica %s has high latency, trying next source" msgstr "" #: src/hed/libs/data/DataMover.cpp:776 src/hed/libs/data/DataMover.cpp:796 #: src/libs/data-staging/DataStagingDelivery.cpp:344 #: src/libs/data-staging/DataStagingDelivery.cpp:367 #, c-format msgid "Using internal transfer method of %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:788 src/hed/libs/data/DataMover.cpp:805 #: src/libs/data-staging/DataStagingDelivery.cpp:360 #: src/libs/data-staging/DataStagingDelivery.cpp:381 #, c-format msgid "Internal transfer method is not supported for %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:812 msgid "Using buffered transfer method" msgstr "" #: src/hed/libs/data/DataMover.cpp:816 #, c-format msgid "Failed to prepare source: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:830 #, c-format msgid "Failed to start reading from source: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:849 msgid "Metadata of source and destination are different" msgstr "" #: src/hed/libs/data/DataMover.cpp:868 #, c-format msgid "Failed to preregister destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:873 src/hed/libs/data/DataMover.cpp:1135 msgid "destination.next_location" msgstr "" #: src/hed/libs/data/DataMover.cpp:884 #, c-format msgid "Failed to prepare destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:891 src/hed/libs/data/DataMover.cpp:914 #: src/hed/libs/data/DataMover.cpp:1132 #, c-format msgid "" "Failed to unregister preregistered lfn. You may need to unregister it " "manually: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:895 src/hed/libs/data/DataMover.cpp:917 #: src/hed/libs/data/DataMover.cpp:1005 src/hed/libs/data/DataMover.cpp:1021 #: src/hed/libs/data/DataMover.cpp:1044 src/hed/libs/data/DataMover.cpp:1089 msgid "(Re)Trying next destination" msgstr "" #: src/hed/libs/data/DataMover.cpp:906 #, c-format msgid "Failed to start writing to destination: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:929 msgid "Failed to start writing to cache" msgstr "" #: src/hed/libs/data/DataMover.cpp:937 src/hed/libs/data/DataMover.cpp:983 #: src/hed/libs/data/DataMover.cpp:1156 msgid "" "Failed to unregister preregistered lfn. 
You may need to unregister it " "manually" msgstr "" #: src/hed/libs/data/DataMover.cpp:944 msgid "Waiting for buffer" msgstr "" #: src/hed/libs/data/DataMover.cpp:951 #, c-format msgid "Failed updating timestamp on cache lock file %s for file %s: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:956 #, c-format msgid "buffer: read EOF : %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:957 #, c-format msgid "buffer: write EOF: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:958 #, c-format msgid "buffer: error : %s, read: %s, write: %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:959 msgid "Closing read channel" msgstr "" #: src/hed/libs/data/DataMover.cpp:966 msgid "Closing write channel" msgstr "" #: src/hed/libs/data/DataMover.cpp:974 msgid "Failed to complete writing to destination" msgstr "" #: src/hed/libs/data/DataMover.cpp:988 msgid "Transfer cancelled successfully" msgstr "" #: src/hed/libs/data/DataMover.cpp:1033 msgid "Cause of failure unclear - choosing randomly" msgstr "" #: src/hed/libs/data/DataMover.cpp:1076 #, c-format msgid "" "Checksum mismatch between checksum given as meta option (%s:%s) and " "calculated checksum (%s)" msgstr "" #: src/hed/libs/data/DataMover.cpp:1082 msgid "" "Failed to unregister preregistered lfn, You may need to unregister it " "manually" msgstr "" #: src/hed/libs/data/DataMover.cpp:1086 msgid "Failed to delete destination, retry may fail" msgstr "" #: src/hed/libs/data/DataMover.cpp:1096 msgid "Cannot compare empty checksum" msgstr "" #: src/hed/libs/data/DataMover.cpp:1103 #: src/libs/data-staging/DataStagingDelivery.cpp:538 msgid "Checksum type of source and calculated checksum differ, cannot compare" msgstr "" #: src/hed/libs/data/DataMover.cpp:1105 #, c-format msgid "Checksum mismatch between calcuated checksum %s and source checksum %s" msgstr "" #: src/hed/libs/data/DataMover.cpp:1116 #: src/libs/data-staging/DataStagingDelivery.cpp:554 #, c-format msgid "Calculated transfer checksum %s matches source checksum" msgstr "" #: src/hed/libs/data/DataMover.cpp:1122 #: src/libs/data-staging/DataStagingDelivery.cpp:557 msgid "Checksum not computed" msgstr "" #: src/hed/libs/data/DataMover.cpp:1128 #, c-format msgid "Failed to postregister destination %s" msgstr "" #: src/hed/libs/data/DataPoint.cpp:84 #, c-format msgid "Invalid URL option: %s" msgstr "" #: src/hed/libs/data/DataPoint.cpp:263 #, c-format msgid "Skipping invalid URL option %s" msgstr "" #: src/hed/libs/data/DataPoint.cpp:278 msgid "" "Third party transfer was requested but the corresponding plugin could\n" " not be loaded. Is the GFAL plugin installed? If not, please install " "the\n" " packages 'nordugrid-arc-plugins-gfal' and 'gfal2-all'. Depending on\n" " your type of installation the package names might differ." 
msgstr "" #: src/hed/libs/data/DataPoint.cpp:296 #, c-format msgid "Failed to load plugin for URL %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:75 #: src/hed/libs/data/DataPointDelegate.cpp:76 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2011 #, c-format msgid "Starting helper process: %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:180 msgid "start_reading" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:189 msgid "start_reading: helper start failed" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:197 msgid "start_reading: thread create failed" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:213 msgid "StopReading: aborting connection" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:218 msgid "stop_reading: waiting for transfer to finish" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:221 #, c-format msgid "stop_reading: exiting: %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:231 msgid "read_thread: get and register buffers" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:239 #, c-format msgid "read_thread: for_read failed - aborting: %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:247 #, c-format msgid "read_thread: non-data tag '%c' from external process - leaving: %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:256 #, c-format msgid "read_thread: data read error from external process - aborting: %s" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:264 msgid "read_thread: exiting" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:285 msgid "start_writing_ftp: helper start failed" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:293 msgid "start_writing_ftp: thread create failed" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:343 msgid "No checksum information possible" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:359 msgid "write_thread: get and pass buffers" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:366 msgid "write_thread: for_write failed - aborting" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:370 msgid "write_thread: for_write eof" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:384 msgid "write_thread: out failed - aborting" msgstr "" #: src/hed/libs/data/DataPointDelegate.cpp:392 msgid "write_thread: exiting" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:91 #, c-format msgid "Can't handle location %s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:183 msgid "Sorting replicas according to URL map" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:187 #, c-format msgid "Replica %s is mapped" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:195 #, c-format msgid "Sorting replicas according to preferred pattern %s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:218 #: src/hed/libs/data/DataPointIndex.cpp:236 #, c-format msgid "Excluding replica %s matching pattern !%s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:229 #, c-format msgid "Replica %s matches host pattern %s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:247 #, c-format msgid "Replica %s matches pattern %s" msgstr "" #: src/hed/libs/data/DataPointIndex.cpp:263 #, c-format msgid "Replica %s doesn't match preferred pattern or URL map" msgstr "" #: src/hed/libs/data/DataStatus.cpp:12 msgid "Operation completed successfully" msgstr "" #: src/hed/libs/data/DataStatus.cpp:13 msgid "Source is invalid URL" msgstr "" #: src/hed/libs/data/DataStatus.cpp:14 msgid "Destination is invalid URL" msgstr "" #: src/hed/libs/data/DataStatus.cpp:15 msgid "Resolving of index service for 
source failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:16 msgid "Resolving of index service for destination failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:17 msgid "Can't read from source" msgstr "" #: src/hed/libs/data/DataStatus.cpp:18 msgid "Can't write to destination" msgstr "" #: src/hed/libs/data/DataStatus.cpp:19 msgid "Failed while reading from source" msgstr "" #: src/hed/libs/data/DataStatus.cpp:20 msgid "Failed while writing to destination" msgstr "" #: src/hed/libs/data/DataStatus.cpp:21 msgid "Failed while transferring data" msgstr "" #: src/hed/libs/data/DataStatus.cpp:22 msgid "Failed while finishing reading from source" msgstr "" #: src/hed/libs/data/DataStatus.cpp:23 msgid "Failed while finishing writing to destination" msgstr "" #: src/hed/libs/data/DataStatus.cpp:24 msgid "First stage of registration to index service failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:25 msgid "Last stage of registration to index service failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:26 msgid "Unregistering from index service failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:27 msgid "Error in caching procedure" msgstr "" #: src/hed/libs/data/DataStatus.cpp:28 msgid "Error due to expiration of provided credentials" msgstr "" #: src/hed/libs/data/DataStatus.cpp:29 msgid "Delete error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:30 msgid "No valid location available" msgstr "" #: src/hed/libs/data/DataStatus.cpp:31 msgid "Location already exists" msgstr "" #: src/hed/libs/data/DataStatus.cpp:32 msgid "Operation not supported for this kind of URL" msgstr "" #: src/hed/libs/data/DataStatus.cpp:33 msgid "Feature is not implemented" msgstr "" #: src/hed/libs/data/DataStatus.cpp:34 msgid "Already reading from source" msgstr "" #: src/hed/libs/data/DataStatus.cpp:35 msgid "Already writing to destination" msgstr "" #: src/hed/libs/data/DataStatus.cpp:36 msgid "Read access check failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:37 msgid "Directory listing failed" msgstr "" #: src/hed/libs/data/DataStatus.cpp:38 msgid "Object is not suitable for listing" msgstr "" #: src/hed/libs/data/DataStatus.cpp:39 msgid "Failed to obtain information about file" msgstr "" #: src/hed/libs/data/DataStatus.cpp:40 msgid "No such file or directory" msgstr "" #: src/hed/libs/data/DataStatus.cpp:41 msgid "Object not initialized (internal error)" msgstr "" #: src/hed/libs/data/DataStatus.cpp:42 msgid "Operating System error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:43 msgid "Failed to stage file(s)" msgstr "" #: src/hed/libs/data/DataStatus.cpp:44 msgid "Inconsistent metadata" msgstr "" #: src/hed/libs/data/DataStatus.cpp:45 msgid "Failed to prepare source" msgstr "" #: src/hed/libs/data/DataStatus.cpp:46 msgid "Should wait for source to be prepared" msgstr "" #: src/hed/libs/data/DataStatus.cpp:47 msgid "Failed to prepare destination" msgstr "" #: src/hed/libs/data/DataStatus.cpp:48 msgid "Should wait for destination to be prepared" msgstr "" #: src/hed/libs/data/DataStatus.cpp:49 msgid "Failed to finalize reading from source" msgstr "" #: src/hed/libs/data/DataStatus.cpp:50 msgid "Failed to finalize writing to destination" msgstr "" #: src/hed/libs/data/DataStatus.cpp:51 msgid "Failed to create directory" msgstr "" #: src/hed/libs/data/DataStatus.cpp:52 msgid "Failed to rename URL" msgstr "" #: src/hed/libs/data/DataStatus.cpp:53 msgid "Data was already cached" msgstr "" #: src/hed/libs/data/DataStatus.cpp:54 msgid "Operation cancelled successfully" msgstr "" #: 
src/hed/libs/data/DataStatus.cpp:55 msgid "Generic error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:56 src/hed/libs/data/DataStatus.cpp:69 msgid "Unknown error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:60 msgid "No error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:61 msgid "Transfer timed out" msgstr "" #: src/hed/libs/data/DataStatus.cpp:62 msgid "Checksum mismatch" msgstr "" #: src/hed/libs/data/DataStatus.cpp:63 msgid "Bad logic" msgstr "" #: src/hed/libs/data/DataStatus.cpp:64 msgid "All results obtained are invalid" msgstr "" #: src/hed/libs/data/DataStatus.cpp:65 msgid "Temporary service error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:66 msgid "Permanent service error" msgstr "" #: src/hed/libs/data/DataStatus.cpp:67 msgid "Error switching uid" msgstr "" #: src/hed/libs/data/DataStatus.cpp:68 msgid "Request timed out" msgstr "" #: src/hed/libs/data/FileCache.cpp:111 msgid "No cache directory specified" msgstr "" #: src/hed/libs/data/FileCache.cpp:128 msgid "No usable caches" msgstr "" #: src/hed/libs/data/FileCache.cpp:137 msgid "No draining cache directory specified" msgstr "" #: src/hed/libs/data/FileCache.cpp:155 msgid "No read-only cache directory specified" msgstr "" #: src/hed/libs/data/FileCache.cpp:184 #, c-format msgid "Failed to create cache directory for file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:194 #, c-format msgid "Failed to create any cache directories for %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:201 #, c-format msgid "Failed to change permissions on %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:213 #, c-format msgid "Failed to delete stale cache file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:216 #, c-format msgid "Failed to release lock on file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:234 #, c-format msgid "Failed looking up attributes of cached file: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:240 #, c-format msgid "Failed to obtain lock on cache file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:249 src/hed/libs/data/FileCache.cpp:309 #, c-format msgid "Error removing cache file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:251 src/hed/libs/data/FileCache.cpp:262 #, c-format msgid "Failed to remove lock on %s. Some manual intervention may be required" msgstr "" #: src/hed/libs/data/FileCache.cpp:281 src/hed/libs/data/FileCache.cpp:315 #, c-format msgid "Failed to unlock file %s: %s. 
Manual intervention may be required" msgstr "" #: src/hed/libs/data/FileCache.cpp:298 #, c-format msgid "Invalid lock on file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:304 #, c-format msgid "Failed to remove .meta file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:369 #, c-format msgid "Cache not found for file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:379 #, c-format msgid "" "Cache file %s was modified in the last second, sleeping 1 second to avoid " "race condition" msgstr "" #: src/hed/libs/data/FileCache.cpp:384 src/hed/libs/data/FileCache.cpp:689 #, c-format msgid "Cache file %s does not exist" msgstr "" #: src/hed/libs/data/FileCache.cpp:389 src/hed/libs/data/FileCache.cpp:691 #, c-format msgid "Error accessing cache file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:395 #, c-format msgid "Cannot create directory %s for per-job hard links" msgstr "" #: src/hed/libs/data/FileCache.cpp:400 #, c-format msgid "Cannot change permission of %s: %s " msgstr "" #: src/hed/libs/data/FileCache.cpp:404 #, c-format msgid "Cannot change owner of %s: %s " msgstr "" #: src/hed/libs/data/FileCache.cpp:418 #, c-format msgid "Failed to remove existing hard link at %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:422 src/hed/libs/data/FileCache.cpp:433 #, c-format msgid "Failed to create hard link from %s to %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:428 #, c-format msgid "Cache file %s not found" msgstr "" #: src/hed/libs/data/FileCache.cpp:443 #, c-format msgid "Failed to change permissions or set owner of hard link %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:451 #, c-format msgid "Failed to release lock on cache file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:462 #, c-format msgid "Cache file %s was locked during link/copy, must start again" msgstr "" #: src/hed/libs/data/FileCache.cpp:467 #, c-format msgid "Cache file %s was deleted during link/copy, must start again" msgstr "" #: src/hed/libs/data/FileCache.cpp:472 #, c-format msgid "Cache file %s was modified while linking, must start again" msgstr "" #: src/hed/libs/data/FileCache.cpp:490 #, c-format msgid "Failed to copy file %s to %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:496 #, c-format msgid "Failed to set executable bit on file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:501 #, c-format msgid "Failed to set executable bit on file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:515 #, c-format msgid "Failed to remove existing symbolic link at %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:519 src/hed/libs/data/FileCache.cpp:524 #, c-format msgid "Failed to create symbolic link from %s to %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:554 #, c-format msgid "Failed to remove cache per-job dir %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:573 src/hed/libs/data/FileCache.cpp:641 #, c-format msgid "Error reading meta file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:578 src/hed/libs/data/FileCache.cpp:646 #, c-format msgid "Error opening meta file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:583 src/hed/libs/data/FileCache.cpp:650 #, c-format msgid "meta file %s is empty" msgstr "" #: src/hed/libs/data/FileCache.cpp:593 #, c-format msgid "" "File %s is already cached at %s under a different URL: %s - will not add DN " "to cached list" msgstr "" #: src/hed/libs/data/FileCache.cpp:604 #, c-format msgid "Bad format detected in file %s, in line %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:620 #, c-format msgid "Could not acquire lock on 
meta file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:624 #, c-format msgid "Error opening meta file for writing %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:660 #, c-format msgid "DN %s is cached and is valid until %s for URL %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:664 #, c-format msgid "DN %s is cached but has expired for URL %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:715 #, c-format msgid "Failed to acquire lock on cache meta file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:720 #, c-format msgid "Failed to create cache meta file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:735 #, c-format msgid "Failed to read cache meta file %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:740 #, c-format msgid "Cache meta file %s is empty, will recreate" msgstr "" #: src/hed/libs/data/FileCache.cpp:745 #, c-format msgid "Cache meta file %s possibly corrupted, will recreate" msgstr "" #: src/hed/libs/data/FileCache.cpp:749 #, c-format msgid "" "File %s is already cached at %s under a different URL: %s - this file will " "not be cached" msgstr "" #: src/hed/libs/data/FileCache.cpp:759 #, c-format msgid "Error looking up attributes of cache meta file %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:830 #, c-format msgid "Using cache %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:844 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:79 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:112 #, c-format msgid "Error getting info from statvfs for the path %s: %s" msgstr "" #: src/hed/libs/data/FileCache.cpp:850 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:118 #, c-format msgid "Cache %s: Free space %f GB" msgstr "" #: src/hed/libs/data/URLMap.cpp:33 #, c-format msgid "Can't use URL %s" msgstr "" #: src/hed/libs/data/URLMap.cpp:39 #, c-format msgid "file %s is not accessible" msgstr "" #: src/hed/libs/data/URLMap.cpp:49 #, c-format msgid "Mapping %s to %s" msgstr "" #: src/hed/libs/data/examples/simple_copy.cpp:17 msgid "Usage: copy source destination" msgstr "" #: src/hed/libs/data/examples/simple_copy.cpp:42 #, c-format msgid "Copy failed: %s" msgstr "" #: src/hed/libs/globusutils/GSSCredential.cpp:41 #, c-format msgid "Failed to read proxy file: %s" msgstr "" #: src/hed/libs/globusutils/GSSCredential.cpp:49 #, c-format msgid "Failed to read certificate file: %s" msgstr "" #: src/hed/libs/globusutils/GSSCredential.cpp:56 #, c-format msgid "Failed to read private key file: %s" msgstr "" #: src/hed/libs/globusutils/GSSCredential.cpp:82 #, c-format msgid "" "Failed to convert GSI credential to GSS credential (major: %d, minor: %d):%s:" "%s" msgstr "" #: src/hed/libs/globusutils/GSSCredential.cpp:94 #, c-format msgid "Failed to release GSS credential (major: %d, minor: %d):%s:%s" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:28 msgid "Module Manager Init" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:71 msgid "" "Busy plugins found while unloading Module Manager. Waiting for them to be " "released." 
msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:205 #, c-format msgid "Found %s in cache" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:212 #, c-format msgid "Could not locate module %s in following paths:" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:216 #, c-format msgid "\t%s" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:230 #, c-format msgid "Loaded %s" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:274 msgid "Module Manager Init by ModuleManager::setCfg" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:310 #: src/hed/libs/loader/ModuleManager.cpp:323 #, c-format msgid "%s made persistent" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:314 #, c-format msgid "Not found %s in cache" msgstr "" #: src/hed/libs/loader/ModuleManager.cpp:328 msgid "Specified module not found in cache" msgstr "" #: src/hed/libs/loader/Plugin.cpp:364 src/hed/libs/loader/Plugin.cpp:557 #, c-format msgid "Could not find loadable module descriptor by name %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:372 src/hed/libs/loader/Plugin.cpp:567 #, c-format msgid "Could not find loadable module by name %s (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:378 src/hed/libs/loader/Plugin.cpp:480 #: src/hed/libs/loader/Plugin.cpp:572 #, c-format msgid "Module %s is not an ARC plugin (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:395 src/hed/libs/loader/Plugin.cpp:490 #: src/hed/libs/loader/Plugin.cpp:598 #, c-format msgid "Module %s failed to reload (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:417 #, c-format msgid "Module %s contains no plugin %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:462 #, c-format msgid "Could not find loadable module descriptor by name %s or kind %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:467 #, c-format msgid "Loadable module %s contains no requested plugin %s of kind %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:474 #, c-format msgid "Could not find loadable module by names %s and %s (%s)" msgstr "" #: src/hed/libs/loader/Plugin.cpp:503 #, c-format msgid "Module %s contains no requested plugin %s of kind %s" msgstr "" #: src/hed/libs/loader/Plugin.cpp:588 #, c-format msgid "Module %s does not contain plugin(s) of specified kind(s)" msgstr "" #: src/hed/libs/message/MCC.cpp:76 src/hed/libs/message/Service.cpp:25 #, c-format msgid "No security processing/check requested for '%s'" msgstr "" #: src/hed/libs/message/MCC.cpp:85 #, c-format msgid "Security processing/check failed: %s" msgstr "" #: src/hed/libs/message/MCC.cpp:90 msgid "Security processing/check passed" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:16 msgid "Chain(s) configuration failed" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:133 msgid "SecHandler configuration is not defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:156 msgid "SecHandler has no configuration" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:162 msgid "SecHandler has no name attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:172 #, c-format msgid "Security Handler %s(%s) could not be created" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:176 #, c-format msgid "SecHandler: %s(%s)" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:188 msgid "Component has no name attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:193 msgid "Component has no ID attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:202 #, c-format msgid "Component %s(%s) could not be created" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:232 #, c-format msgid "Component's %s(%s) next has no ID 
attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:287 #, c-format msgid "Loaded MCC %s(%s)" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:305 #, c-format msgid "Plexer's (%s) next has no ID attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:315 #, c-format msgid "Loaded Plexer %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:323 msgid "Service has no Name attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:329 msgid "Service has no ID attribute defined" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:338 #, c-format msgid "Service %s(%s) could not be created" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:345 #, c-format msgid "Loaded Service %s(%s)" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:387 #, c-format msgid "Linking MCC %s(%s) to MCC (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:398 #, c-format msgid "Linking MCC %s(%s) to Service (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:407 #, c-format msgid "Linking MCC %s(%s) to Plexer (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:412 #, c-format msgid "MCC %s(%s) - next %s(%s) has no target" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:431 #, c-format msgid "Linking Plexer %s to MCC (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:442 #, c-format msgid "Linking Plexer %s to Service (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:451 #, c-format msgid "Linking Plexer %s to Plexer (%s) under %s" msgstr "" #: src/hed/libs/message/MCCLoader.cpp:457 #, c-format msgid "Plexer (%s) - next %s(%s) has no target" msgstr "" #: src/hed/libs/message/Plexer.cpp:31 #, c-format msgid "Bad label: \"%s\"" msgstr "" #: src/hed/libs/message/Plexer.cpp:47 #, c-format msgid "Operation on path \"%s\"" msgstr "" #: src/hed/libs/message/Plexer.cpp:60 #, c-format msgid "No next MCC or Service at path \"%s\"" msgstr "" #: src/hed/libs/message/Service.cpp:35 #, c-format msgid "Security processing/check for '%s' failed: %s" msgstr "" #: src/hed/libs/message/Service.cpp:41 #, c-format msgid "Security processing/check for '%s' passed" msgstr "" #: src/hed/libs/otokens/jwse.cpp:55 #, c-format msgid "JWSE::Input: token: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:75 #, c-format msgid "JWSE::Input: header: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:101 #, c-format msgid "JWSE::Input: JWS content: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:111 msgid "JWSE::Input: JWS: token too young" msgstr "" #: src/hed/libs/otokens/jwse.cpp:120 msgid "JWSE::Input: JWS: token too old" msgstr "" #: src/hed/libs/otokens/jwse.cpp:131 #, c-format msgid "JWSE::Input: JWS: signature algorithm: %s" msgstr "" #: src/hed/libs/otokens/jwse.cpp:190 msgid "JWSE::Input: JWS: signature verification failed" msgstr "" #: src/hed/libs/otokens/jwse.cpp:196 msgid "JWSE::Input: JWE: not supported yet" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:271 msgid "JWSE::ExtractPublicKey: x5c key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:279 msgid "JWSE::ExtractPublicKey: jwk key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:286 msgid "JWSE::ExtractPublicKey: external jwk key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:303 #, c-format msgid "JWSE::ExtractPublicKey: fetching jwl key from %s" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:316 msgid "JWSE::ExtractPublicKey: no supported key" msgstr "" #: src/hed/libs/otokens/jwse_keys.cpp:319 msgid "JWSE::ExtractPublicKey: key parsing error" msgstr "" #: src/hed/libs/otokens/openid_metadata.cpp:40 #: 
src/hed/libs/otokens/openid_metadata.cpp:45 #, c-format msgid "Input: metadata: %s" msgstr "" #: src/hed/libs/otokens/openid_metadata.cpp:414 #, c-format msgid "Fetch: response code: %u %s" msgstr "" #: src/hed/libs/otokens/openid_metadata.cpp:416 #, c-format msgid "Fetch: response body: %s" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:136 #, c-format msgid "Can not load ARC evaluator object: %s" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:187 #, c-format msgid "Can not load ARC request object: %s" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:228 #, c-format msgid "Can not load policy object: %s" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:276 msgid "Can not load policy object" msgstr "" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:324 msgid "Can not load request object" msgstr "" #: src/hed/libs/security/ArcPDP/PolicyParser.cpp:119 msgid "Can not generate policy object" msgstr "" #: src/hed/libs/security/ArcPDP/attr/RequestAttribute.cpp:37 #, c-format msgid "Id= %s,Type= %s,Issuer= %s,Value= %s" msgstr "" #: src/hed/libs/security/ArcPDP/attr/RequestAttribute.cpp:40 #, c-format msgid "No Attribute exists, which can deal with type: %s" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:168 #, c-format msgid "HTTP Error: %d %s" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:241 msgid "Cannot create http payload" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:311 msgid "No next element in the chain" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:320 #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:253 msgid "next element of the chain returned error status" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:329 msgid "next element of the chain returned no payload" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:341 msgid "next element of the chain returned invalid/unsupported payload" msgstr "" #: src/hed/mcc/http/MCCHTTP.cpp:423 msgid "Error to flush output payload" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:305 #, c-format msgid "<< %s" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:354 src/hed/mcc/http/PayloadHTTP.cpp:456 #, c-format msgid "< %s" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:575 msgid "Failed to parse HTTP header" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:836 msgid "Invalid HTTP object can't produce result" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:949 #, c-format msgid "> %s" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:974 msgid "Failed to write header to output stream" msgstr "" #: src/hed/mcc/http/PayloadHTTP.cpp:999 src/hed/mcc/http/PayloadHTTP.cpp:1005 #: src/hed/mcc/http/PayloadHTTP.cpp:1011 src/hed/mcc/http/PayloadHTTP.cpp:1021 #: src/hed/mcc/http/PayloadHTTP.cpp:1033 src/hed/mcc/http/PayloadHTTP.cpp:1038 #: src/hed/mcc/http/PayloadHTTP.cpp:1043 src/hed/mcc/http/PayloadHTTP.cpp:1051 #: src/hed/mcc/http/PayloadHTTP.cpp:1058 msgid "Failed to write body to output stream" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:31 msgid "Skipping service: no ServicePath found!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:37 msgid "Skipping service: no SchemaPath found!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:89 msgid "Parser Context creation failed!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:98 msgid "Cannot parse schema!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:110 msgid "Empty payload!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:119 msgid "Could not convert payload!" 
msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:125 #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:212 msgid "Could not create PayloadSOAP!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:196 msgid "Empty input payload!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:205 msgid "Could not convert incoming payload!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:232 msgid "Missing schema! Skipping validation..." msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:237 msgid "Could not validate message!" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:245 #: src/hed/mcc/soap/MCCSOAP.cpp:222 src/hed/mcc/soap/MCCSOAP.cpp:236 #: src/hed/mcc/soap/MCCSOAP.cpp:266 msgid "empty next chain element" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:257 #: src/hed/mcc/soap/MCCSOAP.cpp:282 msgid "next element of the chain returned empty payload" msgstr "" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:265 msgid "next element of the chain returned invalid payload" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:207 msgid "empty input payload" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:217 #, c-format msgid "MIME is not suitable for SOAP: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:231 msgid "incoming message is not SOAP" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:258 #, c-format msgid "Security check failed in SOAP MCC for incoming message: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:274 #, c-format msgid "next element of the chain returned error status: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:293 msgid "next element of the chain returned unknown payload - passing through" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:298 src/hed/mcc/soap/MCCSOAP.cpp:314 #, c-format msgid "Security check failed in SOAP MCC for outgoing message: %s" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:368 msgid "Security check failed in SOAP MCC for outgoing message" msgstr "" #: src/hed/mcc/soap/MCCSOAP.cpp:421 msgid "Security check failed in SOAP MCC for incoming message" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:82 msgid "Missing Port in Listen element" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:91 msgid "Version in Listen element can't be recognized" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:100 #, c-format msgid "Failed to obtain local address for port %s - %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:102 #, c-format msgid "Failed to obtain local address for %s:%s - %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:109 #, c-format msgid "Trying to listen on TCP port %s(%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:111 #, c-format msgid "Trying to listen on %s:%s(%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:117 #, c-format msgid "Failed to create socket for listening at TCP port %s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:119 #, c-format msgid "Failed to create socket for listening at %s:%s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:134 #, c-format msgid "" "Failed to limit socket to IPv6 at TCP port %s - may cause errors for IPv4 at " "same port" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:136 #, c-format msgid "" "Failed to limit socket to IPv6 at %s:%s - may cause errors for IPv4 at same " "port" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:144 #, c-format msgid "Failed to bind socket for TCP port %s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:146 #, c-format msgid "Failed to bind socket for %s:%s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:161 #, c-format msgid "Failed to listen at TCP port %s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:163 #, c-format 
msgid "Failed to listen at %s:%s(%s): %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:180 #, c-format msgid "Listening on TCP port %s(%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:182 #, c-format msgid "Listening on %s:%s(%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:189 #, c-format msgid "Failed to start listening on any address for %s:%s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:191 #, c-format msgid "Failed to start listening on any address for %s:%s(IPv%s)" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:197 msgid "No listening ports initiated" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:208 msgid "dropped" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:208 msgid "put on hold" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:208 #, c-format msgid "Setting connections limit to %i, connections over limit will be %s" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:212 msgid "Failed to start thread for listening" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:245 msgid "Failed to start thread for communication" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:271 msgid "Failed while waiting for connection request" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:293 msgid "Failed to accept connection request" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:302 msgid "Too many connections - dropping new one" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:309 msgid "Too many connections - waiting for old to close" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:533 msgid "next chain element called" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:548 msgid "Only Raw Buffer payload is supported for output" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:556 src/hed/mcc/tcp/MCCTCP.cpp:655 #: src/hed/mcc/tls/MCCTLS.cpp:542 msgid "Failed to send content of buffer" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:568 msgid "TCP executor is removed" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:570 #, c-format msgid "Sockets do not match on exit %i != %i" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:591 msgid "No Connect element specified" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:597 msgid "Missing Port in Connect element" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:603 msgid "Missing Host in Connect element" msgstr "" #: src/hed/mcc/tcp/MCCTCP.cpp:631 msgid "TCP client process called" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:67 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:83 #, c-format msgid "Failed to resolve %s (%s)" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:93 #, c-format msgid "Trying to connect %s(%s):%d" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:97 #, c-format msgid "Failed to create socket for connecting to %s(%s):%d - %s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:109 #, c-format msgid "" "Failed to get TCP socket options for connection to %s(%s):%d - timeout won't " "work - %s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:116 #, c-format msgid "Failed to connect to %s(%s):%i - %s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:126 #, c-format msgid "Timeout connecting to %s(%s):%i - %i s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:134 #, c-format msgid "Failed while waiting for connection to %s(%s):%i - %s" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:144 #, c-format msgid "Failed to connect to %s(%s):%i" msgstr "" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:200 msgid "" "Received message out-of-band (not critical, ERROR level is just for " "debugging purposes)" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:293 #, c-format msgid "Using DH parameters from file: %s" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:296 msgid "Failed to open file with DH parameters for reading" 
msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:301 msgid "Failed to read file with DH parameters" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:304 msgid "Failed to apply DH parameters" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:306 msgid "DH parameters applied" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:321 #, c-format msgid "Using curve with NID: %u" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:324 msgid "Failed to generate EC key" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:327 msgid "Failed to apply ECDH parameters" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:329 msgid "ECDH parameters applied" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:336 #, c-format msgid "Using cipher list: %s" msgstr "" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:352 #, c-format msgid "Using protocol options: 0x%x" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:39 msgid "Independent proxy - no rights granted" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:43 msgid "Proxy with all rights inherited" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:51 msgid "Proxy with empty policy - fail on unrecognized policy" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:56 #, c-format msgid "Proxy with specific policy: %s" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:60 msgid "Proxy with ARC Policy" msgstr "" #: src/hed/mcc/tls/DelegationCollector.cpp:62 msgid "Proxy with unknown policy - fail on unrecognized policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:77 #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:128 #, c-format msgid "Was expecting %s at the beginning of \"%s\"" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:82 #, c-format msgid "We only support CAs in Globus signing policy - %s is not supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:87 #, c-format msgid "We only support X509 CAs in Globus signing policy - %s is not supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:92 msgid "Missing CA subject in Globus signing policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:102 msgid "Negative rights are not supported in Globus signing policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:106 #, c-format msgid "Unknown rights in Globus signing policy - %s" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:111 #, c-format msgid "" "Only globus rights are supported in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:116 #, c-format msgid "" "Only signing rights are supported in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:133 #, c-format msgid "" "We only support subjects conditions in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:138 #, c-format msgid "" "We only support globus conditions in Globus signing policy - %s is not " "supported" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:144 msgid "Missing condition subjects in Globus signing policy" msgstr "" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:220 msgid "Unknown element in Globus signing policy" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:218 msgid "Critical VOMS attribute processing failed" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:226 msgid "VOMS attribute validation failed" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:228 msgid "VOMS attribute is ignored due to processing/validation error" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:420 src/hed/mcc/tls/MCCTLS.cpp:559 #: src/hed/mcc/tls/MCCTLS.cpp:578 #, c-format msgid "Failed 
to establish connection: %s" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:439 src/hed/mcc/tls/MCCTLS.cpp:521 #, c-format msgid "Peer name: %s" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:441 src/hed/mcc/tls/MCCTLS.cpp:523 #, c-format msgid "Identity name: %s" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:443 src/hed/mcc/tls/MCCTLS.cpp:525 #, c-format msgid "CA name: %s" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:450 msgid "Failed to process security attributes in TLS MCC for incoming message" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:458 msgid "Security check failed in TLS MCC for incoming message" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:531 msgid "Security check failed for outgoing TLS message" msgstr "" #: src/hed/mcc/tls/MCCTLS.cpp:563 msgid "Security check failed for incoming TLS message" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:93 msgid "" "Failed to allocate memory for certificate subject while matching policy." msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:98 msgid "" "Failed to retrieve link to TLS stream. Additional policy matching is skipped." msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:128 #, c-format msgid "Certificate %s already expired" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:136 #, c-format msgid "Certificate %s will expire in %s" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:157 msgid "Failed to store application data" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:185 msgid "Failed to retrieve application data from OpenSSL" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:257 src/hed/mcc/tls/PayloadTLSMCC.cpp:356 msgid "Can not create the SSL Context object" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:270 src/hed/mcc/tls/PayloadTLSMCC.cpp:376 msgid "Can't set OpenSSL verify flags" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:289 src/hed/mcc/tls/PayloadTLSMCC.cpp:390 msgid "Can not create the SSL object" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:299 msgid "Faile to assign hostname extension" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:313 msgid "Failed to establish SSL connection" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:316 src/hed/mcc/tls/PayloadTLSMCC.cpp:405 #, c-format msgid "Using cipher: %s" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:402 msgid "Failed to accept SSL connection" msgstr "" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:455 #, c-format msgid "Failed to shut down SSL: %s" msgstr "" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:47 msgid "" "ArcAuthZ: failed to initiate all PDPs - this instance will be non-functional" msgstr "" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:70 msgid "PDP: missing name attribute" msgstr "" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:74 #, c-format msgid "PDP: %s (%s)" msgstr "" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:79 #, c-format msgid "PDP: %s (%s) can not be loaded" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluationCtx.cpp:251 #, c-format msgid "There are %d RequestItems" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:60 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:55 msgid "Can not parse classname for FunctionFactory from configuration" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:68 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:63 msgid "Can not parse classname for AttributeFactory from configuration" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:76 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:71 msgid "" "Can not parse classname for CombiningAlgorithmFactory from configuration" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:84 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:79 msgid "Can not parse classname for Request from 
configuration" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:93 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:88 msgid "Can not parse classname for Policy from configuration" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:105 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:100 msgid "Can not dynamically produce AttributeFactory" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:110 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:105 msgid "Can not dynamically produce FnFactory" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:115 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:110 msgid "Can not dynamically produce AlgFacroty" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:126 #: src/hed/shc/gaclpdp/GACLEvaluator.cpp:31 #: src/hed/shc/gaclpdp/GACLEvaluator.cpp:37 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:121 msgid "Can not create PolicyStore object" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:177 src/hed/shc/test.cpp:183 #: src/hed/shc/testinterface_arc.cpp:102 src/hed/shc/testinterface_xacml.cpp:54 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:172 msgid "Can not dynamically produce Request" msgstr "" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:261 #, c-format msgid "Result value (0=Permit, 1=Deny, 2=Indeterminate, 3=Not_Applicable): %d" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:110 msgid "Can not find ArcPDPContext" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:139 src/hed/shc/xacmlpdp/XACMLPDP.cpp:117 msgid "Evaluator does not support loadable Combining Algorithms" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:143 src/hed/shc/xacmlpdp/XACMLPDP.cpp:121 #, c-format msgid "Evaluator does not support specified Combining Algorithm - %s" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:155 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:84 #: src/hed/shc/gaclpdp/GACLPDP.cpp:118 src/hed/shc/test.cpp:94 #: src/hed/shc/testinterface_arc.cpp:37 src/hed/shc/testinterface_xacml.cpp:37 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:133 msgid "Can not dynamically produce Evaluator" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:158 msgid "Evaluator for ArcPDP was not loaded" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:165 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:57 #: src/hed/shc/gaclpdp/GACLPDP.cpp:128 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:87 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:143 src/tests/echo/echo.cpp:108 msgid "Missing security object in message" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:173 src/hed/shc/arcpdp/ArcPDP.cpp:181 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:137 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:143 #: src/hed/shc/gaclpdp/GACLPDP.cpp:136 src/hed/shc/gaclpdp/GACLPDP.cpp:144 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:95 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:103 #: src/tests/echo/echo.cpp:116 src/tests/echo/echo.cpp:123 msgid "Failed to convert security information to ARC request" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:189 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:150 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:111 #, c-format msgid "ARC Auth. 
request: %s" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:192 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:153 #: src/hed/shc/gaclpdp/GACLPDP.cpp:155 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:114 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:170 msgid "No requested security information was collected" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:199 msgid "Not authorized by arc.pdp - failed to get response from Evaluator" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:245 msgid "Authorized by arc.pdp" msgstr "" #: src/hed/shc/arcpdp/ArcPDP.cpp:246 msgid "" "Not authorized by arc.pdp - some of the RequestItem elements do not satisfy " "Policy" msgstr "" #: src/hed/shc/arcpdp/ArcPolicy.cpp:56 src/hed/shc/arcpdp/ArcPolicy.cpp:70 #: src/hed/shc/gaclpdp/GACLPolicy.cpp:46 src/hed/shc/gaclpdp/GACLPolicy.cpp:59 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:48 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:64 msgid "Policy is empty" msgstr "" #: src/hed/shc/arcpdp/ArcPolicy.cpp:114 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:115 #, c-format msgid "PolicyId: %s Alg inside this policy is:-- %s" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:75 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:129 msgid "No delegation policies in this context and message - passing through" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:95 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:109 msgid "Failed to convert security information to ARC policy" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:116 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:123 #, c-format msgid "ARC delegation policy: %s" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:161 msgid "No authorization response was returned" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:164 #, c-format msgid "There are %d requests, which satisfy at least one policy" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:183 msgid "Delegation authorization passed" msgstr "" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:185 msgid "Delegation authorization failed" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:63 msgid "" "Missing CertificatePath element or ProxyPath element, or " " is missing" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:68 msgid "" "Missing or empty KeyPath element, or is missing" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:74 msgid "Missing or empty CertificatePath or CACertificatesDir element" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:81 #, c-format msgid "Delegation role not supported: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:90 #, c-format msgid "Delegation type not supported: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:115 msgid "Failed to acquire delegation context" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:143 #: src/hed/shc/delegationsh/DelegationSH.cpp:254 msgid "Can't create delegation context" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:149 msgid "Delegation handler with delegatee role starts to process" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:152 #: src/services/a-rex/arex.cpp:592 src/services/candypond/CandyPond.cpp:526 #: src/services/data-staging/DataDeliveryService.cpp:624 msgid "process: POST" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:159 #: src/services/a-rex/arex.cpp:599 src/services/candypond/CandyPond.cpp:535 #: src/services/data-staging/DataDeliveryService.cpp:633 #: src/services/wrappers/python/pythonwrapper.cpp:416 msgid "input is not SOAP" msgstr "" #: 
src/hed/shc/delegationsh/DelegationSH.cpp:166 #, c-format msgid "Delegation service: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:181 #: src/hed/shc/delegationsh/DelegationSH.cpp:188 #: src/tests/client/test_ClientX509Delegation_ARC.cpp:55 #, c-format msgid "Can not get the delegation credential: %s from delegation service: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:204 #: src/hed/shc/delegationsh/DelegationSH.cpp:268 #, c-format msgid "Delegated credential identity: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:205 #, c-format msgid "" "The delegated credential got from delegation service is stored into path: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:218 msgid "The endpoint of delegation service should be configured" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:228 #: src/hed/shc/delegationsh/DelegationSH.cpp:340 msgid "Delegation handler with delegatee role ends" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:260 msgid "Delegation handler with delegator role starts to process" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:269 #, c-format msgid "The delegated credential got from path: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:290 #, c-format msgid "Can not create delegation crendential to delegation service: %s" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:328 msgid "output is not SOAP" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:339 #, c-format msgid "" "Succeeded to send DelegationService: %s and DelegationID: %s info to peer " "service" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:345 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:220 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:101 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:94 msgid "Incoming Message is not SOAP" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:352 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:341 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:123 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:108 msgid "Outgoing Message is not SOAP" msgstr "" #: src/hed/shc/delegationsh/DelegationSH.cpp:356 msgid "Delegation handler is not configured" msgstr "" #: src/hed/shc/gaclpdp/GACLPDP.cpp:121 msgid "Evaluator for GACLPDP was not loaded" msgstr "" #: src/hed/shc/gaclpdp/GACLPDP.cpp:152 #, c-format msgid "GACL Auth. 
request: %s" msgstr "" #: src/hed/shc/gaclpdp/GACLPolicy.cpp:50 src/hed/shc/gaclpdp/GACLPolicy.cpp:63 msgid "Policy is not gacl" msgstr "" #: src/hed/shc/legacy/ConfigParser.cpp:13 msgid "Configuration file not specified" msgstr "" #: src/hed/shc/legacy/ConfigParser.cpp:18 #: src/hed/shc/legacy/ConfigParser.cpp:28 #: src/hed/shc/legacy/ConfigParser.cpp:33 msgid "Configuration file can not be read" msgstr "" #: src/hed/shc/legacy/ConfigParser.cpp:43 #, c-format msgid "Configuration file is broken - block name is too short: %s" msgstr "" #: src/hed/shc/legacy/ConfigParser.cpp:47 #, c-format msgid "Configuration file is broken - block name does not end with ]: %s" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:39 src/hed/shc/legacy/LegacyPDP.cpp:119 msgid "Configuration file not specified in ConfigBlock" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:48 src/hed/shc/legacy/LegacyPDP.cpp:128 msgid "BlockName is empty" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:108 #, c-format msgid "Failed processing user mapping command: %s %s" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:114 #: src/services/gridftpd/fileroot_config.cpp:320 #, c-format msgid "Failed to change mapping stack processing policy in: %s = %s" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:174 msgid "LegacyMap: no configurations blocks defined" msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:196 src/hed/shc/legacy/LegacyPDP.cpp:239 #, c-format msgid "" "LegacyPDP: there is no %s Sec Attribute defined. Probably ARC Legacy Sec " "Handler is not configured or failed." msgstr "" #: src/hed/shc/legacy/LegacyMap.cpp:201 src/hed/shc/legacy/LegacyPDP.cpp:244 msgid "LegacyPDP: ARC Legacy Sec Attribute not recognized." msgstr "" #: src/hed/shc/legacy/LegacyPDP.cpp:138 #, c-format msgid "Failed to parse configuration file %s" msgstr "" #: src/hed/shc/legacy/LegacyPDP.cpp:144 #, c-format msgid "Block %s not found in configuration file %s" msgstr "" #: src/hed/shc/legacy/LegacySecHandler.cpp:40 #: src/hed/shc/legacy/LegacySecHandler.cpp:118 msgid "LegacySecHandler: configuration file not specified" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:146 src/hed/shc/legacy/arc_lcmaps.cpp:161 #, c-format msgid "" "Failed to convert GSI credential to GSS credential (major: %d, minor: %d)" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:171 src/hed/shc/legacy/arc_lcmaps.cpp:186 msgid "Missing subject name" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:176 src/hed/shc/legacy/arc_lcmaps.cpp:191 msgid "Missing path of credentials file" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:182 msgid "Missing name of LCAS library" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:199 #, c-format msgid "Can't load LCAS library %s: %s" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:209 #, c-format msgid "Can't find LCAS functions in a library %s" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:219 msgid "Failed to initialize LCAS" msgstr "" #: src/hed/shc/legacy/arc_lcas.cpp:234 msgid "Failed to terminate LCAS" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:197 msgid "Missing name of LCMAPS library" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:211 msgid "Can't read policy names" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:222 #, c-format msgid "Can't load LCMAPS library %s: %s" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:234 #, c-format msgid "Can't find LCMAPS functions in a library %s" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:246 msgid "LCMAPS has lcmaps_run" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:247 msgid "LCMAPS has getCredentialData" msgstr "" #: 
src/hed/shc/legacy/arc_lcmaps.cpp:251 msgid "Failed to initialize LCMAPS" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:291 #, c-format msgid "LCMAPS returned invalid GID: %u" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:294 msgid "LCMAPS did not return any GID" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:297 #, c-format msgid "LCMAPS returned UID which has no username: %u" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:300 #, c-format msgid "LCMAPS returned invalid UID: %u" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:303 msgid "LCMAPS did not return any UID" msgstr "" #: src/hed/shc/legacy/arc_lcmaps.cpp:312 msgid "Failed to terminate LCMAPS" msgstr "" #: src/hed/shc/legacy/auth.cpp:35 src/services/gridftpd/auth/auth.cpp:35 #, c-format msgid "Unexpected argument for 'all' rule - %s" msgstr "" #: src/hed/shc/legacy/auth.cpp:337 #, c-format msgid "Credentials stored in temporary file %s" msgstr "" #: src/hed/shc/legacy/auth.cpp:346 #, c-format msgid "Assigned to authorization group %s" msgstr "" #: src/hed/shc/legacy/auth.cpp:351 #, c-format msgid "Assigned to userlist %s" msgstr "" #: src/hed/shc/legacy/auth_file.cpp:22 #: src/services/gridftpd/auth/auth_file.cpp:22 #, c-format msgid "Failed to read file %s" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:30 msgid "Missing subject in configuration" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:35 msgid "Missing issuer in configuration" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:40 msgid "Missing audience in configuration" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:45 msgid "Missing scope in configuration" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:50 src/hed/shc/legacy/auth_voms.cpp:47 #: src/services/gridftpd/auth/auth_voms.cpp:51 msgid "Missing group in configuration" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:53 #, c-format msgid "Rule: subject: %s" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:54 #, c-format msgid "Rule: issuer: %s" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:55 #, c-format msgid "Rule: audience: %s" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:56 #, c-format msgid "Rule: scope: %s" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:57 src/hed/shc/legacy/auth_voms.cpp:66 #: src/services/gridftpd/auth/auth_voms.cpp:68 #, c-format msgid "Rule: group: %s" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:60 #, c-format msgid "Match issuer: %s" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:66 #, c-format msgid "Matched: %s %s %s" msgstr "" #: src/hed/shc/legacy/auth_otokens.cpp:80 src/hed/shc/legacy/auth_voms.cpp:93 #: src/services/gridftpd/auth/auth_voms.cpp:98 msgid "Matched nothing" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:45 src/hed/shc/legacy/unixmap.cpp:215 #: src/services/gridftpd/auth/auth_plugin.cpp:70 #: src/services/gridftpd/auth/unixmap.cpp:214 #, c-format msgid "Plugin %s returned: %u" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:49 src/hed/shc/legacy/unixmap.cpp:219 #, c-format msgid "Plugin %s timeout after %u seconds" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:52 src/hed/shc/legacy/unixmap.cpp:222 #, c-format msgid "Plugin %s failed to start" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:54 src/hed/shc/legacy/unixmap.cpp:224 #, c-format msgid "Plugin %s printed: %s" msgstr "" #: src/hed/shc/legacy/auth_plugin.cpp:55 src/hed/shc/legacy/unixmap.cpp:212 #: src/hed/shc/legacy/unixmap.cpp:225 #, c-format msgid "Plugin %s error: %s" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:42 #: src/services/gridftpd/auth/auth_voms.cpp:45 msgid "Missing VO in 
configuration" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:52 #: src/services/gridftpd/auth/auth_voms.cpp:57 msgid "Missing role in configuration" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:57 #: src/services/gridftpd/auth/auth_voms.cpp:63 msgid "Missing capabilities in configuration" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:62 msgid "Too many arguments in configuration" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:65 #: src/services/gridftpd/auth/auth_voms.cpp:67 #, c-format msgid "Rule: vo: %s" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:67 #: src/services/gridftpd/auth/auth_voms.cpp:69 #, c-format msgid "Rule: role: %s" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:68 #: src/services/gridftpd/auth/auth_voms.cpp:70 #, c-format msgid "Rule: capabilities: %s" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:71 #: src/services/gridftpd/auth/auth_voms.cpp:77 #, c-format msgid "Match vo: %s" msgstr "" #: src/hed/shc/legacy/auth_voms.cpp:78 #, c-format msgid "Matched: %s %s %s %s" msgstr "" #: src/hed/shc/legacy/simplemap.cpp:70 #: src/services/gridftpd/auth/simplemap.cpp:68 #, c-format msgid "SimpleMap: acquired new unmap time of %u seconds" msgstr "" #: src/hed/shc/legacy/simplemap.cpp:72 #: src/services/gridftpd/auth/simplemap.cpp:70 msgid "SimpleMap: wrong number in unmaptime command" msgstr "" #: src/hed/shc/legacy/simplemap.cpp:85 src/hed/shc/legacy/simplemap.cpp:90 #: src/services/gridftpd/auth/simplemap.cpp:83 #: src/services/gridftpd/auth/simplemap.cpp:88 #, c-format msgid "SimpleMap: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:65 src/hed/shc/legacy/unixmap.cpp:70 #: src/services/gridftpd/auth/unixmap.cpp:63 #: src/services/gridftpd/auth/unixmap.cpp:68 msgid "Mapping policy option has empty value" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:80 src/services/gridftpd/auth/unixmap.cpp:78 #, c-format msgid "Unsupported mapping policy action: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:91 src/services/gridftpd/auth/unixmap.cpp:89 #, c-format msgid "Unsupported mapping policy option: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:103 src/hed/shc/legacy/unixmap.cpp:108 #: src/services/gridftpd/auth/unixmap.cpp:100 #: src/services/gridftpd/auth/unixmap.cpp:105 msgid "User name mapping command is empty" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:116 #: src/services/gridftpd/auth/unixmap.cpp:113 #, c-format msgid "User name mapping has empty authgroup: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:147 #: src/services/gridftpd/auth/unixmap.cpp:147 #, c-format msgid "Unknown user name mapping rule %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:156 src/hed/shc/legacy/unixmap.cpp:161 #: src/hed/shc/legacy/unixmap.cpp:177 src/hed/shc/legacy/unixmap.cpp:183 #: src/services/gridftpd/auth/unixmap.cpp:175 #: src/services/gridftpd/auth/unixmap.cpp:180 #: src/services/gridftpd/auth/unixmap.cpp:196 msgid "Plugin (user mapping) command is empty" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:167 #: src/services/gridftpd/auth/unixmap.cpp:186 #, c-format msgid "Plugin (user mapping) timeout is not a number: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:171 #: src/services/gridftpd/auth/unixmap.cpp:190 #, c-format msgid "Plugin (user mapping) timeout is wrong number: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:203 #, c-format msgid "Plugin %s returned no username" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:208 #: src/services/gridftpd/auth/unixmap.cpp:211 #, c-format msgid "Plugin %s returned too much: %s" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:211 #, c-format msgid "Plugin %s 
returned no mapping" msgstr "" #: src/hed/shc/legacy/unixmap.cpp:234 msgid "User subject match is missing user subject." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:238 #: src/services/gridftpd/auth/unixmap.cpp:230 #, c-format msgid "Mapfile at %s can't be opened." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:262 #: src/services/gridftpd/auth/unixmap.cpp:255 msgid "User pool mapping is missing user subject." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:267 #: src/services/gridftpd/auth/unixmap.cpp:260 #, c-format msgid "User pool at %s can't be opened." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:272 #: src/services/gridftpd/auth/unixmap.cpp:265 #, c-format msgid "User pool at %s failed to perform user mapping." msgstr "" #: src/hed/shc/legacy/unixmap.cpp:290 #: src/services/gridftpd/auth/unixmap.cpp:283 #, c-format msgid "User name direct mapping is missing user name: %s." msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:63 msgid "OTokens: Attr: message" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:68 #, c-format msgid "OTokens: Attr: %s = %s" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:73 #, c-format msgid "OTokens: Attr: token: %s" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:76 #, c-format msgid "OTokens: Attr: token: bearer: %s" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:146 msgid "OTokens: Handle" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:148 msgid "OTokens: Handle: message" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:151 msgid "Failed to create OTokens security attributes" msgstr "" #: src/hed/shc/otokens/OTokensSH.cpp:155 #, c-format msgid "OTokens: Handle: attributes created: subject = %s" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:48 msgid "Creating a pdpservice client" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:80 msgid "Arc policy can not been carried by SAML2.0 profile of XACML" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:152 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:185 msgid "Policy Decision Service invocation failed" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:155 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:188 #: src/tests/client/test_ClientInterface.cpp:88 #: src/tests/client/test_ClientSAML2SSO.cpp:81 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:100 #: src/tests/echo/test_clientinterface.cpp:82 #: src/tests/echo/test_clientinterface.cpp:149 #: src/tests/echo/test_clientinterface.py:32 msgid "There was no SOAP response" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:170 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:204 msgid "Authorized from remote pdp service" msgstr "" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:171 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:205 msgid "Unauthorized from remote pdp service" msgstr "" #: src/hed/shc/saml2sso_assertionconsumersh/SAML2SSO_AssertionConsumerSH.cpp:69 msgid "Can not get SAMLAssertion SecAttr from message context" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:152 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:44 msgid "Missing or empty CertificatePath element" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:157 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:49 msgid "Missing or empty KeyPath element" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:163 msgid "" "Both of CACertificatePath and CACertificatesDir elements missing or empty" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:175 #: 
src/hed/shc/x509tokensh/X509TokenSH.cpp:61 msgid "" "Missing or empty CertificatePath or CACertificatesDir element; will only " "check the signature, will not do message authentication" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:179 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:65 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:65 #, c-format msgid "Processing type not supported: %s" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:199 msgid "Failed to parse SAML Token from incoming SOAP" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:209 msgid "Failed to authenticate SAML Token inside the incoming SOAP" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:212 msgid "Succeeded to authenticate SAMLToken" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:275 #, c-format msgid "No response from AA service %s" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:279 #, c-format msgid "SOAP Request to AA service %s failed" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:287 msgid "Cannot find content under response soap message" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:291 msgid "Cannot find under response soap message:" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:308 msgid "The Response is not going to this end" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:315 msgid "The StatusCode is Success" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:321 msgid "Succeeded to verify the signature under " msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:324 msgid "Failed to verify the signature under " msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:335 msgid "Failed to generate SAML Token for outgoing SOAP" msgstr "" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:345 msgid "SAML Token handler is not configured" msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:29 #, c-format msgid "Access list location: %s" msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:39 msgid "" "No policy file or DNs specified for simplelist.pdp, please set location " "attribute or at least one DN element for simplelist PDP node in " "configuration." 
msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:42 #, c-format msgid "Subject to match: %s" msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:45 #, c-format msgid "Policy subject: %s" msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:47 #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:73 #, c-format msgid "Authorized from simplelist.pdp: %s" msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:54 msgid "" "The policy file setup for simplelist.pdp does not exist, please check " "location attribute for simplelist PDP node in service configuration" msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:61 #, c-format msgid "Policy line: %s" msgstr "" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:79 #, c-format msgid "Not authorized from simplelist.pdp: %s" msgstr "" #: src/hed/shc/test.cpp:27 src/hed/shc/testinterface_arc.cpp:26 #: src/hed/shc/testinterface_xacml.cpp:26 msgid "Start test" msgstr "" #: src/hed/shc/test.cpp:101 msgid "Input request from a file: Request.xml" msgstr "" #: src/hed/shc/test.cpp:107 src/hed/shc/test.cpp:197 #: src/hed/shc/testinterface_arc.cpp:124 #, c-format msgid "There is %d subjects, which satisfy at least one policy" msgstr "" #: src/hed/shc/test.cpp:121 #, c-format msgid "Attribute Value (1): %s" msgstr "" #: src/hed/shc/test.cpp:132 msgid "Input request from code" msgstr "" #: src/hed/shc/test.cpp:211 #, c-format msgid "Attribute Value (2): %s" msgstr "" #: src/hed/shc/testinterface_arc.cpp:75 src/hed/shc/testinterface_xacml.cpp:46 msgid "Can not dynamically produce Policy" msgstr "" #: src/hed/shc/testinterface_arc.cpp:138 #, c-format msgid "Attribute Value inside Subject: %s" msgstr "" #: src/hed/shc/testinterface_arc.cpp:148 msgid "The request has passed the policy evaluation" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:43 msgid "Missing or empty PasswordSource element" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:54 #, c-format msgid "Password encoding type not supported: %s" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:59 msgid "Missing or empty Username element" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:79 msgid "The payload of incoming message is empty" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:84 msgid "Failed to cast PayloadSOAP from incoming payload" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:89 msgid "Failed to parse Username Token from incoming SOAP" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:95 msgid "Failed to authenticate Username Token inside the incoming SOAP" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:98 msgid "Succeeded to authenticate UsernameToken" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:108 msgid "The payload of outgoing message is empty" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:113 msgid "Failed to cast PayloadSOAP from outgoing payload" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:119 msgid "Failed to generate Username Token for outgoing SOAP" msgstr "" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:127 msgid "Username Token handler is not configured" msgstr "" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:81 msgid "Failed to parse X509 Token from incoming SOAP" msgstr "" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:85 msgid "Failed to verify X509 Token inside the incoming SOAP" msgstr "" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:89 msgid "Failed to authenticate X509 Token inside the incoming SOAP" msgstr "" #: 
src/hed/shc/x509tokensh/X509TokenSH.cpp:92 msgid "Succeeded to authenticate X509Token" msgstr "" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:102 msgid "Failed to generate X509 Token for outgoing SOAP" msgstr "" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:112 msgid "X509 Token handler is not configured" msgstr "" #: src/hed/shc/xacmlpdp/XACMLApply.cpp:29 msgid "Can not create function: FunctionId does not exist" msgstr "" #: src/hed/shc/xacmlpdp/XACMLApply.cpp:33 #: src/hed/shc/xacmlpdp/XACMLTarget.cpp:40 #, c-format msgid "Can not create function %s" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:88 msgid "Can not find XACMLPDPContext" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:136 msgid "Evaluator for XACMLPDP was not loaded" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:151 src/hed/shc/xacmlpdp/XACMLPDP.cpp:159 msgid "Failed to convert security information to XACML request" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:167 #, c-format msgid "XACML request: %s" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:179 msgid "Authorized from xacml.pdp" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:180 msgid "UnAuthorized from xacml.pdp" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:55 msgid "Can not find element with proper namespace" msgstr "" #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:132 msgid "No target available inside the policy" msgstr "" #: src/hed/shc/xacmlpdp/XACMLRequest.cpp:34 msgid "Request is empty" msgstr "" #: src/hed/shc/xacmlpdp/XACMLRequest.cpp:39 msgid "Can not find element with proper namespace" msgstr "" #: src/hed/shc/xacmlpdp/XACMLRule.cpp:35 msgid "Invalid Effect" msgstr "" #: src/hed/shc/xacmlpdp/XACMLRule.cpp:48 msgid "No target available inside the rule" msgstr "" #: src/libs/data-staging/DTR.cpp:82 src/libs/data-staging/DTR.cpp:86 #, c-format msgid "Could not handle endpoint %s" msgstr "" #: src/libs/data-staging/DTR.cpp:96 msgid "Source is the same as destination" msgstr "" #: src/libs/data-staging/DTR.cpp:175 #, c-format msgid "Invalid ID: %s" msgstr "" #: src/libs/data-staging/DTR.cpp:212 #, c-format msgid "%s->%s" msgstr "" #: src/libs/data-staging/DTR.cpp:320 #, c-format msgid "No callback for %s defined" msgstr "" #: src/libs/data-staging/DTR.cpp:335 #, c-format msgid "NULL callback for %s" msgstr "" #: src/libs/data-staging/DTR.cpp:338 #, c-format msgid "Request to push to unknown owner - %u" msgstr "" #: src/libs/data-staging/DTRList.cpp:216 #, c-format msgid "Boosting priority from %i to %i due to incoming higher priority DTR" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:48 #: src/libs/data-staging/DataDelivery.cpp:72 msgid "Received invalid DTR" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:54 #, c-format msgid "Delivery received new DTR %s with source: %s, destination: %s" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:68 msgid "Received no DTR" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:80 #, c-format msgid "Cancelling DTR %s with source: %s, destination: %s" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:91 #, c-format msgid "DTR %s requested cancel but no active transfer" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:147 #, c-format msgid "Cleaning up after failure: deleting %s" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:188 #: src/libs/data-staging/DataDelivery.cpp:263 #: src/libs/data-staging/DataDelivery.cpp:303 #: src/libs/data-staging/DataDelivery.cpp:323 msgid "Failed to delete delivery object or deletion timed out" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:254 #, c-format msgid 
"Transfer finished: %llu bytes transferred %s" msgstr "" #: src/libs/data-staging/DataDelivery.cpp:329 msgid "Data delivery loop exited" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:45 msgid "No source defined" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:49 msgid "No destination defined" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:139 #, c-format msgid "Bad checksum format %s" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:178 #, c-format msgid "Failed to run command: %s" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:213 #, c-format msgid "DataDelivery: %s" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:225 #, c-format msgid "DataStagingDelivery exited with code %i" msgstr "" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:244 #, c-format msgid "Transfer killed after %i seconds without communication" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:67 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:315 #, c-format msgid "Connecting to Delivery service at %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:94 #, c-format msgid "Failed to set up credential delegation with %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:100 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:174 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:240 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:326 #, c-format msgid "" "Request:\n" "%s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:106 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:332 #, c-format msgid "Could not connect to service %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:114 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:340 #, c-format msgid "No SOAP response from Delivery service %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:119 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:193 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:267 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:346 #, c-format msgid "" "Response:\n" "%s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:128 #, c-format msgid "Failed to start transfer request: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:135 #, c-format msgid "Bad format in XML response from service at %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:143 #, c-format msgid "Could not make new transfer request: %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:148 #, c-format msgid "Started remote Delivery at %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:181 #, c-format msgid "Failed to send cancel request: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:188 msgid "Failed to cancel: No SOAP response" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:202 #, c-format msgid "Failed to cancel transfer request: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:209 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:290 #, c-format msgid "Bad format in XML response: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:216 #, c-format msgid "Failed to cancel: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:260 msgid "No SOAP response from delivery service" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:281 #, c-format msgid "Failed to query state: %s" msgstr "" #: 
src/libs/data-staging/DataDeliveryRemoteComm.cpp:355 #, c-format msgid "SOAP fault from delivery service at %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:363 #, c-format msgid "Bad format in XML response from delivery service at %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:371 #, c-format msgid "Error pinging delivery service at %s: %s: %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:379 #, c-format msgid "Dir %s allowed at service %s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:473 #, c-format msgid "" "DataDelivery log tail:\n" "%s" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:486 msgid "Failed locating credentials" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:491 msgid "Failed to initiate client connection" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:497 msgid "Client connection has no entry point" msgstr "" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:507 msgid "Failed to initiate delegation credentials" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:97 #, c-format msgid "%5u s: %10.1f kB %8.1f kB/s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:152 msgid "Unexpected arguments" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:155 msgid "Source URL missing" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:158 msgid "Destination URL missing" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:162 #, c-format msgid "Source URL not valid: %s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:166 #, c-format msgid "Destination URL not valid: %s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:223 #, c-format msgid "Unknown transfer option: %s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:248 #, c-format msgid "Source URL not supported: %s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:253 #: src/libs/data-staging/DataStagingDelivery.cpp:272 msgid "No credentials supplied" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:267 #, c-format msgid "Destination URL not supported: %s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:316 #, c-format msgid "Will calculate %s checksum" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:327 msgid "Cannot use supplied --size option" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:540 #, c-format msgid "Checksum mismatch between calculated checksum %s and source checksum %s" msgstr "" #: src/libs/data-staging/DataStagingDelivery.cpp:550 #, c-format msgid "Failed cleaning up destination %s" msgstr "" #: src/libs/data-staging/Processor.cpp:59 #: src/services/candypond/CandyPond.cpp:117 msgid "Error creating cache" msgstr "" #: src/libs/data-staging/Processor.cpp:83 #, c-format msgid "Forcing re-download of file %s" msgstr "" #: src/libs/data-staging/Processor.cpp:100 #, c-format msgid "Will wait around %is" msgstr "" #: src/libs/data-staging/Processor.cpp:119 #, c-format msgid "Force-checking source of cache file %s" msgstr "" #: src/libs/data-staging/Processor.cpp:122 #, c-format msgid "Source check requested but failed: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:142 msgid "Permission checking failed, will try downloading without using cache" msgstr "" #: src/libs/data-staging/Processor.cpp:172 #, c-format msgid "Will download to cache file %s" msgstr "" #: src/libs/data-staging/Processor.cpp:193 msgid "Looking up source replicas" msgstr "" #: 
src/libs/data-staging/Processor.cpp:210 #: src/libs/data-staging/Processor.cpp:317 #, c-format msgid "Skipping replica on local host %s" msgstr "" #: src/libs/data-staging/Processor.cpp:218 #: src/libs/data-staging/Processor.cpp:325 #, c-format msgid "No locations left for %s" msgstr "" #: src/libs/data-staging/Processor.cpp:239 #: src/libs/data-staging/Processor.cpp:481 msgid "Resolving destination replicas" msgstr "" #: src/libs/data-staging/Processor.cpp:256 msgid "No locations for destination different from source found" msgstr "" #: src/libs/data-staging/Processor.cpp:267 msgid "Pre-registering destination in index service" msgstr "" #: src/libs/data-staging/Processor.cpp:293 msgid "Resolving source replicas in bulk" msgstr "" #: src/libs/data-staging/Processor.cpp:307 #, c-format msgid "No replicas found for %s" msgstr "" #: src/libs/data-staging/Processor.cpp:348 #, c-format msgid "Checking %s" msgstr "" #: src/libs/data-staging/Processor.cpp:357 #: src/libs/data-staging/Processor.cpp:415 msgid "Metadata of replica and index service differ" msgstr "" #: src/libs/data-staging/Processor.cpp:365 #, c-format msgid "Failed checking source replica %s: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:391 msgid "Querying source replicas in bulk" msgstr "" #: src/libs/data-staging/Processor.cpp:403 #, c-format msgid "Failed checking source replica: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:409 msgid "Failed checking source replica" msgstr "" #: src/libs/data-staging/Processor.cpp:449 msgid "Finding existing destination replicas" msgstr "" #: src/libs/data-staging/Processor.cpp:461 #, c-format msgid "Failed to delete replica %s: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:475 #, c-format msgid "Unregistering %s" msgstr "" #: src/libs/data-staging/Processor.cpp:486 msgid "Pre-registering destination" msgstr "" #: src/libs/data-staging/Processor.cpp:492 #, c-format msgid "Failed to pre-clean destination: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:515 msgid "Preparing to stage source" msgstr "" #: src/libs/data-staging/Processor.cpp:528 #, c-format msgid "Source is not ready, will wait %u seconds" msgstr "" #: src/libs/data-staging/Processor.cpp:534 msgid "No physical files found for source" msgstr "" #: src/libs/data-staging/Processor.cpp:552 msgid "Preparing to stage destination" msgstr "" #: src/libs/data-staging/Processor.cpp:565 #, c-format msgid "Destination is not ready, will wait %u seconds" msgstr "" #: src/libs/data-staging/Processor.cpp:571 msgid "No physical files found for destination" msgstr "" #: src/libs/data-staging/Processor.cpp:597 msgid "Releasing source" msgstr "" #: src/libs/data-staging/Processor.cpp:601 #, c-format msgid "There was a problem during post-transfer source handling: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:606 msgid "Releasing destination" msgstr "" #: src/libs/data-staging/Processor.cpp:610 #, c-format msgid "" "There was a problem during post-transfer destination handling after error: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:614 #, c-format msgid "Error with post-transfer destination handling: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:640 msgid "Removing pre-registered destination in index service" msgstr "" #: src/libs/data-staging/Processor.cpp:643 #, c-format msgid "" "Failed to unregister pre-registered destination %s: %s. 
You may need to " "unregister it manually" msgstr "" #: src/libs/data-staging/Processor.cpp:649 msgid "Registering destination replica" msgstr "" #: src/libs/data-staging/Processor.cpp:652 #, c-format msgid "Failed to register destination replica: %s" msgstr "" #: src/libs/data-staging/Processor.cpp:655 #, c-format msgid "" "Failed to unregister pre-registered destination %s. You may need to " "unregister it manually" msgstr "" #: src/libs/data-staging/Processor.cpp:685 msgid "Error creating cache. Stale locks may remain." msgstr "" #: src/libs/data-staging/Processor.cpp:718 #, c-format msgid "Linking/copying cached file to %s" msgstr "" #: src/libs/data-staging/Processor.cpp:739 #, c-format msgid "Failed linking cache file to %s" msgstr "" #: src/libs/data-staging/Processor.cpp:743 #, c-format msgid "Error linking cache file to %s." msgstr "" #: src/libs/data-staging/Processor.cpp:764 #: src/libs/data-staging/Processor.cpp:771 msgid "Adding to bulk request" msgstr "" #: src/libs/data-staging/Scheduler.cpp:174 #: src/libs/data-staging/Scheduler.cpp:181 msgid "source" msgstr "" #: src/libs/data-staging/Scheduler.cpp:174 #: src/libs/data-staging/Scheduler.cpp:181 msgid "destination" msgstr "" #: src/libs/data-staging/Scheduler.cpp:174 #, c-format msgid "Using next %s replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:181 #, c-format msgid "No more %s replicas" msgstr "" #: src/libs/data-staging/Scheduler.cpp:183 msgid "Will clean up pre-registered destination" msgstr "" #: src/libs/data-staging/Scheduler.cpp:187 msgid "Will release cache locks" msgstr "" #: src/libs/data-staging/Scheduler.cpp:190 msgid "Moving to end of data staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:199 #, c-format msgid "Source is mapped to %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:203 msgid "Cannot link to source which can be modified, will copy instead" msgstr "" #: src/libs/data-staging/Scheduler.cpp:212 msgid "Cannot link to a remote destination. Will not use mapped URL" msgstr "" #: src/libs/data-staging/Scheduler.cpp:215 msgid "Linking mapped file" msgstr "" #: src/libs/data-staging/Scheduler.cpp:222 #, c-format msgid "Failed to create link: %s. 
Will not use mapped URL" msgstr "" #: src/libs/data-staging/Scheduler.cpp:247 #, c-format msgid "" "Scheduler received new DTR %s with source: %s, destination: %s, assigned to " "transfer share %s with priority %d" msgstr "" #: src/libs/data-staging/Scheduler.cpp:255 msgid "" "File is not cacheable, was requested not to be cached or no cache available, " "skipping cache check" msgstr "" #: src/libs/data-staging/Scheduler.cpp:261 msgid "File is cacheable, will check cache" msgstr "" #: src/libs/data-staging/Scheduler.cpp:264 #: src/libs/data-staging/Scheduler.cpp:289 #, c-format msgid "File is currently being cached, will wait %is" msgstr "" #: src/libs/data-staging/Scheduler.cpp:283 msgid "Timed out while waiting for cache lock" msgstr "" #: src/libs/data-staging/Scheduler.cpp:293 msgid "Checking cache again" msgstr "" #: src/libs/data-staging/Scheduler.cpp:313 msgid "Destination file is in cache" msgstr "" #: src/libs/data-staging/Scheduler.cpp:317 msgid "Source and/or destination is index service, will resolve replicas" msgstr "" #: src/libs/data-staging/Scheduler.cpp:320 msgid "" "Neither source nor destination are index services, will skip resolving " "replicas" msgstr "" #: src/libs/data-staging/Scheduler.cpp:331 msgid "Problem with index service, will release cache lock" msgstr "" #: src/libs/data-staging/Scheduler.cpp:335 msgid "Problem with index service, will proceed to end of data staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:345 msgid "Checking source file is present" msgstr "" #: src/libs/data-staging/Scheduler.cpp:353 msgid "Error with source file, moving to next replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:375 #, c-format msgid "Replica %s has long latency, trying next replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:377 #, c-format msgid "No more replicas, will use %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:380 #, c-format msgid "Checking replica %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:390 msgid "Overwrite requested - will pre-clean destination" msgstr "" #: src/libs/data-staging/Scheduler.cpp:393 msgid "No overwrite requested or allowed, skipping pre-cleaning" msgstr "" #: src/libs/data-staging/Scheduler.cpp:401 msgid "Pre-clean failed, will still try to copy" msgstr "" #: src/libs/data-staging/Scheduler.cpp:408 msgid "Source or destination requires staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:412 msgid "No need to stage source or destination, skipping staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:442 msgid "Staging request timed out, will release request" msgstr "" #: src/libs/data-staging/Scheduler.cpp:446 msgid "Querying status of staging request" msgstr "" #: src/libs/data-staging/Scheduler.cpp:455 msgid "Releasing requests" msgstr "" #: src/libs/data-staging/Scheduler.cpp:472 msgid "DTR is ready for transfer, moving to delivery queue" msgstr "" #: src/libs/data-staging/Scheduler.cpp:487 #, c-format msgid "Transfer failed: %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:497 msgid "Releasing request(s) made during staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:500 msgid "Neither source nor destination were staged, skipping releasing requests" msgstr "" #: src/libs/data-staging/Scheduler.cpp:512 msgid "Trying next replica" msgstr "" #: src/libs/data-staging/Scheduler.cpp:517 msgid "unregister" msgstr "" #: src/libs/data-staging/Scheduler.cpp:517 msgid "register" msgstr "" #: src/libs/data-staging/Scheduler.cpp:516 #, c-format msgid "Will %s in destination index service" 
msgstr "" #: src/libs/data-staging/Scheduler.cpp:520 msgid "Destination is not index service, skipping replica registration" msgstr "" #: src/libs/data-staging/Scheduler.cpp:533 msgid "Error registering replica, moving to end of data staging" msgstr "" #: src/libs/data-staging/Scheduler.cpp:542 msgid "Will process cache" msgstr "" #: src/libs/data-staging/Scheduler.cpp:546 msgid "File is not cacheable, skipping cache processing" msgstr "" #: src/libs/data-staging/Scheduler.cpp:560 msgid "Cancellation complete" msgstr "" #: src/libs/data-staging/Scheduler.cpp:574 msgid "Will wait 10s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:580 msgid "Error in cache processing, will retry without caching" msgstr "" #: src/libs/data-staging/Scheduler.cpp:589 msgid "Will retry without caching" msgstr "" #: src/libs/data-staging/Scheduler.cpp:607 msgid "Proxy has expired" msgstr "" #: src/libs/data-staging/Scheduler.cpp:618 #, c-format msgid "%i retries left, will wait until %s before next attempt" msgstr "" #: src/libs/data-staging/Scheduler.cpp:634 msgid "Out of retries" msgstr "" #: src/libs/data-staging/Scheduler.cpp:636 msgid "Permanent failure" msgstr "" #: src/libs/data-staging/Scheduler.cpp:642 msgid "Finished successfully" msgstr "" #: src/libs/data-staging/Scheduler.cpp:652 msgid "Returning to generator" msgstr "" #: src/libs/data-staging/Scheduler.cpp:818 #, c-format msgid "File is smaller than %llu bytes, will use local delivery" msgstr "" #: src/libs/data-staging/Scheduler.cpp:872 #, c-format msgid "Delivery service at %s can copy to %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:880 #, c-format msgid "Delivery service at %s can copy from %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:893 msgid "Could not find any useable delivery service, forcing local transfer" msgstr "" #: src/libs/data-staging/Scheduler.cpp:909 #, c-format msgid "Not using delivery service at %s because it is full" msgstr "" #: src/libs/data-staging/Scheduler.cpp:936 #, c-format msgid "Not using delivery service %s due to previous failure" msgstr "" #: src/libs/data-staging/Scheduler.cpp:946 msgid "No remote delivery services are useable, forcing local delivery" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1150 msgid "Cancelling active transfer" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1160 msgid "Processing thread timed out. 
Restarting DTR" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1228 msgid "Will use bulk request" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1250 msgid "No delivery endpoints available, will try later" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1269 msgid "Scheduler received NULL DTR" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1279 msgid "Scheduler received invalid DTR" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1368 msgid "Scheduler starting up" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1369 msgid "Scheduler configuration:" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1370 #, c-format msgid " Pre-processor slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1371 #, c-format msgid " Delivery slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1372 #, c-format msgid " Post-processor slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1373 #, c-format msgid " Emergency slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1374 #, c-format msgid " Prepared slots: %u" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1375 #, c-format msgid "" " Shares configuration:\n" "%s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1378 msgid " Delivery service: LOCAL" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1379 #, c-format msgid " Delivery service: %s" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1384 msgid "Failed to create DTR dump thread" msgstr "" #: src/libs/data-staging/Scheduler.cpp:1401 #: src/services/data-staging/DataDeliveryService.cpp:507 #, c-format msgid "DTR %s cancelled" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:15 msgid "Shutting down scheduler" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:17 msgid "Scheduler stopped, exiting" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:23 #, c-format msgid "Received DTR %s back from scheduler in state %s" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:30 msgid "Generator started" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:31 msgid "Starting DTR threads" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:44 msgid "No valid credentials found, exiting" msgstr "" #: src/libs/data-staging/examples/Generator.cpp:55 #, c-format msgid "Problem creating dtr (source %s, destination %s)" msgstr "" #: src/services/a-rex/arex.cpp:452 src/services/candypond/CandyPond.cpp:569 #: src/services/data-staging/DataDeliveryService.cpp:681 #, c-format msgid "SOAP operation is not supported: %s" msgstr "" #: src/services/a-rex/arex.cpp:471 src/services/a-rex/arex.cpp:517 #, c-format msgid "Security Handlers processing failed: %s" msgstr "" #: src/services/a-rex/arex.cpp:485 msgid "Can't obtain configuration. Public information is disabled." msgstr "" #: src/services/a-rex/arex.cpp:495 msgid "" "Can't obtain configuration. Public information is disallowed for this user." msgstr "" #: src/services/a-rex/arex.cpp:502 msgid "Can't obtain configuration. Only public information is provided." 
msgstr "" #: src/services/a-rex/arex.cpp:530 src/services/a-rex/rest/rest.cpp:674 #, c-format msgid "Connection from %s: %s" msgstr "" #: src/services/a-rex/arex.cpp:533 src/services/a-rex/rest/rest.cpp:678 #, c-format msgid "process: method: %s" msgstr "" #: src/services/a-rex/arex.cpp:534 src/services/a-rex/rest/rest.cpp:679 #, c-format msgid "process: endpoint: %s" msgstr "" #: src/services/a-rex/arex.cpp:559 #, c-format msgid "process: id: %s" msgstr "" #: src/services/a-rex/arex.cpp:560 #, c-format msgid "process: subop: %s" msgstr "" #: src/services/a-rex/arex.cpp:567 #, c-format msgid "process: subpath: %s" msgstr "" #: src/services/a-rex/arex.cpp:605 src/services/candypond/CandyPond.cpp:543 #: src/services/data-staging/DataDeliveryService.cpp:641 #: src/tests/echo/echo.cpp:98 #, c-format msgid "process: request=%s" msgstr "" #: src/services/a-rex/arex.cpp:610 src/services/candypond/CandyPond.cpp:548 #: src/services/data-staging/DataDeliveryService.cpp:646 #: src/tests/count/count.cpp:69 msgid "input does not define operation" msgstr "" #: src/services/a-rex/arex.cpp:613 src/services/candypond/CandyPond.cpp:551 #: src/services/data-staging/DataDeliveryService.cpp:649 #: src/tests/count/count.cpp:72 #, c-format msgid "process: operation: %s" msgstr "" #: src/services/a-rex/arex.cpp:640 msgid "POST request on special path is not supported" msgstr "" #: src/services/a-rex/arex.cpp:645 msgid "process: factory endpoint" msgstr "" #: src/services/a-rex/arex.cpp:788 src/services/candypond/CandyPond.cpp:580 #: src/services/data-staging/DataDeliveryService.cpp:692 #: src/tests/echo/echo.cpp:158 #, c-format msgid "process: response=%s" msgstr "" #: src/services/a-rex/arex.cpp:794 msgid "Per-job POST/SOAP requests are not supported" msgstr "" #: src/services/a-rex/arex.cpp:803 msgid "process: GET" msgstr "" #: src/services/a-rex/arex.cpp:804 #, c-format msgid "GET: id %s path %s" msgstr "" #: src/services/a-rex/arex.cpp:837 msgid "process: HEAD" msgstr "" #: src/services/a-rex/arex.cpp:838 #, c-format msgid "HEAD: id %s path %s" msgstr "" #: src/services/a-rex/arex.cpp:871 msgid "process: PUT" msgstr "" #: src/services/a-rex/arex.cpp:904 msgid "process: DELETE" msgstr "" #: src/services/a-rex/arex.cpp:937 #, c-format msgid "process: method %s is not supported" msgstr "" #: src/services/a-rex/arex.cpp:940 msgid "process: method is not defined" msgstr "" #: src/services/a-rex/arex.cpp:1050 msgid "Failed to run Grid Manager thread" msgstr "" #: src/services/a-rex/arex.cpp:1109 #, c-format msgid "Failed to process configuration in %s" msgstr "" #: src/services/a-rex/arex.cpp:1114 msgid "No control directory set in configuration" msgstr "" #: src/services/a-rex/arex.cpp:1118 msgid "No session directory set in configuration" msgstr "" #: src/services/a-rex/arex.cpp:1122 msgid "No LRMS set in configuration" msgstr "" #: src/services/a-rex/arex.cpp:1127 #, c-format msgid "Failed to create control directory %s" msgstr "" #: src/services/a-rex/cachecheck.cpp:37 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:658 #, c-format msgid "Error with cache configuration: %s" msgstr "" #: src/services/a-rex/cachecheck.cpp:53 #: src/services/candypond/CandyPond.cpp:318 msgid "Error with cache configuration" msgstr "" #: src/services/a-rex/cachecheck.cpp:78 #: src/services/candypond/CandyPond.cpp:146 #: src/services/candypond/CandyPond.cpp:343 #, c-format msgid "Looking up URL %s" msgstr "" #: src/services/a-rex/cachecheck.cpp:80 #: src/services/candypond/CandyPond.cpp:155 #, c-format msgid "Cache file is 
%s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:55 #: src/services/a-rex/change_activity_status.cpp:59 #, c-format msgid "EMIES:PauseActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:104 #: src/services/a-rex/change_activity_status.cpp:108 #, c-format msgid "EMIES:ResumeActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:153 #: src/services/a-rex/change_activity_status.cpp:158 #, c-format msgid "EMIES:CancelActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:166 #, c-format msgid "job %s cancelled successfully" msgstr "" #: src/services/a-rex/change_activity_status.cpp:212 #: src/services/a-rex/change_activity_status.cpp:227 #, c-format msgid "EMIES:WipeActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:231 #, c-format msgid "job %s (will be) cleaned successfully" msgstr "" #: src/services/a-rex/change_activity_status.cpp:277 #: src/services/a-rex/change_activity_status.cpp:282 #, c-format msgid "EMIES:RestartActivity: job %s - %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:286 #, c-format msgid "job %s restarted successfully" msgstr "" #: src/services/a-rex/change_activity_status.cpp:301 #: src/services/a-rex/put.cpp:163 src/services/a-rex/put.cpp:204 #, c-format msgid "%s: there is no such job: %s" msgstr "" #: src/services/a-rex/change_activity_status.cpp:309 #, c-format msgid "%s: put log %s: there is no payload" msgstr "" #: src/services/a-rex/change_activity_status.cpp:315 #, c-format msgid "%s: put log %s: unrecognized payload" msgstr "" #: src/services/a-rex/change_activity_status.cpp:354 msgid "A-REX REST: Failed to resume job" msgstr "" #: src/services/a-rex/change_activity_status.cpp:358 #, c-format msgid "A-REX REST: State change not allowed: from %s to %s" msgstr "" #: src/services/a-rex/create_activity.cpp:52 #, c-format msgid "" "EMIES:CreateActivity: request = \n" "%s" msgstr "" #: src/services/a-rex/create_activity.cpp:58 msgid "EMIES:CreateActivity: too many activity descriptions" msgstr "" #: src/services/a-rex/create_activity.cpp:68 msgid "EMIES:CreateActivity: no job description found" msgstr "" #: src/services/a-rex/create_activity.cpp:75 msgid "EMIES:CreateActivity: max jobs total limit reached" msgstr "" #: src/services/a-rex/create_activity.cpp:101 #, c-format msgid "ES:CreateActivity: Failed to create new job: %s" msgstr "" #: src/services/a-rex/create_activity.cpp:117 msgid "EMIES:CreateActivity finished successfully" msgstr "" #: src/services/a-rex/create_activity.cpp:118 #, c-format msgid "New job accepted with id %s" msgstr "" #: src/services/a-rex/create_activity.cpp:122 #, c-format msgid "" "EMIES:CreateActivity: response = \n" "%s" msgstr "" #: src/services/a-rex/create_activity.cpp:137 msgid "NEW: put new job: there is no payload" msgstr "" #: src/services/a-rex/create_activity.cpp:141 msgid "NEW: put new job: max jobs total limit reached" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:51 msgid "Wiping and re-creating whole storage" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:214 #: src/services/a-rex/delegation/DelegationStore.cpp:316 #, c-format msgid "DelegationStore: TouchConsumer failed to create file %s" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:276 msgid "DelegationStore: PeriodicCheckConsumers failed to resume iterator" msgstr "" #: src/services/a-rex/delegation/DelegationStore.cpp:296 #, c-format msgid "" "DelegationStore: 
PeriodicCheckConsumers failed to remove old delegation %s - " "%s" msgstr "" #: src/services/a-rex/get.cpp:174 src/services/a-rex/get.cpp:229 #: src/services/a-rex/get.cpp:313 #, c-format msgid "Get: there is no job %s - %s" msgstr "" #: src/services/a-rex/get.cpp:380 #, c-format msgid "Head: there is no job %s - %s" msgstr "" #: src/services/a-rex/get.cpp:436 msgid "Failed to extract credential information" msgstr "" #: src/services/a-rex/get.cpp:439 #, c-format msgid "Checking cache permissions: DN: %s" msgstr "" #: src/services/a-rex/get.cpp:440 #, c-format msgid "Checking cache permissions: VO: %s" msgstr "" #: src/services/a-rex/get.cpp:442 #, c-format msgid "Checking cache permissions: VOMS attr: %s" msgstr "" #: src/services/a-rex/get.cpp:452 #, c-format msgid "Cache access allowed to %s by DN %s" msgstr "" #: src/services/a-rex/get.cpp:455 #, c-format msgid "DN %s doesn't match %s" msgstr "" #: src/services/a-rex/get.cpp:458 #, c-format msgid "Cache access allowed to %s by VO %s" msgstr "" #: src/services/a-rex/get.cpp:461 #, c-format msgid "VO %s doesn't match %s" msgstr "" #: src/services/a-rex/get.cpp:467 src/services/a-rex/get.cpp:486 #, c-format msgid "Bad credential value %s in cache access rules" msgstr "" #: src/services/a-rex/get.cpp:475 src/services/a-rex/get.cpp:494 #, c-format msgid "VOMS attr %s matches %s" msgstr "" #: src/services/a-rex/get.cpp:476 #, c-format msgid "Cache access allowed to %s by VO %s and role %s" msgstr "" #: src/services/a-rex/get.cpp:479 src/services/a-rex/get.cpp:498 #, c-format msgid "VOMS attr %s doesn't match %s" msgstr "" #: src/services/a-rex/get.cpp:495 #, c-format msgid "Cache access allowed to %s by VO %s and group %s" msgstr "" #: src/services/a-rex/get.cpp:501 #, c-format msgid "Unknown credential type %s for URL pattern %s" msgstr "" #: src/services/a-rex/get.cpp:507 #, c-format msgid "No match found in cache access rules for %s" msgstr "" #: src/services/a-rex/get.cpp:517 #, c-format msgid "Get from cache: Looking in cache for %s" msgstr "" #: src/services/a-rex/get.cpp:520 #, c-format msgid "Get from cache: Invalid URL %s" msgstr "" #: src/services/a-rex/get.cpp:537 msgid "Get from cache: Error in cache configuration" msgstr "" #: src/services/a-rex/get.cpp:546 msgid "Get from cache: File not in cache" msgstr "" #: src/services/a-rex/get.cpp:549 #, c-format msgid "Get from cache: could not access cached file: %s" msgstr "" #: src/services/a-rex/get.cpp:559 msgid "Get from cache: Cached file is locked" msgstr "" #: src/services/a-rex/get_activity_statuses.cpp:214 #: src/services/a-rex/get_activity_statuses.cpp:320 #, c-format msgid "EMIES:GetActivityStatus: job %s - %s" msgstr "" #: src/services/a-rex/get_activity_statuses.cpp:455 #, c-format msgid "EMIES:GetActivityInfo: job %s - failed to retrieve GLUE2 information" msgstr "" #: src/services/a-rex/get_activity_statuses.cpp:507 #: src/services/a-rex/get_activity_statuses.cpp:514 #, c-format msgid "EMIES:NotifyService: job %s - %s" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:98 #, c-format msgid "" "Cannot create directories for log file %s. Messages will be logged to this " "log" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:104 #, c-format msgid "" "Cannot open cache log file %s: %s. 
Cache cleaning messages will be logged to " "this log" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:114 msgid "Failed to start cache clean script" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:115 msgid "Cache cleaning script failed" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:183 #, c-format msgid "External request for attention %s" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:201 #, c-format msgid "Failed to open heartbeat file %s" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:223 msgid "Starting jobs processing thread" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:224 #, c-format msgid "Used configuration file %s" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:232 #, c-format msgid "" "Error initiating delegation database in %s. Maybe permissions are not " "suitable. Returned error is: %s." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:244 msgid "Failed to start new thread: cache won't be cleaned" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:251 msgid "Failed to activate Jobs Processing object, exiting Grid Manager thread" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:260 #, c-format msgid "" "Error adding communication interface in %s. Maybe another instance of A-REX " "is already running." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:263 #, c-format msgid "" "Error adding communication interface in %s. Maybe permissions are not " "suitable." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:270 msgid "Failed to start new thread for monitoring job requests" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:276 msgid "Picking up left jobs" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:279 msgid "Starting data staging threads" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:283 msgid "Starting jobs' monitoring" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:291 #, c-format msgid "" "SSHFS mount point of session directory (%s) is broken - waiting for " "reconnect ..." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:295 #, c-format msgid "" "SSHFS mount point of runtime directory (%s) is broken - waiting for " "reconnect ..." msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:300 #, c-format msgid "" "SSHFS mount point of cache directory (%s) is broken - waiting for " "reconnect ..." 
msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:349 #, c-format msgid "Orphan delegation lock detected (%s) - cleaning" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:354 msgid "Failed to obtain delegation locks for cleaning orphaned locks" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:368 msgid "Waking up" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:371 msgid "Stopping jobs processing thread" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:373 msgid "Exiting jobs processing thread" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:391 msgid "Requesting to stop job processing" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:399 msgid "Waiting for main job processing thread to exit" msgstr "" #: src/services/a-rex/grid-manager/GridManager.cpp:401 msgid "Stopped job processing" msgstr "" #: src/services/a-rex/grid-manager/accounting/AAR.cpp:73 msgid "Cannot find information abouto job submission endpoint" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:53 #, c-format msgid "Failed to read database schema file at %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:63 msgid "Accounting database initialized succesfully" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:65 msgid "Accounting database connection has been established" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:75 #, c-format msgid "%s. SQLite database error: %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:77 #, c-format msgid "SQLite database error: %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:105 #, c-format msgid "Directory %s to store accounting database has been created." msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:107 #, c-format msgid "" "Accounting database cannot be created. Faile to create parent directory %s." 
msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:111 #, c-format msgid "Accounting database cannot be created: %s is not a directory" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:118 msgid "Failed to initialize accounting database" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:125 #, c-format msgid "Accounting database file (%s) is not a regular file" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:131 msgid "Error opening accounting database" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:149 msgid "Closing connection to SQLite accounting database" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:238 #, c-format msgid "Failed to fetch data from %s accounting database table" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:255 #, c-format msgid "Failed to add '%s' into the accounting database %s table" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:314 msgid "Failed to fetch data from accounting database Endpoints table" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:331 #, c-format msgid "" "Failed to add '%s' URL (interface type %s) into the accounting database " "Endpoints table" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:357 #, c-format msgid "Failed to query AAR database ID for job %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:412 #, c-format msgid "Failed to insert AAR into the database for job %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:413 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:460 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:491 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:507 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:523 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:544 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:560 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:575 #, c-format msgid "SQL statement used: %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:418 #, c-format msgid "Failed to write authtoken attributes for job %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:422 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:477 #, c-format msgid "Failed to write event records for job %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:433 #, c-format msgid "" "Cannot to update AAR. Cannot find registered AAR for job %s in accounting " "database." 
msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:459 #, c-format msgid "Failed to update AAR in the database for job %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:465 #, c-format msgid "Failed to write RTEs information for the job %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:469 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:473 #, c-format msgid "Failed to write data transfers information for the job %s" msgstr "" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:569 #, c-format msgid "Unable to add event: cannot find AAR for job %s in accounting database." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:73 #, c-format msgid "Unknown option %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:80 msgid "Job ID argument is required." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:86 msgid "Path to user's proxy file should be specified." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:92 msgid "User name should be specified." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:98 msgid "Path to .local job status file is required." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:106 msgid "Generating ceID prefix from hostname automatically" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:109 msgid "" "Cannot determine hostname from gethostname() to generate ceID automatically." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:118 #, c-format msgid "ceID prefix is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:126 #, c-format msgid "Getting currect timestamp for BLAH parser log: %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:135 msgid "Parsing .local file to obtain job-specific identifiers and info" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:145 #, c-format msgid "globalid is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:148 #, c-format msgid "headnode is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:151 #, c-format msgid "interface is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:155 msgid "There is no local LRMS ID. Message will not be written to BLAH log." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:158 #, c-format msgid "localid is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:161 #, c-format msgid "queue name is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:164 #, c-format msgid "owner subject is set to %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:166 msgid "" "Job did not finished successfully. Message will not be written to BLAH log." msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:174 #, c-format msgid "Job timestamp successfully parsed as %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:178 msgid "Can not read information from the local job status file" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:194 #, c-format msgid "" "Unsupported submission interface %s. Seems arc-blahp-logger need to be " "updated accordingly. Please submit the bug to bugzilla." 
msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:204 msgid "Parsing VOMS AC to get FQANs information" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:217 #, c-format msgid "Found VOMS AC attribute: %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:230 msgid "VOMS AC attribute is a tag" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:237 msgid "Skipping policyAuthority VOMS AC attribute" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:241 msgid "VOMS AC attribute is the FQAN" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:249 msgid "No FQAN found. Using None as userFQAN value" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:263 #, c-format msgid "Assembling BLAH parser log entry: %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:268 #, c-format msgid "Writing the info to the BLAH parser log: %s" msgstr "" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:276 #, c-format msgid "Cannot open BLAH log file '%s'" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:34 #, c-format msgid "Missing cancel-%s-job - job cancellation may not work" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:38 #, c-format msgid "Missing submit-%s-job - job submission to LRMS may not work" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:42 #, c-format msgid "Missing scan-%s-job - may miss when job finished executing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:56 #, c-format msgid "Wrong option in %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:67 #, c-format msgid "Can't read configuration file at %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:77 #, c-format msgid "Can't recognize type of configuration file at %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:80 msgid "Could not determine configuration type or configuration is empty" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:161 msgid "lrms is empty" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:194 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:203 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:212 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:221 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:230 msgid "Missing number in maxjobs" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:197 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:206 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:215 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:224 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:233 #, c-format msgid "Wrong number in maxjobs: %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:243 #, c-format msgid "Wrong number in wakeupperiod: %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:249 msgid "mail parameter is empty" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:255 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:259 msgid "Wrong number in defaultttl command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:265 msgid "Wrong number in maxrerun command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:272 msgid "State name for plugin is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:276 msgid "Options for plugin are missing" msgstr "" #: 
src/services/a-rex/grid-manager/conf/CoreConfig.cpp:279 #, c-format msgid "Failed to register plugin for state %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:285 msgid "Session root directory is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:288 msgid "Junk in sessiondir command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:300 msgid "Missing directory in controldir command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:305 msgid "" "'control' configuration option is no longer supported, please use " "'controldir' instead" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:310 msgid "User for helper program is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:313 msgid "Only user '.' for helper program is supported" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:316 msgid "Helper program is missing" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:337 msgid "Wrong option in fixdirectories" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:364 msgid "Wrong option in delegationdb" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:370 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:556 msgid "forcedefaultvoms parameter is empty" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:445 msgid "Wrong number in maxjobdesc command" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:495 msgid "Missing file name in [arex/jura] logfile" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:506 #, c-format msgid "Wrong number in urdelivery_frequency: %s" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:549 msgid "No queue name given in queue block name" msgstr "" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:565 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:600 msgid "advertisedvo parameter is empty" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:117 #, c-format msgid "\tSession root dir : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:118 #, c-format msgid "\tControl dir : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:119 #, c-format msgid "\tdefault LRMS : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:120 #, c-format msgid "\tdefault queue : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:121 #, c-format msgid "\tdefault ttl : %u" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:126 msgid "No valid caches found in configuration, caching is disabled" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:131 #, c-format msgid "\tCache : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:133 #, c-format msgid "\tCache link dir : %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:136 #, c-format msgid "\tCache (read-only): %s" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:138 msgid "\tCache cleaning enabled" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:139 msgid "\tCache cleaning disabled" msgstr "" #: src/services/a-rex/grid-manager/conf/GMConfig.cpp:327 msgid "" "Globus location variable substitution is not supported anymore. Please " "specify path directly." 
msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:35 msgid "Can't read configuration file" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:41 #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:29 msgid "Can't recognize type of configuration file" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:47 msgid "Configuration error" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:77 msgid "Bad number in maxdelivery" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:83 msgid "Bad number in maxemergency" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:89 msgid "Bad number in maxprocessor" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:95 msgid "Bad number in maxprepared" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:101 msgid "Bad number in maxtransfertries" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:112 msgid "Bad number in speedcontrol" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:123 #, c-format msgid "Bad number in definedshare %s" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:132 #, c-format msgid "Bad URL in deliveryservice: %s" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:143 msgid "Bad number in remotesizelimit" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:168 msgid "Bad value for loglevel" msgstr "" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:182 msgid "Bad URL in acix_endpoint" msgstr "" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:24 msgid "Can't open configuration file" msgstr "" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:45 msgid "Not enough parameters in copyurl" msgstr "" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:54 msgid "Not enough parameters in linkurl" msgstr "" #: src/services/a-rex/grid-manager/files/ControlFileContent.cpp:179 #, c-format msgid "Wrong directory in %s" msgstr "" #: src/services/a-rex/grid-manager/files/ControlFileHandling.cpp:102 #, c-format msgid "Failed setting file owner: %s" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:33 msgid "gm-delegations-converter changes format of delegation database." 
msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:38 #: src/services/a-rex/grid-manager/gm_jobs.cpp:110 #: src/services/a-rex/grid-manager/gm_kick.cpp:24 msgid "use specified configuration file" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:39 #: src/services/a-rex/grid-manager/gm_jobs.cpp:111 #: src/services/a-rex/grid-manager/gm_kick.cpp:25 msgid "file" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:43 #: src/services/a-rex/grid-manager/gm_jobs.cpp:115 msgid "read information from specified control directory" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:44 #: src/services/a-rex/grid-manager/gm_jobs.cpp:116 msgid "dir" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:48 msgid "convert from specified input database format [bdb|sqlite]" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:49 #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:54 msgid "database format" msgstr "" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:53 msgid "convert into specified output database format [bdb|sqlite]" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:36 #, c-format msgid "Could not read data staging configuration from %s" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:44 #, c-format msgid "Can't read transfer states from %s. Perhaps A-REX is not running?" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:100 msgid "gm-jobs displays information on current jobs in the system." msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:105 msgid "display more information on each job" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:120 msgid "print summary of jobs in each transfer share" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:125 msgid "do not print list of jobs" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:130 msgid "do not print number of jobs in each state" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:135 msgid "print state of the service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:140 msgid "show only jobs of user(s) with specified subject name(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:141 #: src/services/a-rex/grid-manager/gm_jobs.cpp:151 #: src/services/a-rex/grid-manager/gm_jobs.cpp:161 msgid "dn" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:145 msgid "request to cancel job(s) with specified ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:146 #: src/services/a-rex/grid-manager/gm_jobs.cpp:156 #: src/services/a-rex/grid-manager/gm_jobs.cpp:166 #: src/services/a-rex/grid-manager/gm_jobs.cpp:176 #: src/services/a-rex/grid-manager/gm_kick.cpp:30 msgid "id" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:150 msgid "" "request to cancel jobs belonging to user(s) with specified subject name(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:155 msgid "request to clean job(s) with specified ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:160 msgid "" "request to clean jobs belonging to user(s) with specified subject name(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:165 msgid "show only jobs with specified ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:170 msgid "print list of available delegation IDs" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:175 msgid "print delegation token of specified ID(s)" msgstr "" #: 
src/services/a-rex/grid-manager/gm_jobs.cpp:180 msgid "print main delegation token of specified Job ID(s)" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:181 msgid "job id" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:185 msgid "" "output requested elements (jobs list, delegation ids and tokens) to file" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:186 msgid "file name" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:209 #, c-format msgid "Using configuration at %s" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:232 #, c-format msgid "Failed to open output file '%s'" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:241 msgid "Looking for current jobs" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:278 #, c-format msgid "Job: %s : ERROR : Unrecognizable state" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:287 #, c-format msgid "Job: %s : ERROR : No local information." msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:461 #, c-format msgid "Job: %s : ERROR : Failed to put cancel mark" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:465 #, c-format msgid "Job: %s : Cancel request put but failed to communicate to service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:467 #, c-format msgid "Job: %s : Cancel request put and communicated to service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:478 #, c-format msgid "Job: %s : ERROR : Failed to put clean mark" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:482 #, c-format msgid "Job: %s : Clean request put but failed to communicate to service" msgstr "" #: src/services/a-rex/grid-manager/gm_jobs.cpp:484 #, c-format msgid "Job: %s : Clean request put and communicated to service" msgstr "" #: src/services/a-rex/grid-manager/gm_kick.cpp:18 msgid "" "gm-kick wakes up the A-REX corresponding to the given control file. If no " "file is given it uses the control directory found in the configuration file." msgstr "" #: src/services/a-rex/grid-manager/gm_kick.cpp:29 msgid "inform about changes in particular job (can be used multiple times)" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:39 #, c-format msgid "Failed to acquire source: %s" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:44 #, c-format msgid "Failed to resolve %s" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:61 #, c-format msgid "Failed to check %s" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:75 msgid "job_description_file [proxy_file]" msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:76 msgid "" "inputcheck checks that input files specified in the job description are " "available and accessible using the credentials in the given proxy file." 
msgstr "" #: src/services/a-rex/grid-manager/inputcheck.cpp:88 msgid "Wrong number of arguments given" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:143 #, c-format msgid "Unsupported value for allownew: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:154 msgid "Wrong number in maxjobdesc" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:162 #: src/services/gridftpd/fileplugin/fileplugin.cpp:186 #, c-format msgid "Unsupported configuration command: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:171 #, c-format msgid "Mapped user:group (%s:%s) not found" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:174 msgid "Job submission user can't be root" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:177 msgid "Failed processing A-REX configuration" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:201 msgid "This user is denied to submit new jobs." msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:206 msgid "No control or session directories defined in configuration" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:210 #, c-format msgid "Job submission user: %s (%i:%i)" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:213 msgid "Job plugin was not initialised" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:231 msgid "No delegated credentials were passed" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:305 #, c-format msgid "Cancelling job %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:360 #, c-format msgid "Cleaning job %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:400 msgid "Request to open file with storing in progress" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:434 #: src/services/gridftpd/fileplugin/fileplugin.cpp:344 #, c-format msgid "Retrieving file %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:484 #, c-format msgid "Accepting submission of new job or modification request: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:506 #: src/services/gridftpd/fileplugin/fileplugin.cpp:384 #: src/services/gridftpd/fileplugin/fileplugin.cpp:421 #, c-format msgid "Storing file %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:527 #, c-format msgid "Unknown open mode %i" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:653 #, c-format msgid "action(%s) != request" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:704 msgid "Failed writing job description" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:920 msgid "Failed writing local description" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:930 msgid "Failed writing ACL" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:946 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:953 #: src/services/a-rex/job.cpp:819 #, c-format msgid "Failed to run external plugin: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:957 #: src/services/a-rex/job.cpp:823 #, c-format msgid "Plugin response: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:959 msgid "Failed to run external plugin" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:969 #, c-format msgid "Failed to create session directory 
%s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:979 msgid "Failed writing status" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:993 #, c-format msgid "Failed to lock delegated credentials: %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1235 #, c-format msgid "Renewing proxy for job %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1251 #, c-format msgid "New proxy expires at %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1254 msgid "Failed to write 'local' information" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1257 msgid "Failed to renew proxy" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1260 msgid "New proxy expiry time is not later than old proxy, not renewing proxy" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1302 #, c-format msgid "Checking file %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1349 msgid "ID contains forbidden characters" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1383 #: src/services/a-rex/job.cpp:1023 #, c-format msgid "Failed to create file in %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1393 msgid "Out of tries while allocating new job ID" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1473 #, c-format msgid "Failed to read job's local description for job %s from %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1562 msgid "No non-draining session directories available" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1568 #, c-format msgid "Using control directory %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1569 #, c-format msgid "Using session directory %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:26 #, c-format msgid "Failed to read job's ACL for job %s from %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:70 #, c-format msgid "Failed to parse user policy for job %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:75 #, c-format msgid "Failed to load policy evaluator for policy of job %s" msgstr "" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:129 #, c-format msgid "Unknown ACL policy %s for job %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:73 #, c-format msgid "" "DTR Generator waiting to process: %d jobs to cancel, %d DTRs, %d new jobs" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:87 #, c-format msgid "%s: Job cancel request from DTR generator to scheduler" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:92 #, c-format msgid "%s: Returning canceled job from DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:133 #, c-format msgid "%s: Re-requesting attention from DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:143 #, c-format msgid "DTR Generator processed: %d jobs to cancel, %d DTRs, %d new jobs" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:162 msgid "Exiting Generator thread" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:234 msgid "Shutting down data staging threads" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:244 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:257 #: 
src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:285 msgid "DTRGenerator is not running!" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:247 #, c-format msgid "Received DTR %s during Generator shutdown - may not be processed" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:261 msgid "DTRGenerator was sent null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:270 #, c-format msgid "%s: Received job in DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:273 #, c-format msgid "%s: Failed to receive job in DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:280 msgid "DTRGenerator got request to cancel null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:295 msgid "DTRGenerator is queried about null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:325 msgid "DTRGenerator is asked about null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:353 msgid "DTRGenerator is requested to remove null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:360 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:368 #, c-format msgid "%s: Trying to remove job from data staging which is still active" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:376 #, c-format msgid "%s: Trying to remove job from data staging which does not exist" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:387 #, c-format msgid "%s: Invalid DTR" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:404 #, c-format msgid "%s: Received DTR %s to copy file %s in state %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:408 #, c-format msgid "%s: Received DTR belongs to inactive job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:425 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1067 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:459 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:517 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:631 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:841 #, c-format msgid "%s: Failed reading local information" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:434 #, c-format msgid "%s: DTR %s to copy file %s failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:440 #, c-format msgid "%s: Cancelling other DTRs" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:450 #, c-format msgid "%s: DTR %s to copy to %s failed but is not mandatory" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:460 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:722 #, c-format msgid "%s: Failed to read list of output files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:474 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:615 #, c-format msgid "%s: Failed to read dynamic output files in %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:476 #, c-format msgid "%s: Going through files in list %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:480 #, c-format msgid "%s: Removing %s from dynamic output file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:484 #, c-format msgid "%s: Failed to write back dynamic output files in %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:500 #, c-format msgid "%s: Failed to
write list of output files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:504 #, c-format msgid "%s: Failed to write list of output status files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:516 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:734 #, c-format msgid "%s: Failed to read list of input files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:535 #, c-format msgid "%s: Failed to write list of input files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:547 #, c-format msgid "%s: Received DTR with two remote endpoints!" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:559 #: src/services/candypond/CandyPondGenerator.cpp:105 #, c-format msgid "No active job id %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:603 #, c-format msgid "%s: Failed to read list of output files, can't clean up session dir" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:629 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:648 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:772 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:895 #, c-format msgid "%s: Failed to clean up session dir" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:639 #, c-format msgid "%s: Failed to read list of input files, can't clean up session dir" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:661 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:665 msgid "uploads" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:661 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:665 msgid "downloads" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:662 msgid "cancelled" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:662 msgid "finished" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:660 #, c-format msgid "%s: All %s %s successfully" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:664 #, c-format msgid "%s: Some %s failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:668 #, c-format msgid "%s: Requesting attention from DTR generator" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:679 msgid "DTRGenerator is requested to process null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:685 msgid "download" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:685 msgid "upload" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:684 #, c-format msgid "%s: Received data staging request to %s files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:743 #, c-format msgid "%s: Duplicate file in list of input files: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:792 #, c-format msgid "%s: Reading output files from user generated list in %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:794 #, c-format msgid "%s: Error reading user generated output file list in %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:821 #, c-format msgid "%s: Failed to list output directory %s: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:839 #, c-format msgid "%s: Adding new output file %s: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:861 #, c-format msgid "%s: Two identical output destinations: %s" msgstr "" #: 
src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:874 #, c-format msgid "%s: Cannot upload two different files %s and %s to same LFN: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:906 #, c-format msgid "%s: Received job in a bad state: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:914 #, c-format msgid "%s: Session directory processing takes too long - %u.%06u seconds" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:981 #, c-format msgid "" "%s: Destination file %s was possibly left unfinished from previous A-REX " "run, will overwrite" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1073 #, c-format msgid "%s: Failed writing local information" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1091 #, c-format msgid "%s: Cancelling active DTRs" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1098 msgid "DTRGenerator is asked to check files for null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1118 #, c-format msgid "%s: Can't read list of input files" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1133 #, c-format msgid "%s: Checking user uploadable file: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1138 #, c-format msgid "%s: User has uploaded file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1145 #, c-format msgid "%s: Failed writing changed input file." msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1149 #, c-format msgid "%s: Critical error for uploadable file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1155 #, c-format msgid "%s: User has NOT uploaded file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1167 #, c-format msgid "%s: Uploadable files timed out" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1223 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1249 #, c-format msgid "%s: Can't convert checksum %s to int for %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1230 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1244 #, c-format msgid "%s: Can't convert filesize %s to int for %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1239 #, c-format msgid "%s: Invalid size/checksum information (%s) for %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1261 #, c-format msgid "%s: Invalid file: %s is too big." msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1277 #, c-format msgid "%s: Failed to switch user ID to %d/%d to read file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1283 #, c-format msgid "%s: Failed to open file %s for reading" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1291 #, c-format msgid "%s: Error accessing file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1303 #, c-format msgid "%s: Error reading file %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1318 #, c-format msgid "%s: File %s has wrong checksum: %llu. Expected %lli" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1324 #, c-format msgid "%s: Checksum %llu verified for %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1336 msgid "" "Found unfinished DTR transfers. 
It is possible the previous A-REX process " "did not shut down normally" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1343 #, c-format msgid "Found DTR %s for file %s left in transferring state from previous run" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1352 msgid "DTRGenerator is requested to clean links for null job" msgstr "" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1368 #, c-format msgid "%s: Cache cleaning takes too long - %u.%06u seconds" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:108 #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:190 #, c-format msgid "%s: Job monitoring counter is broken" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:115 #, c-format msgid "%s: Job monitoring is unintentionally lost" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:124 #, c-format msgid "%s: Job monitoring stop success" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:129 #, c-format msgid "" "%s: Job monitoring stop requested with %u active references and %s queue " "associated" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:131 #, c-format msgid "%s: Job monitoring stop requested with %u active references" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:195 #, c-format msgid "%s: Job monitoring is lost due to removal from queue" msgstr "" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:278 #, c-format msgid "%s: PushSorted failed to find job where expected" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:132 #, c-format msgid "Replacing queue '%s' with '%s'" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:226 #, c-format msgid "Bad name for stdout: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:234 #, c-format msgid "Bad name for stderr: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:297 #, c-format msgid "Bad name for runtime environment: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:342 msgid "Job description file could not be read." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:393 #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:407 #, c-format msgid "Bad name for executable: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:89 msgid "Failed to start data staging threads" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:190 #, c-format msgid "" "%s: Failed reading .local and changing state, job and A-REX may be left in " "an inconsistent state" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:195 #, c-format msgid "%s: unexpected failed job add request: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:206 #, c-format msgid "%s: unexpected job add request: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:259 #, c-format msgid "%s: job for attention" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:269 msgid "all for attention" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:286 #, c-format msgid "%s: job found while scanning" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:314 #, c-format msgid "%s: job will wait for external process" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:331 #, c-format msgid "%s: job assigned for slow polling" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:349 #, c-format msgid "%s: job being processed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:384 #, c-format msgid "Current jobs in system (PREPARING to FINISHING) per-DN (%i entries)" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:386 #, c-format msgid "%s: %i" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:398 #, c-format msgid "%s: Failed storing failure reason: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:404 #, c-format msgid "%s: Failed reading job description: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:416 #, c-format msgid "%s: Failed parsing job request." msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:451 #, c-format msgid "%s: Failed writing list of output files: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:477 #, c-format msgid "%s: Failed obtaining lrms id" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:491 #, c-format msgid "%s: Failed writing local information: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:523 #, c-format msgid "%s: Failed creating grami file" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:527 #, c-format msgid "%s: Failed setting executable permissions" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:535 #, c-format msgid "%s: state SUBMIT: starting child: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:542 #, c-format msgid "%s: Failed running submission process" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:547 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:654 #, c-format msgid "%s: LRMS scripts limit of %u is reached - suspending submit/cancel" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:563 #, c-format msgid "" "%s: Job submission to LRMS takes too long, but ID is already obtained. " "Pretending submission is done." msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:570 #, c-format msgid "%s: Job submission to LRMS takes too long. Failing." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:579 #, c-format msgid "%s: state SUBMIT: child exited with code %i" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:584 #, c-format msgid "%s: Job submission to LRMS failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:605 #, c-format msgid "%s: state CANCELING: timeout waiting for cancellation" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:611 #, c-format msgid "%s: state CANCELING: job diagnostics collected" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:639 #, c-format msgid "%s: state CANCELING: starting child: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:641 #, c-format msgid "%s: Job has completed already. No action taken to cancel" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:649 #, c-format msgid "%s: Failed running cancellation process" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:668 #, c-format msgid "" "%s: Job cancellation takes too long, but diagnostic collection seems to be " "done. Pretending cancellation succeeded." msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:674 #, c-format msgid "%s: Job cancellation takes too long. Failing." msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:684 #, c-format msgid "%s: state CANCELING: child exited with code %i" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:690 #, c-format msgid "%s: Failed to cancel running job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:709 #, c-format msgid "%s: State: %s: data staging finished" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:744 #, c-format msgid "%s: State: %s: still in data staging" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:757 #, c-format msgid "%s: Job is not allowed to be rerun anymore" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:767 #, c-format msgid "%s: Job failed in unknown state. Won't rerun." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:788 #, c-format msgid "%s: Reprocessing job description failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:795 #, c-format msgid "%s: Failed to read reprocessed list of output files" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:799 #, c-format msgid "%s: Failed to read reprocessed list of input files" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:883 #, c-format msgid "%s: Reading status of new job failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:896 #, c-format msgid "%s: State: ACCEPTED: parsing job description" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:898 #, c-format msgid "%s: Processing job description failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:936 #, c-format msgid "%s: new job is accepted" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:948 #, c-format msgid "%s: %s: New job belongs to %i/%i" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:953 #, c-format msgid "%s: old job is accepted" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:964 #, c-format msgid "%s: State: ACCEPTED" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:970 #, c-format msgid "%s: State: ACCEPTED: dryrun" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:993 #, c-format msgid "%s: State: ACCEPTED: has process time %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:999 #, c-format msgid "%s: State: ACCEPTED: moving to PREPARING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1015 #, c-format msgid "%s: State: PREPARING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1022 #, c-format msgid "%s: Failed obtaining local job information." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1075 #, c-format msgid "%s: State: SUBMIT" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1095 #, c-format msgid "%s: State: CANCELING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1115 #, c-format msgid "%s: State: INLRMS" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1120 #, c-format msgid "%s: State: INLRMS - checking for pending(%u) and mark" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1122 #, c-format msgid "%s: State: INLRMS - checking for not pending" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1124 #, c-format msgid "%s: Job finished" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1128 #, c-format msgid "%s: State: INLRMS: exit message is %i %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1141 #, c-format msgid "%s: State: INLRMS - no mark found" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1153 #, c-format msgid "%s: State: FINISHING" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1174 #, c-format msgid "%s: Job is requested to clean - deleting" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1191 #, c-format msgid "%s: restarted PREPARING job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1207 #, c-format msgid "%s: restarted INLRMS job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1216 #, c-format msgid "%s: restarted FINISHING job" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1221 #, c-format msgid "%s: Can't rerun on request" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1223 #, c-format msgid "%s: Can't rerun on request - not a suitable state" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1234 #, c-format msgid "%s: Job is too old - deleting" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1279 #, c-format msgid "%s: Job is ancient - delete rest of information" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1297 #, c-format msgid "%s: Canceling job because of user request" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1311 #, c-format msgid "%s: Failed to turn job into failed during cancel processing." 
msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1343 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1351 #, c-format msgid "%s: Plugin at state %s : %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1357 #, c-format msgid "%s: Plugin execution failed" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1464 #, c-format msgid "%s: State: %s from %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1515 #, c-format msgid "Failed to get DN information from .local file for job %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1542 #, c-format msgid "%s: Delete request due to internal problems" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1577 #, c-format msgid "%s: Job failure detected" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1630 #, c-format msgid "Failed to move file %s to %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1638 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1748 #, c-format msgid "Failed reading control directory: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1708 #, c-format msgid "Failed reading control directory: %s: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2022 #, c-format msgid "Helper process start failed: %s" msgstr "" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2029 #, c-format msgid "Stopping helper process %s" msgstr "" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:61 #, c-format msgid "Error with hearbeatfile: %s" msgstr "" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:73 #: src/services/a-rex/grid-manager/log/JobsMetrics.cpp:139 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:136 #, c-format msgid ": Metrics tool returned error code %i: %s" msgstr "" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:107 #: src/services/a-rex/grid-manager/log/JobsMetrics.cpp:186 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:178 msgid "" "gmetric_bin_path empty in arc.conf (should never happen the default value " "should be used)" msgstr "" #: src/services/a-rex/grid-manager/log/JobLog.cpp:114 msgid ": Accounting records reporter tool is not specified" msgstr "" #: src/services/a-rex/grid-manager/log/JobLog.cpp:130 msgid ": Failure creating slot for accounting reporter child process" msgstr "" #: src/services/a-rex/grid-manager/log/JobLog.cpp:143 msgid ": Failure starting accounting reporter child process" msgstr "" #: src/services/a-rex/grid-manager/log/JobLog.cpp:176 msgid ": Failure creating accounting database connection" msgstr "" #: src/services/a-rex/grid-manager/log/JobLog.cpp:202 #, c-format msgid ": writing accounting record took %llu ms" msgstr "" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:74 #, c-format msgid "Session dir '%s' contains user specific substitutions - skipping it" msgstr "" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:86 #, c-format msgid "Sessiondir %s: Free space %f GB" msgstr "" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:94 msgid "No session directories found in configuration." msgstr "" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:125 msgid "No cachedirs found/configured for calculation of free space." 
msgstr "" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:29 msgid "Failed reading local information" msgstr "" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:79 #, c-format msgid "Running mailer command (%s)" msgstr "" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:81 msgid "Failed running mailer" msgstr "" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:33 #, c-format msgid "%s: Job's helper exited" msgstr "" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:70 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:24 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:56 #, c-format msgid "%s: Failure creating slot for child process" msgstr "" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:119 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:41 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:73 #, c-format msgid "%s: Failure starting child process" msgstr "" #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:30 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:62 #, c-format msgid "%s: Failure creating data storage for child process" msgstr "" #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:46 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:78 #, c-format msgid "%s: Failure waiting for child process to finish" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:46 msgid "[job description input]" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:47 msgid "" "Tool for writing the grami file representation of a job description file." msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:51 msgid "Name of grami file" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:56 msgid "Configuration file to load" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:57 msgid "arc.conf" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:61 msgid "Session directory to use" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:62 msgid "directory" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:78 msgid "No job description file name provided." msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:84 #, c-format msgid "Unable to parse job description input: %s" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:90 msgid "Unable to load ARC configuration file." 
msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:107 #, c-format msgid "Unable to write grami file: %s" msgstr "" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:112 #, c-format msgid "Unable to write 'output' file: %s" msgstr "" #: src/services/a-rex/information_collector.cpp:53 #, c-format msgid "Resource information provider: %s" msgstr "" #: src/services/a-rex/information_collector.cpp:56 msgid "Resource information provider failed to start" msgstr "" #: src/services/a-rex/information_collector.cpp:59 msgid "Resource information provider failed to run" msgstr "" #: src/services/a-rex/information_collector.cpp:63 #, c-format msgid "" "Resource information provider failed with exit status: %i\n" "%s" msgstr "" #: src/services/a-rex/information_collector.cpp:65 #, c-format msgid "" "Resource information provider log:\n" "%s" msgstr "" #: src/services/a-rex/information_collector.cpp:71 msgid "No new informational document assigned" msgstr "" #: src/services/a-rex/information_collector.cpp:73 #, c-format msgid "Obtained XML: %s" msgstr "" #: src/services/a-rex/information_collector.cpp:87 msgid "Informational document is empty" msgstr "" #: src/services/a-rex/information_collector.cpp:212 msgid "OptimizedInformationContainer failed to create temporary file" msgstr "" #: src/services/a-rex/information_collector.cpp:215 #, c-format msgid "OptimizedInformationContainer created temporary file: %s" msgstr "" #: src/services/a-rex/information_collector.cpp:221 msgid "" "OptimizedInformationContainer failed to store XML document to temporary file" msgstr "" #: src/services/a-rex/information_collector.cpp:230 msgid "OptimizedInformationContainer failed to parse XML" msgstr "" #: src/services/a-rex/information_collector.cpp:242 msgid "OptimizedInformationContainer failed to rename temprary file" msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:36 msgid "Default INTERNAL client contructor" msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:39 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:59 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:81 msgid "Failed to load grid-manager configfile" msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:44 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:64 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:86 msgid "Failed to set INTERNAL endpoint" msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:129 msgid "Failed to identify grid-manager config file" msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:148 #, c-format msgid "Failed to run configuration parser at %s." msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:152 #, c-format msgid "Parser failed with error code %i." msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:158 #, c-format msgid "No pid file is found at '%s'. Probably A-REX is not running." 
msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:173 #, c-format msgid "Failed to load grid-manager config file from %s" msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:257 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:363 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:396 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:442 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:496 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:548 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:566 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:616 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:646 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:664 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:682 msgid "INTERNALClient is not initialized" msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:447 msgid "Submitting job " msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:508 #, c-format msgid "Failed to copy input file: %s to path: %s" msgstr "" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:514 #, c-format msgid "Failed to set permissions on: %s" msgstr "" #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:51 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:92 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:119 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:145 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:184 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:246 msgid "Failed to load grid-manager config file" msgstr "" #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:324 msgid "Retrieving job description of INTERNAL jobs is not supported" msgstr "" #: src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp:67 #, c-format msgid "Listing localjobs succeeded, %d localjobs found" msgstr "" #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:130 msgid "Failed submitting job description" msgstr "" #: src/services/a-rex/job.cpp:66 #, c-format msgid "Using cached local account '%s'" msgstr "" #: src/services/a-rex/job.cpp:77 msgid "Will not map to 'root' account by default" msgstr "" #: src/services/a-rex/job.cpp:90 msgid "No local account name specified" msgstr "" #: src/services/a-rex/job.cpp:93 #, c-format msgid "Using local account '%s'" msgstr "" #: src/services/a-rex/job.cpp:97 msgid "TLS provides no identity, going for OTokens" msgstr "" #: src/services/a-rex/job.cpp:155 msgid "Failed to acquire A-REX's configuration" msgstr "" #: src/services/a-rex/job.cpp:227 #, c-format msgid "Cannot handle local user %s" msgstr "" #: src/services/a-rex/job.cpp:275 #, c-format msgid "%s: Failed to parse user policy" msgstr "" #: src/services/a-rex/job.cpp:280 #, c-format msgid "%s: Failed to load evaluator for user policy " msgstr "" #: src/services/a-rex/job.cpp:385 #, c-format msgid "%s: Unknown user policy '%s'" msgstr "" #: src/services/a-rex/job.cpp:707 src/services/a-rex/job.cpp:731 #, c-format msgid "Credential expires at %s" msgstr "" #: src/services/a-rex/job.cpp:709 src/services/a-rex/job.cpp:733 #, c-format msgid "Credential handling exception: %s" msgstr "" #: src/services/a-rex/job.cpp:1031 #, c-format msgid "Out of tries while allocating new job ID in %s" msgstr "" #: src/services/a-rex/job.cpp:1270 msgid "No 
non-draining session dirs available" msgstr "" #: src/services/a-rex/put.cpp:150 #, c-format msgid "%s: put file %s: there is no payload" msgstr "" #: src/services/a-rex/put.cpp:156 #, c-format msgid "%s: put file %s: unrecognized payload" msgstr "" #: src/services/a-rex/put.cpp:172 src/services/a-rex/rest/rest.cpp:1595 #, c-format msgid "%s: put file %s: failed to create file: %s" msgstr "" #: src/services/a-rex/put.cpp:188 #, c-format msgid "%s: put file %s: %s" msgstr "" #: src/services/a-rex/put.cpp:210 #, c-format msgid "%s: delete file %s: failed to obtain file path: %s" msgstr "" #: src/services/a-rex/put.cpp:221 #, c-format msgid "%s: delete file %s: failed to open file/dir: %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:683 #, c-format msgid "REST: process %s at %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:726 src/services/a-rex/rest/rest.cpp:742 #: src/services/a-rex/rest/rest.cpp:797 src/services/a-rex/rest/rest.cpp:876 #: src/services/a-rex/rest/rest.cpp:1104 src/services/a-rex/rest/rest.cpp:1696 #, c-format msgid "process: method %s is not supported for subpath %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:748 #, c-format msgid "process: schema %s is not supported for subpath %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:873 src/services/a-rex/rest/rest.cpp:1101 #, c-format msgid "process: action %s is not supported for subpath %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1113 src/services/a-rex/rest/rest.cpp:1182 #: src/services/a-rex/rest/rest.cpp:1542 src/services/a-rex/rest/rest.cpp:1685 #, c-format msgid "REST:GET job %s - %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1229 src/services/a-rex/rest/rest.cpp:1237 #, c-format msgid "REST:KILL job %s - %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1254 src/services/a-rex/rest/rest.cpp:1262 #, c-format msgid "REST:CLEAN job %s - %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1279 src/services/a-rex/rest/rest.cpp:1287 #: src/services/a-rex/rest/rest.cpp:1304 #, c-format msgid "REST:RESTART job %s - %s" msgstr "" #: src/services/a-rex/rest/rest.cpp:1588 #, c-format msgid "REST:PUT job %s: file %s: there is no payload" msgstr "" #: src/services/a-rex/rest/rest.cpp:1608 #, c-format msgid "HTTP:PUT %s: put file %s: %s" msgstr "" #: src/services/a-rex/test_cache_check.cpp:24 #: src/tests/count/test_client.cpp:20 #: src/tests/echo/echo_test4axis2c/test_client.cpp:20 #: src/tests/echo/test_client.cpp:21 msgid "Creating client side chain" msgstr "" #: src/services/a-rex/update_credentials.cpp:29 #, c-format msgid "" "UpdateCredentials: request = \n" "%s" msgstr "" #: src/services/a-rex/update_credentials.cpp:35 msgid "UpdateCredentials: missing Reference" msgstr "" #: src/services/a-rex/update_credentials.cpp:43 msgid "UpdateCredentials: wrong number of Reference" msgstr "" #: src/services/a-rex/update_credentials.cpp:51 msgid "UpdateCredentials: wrong number of elements inside Reference" msgstr "" #: src/services/a-rex/update_credentials.cpp:60 msgid "UpdateCredentials: EPR contains no JobID" msgstr "" #: src/services/a-rex/update_credentials.cpp:70 #, c-format msgid "UpdateCredentials: no job found: %s" msgstr "" #: src/services/a-rex/update_credentials.cpp:77 msgid "UpdateCredentials: failed to update credentials" msgstr "" #: src/services/a-rex/update_credentials.cpp:85 #, c-format msgid "" "UpdateCredentials: response = \n" "%s" msgstr "" #: src/services/candypond/CandyPond.cpp:52 msgid "No A-REX config file found in candypond configuration" msgstr "" #: src/services/candypond/CandyPond.cpp:56 #, 
c-format msgid "Using A-REX config file %s" msgstr "" #: src/services/candypond/CandyPond.cpp:60 #, c-format msgid "Failed to process A-REX configuration in %s" msgstr "" #: src/services/candypond/CandyPond.cpp:65 msgid "No caches defined in configuration" msgstr "" #: src/services/candypond/CandyPond.cpp:150 msgid "Empty filename returned from FileCache" msgstr "" #: src/services/candypond/CandyPond.cpp:162 #, c-format msgid "Problem accessing cache file %s: %s" msgstr "" #: src/services/candypond/CandyPond.cpp:210 #: src/services/candypond/CandyPond.cpp:474 msgid "No job ID supplied" msgstr "" #: src/services/candypond/CandyPond.cpp:219 #, c-format msgid "Bad number in priority element: %s" msgstr "" #: src/services/candypond/CandyPond.cpp:228 msgid "No username supplied" msgstr "" #: src/services/candypond/CandyPond.cpp:235 #, c-format msgid "Supplied username %s does not match mapped username %s" msgstr "" #: src/services/candypond/CandyPond.cpp:249 msgid "No session directory found" msgstr "" #: src/services/candypond/CandyPond.cpp:253 #, c-format msgid "Using session dir %s" msgstr "" #: src/services/candypond/CandyPond.cpp:257 #, c-format msgid "Failed to stat session dir %s" msgstr "" #: src/services/candypond/CandyPond.cpp:262 #, c-format msgid "Session dir %s is owned by %i, but current mapped user is %i" msgstr "" #: src/services/candypond/CandyPond.cpp:289 #, c-format msgid "Failed to access proxy of given job id %s at %s" msgstr "" #: src/services/candypond/CandyPond.cpp:307 #, c-format msgid "DN is %s" msgstr "" #: src/services/candypond/CandyPond.cpp:385 #, c-format msgid "Permission checking passed for url %s" msgstr "" #: src/services/candypond/CandyPond.cpp:410 #: src/services/candypond/CandyPondGenerator.cpp:135 #, c-format msgid "Failed to move %s to %s: %s" msgstr "" #: src/services/candypond/CandyPond.cpp:441 #, c-format msgid "Starting new DTR for %s" msgstr "" #: src/services/candypond/CandyPond.cpp:443 #, c-format msgid "Failed to start new DTR for %s" msgstr "" #: src/services/candypond/CandyPond.cpp:487 #, c-format msgid "Job %s: all files downloaded successfully" msgstr "" #: src/services/candypond/CandyPond.cpp:494 #, c-format msgid "Job %s: Some downloads failed" msgstr "" #: src/services/candypond/CandyPond.cpp:499 #, c-format msgid "Job %s: files still downloading" msgstr "" #: src/services/candypond/CandyPond.cpp:511 msgid "CandyPond: Unauthorized" msgstr "" #: src/services/candypond/CandyPond.cpp:520 msgid "No local user mapping found" msgstr "" #: src/services/candypond/CandyPond.cpp:527 #: src/services/data-staging/DataDeliveryService.cpp:625 #, c-format msgid "Identity is %s" msgstr "" #: src/services/candypond/CandyPond.cpp:585 #: src/services/data-staging/DataDeliveryService.cpp:697 msgid "Security Handlers processing failed" msgstr "" #: src/services/candypond/CandyPond.cpp:592 msgid "Only POST is supported in CandyPond" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:88 #, c-format msgid "DTR %s finished with state %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:124 #, c-format msgid "Could not determine session directory from filename %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:164 #, c-format msgid "Invalid DTR for source %s, destination %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:206 #, c-format msgid "DTRs still running for job %s" msgstr "" #: src/services/candypond/CandyPondGenerator.cpp:215 #, c-format msgid "All DTRs finished for job %s" msgstr "" #: 
src/services/candypond/CandyPondGenerator.cpp:222 #, c-format msgid "Job %s not found" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:58 #, c-format msgid "Archiving DTR %s, state ERROR" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:62 #, c-format msgid "Archiving DTR %s, state %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:164 msgid "No delegation token in request" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:172 msgid "Failed to accept delegation" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:201 #: src/services/data-staging/DataDeliveryService.cpp:208 msgid "ErrorDescription" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:213 #, c-format msgid "All %u process slots used" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:228 #, c-format msgid "Received retry for DTR %s still in transfer" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:235 #, c-format msgid "Replacing DTR %s in state %s with new request" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:245 #, c-format msgid "Storing temp proxy at %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:253 #, c-format msgid "Failed to create temp proxy at %s: %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:260 #, c-format msgid "Failed to change owner of temp proxy at %s to %i:%i: %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:285 msgid "Invalid DTR" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:289 #, c-format msgid "Failed to remove temporary proxy %s: %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:390 #, c-format msgid "No such DTR %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:401 #, c-format msgid "DTR %s failed: %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:412 #, c-format msgid "DTR %s finished successfully" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:422 #, c-format msgid "DTR %s still in progress (%lluB transferred)" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:482 #, c-format msgid "No active DTR %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:492 #, c-format msgid "DTR %s was already cancelled" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:501 #, c-format msgid "DTR %s could not be cancelled" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:545 #, c-format msgid "Failed to get load average: %s" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:569 msgid "Invalid configuration - no allowed IP address specified" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:573 msgid "Invalid configuration - no transfer dirs specified" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:584 msgid "Failed to start archival thread" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:609 msgid "Shutting down data delivery service" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:618 msgid "Unauthorized" msgstr "" #: src/services/data-staging/DataDeliveryService.cpp:704 msgid "Only POST is supported in DataDeliveryService" msgstr "" #: src/services/examples/echo_python/EchoService.py:12 msgid "EchoService (python) constructor called" msgstr "" #: src/services/examples/echo_python/EchoService.py:17 #, python-format msgid "EchoService (python) has prefix %(prefix)s and suffix %(suffix)s" msgstr "" #: 
src/services/examples/echo_python/EchoService.py:24 msgid "EchoService (python) destructor called" msgstr "" #: src/services/examples/echo_python/EchoService.py:54 msgid "EchoService (python) thread test starting" msgstr "" #: src/services/examples/echo_python/EchoService.py:65 #, python-format msgid "EchoService (python) thread test, iteration %(iteration)s %(status)s" msgstr "" #: src/services/examples/echo_python/EchoService.py:82 msgid "EchoService (python) 'Process' called" msgstr "" #: src/services/examples/echo_python/EchoService.py:86 #, python-format msgid "inmsg.Auth().Export(arc.SecAttr.ARCAuth) = %s" msgstr "" #: src/services/examples/echo_python/EchoService.py:87 #, python-format msgid "inmsg.Attributes().getAll() = %s " msgstr "" #: src/services/examples/echo_python/EchoService.py:88 #, python-format msgid "EchoService (python) got: %s " msgstr "" #: src/services/examples/echo_python/EchoService.py:93 #, python-format msgid "EchoService (python) request_namespace: %s" msgstr "" #: src/services/examples/echo_python/EchoService.py:99 #: src/services/examples/echo_python/EchoService.py:171 #, python-format msgid "outpayload %s" msgstr "" #: src/services/examples/echo_python/EchoService.py:128 msgid "Calling https://localhost:60000/Echo using ClientSOAP" msgstr "" #: src/services/examples/echo_python/EchoService.py:131 msgid "Calling http://localhost:60000/Echo using ClientSOAP" msgstr "" #: src/services/examples/echo_python/EchoService.py:137 #: src/services/examples/echo_python/EchoService.py:155 #, python-format msgid "new_payload %s" msgstr "" #: src/services/examples/echo_python/EchoService.py:149 msgid "Calling http://localhost:60000/Echo using httplib" msgstr "" #: src/services/examples/echo_python/EchoService.py:165 msgid "Start waiting 10 sec..." msgstr "" #: src/services/examples/echo_python/EchoService.py:167 msgid "Waiting ends." 
msgstr "" #: src/services/gridftpd/auth/auth.cpp:328 #, c-format msgid "Unknown authorization command %s" msgstr "" #: src/services/gridftpd/auth/auth.cpp:347 #, c-format msgid "" "The [vo] section labeled '%s' has no file associated and can't be used for " "matching" msgstr "" #: src/services/gridftpd/auth/auth_plugin.cpp:73 #: src/services/gridftpd/auth/unixmap.cpp:217 #, c-format msgid "Plugin %s failed to run" msgstr "" #: src/services/gridftpd/auth/auth_plugin.cpp:75 #: src/services/gridftpd/auth/unixmap.cpp:219 #, c-format msgid "Plugin %s printed: %u" msgstr "" #: src/services/gridftpd/auth/auth_plugin.cpp:76 #: src/services/gridftpd/auth/unixmap.cpp:220 #, c-format msgid "Plugin %s error: %u" msgstr "" #: src/services/gridftpd/auth/auth_voms.cpp:28 #, c-format msgid "VOMS proxy processing returns: %i - %s" msgstr "" #: src/services/gridftpd/auth/auth_voms.cpp:120 #, c-format msgid "VOMS trust chains: %s" msgstr "" #: src/services/gridftpd/auth/unixmap.cpp:126 msgid "User name mapping has empty command" msgstr "" #: src/services/gridftpd/auth/unixmap.cpp:154 #, c-format msgid "User name mapping has empty name: %s" msgstr "" #: src/services/gridftpd/commands.cpp:46 #, c-format msgid "response: %s" msgstr "" #: src/services/gridftpd/commands.cpp:50 #, c-format msgid "Send response failed: %s" msgstr "" #: src/services/gridftpd/commands.cpp:80 msgid "Response sending error" msgstr "" #: src/services/gridftpd/commands.cpp:93 msgid "Closed connection" msgstr "" #: src/services/gridftpd/commands.cpp:131 #, c-format msgid "Socket conversion failed: %s" msgstr "" #: src/services/gridftpd/commands.cpp:141 #, c-format msgid "Failed to obtain own address: %s" msgstr "" #: src/services/gridftpd/commands.cpp:149 #, c-format msgid "Failed to recognize own address type (IPv4 or IPv6) - %u" msgstr "" #: src/services/gridftpd/commands.cpp:159 #, c-format msgid "Accepted connection on [%s]:%u" msgstr "" #: src/services/gridftpd/commands.cpp:161 #, c-format msgid "Accepted connection on %u.%u.%u.%u:%u" msgstr "" #: src/services/gridftpd/commands.cpp:196 msgid "Accept failed" msgstr "" #: src/services/gridftpd/commands.cpp:204 #: src/services/gridftpd/listener.cpp:415 #, c-format msgid "Accept failed: %s" msgstr "" #: src/services/gridftpd/commands.cpp:219 #, c-format msgid "Accepted connection from [%s]:%u" msgstr "" #: src/services/gridftpd/commands.cpp:221 #, c-format msgid "Accepted connection from %u.%u.%u.%u:%u" msgstr "" #: src/services/gridftpd/commands.cpp:230 msgid "Authenticate in commands failed" msgstr "" #: src/services/gridftpd/commands.cpp:239 msgid "Authentication failure" msgstr "" #: src/services/gridftpd/commands.cpp:247 #, c-format msgid "User subject: %s" msgstr "" #: src/services/gridftpd/commands.cpp:248 #, c-format msgid "Encrypted: %s" msgstr "" #: src/services/gridftpd/commands.cpp:254 msgid "User has no proper configuration associated" msgstr "" #: src/services/gridftpd/commands.cpp:262 msgid "" "User has empty virtual directory tree.\n" "Either user has no authorised plugins or there are no plugins configured at " "all." 
msgstr "" #: src/services/gridftpd/commands.cpp:279 msgid "Read commands in authenticate failed" msgstr "" #: src/services/gridftpd/commands.cpp:411 msgid "Control connection (probably) closed" msgstr "" #: src/services/gridftpd/commands.cpp:445 #: src/services/gridftpd/commands.cpp:724 msgid "Command EPRT" msgstr "" #: src/services/gridftpd/commands.cpp:446 #, c-format msgid "Failed to parse remote address %s" msgstr "" #: src/services/gridftpd/commands.cpp:468 #, c-format msgid "Command USER %s" msgstr "" #: src/services/gridftpd/commands.cpp:475 msgid "Command CDUP" msgstr "" #: src/services/gridftpd/commands.cpp:481 #, c-format msgid "Command CWD %s" msgstr "" #: src/services/gridftpd/commands.cpp:497 #, c-format msgid "Command MKD %s" msgstr "" #: src/services/gridftpd/commands.cpp:517 #, c-format msgid "Command SIZE %s" msgstr "" #: src/services/gridftpd/commands.cpp:532 #, c-format msgid "Command SBUF: %i" msgstr "" #: src/services/gridftpd/commands.cpp:553 #, c-format msgid "Command MLST %s" msgstr "" #: src/services/gridftpd/commands.cpp:576 #, c-format msgid "Command DELE %s" msgstr "" #: src/services/gridftpd/commands.cpp:591 #, c-format msgid "Command RMD %s" msgstr "" #: src/services/gridftpd/commands.cpp:605 #, c-format msgid "Command TYPE %c" msgstr "" #: src/services/gridftpd/commands.cpp:616 #, c-format msgid "Command MODE %c" msgstr "" #: src/services/gridftpd/commands.cpp:628 msgid "Command ABOR" msgstr "" #: src/services/gridftpd/commands.cpp:641 #, c-format msgid "Command REST %s" msgstr "" #: src/services/gridftpd/commands.cpp:654 #, c-format msgid "Command EPSV %s" msgstr "" #: src/services/gridftpd/commands.cpp:656 msgid "Command SPAS" msgstr "" #: src/services/gridftpd/commands.cpp:658 msgid "Command PASV" msgstr "" #: src/services/gridftpd/commands.cpp:679 msgid "local_pasv failed" msgstr "" #: src/services/gridftpd/commands.cpp:703 msgid "local_spas failed" msgstr "" #: src/services/gridftpd/commands.cpp:726 msgid "Command PORT" msgstr "" #: src/services/gridftpd/commands.cpp:729 msgid "active_data is disabled" msgstr "" #: src/services/gridftpd/commands.cpp:738 msgid "local_port failed" msgstr "" #: src/services/gridftpd/commands.cpp:751 #, c-format msgid "Command MLSD %s" msgstr "" #: src/services/gridftpd/commands.cpp:753 #, c-format msgid "Command NLST %s" msgstr "" #: src/services/gridftpd/commands.cpp:755 #, c-format msgid "Command LIST %s" msgstr "" #: src/services/gridftpd/commands.cpp:806 #, c-format msgid "Command ERET %s" msgstr "" #: src/services/gridftpd/commands.cpp:836 #, c-format msgid "Command RETR %s" msgstr "" #: src/services/gridftpd/commands.cpp:865 #, c-format msgid "Command STOR %s" msgstr "" #: src/services/gridftpd/commands.cpp:893 #, c-format msgid "Command ALLO %i" msgstr "" #: src/services/gridftpd/commands.cpp:916 msgid "Command OPTS" msgstr "" #: src/services/gridftpd/commands.cpp:919 msgid "Command OPTS RETR" msgstr "" #: src/services/gridftpd/commands.cpp:929 #, c-format msgid "Option: %s" msgstr "" #: src/services/gridftpd/commands.cpp:973 msgid "Command NOOP" msgstr "" #: src/services/gridftpd/commands.cpp:977 msgid "Command QUIT" msgstr "" #: src/services/gridftpd/commands.cpp:987 msgid "Failed to close, deleting client" msgstr "" #: src/services/gridftpd/commands.cpp:1001 #, c-format msgid "Command DCAU: %i '%s'" msgstr "" #: src/services/gridftpd/commands.cpp:1029 #, c-format msgid "Command PBZS: %s" msgstr "" #: src/services/gridftpd/commands.cpp:1037 #, c-format msgid "Setting pbsz to %lu" msgstr "" #: 
src/services/gridftpd/commands.cpp:1053 #, c-format msgid "Command PROT: %s" msgstr "" #: src/services/gridftpd/commands.cpp:1078 #, c-format msgid "Command MDTM %s" msgstr "" #: src/services/gridftpd/commands.cpp:1100 #, c-format msgid "Raw command: %s" msgstr "" #: src/services/gridftpd/commands.cpp:1148 msgid "Failed to allocate memory for buffer" msgstr "" #: src/services/gridftpd/commands.cpp:1155 #, c-format msgid "Allocated %u buffers %llu bytes each." msgstr "" #: src/services/gridftpd/commands.cpp:1162 msgid "abort_callback: start" msgstr "" #: src/services/gridftpd/commands.cpp:1165 #, c-format msgid "abort_callback: Globus error: %s" msgstr "" #: src/services/gridftpd/commands.cpp:1179 msgid "make_abort: start" msgstr "" #: src/services/gridftpd/commands.cpp:1191 msgid "Failed to abort data connection - ignoring and recovering" msgstr "" #: src/services/gridftpd/commands.cpp:1199 msgid "make_abort: wait for abort flag to be reset" msgstr "" #: src/services/gridftpd/commands.cpp:1209 msgid "make_abort: leaving" msgstr "" #: src/services/gridftpd/commands.cpp:1224 msgid "check_abort: have Globus error" msgstr "" #: src/services/gridftpd/commands.cpp:1225 msgid "Abort request caused by transfer error" msgstr "" #: src/services/gridftpd/commands.cpp:1228 msgid "check_abort: sending 426" msgstr "" #: src/services/gridftpd/commands.cpp:1249 msgid "Abort request caused by error in transfer function" msgstr "" #: src/services/gridftpd/commands.cpp:1331 msgid "Failed to start timer thread - timeout won't work" msgstr "" #: src/services/gridftpd/commands.cpp:1383 msgid "Killing connection due to timeout" msgstr "" #: src/services/gridftpd/conf/conf_vo.cpp:22 #: src/services/gridftpd/conf/conf_vo.cpp:48 msgid "Configuration section [userlist] is missing name." 
msgstr "" #: src/services/gridftpd/conf/daemon.cpp:58 #: src/services/gridftpd/conf/daemon.cpp:138 #, c-format msgid "No such user: %s" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:70 #: src/services/gridftpd/conf/daemon.cpp:150 #, c-format msgid "No such group: %s" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:83 #: src/services/gridftpd/conf/daemon.cpp:163 #, c-format msgid "Improper debug level '%s'" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:120 msgid "Missing option for command logreopen" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:125 msgid "Wrong option in logreopen" msgstr "" #: src/services/gridftpd/conf/daemon.cpp:209 #, c-format msgid "Failed to open log file %s" msgstr "" #: src/services/gridftpd/datalist.cpp:101 msgid "Closing channel (list)" msgstr "" #: src/services/gridftpd/datalist.cpp:157 msgid "Data channel connected (list)" msgstr "" #: src/services/gridftpd/dataread.cpp:24 msgid "data_connect_retrieve_callback" msgstr "" #: src/services/gridftpd/dataread.cpp:30 msgid "Data channel connected (retrieve)" msgstr "" #: src/services/gridftpd/dataread.cpp:37 msgid "data_connect_retrieve_callback: allocate_data_buffer" msgstr "" #: src/services/gridftpd/dataread.cpp:40 msgid "data_connect_retrieve_callback: allocate_data_buffer failed" msgstr "" #: src/services/gridftpd/dataread.cpp:48 #, c-format msgid "data_connect_retrieve_callback: check for buffer %u" msgstr "" #: src/services/gridftpd/dataread.cpp:61 src/services/gridftpd/dataread.cpp:158 #, c-format msgid "Closing channel (retrieve) due to local read error: %s" msgstr "" #: src/services/gridftpd/dataread.cpp:75 src/services/gridftpd/dataread.cpp:172 msgid "Buffer registration failed" msgstr "" #: src/services/gridftpd/dataread.cpp:88 msgid "data_retrieve_callback" msgstr "" #: src/services/gridftpd/dataread.cpp:96 #, c-format msgid "Data channel (retrieve) %i %i %i" msgstr "" #: src/services/gridftpd/dataread.cpp:104 msgid "Closing channel (retrieve)" msgstr "" #: src/services/gridftpd/dataread.cpp:110 #: src/services/gridftpd/datawrite.cpp:128 #, c-format msgid "Time spent waiting for network: %.3f ms" msgstr "" #: src/services/gridftpd/dataread.cpp:111 #: src/services/gridftpd/datawrite.cpp:129 #, c-format msgid "Time spent waiting for disc: %.3f ms" msgstr "" #: src/services/gridftpd/dataread.cpp:122 msgid "data_retrieve_callback: lost buffer" msgstr "" #: src/services/gridftpd/datawrite.cpp:24 msgid "data_connect_store_callback" msgstr "" #: src/services/gridftpd/datawrite.cpp:30 msgid "Data channel connected (store)" msgstr "" #: src/services/gridftpd/datawrite.cpp:57 msgid "Failed to register any buffer" msgstr "" #: src/services/gridftpd/datawrite.cpp:76 #, c-format msgid "Data channel (store) %i %i %i" msgstr "" #: src/services/gridftpd/datawrite.cpp:89 msgid "data_store_callback: lost buffer" msgstr "" #: src/services/gridftpd/datawrite.cpp:105 #, c-format msgid "Closing channel (store) due to error: %s" msgstr "" #: src/services/gridftpd/datawrite.cpp:115 msgid "Closing channel (store)" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:55 msgid "Can't parse access rights in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:61 msgid "Can't parse user:group in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:68 msgid "Can't recognize user in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:77 msgid "Can't recognize group in configuration line" msgstr "" #: 
src/services/gridftpd/fileplugin/fileplugin.cpp:84 #: src/services/gridftpd/fileplugin/fileplugin.cpp:89 msgid "Can't parse or:and in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:116 msgid "Can't parse configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:120 #, c-format msgid "Bad directory name: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:137 msgid "Can't parse create arguments in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:146 msgid "Can't parse mkdir arguments in configuration line" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:163 #, c-format msgid "Bad subcommand in configuration line: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:175 msgid "Bad mount directory specified" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:177 #, c-format msgid "Mount point %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:215 #: src/services/gridftpd/fileplugin/fileplugin.cpp:274 #, c-format msgid "mkdir failed: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:226 #, c-format msgid "Warning: mount point %s creation failed." msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:330 #, c-format msgid "plugin: open: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:378 #: src/services/gridftpd/fileplugin/fileplugin.cpp:415 msgid "Not enough space to store file" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:430 #, c-format msgid "open: changing owner for %s, %i, %i" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:437 #, c-format msgid "open: owner: %i %i" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:446 #: src/services/gridftpd/fileplugin/fileplugin.cpp:486 #, c-format msgid "Unknown open mode %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:451 msgid "plugin: close" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:492 msgid "plugin: read" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:498 msgid "Error while reading file" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:508 msgid "plugin: write" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:519 msgid "Zero bytes written to file" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:727 #, c-format msgid "plugin: checkdir: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:730 #, c-format msgid "plugin: checkdir: access: %s" msgstr "" #: src/services/gridftpd/fileplugin/fileplugin.cpp:739 #, c-format msgid "plugin: checkdir: access: allowed: %s" msgstr "" #: src/services/gridftpd/fileroot.cpp:14 #, c-format msgid "No plugin is configured or authorised for requested path %s" msgstr "" #: src/services/gridftpd/fileroot.cpp:19 msgid "FilePlugin: more unload than load" msgstr "" #: src/services/gridftpd/fileroot.cpp:34 #, c-format msgid "Can't load plugin %s for access point %s" msgstr "" #: src/services/gridftpd/fileroot.cpp:39 src/services/gridftpd/fileroot.cpp:43 #, c-format msgid "Plugin %s for access point %s is broken." msgstr "" #: src/services/gridftpd/fileroot.cpp:47 #, c-format msgid "Plugin %s for access point %s acquire failed (should never happen)." 
msgstr "" #: src/services/gridftpd/fileroot.cpp:54 #, c-format msgid "Destructor with dlclose (%s)" msgstr "" #: src/services/gridftpd/fileroot.cpp:77 #, c-format msgid "FileNode: operator= (%s <- %s) %lu <- %lu" msgstr "" #: src/services/gridftpd/fileroot.cpp:80 msgid "Copying with dlclose" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:31 #: src/services/gridftpd/fileroot_config.cpp:405 msgid "configuration file not found" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:54 msgid "Wrong port number in configuration" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:63 msgid "Wrong maxconnections number in configuration" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:72 msgid "Wrong defaultbuffer number in configuration" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:81 msgid "Wrong maxbuffer number in configuration" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:113 #: src/services/gridftpd/fileroot_config.cpp:121 #, c-format msgid "Can't resolve host %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:173 msgid "Could not determine hostname from gethostname()" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:190 msgid "unnamed group" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:199 msgid "undefined plugin name" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:203 msgid "undefined virtual plugin path" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:208 #, c-format msgid "bad directory for plugin: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:220 #, c-format msgid "Already have directory: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:223 #, c-format msgid "Registering directory: %s with plugin: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:236 #, c-format msgid "file node creation failed: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:286 #, c-format msgid "improper attribute for allowencryption command: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:300 #, c-format msgid "improper attribute for allowactvedata command: %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:314 #, c-format msgid "failed while processing configuration command: %s %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:339 #, c-format msgid "Failed processing authorization group %s" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:352 msgid "Missing authgroup name in allowaccess" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:369 msgid "Missing authgroup name in denyaccess" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:419 msgid "failed to process client identification" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:426 msgid "failed to identify plugins path" msgstr "" #: src/services/gridftpd/fileroot_config.cpp:453 #, c-format msgid "Registering dummy directory: %s" msgstr "" #: src/services/gridftpd/listener.cpp:57 src/services/gridftpd/listener.cpp:466 msgid "Activation failed" msgstr "" #: src/services/gridftpd/listener.cpp:66 src/services/gridftpd/listener.cpp:172 msgid "Child exited" msgstr "" #: src/services/gridftpd/listener.cpp:78 msgid "Globus connection error" msgstr "" #: src/services/gridftpd/listener.cpp:80 src/services/gridftpd/listener.cpp:424 msgid "New connection" msgstr "" #: src/services/gridftpd/listener.cpp:87 msgid "Server stopped" msgstr "" #: src/services/gridftpd/listener.cpp:157 msgid "Error: failed to set handler for SIGTERM" msgstr "" #: src/services/gridftpd/listener.cpp:161 msgid "Starting controlled 
process" msgstr "" #: src/services/gridftpd/listener.cpp:164 msgid "fork failed" msgstr "" #: src/services/gridftpd/listener.cpp:169 msgid "wait failed - killing child" msgstr "" #: src/services/gridftpd/listener.cpp:174 msgid "Killed with signal: " msgstr "" #: src/services/gridftpd/listener.cpp:176 msgid "Restarting after segmentation violation." msgstr "" #: src/services/gridftpd/listener.cpp:177 msgid "Waiting 1 minute" msgstr "" #: src/services/gridftpd/listener.cpp:239 msgid "Error: failed to set handler for SIGCHLD" msgstr "" #: src/services/gridftpd/listener.cpp:256 msgid "Missing argument" msgstr "" #: src/services/gridftpd/listener.cpp:257 msgid "Unknown option" msgstr "" #: src/services/gridftpd/listener.cpp:264 msgid "Wrong port number" msgstr "" #: src/services/gridftpd/listener.cpp:274 msgid "Wrong number of connections" msgstr "" #: src/services/gridftpd/listener.cpp:281 msgid "Wrong buffer size" msgstr "" #: src/services/gridftpd/listener.cpp:288 msgid "Wrong maximal buffer size" msgstr "" #: src/services/gridftpd/listener.cpp:300 msgid "Failed reading configuration" msgstr "" #: src/services/gridftpd/listener.cpp:331 #, c-format msgid "Failed to obtain local address: %s" msgstr "" #: src/services/gridftpd/listener.cpp:338 #, c-format msgid "Failed to create socket(%s): %s" msgstr "" #: src/services/gridftpd/listener.cpp:352 #, c-format msgid "Failed to limit socket to IPv6: %s" msgstr "" #: src/services/gridftpd/listener.cpp:359 #, c-format msgid "Failed to bind socket(%s): %s" msgstr "" #: src/services/gridftpd/listener.cpp:364 #, c-format msgid "Failed to listen on socket(%s): %s" msgstr "" #: src/services/gridftpd/listener.cpp:371 msgid "Not listening to anything" msgstr "" #: src/services/gridftpd/listener.cpp:374 #, c-format msgid "Some addresses failed. Listening on %u of %u." 
msgstr "" #: src/services/gridftpd/listener.cpp:382 #: src/services/gridftpd/listener.cpp:477 msgid "Listen started" msgstr "" #: src/services/gridftpd/listener.cpp:395 msgid "No valid handles left for listening" msgstr "" #: src/services/gridftpd/listener.cpp:401 #, c-format msgid "Select failed: %s" msgstr "" #: src/services/gridftpd/listener.cpp:422 #, c-format msgid "Have connections: %i, max: %i" msgstr "" #: src/services/gridftpd/listener.cpp:427 #, c-format msgid "Fork failed: %s" msgstr "" #: src/services/gridftpd/listener.cpp:445 msgid "Refusing connection: Connection limit exceeded" msgstr "" #: src/services/gridftpd/listener.cpp:471 msgid "Init failed" msgstr "" #: src/services/gridftpd/listener.cpp:474 msgid "Listen failed" msgstr "" #: src/services/gridftpd/listener.cpp:488 msgid "Listen finished" msgstr "" #: src/services/gridftpd/listener.cpp:493 msgid "Stopping server" msgstr "" #: src/services/gridftpd/listener.cpp:497 msgid "Destroying handle" msgstr "" #: src/services/gridftpd/listener.cpp:500 msgid "Deactivating modules" msgstr "" #: src/services/gridftpd/listener.cpp:508 msgid "Exiting" msgstr "" #: src/services/gridftpd/misc/ldapquery.cpp:253 #, c-format msgid "%s: %s:%i" msgstr "" #: src/services/gridftpd/misc/ldapquery.cpp:390 #: src/services/gridftpd/misc/ldapquery.cpp:467 #, c-format msgid "%s %s" msgstr "" #: src/services/gridftpd/misc/ldapquery.cpp:394 #, c-format msgid " %s: %s" msgstr "" #: src/services/gridftpd/misc/ldapquery.cpp:396 #, c-format msgid " %s:" msgstr "" #: src/services/gridftpd/userspec.cpp:83 src/services/gridftpd/userspec.cpp:133 msgid "No proxy provided" msgstr "" #: src/services/gridftpd/userspec.cpp:85 #, c-format msgid "Proxy/credentials stored at %s" msgstr "" #: src/services/gridftpd/userspec.cpp:91 src/services/gridftpd/userspec.cpp:141 msgid "Running user has no name" msgstr "" #: src/services/gridftpd/userspec.cpp:94 src/services/gridftpd/userspec.cpp:144 #, c-format msgid "Mapped to running user: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:104 #: src/services/gridftpd/userspec.cpp:154 #, c-format msgid "Mapped to local id: %i" msgstr "" #: src/services/gridftpd/userspec.cpp:109 #: src/services/gridftpd/userspec.cpp:159 #, c-format msgid "No group %i for mapped user" msgstr "" #: src/services/gridftpd/userspec.cpp:113 #: src/services/gridftpd/userspec.cpp:163 #, c-format msgid "Mapped to local group id: %i" msgstr "" #: src/services/gridftpd/userspec.cpp:114 #: src/services/gridftpd/userspec.cpp:164 #, c-format msgid "Mapped to local group name: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:115 #: src/services/gridftpd/userspec.cpp:165 #, c-format msgid "Mapped user's home: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:135 #, c-format msgid "Proxy stored at %s" msgstr "" #: src/services/gridftpd/userspec.cpp:195 #, c-format msgid "Undefined control sequence: %%%s" msgstr "" #: src/services/gridftpd/userspec.cpp:218 #, c-format msgid "Local user %s does not exist" msgstr "" #: src/services/gridftpd/userspec.cpp:227 #, c-format msgid "Local group %s does not exist" msgstr "" #: src/services/gridftpd/userspec.cpp:232 #, c-format msgid "Remapped to local user: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:233 #, c-format msgid "Remapped to local id: %i" msgstr "" #: src/services/gridftpd/userspec.cpp:234 #, c-format msgid "Remapped to local group id: %i" msgstr "" #: src/services/gridftpd/userspec.cpp:235 #, c-format msgid "Remapped to local group name: %s" msgstr "" #: src/services/gridftpd/userspec.cpp:236 #, 
c-format msgid "Remapped user's home: %s" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:103 #, c-format msgid "Loading %u-th Python service" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:107 #, c-format msgid "Initialized %u-th Python service" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:142 msgid "Invalid class name" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:147 #, c-format msgid "class name: %s" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:148 #, c-format msgid "module name: %s" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:205 msgid "Cannot find ARC Config class" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:212 msgid "Config class is not an object" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:220 msgid "Cannot get dictionary of module" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:229 msgid "Cannot find service class" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:238 msgid "Cannot create config argument" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:245 msgid "Cannot convert config to Python object" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:268 #, c-format msgid "%s is not an object" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:274 msgid "Message class is not an object" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:280 msgid "Python Wrapper constructor succeeded" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:295 #, c-format msgid "Python Wrapper destructor (%d)" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:328 msgid "Python interpreter locked" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:332 msgid "Python interpreter released" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:403 msgid "Python wrapper process called" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:412 msgid "Failed to create input SOAP container" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:422 msgid "Cannot create inmsg argument" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:436 msgid "Cannot find ARC Message class" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:442 msgid "Cannot convert inmsg to Python object" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:451 msgid "Failed to create SOAP containers" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:457 msgid "Cannot create outmsg argument" msgstr "" #: src/services/wrappers/python/pythonwrapper.cpp:463 msgid "Cannot convert outmsg to Python object" msgstr "" #: src/tests/client/test_ClientInterface.cpp:36 #: src/tests/client/test_ClientSAML2SSO.cpp:68 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:78 #: src/tests/echo/test_clientinterface.cpp:41 #: src/tests/echo/test_clientinterface.cpp:132 #: src/tests/echo/test_clientinterface.py:12 msgid "Creating a soap client" msgstr "" #: src/tests/client/test_ClientInterface.cpp:73 #: src/tests/client/test_ClientSAML2SSO.cpp:47 #: src/tests/client/test_ClientSAML2SSO.cpp:71 #: src/tests/count/test_client.cpp:61 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:85 #: src/tests/echo/echo_test4axis2c/test_client.cpp:56 #: src/tests/echo/test.cpp:62 src/tests/echo/test_client.cpp:72 #: src/tests/echo/test_clientinterface.cpp:67 #: src/tests/echo/test_clientinterface.cpp:107 #: src/tests/echo/test_clientinterface.cpp:136 #: 
src/tests/echo/test_clientinterface.py:22 msgid "Creating and sending request" msgstr "" #: src/tests/client/test_ClientInterface.cpp:84 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:97 #: src/tests/echo/test_clientinterface.cpp:78 #: src/tests/echo/test_clientinterface.py:30 msgid "SOAP invocation failed" msgstr "" #: src/tests/client/test_ClientSAML2SSO.cpp:44 #: src/tests/echo/test_clientinterface.cpp:100 msgid "Creating a http client" msgstr "" #: src/tests/client/test_ClientSAML2SSO.cpp:55 #: src/tests/echo/test_clientinterface.cpp:117 msgid "HTTP with SAML2SSO invocation failed" msgstr "" #: src/tests/client/test_ClientSAML2SSO.cpp:59 #: src/tests/echo/test_clientinterface.cpp:121 msgid "There was no HTTP response" msgstr "" #: src/tests/client/test_ClientSAML2SSO.cpp:77 #: src/tests/echo/test_clientinterface.cpp:145 msgid "SOAP with SAML2SSO invocation failed" msgstr "" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:37 #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:38 #: src/tests/delegation/test_delegation_client.cpp:45 #: src/tests/delegation/test_delegation_client.cpp:76 #: src/tests/echo/test_clientinterface.cpp:172 #: src/tests/echo/test_clientinterface.cpp:194 msgid "Creating a delegation soap client" msgstr "" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:46 #: src/tests/delegation/test_delegation_client.cpp:51 #: src/tests/echo/test_clientinterface.cpp:178 msgid "Delegation to ARC delegation service failed" msgstr "" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:50 #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:49 #: src/tests/delegation/test_delegation_client.cpp:56 #: src/tests/delegation/test_delegation_client.cpp:88 #: src/tests/echo/test_clientinterface.cpp:182 #: src/tests/echo/test_clientinterface.cpp:205 #, c-format msgid "Delegation ID: %s" msgstr "" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:58 #, c-format msgid "Delegated credential from delegation service: %s" msgstr "" #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:45 #: src/tests/delegation/test_delegation_client.cpp:83 #: src/tests/echo/test_clientinterface.cpp:201 msgid "Delegation to gridsite delegation service failed" msgstr "" #: src/tests/count/count.cpp:58 msgid "Input is not SOAP" msgstr "" #: src/tests/count/count.cpp:89 src/tests/echo/echo.cpp:83 msgid "echo: Unauthorized" msgstr "" #: src/tests/count/count.cpp:98 src/tests/count/count.cpp:104 #, c-format msgid "Request is not supported - %s" msgstr "" #: src/tests/count/test_client.cpp:50 #: src/tests/echo/echo_test4axis2c/test_client.cpp:43 #: src/tests/echo/test_client.cpp:59 msgid "Failed to load client configuration" msgstr "" #: src/tests/count/test_client.cpp:54 #: src/tests/echo/echo_test4axis2c/test_client.cpp:47 #: src/tests/echo/test.cpp:58 src/tests/echo/test_client.cpp:63 msgid "Client side MCCs are loaded" msgstr "" #: src/tests/count/test_client.cpp:57 #: src/tests/echo/echo_test4axis2c/test_client.cpp:50 #: src/tests/echo/test_client.cpp:66 msgid "Client chain does not have entry point" msgstr "" #: src/tests/count/test_client.cpp:84 #: src/tests/echo/echo_test4axis2c/test_client.cpp:74 #: src/tests/echo/test.cpp:74 src/tests/echo/test_client.cpp:90 msgid "Request failed" msgstr "" #: src/tests/count/test_client.cpp:90 #: src/tests/echo/echo_test4axis2c/test_client.cpp:80 #: src/tests/echo/test.cpp:79 src/tests/echo/test_client.cpp:96 msgid "There is no response" msgstr "" #: src/tests/count/test_client.cpp:97 #: 
src/tests/echo/echo_test4axis2c/test_client.cpp:87 #: src/tests/echo/test_client.cpp:103 msgid "Response is not SOAP" msgstr "" #: src/tests/count/test_service.cpp:22 src/tests/echo/test.cpp:23 #: src/tests/echo/test_service.cpp:22 msgid "Creating service side chain" msgstr "" #: src/tests/count/test_service.cpp:25 src/tests/echo/test.cpp:26 #: src/tests/echo/test_service.cpp:25 msgid "Failed to load service configuration" msgstr "" #: src/tests/count/test_service.cpp:30 src/tests/echo/test_service.cpp:30 msgid "Service is waiting for requests" msgstr "" #: src/tests/echo/test.cpp:32 msgid "Creating client interface" msgstr "" #: src/tests/echo/test.cpp:82 msgid "Request succeed!!!" msgstr "" nordugrid-arc-6.14.0/po/PaxHeaders.30264/Makevars0000644000000000000000000000013214152153376017456 xustar000000000000000030 mtime=1638455038.281644111 30 atime=1638455038.462646831 30 ctime=1638455103.887629871 nordugrid-arc-6.14.0/po/Makevars0000644000175000002070000000034614152153376017446 0ustar00mockbuildmock00000000000000DOMAIN = $(PACKAGE) subdir = po top_builddir = .. XGETTEXT_OPTIONS = -kmsg:2 -kIString:1 -kistring:1 -kFindNTrans:1,2 COPYRIGHT_HOLDER = NorduGrid collaboration MSGID_BUGS_ADDRESS = support@nordugrid.org EXTRA_LOCALE_CATEGORIES = nordugrid-arc-6.14.0/po/PaxHeaders.30264/ru.po0000644000000000000000000000013214152153476016751 xustar000000000000000030 mtime=1638455102.670611585 30 atime=1638455103.561624973 30 ctime=1638455103.890629916 nordugrid-arc-6.14.0/po/ru.po0000644000175000002070000447713614152153476016744 0ustar00mockbuildmock00000000000000# translation of Arc.po to Russian # Oxana Smirnova , 2007. # Translation file for the Advanced Resource Connector (Arc) msgid "" msgstr "" "Project-Id-Version: Arc\n" "Report-Msgid-Bugs-To: support@nordugrid.org\n" "POT-Creation-Date: 2021-12-02 15:25+0100\n" "PO-Revision-Date: 2021-11-26 12:09+0100\n" "Last-Translator: Oxana Smirnova \n" "Language-Team: Russian\n" "Language: ru\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "X-Generator: Poedit 2.3\n" "Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" "%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n" "X-Poedit-KeywordsList: msg:2;IString:1;istring:1;FindNTrans:1,2\n" "X-Poedit-Basepath: /home/oxana/GITROOT/arc6\n" "X-Poedit-SearchPath-0: src\n" #: src/clients/compute/arccat.cpp:35 src/clients/compute/arcclean.cpp:34 #: src/clients/compute/arcget.cpp:35 src/clients/compute/arckill.cpp:33 #: src/clients/compute/arcrenew.cpp:32 src/clients/compute/arcresub.cpp:36 #: src/clients/compute/arcresume.cpp:32 src/clients/compute/arcstat.cpp:34 msgid "[job ...]" msgstr "[задача ...]" #: src/clients/compute/arccat.cpp:36 msgid "" "The arccat command performs the cat command on the stdout, stderr or grid\n" "manager's error log of the job." msgstr "" "Команда arccat предназначена для вывода на экран сообщений стандартного\n" "выхода, стандартной ошибки или ошибок системы при исполнении задачи."
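Aside, not part of the ru.po payload: the catalog header above declares the standard Russian Plural-Forms rule (nplurals=3) with the C-style selector expression shown in the "Plural-Forms:" field. The short Python sketch below simply mirrors that expression to show which msgstr[] index gettext would pick for a given count n; the helper name plural_index is illustrative only and does not exist in the package.

# Minimal sketch of the Plural-Forms selector from the ru.po header above.
def plural_index(n: int) -> int:
    # plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)
    if n % 10 == 1 and n % 100 != 11:
        return 0          # 1, 21, 31, ...  -> first form
    if 2 <= n % 10 <= 4 and not 10 <= n % 100 < 20:
        return 1          # 2-4, 22-24, ... -> second form
    return 2              # 0, 5-20, 25-30, ... -> third form

for n in (1, 2, 5, 11, 21, 22, 25):
    print(n, plural_index(n))   # prints indices 0, 1, 2, 2, 0, 1, 2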
#: src/clients/compute/arccat.cpp:43 src/clients/compute/arcclean.cpp:41 #: src/clients/compute/arcget.cpp:42 src/clients/compute/arcinfo.cpp:45 #: src/clients/compute/arckill.cpp:40 src/clients/compute/arcrenew.cpp:37 #: src/clients/compute/arcresub.cpp:41 src/clients/compute/arcresume.cpp:37 #: src/clients/compute/arcstat.cpp:42 src/clients/compute/arcsub.cpp:53 #: src/clients/compute/arcsync.cpp:147 src/clients/compute/arctest.cpp:64 #: src/clients/credentials/arcproxy.cpp:457 src/clients/data/arccp.cpp:641 #: src/clients/data/arcls.cpp:347 src/clients/data/arcmkdir.cpp:125 #: src/clients/data/arcrename.cpp:136 src/clients/data/arcrm.cpp:151 #: src/hed/daemon/unix/main_unix.cpp:341 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1263 #: src/hed/libs/data/DataExternalHelper.cpp:358 #, c-format msgid "%s version %s" msgstr "%s, версия %s" #: src/clients/compute/arccat.cpp:52 src/clients/compute/arcclean.cpp:50 #: src/clients/compute/arcget.cpp:51 src/clients/compute/arcinfo.cpp:53 #: src/clients/compute/arckill.cpp:49 src/clients/compute/arcrenew.cpp:46 #: src/clients/compute/arcresub.cpp:50 src/clients/compute/arcresume.cpp:46 #: src/clients/compute/arcstat.cpp:51 src/clients/compute/arcsub.cpp:62 #: src/clients/compute/arcsync.cpp:156 src/clients/compute/arctest.cpp:86 #: src/clients/credentials/arcproxy.cpp:465 src/clients/data/arccp.cpp:648 #: src/clients/data/arcls.cpp:355 src/clients/data/arcmkdir.cpp:133 #: src/clients/data/arcrename.cpp:144 src/clients/data/arcrm.cpp:160 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:174 #: src/services/a-rex/grid-manager/GridManager.cpp:110 #: src/services/a-rex/grid-manager/log/JobLog.cpp:139 #, c-format msgid "Running command: %s" msgstr "Выполняется команда %s" #: src/clients/compute/arccat.cpp:63 src/clients/compute/arcclean.cpp:61 #: src/clients/compute/arcget.cpp:62 src/clients/compute/arcinfo.cpp:65 #: src/clients/compute/arckill.cpp:60 src/clients/compute/arcrenew.cpp:57 #: src/clients/compute/arcresub.cpp:54 src/clients/compute/arcresume.cpp:50 #: src/clients/compute/arcstat.cpp:62 src/clients/compute/arcsub.cpp:66 #: src/clients/compute/arcsync.cpp:167 src/clients/compute/arctest.cpp:90 #: src/clients/data/arccp.cpp:671 src/clients/data/arcls.cpp:377 #: src/clients/data/arcmkdir.cpp:155 src/clients/data/arcrename.cpp:166 #: src/clients/data/arcrm.cpp:182 msgid "Failed configuration initialization" msgstr "Не удалось загрузить настройки" #: src/clients/compute/arccat.cpp:78 src/clients/compute/arcclean.cpp:76 #: src/clients/compute/arcget.cpp:90 src/clients/compute/arckill.cpp:75 #: src/clients/compute/arcrenew.cpp:70 src/clients/compute/arcresub.cpp:85 #: src/clients/compute/arcresume.cpp:72 src/clients/compute/arcstat.cpp:71 #, c-format msgid "Cannot read specified jobid file: %s" msgstr "Не удаётся прочесть указанный файл, содержащий ярлыки задач: %s" #: src/clients/compute/arccat.cpp:89 src/clients/compute/arcclean.cpp:87 #: src/clients/compute/arcget.cpp:101 src/clients/compute/arckill.cpp:86 #: src/clients/compute/arcrenew.cpp:81 src/clients/compute/arcresub.cpp:99 #: src/clients/compute/arcresume.cpp:83 src/clients/compute/arcstat.cpp:105 msgid "No jobs given" msgstr "Задачи не указаны" #: src/clients/compute/arccat.cpp:102 src/clients/compute/arcclean.cpp:100 #: src/clients/compute/arcget.cpp:114 src/clients/compute/arckill.cpp:99 #: src/clients/compute/arcrenew.cpp:94 src/clients/compute/arcresub.cpp:109 #: src/clients/compute/arcresume.cpp:96 src/clients/compute/arcstat.cpp:117 #, c-format msgid "Job list 
file (%s) doesn't exist" msgstr "Файл ÑпиÑка задач (%s) не ÑущеÑтвует" #: src/clients/compute/arccat.cpp:109 src/clients/compute/arcclean.cpp:107 #: src/clients/compute/arcget.cpp:121 src/clients/compute/arckill.cpp:106 #: src/clients/compute/arcrenew.cpp:101 src/clients/compute/arcresub.cpp:116 #: src/clients/compute/arcresume.cpp:103 src/clients/compute/arcstat.cpp:124 #: src/clients/compute/arctest.cpp:335 #, c-format msgid "Unable to read job information from file (%s)" msgstr "Ðевозможно прочитать информацию о задаче из файла (%s)" #: src/clients/compute/arccat.cpp:118 src/clients/compute/arcclean.cpp:115 #: src/clients/compute/arcget.cpp:129 src/clients/compute/arckill.cpp:114 #: src/clients/compute/arcrenew.cpp:110 src/clients/compute/arcresub.cpp:124 #: src/clients/compute/arcresume.cpp:112 src/clients/compute/arcstat.cpp:133 #, c-format msgid "Warning: Job not found in job list: %s" msgstr "Предупреждение: Задача не обнаружена в ÑпиÑке задач: %s" #: src/clients/compute/arccat.cpp:131 src/clients/compute/arcclean.cpp:170 #: src/clients/compute/arcget.cpp:142 src/clients/compute/arckill.cpp:126 #: src/clients/compute/arcrenew.cpp:122 src/clients/compute/arcresub.cpp:136 #: src/clients/compute/arcresume.cpp:124 msgid "No jobs" msgstr "Задач нет" #: src/clients/compute/arccat.cpp:146 #, c-format msgid "Could not create temporary file \"%s\"" msgstr "Ðе удалоÑÑŒ Ñоздать временный файл \"%s\"" #: src/clients/compute/arccat.cpp:147 src/clients/compute/arccat.cpp:153 #, c-format msgid "Cannot create output of %s for any jobs" msgstr "Ðевозможно Ñоздать выход %s ни Ð´Ð»Ñ Ð¾Ð´Ð½Ð¾Ð¹ задачи" #: src/clients/compute/arccat.cpp:154 #, c-format msgid "Invalid destination URL %s" msgstr "Ðеверный URL цели %s" #: src/clients/compute/arccat.cpp:172 #, c-format msgid "Job deleted: %s" msgstr "Задача удалена: %s" #: src/clients/compute/arccat.cpp:182 #, c-format msgid "Job has not started yet: %s" msgstr "ИÑполнение задачи ещё не началоÑÑŒ: %s" #: src/clients/compute/arccat.cpp:223 #, c-format msgid "Cannot determine the %s location: %s" msgstr "Ðе удаётÑÑ Ð¾Ð¿Ñ€ÐµÐ´ÐµÐ»Ð¸Ñ‚ÑŒ раÑположение %s: %s" #: src/clients/compute/arccat.cpp:228 #, c-format msgid "Cannot create output of %s for job (%s): Invalid source %s" msgstr "Ðевозможно Ñоздать вывод %s Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ (%s): ÐедопуÑтимый иÑточник %s" #: src/clients/compute/arccat.cpp:241 #, c-format msgid "Catting %s for job %s" msgstr "ПодцеплÑетÑÑ %s Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #: src/clients/compute/arcclean.cpp:35 msgid "The arcclean command removes a job from the computing resource." msgstr "" "Команда arcclean иÑпользуетÑÑ Ð´Ð»Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ñ€ÐµÐ·ÑƒÐ»ÑŒÑ‚Ð°Ñ‚Ð¾Ð² работы задач\n" "Ñ ÑƒÐ´Ð°Ð»Ñ‘Ð½Ð½Ð¾Ð³Ð¾ компьютера." #: src/clients/compute/arcclean.cpp:139 msgid "" "You are about to remove jobs from the job list for which no information " "could be\n" "found. NOTE: Recently submitted jobs might not have appeared in the " "information\n" "system, and this action will also remove such jobs." msgstr "" "Из ÑпиÑка задач будут удалены задачи, о которых не обнаружена информациÑ.\n" "Ð’ÐИМÐÐИЕ: задачи, запущенные недавно, могли ещё не поÑвитьÑÑ Ð² " "информационной\n" "ÑиÑтеме, и Ñта Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ñ ÑƒÐ´Ð°Ð»Ð¸Ñ‚ также Ñти задачи." #: src/clients/compute/arcclean.cpp:142 msgid "Are you sure you want to clean jobs missing information?" msgstr "Ð’Ñ‹ уверены, что хотите вычиÑтить задачи Ñ Ð¾Ñ‚ÑутÑтвующей информацией?" 
#: src/clients/compute/arcclean.cpp:143 src/clients/compute/arcsync.cpp:221 msgid "y" msgstr "y" #: src/clients/compute/arcclean.cpp:143 src/clients/compute/arcsync.cpp:221 msgid "n" msgstr "n" #: src/clients/compute/arcclean.cpp:148 msgid "Jobs missing information will not be cleaned!" msgstr "Задачи Ñ Ð¾Ñ‚ÑутÑтвующей информацией не будут вычищены!" #: src/clients/compute/arcclean.cpp:164 src/clients/compute/arcresub.cpp:177 #: src/clients/compute/arctest.cpp:339 #, c-format msgid "Warning: Failed to write job information to file (%s)" msgstr "Предупреждение: Сбой запиÑи информации о задаче в файл (%s)" #: src/clients/compute/arcclean.cpp:165 msgid "" " Run 'arcclean -s Undefined' to remove cleaned jobs from job list" msgstr "" " ЗапуÑтите 'arcclean -s Undefined' Ð´Ð»Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð²Ñ‹Ñ‡Ð¸Ñ‰ÐµÐ½Ð½Ñ‹Ñ… задач из " "ÑпиÑка" #: src/clients/compute/arcclean.cpp:174 #, c-format msgid "Jobs processed: %d, deleted: %d" msgstr "Обработано задач: %d, уничтожено: %d" #: src/clients/compute/arcget.cpp:36 msgid "The arcget command is used for retrieving the results from a job." msgstr "Команда arcget иÑпользуетÑÑ Ð´Ð»Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ñ€ÐµÐ·ÑƒÐ»ÑŒÑ‚Ð°Ñ‚Ð¾Ð² работы задач." #: src/clients/compute/arcget.cpp:78 #, c-format msgid "Job download directory from user configuration file: %s" msgstr "Каталог Ð´Ð»Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ¸ задач из пользовательÑких наÑтроек: %s" #: src/clients/compute/arcget.cpp:81 msgid "Job download directory will be created in present working directory." msgstr "Каталог Ð´Ð»Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ¸ задачи будет Ñоздан в текущей рабочей директории." #: src/clients/compute/arcget.cpp:85 #, c-format msgid "Job download directory: %s" msgstr "Каталог Ð´Ð»Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ¸ задач: %s" #: src/clients/compute/arcget.cpp:152 #, c-format msgid "Unable to create directory for storing results (%s) - %s" msgstr "Ðе удалоÑÑŒ Ñоздать каталог Ð´Ð»Ñ ÑÐ¾Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ Ñ€ÐµÐ·ÑƒÐ»ÑŒÑ‚Ð°Ñ‚Ð¾Ð² (%s) - %s" #: src/clients/compute/arcget.cpp:162 #, c-format msgid "Results stored at: %s" msgstr "Результаты Ñохранены в: %s" #: src/clients/compute/arcget.cpp:174 src/clients/compute/arckill.cpp:142 msgid "Warning: Some jobs were not removed from server" msgstr "Предупреждение: некоторые задачи не были удалены Ñ Ñервера" #: src/clients/compute/arcget.cpp:175 src/clients/compute/arcget.cpp:182 #: src/clients/compute/arckill.cpp:143 msgid " Use arcclean to remove retrieved jobs from job list" msgstr " ИÑпользуйте arcclean Ð´Ð»Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð½Ñ‹Ñ… задач из ÑпиÑка" #: src/clients/compute/arcget.cpp:181 src/clients/compute/arckill.cpp:149 #: src/clients/compute/arcresub.cpp:207 #, c-format msgid "Warning: Failed removing jobs from file (%s)" msgstr "Предупреждение: Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ о задачах из файла (%s)" #: src/clients/compute/arcget.cpp:186 #, c-format msgid "" "Jobs processed: %d, successfully retrieved: %d, successfully cleaned: %d" msgstr "Обработано задач: %d, уÑпешно получено: %d, уÑпешно очищено: %d" #: src/clients/compute/arcget.cpp:190 #, c-format msgid "Jobs processed: %d, successfully retrieved: %d" msgstr "Обработано задач: %d, уÑпешно получено: %d" #: src/clients/compute/arcinfo.cpp:34 msgid "[resource ...]" msgstr "[реÑÑƒÑ€Ñ ...]" #: src/clients/compute/arcinfo.cpp:35 msgid "" "The arcinfo command is used for obtaining the status of computing resources " "on the Grid." msgstr "" "Команда arcinfo иÑпользуетÑÑ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ ÑоÑтоÑÐ½Ð¸Ñ Ð²Ñ‹Ñ‡Ð¸Ñлительных реÑурÑов " "на Гриде." 
#: src/clients/compute/arcinfo.cpp:142
msgid "Information endpoint"
msgstr "Точка входа для информации"

#: src/clients/compute/arcinfo.cpp:153
msgid "Submission endpoint"
msgstr "Точка входа для засылки задач"

#: src/clients/compute/arcinfo.cpp:155
msgid "status"
msgstr "состояние"

#: src/clients/compute/arcinfo.cpp:157
msgid "interface"
msgstr "интерфейс"

#: src/clients/compute/arcinfo.cpp:176
msgid "ERROR: Failed to retrieve information from the following endpoints:"
msgstr "ОШИБКА: Не удалось получить информацию через следующие точки входа:"

#: src/clients/compute/arcinfo.cpp:189
msgid "ERROR: Failed to retrieve information"
msgstr "ОШИБКА: не удалось получить информацию"

#: src/clients/compute/arcinfo.cpp:191
msgid "from the following endpoints:"
msgstr "через следующие точки входа:"

#: src/clients/compute/arckill.cpp:34
msgid "The arckill command is used to kill running jobs."
msgstr "Команда arckill используется для прерывания исполняющихся задач."

#: src/clients/compute/arckill.cpp:150
msgid ""
"     Run 'arcclean -s Undefined' to remove killed jobs from job list"
msgstr ""
"     Запустите 'arcclean -s Undefined' для удаления оборванных задач из "
"списка"

#: src/clients/compute/arckill.cpp:153
#, c-format
msgid "Jobs processed: %d, successfully killed: %d, successfully cleaned: %d"
msgstr "Обработано задач: %d, успешно оборвано: %d, успешно очищено: %d"

#: src/clients/compute/arckill.cpp:155
#, c-format
msgid "Jobs processed: %d, successfully killed: %d"
msgstr "Обработано задач: %d, успешно оборвано: %d"

#: src/clients/compute/arcrenew.cpp:128
#, c-format
msgid "Jobs processed: %d, renewed: %d"
msgstr "Обработано задач: %d, обновлено: %d"

#: src/clients/compute/arcresub.cpp:79
msgid "--same and --not-same cannot be specified together."
msgstr "--same и --not-same не могут быть заданы одновременно."
#: src/clients/compute/arcresub.cpp:153
msgid ""
"It is not possible to resubmit jobs without new target information discovery"
msgstr ""
"Невозможно перезапустить задачу, не выполнив снова поиск информации о "
"ресурсах"

#: src/clients/compute/arcresub.cpp:166
msgid "No jobs to resubmit with the specified status"
msgstr "Нет задач для перезапуска в указанном состоянии"

#: src/clients/compute/arcresub.cpp:173 src/clients/compute/submit.cpp:34
#, c-format
msgid "Job submitted with jobid: %s"
msgstr "Задача запущена с ярлыком: %s"

#: src/clients/compute/arcresub.cpp:178
msgid "     To recover missing jobs, run arcsync"
msgstr "     Для восполнения недостающих задач, запустите arcsync"

#: src/clients/compute/arcresub.cpp:183
#, c-format
msgid "Cannot write jobids to file (%s)"
msgstr "Невозможно записать ярлыки задач в файл (%s)"

#: src/clients/compute/arcresub.cpp:194
#, c-format
msgid ""
"Resubmission of job (%s) succeeded, but killing the job failed - it will "
"still appear in the job list"
msgstr ""
"Успешно завершена перезасылка задачи (%s), но прервать задачу не удалось - "
"она будет присутствовать в списке задач"

#: src/clients/compute/arcresub.cpp:203
#, c-format
msgid ""
"Resubmission of job (%s) succeeded, but cleaning the job failed - it will "
"still appear in the job list"
msgstr ""
"Успешно завершена перезасылка задачи (%s), но очистить задачу не удалось - "
"она будет присутствовать в списке задач"

#: src/clients/compute/arcresub.cpp:208
msgid "     Use arcclean to remove non-existing jobs"
msgstr "     Используйте arcclean для удаления несуществующих задач"

#: src/clients/compute/arcresub.cpp:215
msgid "Job resubmission summary:"
msgstr "Сводка перезапуска задач:"

#: src/clients/compute/arcresub.cpp:217
#, c-format
msgid "%d of %d jobs were resubmitted"
msgstr "%d из %d задач были перезапущены"

#: src/clients/compute/arcresub.cpp:219
#, c-format
msgid "The following %d were not resubmitted"
msgstr "Следующие %d не были перезапущены"

#: src/clients/compute/arcresume.cpp:130
#, c-format
msgid "Jobs processed: %d, resumed: %d"
msgstr "Обработано задач: %d, возобновлено: %d"

#: src/clients/compute/arcstat.cpp:35
msgid ""
"The arcstat command is used for obtaining the status of jobs that have\n"
"been submitted to Grid enabled resources."
msgstr ""
"Команда arcstat используется для вывода информации о состоянии\n"
"задач, отправленных на Грид."

#: src/clients/compute/arcstat.cpp:79
msgid "The 'sort' and 'rsort' flags cannot be specified at the same time."
msgstr "Опции 'sort' и 'rsort' не могут быть указаны одновременно."

#: src/clients/compute/arcstat.cpp:149
msgid "No jobs found, try later"
msgstr "Не найдено ни одной задачи, попробуйте позже"

#: src/clients/compute/arcstat.cpp:193
#, c-format
msgid "Status of %d jobs was queried, %d jobs returned information"
msgstr "Опрошено состояние %d задач, %d задач отозвались"

#: src/clients/compute/arcsub.cpp:45
msgid "[filename ...]"
msgstr "[файл ...]"

#: src/clients/compute/arcsub.cpp:46
msgid ""
"The arcsub command is used for submitting jobs to Grid enabled computing\n"
"resources."
msgstr ""
"Команда arcsub используется для запуска задач на вычислительные\n"
"ресурсы Грид."
#: src/clients/compute/arcsub.cpp:94 msgid "No job description input specified" msgstr "Ðе задано опиÑание задачи" #: src/clients/compute/arcsub.cpp:107 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:582 #, c-format msgid "Can not open job description file: %s" msgstr "Ðевозможно открыть файл Ñ Ð¾Ð¿Ð¸Ñанием задачи: %s" #: src/clients/compute/arcsub.cpp:135 src/clients/compute/arcsub.cpp:163 msgid "Invalid JobDescription:" msgstr "Ðеверный Ñлемент JobDescription:" #: src/clients/compute/arcsub.cpp:198 src/clients/compute/arctest.cpp:229 msgid "" "Cannot adapt job description to the submission target when information " "discovery is turned off" msgstr "" "Ðевозможно адаптировать опиÑание задачи ни к одному реÑурÑу когда отключён " "Ñбор информации" #: src/clients/compute/arcsync.cpp:66 src/clients/compute/arcsync.cpp:174 #, c-format msgid "Warning: Unable to open job list file (%s), unknown format" msgstr "" "Предупреждение: Ðевозможно открыть файл ÑпиÑка задач (%s), формат неизвеÑтен" #: src/clients/compute/arcsync.cpp:76 msgid "Found the following jobs:" msgstr "Обнаружены Ñледующие задачи:" #: src/clients/compute/arcsync.cpp:86 msgid "Total number of jobs found: " msgstr "КоличеÑтво вÑех обнаруженных задач: " #: src/clients/compute/arcsync.cpp:98 msgid "Found the following new jobs:" msgstr "Обнаружены Ñледующие новые задачи:" #: src/clients/compute/arcsync.cpp:108 msgid "Total number of new jobs found: " msgstr "КоличеÑтво обнаруженных новых задач: " #: src/clients/compute/arcsync.cpp:113 #, c-format msgid "ERROR: Failed to write job information to file (%s)" msgstr "ОШИБКÐ: Сбой запиÑи информации о задаче в файл (%s)" #: src/clients/compute/arcsync.cpp:140 msgid "" "The arcsync command synchronizes your local job list with the information " "at\n" "the given resources or index servers." msgstr "" "Команда arcsync Ñинхронизирует Ваш локальный ÑпиÑок задач Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÐµÐ¹\n" "на заданных клаÑтерах или каталогах реÑурÑов." #: src/clients/compute/arcsync.cpp:180 #, c-format msgid "Warning: Unable to read local list of jobs from file (%s)" msgstr "Предупреждение: Сбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð³Ð¾ ÑпиÑка задач из файла (%s)" #: src/clients/compute/arcsync.cpp:185 #, c-format msgid "Warning: Unable to truncate local list of jobs in file (%s)" msgstr "Предупреждение: Сбой ÑÐ¾ÐºÑ€Ð°Ñ‰ÐµÐ½Ð¸Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð³Ð¾ ÑпиÑка задач в файле (%s)" #: src/clients/compute/arcsync.cpp:191 #, c-format msgid "Warning: Unable to create job list file (%s), jobs list is destroyed" msgstr "" "Предупреждение: Ðевозможно Ñоздать файл ÑпиÑка задач (%s), ÑпиÑок задач " "уничтожен" #: src/clients/compute/arcsync.cpp:195 #, c-format msgid "" "Warning: Failed to write local list of jobs into file (%s), jobs list is " "destroyed" msgstr "" "Предупреждение: Сбой запиÑи ÑпиÑка локальных задач в файл (%s), ÑпиÑок задач " "уничтожен" #: src/clients/compute/arcsync.cpp:215 msgid "" "Synchronizing the local list of active jobs with the information in the\n" "information system can result in some inconsistencies. Very recently " "submitted\n" "jobs might not yet be present, whereas jobs very recently scheduled for\n" "deletion can still be present." msgstr "" "Ð¡Ð¸Ð½Ñ…Ñ€Ð¾Ð½Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð³Ð¾ ÑпиÑка активных задач Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÐµÐ¹ в ÑиÑтеме Грид\n" "может привеÑти к некоторым неÑоответÑтвиÑм: только что запущенные задачи\n" "могут быть ещё не зарегиÑтрированы в ÑиÑтеме, тогда как только что " "удалённые\n" "задачи могут вÑÑ‘ ещё приÑутÑтвовать." 
#: src/clients/compute/arcsync.cpp:220 msgid "Are you sure you want to synchronize your local job list?" msgstr "Ð’Ñ‹ уверены, что хотите Ñинхронизировать ÑпиÑок локальных задач?" #: src/clients/compute/arcsync.cpp:225 msgid "Cancelling synchronization request" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¾ Ñинхронизации отменÑетÑÑ" #: src/clients/compute/arcsync.cpp:243 msgid "" "No services specified. Please configure default services in the client " "configuration, or specify a cluster or index (-c or -g options, see arcsync -" "h)." msgstr "" "Ðе задано ни одного ÑервиÑа. ПожалуйÑта, наÑтройте ÑервиÑÑ‹ по умолчанию в " "файле наÑтроек клиента, либо укажите реÑÑƒÑ€Ñ Ð¸Ð»Ð¸ каталог реÑурÑов (опции -c " "или -g, Ñм. arcsync -h)." #: src/clients/compute/arctest.cpp:57 msgid " " msgstr " " #: src/clients/compute/arctest.cpp:58 msgid "The arctest command is used for testing clusters as resources." msgstr "" "Команда arctest иÑпользуетÑÑ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ клаÑтеров как вычиÑлительных " "реÑурÑов." #: src/clients/compute/arctest.cpp:70 msgid "" "Nothing to do:\n" "you have to either specify a test job id with -J (--job)\n" "or query information about the certificates with -E (--certificate)\n" msgstr "" "Задание не указано:\n" "Ð’Ñ‹ должны либо указать номер теÑтового заданиÑ, иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ Ð¾Ð¿Ñ†Ð¸ÑŽ -J (--job),\n" "либо запроÑить информацию о Ñертификатах, иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ Ð¾Ð¿Ñ†Ð¸ÑŽ -E (--" "certificate)\n" #: src/clients/compute/arctest.cpp:77 msgid "" "For the 1st test job you also have to specify a runtime value with -r (--" "runtime) option." msgstr "" "Ð”Ð»Ñ Ñ‚ÐµÑтовой задачи номер 1 необходимо задать Ð²Ñ€ÐµÐ¼Ñ Ð¸ÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ " "опции -r (--runtime)." #: src/clients/compute/arctest.cpp:111 msgid "Certificate information:" msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ Ñертификате:" #: src/clients/compute/arctest.cpp:115 msgid "No user-certificate found" msgstr "Сертификат Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½Ðµ обнаружен" #: src/clients/compute/arctest.cpp:118 #, c-format msgid "Certificate: %s" msgstr "Сертификат: %s" #: src/clients/compute/arctest.cpp:120 #, c-format msgid "Subject name: %s" msgstr "Ð˜Ð¼Ñ Ñубъекта: %s" #: src/clients/compute/arctest.cpp:121 #, c-format msgid "Valid until: %s" msgstr "ДейÑтвует по: %s" #: src/clients/compute/arctest.cpp:125 msgid "Unable to determine certificate information" msgstr "Ðе удалоÑÑŒ получить информацию о Ñертификате" #: src/clients/compute/arctest.cpp:129 msgid "Proxy certificate information:" msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ доверенноÑти:" #: src/clients/compute/arctest.cpp:131 msgid "No proxy found" msgstr "Ðе удалоÑÑŒ обнаружить доверенноÑть" #: src/clients/compute/arctest.cpp:134 #, c-format msgid "Proxy: %s" msgstr "ДоверенноÑть: %s" #: src/clients/compute/arctest.cpp:135 #, c-format msgid "Proxy-subject: %s" msgstr "Ð˜Ð¼Ñ Ñубъекта доверенноÑти: %s" #: src/clients/compute/arctest.cpp:137 msgid "Valid for: Proxy expired" msgstr "ДоверенноÑть дейÑтвительна на: Срок дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти вышел" #: src/clients/compute/arctest.cpp:139 msgid "Valid for: Proxy not valid" msgstr "ДоверенноÑть дейÑтвительна на: ДоверенноÑть недейÑтвительна" #: src/clients/compute/arctest.cpp:141 #, c-format msgid "Valid for: %s" msgstr "Срок дейÑÑ‚Ð²Ð¸Ñ Ð¸Ñтекает через: %s" #: src/clients/compute/arctest.cpp:146 #, c-format msgid "Certificate issuer: %s" msgstr "Сертификат выдан: %s" #: src/clients/compute/arctest.cpp:150 msgid "CA-certificates installed:" msgstr "УÑтановленные Ñертификаты CA:" #: src/clients/compute/arctest.cpp:172 msgid "Unable to detect if issuer 
certificate is installed." msgstr "Ðе удалоÑÑŒ определить, уÑтановлены ли ключи центра Ñертификации." #: src/clients/compute/arctest.cpp:175 msgid "Your issuer's certificate is not installed" msgstr "Ðе уÑтановлен Ñертификат Вашего центра Ñертификации" #: src/clients/compute/arctest.cpp:189 #, c-format msgid "No test-job, with ID \"%d\"" msgstr "ТеÑÑ‚Ð¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° под номером \"%d\" не ÑущеÑтвует" #: src/clients/compute/arctest.cpp:245 #, c-format msgid "Unable to load broker %s" msgstr "Ðевозможно подгрузить брокер %s" #: src/clients/compute/arctest.cpp:248 #: src/hed/libs/compute/BrokerPlugin.cpp:106 #, c-format msgid "Broker %s loaded" msgstr "Подгружен планировщик %s" #: src/clients/compute/arctest.cpp:270 msgid "Test aborted because no resource returned any information" msgstr "" "Обрыв заÑылки теÑта, Ñ‚.к. ни один из реÑурÑов не предоÑтавил информацию" #: src/clients/compute/arctest.cpp:272 src/clients/compute/submit.cpp:170 msgid "" "Unable to adapt job description to any resource, no resource information " "could be obtained." msgstr "" "Ðе удалоÑÑŒ адаптировать опиÑание задачи ни к одному реÑурÑу, Ñ‚.к. не " "получено никакой информации." #: src/clients/compute/arctest.cpp:273 src/clients/compute/submit.cpp:171 msgid "Original job description is listed below:" msgstr "Изначальное опиÑание задачи приведено ниже:" #: src/clients/compute/arctest.cpp:283 msgid "" "ERROR: Test aborted because no suitable resources were found for the test-job" msgstr "ОШИБКÐ: Обрыв заÑылки теÑта, так как подходÑщих реÑурÑов не найдено" #: src/clients/compute/arctest.cpp:285 msgid "" "ERROR: Dumping job description aborted because no suitable resources were " "found for the test-job" msgstr "" "ОШИБКÐ: Обрыв раÑпечатки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸, так как подходÑщих реÑурÑов не " "найдено" #: src/clients/compute/arctest.cpp:294 #, c-format msgid "Submitting test-job %d:" msgstr "ЗапуÑкаетÑÑ Ñ‚ÐµÑÑ‚Ð¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° %d:" #: src/clients/compute/arctest.cpp:298 #, c-format msgid "Client version: nordugrid-arc-%s" msgstr "ВерÑÐ¸Ñ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ð°: nordugrid-arc-%s" #: src/clients/compute/arctest.cpp:306 #, c-format msgid "Cannot write jobid (%s) to file (%s)" msgstr "Ðевозможно запиÑать Ñрлык задачи (%s) в файл (%s)" #: src/clients/compute/arctest.cpp:307 #, c-format msgid "Test submitted with jobid: %s" msgstr "ТеÑÑ‚ запущен Ñ Ñрлыком: %s" #: src/clients/compute/arctest.cpp:322 #, c-format msgid "Computing service: %s" msgstr "ВычиÑлительный ÑервиÑ: %s" #: src/clients/compute/arctest.cpp:328 msgid "Test failed, no more possible targets" msgstr "Ðе удалоÑÑŒ заÑлать теÑÑ‚, возможные Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¾Ñ‚ÑутÑтвуют" #: src/clients/compute/arctest.cpp:341 src/clients/compute/submit.cpp:49 msgid "To recover missing jobs, run arcsync" msgstr "Ð”Ð»Ñ Ð²Ð¾ÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½ÐµÐ´Ð¾Ñтающих задач, запуÑтите arcsync" #: src/clients/compute/arctest.cpp:354 src/clients/compute/submit.cpp:200 #, c-format msgid "" "Unable to prepare job description according to needs of the target resource " "(%s)." msgstr "" "Ðевозможно адаптировать опиÑание задачи в ÑоответÑтвии Ñ Ñ‚Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñми " "Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ (%s)." 
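The arctest help text above notes that test job 1 additionally requires a runtime value passed with -r/--runtime alongside -J/--job. A small sketch of driving that from Python, assuming the ARC compute clients are on PATH and a usable proxy exists; the runtime value 60 is only illustrative:

    import subprocess

    # Submit built-in test job 1; per the help text above, it also needs a
    # runtime value via -r/--runtime. Assumes the ARC client tools are
    # installed and a valid proxy is in place.
    result = subprocess.run(["arctest", "-J", "1", "-r", "60"],
                            capture_output=True, text=True, check=False)
    print(result.stdout or result.stderr)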
#: src/clients/compute/arctest.cpp:364 src/clients/compute/submit.cpp:216 #, c-format msgid "" "An error occurred during the generation of job description to be sent to %s" msgstr "Возникла ошибка при ÑоÑтавлении опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ Ð´Ð»Ñ Ð·Ð°Ñылки на %s" #: src/clients/compute/arctest.cpp:368 src/clients/compute/submit.cpp:220 #, c-format msgid "Job description to be sent to %s:" msgstr "ОпиÑание задачи Ð´Ð»Ñ Ð¾Ñ‚Ð¿Ñ€Ð°Ð²ÐºÐ¸ на %s:" #: src/clients/compute/submit.cpp:40 #, c-format msgid "Cannot write job IDs to file (%s)" msgstr "Ðевозможно запиÑать Ñрлыки задач в файл (%s)" #: src/clients/compute/submit.cpp:45 #, c-format msgid "Unable to open job list file (%s), unknown format" msgstr "Ðевозможно открыть файл ÑпиÑка задач (%s), формат неизвеÑтен" #: src/clients/compute/submit.cpp:47 #, c-format msgid "Failed to write job information to database (%s)" msgstr "Сбой запиÑи информации о задаче в базу данных (%s)" #: src/clients/compute/submit.cpp:51 #, c-format msgid "Record about new job successfully added to the database (%s)" msgstr "ЗапиÑÑŒ о новой задаче уÑпешно добавлена в базу данных (%s)" #: src/clients/compute/submit.cpp:57 msgid "Job submission summary:" msgstr "Сводка заÑылки задач:" #: src/clients/compute/submit.cpp:59 #, c-format msgid "%d of %d jobs were submitted" msgstr "%d из %d задач были заÑланы" #: src/clients/compute/submit.cpp:61 msgid "The following jobs were not submitted:" msgstr "Следующие задачи не были заÑланы:" #: src/clients/compute/submit.cpp:65 msgid "Job nr." msgstr "Задача номер" #: src/clients/compute/submit.cpp:75 #, c-format msgid "ERROR: Unable to load broker %s" msgstr "ОШИБКÐ: не удалоÑÑŒ подгрузить планировщик %s" #: src/clients/compute/submit.cpp:79 msgid "" "ERROR: Job submission aborted because no resource returned any information" msgstr "" "ОШИБКÐ: Обрыв заÑылки задачи, так как ни один из реÑурÑов не предоÑтавил " "информацию" #: src/clients/compute/submit.cpp:83 msgid "ERROR: One or multiple job descriptions was not submitted." msgstr "ОШИБКÐ: Одна или неÑколько задач не были запущены." #: src/clients/compute/submit.cpp:100 #, c-format msgid "" "A computing resource using the GridFTP interface was requested, but\n" "%sthe corresponding plugin could not be loaded. Is the plugin installed?\n" "%sIf not, please install the package 'nordugrid-arc-plugins-globus'.\n" "%sDepending on your type of installation the package name might differ." msgstr "" "Запрошен вычиÑлительный реÑурÑ, иÑпользующий Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ GridFTP, но " "необходимый\n" "%sподключаемый модуль не был подгружен. УÑтанавливали ли Ð’Ñ‹ Ñтот модуль?\n" "%sЕÑли нет, пожалуйÑта, уÑтановите пакет 'nordugrid-arc-plugins-globus'.\n" "%sÐазвание пакета может завиÑеть от типа вашего диÑтрибутива." #: src/clients/compute/submit.cpp:125 #, c-format msgid "Removing endpoint %s: It has an unrequested interface (%s)." msgstr "УдалÑетÑÑ Ñ‚Ð¾Ñ‡ÐºÐ° входа %s: она Ñодержит ненужный Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ (%s)." #: src/clients/compute/submit.cpp:183 #, c-format msgid "Dumping job description aborted: Unable to load broker %s" msgstr "" "РаÑпечатка опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ оборвана: Ðевозможно подгрузить планировщик %s" #: src/clients/compute/submit.cpp:238 msgid "" "Unable to prepare job description according to needs of the target resource." msgstr "" "Ðевозможно адаптировать опиÑание задачи в ÑоответÑтвии Ñ Ñ‚Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñми " "назначениÑ." 
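Many entries in this stretch are flagged c-format and carry %d/%s directives (for example "%d of %d jobs were submitted"), so a routine QA step for the catalog is checking that each translation keeps exactly the placeholders of its msgid. A sketch using the third-party polib package; the file name ru.po is an assumption:

    import re
    import polib  # assumption: the third-party "polib" package is installed

    FMT = re.compile(r"%[sd]")      # the only directives used in this catalog

    po = polib.pofile("ru.po")      # assumed file name for this catalog
    for entry in po.translated_entries():
        if "c-format" not in entry.flags:
            continue
        # Compare directive sequences; positional printf arguments must keep
        # both the same set and the same order in the translation.
        if FMT.findall(entry.msgid) != FMT.findall(entry.msgstr):
            print("placeholder mismatch:", entry.msgid)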
#: src/clients/compute/submit.cpp:322 src/clients/compute/submit.cpp:352 #, c-format msgid "Service endpoint %s (type %s) added to the list for resource discovery" msgstr "Точка входа ÑервиÑа %s (тип %s) добавлена в ÑпиÑок Ð´Ð»Ñ Ð¿Ð¾Ð¸Ñка реÑурÑов" #: src/clients/compute/submit.cpp:332 msgid "" "There are no endpoints in registry that match requested info endpoint type" msgstr "" "Ð’ учётном ÑпиÑке нет точек входа, ÑоответÑтвующих запрошенному типу точки " "входа информации" #: src/clients/compute/submit.cpp:373 #, c-format msgid "Service endpoint %s (type %s) added to the list for direct submission" msgstr "" "Точка входа ÑервиÑа %s (тип %s) добавлена в ÑпиÑок Ð´Ð»Ñ Ð½ÐµÐ¿Ð¾ÑредÑтвенной " "заÑылки" #: src/clients/compute/submit.cpp:381 msgid "" "There are no endpoints in registry that match requested submission endpoint " "type" msgstr "" "Ð’ учётном ÑпиÑке нет точек входа, ÑоответÑтвующих запрошенному типу точки " "входа заÑылки" #: src/clients/compute/utils.cpp:109 #, c-format msgid "Types of execution services that %s is able to submit jobs to:" msgstr "Типы Ñлужб Ð²Ñ‹Ð¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡, на которые %s может заÑылать задачи:" #: src/clients/compute/utils.cpp:112 #, c-format msgid "Types of registry services that %s is able to collect information from:" msgstr "Типы Ñлужб региÑтрации, в которых %s может получить информацию:" #: src/clients/compute/utils.cpp:115 #, c-format msgid "" "Types of local information services that %s is able to collect information " "from:" msgstr "" "Типы локальных Ñлужб информации, Ñ ÐºÐ¾Ñ‚Ð¾Ñ€Ñ‹Ñ… %s может получить информацию:" #: src/clients/compute/utils.cpp:118 #, c-format msgid "" "Types of local information services that %s is able to collect job " "information from:" msgstr "" "Типы локальных Ñлужб информации, Ñ ÐºÐ¾Ñ‚Ð¾Ñ€Ñ‹Ñ… %s может получить информацию о " "задачах:" #: src/clients/compute/utils.cpp:121 #, c-format msgid "Types of services that %s is able to manage jobs at:" msgstr "Типы Ñлужб,на которых %s может управлÑть задачами:" #: src/clients/compute/utils.cpp:124 #, c-format msgid "Job description languages supported by %s:" msgstr "Следующие Ñзыки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡ поддерживаютÑÑ %s:" #: src/clients/compute/utils.cpp:127 #, c-format msgid "Brokers available to %s:" msgstr "Следующие планировщики доÑтупны Ð´Ð»Ñ %s:" #: src/clients/compute/utils.cpp:150 #, c-format msgid "" "Default broker (%s) is not available. When using %s a broker should be " "specified explicitly (-b option)." msgstr "" "Планировщик по умолчанию (%s) недоÑтупен. При иÑпользовании %s планировщик " "должен быть указан Ñвным образом (Ð¾Ð¿Ñ†Ð¸Ñ -b)." #: src/clients/compute/utils.cpp:160 msgid "Proxy expired. Job submission aborted. Please run 'arcproxy'!" msgstr "" "Срок дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти вышел. ЗаÑылка задачи оборвана. ПожалуйÑта, " "запуÑтите 'arcproxy'!" #: src/clients/compute/utils.cpp:165 msgid "" "Cannot find any proxy. This application currently cannot run without a " "proxy.\n" " If you have the proxy file in a non-default location,\n" " please make sure the path is specified in the client configuration file.\n" " If you don't have a proxy yet, please run 'arcproxy'!" msgstr "" "Ðе удалоÑÑŒ обнаружить доверенноÑть. Это приложение не работает без " "доверенноÑти.\n" " ЕÑли Ваша доверенноÑть хранитÑÑ Ð² неÑтандартном меÑте, пожалуйÑта,\n" " убедитеÑÑŒ, что в наÑтройках клиента указан правильный путь.\n" " ЕÑли же Ð’Ñ‹ пока не Ñоздали доверенноÑть, запуÑтите 'arcproxy'!" 
#: src/clients/compute/utils.cpp:277 msgid "" "It is impossible to mix ARC6 target selection options with legacy options. " "All legacy options will be ignored!" msgstr "" "Смешивать опции выбора реÑурÑа ARC6 Ñ ÑƒÑтаревшими опциÑми нельзÑ. Ð’Ñе " "уÑтаревшие опции будут проигнорированы!" #: src/clients/compute/utils.cpp:345 #, c-format msgid "Unsupported submission endpoint type: %s" msgstr "Ðеподдерживаемый тип точки входа заÑылки: %s" #: src/clients/compute/utils.cpp:383 #, c-format msgid "" "Requested to skip resource discovery. Will try direct submission to %s and " "%s submission endpoint types" msgstr "" "ПоÑтупил Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ñ€Ð¾Ð¿ÑƒÑтить поиÑк реÑурÑов. Будет произведена попытка " "непоÑредÑтвенной заÑылки на точки входа заÑылки типа %s и %s" #: src/clients/compute/utils.cpp:389 #, c-format msgid "Unsupported information endpoint type: %s" msgstr "Ðеподдерживаемый тип точки входа информации: %s" #: src/clients/compute/utils.cpp:434 msgid "Other actions" msgstr "Другие дейÑтвиÑ" #: src/clients/compute/utils.cpp:435 msgid "Brokering and filtering" msgstr "Планировка и выборка" #: src/clients/compute/utils.cpp:436 msgid "Output format modifiers" msgstr "Варианты Ñ„Ð¾Ñ€Ð¼Ð°Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð²Ñ‹Ð´Ð°Ñ‡Ð¸" #: src/clients/compute/utils.cpp:437 msgid "Behaviour tuning" msgstr "ÐаÑтройки поведениÑ" #: src/clients/compute/utils.cpp:438 msgid "ARC6 submission endpoint selection" msgstr "Выбор точки входа Ð´Ð»Ñ Ð·Ð°Ñылки задач ARC6" #: src/clients/compute/utils.cpp:439 msgid "Legacy options set for defining targets" msgstr "УÑтаревшие варианты Ð¾Ð¿Ñ€ÐµÐ´ÐµÐ»ÐµÐ½Ð¸Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ð¹" #: src/clients/compute/utils.cpp:443 msgid "specify computing element hostname or a complete endpoint URL" msgstr "указать Ð¸Ð¼Ñ Ñервера вычиÑлительного реÑурÑа или полный URL точки входа" #: src/clients/compute/utils.cpp:444 msgid "ce" msgstr "ce" #: src/clients/compute/utils.cpp:448 msgid "registry service URL with optional specification of protocol" msgstr "URL Ñлужбы учёта Ñ Ð½ÐµÐ¾Ð±Ñзательным указанием протокола" #: src/clients/compute/utils.cpp:449 msgid "registry" msgstr "учётный ÑпиÑок" #: src/clients/compute/utils.cpp:455 msgid "" "require the specified endpoint type for job submission.\n" "\tAllowed values are: arcrest, emies, gridftp or gridftpjob and internal." msgstr "" "потребовать указанный тип точки входа Ð´Ð»Ñ Ð·Ð°Ð¿ÑƒÑка задачи.\n" "\tДопуÑтимые типы: arcrest, emies, gridftp или gridftpjob и internal." #: src/clients/compute/utils.cpp:457 src/clients/compute/utils.cpp:464 msgid "type" msgstr "тип" #: src/clients/compute/utils.cpp:461 msgid "" "require information query using the specified information endpoint type.\n" "\tSpecial value 'NONE' will disable all resource information queries and the " "following brokering.\n" "\tAllowed values are: ldap.nordugrid, ldap.glue2, emies, arcrest and " "internal." msgstr "" "потребовать поиÑк информации иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¹ тип точки входа " "информации.\n" "\tСпециальное значение 'NONE' предотвратит любой поиÑк информации и " "поÑледующую планировку\n" "\tДопуÑтимые значениÑ: ldap.nordugrid, ldap.glue2, emies, arcrest и internal." 
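The two option descriptions just above enumerate the accepted submission and information endpoint types, including the special 'NONE' value that disables resource information queries. Collected into a small validation sketch, purely for illustration; the helper function is made up and not part of the ARC client code:

    # Endpoint-type values quoted in the two option descriptions above.
    SUBMISSION_TYPES = {"arcrest", "emies", "gridftp", "gridftpjob", "internal"}
    INFO_TYPES = {"ldap.nordugrid", "ldap.glue2", "emies", "arcrest",
                  "internal", "NONE"}

    def is_known_endpoint_type(value, allowed):
        """Return True if the endpoint type appears in the help text's list."""
        return value in allowed

    print(is_known_endpoint_type("emies", SUBMISSION_TYPES))   # True
    print(is_known_endpoint_type("NONE", INFO_TYPES))          # True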
#: src/clients/compute/utils.cpp:470 msgid "" "select one or more computing elements: name can be an alias for a single CE, " "a group of CEs or a URL" msgstr "" "указать один или более вычиÑлительных реÑурÑов: Ð¸Ð¼Ñ Ð¼Ð¾Ð¶ÐµÑ‚ быть Ñокращением " "Ð´Ð»Ñ Ð¾Ð´Ð½Ð¾Ð³Ð¾ реÑурÑа, группы реÑурÑов, или URL" #: src/clients/compute/utils.cpp:472 src/clients/compute/utils.cpp:477 #: src/clients/compute/utils.cpp:494 src/clients/compute/utils.cpp:614 msgid "name" msgstr "имÑ" #: src/clients/compute/utils.cpp:476 msgid "only select jobs that were submitted to this resource" msgstr "выбрать лишь задачи, заÑланные на Ñтот реÑурÑ" #: src/clients/compute/utils.cpp:483 msgid "" "the computing element specified by URL at the command line should be queried " "using this information interface.\n" "\tAllowed values are: org.nordugrid.ldapng, org.nordugrid.ldapglue2 and org." "ogf.glue.emies.resourceinfo" msgstr "" "вычиÑлительный реÑурÑ, заданный URL в командной Ñтроке, должен быть опрошен " "иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¹ информационный интерфейÑ.\n" "\tДопуÑтимые значениÑ: org.nordugrid.ldapng, org.nordugrid.ldapglue2 и org." "ogf.glue.emies.resourceinfo" #: src/clients/compute/utils.cpp:486 msgid "interfacename" msgstr "interfacename" #: src/clients/compute/utils.cpp:492 msgid "" "selecting a computing element for the new jobs with a URL or an alias, or " "selecting a group of computing elements with the name of the group" msgstr "" "выбор вычиÑлительного реÑурÑа Ð´Ð»Ñ Ð½Ð¾Ð²Ñ‹Ñ… задач Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ URL или ÑокращениÑ, " "или выбор группы Ñлементов Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ Ð½Ð°Ð·Ð²Ð°Ð½Ð¸Ñ Ð³Ñ€ÑƒÐ¿Ð¿Ñ‹" #: src/clients/compute/utils.cpp:500 msgid "force migration, ignore kill failure" msgstr "Ð¿Ñ€Ð¸Ð½ÑƒÐ´Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð¼Ð¸Ð³Ñ€Ð°Ñ†Ð¸Ñ, игнорируетÑÑ Ñбой прерываниÑ" #: src/clients/compute/utils.cpp:506 msgid "keep the files on the server (do not clean)" msgstr "ÑохранÑть файлы на Ñервере (не удалÑть)" #: src/clients/compute/utils.cpp:512 msgid "do not ask for verification" msgstr "не запрашивать подтверждениÑ" #: src/clients/compute/utils.cpp:516 msgid "truncate the joblist before synchronizing" msgstr "Ñжать ÑпиÑок задач перед Ñинхронизацией" #: src/clients/compute/utils.cpp:520 msgid "do not collect information, only convert jobs storage format" msgstr "не Ñобирать информацию, а лишь конвертировать формат Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡" #: src/clients/compute/utils.cpp:526 src/clients/data/arcls.cpp:288 msgid "long format (more information)" msgstr "раÑширенный формат (Ð´Ð¾Ð¿Ð¾Ð»Ð½Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ)" #: src/clients/compute/utils.cpp:532 msgid "print a list of services configured in the client.conf" msgstr "вывеÑти ÑпиÑок Ñлужб, наÑтроенных в client.conf" #: src/clients/compute/utils.cpp:538 msgid "show the stdout of the job (default)" msgstr "вывеÑти Ñтандартный выход задачи (по умолчанию)" #: src/clients/compute/utils.cpp:542 msgid "show the stderr of the job" msgstr "вывеÑти Ñтандартную ошибку задачи" #: src/clients/compute/utils.cpp:546 msgid "show the CE's error log of the job" msgstr "вывеÑти ошибки ÑиÑтемы при иÑполнении задачи" #: src/clients/compute/utils.cpp:550 msgid "show the specified file from job's session directory" msgstr "показать заданный файл из рабочего каталога задачи" #: src/clients/compute/utils.cpp:551 msgid "filepath" msgstr "путь к файлу" #: src/clients/compute/utils.cpp:557 msgid "" "download directory (the job directory will be created in this directory)" msgstr "каталог загрузки (подкаталог задачи будет Ñоздан в Ñтом каталоге)" #: src/clients/compute/utils.cpp:559 
msgid "dirname" msgstr "каталог" #: src/clients/compute/utils.cpp:563 msgid "use the jobname instead of the short ID as the job directory name" msgstr "" "иÑпользовать Ð¸Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ вмеÑто краткого идентификатора в качеÑтве Ð½Ð°Ð·Ð²Ð°Ð½Ð¸Ñ " "каталога" #: src/clients/compute/utils.cpp:568 msgid "force download (overwrite existing job directory)" msgstr "Ð¿Ñ€Ð¸Ð½ÑƒÐ´Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ° (перезапиÑать ÑущеÑтвующий каталог задачи)" #: src/clients/compute/utils.cpp:574 msgid "instead of the status only the IDs of the selected jobs will be printed" msgstr "вмеÑто ÑоÑтоÑÐ½Ð¸Ñ Ð±ÑƒÐ´ÑƒÑ‚ выведены только Ñрлыки указанных задач" #: src/clients/compute/utils.cpp:578 msgid "sort jobs according to jobid, submissiontime or jobname" msgstr "Ñортировать задачи по идентификатору, времени запуÑка или имени" #: src/clients/compute/utils.cpp:579 src/clients/compute/utils.cpp:582 msgid "order" msgstr "порÑдок" #: src/clients/compute/utils.cpp:581 msgid "reverse sorting of jobs according to jobid, submissiontime or jobname" msgstr "" "Ñортировать задачи в обратном порÑдке по идентификатору, времени запуÑка или " "имени" #: src/clients/compute/utils.cpp:585 msgid "show jobs where status information is unavailable" msgstr "перечиÑлить задачи, Ð´Ð»Ñ ÐºÐ¾Ñ‚Ð¾Ñ€Ñ‹Ñ… отÑутÑтвует Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ ÑоÑтоÑнии" #: src/clients/compute/utils.cpp:589 msgid "show status information in JSON format" msgstr "вывеÑти информацию о ÑоÑтоÑнии в формате JSON" #: src/clients/compute/utils.cpp:595 msgid "resubmit to the same resource" msgstr "заÑлать заново на тот же реÑурÑ" #: src/clients/compute/utils.cpp:599 msgid "do not resubmit to the same resource" msgstr "не перезаÑылать на тот же реÑурÑ" #: src/clients/compute/utils.cpp:605 msgid "" "remove the job from the local list of jobs even if the job is not found in " "the infosys" msgstr "" "удалить задачу из локального ÑпиÑка, даже еÑли Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ ней \n" "отÑутÑтвует" #: src/clients/compute/utils.cpp:612 msgid "" "select one or more registries: name can be an alias for a single registry, a " "group of registries or a URL" msgstr "" "выбрать один или неÑколько рееÑтров: Ð¸Ð¼Ñ Ð¼Ð¾Ð¶ÐµÑ‚ быть Ñокращением Ð´Ð»Ñ Ð¾Ð´Ð½Ð¾Ð³Ð¾ " "рееÑтра, группы рееÑтров, или URL" #: src/clients/compute/utils.cpp:620 msgid "submit test job given by the number" msgstr "апуÑтить теÑтовую задачу под ÑоответÑтвующим номером" #: src/clients/compute/utils.cpp:621 src/clients/compute/utils.cpp:625 msgid "int" msgstr "чиÑло" #: src/clients/compute/utils.cpp:624 msgid "test job runtime specified by the number" msgstr "Ð²Ñ€ÐµÐ¼Ñ Ð¸ÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ñ‚ÐµÑтовой задачи" #: src/clients/compute/utils.cpp:631 msgid "only select jobs whose status is statusstr" msgstr "выполнить дейÑтвие лишь над задачами в указанном ÑоÑтоÑнии" #: src/clients/compute/utils.cpp:632 msgid "statusstr" msgstr "ÑоÑтоÑние" #: src/clients/compute/utils.cpp:638 msgid "all jobs" msgstr "вÑе задачи" #: src/clients/compute/utils.cpp:644 msgid "jobdescription string describing the job to be submitted" msgstr "Ñтрока, ÑÐ¾Ð´ÐµÑ€Ð¶Ð°Ñ‰Ð°Ñ Ð¾Ð¿Ð¸Ñание запуÑкаемой задачи" #: src/clients/compute/utils.cpp:646 src/clients/compute/utils.cpp:652 #: src/clients/credentials/arcproxy.cpp:345 #: src/clients/credentials/arcproxy.cpp:352 #: src/clients/credentials/arcproxy.cpp:371 #: src/clients/credentials/arcproxy.cpp:378 #: src/clients/credentials/arcproxy.cpp:396 #: src/clients/credentials/arcproxy.cpp:400 #: src/clients/credentials/arcproxy.cpp:415 #: src/clients/credentials/arcproxy.cpp:425 #: 
src/clients/credentials/arcproxy.cpp:429 msgid "string" msgstr "Ñтрока" #: src/clients/compute/utils.cpp:650 msgid "jobdescription file describing the job to be submitted" msgstr "файл, Ñодержащий опиÑание запуÑкаемой задачи" #: src/clients/compute/utils.cpp:658 msgid "select broker method (list available brokers with --listplugins flag)" msgstr "" "выбрать ÑпоÑоб планировки (ÑпиÑок доÑтупных планировщиков выводитÑÑ Ð¾Ð¿Ñ†Ð¸ÐµÐ¹ --" "listplugins)" #: src/clients/compute/utils.cpp:659 msgid "broker" msgstr "планировщик" #: src/clients/compute/utils.cpp:662 msgid "the IDs of the submitted jobs will be appended to this file" msgstr "Ñрлыки запущенных задач будут занеÑены в Ñтот файл" #: src/clients/compute/utils.cpp:663 src/clients/compute/utils.cpp:685 #: src/clients/compute/utils.cpp:722 src/clients/compute/utils.cpp:730 #: src/clients/credentials/arcproxy.cpp:438 src/clients/data/arccp.cpp:627 #: src/clients/data/arcls.cpp:333 src/clients/data/arcmkdir.cpp:111 #: src/clients/data/arcrename.cpp:122 src/clients/data/arcrm.cpp:137 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:52 msgid "filename" msgstr "файл" #: src/clients/compute/utils.cpp:667 msgid "" "only use this interface for submitting.\n" "\tAllowed values are: org.nordugrid.gridftpjob or org.nordugrid.gridftp, org." "ogf.glue.emies.activitycreation and org.nordugrid.internal" msgstr "" "иÑпользовать только указанный Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð´Ð»Ñ Ð·Ð°Ñылки.\n" "\tДопуÑтимые значениÑ: org.nordugrid.gridftpjob или org.nordugrid.gridftp, " "org.ogf.glue.emies.activitycreation и org.nordugrid.internal" #: src/clients/compute/utils.cpp:669 src/clients/compute/utils.cpp:711 msgid "InterfaceName" msgstr "InterfaceName" #: src/clients/compute/utils.cpp:676 msgid "skip the service with the given URL during service discovery" msgstr "пропуÑтить Ñлужбу Ñ Ñтим URL при обнаружении Ñлужб" #: src/clients/compute/utils.cpp:677 src/clients/compute/utils.cpp:690 #: src/clients/data/arccp.cpp:607 msgid "URL" msgstr "URL" #: src/clients/compute/utils.cpp:684 msgid "a file containing a list of jobIDs" msgstr "файл, Ñодержащий Ñрлыки задач" #: src/clients/compute/utils.cpp:689 msgid "skip jobs that are on a computing element with a given URL" msgstr "" "пропуÑтить задачи, находÑщиеÑÑ Ð½Ð° вычиÑлительном реÑурÑе Ñ Ð·Ð°Ð´Ð°Ð½Ð½Ñ‹Ð¼ URL" #: src/clients/compute/utils.cpp:695 msgid "submit jobs as dry run (no submission to batch system)" msgstr "запуÑк задач в режиме холоÑтой прогонки (без заÑылки на Ñчёт)" #: src/clients/compute/utils.cpp:698 msgid "submit directly - no resource discovery or matchmaking" msgstr "запуÑтить напрÑмую, без Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ Ð¸ проверки ÑоответÑÑ‚Ð²Ð¸Ñ Ñ€ÐµÑурÑов" #: src/clients/compute/utils.cpp:702 msgid "" "do not submit - dump job description in the language accepted by the target" msgstr "" "не выполнÑть заÑылку: раÑпечатка опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ на Ñзыке, приемлемом " "назначением" #: src/clients/compute/utils.cpp:709 msgid "" "only get information about executon targets that support this job submission " "interface.\n" "\tAllowed values are org.nordugrid.gridftpjob or org.nordugrid.gridftp, org." 
"ogf.glue.emies.activitycreation and org.nordugrid.internal" msgstr "" "получить информацию только о тех вычиÑлительных реÑурÑах, которые " "поддерживают указанный Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð´Ð»Ñ Ð·Ð°Ñылки задач.\n" "\tДопуÑтимые значениÑ: org.nordugrid.gridftpjob или org.nordugrid.gridftp, " "org.ogf.glue.emies.activitycreation и org.nordugrid.internal" #: src/clients/compute/utils.cpp:716 msgid "prints info about installed user- and CA-certificates" msgstr "" "вывеÑти информацию об уÑтановленных Ñертификатах Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð¸ " "Ñертификационных агентÑтв" #: src/clients/compute/utils.cpp:721 #, c-format msgid "the file storing information about active jobs (default %s)" msgstr "файл Ñ Ð·Ð°Ð¿Ð¸Ñью информации о задачах на Ñчёте (по умолчанию %s)" #: src/clients/compute/utils.cpp:729 src/clients/credentials/arcproxy.cpp:437 #: src/clients/data/arccp.cpp:626 src/clients/data/arcls.cpp:332 #: src/clients/data/arcmkdir.cpp:110 src/clients/data/arcrename.cpp:121 #: src/clients/data/arcrm.cpp:136 msgid "configuration file (default ~/.arc/client.conf)" msgstr "файл наÑтроек (по умолчанию ~/.arc/client.conf)" #: src/clients/compute/utils.cpp:732 src/clients/credentials/arcproxy.cpp:432 #: src/clients/data/arccp.cpp:621 src/clients/data/arcls.cpp:327 #: src/clients/data/arcmkdir.cpp:105 src/clients/data/arcrename.cpp:116 #: src/clients/data/arcrm.cpp:131 msgid "timeout in seconds (default 20)" msgstr "Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð² Ñекундах (по умолчанию 20)" #: src/clients/compute/utils.cpp:733 src/clients/credentials/arcproxy.cpp:433 #: src/clients/data/arccp.cpp:622 src/clients/data/arcls.cpp:328 #: src/clients/data/arcmkdir.cpp:106 src/clients/data/arcrename.cpp:117 #: src/clients/data/arcrm.cpp:132 msgid "seconds" msgstr "Ñекунд(а/Ñ‹)" #: src/clients/compute/utils.cpp:736 msgid "list the available plugins" msgstr "перечиÑление доÑтупных подключаемых модулей" #: src/clients/compute/utils.cpp:740 src/clients/credentials/arcproxy.cpp:442 #: src/clients/data/arccp.cpp:631 src/clients/data/arcls.cpp:337 #: src/clients/data/arcmkdir.cpp:115 src/clients/data/arcrename.cpp:126 #: src/clients/data/arcrm.cpp:141 #: src/hed/libs/compute/test_jobdescription.cpp:38 #: src/services/a-rex/grid-manager/gm_jobs.cpp:190 #: src/services/a-rex/grid-manager/inputcheck.cpp:81 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:66 msgid "FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG" msgstr "FATAL, ERROR, WARNING, INFO, VERBOSE или DEBUG" #: src/clients/compute/utils.cpp:741 src/clients/credentials/arcproxy.cpp:443 #: src/clients/data/arccp.cpp:632 src/clients/data/arcls.cpp:338 #: src/clients/data/arcmkdir.cpp:116 src/clients/data/arcrename.cpp:127 #: src/clients/data/arcrm.cpp:142 #: src/hed/libs/compute/test_jobdescription.cpp:39 #: src/services/a-rex/grid-manager/gm_jobs.cpp:191 #: src/services/a-rex/grid-manager/inputcheck.cpp:82 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:67 msgid "debuglevel" msgstr "уровень" #: src/clients/compute/utils.cpp:743 src/clients/credentials/arcproxy.cpp:446 #: src/clients/data/arccp.cpp:635 src/clients/data/arcls.cpp:341 #: src/clients/data/arcmkdir.cpp:119 src/clients/data/arcrename.cpp:130 #: src/clients/data/arcrm.cpp:145 msgid "print version information" msgstr "вывеÑти информацию о верÑии" #: src/clients/credentials/arcproxy.cpp:146 #: src/hed/libs/credential/ARCProxyUtil.cpp:1216 #, c-format msgid "There are %d user certificates existing in the NSS database" msgstr "Ð’ базе данных NSS обнаружено %d Ñертификата пользователÑ" #: 
src/clients/credentials/arcproxy.cpp:162 #: src/hed/libs/credential/ARCProxyUtil.cpp:1232 #, c-format msgid "Number %d is with nickname: %s%s" msgstr "Ðомер %d Ñ ÐºÑ€Ð°Ñ‚ÐºÐ¸Ð¼ именем: %s%s" #: src/clients/credentials/arcproxy.cpp:171 #: src/hed/libs/credential/ARCProxyUtil.cpp:1241 #, c-format msgid " expiration time: %s " msgstr " дейÑтвителен до: %s " #: src/clients/credentials/arcproxy.cpp:175 #: src/hed/libs/credential/ARCProxyUtil.cpp:1245 #, c-format msgid " certificate dn: %s" msgstr " DN Ñертификата: %s" #: src/clients/credentials/arcproxy.cpp:176 #: src/hed/libs/credential/ARCProxyUtil.cpp:1246 #, c-format msgid " issuer dn: %s" msgstr " DN Ñмитента: %s" #: src/clients/credentials/arcproxy.cpp:177 #: src/hed/libs/credential/ARCProxyUtil.cpp:1247 #, c-format msgid " serial number: %d" msgstr " Серийный номер: %d" #: src/clients/credentials/arcproxy.cpp:181 #: src/hed/libs/credential/ARCProxyUtil.cpp:1251 #, c-format msgid "Please choose the one you would use (1-%d): " msgstr "ПожалуйÑта, выберите то, что будет иÑпользоватьÑÑ (1-%d): " #: src/clients/credentials/arcproxy.cpp:246 msgid "" "The arcproxy command creates a proxy from a key/certificate pair which can\n" "then be used to access grid resources." msgstr "" "Команда arcproxy Ñоздаёт доверенноÑть из пары закрытый/открытый ключ\n" "Ð´Ð»Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð´Ð¾Ñтупа к гриду." #: src/clients/credentials/arcproxy.cpp:248 msgid "" "Supported constraints are:\n" " validityStart=time (e.g. 2008-05-29T10:20:30Z; if not specified, start " "from now)\n" " validityEnd=time\n" " validityPeriod=time (e.g. 43200 or 12h or 12H; if both validityPeriod and " "validityEnd\n" " not specified, the default is 12 hours for local proxy, and 168 hours " "for delegated\n" " proxy on myproxy server)\n" " vomsACvalidityPeriod=time (e.g. 43200 or 12h or 12H; if not specified, the " "default\n" " is the minimum value of 12 hours and validityPeriod)\n" " myproxyvalidityPeriod=time (lifetime of proxies delegated by myproxy " "server,\n" " e.g. 43200 or 12h or 12H; if not specified, the default is the minimum " "value of\n" " 12 hours and validityPeriod (which is lifetime of the delegated proxy on " "myproxy server))\n" " proxyPolicy=policy content\n" " proxyPolicyFile=policy file\n" " keybits=number - length of the key to generate. Default is 2048 bits.\n" " Special value 'inherit' is to use key length of signing certificate.\n" " signingAlgorithm=name - signing algorithm to use for signing public key of " "proxy.\n" " Possible values are sha1, sha2 (alias for sha256), sha224, sha256, " "sha384, sha512\n" " and inherit (use algorithm of signing certificate). 
Default is inherit.\n" " With old systems, only sha1 is acceptable.\n" "\n" "Supported information item names are:\n" " subject - subject name of proxy certificate.\n" " identity - identity subject name of proxy certificate.\n" " issuer - issuer subject name of proxy certificate.\n" " ca - subject name of CA which issued initial certificate.\n" " path - file system path to file containing proxy.\n" " type - type of proxy certificate.\n" " validityStart - timestamp when proxy validity starts.\n" " validityEnd - timestamp when proxy validity ends.\n" " validityPeriod - duration of proxy validity in seconds.\n" " validityLeft - duration of proxy validity left in seconds.\n" " vomsVO - VO name represented by VOMS attribute\n" " vomsSubject - subject of certificate for which VOMS attribute is issued\n" " vomsIssuer - subject of service which issued VOMS certificate\n" " vomsACvalidityStart - timestamp when VOMS attribute validity starts.\n" " vomsACvalidityEnd - timestamp when VOMS attribute validity ends.\n" " vomsACvalidityPeriod - duration of VOMS attribute validity in seconds.\n" " vomsACvalidityLeft - duration of VOMS attribute validity left in seconds.\n" " proxyPolicy\n" " keybits - size of proxy certificate key in bits.\n" " signingAlgorithm - algorithm used to sign proxy certificate.\n" "Items are printed in requested order and are separated by newline.\n" "If item has multiple values they are printed in same line separated by |.\n" "\n" "Supported password destinations are:\n" " key - for reading private key\n" " myproxy - for accessing credentials at MyProxy service\n" " myproxynew - for creating credentials at MyProxy service\n" " all - for any purspose.\n" "\n" "Supported password sources are:\n" " quoted string (\"password\") - explicitly specified password\n" " int - interactively request password from console\n" " stdin - read password from standard input delimited by newline\n" " file:filename - read password from file named filename\n" " stream:# - read password from input stream number #.\n" " Currently only 0 (standard input) is supported.\n" msgstr "" "Поддерживаемые ограничениÑ:\n" " validityStart=Ð²Ñ€ÐµÐ¼Ñ (например, 2008-05-29T10:20:30Z; еÑли не указано, то " "начинаетÑÑ Ð½ÐµÐ¼ÐµÐ´Ð»ÐµÐ½Ð½Ð¾)\n" " validityEnd=времÑ\n" " validityPeriod=Ð²Ñ€ÐµÐ¼Ñ (например, 43200, или 12h, или 12H; еÑли не указаны " "ни validityPeriod,\n" " ни validityEnd, то Ñрок дейÑÑ‚Ð²Ð¸Ñ Ð¿Ð¾ умолчанию ÑоÑтавлÑет 12 чаÑов Ð´Ð»Ñ " "локальной доверенноÑти,\n" " и 168 чаÑов Ð´Ð»Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð¾Ð¹ доверенноÑти на Ñервере MyProxy)\n" " vomsACvalidityPeriod=Ð²Ñ€ÐµÐ¼Ñ (например, 43200, или 12h, или 12H; еÑли не " "указано, то иÑпользуетÑÑ\n" " наименьшее между 12 чаÑами и значением validityPeriod)\n" " myproxyvalidityPeriod=Ð²Ñ€ÐµÐ¼Ñ (Ñрок годноÑти доверенноÑти, делегированной " "через Ñервер MyProxy\n" " например, 43200, или 12h, или 12H; еÑли не указано, то иÑпользуетÑÑ " "наименьшее между 12 чаÑами\n" " и значением validityPeriod - Ñроком годноÑти доверенноÑти, " "делегированной через Ñервер MyProxy)\n" " proxyPolicy=Ñодержимое политики\n" " proxyPolicyFile=файл политики\n" " keybits=чиÑло - длина генерируемого ключа. По умолчанию - 2048 бит.\n" " Специальное значение 'inherit' означает иÑпользование длины ключа " "подпиÑывающего Ñертификата.\n" " signingAlgorithm=название - алгоритм, иÑпользуемый Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸ÑÐ°Ð½Ð¸Ñ " "открытого ключа или доверенноÑти.\n" " По умолчанию - sha1. 
Возможные значениÑ: sha1, sha2 (Ñокращение от " "sha256), sha224, sha256, sha384,\n" " sha512 и inherit (иÑпользовать алгоритм подпиÑывающего Ñертификата). По " "умолчанию иÑпользуетÑÑ inherit.\n" " Старые ÑиÑтемы поддерживают лишь sha1.\n" "\n" "Поддерживаемые Ð¿Ð¾Ð»Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸:\n" " subject - Ð¸Ð¼Ñ Ñубъекта доверенноÑти.\n" " identity - идентифицируемое Ð¸Ð¼Ñ Ñубъекта доверенноÑти.\n" " issuer - Ð¸Ð¼Ñ Ñубъекта, выдавшего доверенноÑть.\n" " ca - Ð¸Ð¼Ñ Ñубъекта агентÑтва, выдавшего иÑходный Ñертификат\n" " path - локальный путь к файлу, Ñодержащему доверенноÑть.\n" " type - тип доверенноÑти.\n" " validityStart - Ð²Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð° дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти.\n" " validityEnd - Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ Ð´ÐµÐ¹ÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти.\n" " validityPeriod - продолжительноÑть годноÑти доверенноÑти в Ñекундах.\n" " validityLeft - оÑтавшаÑÑÑ Ð¿Ñ€Ð¾Ð´Ð¾Ð»Ð¶Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ñть годноÑти доверенноÑти в " "Ñекундах.\n" " vomsVO - Ð¸Ð¼Ñ Ð²Ð¸Ñ€Ñ‚ÑƒÐ°Ð»ÑŒÐ½Ð¾Ð¹ организации, указанное в атрибуте VOMS.\n" " vomsSubject - Ñубъект Ñертификата, которому был приÑвоен атрибут VOMS.\n" " vomsIssuer - Ñубъект Ñлужбы, выдавшей Ñертификат VOMS.\n" " vomsACvalidityStart - Ð²Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð° дейÑÑ‚Ð²Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° VOMS.\n" " vomsACvalidityEnd - Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ Ð´ÐµÐ¹ÑÑ‚Ð²Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° VOMS.\n" " vomsACvalidityPeriod - продолжительноÑть годноÑти атрибута VOMS в " "Ñекундах.\n" " vomsACvalidityLeft - оÑтавшаÑÑÑ Ð¿Ñ€Ð¾Ð´Ð¾Ð»Ð¶Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ñть годноÑти атрибута VOMS в " "Ñекундах.\n" " proxyPolicy - Ñодержимое политики\n" " keybits - длина ключа доверенноÑти в битах.\n" " signingAlgorithm - алгоритм, иÑпользуемый при подпиÑи Ñертификата.\n" "Ð—Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð²Ñ‹Ð²Ð¾Ð´ÑÑ‚ÑÑ Ð² порÑдке запроÑа, каждое Ñ Ð½Ð¾Ð²Ð¾Ð¹ Ñтроки.\n" "ЕÑли полю ÑоответÑтвуют неÑколько значений, они выводÑÑ‚ÑÑ Ð² Ñтроку и " "разделÑÑŽÑ‚ÑÑ |.\n" "\n" "Поддерживаемые Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¿Ð°Ñ€Ð¾Ð»ÐµÐ¹:\n" " key - Ð´Ð»Ñ Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ñ‹Ñ… ключей\n" " myproxy - Ð´Ð»Ñ Ð´Ð¾Ñтупа к Ñертификатам на Ñервере MyProxy\n" " myproxynew - Ð´Ð»Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñертификатов на Ñервере MyProxy\n" " all - Ñ Ð»ÑŽÐ±Ð¾Ð¹ целью.\n" "\n" "Поддерживаемые иÑточники паролей:\n" " quoted string (\"password\") - Ñвно указанный пароль\n" " int - интерактивный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° ввод Ð¿Ð°Ñ€Ð¾Ð»Ñ Ñ Ñ‚ÐµÑ€Ð¼Ð¸Ð½Ð°Ð»Ð°\n" " stdin - чтение Ð¿Ð°Ñ€Ð¾Ð»Ñ Ñо Ñтандартного ввода по переводу Ñтроки\n" " file:filename - чтение Ð¿Ð°Ñ€Ð¾Ð»Ñ Ð¸Ð· файла filename\n" " stream:# - чтение Ð¿Ð°Ñ€Ð¾Ð»Ñ Ð¸Ð· входного потока номер #.\n" " Ðа текущий момент поддерживаетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ 0 (Ñтандартный ввод).\n" #: src/clients/credentials/arcproxy.cpp:308 msgid "path to the proxy file" msgstr "путь к файлу доверенноÑти" #: src/clients/credentials/arcproxy.cpp:309 #: src/clients/credentials/arcproxy.cpp:313 #: src/clients/credentials/arcproxy.cpp:317 #: src/clients/credentials/arcproxy.cpp:321 #: src/clients/credentials/arcproxy.cpp:325 #: src/clients/credentials/arcproxy.cpp:329 src/clients/data/arccp.cpp:584 msgid "path" msgstr "путь" #: src/clients/credentials/arcproxy.cpp:312 msgid "" "path to the certificate file, it can be either PEM, DER, or PKCS12 formatted" msgstr "" "путь к файлу Ñертификата, который может быть в формате PEM, DER, или PKCS12" #: src/clients/credentials/arcproxy.cpp:316 msgid "" "path to the private key file, if the certificate is in PKCS12 format, then " "no need to give private key" msgstr "" "путь к закрытому ключу; еÑли Ñертификат указан в формате PKCS12, закрытый " "ключ не нужен" #: 
src/clients/credentials/arcproxy.cpp:320 msgid "" "path to the trusted certificate directory, only needed for the VOMS client " "functionality" msgstr "" "путь к каталогу Ñ Ð´Ð¾Ð²ÐµÑ€Ñемыми Ñертификатами, иÑпользуетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ клиентом " "VOMS" #: src/clients/credentials/arcproxy.cpp:324 msgid "" "path to the top directory of VOMS *.lsc files, only needed for the VOMS " "client functionality" msgstr "" "путь к корневому каталогу Ñ Ñ„Ð°Ð¹Ð»Ð°Ð¼Ð¸ VOMS *.lsc, иÑпользуетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ клиентом " "VOMS" #: src/clients/credentials/arcproxy.cpp:328 msgid "path to the VOMS server configuration file" msgstr "путь к файлу наÑтроек Ñерверов VOMS" #: src/clients/credentials/arcproxy.cpp:332 msgid "" "voms<:command>. Specify VOMS server (More than one VOMS server \n" " can be specified like this: --voms VOa:command1 --voms VOb:" "command2). \n" " :command is optional, and is used to ask for specific " "attributes(e.g: roles)\n" " command options are:\n" " all --- put all of this DN's attributes into AC; \n" " list ---list all of the DN's attribute, will not create AC " "extension; \n" " /Role=yourRole --- specify the role, if this DN \n" " has such a role, the role will be put into " "AC; \n" " /voname/groupname/Role=yourRole --- specify the VO, group and " "role; if this DN \n" " has such a role, the role will be put into " "AC. \n" " If this option is not specified values from configuration " "files are used.\n" " To avoid anything to be used specify -S with empty value.\n" msgstr "" "voms<:инÑтрукциÑ>. ОпиÑание Ñервера VOMS (неÑколько Ñерверов задаютÑÑ\n" " Ñледующим образом: --voms VOa:инÑтрукциÑ1 --voms VOb:" "инÑтрукциÑ2).\n" " <:инÑтрукциÑ> не обÑзательна и Ñлужит Ð´Ð»Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа " "дополнительных\n" " атрибутов (например, ролей)\n" " ИнÑтрукции:\n" " all --- добавить вÑе атрибуты, доÑтупные данному " "пользователю;\n" " list --- перечиÑлить вÑе атрибуты, доÑтупные данному " "пользователю,\n" " без ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ AC; \n" " /Role=вашаРоль --- указать желаемую роль; еÑли данный " "пользователь\n" " может играть такую роль, она будет " "добавлена в AC;\n" " /voname/groupname/Role=вашаРоль --- указать ВО, группу и роль; " "еÑли\n" " данный пользователь может играть такую " "роль, она\n" " будет добавлена.\n" " ЕÑли Ñта Ð¾Ð¿Ñ†Ð¸Ñ Ð½Ðµ задана, будут иÑпользоватьÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¸Ð· " "файлов наÑтроек.\n" " Ð”Ð»Ñ Ð¿Ñ€ÐµÐ´Ð¾Ñ‚Ð²Ñ€Ð°Ñ‰ÐµÐ½Ð¸Ñ Ð¸ÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð¸Ñ Ñ‡ÐµÐ³Ð¾-либо, укажите -S без " "значениÑ.\n" #: src/clients/credentials/arcproxy.cpp:348 msgid "" "group<:role>. Specify ordering of attributes \n" " Example: --order /knowarc.eu/coredev:Developer,/knowarc.eu/" "testers:Tester \n" " or: --order /knowarc.eu/coredev:Developer --order /knowarc.eu/" "testers:Tester \n" " Note that it does not make sense to specify the order if you have two or " "more different VOMS servers specified" msgstr "" "group<:role>. 
Ð£ÐºÐ°Ð·Ð°Ð½Ð½Ð°Ñ Ð¿Ð¾ÑледовательноÑть атрибутов \n" " Пример: --order /knowarc.eu/coredev:Developer,/knowarc.eu/" "testers:Tester \n" " или: --order /knowarc.eu/coredev:Developer --order /knowarc.eu/" "testers:Tester \n" " Имейте в виду, что при иÑпользовании неÑкольких Ñерверов VOMS не имеет " "ÑмыÑла указывать поÑледовательноÑть атрибутов" #: src/clients/credentials/arcproxy.cpp:355 msgid "use GSI communication protocol for contacting VOMS services" msgstr "иÑпользовать протокол GSI Ð´Ð»Ñ ÐºÐ¾Ð½Ñ‚Ð°ÐºÑ‚Ð° Ñлужб VOMS" #: src/clients/credentials/arcproxy.cpp:358 msgid "" "use HTTP communication protocol for contacting VOMS services that provide " "RESTful access \n" " Note for RESTful access, 'list' command and multiple VOMS " "server are not supported\n" msgstr "" "иÑпользовать протокол HTTP Ð´Ð»Ñ ÑвÑзи Ñо Ñлужбами VOMS, поддерживающими " "доÑтуп типа REST \n" " Внимание: Ð´Ð»Ñ Ð´Ð¾Ñтупа REST, команда 'list' и множеÑтвенный " "Ñервер VOMS не поддерживаютÑÑ\n" #: src/clients/credentials/arcproxy.cpp:362 msgid "" "use old communication protocol for contacting VOMS services instead of " "RESTful access\n" msgstr "" "иÑпользовать уÑтаревший протокол ÑвÑзи Ñо Ñлужбами VOMS вмеÑто доÑтупа по " "протоколу REST\n" #: src/clients/credentials/arcproxy.cpp:365 msgid "" "this option is not functional (old GSI proxies are not supported anymore)" msgstr "Ð¾Ð¿Ñ†Ð¸Ñ Ð½ÐµÐ´Ð¾Ñтупна (Ñтарые доверенноÑти GSI более не поддерживаютÑÑ)" #: src/clients/credentials/arcproxy.cpp:368 msgid "print all information about this proxy." msgstr "вывеÑти вÑÑŽ информацию об Ñтой доверенноÑти." #: src/clients/credentials/arcproxy.cpp:371 msgid "print selected information about this proxy." msgstr "вывеÑти избранную информацию об Ñтой доверенноÑти." #: src/clients/credentials/arcproxy.cpp:374 msgid "remove proxy" msgstr "удаление доверенноÑти" #: src/clients/credentials/arcproxy.cpp:377 msgid "" "username to MyProxy server (if missing subject of user certificate is used)" msgstr "" "Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ñервера MyProxy (при отÑутÑтвии имени Ñубъекта, или при " "применении Ñертификата пользователÑ)" #: src/clients/credentials/arcproxy.cpp:382 msgid "" "don't prompt for a credential passphrase, when retrieve a \n" " credential from on MyProxy server. \n" " The precondition of this choice is the credential is PUT onto\n" " the MyProxy server without a passphrase by using -R (--" "retrievable_by_cert) \n" " option when being PUTing onto Myproxy server. \n" " This option is specific for the GET command when contacting " "Myproxy server." msgstr "" "не запрашивать пароль учётных данных при получении Ñтих \n" " данных Ñ Ñервера MyProxy. \n" " Это возможно при уÑловии, еÑли данные были Ñохранены методом " "PUT\n" " на Ñервере MyProxy без паролÑ, иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ Ð¾Ð¿Ñ†Ð¸ÑŽ -R (--" "retrievable_by_cert) \n" " при выполнении операции PUT в отношении Ñервера Myproxy. \n" " Эта Ð¾Ð¿Ñ†Ð¸Ñ Ð¸ÑпользуетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ командой GET в отношении Ñервера " "Myproxy." #: src/clients/credentials/arcproxy.cpp:393 msgid "" "Allow specified entity to retrieve credential without passphrase.\n" " This option is specific for the PUT command when contacting " "Myproxy server." msgstr "" "Разрешить указанному клиенту получать учётные данные без паролÑ.\n" " Эта Ð¾Ð¿Ñ†Ð¸Ñ Ð¸ÑпользуетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ командой PUT в отношении Ñервера " "Myproxy." 
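The --voms and --order help texts above already spell out the attribute syntax (/voname/groupname/Role=...). A sketch of requesting a VOMS-extended proxy with those two options from Python, reusing the knowarc.eu example values quoted in the help text; it assumes arcproxy is installed and the VO is actually configured in a vomses file:

    import subprocess

    # Request a proxy with one VOMS attribute and an explicit attribute order,
    # using only options and example values quoted in the help text above.
    cmd = [
        "arcproxy",
        "--voms", "knowarc.eu:/knowarc.eu/coredev/Role=Developer",
        "--order", "/knowarc.eu/coredev:Developer",
    ]
    subprocess.run(cmd, check=True)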
#: src/clients/credentials/arcproxy.cpp:399 msgid "hostname[:port] of MyProxy server" msgstr "hostname[:port] Ñервера MyProxy" #: src/clients/credentials/arcproxy.cpp:404 msgid "" "command to MyProxy server. The command can be PUT, GET, INFO, NEWPASS or " "DESTROY.\n" " PUT -- put a delegated credentials to the MyProxy server; \n" " GET -- get a delegated credentials from the MyProxy server; \n" " INFO -- get and present information about credentials stored " "at the MyProxy server; \n" " NEWPASS -- change password protecting credentials stored at " "the MyProxy server; \n" " DESTROY -- wipe off credentials stored at the MyProxy " "server; \n" " Local credentials (certificate and key) are not necessary " "except in case of PUT. \n" " MyProxy functionality can be used together with VOMS " "functionality.\n" " --voms and --vomses can be used for Get command if VOMS " "attributes\n" " is required to be included in the proxy.\n" msgstr "" "инÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ Ñерверу MyProxy. Возможны Ñледующие инÑтрукции: PUT, GET, INFO, " "NEWPASS или DESTROY.\n" " PUT -- Ñохранить делегированный Ñертификат на Ñервере " "MyProxy;\n" " GET -- получить делегированный Ñертификат Ñ Ñервера MyProxy,\n" " INFO -- вывеÑти информацию о Ñертификатах, хранÑщихÑÑ Ð½Ð° " "Ñервере MyProxy; \n" " NEWPASS -- изменить пароль, защищающий Ñертификаты, хранÑщиеÑÑ " "на Ñервере MyProxy; \n" " DESTROY -- удалить Ñертификаты, хранÑщиеÑÑ Ð½Ð° Ñервере " "MyProxy; \n" " Личные Ñертификаты и ключи не требуютÑÑ, за иÑключением " "инÑтрукции PUT.\n" " ИнÑтрукции MyProxy и VOMS могут иÑпользоватьÑÑ Ð¾Ð´Ð½Ð¾Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ð¾.\n" " Опции --voms and --vomses могут быть иÑпользованы Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð¾Ð¹ " "Get, еÑли\n" " в доверенноÑть необходимо включить аÑ‚рибуты VOMS.\n" #: src/clients/credentials/arcproxy.cpp:419 msgid "" "use NSS credential database in default Mozilla profiles, \n" " including Firefox, Seamonkey and Thunderbird.\n" msgstr "" "иÑпользовать базу данных параметров доÑтупа NSS из профилей Mozilla \n" " по умолчанию, Ð²ÐºÐ»ÑŽÑ‡Ð°Ñ Firefox, Seamonkey и Thunderbird.\n" #: src/clients/credentials/arcproxy.cpp:424 msgid "proxy constraints" msgstr "Ð¾Ð³Ñ€Ð°Ð½Ð¸Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти" #: src/clients/credentials/arcproxy.cpp:428 msgid "password destination=password source" msgstr "назначение паролÑ=иÑточник паролÑ" #: src/clients/credentials/arcproxy.cpp:452 msgid "" "RESTful and old VOMS communication protocols can't be requested " "simultaneously." msgstr "Протоколы REST и уÑтаревший VOMS не могут быть запрошены одновременно." #: src/clients/credentials/arcproxy.cpp:482 #: src/clients/credentials/arcproxy.cpp:1187 msgid "Failed configuration initialization." msgstr "Ðе удалоÑÑŒ загрузить наÑтройки." #: src/clients/credentials/arcproxy.cpp:511 msgid "" "Failed to find certificate and/or private key or files have improper " "permissions or ownership." msgstr "" "Ðе удалоÑÑŒ обнаружить Ñертификат и/или закрытый ключ, либо у файлов " "неподходÑщие параметры доÑтупа." #: src/clients/credentials/arcproxy.cpp:512 #: src/clients/credentials/arcproxy.cpp:524 msgid "You may try to increase verbosity to get more information." msgstr "" "Ð’Ñ‹ можете попытатьÑÑ ÑƒÐ²ÐµÐ»Ð¸Ñ‡Ð¸Ñ‚ÑŒ уровень детальноÑти Ð´Ð»Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ " "дополнительной информации." 
#: src/clients/credentials/arcproxy.cpp:520 msgid "Failed to find CA certificates" msgstr "Ðевозможно найти Ñертификаты CA" #: src/clients/credentials/arcproxy.cpp:521 msgid "" "Cannot find the CA certificates directory path, please set environment " "variable X509_CERT_DIR, or cacertificatesdirectory in a configuration file." msgstr "" "Ðе удалоÑÑŒ найти каталог Ñ Ñертификатами агентÑтв CA. ПожалуйÑта, задайте " "переменную Ñреды X509_CERT_DIR, или значение cacertificatesdirectory в файле " "наÑтроек." #: src/clients/credentials/arcproxy.cpp:525 msgid "" "The CA certificates directory is required for contacting VOMS and MyProxy " "servers." msgstr "" "Каталог Ñертификатов агентÑтв CA необходим Ð´Ð»Ñ ÑвÑзи Ñ Ñерверами VOMS и " "MyProxy." #: src/clients/credentials/arcproxy.cpp:537 msgid "" "$X509_VOMS_FILE, and $X509_VOMSES are not set;\n" "User has not specified the location for vomses information;\n" "There is also not vomses location information in user's configuration file;\n" "Can not find vomses in default locations: ~/.arc/vomses, ~/.voms/vomses,\n" "$ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/" "vomses,\n" "/etc/vomses, /etc/grid-security/vomses, and the location at the " "corresponding sub-directory" msgstr "" "$X509_VOMS_FILE и $X509_VOMSES не наÑтроены;\n" "Пользователь не указал раÑположение файла vomses;\n" "РаÑположение файла vomses не найдено в файле наÑтроек пользователÑ;\n" "Файл vomses не обнаружен в ~/.arc/vomses, ~/.voms/vomses,\n" "$ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/" "vomses,\n" "/etc/vomses, /etc/grid-security/vomses, а также в ÑоответÑтвующих " "подкаталогах" #: src/clients/credentials/arcproxy.cpp:582 msgid "Wrong number of arguments!" msgstr "ÐедопуÑтимое чиÑло аргументов!" #: src/clients/credentials/arcproxy.cpp:590 #: src/clients/credentials/arcproxy.cpp:614 #: src/clients/credentials/arcproxy.cpp:747 msgid "" "Cannot find the path of the proxy file, please setup environment " "X509_USER_PROXY, or proxypath in a configuration file" msgstr "" "Ðе удалоÑÑŒ найти доверенноÑть пользователÑ. ПожалуйÑта, задайте переменную " "Ñреды X509_USER_PROXY, или значение proxypath в файле наÑтроек" #: src/clients/credentials/arcproxy.cpp:597 #, c-format msgid "Cannot remove proxy file at %s" msgstr "Ðевозможно удалить файл доверенноÑти в %s" #: src/clients/credentials/arcproxy.cpp:599 #, c-format msgid "Cannot remove proxy file at %s, because it's not there" msgstr "Ðевозможно удалить файл доверенноÑти в %s, потому что его там нет" #: src/clients/credentials/arcproxy.cpp:608 msgid "Bearer token is available. It is preferred for job submission." msgstr "ПриÑутÑтвует маркер доÑтупа. Предпочтителен Ð´Ð»Ñ Ð·Ð°Ñылки задач." #: src/clients/credentials/arcproxy.cpp:620 #: src/clients/credentials/arcproxy.cpp:753 #, c-format msgid "" "Cannot find file at %s for getting the proxy. Please make sure this file " "exists." msgstr "" "Ðе удалоÑÑŒ найти файл по адреÑу %s, Ñодержащий доверенноÑть. ПожалуйÑта, " "убедитеÑÑŒ, что файл ÑущеÑтвует." #: src/clients/credentials/arcproxy.cpp:626 #: src/clients/credentials/arcproxy.cpp:759 #, c-format msgid "Cannot process proxy file at %s." msgstr "Ðевозможно обработать файл доверенноÑти в %s." 
#: src/clients/credentials/arcproxy.cpp:629 #, c-format msgid "Subject: %s" msgstr "Субъект: %s" #: src/clients/credentials/arcproxy.cpp:630 #, c-format msgid "Issuer: %s" msgstr "Кем выдана: %s" #: src/clients/credentials/arcproxy.cpp:631 #, c-format msgid "Identity: %s" msgstr "Личные данные: %s" #: src/clients/credentials/arcproxy.cpp:633 msgid "Time left for proxy: Proxy expired" msgstr "ДоверенноÑть дейÑтвительна на: Срок дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти вышел" #: src/clients/credentials/arcproxy.cpp:635 msgid "Time left for proxy: Proxy not valid yet" msgstr "ДоверенноÑть дейÑтвительна на: ДоверенноÑть пока недейÑтвительна" #: src/clients/credentials/arcproxy.cpp:637 #, c-format msgid "Time left for proxy: %s" msgstr "ДоверенноÑть дейÑтвительна на: %s" #: src/clients/credentials/arcproxy.cpp:638 #, c-format msgid "Proxy path: %s" msgstr "РаÑположение доверенноÑти: %s" #: src/clients/credentials/arcproxy.cpp:639 #, c-format msgid "Proxy type: %s" msgstr "Тип доверенноÑти: %s" #: src/clients/credentials/arcproxy.cpp:640 #, c-format msgid "Proxy key length: %i" msgstr "Длина ключа доверенноÑти: %i" #: src/clients/credentials/arcproxy.cpp:641 #, c-format msgid "Proxy signature: %s" msgstr "ПодпиÑÑŒ доверенноÑти: %s" #: src/clients/credentials/arcproxy.cpp:650 msgid "AC extension information for VO " msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ раÑширении AC Ð´Ð»Ñ VO " #: src/clients/credentials/arcproxy.cpp:653 msgid "Error detected while parsing this AC" msgstr "Обнаружена ошибка при разборе Ñертификата атрибута" #: src/clients/credentials/arcproxy.cpp:666 msgid "AC is invalid: " msgstr "Сертификат атрибута недейÑтвителен: " #: src/clients/credentials/arcproxy.cpp:696 #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:222 #, c-format msgid "Malformed VOMS AC attribute %s" msgstr "Ðеверный атрибут VOMS AC %s" #: src/clients/credentials/arcproxy.cpp:727 msgid "Time left for AC: AC is not valid yet" msgstr "Сертификат атрибута дейÑтвителен на: Сертификат пока недейÑтвителен" #: src/clients/credentials/arcproxy.cpp:729 msgid "Time left for AC: AC has expired" msgstr "" "Сертификат атрибута дейÑтвителен на: Срок дейÑÑ‚Ð²Ð¸Ñ Ñертификата закончилÑÑ" #: src/clients/credentials/arcproxy.cpp:731 #, c-format msgid "Time left for AC: %s" msgstr "Сертификат атрибута дейÑтвителен на: %s" #: src/clients/credentials/arcproxy.cpp:838 #, c-format msgid "Information item '%s' is not known" msgstr "ÐеизвеÑтный тип информации '%s'" #: src/clients/credentials/arcproxy.cpp:850 msgid "" "Cannot find the user certificate path, please setup environment " "X509_USER_CERT, or certificatepath in a configuration file" msgstr "" "Ðе удалоÑÑŒ найти путь к открытому ключу пользователÑ. ПожалуйÑта, задайте " "переменную Ñреды X509_USER_CERT, или значение certificatepath в файле " "наÑтроек" #: src/clients/credentials/arcproxy.cpp:854 msgid "" "Cannot find the user private key path, please setup environment " "X509_USER_KEY, or keypath in a configuration file" msgstr "" "Ðе удалоÑÑŒ найти закрытый ключ пользователÑ. ПожалуйÑта, задайте переменную " "Ñреды X509_USER_KEY, или значение keypath в файле наÑтроек" #: src/clients/credentials/arcproxy.cpp:878 #, c-format msgid "" "Cannot parse password source expression %s it must be of type=source format" msgstr "" "Ðе удалоÑÑŒ разобрать выражение %s Ð´Ð»Ñ Ð¸Ñточника паролÑ: формат должен быть " "type=source" #: src/clients/credentials/arcproxy.cpp:895 #, c-format msgid "" "Cannot parse password type %s. Currently supported values are " "'key','myproxy','myproxynew' and 'all'." 
msgstr "" "Ðе удалоÑÑŒ разобрать тип Ð¿Ð°Ñ€Ð¾Ð»Ñ %s. Ð’ наÑтоÑщий момент поддерживаютÑÑ " "Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ 'key','myproxy','myproxynew' и 'all'." #: src/clients/credentials/arcproxy.cpp:910 #, c-format msgid "" "Cannot parse password source %s it must be of source_type or source_type:" "data format. Supported source types are int,stdin,stream,file." msgstr "" "Ðе удалоÑÑŒ разобрать иÑточник Ð¿Ð°Ñ€Ð¾Ð»Ñ %s. Формат должен быть source_type или " "source_type:data format. ПоддерживаютÑÑ Ñледующие типы иÑточников: int,stdin," "stream,file." #: src/clients/credentials/arcproxy.cpp:924 msgid "Only standard input is currently supported for password source." msgstr "" "Ðа наÑтоÑщий момент единÑтвенным поддерживаемым иÑточником Ð¿Ð°Ñ€Ð¾Ð»Ñ ÑвлÑетÑÑ " "Ñтандартный вход." #: src/clients/credentials/arcproxy.cpp:929 #, c-format msgid "" "Cannot parse password source type %s. Supported source types are int,stdin," "stream,file." msgstr "" "Ðе удалоÑÑŒ разобрать тип иÑточника Ð¿Ð°Ñ€Ð¾Ð»Ñ %s. ПоддерживаютÑÑ Ñледующие типы " "иÑточников: int,stdin,stream,file." #: src/clients/credentials/arcproxy.cpp:968 msgid "The start, end and period can't be set simultaneously" msgstr "Опции start, end и period не могут быть заданы одновременно" #: src/clients/credentials/arcproxy.cpp:974 #, c-format msgid "The start time that you set: %s can't be recognized." msgstr "Ðевозможно раÑпознать заданное Вами Ð²Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð°: %s." #: src/clients/credentials/arcproxy.cpp:981 #, c-format msgid "The period that you set: %s can't be recognized." msgstr "Ðевозможно раÑпознать заданный Вами интервал: %s." #: src/clients/credentials/arcproxy.cpp:988 #, c-format msgid "The end time that you set: %s can't be recognized." msgstr "Ðевозможно раÑпознать заданное Вами Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ: %s." #: src/clients/credentials/arcproxy.cpp:997 #, c-format msgid "The end time that you set: %s is before start time: %s." msgstr "Заданное Вами Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ: %s предшеÑтвует времени начала: %s." #: src/clients/credentials/arcproxy.cpp:1008 #, c-format msgid "WARNING: The start time that you set: %s is before current time: %s" msgstr "" "ПРЕДУПРЕЖДЕÐИЕ: Заданное Вами Ð²Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð°: %s предшеÑтвует текущему " "времени: %s" #: src/clients/credentials/arcproxy.cpp:1011 #, c-format msgid "WARNING: The end time that you set: %s is before current time: %s" msgstr "" "ПРЕДУПРЕЖДЕÐИЕ: Заданное Вами Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ: %s предшеÑтвует текущему " "времени: %s" #: src/clients/credentials/arcproxy.cpp:1021 #, c-format msgid "The VOMS AC period that you set: %s can't be recognized." msgstr "Ðевозможно раÑпознать заданный Вами период VOMS AC: %s." #: src/clients/credentials/arcproxy.cpp:1039 #, c-format msgid "The MyProxy period that you set: %s can't be recognized." msgstr "Ðевозможно раÑпознать заданный Вами период MyProxy: %s." #: src/clients/credentials/arcproxy.cpp:1054 #, c-format msgid "The keybits constraint is wrong: %s." msgstr "ÐедопуÑтимое значение Ð¾Ð³Ñ€Ð°Ð½Ð¸Ñ‡ÐµÐ½Ð¸Ñ keybits: %s." 
#: src/clients/credentials/arcproxy.cpp:1068 #: src/hed/libs/credential/ARCProxyUtil.cpp:1271 msgid "The NSS database can not be detected in the Firefox profile" msgstr "База данных NSS в профиле Firefox не обнаружена" #: src/clients/credentials/arcproxy.cpp:1077 #: src/hed/libs/credential/ARCProxyUtil.cpp:1279 #, c-format msgid "" "There are %d NSS base directories where the certificate, key, and module " "databases live" msgstr "" "Обнаружено %d оÑновных директорий NSS, Ñодержащих базы данных Ñертификатов, " "ключей и модулей" #: src/clients/credentials/arcproxy.cpp:1079 #: src/hed/libs/credential/ARCProxyUtil.cpp:1283 #, c-format msgid "Number %d is: %s" msgstr "Ðомер %d: %s" #: src/clients/credentials/arcproxy.cpp:1081 #: src/hed/libs/credential/ARCProxyUtil.cpp:1285 #, c-format msgid "Please choose the NSS database you would like to use (1-%d): " msgstr "ПожалуйÑта, выберите базу данных NSS Ð´Ð»Ñ Ð¸ÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð¸Ñ (1-%d): " #: src/clients/credentials/arcproxy.cpp:1097 #: src/hed/libs/credential/ARCProxyUtil.cpp:1297 #, c-format msgid "NSS database to be accessed: %s\n" msgstr "Будет иÑпользоватьÑÑ Ð±Ð°Ð·Ð° данных NSS %s\n" #: src/clients/credentials/arcproxy.cpp:1168 #: src/hed/libs/credential/ARCProxyUtil.cpp:1471 #, c-format msgid "Certificate to use is: %s" msgstr "ИÑпользуемый Ñертификат: %s" #: src/clients/credentials/arcproxy.cpp:1216 #: src/clients/credentials/arcproxy.cpp:1330 #: src/hed/libs/credential/ARCProxyUtil.cpp:1528 msgid "Proxy generation succeeded" msgstr "ДоверенноÑть уÑпешно Ñоздана" #: src/clients/credentials/arcproxy.cpp:1217 #: src/clients/credentials/arcproxy.cpp:1331 #: src/hed/libs/credential/ARCProxyUtil.cpp:1529 #, c-format msgid "Your proxy is valid until: %s" msgstr "Ваша доверенноÑть дейÑтвительна до: %s" #: src/clients/credentials/arcproxy.cpp:1236 msgid "" "The old GSI proxies are not supported anymore. Please do not use -O/--old " "option." msgstr "" "Старые доверенноÑти GSI более не поддерживаютÑÑ. ПожалуйÑта, не иÑпользуйте " "опцию -O/--old." #: src/clients/credentials/arcproxy.cpp:1255 src/hed/mcc/tls/MCCTLS.cpp:163 #: src/hed/mcc/tls/MCCTLS.cpp:196 src/hed/mcc/tls/MCCTLS.cpp:222 msgid "VOMS attribute parsing failed" msgstr "Сбой обработки атрибутов VOMS" #: src/clients/credentials/arcproxy.cpp:1257 msgid "Myproxy server did not return proxy with VOMS AC included" msgstr "Сервер Myproxy не приÑлал Ñертификат Ñ Ñ€Ð°Ñширением VOMS AC" #: src/clients/credentials/arcproxy.cpp:1278 #: src/hed/libs/credential/ARCProxyUtil.cpp:337 msgid "Proxy generation failed: No valid certificate found." msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Ðе обнаружено дейÑтвительных Ñертификатов." #: src/clients/credentials/arcproxy.cpp:1283 #: src/hed/libs/credential/ARCProxyUtil.cpp:343 msgid "Proxy generation failed: No valid private key found." msgstr "" "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Ðе обнаружено дейÑтвительных закрытых ключей." #: src/clients/credentials/arcproxy.cpp:1287 #: src/hed/libs/credential/ARCProxyUtil.cpp:169 #, c-format msgid "Your identity: %s" msgstr "Ваши личные данные: %s" #: src/clients/credentials/arcproxy.cpp:1289 #: src/hed/libs/credential/ARCProxyUtil.cpp:350 msgid "Proxy generation failed: Certificate has expired." msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Срок дейÑÑ‚Ð²Ð¸Ñ Ñертификата иÑтёк." #: src/clients/credentials/arcproxy.cpp:1293 #: src/hed/libs/credential/ARCProxyUtil.cpp:355 msgid "Proxy generation failed: Certificate is not valid yet." 
msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Срок дейÑÑ‚Ð²Ð¸Ñ Ñертификата ещё не началÑÑ." #: src/clients/credentials/arcproxy.cpp:1304 msgid "Proxy generation failed: Failed to create temporary file." msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ð¾Ð³Ð¾ файла." #: src/clients/credentials/arcproxy.cpp:1312 msgid "Proxy generation failed: Failed to retrieve VOMS information." msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Сбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ VOMS." #: src/clients/credentials/arcproxy_myproxy.cpp:100 #: src/hed/libs/credential/ARCProxyUtil.cpp:838 msgid "Succeeded to get info from MyProxy server" msgstr "УдалоÑÑŒ получить информацию Ñ Ñервера MyProxy" #: src/clients/credentials/arcproxy_myproxy.cpp:144 #: src/hed/libs/credential/ARCProxyUtil.cpp:894 msgid "Succeeded to change password on MyProxy server" msgstr "УдалоÑÑŒ поменÑть пароль на Ñервере MyProxy" #: src/clients/credentials/arcproxy_myproxy.cpp:185 #: src/hed/libs/credential/ARCProxyUtil.cpp:943 msgid "Succeeded to destroy credential on MyProxy server" msgstr "УдалоÑÑŒ уничтожить доверенноÑть на Ñервере MyProxy" #: src/clients/credentials/arcproxy_myproxy.cpp:265 #: src/hed/libs/credential/ARCProxyUtil.cpp:1032 #, c-format msgid "Succeeded to get a proxy in %s from MyProxy server %s" msgstr "УдалоÑÑŒ получить доверенноÑть в %s Ñ Ñервера MyProxy %s" #: src/clients/credentials/arcproxy_myproxy.cpp:318 #: src/hed/libs/credential/ARCProxyUtil.cpp:1091 msgid "Succeeded to put a proxy onto MyProxy server" msgstr "УдалоÑÑŒ делегировать доверенноÑть Ñерверу MyProxy" #: src/clients/credentials/arcproxy_proxy.cpp:93 #: src/hed/libs/credential/ARCProxyUtil.cpp:397 #: src/hed/libs/credential/ARCProxyUtil.cpp:1378 msgid "Failed to add VOMS AC extension. Your proxy may be incomplete." msgstr "" "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ VOMS AC. Ваша доверенноÑть может быть неполной." #: src/clients/credentials/arcproxy_voms.cpp:63 msgid "" "Failed to process VOMS configuration or no suitable configuration lines " "found." msgstr "" "Ðе удалоÑÑŒ обработать наÑтройки VOMS, или не найдены приемлемые Ñтроки " "конфигурации." 
#: src/clients/credentials/arcproxy_voms.cpp:75
#, c-format
msgid "Failed to parse requested VOMS lifetime: %s"
msgstr "Сбой разбора указанного времени действия VOMS: %s"

#: src/clients/credentials/arcproxy_voms.cpp:93
#: src/hed/libs/credential/ARCProxyUtil.cpp:634
#, c-format
msgid "Cannot get VOMS server address information from vomses line: \"%s\""
msgstr "Информация об адресе сервера VOMS отсутствует в строке: \"%s\""

#: src/clients/credentials/arcproxy_voms.cpp:97
#: src/clients/credentials/arcproxy_voms.cpp:99
#: src/hed/libs/credential/ARCProxyUtil.cpp:644
#: src/hed/libs/credential/ARCProxyUtil.cpp:646
#, c-format
msgid "Contacting VOMS server (named %s): %s on port: %s"
msgstr "Устанавливается связь с сервером VOMS (по имени %s): %s по порту: %s"

#: src/clients/credentials/arcproxy_voms.cpp:105
#, c-format
msgid "Failed to parse requested VOMS server port number: %s"
msgstr "Сбой разбора указанного номера порта сервера VOMS: %s"

#: src/clients/credentials/arcproxy_voms.cpp:122
msgid "List functionality is not supported for RESTful VOMS interface"
msgstr "Перечисление не поддерживается для REST-интерфейса VOMS"

#: src/clients/credentials/arcproxy_voms.cpp:132
#: src/clients/credentials/arcproxy_voms.cpp:188
#, c-format
msgid ""
"The VOMS server with the information:\n"
"\t%s\n"
"can not be reached, please make sure it is available."
msgstr ""
"Невозможно связаться с сервером VOMS с информацией:\n"
"\t%s\n"
"Пожалуйста, проверьте, доступен ли этот сервер."

#: src/clients/credentials/arcproxy_voms.cpp:133
#: src/clients/credentials/arcproxy_voms.cpp:138
#: src/clients/credentials/arcproxy_voms.cpp:189
#: src/clients/credentials/arcproxy_voms.cpp:194
#, c-format
msgid ""
"Collected error is:\n"
"\t%s"
msgstr ""
"Полученная ошибка:\n"
"\t%s"

#: src/clients/credentials/arcproxy_voms.cpp:137
#: src/clients/credentials/arcproxy_voms.cpp:193
#, c-format
msgid "No valid response from VOMS server: %s"
msgstr "Не получено приемлемого отзыва от сервера VOMS: %s"

#: src/clients/credentials/arcproxy_voms.cpp:155
msgid "List functionality is not supported for legacy VOMS interface"
msgstr "Перечисление не поддерживается для традиционного интерфейса VOMS"

#: src/clients/credentials/arcproxy_voms.cpp:167
#, c-format
msgid "Failed to parse VOMS command: %s"
msgstr "Не удалось разобрать команду VOMS: %s"

#: src/clients/credentials/arcproxy_voms.cpp:204
#, c-format
msgid ""
"There are %d servers with the same name: %s in your vomses file, but none of "
"them can be reached, or can return a valid message."
msgstr ""
"В Вашем файле vomses указаны %d серверов с одинаковым именем %s, но ни один "
"из них не доступен или не отзывается правильно."
#: src/clients/data/arccp.cpp:77 src/clients/data/arccp.cpp:330 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:509 #, c-format msgid "Current transfer FAILED: %s" msgstr "Ð¢ÐµÐºÑƒÑ‰Ð°Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð° ÐЕ СОСТОЯЛÐСЬ: %s" #: src/clients/data/arccp.cpp:79 src/clients/data/arccp.cpp:135 #: src/clients/data/arccp.cpp:332 src/clients/data/arcls.cpp:225 #: src/clients/data/arcmkdir.cpp:73 src/clients/data/arcrename.cpp:89 #: src/clients/data/arcrm.cpp:95 msgid "This seems like a temporary error, please try again later" msgstr "Похоже на временный Ñбой - пожалуйÑта, попытайтеÑÑŒ Ñнова попозже" #: src/clients/data/arccp.cpp:87 src/clients/data/arccp.cpp:96 #, c-format msgid "Unable to copy %s" msgstr "Ðе удалоÑÑŒ Ñкопировать %s" #: src/clients/data/arccp.cpp:88 src/clients/data/arccp.cpp:97 #: src/clients/data/arcls.cpp:150 src/clients/data/arcls.cpp:159 #: src/clients/data/arcmkdir.cpp:55 src/clients/data/arcmkdir.cpp:64 #: src/clients/data/arcrename.cpp:67 src/clients/data/arcrename.cpp:76 #: src/clients/data/arcrm.cpp:68 src/clients/data/arcrm.cpp:80 msgid "Invalid credentials, please check proxy and/or CA certificates" msgstr "" "ÐедейÑтвительные реквизиты доÑтупа, пожалуйÑта, проверьте Ñертификат " "доверенноÑти и/или реквизиты органа Ñертификации" #: src/clients/data/arccp.cpp:94 src/clients/data/arcls.cpp:156 #: src/clients/data/arcmkdir.cpp:61 src/clients/data/arcrename.cpp:73 #: src/clients/data/arcrm.cpp:77 msgid "Proxy expired" msgstr "Срок дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти вышел" #: src/clients/data/arccp.cpp:112 src/clients/data/arccp.cpp:116 #: src/clients/data/arccp.cpp:149 src/clients/data/arccp.cpp:153 #: src/clients/data/arccp.cpp:358 src/clients/data/arccp.cpp:363 #: src/clients/data/arcls.cpp:123 src/clients/data/arcmkdir.cpp:28 #: src/clients/data/arcrename.cpp:29 src/clients/data/arcrename.cpp:33 #: src/clients/data/arcrm.cpp:36 #, c-format msgid "Invalid URL: %s" msgstr "Ðеверный URL: %s" #: src/clients/data/arccp.cpp:128 msgid "Third party transfer is not supported for these endpoints" msgstr "Ð”Ð»Ñ Ñтих точек входа ÑтороннÑÑ Ð¿ÐµÑ€ÐµÑылка не поддерживаетÑÑ" #: src/clients/data/arccp.cpp:130 msgid "" "Protocol(s) not supported - please check that the relevant gfal2\n" " plugins are installed (gfal2-plugin-* packages)" msgstr "" "Протокол не поддерживаетÑÑ - пожалуйÑта, убедитеÑÑŒ что\n" " уÑтановлены необходимые подключаемые модули gfal2 (пакеты gfal2-" "plugin-*)" #: src/clients/data/arccp.cpp:133 #, c-format msgid "Transfer FAILED: %s" msgstr "Передача ÐЕ УДÐЛÐСЬ: %s" #: src/clients/data/arccp.cpp:161 src/clients/data/arccp.cpp:187 #: src/clients/data/arccp.cpp:374 src/clients/data/arccp.cpp:402 #, c-format msgid "Can't read list of sources from file %s" msgstr "Ðевозможно прочеÑть ÑпиÑок иÑточников из файла %s" #: src/clients/data/arccp.cpp:166 src/clients/data/arccp.cpp:202 #: src/clients/data/arccp.cpp:379 src/clients/data/arccp.cpp:418 #, c-format msgid "Can't read list of destinations from file %s" msgstr "Ðевозможно прочеÑтьÑпиÑок назначений из файла %s" #: src/clients/data/arccp.cpp:171 src/clients/data/arccp.cpp:385 msgid "Numbers of sources and destinations do not match" msgstr "ЧиÑло иÑточников и чиÑло назначений не ÑоответÑтвуют друг другу" #: src/clients/data/arccp.cpp:216 msgid "Fileset registration is not supported yet" msgstr "РегиÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð°Ð±Ð¾Ñ€Ð¾Ð² файлов пока не поддерживаетÑÑ" #: src/clients/data/arccp.cpp:222 src/clients/data/arccp.cpp:295 #: src/clients/data/arccp.cpp:456 #, c-format msgid "Unsupported source url: %s" msgstr 
"Ðеподдерживаемый URL иÑточника: %s" #: src/clients/data/arccp.cpp:226 src/clients/data/arccp.cpp:299 #, c-format msgid "Unsupported destination url: %s" msgstr "Ðеподдерживаемый URL назначениÑ: %s" #: src/clients/data/arccp.cpp:233 msgid "" "For registration source must be ordinary URL and destination must be " "indexing service" msgstr "" "Ð”Ð»Ñ Ñ€ÐµÐ³Ð¸Ñтрации, иÑточник должен быть задан обычным URL, а назначением " "должен быть каталог реÑурÑов" #: src/clients/data/arccp.cpp:243 #, c-format msgid "Could not obtain information about source: %s" msgstr "Ðе удалоÑÑŒ получить информацию об иÑточнике: %s" #: src/clients/data/arccp.cpp:250 msgid "" "Metadata of source does not match existing destination. Use the --force " "option to override this." msgstr "" "Метаданные иÑточника и цели не Ñовпадают. ИÑпользуйте опцию --force Ð´Ð»Ñ " "принудительного копированиÑ." #: src/clients/data/arccp.cpp:262 msgid "Failed to accept new file/destination" msgstr "Сбой при приёме нового файла/направлениÑ" #: src/clients/data/arccp.cpp:268 src/clients/data/arccp.cpp:274 #, c-format msgid "Failed to register new file/destination: %s" msgstr "Сбой при региÑтрации нового файла/цели: %s" #: src/clients/data/arccp.cpp:436 msgid "Fileset copy to single object is not supported yet" msgstr "Копирование набора файлов в отдельный объект пока не поддерживаетÑÑ" #: src/clients/data/arccp.cpp:446 msgid "Can't extract object's name from source url" msgstr "Ðевозможно извлечь Ð¸Ð¼Ñ Ð¾Ð±ÑŠÐµÐºÑ‚Ð° из URL иÑточника" #: src/clients/data/arccp.cpp:465 #, c-format msgid "%s. Cannot copy fileset" msgstr "%s. Ðевозможно Ñкопировать набор файлов" #: src/clients/data/arccp.cpp:475 src/hed/libs/compute/ExecutionTarget.cpp:256 #: src/hed/libs/compute/ExecutionTarget.cpp:328 #, c-format msgid "Name: %s" msgstr "ИмÑ: %s" #: src/clients/data/arccp.cpp:478 #, c-format msgid "Source: %s" msgstr "ИÑточник: %s" #: src/clients/data/arccp.cpp:479 #, c-format msgid "Destination: %s" msgstr "Ðазначение: %s" #: src/clients/data/arccp.cpp:485 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:516 msgid "Current transfer complete" msgstr "Ð¢ÐµÐºÑƒÑ‰Ð°Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð° завершена" #: src/clients/data/arccp.cpp:488 msgid "Some transfers failed" msgstr "Ðекоторые загрузки не удалиÑÑŒ" #: src/clients/data/arccp.cpp:498 #, c-format msgid "Directory: %s" msgstr "Каталог: %s" #: src/clients/data/arccp.cpp:518 msgid "Transfer complete" msgstr "Передача данных завершена" #: src/clients/data/arccp.cpp:537 msgid "source destination" msgstr "иÑточник назначение" #: src/clients/data/arccp.cpp:538 msgid "" "The arccp command copies files to, from and between grid storage elements." msgstr "" "Команда arccp копирует файлы на, Ñ Ð¸ между запоминающими уÑтройÑтвами Грид." #: src/clients/data/arccp.cpp:543 msgid "" "use passive transfer (off by default if secure is on, on by default if " "secure is not requested)" msgstr "" "иÑпользовать паÑÑивную передачу данных (по умолчанию, Ð¾Ð¿Ñ†Ð¸Ñ Ð¾Ñ‚ÐºÐ»ÑŽÑ‡ÐµÐ½Ð° при " "защищённой передаче, и включена при незащищённой)" #: src/clients/data/arccp.cpp:549 msgid "do not try to force passive transfer" msgstr "не пытатьÑÑ Ñ„Ð¾Ñ€Ñировать паÑÑивный ÑпоÑоб передачи данных" #: src/clients/data/arccp.cpp:554 msgid "" "if the destination is an indexing service and not the same as the source and " "the destination is already registered, then the copy is normally not done. 
" "However, if this option is specified the source is assumed to be a replica " "of the destination created in an uncontrolled way and the copy is done like " "in case of replication. Using this option also skips validation of completed " "transfers." msgstr "" "еÑли назначением задан индекÑирующий ÑервиÑ, отличный от иÑточника, и Ñто " "назначение уже зарегиÑтрировано, копирование обычно не допуÑкаетÑÑ. Ð’ Ñлучае " "же, когда указана Ñта опциÑ, иÑточник раÑÑматриваетÑÑ ÐºÐ°Ðº Ð½ÐµÐ¾Ñ„Ð¸Ñ†Ð¸Ð°Ð»ÑŒÐ½Ð°Ñ " "ÐºÐ¾Ð¿Ð¸Ñ Ð·Ð°Ñ€ÐµÐ³Ð¸Ñтрированного файла, и копирование производитÑÑ ÐºÐ°Ðº в Ñлучае " "тиражированиÑ. При иÑпользовании Ñтой опции пропуÑкаетÑÑ Ñверка завершённых " "передач." #: src/clients/data/arccp.cpp:567 msgid "show progress indicator" msgstr "показать индикатор выполнениÑ" #: src/clients/data/arccp.cpp:572 msgid "" "do not transfer, but register source into destination. destination must be a " "meta-url." msgstr "" "зарегиÑтрировать файл, не Ð¿ÐµÑ€ÐµÐ´Ð°Ð²Ð°Ñ ÐµÐ³Ð¾ - назначением должен быть мета-URL." #: src/clients/data/arccp.cpp:578 msgid "use secure transfer (insecure by default)" msgstr "" "иÑпользовать защищённую передачу данных (передача не защищена по умолчанию)" #: src/clients/data/arccp.cpp:583 msgid "path to local cache (use to put file into cache)" msgstr "путь к локальному кÑшу (иÑпользуетÑÑ Ð´Ð»Ñ Ð·Ð°Ð¿Ð¸Ñи файла в кÑш)" #: src/clients/data/arccp.cpp:588 src/clients/data/arcls.cpp:301 msgid "operate recursively" msgstr "обработать рекурÑивно" #: src/clients/data/arccp.cpp:593 src/clients/data/arcls.cpp:306 msgid "operate recursively up to specified level" msgstr "рекурÑивное иÑполнение до указанного уровнÑ" #: src/clients/data/arccp.cpp:594 src/clients/data/arcls.cpp:307 msgid "level" msgstr "уровень" #: src/clients/data/arccp.cpp:598 msgid "number of retries before failing file transfer" msgstr "количеÑтво попыток передачи файла" #: src/clients/data/arccp.cpp:599 msgid "number" msgstr "чиÑло" #: src/clients/data/arccp.cpp:603 msgid "" "physical location to write to when destination is an indexing service. Must " "be specified for indexing services which do not automatically generate " "physical locations. Can be specified multiple times - locations will be " "tried in order until one succeeds." msgstr "" "физичеÑкий Ð°Ð´Ñ€ÐµÑ Ð´Ð»Ñ Ð·Ð°Ð¿Ð¸Ñи, еÑли в качеÑтве Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ ÑƒÐºÐ°Ð·Ð°Ð½ каталог " "реÑурÑов. Должен быть указан Ð´Ð»Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð², не генерирующих физичеÑкие " "адреÑа автоматичеÑки. ÐеÑколько значений может быть указано - адреÑа будут " "перебиратьÑÑ, пока не будет доÑтигнут уÑпех." 
#: src/clients/data/arccp.cpp:611 msgid "" "perform third party transfer, where the destination pulls from the source " "(only available with GFAL plugin)" msgstr "" "выполнить Ñтороннюю переÑылку, когда назначение закачивает файл из иÑточника " "(доÑтупно только Ñ Ð¼Ð¾Ð´ÑƒÐ»ÐµÐ¼ GFAL)" #: src/clients/data/arccp.cpp:617 src/clients/data/arcls.cpp:323 #: src/clients/data/arcmkdir.cpp:101 src/clients/data/arcrename.cpp:112 #: src/clients/data/arcrm.cpp:127 msgid "list the available plugins (protocols supported)" msgstr "показать ÑпиÑок доÑтупных модулей (поддерживаемые протоколы)" #: src/clients/data/arccp.cpp:656 src/clients/data/arcls.cpp:363 #: src/clients/data/arcmkdir.cpp:141 src/clients/data/arcrename.cpp:152 #: src/clients/data/arcrm.cpp:168 msgid "Protocol plugins available:" msgstr "ДоÑтупны модули Ð´Ð»Ñ Ñледующих протоколов:" #: src/clients/data/arccp.cpp:681 src/clients/data/arcls.cpp:388 #: src/clients/data/arcmkdir.cpp:165 src/clients/data/arcrename.cpp:175 #: src/clients/data/arcrm.cpp:193 msgid "Wrong number of parameters specified" msgstr "Задано неверное количеÑтво параметров" #: src/clients/data/arccp.cpp:686 msgid "Options 'p' and 'n' can't be used simultaneously" msgstr "Опции 'p' и 'n' не могут быть иÑпользованы одновременно" #: src/clients/data/arcls.cpp:129 src/clients/data/arcmkdir.cpp:34 #: src/clients/data/arcrm.cpp:43 #, c-format msgid "Can't read list of locations from file %s" msgstr "Ðевозможно прочеÑть ÑпиÑок адреÑов из файла %s" #: src/clients/data/arcls.cpp:144 src/clients/data/arcmkdir.cpp:49 #: src/clients/data/arcrename.cpp:61 msgid "Unsupported URL given" msgstr "Заданный URL не поддерживаетÑÑ" #: src/clients/data/arcls.cpp:149 src/clients/data/arcls.cpp:158 #, c-format msgid "Unable to list content of %s" msgstr "Ðе удалоÑÑŒ проÑмотреть Ñодержимое %s" #: src/clients/data/arcls.cpp:228 msgid "Warning: Failed listing files but some information is obtained" msgstr "" "Предупреждение: Ðе удалоÑÑŒ вывеÑти ÑпиÑок файлов, но Ð½ÐµÐºÐ¾Ñ‚Ð¾Ñ€Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ " "была получена" #: src/clients/data/arcls.cpp:282 src/clients/data/arcmkdir.cpp:90 msgid "url" msgstr "URL" #: src/clients/data/arcls.cpp:283 msgid "" "The arcls command is used for listing files in grid storage elements and " "file\n" "index catalogues." msgstr "" "Команда arcls иÑпользуетÑÑ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñмотра информации о файлах,\n" "хранÑщихÑÑ Ð½Ð° накопительных уÑтройÑтвах Грид, а также в занеÑённых\n" "в каталоги данных." 
#: src/clients/data/arcls.cpp:292
msgid "show URLs of file locations"
msgstr "вывести адреса физических файлов"

#: src/clients/data/arcls.cpp:296
msgid "display all available metadata"
msgstr "показать все доступные метаданные"

#: src/clients/data/arcls.cpp:310
msgid ""
"show only description of requested object, do not list content of directories"
msgstr ""
"показывать только описание запрашиваемого объекта, не выводить содержимое "
"каталогов"

#: src/clients/data/arcls.cpp:314
msgid "treat requested object as directory and always try to list content"
msgstr ""
"интерпретировать запрошенный объект как каталог, и всегда пытаться вывести "
"его содержимое"

#: src/clients/data/arcls.cpp:318
msgid "check readability of object, does not show any information about object"
msgstr "проверить читаемость объекта, не показывать информацию об объекте"

#: src/clients/data/arcls.cpp:393
msgid "Incompatible options --nolist and --forcelist requested"
msgstr "Запрошены несовместимые опции --nolist и --forcelist"

#: src/clients/data/arcls.cpp:398
msgid "Requesting recursion and --nolist has no sense"
msgstr "Запрос рекурсивного просмотра и --nolist не имеет смысла"

#: src/clients/data/arcmkdir.cpp:54 src/clients/data/arcmkdir.cpp:63
#, c-format
msgid "Unable to create directory %s"
msgstr "Не удалось создать каталог %s"

#: src/clients/data/arcmkdir.cpp:91
msgid ""
"The arcmkdir command creates directories on grid storage elements and "
"catalogs."
msgstr ""
"Команда arcmkdir создаёт директории на грид-хранилищах и в каталогах данных."

#: src/clients/data/arcmkdir.cpp:96
msgid "make parent directories as needed"
msgstr "создавать родительские директории по мере необходимости"

#: src/clients/data/arcrename.cpp:41
msgid "Both URLs must have the same protocol, host and port"
msgstr "Оба URL должны содержать одинаковый протокол, адрес сервера и порт"

#: src/clients/data/arcrename.cpp:51
msgid "Cannot rename to or from root directory"
msgstr "Невозможно переместить в корневую директорию или из неё"

#: src/clients/data/arcrename.cpp:55
msgid "Cannot rename to the same URL"
msgstr "Невозможно переименовать в идентичный URL"

#: src/clients/data/arcrename.cpp:66 src/clients/data/arcrename.cpp:75
#, c-format
msgid "Unable to rename %s"
msgstr "Не удалось переименовать %s"

#: src/clients/data/arcrename.cpp:106
msgid "old_url new_url"
msgstr "old_url new_url"

#: src/clients/data/arcrename.cpp:107
msgid "The arcrename command renames files on grid storage elements."
msgstr "Команда arcrename переименовывает файлы на запоминающих устройствах."

#: src/clients/data/arcrm.cpp:58
#, c-format
msgid "Unsupported URL given: %s"
msgstr "Заданный URL не поддерживается: %s"

#: src/clients/data/arcrm.cpp:67 src/clients/data/arcrm.cpp:79
#, c-format
msgid "Unable to remove file %s"
msgstr "Не удалось удалить файл %s"

#: src/clients/data/arcrm.cpp:115
msgid "url [url ...]"
msgstr "url [url ...]"

#: src/clients/data/arcrm.cpp:116
msgid "The arcrm command deletes files on grid storage elements."
msgstr "Команда arcrm удаляет файлы с запоминающих устройств."
#: src/clients/data/arcrm.cpp:121
msgid ""
"remove logical file name registration even if not all physical instances "
"were removed"
msgstr ""
"удалить логическое имя файла, даже если не все физические копии удалены"

#: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:53
msgid "Cannot initialize ARCHERY domain name for query"
msgstr "Не удалось инициализировать доменное имя ARCHERY для запроса"

#: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:60
msgid "Cannot create resolver from /etc/resolv.conf"
msgstr "Не удалось создать преобразователь из /etc/resolv.conf"

#: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:68
msgid "Cannot query service endpoint TXT records from DNS"
msgstr "Не удалось запросить TXT-записи конечных точек службы из DNS"

#: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:79
msgid "Cannot parse service endpoint TXT records."
msgstr "Не удалось разобрать TXT-записи конечных точек службы."

#: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:124
#, c-format
msgid "Wrong service record field \"%s\" found in the \"%s\""
msgstr "Обнаружено недопустимое поле записи \"%s\" в \"%s\""

#: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:129
#, c-format
msgid "Malformed ARCHERY record found (endpoint url is not defined): %s"
msgstr "Обнаружена неверная запись ARCHERY (не задан URL конечной точки): %s"

#: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:134
#, c-format
msgid "Malformed ARCHERY record found (endpoint type is not defined): %s"
msgstr "Обнаружена неверная запись ARCHERY (не задан тип конечной точки): %s"

#: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:138
#, c-format
msgid "Found service endpoint %s (type %s)"
msgstr "Обнаружена конечная точка службы %s (тип %s)"

#: src/hed/acc/ARCHERY/ServiceEndpointRetrieverPluginARCHERY.cpp:157
#, c-format
msgid ""
"Status for service endpoint \"%s\" is set to inactive in ARCHERY. Skipping."
msgstr ""
"Состояние точки доступа службы \"%s\" задано как неактивное в ARCHERY. "
"Пропускается."

#: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:229
#: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:161
#: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:149
#, c-format
msgid "Job %s has no delegation associated. Can't renew such job."
msgstr ""
"С задачей %s не ассоциировано никакого делегирования. Задача не может быть "
"обновлена."

#: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:241
#: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:161
#, c-format
msgid "Job %s failed to renew delegation %s."
msgstr "Задача %s не смогла обновить делегирование %s."
#: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:313 #, c-format msgid "Failed to process jobs - wrong response: %u" msgstr "Сбой обработки задач - неверный отклик: %u" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:314 #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:323 #, c-format msgid "Content: %s" msgstr "Содержимое: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:317 #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:329 #, c-format msgid "Failed to process job: %s" msgstr "Сбой обработки задачи: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:327 msgid "Failed to process jobs - failed to parse response" msgstr "Сбой обработки задач - Ñбой разборки отклика" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:340 #, c-format msgid "No response returned: %s" msgstr "Ðе получен отклик: %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:364 #, c-format msgid "Failed to process job: %s - %s %s" msgstr "Сбой разборки задачи: %s - %s %s" #: src/hed/acc/ARCREST/JobControllerPluginREST.cpp:431 #, c-format msgid "Failed retrieving job description for job: %s" msgstr "Сбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ð¾Ð¿Ð¸ÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: %s" #: src/hed/acc/ARCREST/JobListRetrieverPluginREST.cpp:29 msgid "Collecting Job (A-REX REST jobs) information." msgstr "СобираетÑÑ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ задаче (задачи на A-REX REST)" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:50 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:84 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:124 msgid "Failed to communicate to delegation endpoint." msgstr "Сбой ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ Ñо Ñлужбой делегированиÑ." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:55 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:89 #, c-format msgid "Unexpected response code from delegation endpoint - %u" msgstr "Ðеверный код отклика Ñлужбы Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ - %u" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:57 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:91 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:242 #: src/hed/dmc/gridftp/Lister.cpp:223 src/hed/dmc/gridftp/Lister.cpp:243 #: src/hed/dmc/gridftp/Lister.cpp:468 src/hed/dmc/gridftp/Lister.cpp:475 #: src/hed/dmc/gridftp/Lister.cpp:497 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:163 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:196 #, c-format msgid "Response: %s" msgstr "Ответ: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:62 #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:96 msgid "Missing response from delegation endpoint." msgstr "ОтÑутÑтвует отклик Ñлужбы делегированиÑ." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:71 #, c-format msgid "Unexpected delegation location from delegation endpoint - %s." msgstr "Ðеверное раÑположение Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¾Ñ‚ Ñлужбы Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ - %s." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:128 #, c-format msgid "Unexpected response code from delegation endpoint: %u, %s." msgstr "Ðеверный код отклика Ñлужбы делегированиÑ: %u, %s." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:177 msgid "Unable to submit jobs. Failed to delegate credentials." msgstr "ЗаÑылка задач не удалаÑÑŒ. Сбой Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð¾Ð² доÑтупа." 
#: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:192 msgid "Failed to prepare job description" msgstr "Сбой подготовки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:201 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:87 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:401 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:116 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:262 #, c-format msgid "Unable to submit job. Job description is not valid in the %s format: %s" msgstr "" "Ðевозможно заÑлать задачу. ОпиÑание задачи в формате %s недейÑтвительно: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:208 msgid "Unable to submit job. Failed to assign delegation to job description." msgstr "" "ЗаÑылка задачи не удалаÑÑŒ. Сбой приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¾Ð¿Ð¸Ñанию задачи." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:231 msgid "Failed to submit all jobs." msgstr "Сбой заÑылки вÑех задач." #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:241 #, c-format msgid "Failed to submit all jobs: %u %s" msgstr "Сбой заÑылки вÑех задач: %u %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:254 #, c-format msgid "Failed to submit all jobs: %s" msgstr "Сбой заÑылки вÑех задач: %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:274 #, c-format msgid "Failed to submit all jobs: %s %s" msgstr "Сбой заÑылки вÑех задач: %s %s" #: src/hed/acc/ARCREST/SubmitterPluginREST.cpp:289 msgid "Failed uploading local input files" msgstr "Сбой выгрузки локальных входных файлов" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:27 msgid "Querying WSRF GLUE2 computing REST endpoint." msgstr "" "ОпрашиваетÑÑ Ñ‚Ð¾Ñ‡ÐºÐ° доÑтупа WSRF GLUE2 к информации о вычиÑлительном реÑурÑе " "REST." #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:60 #, c-format msgid "CONTENT %u: %s" msgstr "СОДЕРЖИМОЕ %u: %s" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:64 msgid "Response is not XML" msgstr "Отклик не в формате XML" #: src/hed/acc/ARCREST/TargetInformationRetrieverPluginREST.cpp:69 #, c-format msgid "Parsed domains: %u" msgstr "Разобрано доменов: %u" #: src/hed/acc/Broker/DescriptorsBroker.cpp:14 msgid "Sorting according to free slots in queue" msgstr "Сортировка в ÑоответÑтвии Ñ Ð½Ð°Ð»Ð¸Ñ‡Ð¸ÐµÐ¼ Ñвободных меÑÑ‚ в очереди" #: src/hed/acc/Broker/DescriptorsBroker.cpp:15 msgid "Random sorting" msgstr "Ð¡Ð»ÑƒÑ‡Ð°Ð¹Ð½Ð°Ñ Ñортировка" #: src/hed/acc/Broker/DescriptorsBroker.cpp:16 msgid "Sorting according to specified benchmark (default \"specint2000\")" msgstr "" "Сортировка в ÑоответÑтвии Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¼ Ñталонным теÑтом (по умолчанию - " "\"specint2000\")" #: src/hed/acc/Broker/DescriptorsBroker.cpp:17 msgid "Sorting according to input data availability at target" msgstr "" "Сортировка в ÑоответÑтвии Ñ Ð´Ð¾ÑтупноÑтью входных данных в пункте назначениÑ" #: src/hed/acc/Broker/DescriptorsBroker.cpp:18 msgid "Performs neither sorting nor matching" msgstr "Ðе производитÑÑ Ð½Ð¸ Ñортировки, ни поиÑка ÑоответÑтвиÑ" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:24 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of waiting " "jobs" msgstr "" "Ðазначение %s отброшено алгоритмом FastestQueueBroker, Ñ‚.к. не Ñообщает " "чиÑло ожидающих задач" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:27 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of total slots" msgstr "" "Ðазначение %s отброшено алгоритмом FastestQueueBroker, Ñ‚.к. 
не Ñообщает " "общее чиÑло Ñчеек" #: src/hed/acc/Broker/FastestQueueBrokerPlugin.cpp:30 #, c-format msgid "" "Target %s removed by FastestQueueBroker, doesn't report number of free slots" msgstr "" "Ðазначение %s отброшено алгоритмом FastestQueueBroker, Ñ‚.к. не Ñообщает " "чиÑло Ñвободных Ñчеек" #: src/hed/acc/EMIES/EMIESClient.cpp:81 msgid "Creating an EMI ES client" msgstr "СоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ EMI ES" #: src/hed/acc/EMIES/EMIESClient.cpp:85 msgid "Unable to create SOAP client used by EMIESClient." msgstr "Ðе удалоÑÑŒ Ñоздать клиент SOAP иÑпользующийÑÑ EMIESClient." #: src/hed/acc/EMIES/EMIESClient.cpp:133 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:505 msgid "Initiating delegation procedure" msgstr "Ð˜Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð¿Ñ€Ð¾Ñ†ÐµÐ´ÑƒÑ€Ñ‹ делегированиÑ" #: src/hed/acc/EMIES/EMIESClient.cpp:163 msgid "Re-creating an EMI ES client" msgstr "ВоÑÑоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ EMI ES" #: src/hed/acc/EMIES/EMIESClient.cpp:180 #, c-format msgid "Processing a %s request" msgstr "Обработка запроÑа %s" #: src/hed/acc/EMIES/EMIESClient.cpp:191 #, c-format msgid "%s request failed" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ %s не выполнен" #: src/hed/acc/EMIES/EMIESClient.cpp:200 #, c-format msgid "No response from %s" msgstr "Ðет ответа от %s" #: src/hed/acc/EMIES/EMIESClient.cpp:209 #, c-format msgid "%s request to %s failed with response: %s" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ %s к %s не выполнен, получен ответ: %s" #: src/hed/acc/EMIES/EMIESClient.cpp:224 #, c-format msgid "XML response: %s" msgstr "Отзыв XML: %s" #: src/hed/acc/EMIES/EMIESClient.cpp:234 #, c-format msgid "%s request to %s failed. Unexpected response: %s." msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ %s к %s не выполнен, неожиданный ответ: %s." #: src/hed/acc/EMIES/EMIESClient.cpp:248 src/hed/acc/EMIES/EMIESClient.cpp:355 #, c-format msgid "Creating and sending job submit request to %s" msgstr "Создание и отправка запроÑа об иÑполнении задачи на %s" #: src/hed/acc/EMIES/EMIESClient.cpp:313 src/hed/acc/EMIES/EMIESClient.cpp:416 #, c-format msgid "Job description to be sent: %s" msgstr "ОпиÑание заÑылаемой задачи: %s" #: src/hed/acc/EMIES/EMIESClient.cpp:426 src/hed/acc/EMIES/EMIESClient.cpp:609 #: src/hed/acc/EMIES/EMIESClient.cpp:1098 #, c-format msgid "New limit for vector queries returned by EMI ES service: %d" msgstr "" "Ð¡ÐµÑ€Ð²Ð¸Ñ EMI ES уÑтановил новые Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¿Ñ€ÐµÐ´ÐµÐ»Ð¾Ð² Ð´Ð»Ñ Ð¿Ð°Ñ€Ð°Ð»Ð»ÐµÐ»ÑŒÐ½Ñ‹Ñ… запроÑов: %d" #: src/hed/acc/EMIES/EMIESClient.cpp:434 src/hed/acc/EMIES/EMIESClient.cpp:617 #: src/hed/acc/EMIES/EMIESClient.cpp:1106 #, c-format msgid "" "Error: Service returned a limit higher or equal to current limit (current: " "%d; returned: %d)" msgstr "" "Ошибка: Ð¡ÐµÑ€Ð²Ð¸Ñ Ñ‚Ñ€ÐµÐ±ÑƒÐµÑ‚ предел, превышающий или равный текущему (текущий: %d; " "требуемый: %d)" #: src/hed/acc/EMIES/EMIESClient.cpp:502 src/hed/acc/EMIES/EMIESClient.cpp:536 #: src/hed/acc/EMIES/EMIESClient.cpp:592 #, c-format msgid "Creating and sending job information query request to %s" msgstr "Создание и отправка запроÑа о ÑоÑтоÑнии задачи на %s" #: src/hed/acc/EMIES/EMIESClient.cpp:775 #, c-format msgid "Creating and sending service information request to %s" msgstr "Создание и отправка запроÑа информации о Ñлужбе на %s" #: src/hed/acc/EMIES/EMIESClient.cpp:832 #, c-format msgid "Creating and sending service information query request to %s" msgstr "Создание и отправка запроÑа о ÑоÑтоÑнии Ñлужбы на %s" #: src/hed/acc/EMIES/EMIESClient.cpp:880 src/hed/acc/EMIES/EMIESClient.cpp:901 #, c-format msgid "Creating and sending job clean request to %s" msgstr "" "Создание и отправка запроÑа об 
удалении результатов работы задачи на %s" #: src/hed/acc/EMIES/EMIESClient.cpp:922 #, c-format msgid "Creating and sending job suspend request to %s" msgstr "Создание и отправка запроÑа о приоÑтановке задачи на %s" #: src/hed/acc/EMIES/EMIESClient.cpp:943 #, c-format msgid "Creating and sending job resume request to %s" msgstr "Создание и отправка запроÑа о возобновлении задачи на %s" #: src/hed/acc/EMIES/EMIESClient.cpp:964 #, c-format msgid "Creating and sending job restart request to %s" msgstr "Создание и отправка запроÑа о перезапуÑке задачи на %s" #: src/hed/acc/EMIES/EMIESClient.cpp:1021 #, c-format msgid "Creating and sending job notify request to %s" msgstr "Создание и отправка запроÑа об уведомлении о задаче на %s" #: src/hed/acc/EMIES/EMIESClient.cpp:1076 #, c-format msgid "Creating and sending notify request to %s" msgstr "Создание и отправка запроÑа об уведомлении на %s" #: src/hed/acc/EMIES/EMIESClient.cpp:1166 #, c-format msgid "Creating and sending job list request to %s" msgstr "Создание и отправка запроÑа о проÑмотре задачи на %s" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:175 #, c-format msgid "Job %s failed to renew delegation %s - %s." msgstr "Задача %s не Ñмогла обновить делегирование %s - %s." #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:197 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:464 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:191 #, c-format msgid "Job %s does not report a resumable state" msgstr "Задача %s не находитÑÑ Ð² возобновлÑемом ÑоÑтоÑнии" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:202 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:196 #, c-format msgid "Resuming job: %s at state: %s (%s)" msgstr "Возобновление задачи %s в ÑоÑтоÑнии %s (%s)" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:215 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:520 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:205 msgid "Job resuming successful" msgstr "Задача уÑпешно возобновлена" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:248 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:251 #, c-format msgid "Failed retrieving information for job: %s" msgstr "Сбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ о задаче: %s" #: src/hed/acc/EMIES/JobControllerPluginEMIES.cpp:330 msgid "Retrieving job description of EMI ES jobs is not supported" msgstr "Получение опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡ EMI ES не поддерживаетÑÑ" #: src/hed/acc/EMIES/JobListRetrieverPluginEMIES.cpp:37 #, c-format msgid "Listing jobs succeeded, %d jobs found" msgstr "Задачи уÑпешно перечиÑлены, обнаружено %d задач(и)" #: src/hed/acc/EMIES/JobListRetrieverPluginEMIES.cpp:53 #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.cpp:111 #: src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp:83 #, c-format msgid "" "Skipping retrieved job (%s) because it was submitted via another interface " "(%s)." msgstr "" "ПропуÑкаетÑÑ ÑÐºÐ°Ñ‡Ð°Ð½Ð½Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° (%s), так как она была запущена через другой " "Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ (%s)." 
#: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:47 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:41 msgid "" "Failed to delegate credentials to server - no delegation interface found" msgstr "" "Сбой Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð¾Ð² доÑтупа на Ñервер - не обнаружен Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ " "делегированиÑ" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:54 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:48 #, c-format msgid "Failed to delegate credentials to server - %s" msgstr "Сбой Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð¾Ð² доÑтупа на Ñервер - %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:77 #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:87 msgid "Failed preparing job description" msgstr "Ðе удалоÑÑŒ подготовить опиÑание задачи" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:95 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:406 msgid "Unable to submit job. Job description is not valid XML" msgstr "" "Ðевозможно заÑлать задачу. ОпиÑание задачи не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым файлом XML" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:154 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:482 msgid "No valid job identifier returned by EMI ES" msgstr "EMI ES не возвратил дейÑтвительных Ñрлыков задач" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:180 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:499 msgid "Job failed on service side" msgstr "Задача дала Ñбой на Ñервере" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:190 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:509 msgid "Failed to obtain state of job" msgstr "Сбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ ÑоÑтоÑние задачи" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:205 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:521 msgid "Failed to wait for job to allow stage in" msgstr "Сбой Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ñ€Ð°Ð·Ñ€ÐµÑˆÐµÐ½Ð¸Ñ Ð¾Ñ‚ задачи на размещение входных файлов" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:228 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:541 msgid "Failed to obtain valid stagein URL for input files" msgstr "Сбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð´Ð¾Ð¿ÑƒÑтимых URL Ð´Ð»Ñ Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð²Ñ…Ð¾Ð´Ð½Ñ‹Ñ… файлов" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:248 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:558 #, c-format msgid "Failed uploading local input files to %s" msgstr "Сбой выгрузки локальных входных файлов в %s" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:269 #, c-format msgid "Failed to submit job description: EMIESFault(%s , %s)" msgstr "Сбой заÑылки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: EMIESFault(%s , %s)" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:278 #, c-format msgid "Failed to submit job description: UnexpectedError(%s)" msgstr "Сбой заÑылки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: UnexpectedError(%s)" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:315 #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:574 msgid "Failed to notify service" msgstr "Сбой ÑƒÐ²ÐµÐ´Ð¾Ð¼Ð»ÐµÐ½Ð¸Ñ Ñлужбы" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:367 msgid "Failed preparing job description to target resources" msgstr "Ðе удалоÑÑŒ адаптировать опиÑание задачи Ð´Ð»Ñ Ð·Ð°Ñылки по назначению" #: src/hed/acc/EMIES/SubmitterPluginEMIES.cpp:475 #, c-format msgid "Failed to submit job description: %s" msgstr "Сбой заÑылки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: %s" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:30 msgid "Collecting EMI-ES GLUE2 computing info endpoint information." msgstr "СобираетÑÑ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ GLUE2 Ð´Ð»Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ входа EMI-ES." 
#: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:50 msgid "Generating EMIES targets" msgstr "СоздаютÑÑ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ EMIES" #: src/hed/acc/EMIES/TargetInformationRetrieverPluginEMIES.cpp:59 #, c-format msgid "Generated EMIES target: %s" msgstr "Созданы Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ EMIES: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:75 #: src/hed/acc/EMIES/TestEMIESClient.cpp:79 #, c-format msgid "Query returned unexpected element: %s:%s" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð²Ð¾Ð·Ð²Ñ€Ð°Ñ‚Ð¸Ð» неожиданный Ñлемент: %s:%s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:85 #, c-format msgid "Element validation according to GLUE2 schema failed: %s" msgstr "Проверка ÑоответÑÑ‚Ð²Ð¸Ñ Ñлемента Ñхеме GLUE2 не прошла: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:114 msgid "Resource query failed" msgstr "Сбой опроÑа реÑурÑа" #: src/hed/acc/EMIES/TestEMIESClient.cpp:132 msgid "Submission failed" msgstr "Сбой заÑылки задачи" #: src/hed/acc/EMIES/TestEMIESClient.cpp:143 msgid "Obtaining status failed" msgstr "Сбой Ð¾Ð¿Ñ€ÐµÐ´ÐµÐ»ÐµÐ½Ð¸Ñ ÑоÑтоÑниÑ" #: src/hed/acc/EMIES/TestEMIESClient.cpp:153 msgid "Obtaining information failed" msgstr "Сбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸" #: src/hed/acc/EMIES/TestEMIESClient.cpp:170 msgid "Cleaning failed" msgstr "Сбой очиÑтки" #: src/hed/acc/EMIES/TestEMIESClient.cpp:177 msgid "Notify failed" msgstr "Сбой уведомлениÑ" #: src/hed/acc/EMIES/TestEMIESClient.cpp:184 msgid "Kill failed" msgstr "Сбой Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #: src/hed/acc/EMIES/TestEMIESClient.cpp:190 msgid "List failed" msgstr "Сбой перечиÑÐ»ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡" #: src/hed/acc/EMIES/TestEMIESClient.cpp:201 #, c-format msgid "Fetching resource description from %s" msgstr "Получение опиÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа Ñ %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:204 #: src/hed/acc/EMIES/TestEMIESClient.cpp:272 #: src/hed/acc/EMIES/TestEMIESClient.cpp:282 #: src/hed/acc/EMIES/TestEMIESClient.cpp:293 #, c-format msgid "Failed to obtain resource description: %s" msgstr "Сбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¾Ð¿Ð¸ÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:213 #: src/hed/acc/EMIES/TestEMIESClient.cpp:217 #, c-format msgid "Resource description contains unexpected element: %s:%s" msgstr "Схема опиÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа Ñодержит недейÑтвительный Ñлемент: %s:%s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:223 msgid "Resource description validation according to GLUE2 schema failed: " msgstr "Проверка ÑоответÑÑ‚Ð²Ð¸Ñ Ð¾Ð¿Ð¸ÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа Ñхеме GLUE2 не прошла: " #: src/hed/acc/EMIES/TestEMIESClient.cpp:228 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:560 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:819 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:517 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:706 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:129 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:169 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1214 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1248 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1430 #: src/hed/identitymap/ArgusPDPClient.cpp:444 #: src/hed/identitymap/ArgusPEPClient.cpp:98 #: src/hed/identitymap/ArgusPEPClient.cpp:345 #: src/hed/libs/common/Thread.cpp:242 src/hed/libs/common/Thread.cpp:245 #: src/hed/libs/common/Thread.cpp:248 #: src/hed/libs/credential/Credential.cpp:1048 #: src/hed/libs/data/DataPointDelegate.cpp:628 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:68 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:84 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:100 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:119 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:129 #: 
src/hed/mcc/tcp/PayloadTCPSocket.cpp:137 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:146 #: src/hed/mcc/tls/PayloadTLSMCC.cpp:82 src/hed/shc/arcpdp/ArcPDP.cpp:235 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:293 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:247 #: src/libs/data-staging/Scheduler.cpp:117 #: src/services/a-rex/delegation/DelegationStore.cpp:40 #: src/services/a-rex/delegation/DelegationStore.cpp:45 #: src/services/a-rex/delegation/DelegationStore.cpp:50 #: src/services/a-rex/delegation/DelegationStore.cpp:82 #: src/services/a-rex/delegation/DelegationStore.cpp:88 #: src/services/a-rex/grid-manager/inputcheck.cpp:33 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:480 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:551 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:576 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:587 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:598 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:609 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:617 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:623 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:628 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:633 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:643 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:652 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:660 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:671 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:678 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:736 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:743 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:783 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:787 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:790 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:859 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:872 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:889 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:901 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1174 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1179 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1208 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1221 #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:379 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:386 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:426 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:478 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:593 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:627 #, c-format msgid "%s" msgstr "%s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:248 msgid "Resource description is empty" msgstr "ОпиÑание реÑурÑа пуÑто" #: src/hed/acc/EMIES/TestEMIESClient.cpp:255 #, c-format msgid "Resource description provides URL for interface %s: %s" msgstr "ОпиÑание реÑурÑа Ñодержит URL интерфейÑа %s: %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:260 msgid "Resource description provides no URLs for interfaces" msgstr "ОпиÑание реÑурÑа не Ñодержит URL интерфейÑов" #: src/hed/acc/EMIES/TestEMIESClient.cpp:263 msgid "Resource description validation passed" msgstr "Прошла проверка опиÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа" #: src/hed/acc/EMIES/TestEMIESClient.cpp:265 #, c-format msgid "Requesting ComputingService elements of resource description at %s" 
msgstr "ЗапрашиваютÑÑ Ñлементы опиÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа ComputingService Ñ %s" #: src/hed/acc/EMIES/TestEMIESClient.cpp:270 msgid "Performing /Services/ComputingService query" msgstr "ВыполнÑетÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñ /Services/ComputingService" #: src/hed/acc/EMIES/TestEMIESClient.cpp:274 #: src/hed/acc/EMIES/TestEMIESClient.cpp:284 #: src/hed/acc/EMIES/TestEMIESClient.cpp:295 msgid "Query returned no elements." msgstr "Результат запроÑа не Ñодержит Ñлементов." #: src/hed/acc/EMIES/TestEMIESClient.cpp:280 msgid "Performing /ComputingService query" msgstr "ВыполнÑетÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñ /ComputingService" #: src/hed/acc/EMIES/TestEMIESClient.cpp:291 msgid "Performing /* query" msgstr "ВыполнÑетÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñ /*" #: src/hed/acc/EMIES/TestEMIESClient.cpp:301 msgid "All queries failed" msgstr "Сбой вÑех запроÑов" #: src/hed/acc/EMIES/TestEMIESClient.cpp:331 #, c-format msgid "" "Number of ComputingService elements obtained from full document and XPath " "query do not match: %d != %d" msgstr "" "КоличеÑтво Ñлементов ComputingService полученных из полного документа и из " "запроÑа XPath не Ñовпадают: %d != %d" #: src/hed/acc/EMIES/TestEMIESClient.cpp:334 msgid "Resource description query validation passed" msgstr "Проверка ÑоответÑÑ‚Ð²Ð¸Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа опиÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа прошла" #: src/hed/acc/EMIES/TestEMIESClient.cpp:336 #, c-format msgid "Unsupported command: %s" msgstr "ÐÐµÐ¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ð°Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð°: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:142 #, c-format msgid "Connect: Failed to init handle: %s" msgstr "Соединение: Ðе удалоÑÑŒ инициализировать ÑÑылку: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:148 #, c-format msgid "Failed to enable IPv6: %s" msgstr "Сбой Ð²ÐºÐ»ÑŽÑ‡ÐµÐ½Ð¸Ñ IPv6: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:158 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:172 #, c-format msgid "Connect: Failed to connect: %s" msgstr "Соединение: Сбой ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñ: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:165 #, c-format msgid "Connect: Connecting timed out after %d ms" msgstr "Соединение: Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ð¸Ñтекло поÑле %d мÑ" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:185 #, c-format msgid "Connect: Failed to init auth info handle: %s" msgstr "" "Соединение: Сбой инициализации идентификатора информации проверки " "подлинноÑти: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:196 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:210 #, c-format msgid "Connect: Failed authentication: %s" msgstr "Соединение: Ошибка проверки подлинноÑти: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:203 #, c-format msgid "Connect: Authentication timed out after %d ms" msgstr "Соединение: Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ подлинноÑти иÑтекло поÑле %d мÑ" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:224 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:256 #, c-format msgid "SendCommand: Command: %s" msgstr "SendCommand: Команда: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:229 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:240 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:260 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:271 #, c-format msgid "SendCommand: Failed: %s" msgstr "Отправка команды: Сбой: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:235 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:266 #, c-format msgid "SendCommand: Timed out after %d ms" msgstr "Отправка команды: Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¸Ñтекло поÑле %d мÑ" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:243 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:276 #, c-format msgid "SendCommand: Response: %s" msgstr "SendCommand: Отзыв: %s" #: 
src/hed/acc/GRIDFTPJOB/FTPControl.cpp:293 msgid "FTP Job Control: Failed sending EPSV and PASV commands" msgstr "FTP Job Control: Сбой отÑылки команд EPSV и PASV" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:298 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:304 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:320 #, c-format msgid "FTP Job Control: Server PASV response parsing failed: %s" msgstr "FTP Job Control: Сбой разбора отзыва Ñервера PASV: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:330 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:336 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:343 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:350 #, c-format msgid "FTP Job Control: Server EPSV response parsing failed: %s" msgstr "FTP Job Control: Сбой разбора отзыва Ñервера EPSV: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:357 #, c-format msgid "FTP Job Control: Server EPSV response port parsing failed: %s" msgstr "FTP Job Control: Сбой разбора порта отзыва Ñервера EPSV: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:366 #, c-format msgid "FTP Job Control: Failed to apply local address to data connection: %s" msgstr "" "FTP Job Control: Ðе удалоÑÑŒ применить локальный Ð°Ð´Ñ€ÐµÑ Ðº каналу передачи " "данных: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:372 #, c-format msgid "" "FTP Job Control: Can't parse host and/or port in response to EPSV/PASV: %s" msgstr "" "FTP Job Control: Ðе удалоÑÑŒ извлечь Ð°Ð´Ñ€ÐµÑ ÑƒÐ·Ð»Ð° и/или номер порта из ответа " "на Ð·Ð°Ð¿Ñ€Ð¾Ñ EPSV/PASV: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:377 #, c-format msgid "FTP Job Control: Data channel: %d.%d.%d.%d:%d" msgstr "FTP Job Control: Канал передачи данных: %d.%d.%d.%d:%d" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:393 #, c-format msgid "FTP Job Control: Data channel: [%s]:%d" msgstr "FTP Job Control: Канал передачи данных: [%s]:%d" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:398 #, c-format msgid "FTP Job Control: Local port failed: %s" msgstr "FTP Job Control: Сбой локального порта: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:422 msgid "FTP Job Control: Failed sending DCAU command" msgstr "FTP Job Control: Сбой отправки команды DCAU" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:427 msgid "FTP Job Control: Failed sending TYPE command" msgstr "FTP Job Control: Сбой отправки команды TYPE" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:436 #, c-format msgid "FTP Job Control: Local type failed: %s" msgstr "FTP Job Control: Сбой локального типа: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:446 #, c-format msgid "FTP Job Control: Failed sending STOR command: %s" msgstr "FTP Job Control: Сбой отправки команды STOR: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:454 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:475 #, c-format msgid "FTP Job Control: Data connect write failed: %s" msgstr "FTP Job Control: Сбой ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ð¸ запиÑи данных: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:461 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:469 #, c-format msgid "FTP Job Control: Data connect write timed out after %d ms" msgstr "" "FTP Job Control: Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÐºÐ¾Ð½Ñ‚Ð°ÐºÑ‚Ð° и запиÑи данных иÑтекло поÑле %d мÑ" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:487 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:507 #, c-format msgid "FTP Job Control: Data write failed: %s" msgstr "FTP Job Control: Сбой запиÑи данных: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:493 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:501 #, c-format msgid "FTP Job Control: Data write timed out after %d ms" msgstr "FTP Job Control: Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð·Ð°Ð¿Ð¸Ñи данных иÑтекло поÑле %d мÑ" #: 
src/hed/acc/GRIDFTPJOB/FTPControl.cpp:527 #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:538 #, c-format msgid "Disconnect: Failed aborting - ignoring: %s" msgstr "Отключение: Сбой Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ - игнорируетÑÑ: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:530 #, c-format msgid "Disconnect: Data close timed out after %d ms" msgstr "Отключение: Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ… иÑтекло поÑле %d мÑ" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:541 #, c-format msgid "Disconnect: Abort timed out after %d ms" msgstr "Отключение: Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ Ð¸Ñтекло поÑле %d мÑ" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:549 #, c-format msgid "Disconnect: Failed quitting - ignoring: %s" msgstr "Отключение: Сбой выхода - игнорируетÑÑ: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:552 #, c-format msgid "Disconnect: Quitting timed out after %d ms" msgstr "Отключение: Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð²Ñ‹Ñ…Ð¾Ð´Ð° иÑтекло поÑле %d мÑ" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:561 #, c-format msgid "Disconnect: Failed closing - ignoring: %s" msgstr "Отключение: Сбой Ð¾Ñ‚ÐºÐ»ÑŽÑ‡ÐµÐ½Ð¸Ñ - игнорируетÑÑ: %s" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:567 #, c-format msgid "Disconnect: Closing timed out after %d ms" msgstr "Отключение: Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚ÐºÐ»ÑŽÑ‡ÐµÐ½Ð¸Ñ Ð¸Ñтекло поÑле %d мÑ" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:582 msgid "Disconnect: waiting for globus handle to settle" msgstr "Отключение: ждём пока ÑÑылка globus уÑтаканитÑÑ" #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:596 msgid "Disconnect: globus handle is stuck." msgstr "Отключение: ÑÑылка globus заÑтрÑла." #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:604 #, c-format msgid "Disconnect: Failed destroying handle: %s. Can't handle such situation." msgstr "" "Отключение: Сбой ÑƒÐ½Ð¸Ñ‡Ñ‚Ð¾Ð¶ÐµÐ½Ð¸Ñ ÑÑылки: %s. Ðевозможно ÑправитьÑÑ Ñ Ñ‚Ð°ÐºÐ¸Ð¼ " "положением." #: src/hed/acc/GRIDFTPJOB/FTPControl.cpp:607 msgid "Disconnect: handle destroyed." msgstr "Отключение: ÑÑылка уничтожена." #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:43 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:44 msgid "" "Missing reference to factory and/or module. It is unsafe to use Globus in " "non-persistent mode - SubmitterPlugin for GRIDFTPJOB is disabled. Report to " "developers." msgstr "" "ОтÑутÑтвует указание на фабрику и/или модуль. ИÑпользование Globus во " "временном режиме небезопаÑно - SubmitterPlugin Ð´Ð»Ñ GRIDFTPJOB отключён. " "Сообщите разработчикам." #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:58 #, c-format msgid "Unable to query job information (%s), invalid URL provided (%s)" msgstr "" "Ðевозможно опроÑить информацию о задаче (%s), задан недопуÑтимый URL (%s)" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:70 #, c-format msgid "Jobs left to query: %d" msgstr "Ðеопрошенных задач: %d" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:79 #, c-format msgid "Querying batch with %d jobs" msgstr "ОпрашиваетÑÑ ÑпиÑок из %d задач(и)" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:97 msgid "Can't create information handle - is the ARC LDAP DMC plugin available?" msgstr "" "Ðе удалоÑÑŒ Ñоздать ÑÑылку Ð´Ð»Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ - проверьте, доÑтупен ли " "подгружаемый модуль ARC LDAP DMC." 
#: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:130 #, c-format msgid "Job information not found in the information system: %s" msgstr "Информация о задаче в информационной системе не обнаружена: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:132 msgid "" "This job was very recently submitted and might not yet have reached the " "information system" msgstr "" "Эта задача была запущена лишь недавно, и может быть ещё не зарегистрирована " "в системе" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:319 #, c-format msgid "Cleaning job: %s" msgstr "Удаляется задача: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:323 msgid "Failed to connect for job cleaning" msgstr "Не удалось соединиться для очистки задачи" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:335 msgid "Failed sending CWD command for job cleaning" msgstr "Не удалось отправить инструкцию CWD для очистки задачи" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:342 msgid "Failed sending RMD command for job cleaning" msgstr "Не удалось отправить инструкцию RMD для очистки задачи" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:349 msgid "Failed to disconnect after job cleaning" msgstr "Не удалось отсоединиться после очистки задачи" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:356 msgid "Job cleaning successful" msgstr "Задача успешно удалена" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:367 #, c-format msgid "Cancelling job: %s" msgstr "Прерывание задачи: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:371 msgid "Failed to connect for job cancelling" msgstr "Сбой соединения для прерывания задачи" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:383 msgid "Failed sending CWD command for job cancelling" msgstr "Сбой отправки инструкции CWD для прерывания задачи" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:390 msgid "Failed sending DELE command for job cancelling" msgstr "Сбой отправки инструкции DELE для прерывания задачи" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:397 msgid "Failed to disconnect after job cancelling" msgstr "Сбой отсоединения после прерывания задачи" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:405 msgid "Job cancelling successful" msgstr "Задача успешно оборвана" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:416 #, c-format msgid "Renewing credentials for job: %s" msgstr "Обновление параметров доступа для задачи: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:420 msgid "Failed to connect for credential renewal" msgstr "Сбой установления связи для обновления параметров доступа" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:432 #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:439 msgid "Failed sending CWD command for credentials renewal" msgstr "Сбой отправки инструкции CWD для обновления параметров доступа" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:445 msgid "Failed to disconnect after credentials renewal" msgstr "Сбой отсоединения после обновления параметров доступа" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:452 msgid "Renewal of credentials was successful" msgstr "Параметры доступа успешно обновлены" #:
src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:474 #, c-format msgid "Illegal jobID specified (%s)" msgstr "Задан недопуÑтимый Ñрлык задачи (%s)" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:481 #, c-format msgid "HER: %s" msgstr "HER: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:487 #, c-format msgid "Could not create temporary file: %s" msgstr "Ðе удалоÑÑŒ Ñоздать временный файл: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:556 #, c-format msgid "Trying to retrieve job description of %s from computing resource" msgstr "Попытка Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¾Ð¿Ð¸ÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s Ñ Ð²Ñ‹Ñ‡Ð¸Ñлительного реÑурÑа" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:561 #, c-format msgid "invalid jobID: %s" msgstr "ÐедейÑтвительный Ñрлык задачи: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:603 msgid "clientxrsl found" msgstr "найден оригинал опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в формате XRSL" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:606 msgid "could not find start of clientxrsl" msgstr "невозможно найти начало опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в формате XRSL" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:611 msgid "could not find end of clientxrsl" msgstr "невозможно найти конец опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в формате XRSL" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:624 #, c-format msgid "Job description: %s" msgstr "ОпиÑание задачи: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:627 msgid "clientxrsl not found" msgstr "оригинал опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в формате XRSL не найден" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:633 #, c-format msgid "Invalid JobDescription: %s" msgstr "Ðеверный Ñлемент JobDescription: %s" #: src/hed/acc/GRIDFTPJOB/JobControllerPluginGRIDFTPJOB.cpp:636 msgid "Valid JobDescription found" msgstr "Обнаружено дейÑтвительное опиÑание JobDescription" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:60 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:206 msgid "Submit: Failed to connect" msgstr "ЗаÑылка: Сбой ÑвÑзи" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:68 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:214 msgid "Submit: Failed sending CWD command" msgstr "ЗаÑылка: Сбой отправки команды CWD" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:79 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:225 msgid "Submit: Failed sending CWD new command" msgstr "ЗаÑылка: Сбой отправки команды CWD new" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:106 msgid "Failed to prepare job description." msgstr "Сбой подготовки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸." #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:123 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:269 msgid "Submit: Failed sending job description" msgstr "ЗаÑылка: Сбой отправки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:138 #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:284 msgid "Submit: Failed uploading local input files" msgstr "ЗаÑылка: Сбой выгрузки локальных входных файлов" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:194 msgid "" "Submit: service has no suitable information interface - need org.nordugrid." "ldapng" msgstr "" "ЗаÑылка: ÑÐµÑ€Ð²Ð¸Ñ Ð½Ðµ предоÑтавлÑет подходÑщего информационного интерфейÑа - " "нужен org.nordugrid.ldapng" #: src/hed/acc/GRIDFTPJOB/SubmitterPluginGRIDFTPJOB.cpp:252 msgid "Failed to prepare job description to target resources." 
msgstr "Сбой подготовки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ Ð´Ð»Ñ Ð·Ð°Ñылки по назначению." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:75 #, c-format msgid "[ADLParser] Unsupported EMI ES state %s." msgstr "[ADLParser] Ðеподдерживаемое ÑоÑтоÑние EMI ES %s." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:95 #, c-format msgid "[ADLParser] Unsupported internal state %s." msgstr "[ADLParser] Ðеподдерживаемое внутреннее ÑоÑтоÑние %s." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:105 #, c-format msgid "[ADLParser] Optional for %s elements are not supported yet." msgstr "[ADLParser] Ðтрибут optional Ð´Ð»Ñ Ñлементов %s пока не поддерживаетÑÑ." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:114 #, c-format msgid "[ADLParser] %s element must be boolean." msgstr "[ADLParser] Ñлемент %s должен быть логичеÑким." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:126 #, c-format msgid "[ADLParser] Code in FailIfExitCodeNotEqualTo in %s is not valid number." msgstr "" "[ADLParser] Код в FailIfExitCodeNotEqualTo в %s не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым " "чиÑлом." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:364 msgid "[ADLParser] Root element is not ActivityDescription " msgstr "[ADLParser] Корневой Ñлемент не ÑвлÑетÑÑ ActivityDescription " #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:411 msgid "[ADLParser] priority is too large - using max value 100" msgstr "" "[ADLParser] Ñлишком выÑокий приоритет - иÑпользуетÑÑ Ð¼Ð°ÐºÑимальное значение " "100" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:454 #, c-format msgid "[ADLParser] Unsupported URL %s for RemoteLogging." msgstr "[ADLParser] Ðеподдерживаемый URL %s в RemoteLogging." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:473 #, c-format msgid "[ADLParser] Wrong time %s in ExpirationTime." msgstr "[ADLParser] ExpirationTime Ñодержит недопуÑтимое Ð²Ñ€ÐµÐ¼Ñ %s." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:503 msgid "[ADLParser] AccessControl isn't valid XML." msgstr "[ADLParser] AccessControl не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым XML." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:514 msgid "[ADLParser] CredentialService must contain valid URL." msgstr "[ADLParser] CredentialService должен Ñодержать допуÑтимый URL." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:543 #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:546 msgid "[ADLParser] Only email Prorocol for Notification is supported yet." msgstr "" "[ADLParser] Пока что поддерживаетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ email Prorocol Ð´Ð»Ñ Notification." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:604 msgid "[ADLParser] Missing or wrong value in ProcessesPerSlot." msgstr "[ADLParser] Значение ProcessesPerSlot отÑутÑтвует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:609 msgid "[ADLParser] Missing or wrong value in ThreadsPerProcess." msgstr "[ADLParser] Значение ThreadsPerProcess отÑутÑтвует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:615 msgid "" "[ADLParser] Missing Name element or value in ParallelEnvironment/Option " "element." msgstr "" "[ADLParser] ОтÑутÑтвует Ñлемент Name или значение Ñлемента " "ParallelEnvironment/Option." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:632 msgid "[ADLParser] NetworkInfo is not supported yet." msgstr "[ADLParser] NetworkInfo пока что не поддерживаетÑÑ." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:646 #, c-format msgid "[ADLParser] NodeAccess value %s is not supported yet." msgstr "[ADLParser] Значение NodeAccess %s пока что не поддерживаетÑÑ." 
#: src/hed/acc/JobDescriptionParser/ADLParser.cpp:654 msgid "[ADLParser] Missing or wrong value in NumberOfSlots." msgstr "[ADLParser] Значение NumberOfSlots отсутствует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:661 msgid "" "[ADLParser] The NumberOfSlots element should be specified, when the value of " "useNumberOfSlots attribute of SlotsPerHost element is \"true\"." msgstr "" "[ADLParser] Значение элемента NumberOfSlots должно быть указано, если " "значение атрибута useNumberOfSlots элемента SlotsPerHost - \"true\"." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:668 msgid "[ADLParser] Missing or wrong value in SlotsPerHost." msgstr "[ADLParser] Значение SlotsPerHost отсутствует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:697 msgid "[ADLParser] Missing or wrong value in IndividualPhysicalMemory." msgstr "[ADLParser] Значение IndividualPhysicalMemory отсутствует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:707 msgid "[ADLParser] Missing or wrong value in IndividualVirtualMemory." msgstr "[ADLParser] Значение IndividualVirtualMemory отсутствует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:717 msgid "[ADLParser] Missing or wrong value in DiskSpaceRequirement." msgstr "[ADLParser] Значение DiskSpaceRequirement отсутствует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:731 msgid "[ADLParser] Benchmark is not supported yet." msgstr "[ADLParser] Benchmark пока что не поддерживается." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:739 msgid "[ADLParser] Missing or wrong value in IndividualCPUTime." msgstr "[ADLParser] Значение IndividualCPUTime отсутствует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:747 msgid "[ADLParser] Missing or wrong value in TotalCPUTime." msgstr "[ADLParser] Значение TotalCPUTime отсутствует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:756 msgid "[ADLParser] Missing or wrong value in WallTime." msgstr "[ADLParser] Значение WallTime отсутствует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:776 msgid "[ADLParser] Missing or empty Name in InputFile." msgstr "[ADLParser] Значение Name в InputFile отсутствует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:787 #, c-format msgid "[ADLParser] Wrong URI specified in Source - %s." msgstr "[ADLParser] Указан неверный URI в Source - %s." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:809 msgid "[ADLParser] Missing or empty Name in OutputFile." msgstr "[ADLParser] Значение Name в OutputFile отсутствует или неверно." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:815 #, c-format msgid "[ADLParser] Wrong URI specified in Target - %s." msgstr "[ADLParser] Указан неверный URI в Target - %s." #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:828 #, c-format msgid "Location URI for file %s is invalid" msgstr "Недопустимый URI в Location для файла %s" #: src/hed/acc/JobDescriptionParser/ADLParser.cpp:853 #, c-format msgid "[ADLParser] CreationFlag value %s is not supported." msgstr "[ADLParser] Значение CreationFlag %s не поддерживается."
#: src/hed/acc/JobDescriptionParser/RSLParser.cpp:42 msgid "Left operand for RSL concatenation does not evaluate to a literal" msgstr "Левый операнд для сцепления RSL не приводится к буквенной константе" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:50 msgid "Right operand for RSL concatenation does not evaluate to a literal" msgstr "Правый операнд для сцепления RSL не приводится к буквенной константе" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:161 msgid "Multi-request operator only allowed at top level" msgstr "Оператор множественности RSL допускается лишь в начале документа" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:186 msgid "RSL substitution is not a sequence" msgstr "Замена в RSL не является последовательностью" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:192 msgid "RSL substitution sequence is not of length 2" msgstr "Замена в RSL не является последовательностью из двух элементов" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:211 msgid "RSL substitution variable name does not evaluate to a literal" msgstr "Имя переменной для замены RSL не приводится к буквенной константе" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:220 msgid "RSL substitution variable value does not evaluate to a literal" msgstr "Значение переменной для замены RSL не приводится к буквенной константе" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:313 msgid "End of comment not found" msgstr "Не найдено окончание комментария" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:324 msgid "Junk at end of RSL" msgstr "Неразборчивые фрагменты в конце RSL" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:424 msgid "End of single quoted string not found" msgstr "Не обнаружено конца строки в одиночных кавычках" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:441 msgid "End of double quoted string not found" msgstr "Не обнаружено конца строки в двойных кавычках" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:460 #, c-format msgid "End of user delimiter (%s) quoted string not found" msgstr "" "Не обнаружено конца строки, выделенной пользовательским ограничителем (%s)" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:518 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:546 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:625 msgid "')' expected" msgstr "ожидается ')'" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:528 #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:609 msgid "'(' expected" msgstr "ожидается '('" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:536 msgid "Variable name expected" msgstr "Ожидается имя переменной" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:541 #, c-format msgid "Variable name (%s) contains invalid character (%s)" msgstr "Имя переменной (%s) содержит неверный символ (%s)" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:557 msgid "Broken string" msgstr "Недопустимая строка" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:570 msgid "No left operand for concatenation operator" msgstr "Отсутствует левый операнд оператора подцепления" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:574 msgid "No right operand for concatenation operator" msgstr "Отсутствует правый операнд оператора подцепления" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:638 msgid "Attribute name expected" msgstr "Ожидается имя атрибута" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:643 #, c-format msgid "Attribute name (%s) contains invalid character (%s)" msgstr "Имя
Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° (%s) Ñодержит неверный Ñимвол (%s)" #: src/hed/acc/JobDescriptionParser/RSLParser.cpp:649 msgid "Relation operator expected" msgstr "ОжидаетÑÑ Ð¸Ñпользование релÑционного оператора" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:86 msgid "Error parsing the internally set executables attribute." msgstr "Ошибка разбора переопределённого ÑиÑтемой атрибута executables." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:102 #, c-format msgid "" "File '%s' in the 'executables' attribute is not present in the 'inputfiles' " "attribute" msgstr "" "Файл '%s' перечиÑленный в атрибуте 'executables' отÑутÑтвует в атрибуте " "'inputfiles'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:120 msgid "The value of the ftpthreads attribute must be a number from 1 to 10" msgstr "Значение атрибута 'ftpthreads' должно быть целым чиÑлом от 1 до 10" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:177 msgid "'stdout' attribute must be specified when 'join' attribute is specified" msgstr "" "Ðеобходимо задать значение атрибута 'stdout', еÑли задано значение атрибута " "'join'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:181 msgid "" "Attribute 'join' cannot be specified when both 'stdout' and 'stderr' " "attributes is specified" msgstr "" "Ðтрибут 'join' не может иÑпользоватьÑÑ, еÑли заданы оба атрибута 'stdout' и " "'stderr'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:200 msgid "Attributes 'gridtime' and 'cputime' cannot be specified together" msgstr "Ðтрибуты 'gridtime' и 'cputime' не могут быть заданы одновременно" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:204 msgid "Attributes 'gridtime' and 'walltime' cannot be specified together" msgstr "Ðтрибуты 'gridtime' и 'walltime' не могут быть заданы одновременно" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:226 msgid "" "When specifying 'countpernode' attribute, 'count' attribute must also be " "specified" msgstr "" "При задании атрибута 'countpernode', атрибут 'count' также должен быть задан" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:229 msgid "Value of 'countpernode' attribute must be an integer" msgstr "Значение атрибута 'countpernode' должно быть целочиÑленным" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:287 msgid "No RSL content in job description found" msgstr "Ð’ опиÑании задачи не найдено Ñтруктуры RSL" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:295 msgid "'action' attribute not allowed in user-side job description" msgstr "" "ИÑпользование атрибута 'action' в пользовательÑком опиÑании задачи не " "допуÑкаетÑÑ" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:304 #, c-format msgid "String successfully parsed as %s." msgstr "Строка уÑпешно разобрана как %s." 
#: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:313 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:331 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:349 #, c-format msgid "Attribute '%s' multiply defined" msgstr "Ðтрибут '%s' задан неÑколько раз" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:317 #, c-format msgid "Value of attribute '%s' expected to be single value" msgstr "Значение атрибута '%s' неоднозначно" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:322 #, c-format msgid "Value of attribute '%s' expected to be a string" msgstr "Значение атрибута '%s' не ÑвлÑетÑÑ Ñтрокой" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:338 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:368 #, c-format msgid "Value of attribute '%s' is not a string" msgstr "Значение атрибута '%s' не ÑвлÑетÑÑ Ñтрокой" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:356 #, c-format msgid "Value of attribute '%s' is not sequence" msgstr "Значение атрибута '%s' не ÑвлÑетÑÑ Ð¿Ð¾ÑледовательноÑтью" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:360 #, c-format msgid "" "Value of attribute '%s' has wrong sequence length: Expected %d, found %d" msgstr "" "Значение атрибута '%s' Ñодержит поÑледовательноÑть недопуÑтимой длины: " "ожидаетÑÑ %d, получено %d" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:492 #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1375 msgid "Unexpected RSL type" msgstr "Ðеожиданный тип RSL" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:557 msgid "At least two values are needed for the 'inputfiles' attribute" msgstr "Ð”Ð»Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° 'inputfiles' необходимы как минимум два значениÑ" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:562 msgid "First value of 'inputfiles' attribute (filename) cannot be empty" msgstr "" "ÐŸÐµÑ€Ð²Ð°Ñ Ñ‡Ð°Ñть Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° 'inputfiles' (filename) не может быть пуÑтой" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:587 #, c-format msgid "Invalid URL '%s' for input file '%s'" msgstr "ÐедопуÑтимый URL '%s' Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ð½Ð¾Ð³Ð¾ файла '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:596 #, c-format msgid "Invalid URL option syntax in option '%s' for input file '%s'" msgstr "ÐедопуÑтимый ÑинтакÑÐ¸Ñ Ð¾Ð¿Ñ†Ð¸Ð¸ URL в опции '%s' Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ð½Ð¾Ð³Ð¾ файла '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:606 #, c-format msgid "Invalid URL: '%s' in input file '%s'" msgstr "ÐедопуÑтимый URL: '%s' во входном файле '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:677 msgid "At least two values are needed for the 'outputfiles' attribute" msgstr "Ð”Ð»Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° 'outputfiles' необходимы как минимум два значениÑ" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:682 msgid "First value of 'outputfiles' attribute (filename) cannot be empty" msgstr "" "ÐŸÐµÑ€Ð²Ð°Ñ Ñ‡Ð°Ñть Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° 'outputfiles' (filename) не может быть пуÑтой" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:694 #, c-format msgid "Invalid URL '%s' for output file '%s'" msgstr "ÐедопуÑтимый URL '%s' Ð´Ð»Ñ Ð²Ñ‹Ñ…Ð¾Ð´Ð½Ð¾Ð³Ð¾ файла '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:704 #, c-format msgid "Invalid URL option syntax in option '%s' for output file '%s'" msgstr "ÐедопуÑтимый ÑинтакÑÐ¸Ñ Ð¾Ð¿Ñ†Ð¸Ð¸ URL в опции '%s' Ð´Ð»Ñ Ð²Ñ‹Ñ…Ð¾Ð´Ð½Ð¾Ð³Ð¾ файла '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:714 #, c-format msgid "Invalid URL: '%s' in output file '%s'" msgstr "ÐедопуÑтимый URL: '%s' в выходном файле '%s'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:746 #, c-format 
msgid "" "Invalid comparison operator '%s' used at 'delegationid' attribute, only \"=" "\" is allowed." msgstr "" "ÐедопуÑтимый оператор ÑÑ€Ð°Ð²Ð½ÐµÐ½Ð¸Ñ '%s' иÑпользуетÑÑ Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð¼ " "'delegationid', допуÑкаетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ \"=\"." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:764 #, c-format msgid "" "Invalid comparison operator '%s' used at 'queue' attribute in 'GRIDMANAGER' " "dialect, only \"=\" is allowed" msgstr "" "ÐедопуÑтимый оператор ÑÑ€Ð°Ð²Ð½ÐµÐ½Ð¸Ñ '%s' иÑпользуетÑÑ Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð¼ 'queue' в " "диалекте GRIDMANAGER, допуÑкаетÑÑ Ð»Ð¸ÑˆÑŒ \"=\"" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:770 #, c-format msgid "" "Invalid comparison operator '%s' used at 'queue' attribute, only \"!=\" or " "\"=\" are allowed." msgstr "" "ÐедопуÑтимый оператор ÑÑ€Ð°Ð²Ð½ÐµÐ½Ð¸Ñ '%s' иÑпользуетÑÑ Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð¼ 'queue', " "допуÑкаютÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ \"!=\" или \"=\"." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:927 #, c-format msgid "Value of attribute '%s' expected not to be empty" msgstr "Значение атрибута '%s' не должно быть пуÑтым" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1036 msgid "The value of the acl XRSL attribute isn't valid XML." msgstr "Значение атрибута XRSL acl не ÑвлÑетÑÑ Ð´ÐµÐ¹Ñтвительным кодом XML." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1050 msgid "The cluster XRSL attribute is currently unsupported." msgstr "Ðтрибут XRSL cluster пока что не поддерживаетÑÑ." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1066 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it must contain an email " "address" msgstr "" "СинтакÑичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° в значении атрибута 'notify' ('%s'), он должен " "Ñодержать Ð°Ð´Ñ€ÐµÑ Ñлектронной почты" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1074 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it must only contain email " "addresses after state flag(s)" msgstr "" "СинтакÑичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° в значении атрибута 'notify' ('%s'), он должен " "Ñодержать лишь адреÑа Ñлектронной почты поÑле меток ÑтатуÑа" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1077 #, c-format msgid "" "Syntax error in 'notify' attribute value ('%s'), it contains unknown state " "flags" msgstr "" "СинтакÑичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° в значении атрибута 'notify' ('%s'), он Ñодержит " "неизвеÑтные метки ÑтатуÑа" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1125 msgid "priority is too large - using max value 100" msgstr "Ñлишком выÑокий приоритет - иÑпользуетÑÑ Ð¼Ð°ÐºÑимальное значение 100" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1158 #, c-format msgid "Invalid nodeaccess value: %s" msgstr "ÐедопуÑтимое значение nodeaccess: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1201 msgid "Value of 'count' attribute must be an integer" msgstr "Значение атрибута 'count' должно быть целочиÑленным" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1231 msgid "Value of 'exclusiveexecution' attribute must either be 'yes' or 'no'" msgstr "" "Значением атрибута 'exclusiveexecution' может быть либо 'yes', либо 'no'" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1277 #, c-format msgid "Invalid action value %s" msgstr "ÐедопуÑтимое значение action %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1367 #, c-format msgid "The specified Globus attribute (%s) is not supported. %s ignored." msgstr "Указанный атрибут Globus (%s) не поддерживаетÑÑ. %s игнорируетÑÑ." 
#: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1371 #, c-format msgid "Unknown XRSL attribute: %s - Ignoring it." msgstr "ÐеизвеÑтный атрибут XRSL: %s - игнорируетÑÑ." #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1385 #, c-format msgid "Wrong language requested: %s" msgstr "Запрошен неверный Ñзык: %s" #: src/hed/acc/JobDescriptionParser/XRSLParser.cpp:1722 msgid "" "Cannot output XRSL representation: The Resources.SlotRequirement." "NumberOfSlots attribute must be specified when the Resources.SlotRequirement." "SlotsPerHost attribute is specified." msgstr "" "Ðевозможно вывеÑти предÑтавление XRSL: атрибут Resources.SlotRequirement." "NumberOfSlots должен быть задан, еÑли задан атрибут Resources." "SlotRequirement.SlotsPerHost ." #: src/hed/acc/LDAP/Extractor.h:22 #, c-format msgid "Extractor[%s] (%s): %s = %s" msgstr "Extractor[%s] (%s): %s = %s" #: src/hed/acc/LDAP/Extractor.h:113 src/hed/acc/LDAP/Extractor.h:130 #, c-format msgid "Extractor[%s] (%s): %s contains %s" msgstr "Extractor[%s] (%s): %s Ñодержит %s" #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPGLUE2.cpp:54 #, c-format msgid "Adding endpoint '%s' with interface name %s" msgstr "ДобавлÑетÑÑ Ñ‚Ð¾Ñ‡ÐºÐ° доÑтупа '%s' Ñ Ð½Ð°Ð·Ð²Ð°Ð½Ð¸ÐµÐ¼ интерфейÑа %s" #: src/hed/acc/LDAP/JobListRetrieverPluginLDAPNG.cpp:72 #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:46 #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:47 #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:57 msgid "Can't create information handle - is the ARC ldap DMC plugin available?" msgstr "" "Ðе удалоÑÑŒ Ñоздать ÑÑылку Ð´Ð»Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ - проверьте, доÑтупен ли " "подгружаемый модуль ARC LDAP DMC" #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:79 #, c-format msgid "Unknown entry in EGIIS (%s)" msgstr "ÐеизвеÑÑ‚Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ в EGIIS (%s)" #: src/hed/acc/LDAP/ServiceEndpointRetrieverPluginEGIIS.cpp:87 msgid "" "Entry in EGIIS is missing one or more of the attributes 'Mds-Service-type', " "'Mds-Service-hn', 'Mds-Service-port' and/or 'Mds-Service-Ldap-suffix'" msgstr "" "ЗапиÑÑŒ в EGIIS не Ñодержит одного или неÑкольких атрибутов 'Mds-Service-" "type', 'Mds-Service-hn', 'Mds-Service-port' и/или 'Mds-Service-Ldap-suffix'" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:219 msgid "" "The \"FreeSlotsWithDuration\" attribute is wrongly formatted. Ignoring it." msgstr "Ðтрибут \"FreeSlotsWithDuration\" неверно Ñформатирован; игнорируетÑÑ." #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPGLUE2.cpp:220 #: src/hed/libs/compute/GLUE2.cpp:248 #, c-format msgid "Wrong format of the \"FreeSlotsWithDuration\" = \"%s\" (\"%s\")" msgstr "Ðеверный формат \"FreeSlotsWithDuration\" = \"%s\" (\"%s\")" #: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:389 #, c-format msgid "Unable to parse the %s.%s value from execution service (%s)." msgstr "" "Ðевозможно разобрать %s.Получено значение %s от Ñлужбы иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ (%s)." 
#: src/hed/acc/LDAP/TargetInformationRetrieverPluginLDAPNG.cpp:390 #, c-format msgid "Value of %s.%s is \"%s\"" msgstr "Значение %s.%s: \"%s\"" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:65 #: src/services/wrappers/python/pythonwrapper.cpp:92 msgid "Failed to initialize main Python thread" msgstr "Сбой запуÑка головного потока Python" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:71 #: src/services/wrappers/python/pythonwrapper.cpp:97 msgid "Main Python thread was not initialized" msgstr "Головной поток Python не был запущен" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:81 #, c-format msgid "Loading Python broker (%i)" msgstr "Подгрузка Python broker (%i)" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:104 #: src/services/wrappers/python/pythonwrapper.cpp:134 msgid "Main Python thread is not initialized" msgstr "Головной процеÑÑ Python не был запущен" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:108 msgid "PythonBroker init" msgstr "Ð˜Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ PythonBroker" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:116 msgid "" "Invalid class name. The broker argument for the PythonBroker should be\n" " Filename.Class.args (args is optional), for example SampleBroker." "MyBroker" msgstr "" "ÐедопуÑтимое Ð¸Ð¼Ñ ÐºÐ»Ð°ÑÑа. Ðргумент брокера Ð´Ð»Ñ PythonBroker должен быть\n" " Filename.Class.args (args не обÑзательно), например: SampleBroker." "MyBroker" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:122 #, c-format msgid "Class name: %s" msgstr "Ðазвание клаÑÑа: %s" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:123 #, c-format msgid "Module name: %s" msgstr "Ðазвание модулÑ: %s" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:132 #: src/services/wrappers/python/pythonwrapper.cpp:178 msgid "Cannot convert ARC module name to Python string" msgstr "Ðевозможно перевеÑти название Ð¼Ð¾Ð´ÑƒÐ»Ñ ARC в Ñтроку Python" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:140 #: src/services/wrappers/python/pythonwrapper.cpp:186 msgid "Cannot import ARC module" msgstr "Ðе удалоÑÑŒ импортировать модуль ARC" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:149 #: src/services/wrappers/python/pythonwrapper.cpp:196 #: src/services/wrappers/python/pythonwrapper.cpp:429 msgid "Cannot get dictionary of ARC module" msgstr "Ошибка доÑтупа к Ñловарю Ð¼Ð¾Ð´ÑƒÐ»Ñ ARC" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:158 msgid "Cannot find ARC UserConfig class" msgstr "Ðе удалоÑÑŒ найти клаÑÑ ARC UserConfig" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:166 msgid "UserConfig class is not an object" msgstr "КлаÑÑ UserConfig не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:173 msgid "Cannot find ARC JobDescription class" msgstr "Ðе удалоÑÑŒ найти клаÑÑ ARC JobDescription" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:181 msgid "JobDescription class is not an object" msgstr "КлаÑÑ JobDescription не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:188 msgid "Cannot find ARC ExecutionTarget class" msgstr "Ðе удалоÑÑŒ найти клаÑÑ ARC ExecutionTarget" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:196 msgid "ExecutionTarget class is not an object" msgstr "КлаÑÑ ExecutionTarget не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:207 #: src/services/wrappers/python/pythonwrapper.cpp:157 msgid "Cannot convert module name to Python string" msgstr "Ðевозможно перевеÑти название Ð¼Ð¾Ð´ÑƒÐ»Ñ Ð² Ñтроку Python" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:215 #: 
src/services/wrappers/python/pythonwrapper.cpp:164 msgid "Cannot import module" msgstr "Ðе удалоÑÑŒ импортировать модуль" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:224 msgid "Cannot get dictionary of custom broker module" msgstr "Ðевозможно обнаружить Ñловарь пользовательÑкого Ð¼Ð¾Ð´ÑƒÐ»Ñ Ð¿Ð»Ð°Ð½Ð¸Ñ€Ð¾Ð²Ñ‰Ð¸ÐºÐ°" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:233 msgid "Cannot find custom broker class" msgstr "Ðе обнаружен клаÑÑ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»ÑŒÑкого планировщика" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:241 #, c-format msgid "%s class is not an object" msgstr "КлаÑÑ %s не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:247 msgid "Cannot create UserConfig argument" msgstr "Ðе удалоÑÑŒ Ñоздать аргумент UserConfig" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:255 msgid "Cannot convert UserConfig to Python object" msgstr "Ðе удалоÑÑŒ преобразовать UserConfig в объект Python" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:263 #: src/services/wrappers/python/pythonwrapper.cpp:253 msgid "Cannot create argument of the constructor" msgstr "Ðе удалоÑÑŒ Ñоздать аргумент конÑтруктора" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:272 #: src/services/wrappers/python/pythonwrapper.cpp:261 msgid "Cannot create instance of Python class" msgstr "Ðе удалоÑÑŒ реализовать клаÑÑ Python" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:278 #, c-format msgid "Python broker constructor called (%d)" msgstr "Вызван Python-конÑтруктор планировщика (%d)" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:302 #, c-format msgid "Python broker destructor called (%d)" msgstr "Вызван Python-деÑтруктор планировщика (%d)" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:311 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:328 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:361 msgid "Cannot create ExecutionTarget argument" msgstr "Ðевозможно Ñоздать аргумент ExecutionTarget" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:319 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:336 #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:369 #, c-format msgid "Cannot convert ExecutionTarget (%s) to python object" msgstr "Ðевозможно преобразовать ExecutionTarget (%s) в объект Python" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:393 msgid "Cannot create JobDescription argument" msgstr "Ðевозможно Ñоздать аргумент JobDescription" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:401 msgid "Cannot convert JobDescription to python object" msgstr "Ðевозможно преобразовать JobDescription в объект Python" #: src/hed/acc/PythonBroker/PythonBrokerPlugin.cpp:422 msgid "Do sorting using user created python broker" msgstr "" "Сортировка Ñ Ð¸Ñпользованием пользовательÑкого python-Ñкрипта планировщика" #: src/hed/daemon/unix/daemon.cpp:84 #, c-format msgid "Daemonization fork failed: %s" msgstr "Ðе удалоÑÑŒ Ñоздать дочерний демон: %s" #: src/hed/daemon/unix/daemon.cpp:95 msgid "Watchdog (re)starting application" msgstr "Самоконтроль (пере)запуÑкает приложение" #: src/hed/daemon/unix/daemon.cpp:100 #, c-format msgid "Watchdog fork failed: %s" msgstr "Ðе удалоÑÑŒ Ñоздать дочерний Ñторожевой процеÑÑ: %s" #: src/hed/daemon/unix/daemon.cpp:110 msgid "Watchdog starting monitoring" msgstr "Самоконтроль запуÑкает мониторинг" #: src/hed/daemon/unix/daemon.cpp:136 #, c-format msgid "Watchdog detected application exit due to signal %u" msgstr "Самоконтроль обнаружил завершение Ð¿Ñ€Ð¸Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ Ð¿Ð¾ Ñигналу %u" #: src/hed/daemon/unix/daemon.cpp:138 #, c-format 
msgid "Watchdog detected application exited with code %u" msgstr "Самоконтроль обнаружил приложение, завершившееÑÑ Ñ ÐºÐ¾Ð´Ð¾Ð¼ %u" #: src/hed/daemon/unix/daemon.cpp:140 msgid "Watchdog detected application exit" msgstr "Самоконтроль обнаружил завершение приложениÑ" #: src/hed/daemon/unix/daemon.cpp:149 msgid "" "Watchdog exiting because application was purposely killed or exited itself" msgstr "" "Самоконтроль оÑтанавливаетÑÑ, потому что приложение было прервано намеренно, " "или завершилоÑÑŒ" #: src/hed/daemon/unix/daemon.cpp:156 msgid "Watchdog detected application timeout or error - killing process" msgstr "" "Самоконтроль обнаружил превышение времени Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ñ€Ð¸Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ Ð¸Ð»Ð¸ Ñбой - " "процеÑÑ Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°ÐµÑ‚ÑÑ" #: src/hed/daemon/unix/daemon.cpp:167 msgid "Watchdog failed to wait till application exited - sending KILL" msgstr "" "Самоконтроль не дождалÑÑ Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ð¿Ñ€Ð¸Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ - поÑылаетÑÑ Ñигнал KILL" #: src/hed/daemon/unix/daemon.cpp:179 msgid "Watchdog failed to kill application - giving up and exiting" msgstr "Самоконтроль не Ñмог оборвать приложение - отказ и завершение" #: src/hed/daemon/unix/daemon.cpp:200 msgid "Shutdown daemon" msgstr "ОÑтанов демона" #: src/hed/daemon/unix/main_unix.cpp:43 msgid "shutdown" msgstr "Выключение" #: src/hed/daemon/unix/main_unix.cpp:46 msgid "exit" msgstr "выход" #: src/hed/daemon/unix/main_unix.cpp:84 msgid "No server config part of config file" msgstr "Ð’ файле наÑтроек отÑутÑтвуют наÑтройки Ñервера" #: src/hed/daemon/unix/main_unix.cpp:159 #, c-format msgid "Unknown log level %s" msgstr "ÐеизвеÑтный уровень Ð¶ÑƒÑ€Ð½Ð°Ð»Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ %s" #: src/hed/daemon/unix/main_unix.cpp:169 #, c-format msgid "Failed to open log file: %s" msgstr "Ðе удалоÑÑŒ открыть журнальный файл: %s" #: src/hed/daemon/unix/main_unix.cpp:201 msgid "Start foreground" msgstr "ЗапуÑк Ñ Ð²Ñ‹Ñоким приоритетом" #: src/hed/daemon/unix/main_unix.cpp:250 #, c-format msgid "XML config file %s does not exist" msgstr "Файл наÑтроек XML %s не ÑущеÑтвует" #: src/hed/daemon/unix/main_unix.cpp:254 src/hed/daemon/unix/main_unix.cpp:269 #, c-format msgid "Failed to load service configuration from file %s" msgstr "Ðе удалоÑÑŒ загрузить наÑтройки ÑервиÑа из файла %s" #: src/hed/daemon/unix/main_unix.cpp:260 #, c-format msgid "INI config file %s does not exist" msgstr "Файл наÑтроек INI %s не ÑущеÑтвует" #: src/hed/daemon/unix/main_unix.cpp:265 src/hed/daemon/unix/main_unix.cpp:287 msgid "Error evaluating profile" msgstr "Ошибка проверки профилÑ" #: src/hed/daemon/unix/main_unix.cpp:281 msgid "Error loading generated configuration" msgstr "Ошибка загрузки Ñгенерированных наÑтроек" #: src/hed/daemon/unix/main_unix.cpp:292 msgid "Failed to load service configuration from any default config file" msgstr "Ðе удалоÑÑŒ загрузить наÑтройки ÑервиÑа ни из какого файла наÑтроек" #: src/hed/daemon/unix/main_unix.cpp:353 msgid "Schema validation error" msgstr "Ошибка проверки Ñхемы" #: src/hed/daemon/unix/main_unix.cpp:368 msgid "Configuration root element is not " msgstr "Корневой Ñлемент наÑтроек не ÑвлÑетÑÑ " #: src/hed/daemon/unix/main_unix.cpp:384 #, c-format msgid "Cannot switch to group (%s)" msgstr "Ðевозможно перейти к группе (%s)" #: src/hed/daemon/unix/main_unix.cpp:394 #, c-format msgid "Cannot switch to primary group for user (%s)" msgstr "Ðевозможно переключить на оÑновную группу Ð´Ð»Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ (%s)" #: src/hed/daemon/unix/main_unix.cpp:399 #, c-format msgid "Cannot switch to user (%s)" msgstr "Ðевозможно перейти к пользователю (%s)" #: 
src/hed/daemon/unix/main_unix.cpp:417 msgid "Failed to load service side MCCs" msgstr "Ðе удалоÑÑŒ загрузить компоненты MCC Ñервера" #: src/hed/daemon/unix/main_unix.cpp:419 src/tests/count/test_service.cpp:29 #: src/tests/echo/test.cpp:30 src/tests/echo/test_service.cpp:29 msgid "Service side MCCs are loaded" msgstr "Подгружены ÑервиÑные компоненты цепи Ñообщений" #: src/hed/daemon/unix/main_unix.cpp:426 msgid "Unexpected arguments supplied" msgstr "Заданы непредуÑмотренные аргументы" #: src/hed/dmc/acix/DataPointACIX.cpp:93 src/hed/dmc/acix/DataPointACIX.cpp:342 #: src/hed/dmc/rucio/DataPointRucio.cpp:220 #: src/hed/dmc/rucio/DataPointRucio.cpp:462 #, c-format msgid "No locations found for %s" msgstr "Ðе найдено раÑположений Ð´Ð»Ñ %s" #: src/hed/dmc/acix/DataPointACIX.cpp:121 #, c-format msgid "Found none or multiple URLs (%s) in ACIX URL: %s" msgstr "Ð’ ACIX URL обнаружено ни одного или неÑколько URL (%s): %s" #: src/hed/dmc/acix/DataPointACIX.cpp:131 #, c-format msgid "Cannot handle URL %s" msgstr "Ðевозможно обработать URL %s" #: src/hed/dmc/acix/DataPointACIX.cpp:138 #, c-format msgid "Could not resolve original source of %s: out of time" msgstr "Ðе удалоÑÑŒ определить иÑходный иÑточник %s: Ð²Ñ€ÐµÐ¼Ñ Ð¸Ñтекло" #: src/hed/dmc/acix/DataPointACIX.cpp:144 #, c-format msgid "Could not resolve original source of %s: %s" msgstr "Ðе удалоÑÑŒ определить иÑходный иÑточник %s: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:160 #, c-format msgid "Querying ACIX server at %s" msgstr "ОпрашиваетÑÑ Ñервер ACIX на %s" #: src/hed/dmc/acix/DataPointACIX.cpp:161 #, c-format msgid "Calling acix with query %s" msgstr "Вызов ACIX Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñом %s" #: src/hed/dmc/acix/DataPointACIX.cpp:167 #, c-format msgid "Failed to query ACIX: %s" msgstr "Сбой запроÑа к ACIX: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:171 #: src/hed/dmc/acix/DataPointACIX.cpp:308 #, c-format msgid "Failed to parse ACIX response: %s" msgstr "Сбой разборки отзыва ACIX: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:298 #, c-format msgid "ACIX returned %s" msgstr "ACIX ответил %s" #: src/hed/dmc/acix/DataPointACIX.cpp:319 #, c-format msgid "No locations for %s" msgstr "Ðе найдено ни одного меÑÑ‚Ð¾Ð½Ð°Ñ…Ð¾Ð¶Ð´ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° %s" #: src/hed/dmc/acix/DataPointACIX.cpp:325 #, c-format msgid "%s: ACIX Location: %s" msgstr "%s: МеÑтонахождение в ACIX: %s" #: src/hed/dmc/acix/DataPointACIX.cpp:327 #, c-format msgid "%s: Location %s not accessible remotely, skipping" msgstr "%s: К раÑположению %s нет удалённого доÑтупа, пропуÑкаетÑÑ" #: src/hed/dmc/file/DataPointFile.cpp:86 #, c-format msgid "Unknown channel %s for stdio protocol" msgstr "ÐеизвеÑтный канал %s Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñ‚Ð¾ÐºÐ¾Ð»Ð° stdio" #: src/hed/dmc/file/DataPointFile.cpp:93 #, c-format msgid "Failed to open stdio channel %s" msgstr "Ðе удалоÑÑŒ открыть канал stdio %s" #: src/hed/dmc/file/DataPointFile.cpp:94 #, c-format msgid "Failed to open stdio channel %d" msgstr "Ðе удалоÑÑŒ открыть канал stdio %d" #: src/hed/dmc/file/DataPointFile.cpp:334 #, c-format msgid "fsync of file %s failed: %s" msgstr "Сбой операции fsync на файле %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:338 #: src/hed/dmc/file/DataPointFile.cpp:345 #, c-format msgid "closing file %s failed: %s" msgstr "Ñбой при закрытии файла %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:364 #, c-format msgid "File is not accessible: %s" msgstr "Файл недоÑтупен: %s" #: src/hed/dmc/file/DataPointFile.cpp:370 #: src/hed/dmc/file/DataPointFile.cpp:455 #, c-format msgid "Can't stat file: %s: %s" msgstr "Ðевозможно получить ÑÑ‚Ð°Ñ‚ÑƒÑ Ñ„Ð°Ð¹Ð»Ð°: %s: 
%s" #: src/hed/dmc/file/DataPointFile.cpp:416 #: src/hed/dmc/file/DataPointFile.cpp:422 #, c-format msgid "Can't stat stdio channel %s" msgstr "Ðевозможно выполнить операцию stat Ð´Ð»Ñ ÐºÐ°Ð½Ð°Ð»Ð° stdio %s" #: src/hed/dmc/file/DataPointFile.cpp:470 #, c-format msgid "%s is not a directory" msgstr "%s не ÑвлÑетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼" #: src/hed/dmc/file/DataPointFile.cpp:485 src/hed/dmc/s3/DataPointS3.cpp:440 #: src/hed/dmc/s3/DataPointS3.cpp:550 #, c-format msgid "Failed to read object %s: %s" msgstr "Сбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð¾Ð±ÑŠÐµÐºÑ‚Ð° %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:498 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:517 #, c-format msgid "File is not accessible %s: %s" msgstr "Файл недоÑтупен %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:504 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:523 #, c-format msgid "Can't delete directory %s: %s" msgstr "Ðевозможно удалить каталог %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:511 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:530 #, c-format msgid "Can't delete file %s: %s" msgstr "Ðевозможно удалить файл %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:521 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:335 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:313 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1466 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:545 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:565 #, c-format msgid "Creating directory %s" msgstr "СоздаетÑÑ Ð´Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ %s" #: src/hed/dmc/file/DataPointFile.cpp:529 src/hed/dmc/srm/DataPointSRM.cpp:171 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:579 #, c-format msgid "Renaming %s to %s" msgstr "%s переименовываетÑÑ Ð² %s" #: src/hed/dmc/file/DataPointFile.cpp:531 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:588 #, c-format msgid "Can't rename file %s: %s" msgstr "Ðе удалоÑÑŒ переименовать файл %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:559 #, c-format msgid "Failed to open %s for reading: %s" msgstr "Ðевозможно открыть %s Ð´Ð»Ñ Ñ‡Ñ‚ÐµÐ½Ð¸Ñ: %s" #: src/hed/dmc/file/DataPointFile.cpp:574 #: src/hed/dmc/file/DataPointFile.cpp:709 #, c-format msgid "Failed to switch user id to %d/%d" msgstr "Ðе удалоÑÑŒ изменить идентификатор Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½Ð° %d/%d" #: src/hed/dmc/file/DataPointFile.cpp:580 #, c-format msgid "Failed to create/open file %s: %s" msgstr "Сбой при Ñоздании/открытии файла %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:596 msgid "Failed to create thread" msgstr "Ðе удалоÑÑŒ Ñоздать поток" #: src/hed/dmc/file/DataPointFile.cpp:676 #, c-format msgid "Invalid url: %s" msgstr "Ðеверный URL: %s" #: src/hed/dmc/file/DataPointFile.cpp:685 src/hed/libs/data/FileCache.cpp:482 #, c-format msgid "Failed to create directory %s: %s" msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð° %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:698 #: src/hed/dmc/file/DataPointFile.cpp:717 #, c-format msgid "Failed to create file %s: %s" msgstr "Сбой при Ñоздании файла %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:729 #, c-format msgid "setting file %s to size %llu" msgstr "файлу %s приÑваиваетÑÑ Ñ€Ð°Ð·Ð¼ÐµÑ€ %llu" #: src/hed/dmc/file/DataPointFile.cpp:749 #, c-format msgid "Failed to preallocate space for %s" msgstr "Сбой предварительного Ñ€ÐµÐ·ÐµÑ€Ð²Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¼ÐµÑта Ð´Ð»Ñ %s" #: src/hed/dmc/file/DataPointFile.cpp:790 src/hed/libs/data/FileCache.cpp:856 #, c-format msgid "Failed to clean up file %s: %s" msgstr "Сбой при очиÑтке файла %s: %s" #: src/hed/dmc/file/DataPointFile.cpp:799 #, c-format msgid "Error during file validation. Can't stat file %s: %s" msgstr "" "Ошибка при проверке файла. 
Ðевозможно выполнить операцию stat Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° %s: " "%s" #: src/hed/dmc/file/DataPointFile.cpp:803 #, c-format msgid "" "Error during file validation: Local file size %llu does not match source " "file size %llu for file %s" msgstr "" "Ошибка при Ñверке: размер локального файла %llu не ÑоответÑтвует размеру " "файла-иÑточника %llu Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:53 #, c-format msgid "Using proxy %s" msgstr "ИÑпользуетÑÑ Ð¿Ñ€Ð¾ÐºÑи %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:54 #, c-format msgid "Using key %s" msgstr "ИÑпользуетÑÑ ÐºÐ»ÑŽÑ‡ %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:55 #, c-format msgid "Using cert %s" msgstr "ИÑпользуетÑÑ Ñертификат %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:113 msgid "Locations are missing in destination LFC URL" msgstr "Ð’ URL Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ LFC отÑутÑтвуют меÑтоположениÑ" #: src/hed/dmc/gfal/DataPointGFAL.cpp:119 #, c-format msgid "Duplicate replica found in LFC: %s" msgstr "Ð’ LFC обнаружена Ð¸Ð´ÐµÐ½Ñ‚Ð¸Ñ‡Ð½Ð°Ñ ÐºÐ¾Ð¿Ð¸Ñ: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:121 #, c-format msgid "Adding location: %s - %s" msgstr "ДобавлÑетÑÑ Ð°Ð´Ñ€ÐµÑ: %s - %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:129 #: src/hed/libs/data/DataPointIndex.cpp:161 #, c-format msgid "Add location: url: %s" msgstr "Добавление раÑположениÑ: url: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:130 #: src/hed/libs/data/DataPointIndex.cpp:162 #, c-format msgid "Add location: metadata: %s" msgstr "Добавление раÑположениÑ: metadata: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:150 #: src/hed/dmc/gfal/DataPointGFAL.cpp:310 #, c-format msgid "gfal_open failed: %s" msgstr "Сбой gfal_open: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:163 #: src/hed/dmc/gfal/DataPointGFAL.cpp:223 #: src/hed/dmc/gfal/DataPointGFAL.cpp:249 #: src/hed/dmc/gfal/DataPointGFAL.cpp:324 #: src/hed/dmc/gfal/DataPointGFAL.cpp:403 #: src/hed/dmc/gfal/DataPointGFAL.cpp:430 #, c-format msgid "gfal_close failed: %s" msgstr "Сбой gfal_close: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:195 #, c-format msgid "gfal_read failed: %s" msgstr "Сбой gfal_read: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:237 msgid "StopReading starts waiting for transfer_condition." msgstr "StopReading начинает ожидание transfer_condition." #: src/hed/dmc/gfal/DataPointGFAL.cpp:239 msgid "StopReading finished waiting for transfer_condition." msgstr "StopReading закончил ожидание transfer_condition." 
#: src/hed/dmc/gfal/DataPointGFAL.cpp:271 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:68 #: src/libs/data-staging/DataDeliveryLocalComm.cpp:73 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:42 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:47 #, c-format msgid "No locations defined for %s" msgstr "Ðе найдено ни одного меÑÑ‚Ð¾Ð½Ð°Ñ…Ð¾Ð¶Ð´ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:278 #, c-format msgid "Failed to set LFC replicas: %s" msgstr "Сбой Ð·Ð°Ð´Ð°Ð½Ð¸Ñ ÐºÐ¾Ð¿Ð¸Ð¹ в LFC: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:304 #, c-format msgid "gfal_mkdir failed (%s), trying to write anyway" msgstr "Сбой в gfal_mkdir (%s), вÑÑ‘ же попытаемÑÑ Ð·Ð°Ð¿Ð¸Ñать" #: src/hed/dmc/gfal/DataPointGFAL.cpp:359 #, c-format msgid "DataPointGFAL::write_file got position %d and offset %d, has to seek" msgstr "" "DataPointGFAL::write_file получил на входе Ð°Ð´Ñ€ÐµÑ %d и Ñдвиг %d, проводитÑÑ " "поиÑк" #: src/hed/dmc/gfal/DataPointGFAL.cpp:388 #, c-format msgid "gfal_write failed: %s" msgstr "Сбой gfal_write: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:418 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:388 msgid "StopWriting starts waiting for transfer_condition." msgstr "StopWriting начинает ожидание transfer_condition." #: src/hed/dmc/gfal/DataPointGFAL.cpp:420 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:390 msgid "StopWriting finished waiting for transfer_condition." msgstr "StopWriting закончил ожидание transfer_condition." #: src/hed/dmc/gfal/DataPointGFAL.cpp:451 #, c-format msgid "gfal_stat failed: %s" msgstr "Сбой gfal_stat: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:496 #, c-format msgid "gfal_listxattr failed, no replica information can be obtained: %s" msgstr "Сбой в gfal_listxattr, невозможно получить информацию о копиÑÑ…: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:537 #, c-format msgid "gfal_opendir failed: %s" msgstr "Сбой gfal_opendir: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:549 #, c-format msgid "List will stat the URL %s" msgstr "ПеречиÑление запроÑит информацию stat об URL %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:556 #, c-format msgid "gfal_closedir failed: %s" msgstr "Сбой gfal_closedir: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:584 #, c-format msgid "gfal_rmdir failed: %s" msgstr "Сбой gfal_rmdir: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:587 #, c-format msgid "gfal_unlink failed: %s" msgstr "Сбой gfal_unlink: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:604 #, c-format msgid "gfal_mkdir failed: %s" msgstr "Сбой gfal_mkdir: %s" #: src/hed/dmc/gfal/DataPointGFAL.cpp:619 #, c-format msgid "gfal_rename failed: %s" msgstr "Сбой gfal_rename: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:19 #, c-format msgid "Failed to obtain bytes transferred: %s" msgstr "Сбой Ð¾Ð¿Ñ€ÐµÐ´ÐµÐ»ÐµÐ½Ð¸Ñ ÐºÐ¾Ð»Ð¸Ñ‡ÐµÑтва переданных байтов: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:42 #, c-format msgid "Failed to get initiate GFAL2 parameter handle: %s" msgstr "Ðе удалоÑÑŒ получить ÑÑылку параметра GFAL2: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:49 #, c-format msgid "Failed to get initiate new GFAL2 context: %s" msgstr "Ðе удалоÑÑŒ получить новый контекÑÑ‚ GFAL2: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:56 #, c-format msgid "Failed to set GFAL2 monitor callback: %s" msgstr "Сбой уÑтановки обратного вызова монитора GFAL2: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:64 #, c-format msgid "Failed to set overwrite option in GFAL2: %s" msgstr "Сбой уÑтановки опции перезапиÑи в GFAL2: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:72 #, c-format msgid "Failed to set GFAL2 
transfer timeout, will use default: %s" msgstr "" "Сбой уÑтановки времени Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð¸ GFAL2, будет иÑпользоватьÑÑ " "значение по умолчанию: %s" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:84 msgid "Transfer failed" msgstr "Передача не удалаÑÑŒ" #: src/hed/dmc/gfal/GFALTransfer3rdParty.cpp:92 msgid "Transfer succeeded" msgstr "Передача удалаÑÑŒ" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:38 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:54 msgid "ftp_complete_callback: success" msgstr "ftp_complete_callback: уÑпех" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:44 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:60 #, c-format msgid "ftp_complete_callback: error: %s" msgstr "ftp_complete_callback: ошибка: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:60 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:76 msgid "ftp_check_callback" msgstr "ftp_check_callback" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:62 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:90 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:116 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:135 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:305 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:340 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:678 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:847 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:879 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:913 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1052 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1104 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1114 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1122 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1130 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1138 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1144 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:78 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:106 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:283 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:319 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:729 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:762 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:799 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:930 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:994 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1004 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1012 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1020 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1028 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1034 #: src/services/gridftpd/commands.cpp:1226 #: src/services/gridftpd/dataread.cpp:76 src/services/gridftpd/dataread.cpp:173 #: src/services/gridftpd/datawrite.cpp:59 #: src/services/gridftpd/datawrite.cpp:146 #, c-format msgid "Globus error: %s" msgstr "Ошибка Globus: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:73 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:89 msgid "Excessive data received while checking file access" msgstr "При проверке прав доÑтупа к файлу получены избыточные данные" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:89 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:105 msgid "Registration of Globus FTP buffer failed - cancel check" msgstr "Сбой региÑтрации буфера Globus FTP - проверка прерываетÑÑ" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:115 msgid "check_ftp: globus_ftp_client_size failed" msgstr "check_ftp: Ñбой в globus_ftp_client_size" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:119 msgid "check_ftp: timeout waiting for size" msgstr "check_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ñ‹ size" #: 
src/hed/dmc/gridftp/DataPointGridFTP.cpp:124 msgid "check_ftp: failed to get file's size" msgstr "check_ftp: не удалоÑÑŒ определить размер файла" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:127 #, c-format msgid "check_ftp: obtained size: %lli" msgstr "check_ftp: получен размер: %lli" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:134 msgid "check_ftp: globus_ftp_client_modification_time failed" msgstr "check_ftp: Ñбой в globus_ftp_client_modification_time" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:138 msgid "check_ftp: timeout waiting for modification_time" msgstr "check_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ modification_time" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:143 msgid "check_ftp: failed to get file's modification time" msgstr "check_ftp: Ñбой при определении времени Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð°" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:148 #, c-format msgid "check_ftp: obtained modification date: %s" msgstr "check_ftp: получена дата изменениÑ: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:167 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:145 msgid "check_ftp: globus_ftp_client_get failed" msgstr "check_ftp: Ñбой в globus_ftp_client_get" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:174 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:152 msgid "check_ftp: globus_ftp_client_register_read" msgstr "check_ftp: globus_ftp_client_register_read" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:185 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:164 msgid "check_ftp: timeout waiting for partial get" msgstr "check_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ñ‡Ð°Ñтичной загрузки" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:215 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:191 #, c-format msgid "File delete failed, attempting directory delete for %s" msgstr "Сбой при удалении файла, попытка ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð° Ð´Ð»Ñ %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:225 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:202 msgid "delete_ftp: globus_ftp_client_delete failed" msgstr "delete_ftp: Ñбой в globus_ftp_client_delete" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:231 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:252 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:208 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:230 msgid "delete_ftp: timeout waiting for delete" msgstr "delete_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ñ‹ delete" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:246 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:224 msgid "delete_ftp: globus_ftp_client_rmdir failed" msgstr "delete_ftp: Ñбой в globus_ftp_client_rmdir" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:301 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:278 #, c-format msgid "mkdir_ftp: making %s" msgstr "mkdir_ftp: ÑоздаётÑÑ %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:309 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:287 msgid "mkdir_ftp: timeout waiting for mkdir" msgstr "mkdir_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ñ‹ mkdir" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:344 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:323 msgid "Timeout waiting for mkdir" msgstr "ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ñ‹ mkdir" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:370 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:346 msgid "start_reading_ftp" msgstr "start_reading_ftp" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:374 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:350 msgid "start_reading_ftp: globus_ftp_client_get" msgstr "start_reading_ftp: 
globus_ftp_client_get" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:388 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:364 msgid "start_reading_ftp: globus_ftp_client_get failed" msgstr "start_reading_ftp: Ñбой в globus_ftp_client_get" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:399 msgid "start_reading_ftp: globus_thread_create failed" msgstr "start_reading_ftp: Ñбой в globus_thread_create" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:418 msgid "stop_reading_ftp: aborting connection" msgstr "stop_reading_ftp: отменÑетÑÑ Ñоединение" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:425 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:647 #, c-format msgid "Failed to abort transfer of ftp file: %s" msgstr "Сбой Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð¸ файла по ftp: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:426 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:648 msgid "Assuming transfer is already aborted or failed." msgstr "Предполагаем, что переÑылка уже отменена, либо оборвалаÑÑŒ." #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:433 msgid "stop_reading_ftp: waiting for transfer to finish" msgstr "stop_reading_ftp: ожидание Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ð¿ÐµÑ€ÐµÑылки" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:435 #, c-format msgid "stop_reading_ftp: exiting: %s" msgstr "stop_reading_ftp: выход: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:449 msgid "ftp_read_thread: get and register buffers" msgstr "ftp_read_thread: получение и региÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð±ÑƒÑ„ÐµÑ€Ð¾Ð²" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:455 #, c-format msgid "ftp_read_thread: for_read failed - aborting: %s" msgstr "ftp_read_thread: Ñбой for_read - прерывание: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:465 #, c-format msgid "ftp_read_thread: data callback failed - aborting: %s" msgstr "ftp_read_thread: Ñбой обратного вызова данных - прерывание: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:477 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:380 #, c-format msgid "ftp_read_thread: Globus error: %s" msgstr "ftp_read_thread: ошибка Globus: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:490 #, c-format msgid "ftp_read_thread: too many registration failures - abort: %s" msgstr "ftp_read_thread: Ñлишком много Ñбоев региÑтрации - отмена: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:495 #, c-format msgid "ftp_read_thread: failed to register Globus buffer - will try later: %s" msgstr "" "ftp_read_thread: Ñбой при региÑтрации буфера Globus - попробуем попозже: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:508 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:396 msgid "ftp_read_thread: waiting for eof" msgstr "ftp_read_thread: ожидание конца файла" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:512 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:400 msgid "ftp_read_thread: waiting for buffers released" msgstr "ftp_read_thread: ожидание разблокировки буферов" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:516 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:408 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:662 msgid "ftp_read_thread: failed to release buffers - leaking" msgstr "ftp_read_thread: Ñбой ÑброÑа буферов - утечка" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:521 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:415 msgid "ftp_read_thread: exiting" msgstr "ftp_read_thread: выход" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:539 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:436 #, c-format msgid "ftp_read_callback: failure: %s" msgstr "ftp_read_callback: Ñбой: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:542 msgid 
"ftp_read_callback: success" msgstr "ftp_read_callback: уÑпех" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:558 msgid "Failed to get ftp file" msgstr "Ðе удалоÑÑŒ получить файл ftp" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:594 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:543 msgid "start_writing_ftp: mkdir" msgstr "start_writing_ftp: mkdir" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:597 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:545 msgid "start_writing_ftp: mkdir failed - still trying to write" msgstr "start_writing_ftp: Ñбой mkdir - вÑÑ‘ же пытаемÑÑ Ð·Ð°Ð¿Ð¸Ñать" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:599 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:547 msgid "start_writing_ftp: put" msgstr "start_writing_ftp: put" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:613 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:561 msgid "start_writing_ftp: put failed" msgstr "start_writing_ftp: Ñбой в put" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:623 msgid "start_writing_ftp: globus_thread_create failed" msgstr "start_writing_ftp: Ñбой в globus_thread_create" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:640 #: src/hed/libs/data/DataPointDelegate.cpp:307 msgid "StopWriting: aborting connection" msgstr "StopWriting: прерывание ÑвÑзи" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:664 #: src/hed/libs/data/DataPointDelegate.cpp:321 #, c-format msgid "StopWriting: Calculated checksum %s" msgstr "StopWriting: ВычиÑлена ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:668 #: src/hed/libs/data/DataPointDelegate.cpp:325 #, c-format msgid "StopWriting: looking for checksum of %s" msgstr "StopWriting: поиÑк контрольной Ñуммы %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:677 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:912 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:798 msgid "list_files_ftp: globus_ftp_client_cksm failed" msgstr "list_files_ftp: Ñбой globus_ftp_client_cksm" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:681 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:916 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:802 msgid "list_files_ftp: timeout waiting for cksum" msgstr "list_files_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€Ð¾Ñ‡Ð½Ð¾Ð¹ Ñуммы" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:688 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:923 msgid "list_files_ftp: no checksum information possible" msgstr "list_files_ftp: Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ контрольных Ñуммах недоÑтупна" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:691 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:926 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:815 #, c-format msgid "list_files_ftp: checksum %s" msgstr "list_files_ftp: Ð¿Ñ€Ð¾Ð²ÐµÑ€Ð¾Ñ‡Ð½Ð°Ñ Ñумма %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:694 #: src/hed/libs/data/DataPointDelegate.cpp:332 msgid "" "Checksum type returned by server is different to requested type, cannot " "compare" msgstr "" "Тип контрольной Ñуммы на Ñервере отличаетÑÑ Ð¾Ñ‚ запрошенного, Ñравнение " "невозможно" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:696 #: src/hed/libs/data/DataPointDelegate.cpp:334 #, c-format msgid "Calculated checksum %s matches checksum reported by server" msgstr "" "ВычиÑÐ»ÐµÐ½Ð½Ð°Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€Ð¾Ñ‡Ð½Ð°Ñ Ñумма %s Ñовпадает Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€Ð¾Ñ‡Ð½Ð¾Ð¹ Ñуммой Ñервера" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:699 #: src/hed/libs/data/DataPointDelegate.cpp:337 #, c-format msgid "" "Checksum mismatch between calculated checksum %s and checksum reported by " "server %s" msgstr "" "ÐеÑовпадение между вычиÑленной контрольной Ñуммой %s 
и контрольной Ñуммой, " "выданной Ñервером %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:721 msgid "ftp_write_thread: get and register buffers" msgstr "ftp_write_thread: получение и региÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð±ÑƒÑ„ÐµÑ€Ð¾Ð²" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:725 msgid "ftp_write_thread: for_write failed - aborting" msgstr "ftp_write_thread: Ñбой for_write - прерывание" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:743 msgid "ftp_write_thread: data callback failed - aborting" msgstr "ftp_write_thread: Ñбой обратного вызова данных - прерывание" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:759 msgid "ftp_write_thread: waiting for eof" msgstr "ftp_read_thread: ожидание конца файла" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:763 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:660 msgid "ftp_write_thread: waiting for buffers released" msgstr "ftp_write_thread: ожидание разблокировки буферов" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:771 msgid "ftp_write_thread: failed to release buffers - leaking" msgstr "ftp_write_thread: Ñбой ÑброÑа буферов - утечка" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:776 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:667 msgid "ftp_write_thread: exiting" msgstr "ftp_write_thread: выход" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:799 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:686 #, c-format msgid "ftp_write_callback: failure: %s" msgstr "ftp_write_callback: Ñбой: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:802 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:688 #, c-format msgid "ftp_write_callback: success %s" msgstr "ftp_write_callback: уÑпех %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:817 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:704 msgid "Failed to store ftp file" msgstr "Ðе удалоÑÑŒ Ñохранить файл ftp" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:825 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:709 msgid "ftp_put_complete_callback: success" msgstr "ftp_put_complete_callback: уÑпех" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:841 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:723 #, c-format msgid "list_files_ftp: looking for size of %s" msgstr "list_files_ftp: поиÑк размера %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:845 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:727 msgid "list_files_ftp: globus_ftp_client_size failed" msgstr "list_files_ftp: Ñбой в globus_ftp_client_size" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:851 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:852 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:733 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:734 msgid "list_files_ftp: timeout waiting for size" msgstr "list_files_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ñ€Ð°Ð·Ð¼ÐµÑ€Ð°" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:858 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:740 msgid "list_files_ftp: failed to get file's size" msgstr "list_files_ftp: не удалоÑÑŒ определить размер файла" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:870 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:753 #, c-format msgid "list_files_ftp: looking for modification time of %s" msgstr "list_files_ftp: определение времени Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:876 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:759 msgid "list_files_ftp: globus_ftp_client_modification_time failed" msgstr "list_files_ftp: Ñбой globus_ftp_client_modification_time" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:883 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:766 msgid "list_files_ftp: timeout waiting for 
modification_time" msgstr "list_files_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ modification_time" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:891 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:774 msgid "list_files_ftp: failed to get file's modification time" msgstr "list_files_ftp: не удалоÑÑŒ определить Ð²Ñ€ÐµÐ¼Ñ Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð°" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:903 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:788 #, c-format msgid "list_files_ftp: looking for checksum of %s" msgstr "list_files_ftp: поиÑк проверочной Ñуммы %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:942 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:828 #, c-format msgid "Failed to obtain stat from FTP: %s" msgstr "Ðе удалоÑÑŒ получить ÑпиÑок ÑÑ‚Ð°Ñ‚ÑƒÑ Ñ‡ÐµÑ€ÐµÐ· FTP: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:948 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:833 msgid "No results returned from stat" msgstr "Вызов stat не возвратил никаких результатов" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:954 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:839 #, c-format msgid "Wrong number of objects (%i) for stat from ftp: %s" msgstr "Ðеверное количеÑтво объектов (%i) Ð´Ð»Ñ Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ð¸ stat от ftp: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:968 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:852 #, c-format msgid "Unexpected path %s returned from server" msgstr "Сервер возвратил неожиданный путь %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1007 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:885 #, c-format msgid "Failed to obtain listing from FTP: %s" msgstr "Ðе удалоÑÑŒ получить ÑпиÑок файлов через FTP: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1050 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:928 msgid "Rename: globus_ftp_client_move failed" msgstr "Переименование: Ñбой в globus_ftp_client_move" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1056 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:934 msgid "Rename: timeout waiting for operation to complete" msgstr "Переименование: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ð¸" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1103 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:993 msgid "init_handle: globus_ftp_client_handleattr_init failed" msgstr "init_handle: Ñбой в globus_ftp_client_handleattr_init" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1112 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1002 msgid "init_handle: globus_ftp_client_handleattr_set_gridftp2 failed" msgstr "init_handle: Ñбой в globus_ftp_client_handleattr_set_gridftp2" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1121 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1011 msgid "init_handle: globus_ftp_client_handle_init failed" msgstr "init_handle: Ñбой в globus_ftp_client_handlea_init" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1128 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1018 msgid "init_handle: globus_ftp_client_operationattr_init failed" msgstr "init_handle: Ñбой в globus_ftp_client_operationattr_init" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1136 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1026 msgid "init_handle: globus_ftp_client_operationattr_set_allow_ipv6 failed" msgstr "init_handle: Ñбой globus_ftp_client_operationattr_set_allow_ipv6" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1142 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1032 msgid "init_handle: globus_ftp_client_operationattr_set_delayed_pasv failed" msgstr "init_handle: Ñбой 
globus_ftp_client_operationattr_set_delayed_pasv" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1190 #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1218 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1084 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1113 #, c-format msgid "globus_ftp_client_operationattr_set_authorization: error: %s" msgstr "globus_ftp_client_operationattr_set_authorization: ошибка: %s" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1217 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1112 msgid "Failed to set credentials for GridFTP transfer" msgstr "Ðе удалоÑÑŒ уÑтановить параметры доÑтупа Ð´Ð»Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð¸ данных по GridFTP" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1223 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1118 msgid "Using secure data transfer" msgstr "ИÑпользуетÑÑ Ð·Ð°Ñ‰Ð¸Ñ‰Ñ‘Ð½Ð½Ð°Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð° данных" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1228 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1123 msgid "Using insecure data transfer" msgstr "ИÑпользуетÑÑ Ð½ÐµÐ·Ð°Ñ‰Ð¸Ñ‰Ñ‘Ð½Ð½Ð°Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð° данных" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1255 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1150 msgid "~DataPoint: destroy ftp_handle" msgstr "~DataPoint: уничтожение ftp_handle" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1258 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1153 msgid "~DataPoint: destroy ftp_handle failed - retrying" msgstr "~DataPoint: уничтожение ftp_handle не удалоÑÑŒ - Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1276 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1171 msgid "~DataPoint: failed to destroy ftp_handle - leaking" msgstr "~DataPoint: уничтожение ftp_handle не удалоÑÑŒ - утечка" #: src/hed/dmc/gridftp/DataPointGridFTP.cpp:1296 msgid "" "Missing reference to factory and/or module. It is unsafe to use Globus in " "non-persistent mode - (Grid)FTP code is disabled. Report to developers." msgstr "" "ОтÑутÑтвует указание на фабрику и/или модуль. ИÑпользование Globus в " "неопределённом режиме небезопаÑно - вызов (Grid)FTP заблокирован. СвÑжитеÑÑŒ " "Ñ Ñ€Ð°Ð·Ñ€Ð°Ð±Ð¾Ñ‚Ñ‡Ð¸ÐºÐ°Ð¼Ð¸." 
#: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:388 msgid "ftp_read_thread: failed to register buffers" msgstr "ftp_read_thread: Ñбой региÑтрации буферов" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:403 msgid "ftp_read_thread: failed to release buffers" msgstr "ftp_read_thread: Ñбой ÑброÑа буферов" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:441 #, c-format msgid "ftp_read_callback: success - offset=%u, length=%u, eof=%u, allow oof=%u" msgstr "ftp_read_callback: уÑпех - offset=%u, length=%u, eof=%u, allow oof=%u" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:455 #, c-format msgid "ftp_read_callback: delayed data chunk: %llu %llu" msgstr "ftp_read_callback: задержанный блок данных: %llu %llu" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:462 #, c-format msgid "ftp_read_callback: unexpected data out of order: %llu != %llu" msgstr "ftp_read_callback: неверные неупорÑдоченные данные: %llu != %llu" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:469 msgid "ftp_read_callback: too many unexpected out of order chunks" msgstr "ftp_read_callback: избыток неверных неупорÑдоченных блоков" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:490 #, c-format msgid "ftp_read_callback: Globus error: %s" msgstr "ftp_read_callback: ошибка Globus: %s" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:515 msgid "ftp_get_complete_callback: Failed to get ftp file" msgstr "ftp_get_complete_callback: Сбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° ftp" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:520 msgid "ftp_get_complete_callback: success" msgstr "ftp_get_complete_callback: уÑпех" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:575 msgid "start_writing_ftp: waiting for data tag" msgstr "start_writing_ftp: ожидание метки данных" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:578 msgid "start_writing_ftp: failed to read data tag" msgstr "start_writing_ftp: Ñбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð¼ÐµÑ‚ÐºÐ¸ данных" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:583 msgid "start_writing_ftp: waiting for data chunk" msgstr "start_writing_ftp: ожидание куÑка данных" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:585 msgid "start_writing_ftp: failed to read data chunk" msgstr "start_writing_ftp: Ñбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ ÐºÑƒÑка данных" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:596 #, c-format msgid "ftp_write_thread: data out of order in stream mode: %llu != %llu" msgstr "" "ftp_write_thread: неупорÑдоченные данные в поточном режиме: %llu != %llu" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:603 msgid "ftp_write_thread: too many out of order chunks in stream mode" msgstr "ftp_write_thread: избыток неупорÑдоченных блоков в поточном режиме" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:608 #, c-format msgid "start_writing_ftp: data chunk: %llu %llu" msgstr "start_writing_ftp: куÑок данных: %llu %llu" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:614 #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:640 #, c-format msgid "ftp_write_thread: Globus error: %s" msgstr "ftp_write_thread: ошибка Globus: %s" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:633 #, c-format msgid "start_writing_ftp: delayed data chunk: %llu %llu" msgstr "start_writing_ftp: задержанный блок данных: %llu %llu" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:652 msgid "start_writing_ftp: waiting for some buffers sent" msgstr "start_writing_ftp: ожидание отправки буферов" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:658 msgid "ftp_write_thread: waiting for transfer complete" msgstr "ftp_write_thread: ожидание Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ 
Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð¸" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:809 msgid "list_files_ftp: no checksum information supported" msgstr "list_files_ftp: Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ контрольных Ñуммах не поддерживаетÑÑ" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:811 msgid "list_files_ftp: no checksum information returned" msgstr "list_files_ftp: не получена Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ контрольных Ñуммах" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:906 msgid "Too many failures to obtain checksum - giving up" msgstr "Слишком много Ñбоев попытки Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð¾Ð¹ Ñуммы - прерывание" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1266 msgid "Expecting Command and URL provided" msgstr "Задайте команду и URL" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1273 #: src/hed/libs/data/DataExternalHelper.cpp:376 msgid "Expecting Command among arguments" msgstr "Одним из аргументов должна быть команда" #: src/hed/dmc/gridftp/DataPointGridFTPHelper.cpp:1277 #: src/hed/libs/data/DataExternalHelper.cpp:380 msgid "Expecting URL among arguments" msgstr "Одним из аргументов должен быть URL" #: src/hed/dmc/gridftp/Lister.cpp:221 src/hed/dmc/gridftp/Lister.cpp:289 #: src/hed/dmc/gridftp/Lister.cpp:384 src/hed/dmc/gridftp/Lister.cpp:767 #: src/hed/dmc/gridftp/Lister.cpp:812 #, c-format msgid "Failure: %s" msgstr "Ошибка: %s" #: src/hed/dmc/gridftp/Lister.cpp:288 msgid "Error getting list of files (in list)" msgstr "Ðе удалоÑÑŒ получить ÑпиÑок файлов (в list)" #: src/hed/dmc/gridftp/Lister.cpp:290 msgid "Assuming - file not found" msgstr "ПредполагаетÑÑ, что файл не найден" #: src/hed/dmc/gridftp/Lister.cpp:307 #, c-format msgid "list record: %s" msgstr "перечиÑление запиÑи: %s" #: src/hed/dmc/gridftp/Lister.cpp:362 msgid "Failed reading list of files" msgstr "Ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ ÑпиÑка файлов" #: src/hed/dmc/gridftp/Lister.cpp:398 msgid "Failed reading data" msgstr "Ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…" #: src/hed/dmc/gridftp/Lister.cpp:426 #, c-format msgid "Command: %s" msgstr "Команда: %s" #: src/hed/dmc/gridftp/Lister.cpp:430 src/hed/dmc/gridftp/Lister.cpp:471 #: src/hed/mcc/http/PayloadHTTP.cpp:990 msgid "Memory allocation error" msgstr "Ошибка Ð²Ñ‹Ð´ÐµÐ»ÐµÐ½Ð¸Ñ Ð¿Ð°Ð¼Ñти" #: src/hed/dmc/gridftp/Lister.cpp:438 #, c-format msgid "%s failed" msgstr "%s не удалоÑÑŒ" #: src/hed/dmc/gridftp/Lister.cpp:442 msgid "Command is being sent" msgstr "ПоÑылаетÑÑ Ð¸Ð½ÑтрукциÑ" #: src/hed/dmc/gridftp/Lister.cpp:447 msgid "Waiting for response" msgstr "Ожидание отклика" #: src/hed/dmc/gridftp/Lister.cpp:452 msgid "Callback got failure" msgstr "Сбой обратного вызова" #: src/hed/dmc/gridftp/Lister.cpp:538 msgid "Failed in globus_cond_init" msgstr "Сбой в globus_cond_init" #: src/hed/dmc/gridftp/Lister.cpp:542 msgid "Failed in globus_mutex_init" msgstr "Сбой в globus_mutex_init" #: src/hed/dmc/gridftp/Lister.cpp:549 msgid "Failed allocating memory for handle" msgstr "Ðе удалоÑÑŒ зарезервировать памÑть под ÑÑылку" #: src/hed/dmc/gridftp/Lister.cpp:554 msgid "Failed in globus_ftp_control_handle_init" msgstr "Сбой в globus_ftp_control_handle_init" #: src/hed/dmc/gridftp/Lister.cpp:562 msgid "Failed to enable IPv6" msgstr "Ðе удалоÑÑŒ включить IPv6" #: src/hed/dmc/gridftp/Lister.cpp:573 src/services/gridftpd/commands.cpp:984 msgid "Closing connection" msgstr "Прекращение ÑвÑзи" #: src/hed/dmc/gridftp/Lister.cpp:580 src/hed/dmc/gridftp/Lister.cpp:595 msgid "Timeout waiting for Globus callback - leaking connection" msgstr "ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ð²ÐµÑ‚Ð½Ð¾Ð³Ð¾ ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ Globus - 
утечка ÑоединениÑ" #: src/hed/dmc/gridftp/Lister.cpp:605 msgid "Closed successfully" msgstr "УÑпешное прекращение" #: src/hed/dmc/gridftp/Lister.cpp:607 msgid "Closing may have failed" msgstr "Возможно, был Ñбой про закрытии" #: src/hed/dmc/gridftp/Lister.cpp:634 msgid "Waiting for globus handle to settle" msgstr "Ждём пока ÑÑылка globus уÑтаканитÑÑ" #: src/hed/dmc/gridftp/Lister.cpp:639 #, c-format msgid "Handle is not in proper state %u/%u" msgstr "СÑылка в недопуÑтимом ÑоÑтоÑии %u/%u" #: src/hed/dmc/gridftp/Lister.cpp:645 msgid "Globus handle is stuck" msgstr "СÑылка globus заÑтрÑла" #: src/hed/dmc/gridftp/Lister.cpp:661 #, c-format msgid "Failed destroying handle: %s. Can't handle such situation." msgstr "" "Ðе удалоÑÑŒ уничтожить ÑÑылку: %s. Ðевозможно ÑправитьÑÑ Ñ Ñ‚Ð°ÐºÐ¸Ð¼ положением." #: src/hed/dmc/gridftp/Lister.cpp:684 #, c-format msgid "EPSV failed: %s" msgstr "Сбой EPSV: %s" #: src/hed/dmc/gridftp/Lister.cpp:688 msgid "EPSV failed" msgstr "Сбой EPSV" #: src/hed/dmc/gridftp/Lister.cpp:695 #, c-format msgid "PASV failed: %s" msgstr "ИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ PASV не прошла: %s" #: src/hed/dmc/gridftp/Lister.cpp:699 msgid "PASV failed" msgstr "ИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ PASV не прошла" #: src/hed/dmc/gridftp/Lister.cpp:765 msgid "Failed to apply local address to data connection" msgstr "Ðе удалоÑÑŒ применить локальный Ð°Ð´Ñ€ÐµÑ Ðº Ñоединению передачи данных" #: src/hed/dmc/gridftp/Lister.cpp:783 msgid "Can't parse host and/or port in response to EPSV/PASV" msgstr "" "Ðе удалоÑÑŒ извлечь Ð°Ð´Ñ€ÐµÑ ÑƒÐ·Ð»Ð° и/или номер порта из ответа на Ð·Ð°Ð¿Ñ€Ð¾Ñ EPSV/PASV" #: src/hed/dmc/gridftp/Lister.cpp:788 #, c-format msgid "Data channel: %d.%d.%d.%d:%d" msgstr "Канал передачи данных: %d.%d.%d.%d:%d" #: src/hed/dmc/gridftp/Lister.cpp:806 #, c-format msgid "Data channel: [%s]:%d" msgstr "Канал передачи данных: [%s]:%d" #: src/hed/dmc/gridftp/Lister.cpp:810 msgid "Obtained host and address are not acceptable" msgstr "Полученные Ð°Ð´Ñ€ÐµÑ Ð¸ номер порта неприемлемы" #: src/hed/dmc/gridftp/Lister.cpp:820 msgid "Failed to open data channel" msgstr "Ðе удалоÑÑŒ открыть канал передачи данных" #: src/hed/dmc/gridftp/Lister.cpp:838 #, c-format msgid "Unsupported protocol in url %s" msgstr "Ðеподдерживаемый протокол в URL %s" #: src/hed/dmc/gridftp/Lister.cpp:850 msgid "Reusing connection" msgstr "Повторное иÑпользование ÑоединениÑ" #: src/hed/dmc/gridftp/Lister.cpp:874 #, c-format msgid "Failed connecting to server %s:%d" msgstr "Сбой уÑÑ‚Ð°Ð½Ð¾Ð²Ð»ÐµÐ½Ð¸Ñ ÑвÑзи Ñ Ñервером %s:%d" #: src/hed/dmc/gridftp/Lister.cpp:880 #, c-format msgid "Failed to connect to server %s:%d" msgstr "Ðе удалоÑÑŒ уÑтановить ÑвÑзь Ñ Ñервером %s:%d" #: src/hed/dmc/gridftp/Lister.cpp:896 msgid "Missing authentication information" msgstr "ОтÑутÑтвует Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ подлинноÑти" #: src/hed/dmc/gridftp/Lister.cpp:905 src/hed/dmc/gridftp/Lister.cpp:919 #, c-format msgid "Bad authentication information: %s" msgstr "ÐÐµÐ¿Ñ€Ð¸ÐµÐ¼Ð»ÐµÐ¼Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ подлинноÑти: %s" #: src/hed/dmc/gridftp/Lister.cpp:928 src/hed/dmc/gridftp/Lister.cpp:943 #, c-format msgid "Failed authenticating: %s" msgstr "Ошибка проверки подлинноÑти: %s" #: src/hed/dmc/gridftp/Lister.cpp:935 msgid "Failed authenticating" msgstr "Ошибка проверки подлинноÑти" #: src/hed/dmc/gridftp/Lister.cpp:970 src/hed/dmc/gridftp/Lister.cpp:1126 #, c-format msgid "DCAU failed: %s" msgstr "ИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ DCAU не прошла: %s" #: src/hed/dmc/gridftp/Lister.cpp:974 src/hed/dmc/gridftp/Lister.cpp:1131 msgid "DCAU failed" msgstr 
"ИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ DCAU не прошла" #: src/hed/dmc/gridftp/Lister.cpp:994 msgid "MLST is not supported - trying LIST" msgstr "MLST не поддерживаетÑÑ - пробуем LIST" #: src/hed/dmc/gridftp/Lister.cpp:1010 #, c-format msgid "Immediate completion expected: %s" msgstr "ОжидаетÑÑ Ð½ÐµÐ¼ÐµÐ´Ð»ÐµÐ½Ð½Ð¾Ðµ Ñоединение: %s" #: src/hed/dmc/gridftp/Lister.cpp:1014 msgid "Immediate completion expected" msgstr "ОжидаетÑÑ Ð½ÐµÐ¼ÐµÐ´Ð»ÐµÐ½Ð½Ð¾Ðµ Ñоединение" #: src/hed/dmc/gridftp/Lister.cpp:1027 #, c-format msgid "Missing information in reply: %s" msgstr "ÐÐµÐ¿Ð¾Ð»Ð½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð² отклике: %s" #: src/hed/dmc/gridftp/Lister.cpp:1061 #, c-format msgid "Missing final reply: %s" msgstr "ОтÑутÑтвует заключительный отклик: %s" #: src/hed/dmc/gridftp/Lister.cpp:1085 #, c-format msgid "Unexpected immediate completion: %s" msgstr "Ðеожиданное немедленное завершение: %s" #: src/hed/dmc/gridftp/Lister.cpp:1097 #, c-format msgid "LIST/MLST failed: %s" msgstr "Сбой LIST/MLST: %s" #: src/hed/dmc/gridftp/Lister.cpp:1102 msgid "LIST/MLST failed" msgstr "Сбой LIST/MLST" #: src/hed/dmc/gridftp/Lister.cpp:1152 msgid "MLSD is not supported - trying NLST" msgstr "MLSD не поддерживаетÑÑ - пробуем NLST" #: src/hed/dmc/gridftp/Lister.cpp:1166 #, c-format msgid "Immediate completion: %s" msgstr "Ðемедленное завершение: %s" #: src/hed/dmc/gridftp/Lister.cpp:1174 #, c-format msgid "NLST/MLSD failed: %s" msgstr "ИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ NLST/MLSD не прошла: %s" #: src/hed/dmc/gridftp/Lister.cpp:1180 msgid "NLST/MLSD failed" msgstr "ИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ NLST/MLSD не прошла" #: src/hed/dmc/gridftp/Lister.cpp:1201 #, c-format msgid "Data transfer aborted: %s" msgstr "Передача данных прервана: %s" #: src/hed/dmc/gridftp/Lister.cpp:1206 msgid "Data transfer aborted" msgstr "Передача данных прервана" #: src/hed/dmc/gridftp/Lister.cpp:1218 msgid "Failed to transfer data" msgstr "Ðе удалоÑÑŒ передать данные" #: src/hed/dmc/http/DataPointHTTP.cpp:391 #: src/hed/dmc/http/DataPointHTTP.cpp:520 #: src/hed/dmc/http/DataPointHTTP.cpp:601 #: src/hed/dmc/http/DataPointHTTP.cpp:1003 #: src/hed/dmc/http/DataPointHTTP.cpp:1147 #: src/hed/dmc/http/DataPointHTTP.cpp:1292 #, c-format msgid "Redirecting to %s" msgstr "Перенаправление к %s" #: src/hed/dmc/http/DataPointHTTP.cpp:673 #, c-format msgid "Stat: obtained size %llu" msgstr "Проверка: получен размер %llu" #: src/hed/dmc/http/DataPointHTTP.cpp:677 #, c-format msgid "Stat: obtained modification time %s" msgstr "Stat: получено Ð²Ñ€ÐµÐ¼Ñ Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ %s" #: src/hed/dmc/http/DataPointHTTP.cpp:906 #, c-format msgid "Check: obtained size %llu" msgstr "Проверка: получен размер %llu" #: src/hed/dmc/http/DataPointHTTP.cpp:908 #, c-format msgid "Check: obtained modification time %s" msgstr "Check: получено Ð²Ñ€ÐµÐ¼Ñ Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ %s" #: src/hed/dmc/http/DataPointHTTP.cpp:1020 #: src/hed/dmc/http/DataPointHTTP.cpp:1167 #, c-format msgid "HTTP failure %u - %s" msgstr "Ошибка HTTP %u - %s" #: src/hed/dmc/ldap/DataPointLDAP.cpp:36 msgid "" "Missing reference to factory and/or module. Currently safe unloading of LDAP " "DMC is not supported. Report to developers." msgstr "" "ОтÑутÑтвует ÑÑылка на фабрику и/или модуль. Ð’ наÑтоÑщее Ð²Ñ€ÐµÐ¼Ñ Ð±ÐµÐ·Ð¾Ð¿Ð°ÑÐ½Ð°Ñ " "выгрузка LDAP DMC не поддерживаетÑÑ. ПожалуйтеÑÑŒ разработчикам." 
#: src/hed/dmc/ldap/LDAPQuery.cpp:151 msgid "SASL Interaction" msgstr "Обмен данными SASL" #: src/hed/dmc/ldap/LDAPQuery.cpp:199 #, c-format msgid "Challenge: %s" msgstr "ЗапроÑ: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:203 #, c-format msgid "Default: %s" msgstr "По умолчанию: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:279 #, c-format msgid "LDAPQuery: Initializing connection to %s:%d" msgstr "LDAPQuery: уÑтанавливаетÑÑ Ñоединение Ñ %s:%d" #: src/hed/dmc/ldap/LDAPQuery.cpp:283 #, c-format msgid "LDAP connection already open to %s" msgstr "Соединение LDAP Ñ %s уже уÑтановлено" #: src/hed/dmc/ldap/LDAPQuery.cpp:297 #, c-format msgid "Could not open LDAP connection to %s" msgstr "Ðевозможно уÑтановить Ñоединие LDAP Ñ %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:318 #, c-format msgid "Failed to create ldap bind thread (%s)" msgstr "Ðе удалоÑÑŒ Ñоздать поток Ð´Ð»Ñ Ð¿Ñ€Ð¸Ð²Ñзки к LDAP (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:325 #, c-format msgid "Ldap bind timeout (%s)" msgstr "ИÑтекло Ð²Ñ€ÐµÐ¼Ñ ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñ LDAP (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:332 #, c-format msgid "Failed to bind to ldap server (%s)" msgstr "Сбой привÑзки к Ñерверу LDAP: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:353 #, c-format msgid "Could not set LDAP network timeout (%s)" msgstr "Ðе удалоÑÑŒ задать Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ LDAP (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:361 #, c-format msgid "Could not set LDAP timelimit (%s)" msgstr "Ðе удалоÑÑŒ задать Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ð²ÐµÑ‚Ð° Ñервера LDAP (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:368 #, c-format msgid "Could not set LDAP protocol version (%s)" msgstr "Ðе удалоÑÑŒ задать верÑию протокола LDAP (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:436 #, c-format msgid "LDAPQuery: Querying %s" msgstr "LdapQuery: ЗапрашиваетÑÑ %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:438 #, c-format msgid " base dn: %s" msgstr " базовое ОИ (DN): %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:440 #, c-format msgid " filter: %s" msgstr " фильтр: %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:442 msgid " attributes:" msgstr " атрибуты:" #: src/hed/dmc/ldap/LDAPQuery.cpp:445 #: src/services/gridftpd/misc/ldapquery.cpp:399 #, c-format msgid " %s" msgstr " %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:482 src/hed/dmc/ldap/LDAPQuery.cpp:548 #, c-format msgid "%s (%s)" msgstr "%s (%s)" #: src/hed/dmc/ldap/LDAPQuery.cpp:506 #, c-format msgid "LDAPQuery: Getting results from %s" msgstr "LDAPQuery: Получение результатов Ñ %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:509 #, c-format msgid "Error: no LDAP query started to %s" msgstr "Ошибка: не поÑлан Ð·Ð°Ð¿Ñ€Ð¾Ñ LDAP к %s" #: src/hed/dmc/ldap/LDAPQuery.cpp:543 #, c-format msgid "LDAP query timed out: %s" msgstr "ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ð²ÐµÑ‚Ð° на Ð·Ð°Ð¿Ñ€Ð¾Ñ LDAP: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:23 #, c-format msgid "Replacing existing token for %s in Rucio token cache" msgstr "ЗаменÑетÑÑ ÑущеÑтвующий маркер Ð´Ð»Ñ %s в кÑше маркеров Rucio" #: src/hed/dmc/rucio/DataPointRucio.cpp:36 #, c-format msgid "Found existing token for %s in Rucio token cache with expiry time %s" msgstr "" "Обнаружен ÑущеÑтвующий маркер Ð´Ð»Ñ %s в кÑше маркеров Rucio, иÑтекающий %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:39 #, c-format msgid "Rucio token for %s has expired or is about to expire" msgstr "Срок дейÑÑ‚Ð²Ð¸Ñ Ð¼Ð°Ñ€ÐºÐµÑ€Ð° Rucio Ð´Ð»Ñ %s иÑтёк, или вÑкоре иÑтечёт" #: src/hed/dmc/rucio/DataPointRucio.cpp:101 #, c-format msgid "Extracted nickname %s from credentials to use for RUCIO_ACCOUNT" msgstr "" "Выделен пÑевдоним %s Ð´Ð»Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð¾Ð² доÑтупа, 
иÑпользуемых в RUCIO_ACCOUNT" #: src/hed/dmc/rucio/DataPointRucio.cpp:104 msgid "Failed to extract VOMS nickname from proxy" msgstr "Сбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ð¿Ñевдонима VOMS из Ñертификата доверенноÑти" #: src/hed/dmc/rucio/DataPointRucio.cpp:106 #, c-format msgid "Using Rucio account %s" msgstr "ИÑпользуетÑÑ ÑƒÑ‡Ñ‘Ñ‚Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ Rucio %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:146 #, c-format msgid "" "Bad path for %s: Rucio supports read/write at /objectstores and read-only " "at /replicas" msgstr "" "Ðеверный путь к %s: Rucio поддерживает запиÑÑŒ/чтение в /objectstores и лишь " "чтение в /replicas" #: src/hed/dmc/rucio/DataPointRucio.cpp:162 #: src/services/candypond/CandyPond.cpp:140 #: src/services/candypond/CandyPond.cpp:347 #, c-format msgid "Can't handle URL %s" msgstr "Ðевозможно обработать URL %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:310 #, c-format msgid "Acquired auth token for %s: %s" msgstr "Получен маркер доÑтупа Ð´Ð»Ñ %s: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:367 #, c-format msgid "Rucio returned %s" msgstr "Rucio возвратил %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:393 #, c-format msgid "Failed to parse Rucio response: %s" msgstr "Ðе удалоÑÑŒ разобрать отзыв Rucio: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:399 #, c-format msgid "Filename not returned in Rucio response: %s" msgstr "Ð’ отзыве Rucio отÑутÑтвует Ð¸Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð°: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:405 #, c-format msgid "Unexpected name returned in Rucio response: %s" msgstr "Отзыв Rucio Ñодержит недопуÑтимое имÑ: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:411 #, c-format msgid "No pfns returned in Rucio response: %s" msgstr "Ð’ отзыве Rucio отÑутÑтвуeÑ‚ pnfs: %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:422 #, c-format msgid "Cannot determine replica type for %s" msgstr "Ðе удаётÑÑ Ð¾Ð¿Ñ€ÐµÐ´ÐµÐ»Ð¸Ñ‚ÑŒ тип копии Ð´Ð»Ñ %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:424 #, c-format msgid "%s: replica type %s" msgstr "%s: тип копии %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:427 #, c-format msgid "Skipping %s replica %s" msgstr "ПропуÑкаетÑÑ %s ÐºÐ¾Ð¿Ð¸Ñ %s" #: src/hed/dmc/rucio/DataPointRucio.cpp:447 #, c-format msgid "No filesize information returned in Rucio response for %s" msgstr "Ð’ отзыве Rucio Ð´Ð»Ñ %s отÑутÑтвует Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ размере файла" #: src/hed/dmc/rucio/DataPointRucio.cpp:450 #, c-format msgid "%s: size %llu" msgstr "%s: размер %llu" #: src/hed/dmc/rucio/DataPointRucio.cpp:454 #, c-format msgid "No checksum information returned in Rucio response for %s" msgstr "Ð’ отзыве Rucio Ð´Ð»Ñ %s отÑутÑтвует Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ контрольной Ñумме" #: src/hed/dmc/rucio/DataPointRucio.cpp:457 #, c-format msgid "%s: checksum %s" msgstr "%s: ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %s" #: src/hed/dmc/s3/DataPointS3.cpp:621 #, c-format msgid "Failed to write object %s: %s" msgstr "Сбой запиÑи объекта %s: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:59 #, c-format msgid "TURL %s cannot be handled" msgstr "TURL %s не может быть обработан" #: src/hed/dmc/srm/DataPointSRM.cpp:86 #, c-format msgid "Check: looking for metadata: %s" msgstr "Проверка: поиÑк метаданных: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:97 #, c-format msgid "Check: obtained size: %lli" msgstr "Проверка: получен размер: %lli" #: src/hed/dmc/srm/DataPointSRM.cpp:103 #, c-format msgid "Check: obtained checksum: %s" msgstr "Проверка: получена ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:107 #, c-format msgid "Check: obtained modification date: %s" msgstr "Проверка: получено Ð²Ñ€ÐµÐ¼Ñ Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ: %s" #: 
src/hed/dmc/srm/DataPointSRM.cpp:111 msgid "Check: obtained access latency: low (ONLINE)" msgstr "Проверка: получена задержка доÑтупа: ÐºÐ¾Ñ€Ð¾Ñ‚ÐºÐ°Ñ (ONLINE)" #: src/hed/dmc/srm/DataPointSRM.cpp:115 msgid "Check: obtained access latency: high (NEARLINE)" msgstr "Проверка: получена задержка доÑтупа: Ð´Ð»Ð¸Ð½Ð½Ð°Ñ (NEARLINE)" #: src/hed/dmc/srm/DataPointSRM.cpp:134 #, c-format msgid "Remove: deleting: %s" msgstr "Remove: удалÑетÑÑ: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:152 #, c-format msgid "Creating directory: %s" msgstr "СоздаетÑÑ Ð´Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ %s" #: src/hed/dmc/srm/DataPointSRM.cpp:200 src/hed/dmc/srm/DataPointSRM.cpp:249 msgid "Calling PrepareReading when request was already prepared!" msgstr "Вызов PrepareReading когда Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð±Ñ‹Ð» уже подготовлен!" #: src/hed/dmc/srm/DataPointSRM.cpp:220 #, c-format msgid "File %s is NEARLINE, will make request to bring online" msgstr "" "Файл %s в ÑоÑтоÑнии NEARLINE, будет Ñделан Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ размещении на диÑке" #: src/hed/dmc/srm/DataPointSRM.cpp:229 #, c-format msgid "Bring online request %s is still in queue, should wait" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ %s на размещение на диÑке вÑÑ‘ ещё в очереди, Ñледует подождать" #: src/hed/dmc/srm/DataPointSRM.cpp:234 #, c-format msgid "Bring online request %s finished successfully, file is now ONLINE" msgstr "" "Ð—Ð°Ð¿Ñ€Ð¾Ñ %s на размещение на диÑке уÑпешно выполнен, файл теперь в ÑоÑтоÑнии " "ONLINE" #: src/hed/dmc/srm/DataPointSRM.cpp:240 #, c-format msgid "" "Bad logic for %s - bringOnline returned ok but SRM request is not finished " "successfully or on going" msgstr "" "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð»Ð¾Ð³Ð¸ÐºÐ° в %s - bringOnline завершилÑÑ ÑƒÑпешно, но Ð·Ð°Ð¿Ñ€Ð¾Ñ SRM не " "завершилÑÑ ÑƒÑпехом, либо ещё в процеÑÑе" #: src/hed/dmc/srm/DataPointSRM.cpp:268 src/hed/dmc/srm/DataPointSRM.cpp:411 msgid "None of the requested transfer protocols are supported" msgstr "" "Ðе поддерживаетÑÑ Ð½Ð¸ один из запрошенных протоколов транÑпортного уровнÑ" #: src/hed/dmc/srm/DataPointSRM.cpp:281 #, c-format msgid "Get request %s is still in queue, should wait %i seconds" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° получение %s вÑÑ‘ ещё в очереди, Ñледует подождать %i Ñекунд" #: src/hed/dmc/srm/DataPointSRM.cpp:289 src/hed/dmc/srm/DataPointSRM.cpp:468 #, c-format msgid "Checking URL returned by SRM: %s" msgstr "ПроверÑетÑÑ URL выданный SRM: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:304 src/hed/dmc/srm/DataPointSRM.cpp:483 #, c-format msgid "SRM returned no useful Transfer URLs: %s" msgstr "SRM не выдал пригодных Ð´Ð»Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð¸ URL: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:311 #, c-format msgid "" "Bad logic for %s - getTURLs returned ok but SRM request is not finished " "successfully or on going" msgstr "" "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð»Ð¾Ð³Ð¸ÐºÐ° в %s - getTURLs завершилÑÑ ÑƒÑпешно, но Ð·Ð°Ð¿Ñ€Ð¾Ñ SRM не " "завершилÑÑ ÑƒÑпехом, либо ещё в процеÑÑе" #: src/hed/dmc/srm/DataPointSRM.cpp:319 msgid "StartReading" msgstr "Ðачало чтениÑ" #: src/hed/dmc/srm/DataPointSRM.cpp:321 msgid "StartReading: File was not prepared properly" msgstr "StartReading: Файл не был подготовлен должным образом" #: src/hed/dmc/srm/DataPointSRM.cpp:331 src/hed/dmc/srm/DataPointSRM.cpp:510 #, c-format msgid "Redirecting to new URL: %s" msgstr "Перенаправление к новому URL: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:392 msgid "Calling PrepareWriting when request was already prepared!" msgstr "Вызов PrepareWriting когда Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð±Ñ‹Ð» уже подготовлен!" 
#: src/hed/dmc/srm/DataPointSRM.cpp:421 msgid "No space token specified" msgstr "Ðе указан маркёр проÑтранÑтва памÑти" #: src/hed/dmc/srm/DataPointSRM.cpp:427 msgid "Warning: Using SRM protocol v1 which does not support space tokens" msgstr "" "Warning: ИÑпользуетÑÑ Ð²ÐµÑ€ÑÐ¸Ñ v1 протокола SRM, ÐºÐ¾Ñ‚Ð¾Ñ€Ð°Ñ Ð½Ðµ поддерживает " "токены меÑта" #: src/hed/dmc/srm/DataPointSRM.cpp:430 #, c-format msgid "Using space token description %s" msgstr "ИÑпользуетÑÑ Ð¾Ð¿Ð¸Ñание маркёра проÑтранÑтва памÑти %s" #: src/hed/dmc/srm/DataPointSRM.cpp:436 #, c-format msgid "Error looking up space tokens matching description %s" msgstr "" "Ошибка поиÑка маркёров проÑтранÑтва памÑти, ÑоответÑтвующих опиÑанию %s" #: src/hed/dmc/srm/DataPointSRM.cpp:440 #, c-format msgid "No space tokens found matching description %s" msgstr "Ðе найдены маркёры проÑтранÑтва памÑти, ÑоответÑтвующие опиÑанию %s" #: src/hed/dmc/srm/DataPointSRM.cpp:445 #, c-format msgid "Using space token %s" msgstr "ИÑпользуетÑÑ Ð¼Ð°Ñ€ÐºÑ‘Ñ€ проÑтранÑтва памÑти %s" #: src/hed/dmc/srm/DataPointSRM.cpp:460 #, c-format msgid "Put request %s is still in queue, should wait %i seconds" msgstr "" "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° размещение %s вÑÑ‘ ещё в очереди, Ñледует подождать %i Ñекунд" #: src/hed/dmc/srm/DataPointSRM.cpp:490 #, c-format msgid "" "Bad logic for %s - putTURLs returned ok but SRM request is not finished " "successfully or on going" msgstr "" "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð»Ð¾Ð³Ð¸ÐºÐ° в %s - putTURLs завершилÑÑ ÑƒÑпешно, но Ð·Ð°Ð¿Ñ€Ð¾Ñ SRM не " "завершилÑÑ ÑƒÑпехом, либо ещё в процеÑÑе" #: src/hed/dmc/srm/DataPointSRM.cpp:498 msgid "StartWriting" msgstr "Ðачало запиÑи" #: src/hed/dmc/srm/DataPointSRM.cpp:500 msgid "StartWriting: File was not prepared properly" msgstr "StartWriting: Файл не был подготовлен должным образом" #: src/hed/dmc/srm/DataPointSRM.cpp:559 #, c-format msgid "FinishWriting: looking for metadata: %s" msgstr "FinishWriting: поиÑк метаданных: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:574 #, c-format msgid "FinishWriting: obtained checksum: %s" msgstr "FinishWriting: получена ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:577 #, c-format msgid "" "Calculated/supplied transfer checksum %s matches checksum reported by SRM " "destination %s" msgstr "" "ВычиÑленнаÑ/ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ð°Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %s Ñовпадает Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð¾Ð¹ Ñуммой, " "заÑвленной точкой Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ SRM %s" #: src/hed/dmc/srm/DataPointSRM.cpp:580 #, c-format msgid "" "Checksum mismatch between calculated/supplied checksum (%s) and checksum " "reported by SRM destination (%s)" msgstr "" "ÐеÑовпадение между вычиÑленной/указанной контрольной Ñуммой %s и контрольной " "Ñуммой, заÑвленной точкой Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ SRM %s" #: src/hed/dmc/srm/DataPointSRM.cpp:583 #, c-format msgid "" "Checksum type of SRM (%s) and calculated/supplied checksum (%s) differ, " "cannot compare" msgstr "" "Типы контрольной Ñуммы в SRM (%s) и вычиÑленной/указанной контрольной Ñуммы " "(%s) различаютÑÑ, Ñравнение невозможно" #: src/hed/dmc/srm/DataPointSRM.cpp:584 src/hed/dmc/srm/DataPointSRM.cpp:585 msgid "No checksum information from server" msgstr "Сервер не выдал информацию о контрольной Ñумме" #: src/hed/dmc/srm/DataPointSRM.cpp:586 src/hed/dmc/srm/DataPointSRM.cpp:587 msgid "No checksum verification possible" msgstr "Ðевозможно подтвердить контрольную Ñумму" #: src/hed/dmc/srm/DataPointSRM.cpp:593 msgid "Failed to release completed request" msgstr "Сбой ÑброÑа завершившегоÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñа" #: src/hed/dmc/srm/DataPointSRM.cpp:636 
src/hed/dmc/srm/DataPointSRM.cpp:703 #, c-format msgid "ListFiles: looking for metadata: %s" msgstr "ListFiles: поиÑк метаданных: %s" #: src/hed/dmc/srm/DataPointSRM.cpp:821 #, c-format msgid "plugin for transport protocol %s is not installed" msgstr "" "не уÑтановлен подключаемый модуль Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñ‚Ð¾ÐºÐ¾Ð»Ð° транÑпортного ÑƒÑ€Ð¾Ð²Ð½Ñ %s" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:51 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:90 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:142 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:181 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:221 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:259 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:303 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:365 #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:438 msgid "SRM did not return any information" msgstr "SRM не возвратил никакой информации" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:316 #, c-format msgid "File could not be moved to Running state: %s" msgstr "Файл не может быть переведён в ÑоÑтоÑние Running: %s" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:372 msgid "SRM did not return any useful information" msgstr "SRM не возвратил никакой полезной информации" #: src/hed/dmc/srm/srmclient/SRM1Client.cpp:450 msgid "File could not be moved to Done state" msgstr "Файл не может быть переведён в ÑоÑтоÑние Done" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:88 msgid "Could not determine version of server" msgstr "Ðе удалоÑÑŒ определить верÑию Ñервера" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:94 #, c-format msgid "Server SRM version: %s" msgstr "ВерÑÐ¸Ñ Ñервера SRM: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:99 #, c-format msgid "Server implementation: %s" msgstr "Ð ÐµÐ°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ñервера: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:136 #, c-format msgid "Adding space token %s" msgstr "ДобавлÑетÑÑ Ð¼Ð°Ñ€ÐºÑ‘Ñ€ проÑтранÑтва памÑти %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:163 msgid "No request tokens found" msgstr "Ðе найдены маркёры запроÑа" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:176 #, c-format msgid "Adding request token %s" msgstr "ДобавлÑетÑÑ Ð¼Ð°Ñ€ÐºÑ‘Ñ€ запроÑа %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:237 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:642 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:828 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1385 #, c-format msgid "%s: File request %s in SRM queue. Sleeping for %i seconds" msgstr "%s: Ð—Ð°Ð¿Ñ€Ð¾Ñ Ñ„Ð°Ð¹Ð»Ð° %s в очереди SRM. Ожидание %i Ñекунд" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:275 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:327 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:698 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:764 #, c-format msgid "File is ready! TURL is %s" msgstr "Файл готов! TURL: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:359 #, c-format msgid "Setting userRequestDescription to %s" msgstr "УÑтановка userRequestDescription в %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:414 #, c-format msgid "%s: Bring online request %s in SRM queue. Sleeping for %i seconds" msgstr "%s: Ð—Ð°Ð¿Ñ€Ð¾Ñ Ñ„Ð°Ð¹Ð»Ð° Ñ Ð»ÐµÐ½Ñ‚Ñ‹ %s в очереди SRM. Ожидание %i Ñекунд" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:457 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1160 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1194 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1228 msgid "No request token specified!" msgstr "Ðе указан маркёр запроÑа!" 
#: src/hed/dmc/srm/srmclient/SRM22Client.cpp:524 msgid "Request is reported as ABORTED, but all files are done" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ñ€ÐµÑ€Ð²Ð°Ð½ (ABORTED), но вÑе файлы готовы" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:530 msgid "Request is reported as ABORTED, since it was cancelled" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ñ€ÐµÑ€Ð²Ð°Ð½ (ABORTED), так как он был отменён" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:536 #, c-format msgid "Request is reported as ABORTED. Reason: %s" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ñ€ÐµÑ€Ð²Ð°Ð½ (ABORTED). Причина: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:673 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:745 #, c-format msgid "Path %s is invalid, creating required directories" msgstr "Путь %s недейÑтвителен, ÑоздаютÑÑ Ð½ÐµÐ´Ð¾Ñтающие директории" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:678 #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:750 #, c-format msgid "Error creating required directories for %s" msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð½ÐµÐ¾Ð±Ñ…Ð¾Ð´Ð¸Ð¼Ñ‹Ñ… каталогов Ð´Ð»Ñ %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:851 msgid "Too many files in one request - please try again with fewer files" msgstr "" "Слишком много файлов на один Ð·Ð°Ð¿Ñ€Ð¾Ñ - пожалуйÑта, попробуйте Ñнова, Ñ " "меньшим количеÑтвом файлов" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:899 msgid "" "Directory size is too large to list in one call, will have to call multiple " "times" msgstr "" "Размер директории Ñлишком велик Ð´Ð»Ñ Ñ€Ð°Ñпечатки в одном запроÑе, придётÑÑ " "делать неÑколько запроÑов" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:936 msgid "" "Failure in parsing response from server - some information may be inaccurate" msgstr "" "Ошибка при разборе отзыва Ñ Ñервера - Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¼Ð¾Ð¶ÐµÑ‚ быть чаÑтично неверной" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:942 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:566 #: src/services/gridftpd/misc/ldapquery.cpp:183 #: src/services/gridftpd/misc/ldapquery.cpp:186 #: src/services/gridftpd/misc/ldapquery.cpp:392 #: src/services/gridftpd/misc/ldapquery.cpp:622 #: src/services/gridftpd/misc/ldapquery.cpp:631 #, c-format msgid "%s: %s" msgstr "%s: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:975 #, c-format msgid "" "Directory size is larger than %i files, will have to call multiple times" msgstr "" "Размер директории превышает %i файлов, придётÑÑ Ð´ÐµÐ»Ð°Ñ‚ÑŒ неÑколько запроÑов" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1185 #, c-format msgid "Files associated with request token %s released successfully" msgstr "Файлы, аÑÑоциированные Ñ Ð¼Ð°Ñ€ÐºÑ‘Ñ€Ð¾Ð¼ запроÑа %s, уÑпешно разблокированы" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1219 #, c-format msgid "Files associated with request token %s put done successfully" msgstr "Файлы, аÑÑоциированные Ñ Ð¼Ð°Ñ€ÐºÑ‘Ñ€Ð¾Ð¼ запроÑа %s, уÑпешно отгружены" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1254 #, c-format msgid "Files associated with request token %s aborted successfully" msgstr "Файлы, аÑÑоциированные Ñ Ð¼Ð°Ñ€ÐºÑ‘Ñ€Ð¾Ð¼ запроÑа %s, уÑпешно прерваны" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1271 #, c-format msgid "" "Failed to find metadata info on %s for determining file or directory delete" msgstr "" "Ðе удалоÑÑŒ найти информацию о типе %s, чтобы определить, ÑтираетÑÑ Ñ„Ð°Ð¹Ð» или " "каталог" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1277 msgid "Type is file, calling srmRm" msgstr "Тип file, вызываетÑÑ srmRm" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1281 msgid "Type is dir, calling srmRmDir" msgstr "Тип dir, вызываетÑÑ srmRmDir" #: 
src/hed/dmc/srm/srmclient/SRM22Client.cpp:1285 msgid "File type is not available, attempting file delete" msgstr "Тип файла недоступен, попытка стереть файл" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1288 msgid "File delete failed, attempting directory delete" msgstr "Сбой при удалении файла, попытка удаления каталога" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1313 #, c-format msgid "File %s removed successfully" msgstr "Успешно удалён файл %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1340 #, c-format msgid "Directory %s removed successfully" msgstr "Успешно удалён каталог %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1455 #, c-format msgid "Checking for existence of %s" msgstr "Проверка существования %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1458 #, c-format msgid "File already exists: %s" msgstr "Файл уже существует: %s" #: src/hed/dmc/srm/srmclient/SRM22Client.cpp:1495 #, c-format msgid "Error creating directory %s: %s" msgstr "Ошибка создания каталога %s: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:82 #, c-format msgid "Attempting to contact %s on port %i" msgstr "Попытка соединиться с %s по порту %i" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:88 #, c-format msgid "Storing port %i for %s" msgstr "Сохраняется порт %i для %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:102 #, c-format msgid "No port succeeded for %s" msgstr "Не найдено подходящего порта для %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:112 #, c-format msgid "URL %s disagrees with stored SRM info, testing new info" msgstr "" "URL %s не соответствует информации, хранящейся в SRM info; проверяется новая " "информация" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:118 #, c-format msgid "Replacing old SRM info with new for URL %s" msgstr "Замена старой информации в SRM на новую для URL %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:140 #, c-format msgid "SOAP request: %s" msgstr "Запрос SOAP: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:147 #: src/hed/dmc/srm/srmclient/SRMClient.cpp:176 #, c-format msgid "SOAP fault: %s" msgstr "Ошибка SOAP: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:148 msgid "Reconnecting" msgstr "Пересоединение" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:158 #, c-format msgid "SRM Client status: %s" msgstr "Состояние клиента SRM: %s" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:164 msgid "No SOAP response" msgstr "Нет ответа SOAP" #: src/hed/dmc/srm/srmclient/SRMClient.cpp:171 #: src/hed/identitymap/ArgusPDPClient.cpp:250 #, c-format msgid "SOAP response: %s" msgstr "Ответ SOAP: %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:76 #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:162 #, c-format msgid "Failed to acquire lock on file %s" msgstr "Сбой установки блокировки на файл %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:81 #, c-format msgid "Error reading info from file %s:%s" msgstr "Ошибка чтения информации из файла %s:%s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:95 #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:187 #, c-format msgid "Bad or old format detected in file %s, in line %s" msgstr "Обнаружен неверный или устаревший формат в файле %s, строке %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:100 #, c-format msgid "Cannot convert string %s to int in line %s" msgstr "" "Невозможно преобразовать строку %s в целочисленное значение в строке %s" #: src/hed/dmc/srm/srmclient/SRMInfo.cpp:203 #, c-format msgid "Error writing srm info file %s" msgstr "Ошибка записи файла информации SRM %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:64
msgid "" "Missing reference to factory and/or module. It is unsafe to use Xrootd in " "non-persistent mode - Xrootd code is disabled. Report to developers." msgstr "" "ОтÑутÑтвует указание на фабрику и/или модуль. ИÑпользование Xrootd в " "неопределённом режиме небезопаÑно - Xrootd заблокирован. СвÑжитеÑÑŒ Ñ " "разработчиками." #: src/hed/dmc/xrootd/DataPointXrootd.cpp:103 #, c-format msgid "Could not handle checksum %s: skip checksum check" msgstr "" "Ðевозможно обработать контрольную Ñумму %s: пропуÑкаетÑÑ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ° " "контрольной Ñуммы" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:109 #, c-format msgid "Failed to create xrootd copy job: %s" msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ ÐºÐ¾Ð¿Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ xrootd %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:126 #, c-format msgid "Failed to copy %s: %s" msgstr "Сбой ÐºÐ¾Ð¿Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:177 #, c-format msgid "Reading %u bytes from byte %llu" msgstr "Чтение %u байтов из байта %llu" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:179 #, c-format msgid "Read %i bytes" msgstr "Прочитано %i байт" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:210 #, c-format msgid "Could not open file %s for reading: %s" msgstr "Ðевозможно открыть файл %s Ð´Ð»Ñ Ñ‡Ñ‚ÐµÐ½Ð¸Ñ: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:225 #, c-format msgid "Unable to find file size of %s" msgstr "Ðе удалоÑÑŒ определить размер файла %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:289 #, c-format msgid "DataPointXrootd::write_file got position %d and offset %d, has to seek" msgstr "" "DataPointXrootd::write_file получил Ð°Ð´Ñ€ÐµÑ %d и Ñдвиг %d, проводитÑÑ Ð¿Ð¾Ð¸Ñк" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:312 #, c-format msgid "xrootd write failed: %s" msgstr "Сбой при запиÑи xrootd: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:321 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:395 #, c-format msgid "xrootd close failed: %s" msgstr "Сбой при закрытии xrootd: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:344 #, c-format msgid "Failed to open %s, trying to create parent directories" msgstr "Сбой Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ %s, попытка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ€Ð¾Ð´Ð¸Ñ‚ÐµÐ»ÑŒÑких каталогов" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:357 #, c-format msgid "xrootd open failed: %s" msgstr "Сбой при открытии xrootd: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:371 #, c-format msgid "close failed: %s" msgstr "Сбой при закрытии: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:413 #, c-format msgid "Read access not allowed for %s: %s" msgstr "Закрыт доÑтуп на чтение Ð´Ð»Ñ %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:432 #, c-format msgid "Could not stat file %s: %s" msgstr "Ðе удалоÑÑŒ определить ÑоÑтоÑние файла %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:437 msgid "Not getting checksum of zip constituent" msgstr "Ðе получена ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма Ð´Ð»Ñ zip-ÑоÑтавлÑющей" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:441 #, c-format msgid "Could not get checksum of %s: %s" msgstr "Ðе удалоÑÑŒ получить контрольную Ñумму %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:445 #, c-format msgid "Checksum %s" msgstr "ÐšÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:483 #, c-format msgid "Failed to open directory %s: %s" msgstr "Ðе удалоÑÑŒ открыть каталог %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:501 #, c-format msgid "Error while reading dir %s: %s" msgstr "Ошибка при чтении каталога %s: %s" #: src/hed/dmc/xrootd/DataPointXrootd.cpp:551 #: src/hed/dmc/xrootd/DataPointXrootd.cpp:569 #, c-format msgid "Error creating required dirs: %s" 
msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ‚Ñ€ÐµÐ±ÑƒÐµÐ¼Ñ‹Ñ… каталогов: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:158 msgid "PDPD location is missing" msgstr "отÑутÑтвует раÑположение PDPD" #: src/hed/identitymap/ArgusPDPClient.cpp:161 #, c-format msgid "PDPD location: %s" msgstr "раÑположение PDPD: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:166 #: src/hed/identitymap/ArgusPEPClient.cpp:129 msgid "Conversion mode is set to SUBJECT" msgstr "Задан ÑпоÑоб Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ SUBJECT" #: src/hed/identitymap/ArgusPDPClient.cpp:169 #: src/hed/identitymap/ArgusPEPClient.cpp:132 msgid "Conversion mode is set to CREAM" msgstr "Задан ÑпоÑоб Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ CREAM" #: src/hed/identitymap/ArgusPDPClient.cpp:172 #: src/hed/identitymap/ArgusPEPClient.cpp:135 msgid "Conversion mode is set to EMI" msgstr "Задан ÑпоÑоб Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ EMI" #: src/hed/identitymap/ArgusPDPClient.cpp:175 #: src/hed/identitymap/ArgusPEPClient.cpp:138 #, c-format msgid "Unknown conversion mode %s, using default" msgstr "" "ÐеизвеÑтный ÑпоÑоб Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ %s, иÑпользуетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ по умолчанию" #: src/hed/identitymap/ArgusPDPClient.cpp:242 #, c-format msgid "Failed to contact PDP server: %s" msgstr "Сбой ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñ Ñервером PDP: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:245 #, c-format msgid "There was no SOAP response return from PDP server: %s" msgstr "Сервер PDP не возвратил ответ SOAP: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:360 #: src/hed/identitymap/ArgusPEPClient.cpp:286 #, c-format msgid "Have %i requests to process" msgstr "%i запроÑов Ð´Ð»Ñ Ð¾Ð±Ñ€Ð°Ð±Ð¾Ñ‚ÐºÐ¸" #: src/hed/identitymap/ArgusPDPClient.cpp:362 msgid "Creating a client to Argus PDP service" msgstr "СоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ Ð´Ð»Ñ Ñлужбы Argus PDP" #: src/hed/identitymap/ArgusPDPClient.cpp:375 #, c-format msgid "XACML authorisation request: %s" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð°Ð²Ñ‚Ð¾Ñ€Ð¸Ð·Ð°Ñ†Ð¸Ð¸ GACL: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:386 #, c-format msgid "XACML authorisation response: %s" msgstr "Отклик допуÑка XACML: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:426 #, c-format msgid "%s is not authorized to do action %s in resource %s " msgstr "%s не допущен к иÑполнению дейÑÑ‚Ð²Ð¸Ñ %s на реÑурÑе %s " #: src/hed/identitymap/ArgusPDPClient.cpp:429 #: src/hed/identitymap/ArgusPDPClient.cpp:434 #: src/hed/identitymap/ArgusPEPClient.cpp:336 msgid "Not authorized" msgstr "Ðет допуÑка" #: src/hed/identitymap/ArgusPDPClient.cpp:439 #: src/hed/identitymap/ArgusPEPClient.cpp:341 #: src/hed/identitymap/IdentityMap.cpp:219 src/hed/shc/legacy/LegacyMap.cpp:216 #, c-format msgid "Grid identity is mapped to local identity '%s'" msgstr "" "Опознавательные признаки Грид поÑтавлены в ÑоответÑтвие меÑтной учётной " "запиÑи '%s'" #: src/hed/identitymap/ArgusPDPClient.cpp:566 #: src/hed/identitymap/ArgusPEPClient.cpp:655 msgid "Doing CREAM request" msgstr "ПроизводитÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñ CREAM" #: src/hed/identitymap/ArgusPDPClient.cpp:580 #: src/hed/identitymap/ArgusPDPClient.cpp:748 #: src/hed/identitymap/ArgusPEPClient.cpp:683 #, c-format msgid "Adding profile-id value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ profile-id: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:592 #: src/hed/identitymap/ArgusPDPClient.cpp:759 #: src/hed/identitymap/ArgusPEPClient.cpp:694 #, c-format msgid "Adding subject-id value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ subject-id: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:600 #: src/hed/identitymap/ArgusPDPClient.cpp:767 #: src/hed/identitymap/ArgusPEPClient.cpp:704 #, c-format 
msgid "Adding subject-issuer value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ subject-issuer: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:607 #: src/hed/identitymap/ArgusPEPClient.cpp:713 #, c-format msgid "Adding virtual-organization value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ virtual-organization: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:620 #: src/hed/identitymap/ArgusPEPClient.cpp:730 #, c-format msgid "Adding FQAN value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ FQAN: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:629 #: src/hed/identitymap/ArgusPEPClient.cpp:739 #, c-format msgid "Adding FQAN/primary value: %s" msgstr "ДобавлÑетÑÑ FQAN/первичное значение: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:639 #: src/hed/identitymap/ArgusPEPClient.cpp:750 #, c-format msgid "Adding cert chain value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ цепочки Ñертификатов: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:648 #: src/hed/identitymap/ArgusPDPClient.cpp:840 #: src/hed/identitymap/ArgusPEPClient.cpp:760 #, c-format msgid "Adding resource-id value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ идентификатора реÑурÑа: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:662 #: src/hed/identitymap/ArgusPDPClient.cpp:863 #: src/hed/identitymap/ArgusPEPClient.cpp:775 #, c-format msgid "Adding action-id value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ action-id: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:666 #: src/hed/identitymap/ArgusPEPClient.cpp:786 #, c-format msgid "CREAM request generation failed: %s" msgstr "Ðе удалоÑÑŒ Ñоздать Ð·Ð°Ð¿Ñ€Ð¾Ñ CREAM: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:732 msgid "Doing EMI request" msgstr "ПроизводитÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñ EMI" #: src/hed/identitymap/ArgusPDPClient.cpp:774 #, c-format msgid "Adding Virtual Organization value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ виртуальной организации: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:797 #, c-format msgid "Adding VOMS group value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ группы VOMS: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:803 #, c-format msgid "Adding VOMS primary group value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ первичной группы VOMS: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:822 #, c-format msgid "Adding VOMS role value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ роли VOMS: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:829 #, c-format msgid "Adding VOMS primary role value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ первичной роли VOMS: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:846 #, c-format msgid "Adding resource-owner value: %s" msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ владельца реÑурÑа: %s" #: src/hed/identitymap/ArgusPDPClient.cpp:867 #, c-format msgid "EMI request generation failed: %s" msgstr "Ðе удалоÑÑŒ Ñоздать Ð·Ð°Ð¿Ñ€Ð¾Ñ EMI: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:119 msgid "PEPD location is missing" msgstr "отÑутÑтвует раÑположение PEPD" #: src/hed/identitymap/ArgusPEPClient.cpp:122 #, c-format msgid "PEPD location: %s" msgstr "раÑположение PEPD: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:126 msgid "Conversion mode is set to DIRECT" msgstr "Задан ÑпоÑоб Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ DIRECT" #: src/hed/identitymap/ArgusPEPClient.cpp:331 #, c-format msgid "" "Not authorized according to request:\n" "%s" msgstr "" "Ðет допуÑка ÑоглаÑно запроÑу:\n" "%s" #: src/hed/identitymap/ArgusPEPClient.cpp:333 #, c-format msgid "%s is not authorized to do action %s in resource %s" msgstr "%s не допущен к иÑполнению дейÑÑ‚Ð²Ð¸Ñ %s на реÑурÑе %s" #: src/hed/identitymap/ArgusPEPClient.cpp:361 
msgid "Subject of request is null" msgstr "ОтÑутÑтвует Ñубъект запроÑа" #: src/hed/identitymap/ArgusPEPClient.cpp:366 #, c-format msgid "Can not create XACML SubjectAttribute: %s" msgstr "Ðе удалоÑÑŒ Ñоздать атрибут XACML SubjectAttribute: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:375 msgid "Can not create XACML Resource" msgstr "Ðе удалоÑÑŒ Ñоздать XACML Resource" #: src/hed/identitymap/ArgusPEPClient.cpp:381 #, c-format msgid "Can not create XACML ResourceAttribute: %s" msgstr "Ðе удалоÑÑŒ Ñоздать атрибут XACML ResourceAttribute: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:390 msgid "Can not create XACML Action" msgstr "Ðе удалоÑÑŒ Ñоздать XACML Action" #: src/hed/identitymap/ArgusPEPClient.cpp:397 #, c-format msgid "Can not create XACML ActionAttribute: %s" msgstr "Ðе удалоÑÑŒ Ñоздать атрибут XACML ActionAttribute: %s" #: src/hed/identitymap/ArgusPEPClient.cpp:407 msgid "Can not create XACML request" msgstr "Ðе удалоÑÑŒ Ñоздать Ð·Ð°Ð¿Ñ€Ð¾Ñ XACML" #: src/hed/identitymap/ArgusPEPClient.cpp:539 #, c-format msgid "Converting to CREAM action - namespace: %s, operation: %s" msgstr "Преобразование в дейÑтвие CREAM - проÑтранÑтво имён: %s, операциÑ: %s" #: src/hed/identitymap/IdentityMap.cpp:196 #, c-format msgid "PDP: %s can not be loaded" msgstr "PDP: %s не может быть подгружен" #: src/hed/libs/common/ArcLocation.cpp:128 #, c-format msgid "" "Can not determine the install location. Using %s. Please set ARC_LOCATION if " "this is not correct." msgstr "" "Ðевозможно определить меÑто уÑтановки. ИÑпользуетÑÑ %s. ЕÑли Ñто не " "ÑоответÑтвует дейÑтвительноÑти, задайте, пожалуйÑта, переменную ARC_LOCATION." #: src/hed/libs/common/DateTime.cpp:86 src/hed/libs/common/DateTime.cpp:631 #: src/hed/libs/common/StringConv.h:25 msgid "Empty string" msgstr "ПуÑÑ‚Ð°Ñ Ñтрока" #: src/hed/libs/common/DateTime.cpp:107 #, c-format msgid "Can not parse date: %s" msgstr "Ðевозможно определить дату: %s" #: src/hed/libs/common/DateTime.cpp:130 #, c-format msgid "Can not parse time: %s" msgstr "Ðевозможно определить времÑ: %s" #: src/hed/libs/common/DateTime.cpp:160 #, c-format msgid "Can not parse time zone offset: %s" msgstr "Ðевозможно определить чаÑовой поÑÑ: %s" #: src/hed/libs/common/DateTime.cpp:180 src/hed/libs/common/DateTime.cpp:199 #: src/hed/libs/common/DateTime.cpp:252 src/hed/libs/common/DateTime.cpp:291 #, c-format msgid "Illegal time format: %s" msgstr "ÐедопуÑтимый формат времени: %s" #: src/hed/libs/common/DateTime.cpp:230 src/hed/libs/common/DateTime.cpp:283 #, c-format msgid "Can not parse month: %s" msgstr "Ðевозможно определить меÑÑц: %s" #: src/hed/libs/common/DateTime.cpp:647 src/hed/libs/common/DateTime.cpp:688 #, c-format msgid "Invalid ISO duration format: %s" msgstr "Ðеверный ISO-формат продолжительноÑти: %s" #: src/hed/libs/common/DateTime.cpp:752 #, c-format msgid "Invalid period string: %s" msgstr "ÐедопуÑтимый интервал времени: %s" #: src/hed/libs/common/DateTime.cpp:874 msgid "hour" msgid_plural "hours" msgstr[0] "чаÑ" msgstr[1] "чаÑа" msgstr[2] "чаÑов" #: src/hed/libs/common/DateTime.cpp:880 msgid "minute" msgid_plural "minutes" msgstr[0] "минута" msgstr[1] "минуты" msgstr[2] "минут" #: src/hed/libs/common/DateTime.cpp:886 msgid "second" msgid_plural "seconds" msgstr[0] "Ñекунда" msgstr[1] "Ñекунды" msgstr[2] "Ñекунд" #: src/hed/libs/common/FileLock.cpp:43 msgid "Cannot determine hostname from gethostname()" msgstr "Ðевозможно извлечь Ð¸Ð¼Ñ ÑƒÐ·Ð»Ð° иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ gethostname()" #: src/hed/libs/common/FileLock.cpp:92 #, c-format msgid "EACCES Error opening lock file %s: %s" 
msgstr "EACCES Ошибка Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° блокировки %s: %s" #: src/hed/libs/common/FileLock.cpp:97 #, c-format msgid "Error opening lock file %s in initial check: %s" msgstr "" "Ошибка при открытии файла блокировки %s при предварительной проверке: %s" #: src/hed/libs/common/FileLock.cpp:104 #, c-format msgid "Error creating temporary file %s: %s" msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ð¾Ð³Ð¾ файла %s: %s" #: src/hed/libs/common/FileLock.cpp:113 #, c-format msgid "Could not create link to lock file %s as it already exists" msgstr "" "Ðевозможно Ñоздать ÑÑылку на файл блокировки %s, потому что она уже " "ÑущеÑтвует" #: src/hed/libs/common/FileLock.cpp:124 #, c-format msgid "Could not create lock file %s as it already exists" msgstr "Ðевозможно Ñоздать файл блокировки %s, потому что он уже ÑущеÑтвует" #: src/hed/libs/common/FileLock.cpp:128 #, c-format msgid "Error creating lock file %s: %s" msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° блокировки %s: %s" #: src/hed/libs/common/FileLock.cpp:133 #, c-format msgid "Error writing to lock file %s: %s" msgstr "Ошибка запиÑи в файл блокировки %s: %s" #: src/hed/libs/common/FileLock.cpp:141 #, c-format msgid "Error linking tmp file %s to lock file %s: %s" msgstr "Ðе удалоÑÑŒ ÑвÑзать временный файл %s Ñ Ñ„Ð°Ð¹Ð»Ð¾Ð¼ блокировки %s: %s" #: src/hed/libs/common/FileLock.cpp:150 #, c-format msgid "Error in lock file %s, even though linking did not return an error" msgstr "" "Ошибка в файле блокировки %s, неÑÐ¼Ð¾Ñ‚Ñ€Ñ Ð½Ð° то, что Ñоздание ÑÑылки прошло без " "Ñбоев" #: src/hed/libs/common/FileLock.cpp:159 #, c-format msgid "%li seconds since lock file %s was created" msgstr "%li Ñекунд(Ñ‹) Ñ Ð¼Ð¾Ð¼ÐµÐ½Ñ‚Ð° ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° блокировки %s" #: src/hed/libs/common/FileLock.cpp:162 #, c-format msgid "Timeout has expired, will remove lock file %s" msgstr "Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¸Ñтекло, файл блокировки %s будет удалён" #: src/hed/libs/common/FileLock.cpp:166 #, c-format msgid "Failed to remove stale lock file %s: %s" msgstr "Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ ÑƒÑтаревшего файла блокировки %s: %s" #: src/hed/libs/common/FileLock.cpp:178 #, c-format msgid "This process already owns the lock on %s" msgstr "У Ñтого процеÑÑа уже ÑущеÑтвует блокировка в %s" #: src/hed/libs/common/FileLock.cpp:183 #, c-format msgid "" "The process owning the lock on %s is no longer running, will remove lock" msgstr "" "ПроцеÑÑ, которому принадлежит блок в %s, больше не ÑущеÑтвует, блок будет " "удалён" #: src/hed/libs/common/FileLock.cpp:185 #, c-format msgid "Failed to remove file %s: %s" msgstr "Ðе удалоÑÑŒ удалить файл %s: %s" #: src/hed/libs/common/FileLock.cpp:193 #, c-format msgid "The file %s is currently locked with a valid lock" msgstr "Файл %s в наÑтоÑщий момент заблокирован дейÑтвительным блоком" #: src/hed/libs/common/FileLock.cpp:208 #, c-format msgid "Failed to unlock file with lock %s: %s" msgstr "Сбой Ñ€Ð°Ð·Ð±Ð»Ð¾ÐºÐ¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° Ñ Ð±Ð»Ð¾ÐºÐ¾Ð¼ %s: %s" #: src/hed/libs/common/FileLock.cpp:220 #, c-format msgid "Lock file %s doesn't exist" msgstr "Файл блокировки %s не ÑущеÑтвует" #: src/hed/libs/common/FileLock.cpp:222 #, c-format msgid "Error listing lock file %s: %s" msgstr "Ошибка перечиÑÐ»ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° блокировки %s: %s" #: src/hed/libs/common/FileLock.cpp:228 #, c-format msgid "Found unexpected empty lock file %s. Must go back to acquire()" msgstr "" "Ðайден непредвиденный пуÑтой файл блокировки %s. 
Ðеобходимо вернутьÑÑ Ð² " "acquire()" #: src/hed/libs/common/FileLock.cpp:234 #, c-format msgid "Error reading lock file %s: %s" msgstr "Ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° блокировки %s: %s" #: src/hed/libs/common/FileLock.cpp:238 #, c-format msgid "Error with formatting in lock file %s" msgstr "Ошибка формата в файле блокировки %s" #: src/hed/libs/common/FileLock.cpp:248 #, c-format msgid "Lock %s is owned by a different host (%s)" msgstr "Блок %s принадлежит другому процеÑÑу (%s)" #: src/hed/libs/common/FileLock.cpp:257 #, c-format msgid "Badly formatted pid %s in lock file %s" msgstr "Ðеверно Ñформированный pid %s в файле блокировки %s" #: src/hed/libs/common/FileLock.cpp:260 #, c-format msgid "Another process (%s) owns the lock on file %s" msgstr "Другой процеÑÑ (%s) обладает блоком файла %s" #: src/hed/libs/common/IString.cpp:32 src/hed/libs/common/IString.cpp:41 #: src/hed/libs/common/IString.cpp:42 msgid "(empty)" msgstr "(пуÑто)" #: src/hed/libs/common/IString.cpp:32 src/hed/libs/common/IString.cpp:41 #: src/hed/libs/common/IString.cpp:42 msgid "(null)" msgstr "(нулевой)" #: src/hed/libs/common/Logger.cpp:58 #, c-format msgid "Invalid log level. Using default %s." msgstr "Ðеверный уровень отладки. ИÑпользуетÑÑ ÑƒÑ€Ð¾Ð²ÐµÐ½ÑŒ по умолчанию %s." #: src/hed/libs/common/Logger.cpp:123 #, c-format msgid "Invalid old log level. Using default %s." msgstr "" "Ðеверный Ñтарый уровень отладки. ИÑпользуетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ по умолчанию %s." #: src/hed/libs/common/OptionParser.cpp:106 #, c-format msgid "Cannot parse integer value '%s' for -%c" msgstr "Ðе удаётÑÑ Ñ€Ð°Ð·Ð¾Ð±Ñ€Ð°Ñ‚ÑŒ целое значение '%s' Ð´Ð»Ñ -%c" #: src/hed/libs/common/OptionParser.cpp:309 #: src/hed/libs/common/OptionParser.cpp:446 #, c-format msgid "Options Group %s:" msgstr "Группа опций %s:" #: src/hed/libs/common/OptionParser.cpp:311 #: src/hed/libs/common/OptionParser.cpp:449 #, c-format msgid "%s:" msgstr "%s:" #: src/hed/libs/common/OptionParser.cpp:313 #, c-format msgid "Show %s help options" msgstr "Показать %s параметров Ñправки" #: src/hed/libs/common/OptionParser.cpp:342 msgid "Failed to parse command line options" msgstr "Ðе удалоÑÑŒ разобрать параметры командной Ñтроки" #: src/hed/libs/common/OptionParser.cpp:352 msgid "Use -? to get usage description" msgstr "Ð”Ð»Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ñправки иÑпользуйте \"-?\"" #: src/hed/libs/common/OptionParser.cpp:429 msgid "Usage:" msgstr "ИÑпользование:" #: src/hed/libs/common/OptionParser.cpp:432 msgid "OPTION..." msgstr "ПÐРÐМЕТР..." #: src/hed/libs/common/OptionParser.cpp:438 msgid "Help Options:" msgstr "Параметры Ñправки:" #: src/hed/libs/common/OptionParser.cpp:439 msgid "Show help options" msgstr "Показать параметры Ñправки" #: src/hed/libs/common/Profile.cpp:199 src/hed/libs/common/Profile.cpp:273 #: src/hed/libs/common/Profile.cpp:404 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"inisections\" " "attribute cannot be the empty string." msgstr "" "Элемент \"%s\" в профиле игнорируетÑÑ: значение атрибута \"inisections\" не " "может быть пуÑтой Ñтрокой." #: src/hed/libs/common/Profile.cpp:205 src/hed/libs/common/Profile.cpp:279 #: src/hed/libs/common/Profile.cpp:411 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"initag\" attribute " "cannot be the empty string." msgstr "" "Элемент \"%s\" в профиле игнорируетÑÑ: значение атрибута \"initag\" не может " "быть пуÑтой Ñтрокой." 
#: src/hed/libs/common/Profile.cpp:419 #, c-format msgid "" "Element \"%s\" in the profile ignored: the value of the \"initype\" " "attribute cannot be the empty string." msgstr "" "Элемент \"%s\" в профиле игнорируетÑÑ: значение атрибута \"initype\" не " "может быть пуÑтой Ñтрокой." #: src/hed/libs/common/Profile.cpp:422 #, c-format msgid "" "Element \"%s\" in the profile ignored: the \"inidefaultvalue\" attribute " "cannot be specified when the \"inisections\" and \"initag\" attributes have " "not been specified." msgstr "" "Элемент \"%s\" в профиле игнорируетÑÑ: значение атрибута \"inidefaultvalue\" " "не может быть задано, когда не заданы Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð² \"inisections\" и " "\"initag\"." #: src/hed/libs/common/Profile.cpp:497 #, c-format msgid "" "In the configuration profile the 'initype' attribute on the \"%s\" element " "has a invalid value \"%s\"." msgstr "" "Ð’ профиле наÑтроек атрибут 'initype' Ñлемента \"%s\" имеет ÑобÑтвенное " "значение \"%s\"." #: src/hed/libs/common/Run_unix.cpp:226 msgid "Child monitoring signal detected" msgstr "Мониторинг дочерних процеÑÑов: обнаружен Ñигнал" #: src/hed/libs/common/Run_unix.cpp:231 #, c-format msgid "Child monitoring error: %i" msgstr "Мониторинг дочерних процеÑÑов: ошибка: %i" #: src/hed/libs/common/Run_unix.cpp:244 msgid "Child monitoring kick detected" msgstr "Мониторинг дочерних процеÑÑов: обнаружен запуÑк" #: src/hed/libs/common/Run_unix.cpp:247 msgid "Child monitoring internal communication error" msgstr "Мониторинг дочерних процеÑÑов: внутренний Ñбой взаимодейÑтвиÑ" #: src/hed/libs/common/Run_unix.cpp:259 msgid "Child monitoring stdout is closed" msgstr "Мониторинг дочерних процеÑÑов: stdout закрыт" #: src/hed/libs/common/Run_unix.cpp:269 msgid "Child monitoring stderr is closed" msgstr "Мониторинг дочерних процеÑÑов: stderr закрыт" #: src/hed/libs/common/Run_unix.cpp:279 msgid "Child monitoring stdin is closed" msgstr "Мониторинг дочерних процеÑÑов: stdin закрыт" #: src/hed/libs/common/Run_unix.cpp:297 #, c-format msgid "Child monitoring child %d exited" msgstr "Мониторинг дочерних процеÑÑов: процеÑÑ %d завершилÑÑ" #: src/hed/libs/common/Run_unix.cpp:301 #, c-format msgid "Child monitoring lost child %d (%d)" msgstr "Мониторинг дочерних процеÑÑов: потерÑн процеÑÑ %d (%d)" #: src/hed/libs/common/Run_unix.cpp:322 #, c-format msgid "Child monitoring drops abandoned child %d (%d)" msgstr "" "Мониторинг дочерних процеÑÑов: игнорируетÑÑ Ð·Ð°Ð±Ñ€Ð¾ÑˆÐµÐ½Ð½Ñ‹Ð¹ процеÑÑ %d (%d)" #: src/hed/libs/common/Run_unix.cpp:483 msgid "Child was already started" msgstr "Дочерний процеÑÑ ÑƒÐ¶Ðµ запущен" #: src/hed/libs/common/Run_unix.cpp:487 msgid "No arguments are assigned for external process" msgstr "Внешнему процеÑÑу не приÑвоены аргументы" #: src/hed/libs/common/Run_unix.cpp:620 src/hed/libs/common/Run_unix.cpp:625 #, c-format msgid "Excepton while trying to start external process: %s" msgstr "Сбой при попытке запуÑка внешнего процеÑÑа: %s" #: src/hed/libs/common/StringConv.h:31 #, c-format msgid "Conversion failed: %s" msgstr "Преобразование не удалоÑÑŒ: %s" #: src/hed/libs/common/StringConv.h:35 #, c-format msgid "Full string not used: %s" msgstr "Строка иÑпользована неполноÑтью: %s" #: src/hed/libs/common/Thread.cpp:261 msgid "Maximum number of threads running - putting new request into queue" msgstr "" "Запущено макÑимальное количеÑтво потоков - новый Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ð¾Ð¼ÐµÑ‰Ñ‘Ð½ в очередь" #: src/hed/libs/common/Thread.cpp:309 #, c-format msgid "Thread exited with Glib error: %s" msgstr "Поток завершилÑÑ Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ¾Ð¹ 
Glib: %s" #: src/hed/libs/common/Thread.cpp:311 #, c-format msgid "Thread exited with Glib exception: %s" msgstr "Поток завершилÑÑ Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸ÐµÐ¼ в Glib: %s" #: src/hed/libs/common/Thread.cpp:313 #, c-format msgid "Thread exited with generic exception: %s" msgstr "Поток завершилÑÑ Ð¾Ð±Ñ‰Ð¸Ð¼ прерыванием: %s" #: src/hed/libs/common/URL.cpp:137 #, c-format msgid "URL is not valid: %s" msgstr "ÐедейÑтвительный адреÑ: %s" #: src/hed/libs/common/URL.cpp:188 #, c-format msgid "Illegal URL - path must be absolute: %s" msgstr "ÐедопуÑтимый URL - путь должен быть абÑолютным: %s" #: src/hed/libs/common/URL.cpp:193 #, c-format msgid "Illegal URL - no hostname given: %s" msgstr "ÐедопуÑтимый Ð°Ð´Ñ€ÐµÑ - не ÑодержитÑÑ Ð¸Ð¼Ñ ÑƒÐ·Ð»Ð°: %s" #: src/hed/libs/common/URL.cpp:282 #, c-format msgid "Illegal URL - path must be absolute or empty: %s" msgstr "ÐедопуÑтимый URL - путь должен быть абÑолютным или пуÑтым: %s" #: src/hed/libs/common/URL.cpp:298 #, c-format msgid "Illegal URL - no closing ] for IPv6 address found: %s" msgstr "" "ÐедопуÑтимый URL - отÑутÑтвует Ð·Ð°ÐºÑ€Ñ‹Ð²Ð°ÑŽÑ‰Ð°Ñ Ñкобка ] Ð´Ð»Ñ Ð°Ð´Ñ€ÐµÑа IPv6: %s" #: src/hed/libs/common/URL.cpp:306 #, c-format msgid "" "Illegal URL - closing ] for IPv6 address is followed by illegal token: %s" msgstr "" "ÐедопуÑтимый URL - за закрывающей Ñкобкой ] Ð´Ð»Ñ Ð°Ð´Ñ€ÐµÑа IPv6 Ñледует " "недопуÑтимый маркёр: %s" #: src/hed/libs/common/URL.cpp:322 #, c-format msgid "Invalid port number in %s" msgstr "ÐедопуÑтимый номер порта в %s" #: src/hed/libs/common/URL.cpp:453 #, c-format msgid "Unknown LDAP scope %s - using base" msgstr "ÐеизвеÑтный контекÑÑ‚ LDAP %s - иÑпользуетÑÑ base" #: src/hed/libs/common/URL.cpp:616 msgid "Attempt to assign relative path to URL - making it absolute" msgstr "" "Попытка интерпретации отноÑительного путь как URL - заменÑетÑÑ Ð½Ð° абÑолютный" #: src/hed/libs/common/URL.cpp:715 #, c-format msgid "URL option %s does not have format name=value" msgstr "ÐžÐ¿Ñ†Ð¸Ñ URL %s не задана в формате имÑ=значение" #: src/hed/libs/common/URL.cpp:1180 #, c-format msgid "urllist %s contains invalid URL: %s" msgstr "urllist %s Ñодержит недопуÑтимый URL: %s" #: src/hed/libs/common/URL.cpp:1185 #, c-format msgid "URL protocol is not urllist: %s" msgstr "Протокол URL не ÑвлÑетÑÑ urllist: %s" #: src/hed/libs/common/UserConfig.cpp:32 src/hed/libs/common/UserConfig.cpp:781 #: src/hed/libs/common/UserConfig.cpp:790 #: src/hed/libs/common/UserConfig.cpp:796 #: src/hed/libs/common/UserConfig.cpp:822 #: src/hed/libs/common/UserConfig.cpp:832 #: src/hed/libs/common/UserConfig.cpp:844 #: src/hed/libs/common/UserConfig.cpp:864 #, c-format msgid "Multiple %s attributes in configuration file (%s)" msgstr "МножеÑтвенные атрибуты %s в файле наÑтроек (%s)" #: src/hed/libs/common/UserConfig.cpp:132 #, c-format msgid "Wrong ownership of certificate file: %s" msgstr "ÐÐµÐ¿Ñ€Ð°Ð²Ð¸Ð»ÑŒÐ½Ð°Ñ Ð¿Ñ€Ð¸Ð½Ð°Ð´Ð»ÐµÐ¶Ð½Ð¾Ñть файла открытого ключа: %s" #: src/hed/libs/common/UserConfig.cpp:134 #, c-format msgid "Wrong permissions of certificate file: %s" msgstr "Ðеправильные права доÑтупа к файлу открытого ключа: %s" #: src/hed/libs/common/UserConfig.cpp:136 #, c-format msgid "Can not access certificate file: %s" msgstr "Ðет доÑтупа к файлу Ñертификата: %s" #: src/hed/libs/common/UserConfig.cpp:143 #, c-format msgid "Wrong ownership of key file: %s" msgstr "ÐÐµÐ¿Ñ€Ð°Ð²Ð¸Ð»ÑŒÐ½Ð°Ñ Ð¿Ñ€Ð¸Ð½Ð°Ð´Ð»ÐµÐ¶Ð½Ð¾Ñть файла личного ключа: %s" #: src/hed/libs/common/UserConfig.cpp:145 #, c-format msgid "Wrong permissions of key file: %s" msgstr "Ðеправильные права доÑтупа к файлу личного 
ключа: %s" #: src/hed/libs/common/UserConfig.cpp:147 #, c-format msgid "Can not access key file: %s" msgstr "Ðет доÑтупа к файлу личного ключа: %s" #: src/hed/libs/common/UserConfig.cpp:154 #, c-format msgid "Wrong ownership of proxy file: %s" msgstr "ÐÐµÐ¿Ñ€Ð°Ð²Ð¸Ð»ÑŒÐ½Ð°Ñ Ð¿Ñ€Ð¸Ð½Ð°Ð´Ð»ÐµÐ¶Ð½Ð¾Ñть файла доверенноÑти: %s" #: src/hed/libs/common/UserConfig.cpp:156 #, c-format msgid "Wrong permissions of proxy file: %s" msgstr "Ðеправильные права доÑтупа к файлу доверенноÑти: %s" #: src/hed/libs/common/UserConfig.cpp:158 #, c-format msgid "Can not access proxy file: %s" msgstr "Ðет доÑтупа к файлу доверенноÑти: %s" #: src/hed/libs/common/UserConfig.cpp:169 msgid "computing" msgstr "computing" #: src/hed/libs/common/UserConfig.cpp:171 msgid "index" msgstr "index" #: src/hed/libs/common/UserConfig.cpp:270 #: src/hed/libs/common/UserConfig.cpp:274 #: src/hed/libs/common/UserConfig.cpp:321 #: src/hed/libs/common/UserConfig.cpp:325 #, c-format msgid "System configuration file (%s) contains errors." msgstr "Файл ÑиÑтемных наÑтроек (%s) Ñодержит ошибки." #: src/hed/libs/common/UserConfig.cpp:278 #: src/hed/libs/common/UserConfig.cpp:329 #, c-format msgid "System configuration file (%s or %s) does not exist." msgstr "Файл ÑиÑтемных наÑтроек (%s or %s) не ÑущеÑтвует." #: src/hed/libs/common/UserConfig.cpp:280 #: src/hed/libs/common/UserConfig.cpp:331 #, c-format msgid "System configuration file (%s) does not exist." msgstr "Файл ÑиÑтемных наÑтроек (%s) не ÑущеÑтвует." #: src/hed/libs/common/UserConfig.cpp:286 #: src/hed/libs/common/UserConfig.cpp:298 #: src/hed/libs/common/UserConfig.cpp:337 #: src/hed/libs/common/UserConfig.cpp:349 #, c-format msgid "User configuration file (%s) contains errors." msgstr "Файл наÑтроек Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ (%s) Ñодержит ошибки." #: src/hed/libs/common/UserConfig.cpp:291 #: src/hed/libs/common/UserConfig.cpp:342 msgid "No configuration file could be loaded." msgstr "Файл наÑтроек не может быть подгружен." #: src/hed/libs/common/UserConfig.cpp:294 #: src/hed/libs/common/UserConfig.cpp:345 #, c-format msgid "User configuration file (%s) does not exist or cannot be loaded." msgstr "" "Файл наÑтроек Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ (%s) не ÑущеÑтвует или не может быть подгружен." #: src/hed/libs/common/UserConfig.cpp:406 #, c-format msgid "" "Unable to parse the specified verbosity (%s) to one of the allowed levels" msgstr "" "Ðевозможно ÑопоÑтавить запрашиваемый уровень отладки (%s) ни Ñ Ð¾Ð´Ð½Ð¸Ð¼ из " "допуÑтимых" #: src/hed/libs/common/UserConfig.cpp:418 #, c-format msgid "" "Unsupported job list type '%s', using 'BDB'. Supported types are: BDB, " "SQLITE, XML." msgstr "" "Тип ÑпиÑка задач '%s' не поддерживаетÑÑ, будет иÑпользоватьÑÑ 'BDB'. " "ПоддерживаютÑÑ Ñледующие типы: BDB, SQLITE, XML." #: src/hed/libs/common/UserConfig.cpp:463 msgid "Loading OToken failed - ignoring its presence" msgstr "Сбой загрузки OToken - токен игнорируетÑÑ" #: src/hed/libs/common/UserConfig.cpp:604 #, c-format msgid "Certificate and key ('%s' and '%s') not found in any of the paths: %s" msgstr "" "Сертификат и ключ ('%s' и '%s') не обнаружены ни в одном из раÑположений: %s" #: src/hed/libs/common/UserConfig.cpp:606 #, c-format msgid "" "If the proxy or certificate/key does exist, you can manually specify the " "locations via environment variables '%s'/'%s' or '%s', or the '%s'/'%s' or " "'%s' attributes in the client configuration file (e.g. 
'%s')" msgstr "" "ЕÑли пара Ñертификат/ключ или файл Ñертификата доверенноÑти ÑущеÑтвуют, Ð’Ñ‹ " "можете вручную указать их раÑположение Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ переменных Ñреды '%s'/'%s' " "или '%s', или Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ атрибутов '%s'/'%s' или '%s' в файле наÑтроек " "клиента (например, '%s')" #: src/hed/libs/common/UserConfig.cpp:623 #: src/hed/libs/common/UserConfig.cpp:633 #, c-format msgid "" "Can not access CA certificate directory: %s. The certificates will not be " "verified." msgstr "Ðе удалоÑÑŒ открыть каталог Ñертификатов CA: %s. Сертификаты ." #: src/hed/libs/common/UserConfig.cpp:659 #, c-format msgid "" "Can not find CA certificates directory in default locations:\n" "~/.arc/certificates, ~/.globus/certificates,\n" "%s/etc/certificates, %s/etc/grid-security/certificates,\n" "%s/share/certificates, /etc/grid-security/certificates.\n" "The certificate will not be verified.\n" "If the CA certificates directory does exist, please manually specify the " "locations via env\n" "X509_CERT_DIR, or the cacertificatesdirectory item in client.conf\n" msgstr "" "Каталог Ñертификатов СРне обнаружен ни в одном из Ñтандартных меÑÑ‚:\n" "~/.arc/certificates, ~/.globus/certificates,\n" "%s/etc/certificates, %s/etc/grid-security/certificates,\n" "%s/share/certificates, /etc/grid-security/certificates.\n" "Сертификат не будет подтверждён.\n" "ЕÑли каталог Ñертификатов СРÑущеÑтвует, пожалуйÑта, укажите вручную\n" "его раÑÐ¿Ð¾Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ переменной X509_CERT_DIR, или задайте\n" "cacertificatesdirectory в файле наÑтроек клиента client.conf\n" #: src/hed/libs/common/UserConfig.cpp:680 #, c-format msgid "Using proxy file: %s" msgstr "ИÑпользуетÑÑ Ñ„Ð°Ð¹Ð» доверенноÑти: %s" #: src/hed/libs/common/UserConfig.cpp:683 #, c-format msgid "Using certificate file: %s" msgstr "ИÑпользуетÑÑ Ñ„Ð°Ð¹Ð» Ñертификата: %s" #: src/hed/libs/common/UserConfig.cpp:684 #, c-format msgid "Using key file: %s" msgstr "ИÑпользуетÑÑ Ñ„Ð°Ð¹Ð» личного ключа: %s" #: src/hed/libs/common/UserConfig.cpp:688 #, c-format msgid "Using CA certificate directory: %s" msgstr "ИÑпользуетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³ доверенных Ñертификатов CA: %s" #: src/hed/libs/common/UserConfig.cpp:692 msgid "Using OToken" msgstr "ИÑпользуетÑÑ OToken" #: src/hed/libs/common/UserConfig.cpp:705 #: src/hed/libs/common/UserConfig.cpp:711 #, c-format msgid "Can not access VOMSES file/directory: %s." msgstr "Ðевозможно открыть каталог или файл VOMSES: %s." #: src/hed/libs/common/UserConfig.cpp:717 #, c-format msgid "Can not access VOMS file/directory: %s." msgstr "Ðевозможно открыть каталог или файл VOMS: %s." 
#: src/hed/libs/common/UserConfig.cpp:731 msgid "" "Can not find voms service configuration file (vomses) in default locations: " "~/.arc/vomses, ~/.voms/vomses, $ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/" "grid-security/vomses, $PWD/vomses, /etc/vomses, /etc/grid-security/vomses" msgstr "" "ÐšÐ¾Ð½Ñ„Ð¸Ð³ÑƒÑ€Ð°Ñ†Ð¸Ñ Ñерверов VOMS не обнаружена ни в одном из Ñтандартных " "раÑположений: ~/.arc/vomses, ~/.voms/vomses, $ARC_LOCATION/etc/vomses, " "$ARC_LOCATION/etc/grid-security/vomses, $PWD/vomses, /etc/vomses, /etc/grid-" "security/vomses" #: src/hed/libs/common/UserConfig.cpp:744 #, c-format msgid "Loading configuration (%s)" msgstr "Чтение файла наÑтроек (%s)" #: src/hed/libs/common/UserConfig.cpp:778 #, c-format msgid "" "The value of the timeout attribute in the configuration file (%s) was only " "partially parsed" msgstr "Значение атрибута timeout (%s) в файле наÑтроек разобрано неполноÑтью" #: src/hed/libs/common/UserConfig.cpp:803 msgid "" "The brokerarguments attribute can only be used in conjunction with the " "brokername attribute" msgstr "" "Ðтрибут brokerarguments может быть иÑпользован только в ÑвÑзи Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð¼ " "brokername" #: src/hed/libs/common/UserConfig.cpp:819 #, c-format msgid "" "The value of the keysize attribute in the configuration file (%s) was only " "partially parsed" msgstr "Значение атрибута keysize (%s) в файле наÑтроек разобрано неполноÑтью" #: src/hed/libs/common/UserConfig.cpp:839 #, c-format msgid "" "Could not convert the slcs attribute value (%s) to an URL instance in " "configuration file (%s)" msgstr "" "Ðе удалоÑÑŒ преобразовать значение атрибута slcs (%s) в файле наÑтроек в URL " "(%s)" #: src/hed/libs/common/UserConfig.cpp:885 #, c-format msgid "Specified overlay file (%s) does not exist." msgstr "Указанный файл Ñ Ñ‚Ñ€Ð°Ñ„Ð°Ñ€ÐµÑ‚Ð¾Ð¼ (%s) не ÑущеÑтвует." #: src/hed/libs/common/UserConfig.cpp:889 #, c-format msgid "" "Unknown attribute %s in common section of configuration file (%s), ignoring " "it" msgstr "" "ИгнорируетÑÑ Ð½ÐµÐ¸Ð·Ð²ÐµÑтный атрибут %s в разделе common файла наÑтроек (%s)" #: src/hed/libs/common/UserConfig.cpp:930 #, c-format msgid "Unknown section %s, ignoring it" msgstr "ИгнорируетÑÑ Ð½ÐµÐ¸Ð·Ð²ÐµÑтный раздел %s" #: src/hed/libs/common/UserConfig.cpp:934 #, c-format msgid "Configuration (%s) loaded" msgstr "ÐаÑтройки (%s) подгружены" #: src/hed/libs/common/UserConfig.cpp:937 #, c-format msgid "Could not load configuration (%s)" msgstr "Ðе удалоÑÑŒ подгрузить наÑтройки (%s)" #: src/hed/libs/common/UserConfig.cpp:1032 #, c-format msgid "UserConfiguration saved to file (%s)" msgstr "UserConfiguration Ñохранены в файле (%s)" #: src/hed/libs/common/UserConfig.cpp:1045 #, c-format msgid "Unable to create %s directory." msgstr "Ðе удалоÑÑŒ Ñоздать каталог %s." #: src/hed/libs/common/UserConfig.cpp:1054 #, c-format msgid "Configuration example file created (%s)" msgstr "Создан шаблонный файл наÑтроек (%s)" #: src/hed/libs/common/UserConfig.cpp:1056 #, c-format msgid "Unable to copy example configuration from existing configuration (%s)" msgstr "Ðе удалоÑÑŒ Ñкопировать шаблон наÑтроек из ÑущеÑтвующих наÑтроек (%s)" #: src/hed/libs/common/UserConfig.cpp:1061 #, c-format msgid "Cannot copy example configuration (%s), it is not a regular file" msgstr "" "Ðе удалоÑÑŒ Ñкопировать шаблон наÑтроек (%s), Ñ‚.к. Ñто неÑтандартный файл" #: src/hed/libs/common/UserConfig.cpp:1066 #, c-format msgid "Example configuration (%s) not created." msgstr "Шаблон наÑтроек (%s) не Ñоздан." 
#: src/hed/libs/common/UserConfig.cpp:1071 #, c-format msgid "The default configuration file (%s) is not a regular file." msgstr "Файл наÑтроек по умолчанию (%s) не ÑвлÑетÑÑ Ð¾Ð±Ñ‹Ñ‡Ð½Ñ‹Ð¼ файлом." #: src/hed/libs/common/UserConfig.cpp:1089 #, c-format msgid "%s directory created" msgstr "Ñоздан каталог %s" #: src/hed/libs/common/UserConfig.cpp:1091 #: src/hed/libs/common/UserConfig.cpp:1130 src/hed/libs/data/DataMover.cpp:679 #, c-format msgid "Failed to create directory %s" msgstr "Ðе удалоÑÑŒ Ñоздать каталог %s" #: src/hed/libs/common/test/LoggerTest.cpp:58 msgid "This VERBOSE message should not be seen" msgstr "Этого ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ VERBOSE не должно быть видно" #: src/hed/libs/common/test/LoggerTest.cpp:62 msgid "This INFO message should be seen" msgstr "Это Ñообщение INFO должно быть видно" #: src/hed/libs/common/test/LoggerTest.cpp:73 msgid "This VERBOSE message should now be seen" msgstr "Это Ñообщение VERBOSE теперь должно быть видно" #: src/hed/libs/common/test/LoggerTest.cpp:79 msgid "This INFO message should also be seen" msgstr "Это Ñообщение INFO тоже должно быть видно" #: src/hed/libs/common/test/LoggerTest.cpp:93 msgid "This message goes to initial destination" msgstr "Это Ñообщение выводитÑÑ Ð² изначальное назначение" #: src/hed/libs/common/test/LoggerTest.cpp:108 msgid "This message goes to per-thread destination" msgstr "Это Ñообщение направлÑетÑÑ Ð² каждый поток" #: src/hed/libs/communication/ClientSAML2SSO.cpp:80 msgid "Request failed: No response from SPService" msgstr "Сбой запроÑа: нет ответа от Ñлужбы SPService" #: src/hed/libs/communication/ClientSAML2SSO.cpp:84 #: src/hed/libs/communication/ClientSAML2SSO.cpp:137 msgid "Request failed: response from SPService is not as expected" msgstr "Сбой запроÑа: неверный ответ от Ñлужбы SPService" #: src/hed/libs/communication/ClientSAML2SSO.cpp:92 #, c-format msgid "Authentication Request URL: %s" msgstr "ÐÐ´Ñ€ÐµÑ URL запроÑа Ð¿Ð¾Ð´Ñ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸Ñ Ð¿Ð¾Ð´Ð»Ð¸Ð½Ð½Ð¾Ñти: %s" #: src/hed/libs/communication/ClientSAML2SSO.cpp:133 msgid "Request failed: No response from IdP" msgstr "Сбой запроÑа: нет ответа от Ñлужбы IdP" #: src/hed/libs/communication/ClientSAML2SSO.cpp:184 msgid "Request failed: No response from IdP when doing redirecting" msgstr "Сбой запроÑа: нет ответа Ð¾Ñ Ñлужбы IdP при перенаправлении" #: src/hed/libs/communication/ClientSAML2SSO.cpp:188 msgid "" "Request failed: response from IdP is not as expected when doing redirecting" msgstr "Сбой запроÑа: неверный ответ от Ñлужбы IdP при перенаправлении" #: src/hed/libs/communication/ClientSAML2SSO.cpp:245 msgid "Request failed: No response from IdP when doing authentication" msgstr "Сбой запроÑа: нет ответа от Ñлужбы IdP при проверке подлинноÑти" #: src/hed/libs/communication/ClientSAML2SSO.cpp:249 msgid "" "Request failed: response from IdP is not as expected when doing " "authentication" msgstr "Сбой запроÑа: неверный ответ от Ñлужбы IdP при проверке подлинноÑти" #: src/hed/libs/communication/ClientSAML2SSO.cpp:294 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:300 msgid "Succeeded to verify the signature under " msgstr "ПодпиÑÑŒ уÑпешно подтверждена" #: src/hed/libs/communication/ClientSAML2SSO.cpp:296 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:303 msgid "Failed to verify the signature under " msgstr "ПодпиÑÑŒ не подтверждена" #: src/hed/libs/communication/ClientSAML2SSO.cpp:310 msgid "" "Request failed: No response from SP Service when sending SAML assertion to SP" msgstr "" "Сбой запроÑа: нет ответа от Ñлужбы SP при отÑылке ÑƒÑ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸Ñ SAML на SP" #: 
src/hed/libs/communication/ClientSAML2SSO.cpp:314 msgid "" "Request failed: response from SP Service is not as expected when sending " "SAML assertion to SP" msgstr "" "Сбой запроÑа: неприемлемый ответ от Ñлужбы SP при отÑылке ÑƒÑ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸Ñ SAML " "на SP" #: src/hed/libs/communication/ClientSAML2SSO.cpp:325 #, c-format msgid "IdP return some error message: %s" msgstr "Служба IdP выдала Ñообщение об ошибке: %s" #: src/hed/libs/communication/ClientSAML2SSO.cpp:353 #: src/hed/libs/communication/ClientSAML2SSO.cpp:398 msgid "SAML2SSO process failed" msgstr "Сбой процеÑÑа SAML2SSO" #: src/hed/libs/communication/ClientX509Delegation.cpp:54 msgid "Creating delegation credential to ARC delegation service" msgstr "Создание делегируемых параметров доÑтупа Ð´Ð»Ñ Ñлужбы Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ ARC" #: src/hed/libs/communication/ClientX509Delegation.cpp:64 #: src/hed/libs/communication/ClientX509Delegation.cpp:267 msgid "DelegateCredentialsInit failed" msgstr "Сбой в DelegateCredentialsInit" #: src/hed/libs/communication/ClientX509Delegation.cpp:68 #: src/hed/libs/communication/ClientX509Delegation.cpp:122 #: src/hed/libs/communication/ClientX509Delegation.cpp:157 #: src/hed/libs/communication/ClientX509Delegation.cpp:212 #: src/hed/libs/communication/ClientX509Delegation.cpp:271 msgid "There is no SOAP response" msgstr "Ðет ответа SOAP" #: src/hed/libs/communication/ClientX509Delegation.cpp:73 msgid "There is no X509 request in the response" msgstr "Ð’ ответе отÑутÑтвует Ð·Ð°Ð¿Ñ€Ð¾Ñ X509" #: src/hed/libs/communication/ClientX509Delegation.cpp:78 msgid "There is no Format request in the response" msgstr "Ð’ ответе отÑутÑтвует Ð·Ð°Ð¿Ñ€Ð¾Ñ Format" #: src/hed/libs/communication/ClientX509Delegation.cpp:86 msgid "There is no Id or X509 request value in the response" msgstr "Ответ не Ñодержит Id или значение запроÑа X509" #: src/hed/libs/communication/ClientX509Delegation.cpp:99 #: src/hed/libs/communication/ClientX509Delegation.cpp:187 msgid "DelegateProxy failed" msgstr "Сбой в DelegateProxy" #: src/hed/libs/communication/ClientX509Delegation.cpp:118 msgid "UpdateCredentials failed" msgstr "Сбой в UpdateCredentials" #: src/hed/libs/communication/ClientX509Delegation.cpp:126 msgid "There is no UpdateCredentialsResponse in response" msgstr "Ð’ ответе отÑутÑтвует UpdateCredentialsResponse" #: src/hed/libs/communication/ClientX509Delegation.cpp:134 #: src/hed/libs/communication/ClientX509Delegation.cpp:162 #: src/hed/libs/communication/ClientX509Delegation.cpp:217 #: src/hed/libs/communication/ClientX509Delegation.cpp:302 msgid "There is no SOAP connection chain configured" msgstr "Ðе наÑтроена цепочка ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ SOAP" #: src/hed/libs/communication/ClientX509Delegation.cpp:140 msgid "Creating delegation to CREAM delegation service" msgstr "Создание Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð´Ð»Ñ Ñлужбы Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ CREAM" #: src/hed/libs/communication/ClientX509Delegation.cpp:153 msgid "Delegation getProxyReq request failed" msgstr "Сбой запроÑа Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ getProxyReq" #: src/hed/libs/communication/ClientX509Delegation.cpp:173 msgid "Creating delegation to CREAM delegation service failed" msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð´Ð»Ñ Ñлужбы Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ CREAM" #: src/hed/libs/communication/ClientX509Delegation.cpp:208 msgid "Delegation putProxy request failed" msgstr "Сбой запроÑа Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ putProxy" #: src/hed/libs/communication/ClientX509Delegation.cpp:222 msgid "Creating delegation to CREAM delegation failed" msgstr "Сбой 
ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð´Ð»Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ CREAM" #: src/hed/libs/communication/ClientX509Delegation.cpp:237 msgid "Getting delegation credential from ARC delegation service" msgstr "" "Получение делегированных параметров доÑтупа от Ñлужбы Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ ARC" #: src/hed/libs/communication/ClientX509Delegation.cpp:276 msgid "There is no Delegated X509 token in the response" msgstr "Ответ не Ñодержит делегированный токен X509" #: src/hed/libs/communication/ClientX509Delegation.cpp:281 msgid "There is no Format delegated token in the response" msgstr "Ответ не Ñодержит делегированный токен в нужном формате" #: src/hed/libs/communication/ClientX509Delegation.cpp:289 msgid "There is no Id or X509 token value in the response" msgstr "Ответ не Ñодержит Id или значение маркёра X509" #: src/hed/libs/communication/ClientX509Delegation.cpp:298 #, c-format msgid "" "Get delegated credential from delegation service: \n" " %s" msgstr "" "Получение делегированных параметров доÑтупа от Ñлужбы делегированиÑ: \n" " %s" #: src/hed/libs/compute/Broker.cpp:62 #, c-format msgid "Performing matchmaking against target (%s)." msgstr "ПроизводитÑÑ Ñравнение Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸ÐµÐ¼ (%s)." #: src/hed/libs/compute/Broker.cpp:72 #, c-format msgid "Matchmaking, ExecutionTarget: %s matches job description" msgstr "Сравнение; ExecutionTarget: %s ÑоответÑтвует опиÑанию задачи" #: src/hed/libs/compute/Broker.cpp:145 #, c-format msgid "" "The CA issuer (%s) of the credentials (%s) is not trusted by the target (%s)." msgstr "" "ÐгентÑтво (%s), выдавшее Ñертификат (%s), не отноÑитÑÑ Ðº доверÑемым целью " "(%s)." #: src/hed/libs/compute/Broker.cpp:153 src/hed/libs/compute/Broker.cpp:171 #, c-format msgid "ComputingShareName of ExecutionTarget (%s) is not defined" msgstr "Ðе определён параметр ComputingShareName атрибута ExecutionTarget (%s)" #: src/hed/libs/compute/Broker.cpp:157 src/hed/libs/compute/Broker.cpp:162 #, c-format msgid "ComputingShare (%s) explicitly rejected" msgstr "Цель ComputingShare (%s) Ñвно отклонена" #: src/hed/libs/compute/Broker.cpp:175 src/hed/libs/compute/Broker.cpp:180 #, c-format msgid "ComputingShare (%s) does not match selected queue (%s)" msgstr "Цель ComputingShare (%s) не ÑоответÑтвует выбранной очереди (%s)" #: src/hed/libs/compute/Broker.cpp:189 #, c-format msgid "" "ProcessingStartTime (%s) specified in job description is inside the targets " "downtime period [ %s - %s ]." msgstr "" "Ð’Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð° Ñчёта (%s), указанное в опиÑании задачи, приходитÑÑ Ð½Ð° период " "недоÑтупноÑти цели [ %s - %s ]." #: src/hed/libs/compute/Broker.cpp:194 #, c-format msgid "The downtime of the target (%s) is not published. Keeping target." msgstr "Период недоÑтупноÑти цели (%s) не объÑвлен. Цель ÑохранÑетÑÑ." #: src/hed/libs/compute/Broker.cpp:200 #, c-format msgid "HealthState of ExecutionTarget (%s) is not OK (%s)" msgstr "" "СоÑтоÑние Ð·Ð´Ð¾Ñ€Ð¾Ð²ÑŒÑ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ (%s) неудовлетворительное (%s)" #: src/hed/libs/compute/Broker.cpp:205 #, c-format msgid "Matchmaking, ExecutionTarget: %s, HealthState is not defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, ÑоÑтоÑние Ð·Ð´Ð¾Ñ€Ð¾Ð²ÑŒÑ Ð½Ðµ определено" #: src/hed/libs/compute/Broker.cpp:212 #, c-format msgid "" "Matchmaking, Computing endpoint requirement not satisfied. ExecutionTarget: " "%s" msgstr "" "Сравнение; не удовлетворено требование к вычиÑлительному реÑурÑу. 
Ðазначение " "Ð´Ð»Ñ Ð¸ÑполнениÑ: %s" #: src/hed/libs/compute/Broker.cpp:217 #, c-format msgid "Matchmaking, ExecutionTarget: %s, ImplementationName is not defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение ImplementationName не " "определено" #: src/hed/libs/compute/Broker.cpp:243 #, c-format msgid "" "Matchmaking, %s (%d) is %s than %s (%d) published by the ExecutionTarget." msgstr "" "Сравнение; %s (%d) не ÑоответÑтвует (%s) значению %s (%d), публикуемому " "назначением Ð´Ð»Ñ Ð¸ÑполнениÑ." #: src/hed/libs/compute/Broker.cpp:272 #, c-format msgid "" "Matchmaking, The %s scaled %s (%d) is %s than the %s (%d) published by the " "ExecutionTarget." msgstr "" "Сравнение; приведённое к значению %s значение %s (%d) не ÑоответÑтвует (%s) " "значению %s (%d) публикуемому назначением Ð´Ð»Ñ Ð¸ÑполнениÑ." #: src/hed/libs/compute/Broker.cpp:284 #, c-format msgid "Matchmaking, Benchmark %s is not published by the ExecutionTarget." msgstr "" "Сравнение; значение Ñталонного теÑта %s не публикуетÑÑ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸ÐµÐ¼ Ð´Ð»Ñ " "иÑполнениÑ." #: src/hed/libs/compute/Broker.cpp:299 #, c-format msgid "" "Matchmaking, MaxTotalCPUTime problem, ExecutionTarget: %d (MaxTotalCPUTime), " "JobDescription: %d (TotalCPUTime)" msgstr "" "Сравнение; проблема Ñ MaxTotalCPUTime, ExecutionTarget: %d " "(MaxTotalCPUTime), JobDescription: %d (TotalCPUTime)" #: src/hed/libs/compute/Broker.cpp:306 #, c-format msgid "" "Matchmaking, MaxCPUTime problem, ExecutionTarget: %d (MaxCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" msgstr "" "Сравнение; проблема Ñ MaxCPUTime, ExecutionTarget: %d (MaxCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" #: src/hed/libs/compute/Broker.cpp:311 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxTotalCPUTime or MaxCPUTime not " "defined, assuming no CPU time limit" msgstr "" "Сравнение; ExecutionTarget: %s, не задано MaxTotalCPUTime или MaxCPUTime, " "предполагаетÑÑ Ð¾Ñ‚ÑутÑтвие ограничений на процеÑÑорное времÑ" #: src/hed/libs/compute/Broker.cpp:317 #, c-format msgid "" "Matchmaking, MinCPUTime problem, ExecutionTarget: %d (MinCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" msgstr "" "Сравнение; проблема Ñ MinCPUTime, ExecutionTarget: %d (MinCPUTime), " "JobDescription: %d (TotalCPUTime/NumberOfSlots)" #: src/hed/libs/compute/Broker.cpp:322 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MinCPUTime not defined, assuming no CPU " "time limit" msgstr "" "Сравнение; ExecutionTarget: %s, не задано MinCPUTime, предполагаетÑÑ " "отÑутÑтвие ограничений на процеÑÑорное времÑ" #: src/hed/libs/compute/Broker.cpp:330 #, c-format msgid "" "Matchmaking, MainMemorySize problem, ExecutionTarget: %d (MainMemorySize), " "JobDescription: %d (IndividualPhysicalMemory)" msgstr "" "Сравнение; неÑовпадение MainMemorySize: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d " "(MainMemorySize), в опиÑании задачи: %d (IndividualPhysicalMemory)" #: src/hed/libs/compute/Broker.cpp:336 #, c-format msgid "" "Matchmaking, MaxMainMemory problem, ExecutionTarget: %d (MaxMainMemory), " "JobDescription: %d (IndividualPhysicalMemory)" msgstr "" "Сравнение; неÑовпадение MaxMainMemory: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d " "(MaxMainMemory), в опиÑании задачи: %d (IndividualPhysicalMemory)" #: src/hed/libs/compute/Broker.cpp:341 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxMainMemory and MainMemorySize are not " "defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ MaxMainMemory и " "MainMemorySize не 
определены" #: src/hed/libs/compute/Broker.cpp:349 #, c-format msgid "" "Matchmaking, MaxVirtualMemory problem, ExecutionTarget: %d " "(MaxVirtualMemory), JobDescription: %d (IndividualVirtualMemory)" msgstr "" "Сравнение; неÑовпадение MaxVirtualMemory: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d " "(MaxVirtualMemory), в опиÑании задачи: %d (IndividualVirtualMemory)" #: src/hed/libs/compute/Broker.cpp:354 #, c-format msgid "Matchmaking, ExecutionTarget: %s, MaxVirtualMemory is not defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение MaxVirtualMemory не " "определено" #: src/hed/libs/compute/Broker.cpp:362 #, c-format msgid "" "Matchmaking, Platform problem, ExecutionTarget: %s (Platform) " "JobDescription: %s (Platform)" msgstr "" "Сравнение; неÑовпадение платформ: ExecutionTarget: %s (Platform) " "JobDescription: %s (Platform)" #: src/hed/libs/compute/Broker.cpp:367 #, c-format msgid "Matchmaking, ExecutionTarget: %s, Platform is not defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение Platform не определено" #: src/hed/libs/compute/Broker.cpp:375 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, OperatingSystem requirements not satisfied" msgstr "" "Сравнение; не удовлетворены Ñ‚Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ OperatingSystem к ExecutionTarget: %s" #: src/hed/libs/compute/Broker.cpp:380 #, c-format msgid "Matchmaking, ExecutionTarget: %s, OperatingSystem is not defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение OperatingSystem не " "определено" #: src/hed/libs/compute/Broker.cpp:388 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, RunTimeEnvironment requirements not " "satisfied" msgstr "" "Сравнение; не удовлетворены Ñ‚Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ RunTimeEnvironment к ExecutionTarget: " "%s" #: src/hed/libs/compute/Broker.cpp:393 #, c-format msgid "Matchmaking, ExecutionTarget: %s, ApplicationEnvironments not defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение ApplicationEnvironments " "не определено" #: src/hed/libs/compute/Broker.cpp:402 #, c-format msgid "" "Matchmaking, NetworkInfo demand not fulfilled, ExecutionTarget do not " "support %s, specified in the JobDescription." msgstr "" "Сравнение; не удовлетворено требование NetworkInfo, назначение Ð´Ð»Ñ " "иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½Ðµ поддерживает %s, указанное в опиÑании задачи." 
#: src/hed/libs/compute/Broker.cpp:406 #, c-format msgid "Matchmaking, ExecutionTarget: %s, NetworkInfo is not defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение NetworkInfo не определено" #: src/hed/libs/compute/Broker.cpp:414 #, c-format msgid "" "Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (SessionDiskSpace)" msgstr "" "Сравнение; неÑовпадение MaxDiskSpace: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB " "(MaxDiskSpace), в опиÑании задачи: %d MB (SessionDiskSpace)" #: src/hed/libs/compute/Broker.cpp:421 #, c-format msgid "" "Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (SessionDiskSpace)" msgstr "" "Сравнение; неÑовпадение WorkingAreaFree: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB " "(WorkingAreaFree), в опиÑании задачи: %d MB (SessionDiskSpace)" #: src/hed/libs/compute/Broker.cpp:427 src/hed/libs/compute/Broker.cpp:448 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, MaxDiskSpace and WorkingAreaFree are not " "defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ MaxDiskSpace и " "WorkingAreaFree не определено" #: src/hed/libs/compute/Broker.cpp:435 #, c-format msgid "" "Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); " "JobDescription: %d MB (DiskSpace)" msgstr "" "Сравнение; неÑовпадение MaxDiskSpace: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB " "(MaxDiskSpace), в опиÑании задачи: %d MB (DiskSpace)" #: src/hed/libs/compute/Broker.cpp:442 #, c-format msgid "" "Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB " "(WorkingAreaFree); JobDescription: %d MB (DiskSpace)" msgstr "" "Сравнение; неÑовпадение WorkingAreaFree: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB " "(WorkingAreaFree), в опиÑании задачи: %d MB (DiskSpace)" #: src/hed/libs/compute/Broker.cpp:456 #, c-format msgid "" "Matchmaking, CacheTotal problem, ExecutionTarget: %d MB (CacheTotal); " "JobDescription: %d MB (CacheDiskSpace)" msgstr "" "Сравнение; неÑовпадение CacheTotal: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB " "(CacheTotal), в опиÑании задачи: %d MB (CacheDiskSpace)" #: src/hed/libs/compute/Broker.cpp:461 #, c-format msgid "Matchmaking, ExecutionTarget: %s, CacheTotal is not defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение CacheTotal не определено" #: src/hed/libs/compute/Broker.cpp:469 #, c-format msgid "" "Matchmaking, TotalSlots problem, ExecutionTarget: %d (TotalSlots) " "JobDescription: %d (NumberOfProcesses)" msgstr "" "Сравнение; неÑовпадение TotalSlots: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d " "(TotalSlots), в опиÑании задачи: %d (NumberOfProcesses)" #: src/hed/libs/compute/Broker.cpp:475 #, c-format msgid "" "Matchmaking, MaxSlotsPerJob problem, ExecutionTarget: %d (MaxSlotsPerJob) " "JobDescription: %d (NumberOfProcesses)" msgstr "" "Сравнение; неÑовпадение MaxSlotsPerJob: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d " "(MaxSlotsPerJob), в опиÑании задачи: %d (NumberOfProcesses)" #: src/hed/libs/compute/Broker.cpp:481 #, c-format msgid "" "Matchmaking, ExecutionTarget: %s, TotalSlots and MaxSlotsPerJob are not " "defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ TotalSlots и " "MaxSlotsPerJob не определены" #: src/hed/libs/compute/Broker.cpp:489 #, c-format msgid "" "Matchmaking, WorkingAreaLifeTime problem, ExecutionTarget: %s " "(WorkingAreaLifeTime) JobDescription: %s (SessionLifeTime)" msgstr "" "Сравнение; неÑовпадение 
WorkingAreaLifeTime: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %s " "(WorkingAreaLifeTime), в опиÑании задачи: %s (SessionLifeTime)" #: src/hed/libs/compute/Broker.cpp:494 #, c-format msgid "Matchmaking, ExecutionTarget: %s, WorkingAreaLifeTime is not defined" msgstr "" "Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение WorkingAreaLifeTime не " "определено" #: src/hed/libs/compute/Broker.cpp:502 #, c-format msgid "" "Matchmaking, ConnectivityIn problem, ExecutionTarget: %s (ConnectivityIn) " "JobDescription: %s (InBound)" msgstr "" "Сравнение; неÑовпадение ConnectivityIn: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %s " "(ConnectivityIn), в опиÑании задачи: %s (InBound)" #: src/hed/libs/compute/Broker.cpp:509 #, c-format msgid "" "Matchmaking, ConnectivityOut problem, ExecutionTarget: %s (ConnectivityOut) " "JobDescription: %s (OutBound)" msgstr "" "Сравнение; неÑовпадение ConnectivityOut: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %s " "(ConnectivityOut), в опиÑании задачи: %s (OutBound)" #: src/hed/libs/compute/Broker.cpp:532 msgid "Unable to sort added jobs. The BrokerPlugin plugin has not been loaded." msgstr "" "Ðевозможно упорÑдочить добавленные задачи. Подключаемый модуль BrokerPlugin " "не был подгружен." #: src/hed/libs/compute/Broker.cpp:549 msgid "Unable to match target, marking it as not matching. Broker not valid." msgstr "" "РеÑÑƒÑ€Ñ Ð½Ðµ ÑоответÑтвует заданию, помечаетÑÑ ÐºÐ°Ðº неÑоответÑтвующий. " "Планировщик недейÑтвителен." #: src/hed/libs/compute/Broker.cpp:585 msgid "Unable to sort ExecutionTarget objects - Invalid Broker object." msgstr "" "Ðевозможно упорÑдочить объекты ExecutionTarget - недопуÑтимый объект Broker." #: src/hed/libs/compute/Broker.cpp:609 msgid "" "Unable to register job submission. Can't get JobDescription object from " "Broker, Broker is invalid." msgstr "" "Ðевозможно зарегиÑтрировать заÑылку задачи. Ðевозможно получить объект " "JobDescription из планировщика, планировщик недейÑтвителен." #: src/hed/libs/compute/BrokerPlugin.cpp:89 #, c-format msgid "Broker plugin \"%s\" not found." msgstr "Подключаемый модуль брокера \"%s\" не обнаружен." 
#: src/hed/libs/compute/BrokerPlugin.cpp:96 #, c-format msgid "Unable to load BrokerPlugin (%s)" msgstr "Ðевозможно загрузить модуль BrokerPlugin (%s)" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:27 #, c-format msgid "Uniq is replacing service coming from %s with service coming from %s" msgstr "" "Uniq заменÑет ÑервиÑ, обнаруженный через %s, на ÑервиÑ, обнаруженный через %s" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:31 #, c-format msgid "Uniq is ignoring service coming from %s" msgstr "Uniq игнорирует ÑервиÑ, обнаруженный через %s" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:38 #, c-format msgid "Uniq is adding service coming from %s" msgstr "Uniq добавлÑет ÑервиÑ, обнаруженный через %s" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:61 #, c-format msgid "Adding endpoint (%s) to TargetInformationRetriever" msgstr "Добавление точки входа (%s) в TargetInformationRetriever" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:64 #, c-format msgid "Adding endpoint (%s) to ServiceEndpointRetriever" msgstr "Добавление точки входа (%s) в ServiceEndpointRetriever" #: src/hed/libs/compute/ComputingServiceRetriever.cpp:67 #, c-format msgid "" "Adding endpoint (%s) to both ServiceEndpointRetriever and " "TargetInformationRetriever" msgstr "" "Точка входа (%s) добавлÑетÑÑ ÐºÐ°Ðº к ServiceEndpointRetriever, так и к " "TargetInformationRetriever" #: src/hed/libs/compute/EntityRetriever.cpp:42 #, c-format msgid "The plugin %s does not support any interfaces, skipping it." msgstr "" "Подключаемый модуль %s не поддерживает никаких интерфейÑов, пропуÑкаетÑÑ." #: src/hed/libs/compute/EntityRetriever.cpp:47 #, c-format msgid "" "The first supported interface of the plugin %s is an empty string, skipping " "the plugin." msgstr "" "Первый поддерживаемый Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ð¾Ð³Ð¾ Ð¼Ð¾Ð´ÑƒÐ»Ñ %s оказалÑÑ Ð¿ÑƒÑтой " "Ñтрокой, модуль пропуÑкаетÑÑ." #: src/hed/libs/compute/EntityRetriever.cpp:95 #, c-format msgid "Interface on endpoint (%s) %s." msgstr "Ð˜Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ входа (%s) %s." #: src/hed/libs/compute/EntityRetriever.cpp:101 #: src/hed/libs/compute/EntityRetriever.cpp:133 #: src/hed/libs/compute/EntityRetriever.cpp:425 #, c-format msgid "Ignoring endpoint (%s), it is already registered in retriever." msgstr "" "ИгнорируетÑÑ Ñ‚Ð¾Ñ‡ÐºÐ° входа (%s), Ñ‚.к. она уже зарегиÑтрирована в загрузчике." #: src/hed/libs/compute/EntityRetriever.cpp:110 #, c-format msgid "Service Loop: Endpoint %s" msgstr "Цикл по ÑервиÑам: точка входа %s" #: src/hed/libs/compute/EntityRetriever.cpp:112 #, c-format msgid " This endpoint (%s) is STARTED or SUCCESSFUL" msgstr " СоÑтоÑние точки входа (%s) - STARTED или SUCCESSFUL" #: src/hed/libs/compute/EntityRetriever.cpp:115 #, c-format msgid "" "Suspending querying of endpoint (%s) since the service at the endpoint is " "already being queried, or has been queried." msgstr "" "ПриоÑтанавливаетÑÑ Ð¾Ð¿Ñ€Ð¾Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ входа (%s), Ñ‚.к. ÑÐµÑ€Ð²Ð¸Ñ Ð¿Ð¾ Ñтому адреÑу уже " "опрашиваетÑÑ Ð¸Ð»Ð¸ опрошен." 
#: src/hed/libs/compute/EntityRetriever.cpp:122 #: src/hed/libs/compute/EntityRetriever.cpp:237 #, c-format msgid " Status of endpoint (%s) is %s" msgstr " СоÑтоÑние точки входа (%s): %s" #: src/hed/libs/compute/EntityRetriever.cpp:126 #, c-format msgid "Setting status (STARTED) for endpoint: %s" msgstr "ЗадаётÑÑ ÑоÑтоÑние (STARTED) Ð´Ð»Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ входа: %s" #: src/hed/libs/compute/EntityRetriever.cpp:145 #, c-format msgid "Starting thread to query the endpoint on %s" msgstr "ЗапуÑкаетÑÑ Ð¿Ð¾Ñ‚Ð¾Ðº Ð´Ð»Ñ Ð¾Ð¿Ñ€Ð¾Ñа точки доÑтупа %s" #: src/hed/libs/compute/EntityRetriever.cpp:147 #: src/hed/libs/compute/EntityRetriever.cpp:289 #, c-format msgid "Failed to start querying the endpoint on %s" msgstr "Ðе удалоÑÑŒ начать Ð¾Ð¿Ñ€Ð¾Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ входа на %s" #: src/hed/libs/compute/EntityRetriever.cpp:174 #, c-format msgid "Found a registry, will query it recursively: %s" msgstr "Ðайден рееÑтр, который будет опрошен рекурÑивно: %s" #: src/hed/libs/compute/EntityRetriever.cpp:211 #, c-format msgid "Setting status (%s) for endpoint: %s" msgstr "ПриÑваиваетÑÑ ÑоÑтоÑние (%s) точки входа: %s" #: src/hed/libs/compute/EntityRetriever.cpp:231 msgid "Checking for suspended endpoints which should be started." msgstr "Проверка отложенных точек входа на предмет повторного опроÑа." #: src/hed/libs/compute/EntityRetriever.cpp:241 #, c-format msgid "Found started or successful endpoint (%s)" msgstr "Ðайдена точка входа в ÑоÑтоÑнии STARTED или SUCCESSFUL (%s)" #: src/hed/libs/compute/EntityRetriever.cpp:253 #, c-format msgid "Found suspended endpoint (%s)" msgstr "Обнаружена временно иÑÐºÐ»ÑŽÑ‡Ñ‘Ð½Ð½Ð°Ñ Ñ‚Ð¾Ñ‡ÐºÐ° входа (%s)" #: src/hed/libs/compute/EntityRetriever.cpp:264 #, c-format msgid "Trying to start suspended endpoint (%s)" msgstr "Попытка активации временно иÑключённой точки входа (%s)" #: src/hed/libs/compute/EntityRetriever.cpp:284 #, c-format msgid "" "Starting querying of suspended endpoint (%s) - no other endpoints for this " "service is being queried or has been queried successfully." msgstr "" "ÐачинаетÑÑ Ð¾Ð¿Ñ€Ð¾Ñ Ð¾Ñ‚Ð»Ð¾Ð¶ÐµÐ½Ð½Ð¾Ð¹ точки входа (%s) - другие точки входа Ñтого " "ÑервиÑа не опрашиваютÑÑ, либо были уже уÑпешно опрошены." #: src/hed/libs/compute/EntityRetriever.cpp:351 #, c-format msgid "Calling plugin %s to query endpoint on %s" msgstr "ВызываетÑÑ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ñ‹Ð¹ модуль %s Ð´Ð»Ñ Ð¾Ð¿Ñ€Ð¾Ñа точки входа на %s" #: src/hed/libs/compute/EntityRetriever.cpp:373 #, c-format msgid "" "The interface of this endpoint (%s) is unspecified, will try all possible " "plugins" msgstr "" "Ð˜Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ñтой точки доÑтупа (%s) не задан, пробуютÑÑ Ð²Ñе возможные " "подключаемые модули" #: src/hed/libs/compute/EntityRetriever.cpp:389 #, c-format msgid "Problem loading plugin %s, skipping it." msgstr "Проблемы при подключении Ð¼Ð¾Ð´ÑƒÐ»Ñ %s, модуль пропуÑкаетÑÑ." 
#: src/hed/libs/compute/EntityRetriever.cpp:393 #, c-format msgid "The endpoint (%s) is not supported by this plugin (%s)" msgstr "Точка входа (%s) не поддерживаетÑÑ Ñтим подключаемым модулем (%s)" #: src/hed/libs/compute/EntityRetriever.cpp:414 #, c-format msgid "" "New endpoint is created (%s) from the one with the unspecified interface (%s)" msgstr "" "Создана Ð½Ð¾Ð²Ð°Ñ Ñ‚Ð¾Ñ‡ÐºÐ° доÑтупа (%s) из точки Ñ Ð½ÐµÐ¸Ð·Ð²ÐµÑтным интерфейÑом (%s)" #: src/hed/libs/compute/EntityRetriever.cpp:432 #, c-format msgid "Starting sub-thread to query the endpoint on %s" msgstr "ЗапуÑкаетÑÑ Ð¿Ð¾Ð´Ð¿Ð¾Ñ‚Ð¾Ðº Ð´Ð»Ñ Ð¾Ð¿Ñ€Ð¾Ñа точки доÑтупа по %s" #: src/hed/libs/compute/EntityRetriever.cpp:434 #, c-format msgid "" "Failed to start querying the endpoint on %s (unable to create sub-thread)" msgstr "Сбой начала опроÑа точки доÑтупа по %s (не удалоÑÑŒ Ñоздать подпоток)" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:32 #, c-format msgid "Found %s %s (it was loaded already)" msgstr "Ðайден подключаемый модуль %s %s (уже подгружен)" #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:41 #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:49 #: src/hed/libs/compute/JobControllerPlugin.cpp:100 #: src/hed/libs/compute/JobControllerPlugin.cpp:109 #: src/hed/libs/compute/SubmitterPlugin.cpp:171 #: src/hed/libs/compute/SubmitterPlugin.cpp:181 #, c-format msgid "" "Unable to locate the \"%s\" plugin. Please refer to installation " "instructions and check if package providing support for \"%s\" plugin is " "installed" msgstr "" "Ðе удалоÑÑŒ обнаружить подключаемый модуль \"%s\". ПожалуйÑта, " "проконÑультируйтеÑÑŒ Ñ Ð¸Ð½Ñтрукцией по уÑтановке и проверьте, уÑтановлен ли " "пакет, Ñодержащий модуль \"%s\"." #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:42 #, c-format msgid "%s plugin \"%s\" not found." msgstr "Ðе найден подключаемый модуль %s \"%s\"." #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:50 #, c-format msgid "%s %s could not be created." msgstr "%s %s не может быть Ñоздан." #: src/hed/libs/compute/EntityRetrieverPlugin.cpp:55 #, c-format msgid "Loaded %s %s" msgstr "Загружен %s %s" #: src/hed/libs/compute/ExecutionTarget.cpp:51 #, c-format msgid "" "Skipping ComputingEndpoint '%s', because it has '%s' interface instead of " "the requested '%s'." msgstr "" "ПропуÑкаетÑÑ ComputingEndpoint '%s', потому что объÑвлен Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ '%s' " "вмеÑто запрошенного '%s'." 
#: src/hed/libs/compute/ExecutionTarget.cpp:132 #, c-format msgid "" "Computing endpoint %s (type %s) added to the list for submission brokering" msgstr "" "ВычиÑÐ»Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ñ‚Ð¾Ñ‡ÐºÐ° входа %s (тип %s) добавлена в ÑпиÑок Ð´Ð»Ñ Ð¿Ð»Ð°Ð½Ð¸Ñ€Ð¾Ð²ÐºÐ¸ " "заÑылки" #: src/hed/libs/compute/ExecutionTarget.cpp:239 #, c-format msgid "Address: %s" msgstr "ÐдреÑ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:240 #, c-format msgid "Place: %s" msgstr "МеÑто: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:241 #, c-format msgid "Country: %s" msgstr "Страна: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:242 #, c-format msgid "Postal code: %s" msgstr "Почтовый индекÑ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:243 #, c-format msgid "Latitude: %f" msgstr "Широта: %f" #: src/hed/libs/compute/ExecutionTarget.cpp:244 #, c-format msgid "Longitude: %f" msgstr "Долгота: %f" #: src/hed/libs/compute/ExecutionTarget.cpp:250 #, c-format msgid "Owner: %s" msgstr "Владелец: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:257 #, c-format msgid "ID: %s" msgstr "ID: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:258 #, c-format msgid "Type: %s" msgstr "Тип: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:263 #, c-format msgid "URL: %s" msgstr "URL: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:264 #, c-format msgid "Interface: %s" msgstr "Ð˜Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ %s" #: src/hed/libs/compute/ExecutionTarget.cpp:266 msgid "Interface versions:" msgstr "ВерÑии интерфейÑа:" #: src/hed/libs/compute/ExecutionTarget.cpp:271 msgid "Interface extensions:" msgstr "РаÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñа:" #: src/hed/libs/compute/ExecutionTarget.cpp:276 msgid "Capabilities:" msgstr "ВозможноÑти:" #: src/hed/libs/compute/ExecutionTarget.cpp:280 #, c-format msgid "Technology: %s" msgstr "ТехнологиÑ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:282 msgid "Supported Profiles:" msgstr "Поддерживаемые профили:" #: src/hed/libs/compute/ExecutionTarget.cpp:286 #, c-format msgid "Implementor: %s" msgstr "Внедритель: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:287 #, c-format msgid "Implementation name: %s" msgstr "Ð˜Ð¼Ñ Ñ€ÐµÐ°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ð¸: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:288 #, c-format msgid "Quality level: %s" msgstr "Уровень качеÑтва: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:289 #, c-format msgid "Health state: %s" msgstr "СоÑтоÑние здоровьÑ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:290 #, c-format msgid "Health state info: %s" msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ ÑоÑтоÑнии здоровьÑ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:291 #, c-format msgid "Serving state: %s" msgstr "СоÑтоÑние обÑлуживаниÑ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:292 #, c-format msgid "Issuer CA: %s" msgstr "Сертификат выдан CA: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:294 msgid "Trusted CAs:" msgstr "Доверенные центры Ñертификации:" #: src/hed/libs/compute/ExecutionTarget.cpp:298 #, c-format msgid "Downtime starts: %s" msgstr "Ðачало проÑтоÑ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:299 #, c-format msgid "Downtime ends: %s" msgstr "Конец проÑтоÑ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:300 #, c-format msgid "Staging: %s" msgstr "РазмещаетÑÑ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:302 msgid "Job descriptions:" msgstr "ОпиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡:" #: src/hed/libs/compute/ExecutionTarget.cpp:314 #, c-format msgid "Scheme: %s" msgstr "Схема: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:317 #, c-format msgid "Rule: %s" msgstr "Правило: %s" #: 
src/hed/libs/compute/ExecutionTarget.cpp:329 #, c-format msgid "Mapping queue: %s" msgstr "ÐазначаетÑÑ Ð¾Ñ‡ÐµÑ€ÐµÐ´ÑŒ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:330 #, c-format msgid "Max wall-time: %s" msgstr "ДлительноÑть, Ð½Ð°Ð¸Ð±Ð¾Ð»ÑŒÑˆÐ°Ñ (по чаÑам): %s" #: src/hed/libs/compute/ExecutionTarget.cpp:331 #, c-format msgid "Max total wall-time: %s" msgstr "Предел общего времени (по чаÑам): %s" #: src/hed/libs/compute/ExecutionTarget.cpp:332 #, c-format msgid "Min wall-time: %s" msgstr "ДлительноÑть, Ð½Ð°Ð¸Ð¼ÐµÐ½ÑŒÑˆÐ°Ñ (по чаÑам): %s" #: src/hed/libs/compute/ExecutionTarget.cpp:333 #, c-format msgid "Default wall-time: %s" msgstr "ДлительноÑть по умолчанию (по чаÑам): %s" #: src/hed/libs/compute/ExecutionTarget.cpp:334 #, c-format msgid "Max CPU time: %s" msgstr "ДлительноÑть, Ð½Ð°Ð¸Ð±Ð¾Ð»ÑŒÑˆÐ°Ñ (процеÑÑорнаÑ): %s" #: src/hed/libs/compute/ExecutionTarget.cpp:335 #, c-format msgid "Min CPU time: %s" msgstr "ДлительноÑть, Ð½Ð°Ð¸Ð¼ÐµÐ½ÑŒÑˆÐ°Ñ (процеÑÑорнаÑ): %s" #: src/hed/libs/compute/ExecutionTarget.cpp:336 #, c-format msgid "Default CPU time: %s" msgstr "ДлительноÑть по умолчанию (процеÑÑорнаÑ): %s" #: src/hed/libs/compute/ExecutionTarget.cpp:337 #, c-format msgid "Max total jobs: %i" msgstr "Ð’Ñего заданий (предел): %i" #: src/hed/libs/compute/ExecutionTarget.cpp:338 #, c-format msgid "Max running jobs: %i" msgstr "Задачи в Ñчёте (предел): %i" #: src/hed/libs/compute/ExecutionTarget.cpp:339 #, c-format msgid "Max waiting jobs: %i" msgstr "Предел задач в очереди: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:340 #, c-format msgid "Max pre-LRMS waiting jobs: %i" msgstr "Предел задач в очереди до СУПО: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:341 #, c-format msgid "Max user running jobs: %i" msgstr "Задачи Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð² Ñчёте (предел): %i" #: src/hed/libs/compute/ExecutionTarget.cpp:342 #, c-format msgid "Max slots per job: %i" msgstr "Предел Ñегментов на задачу: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:343 #, c-format msgid "Max stage in streams: %i" msgstr "Предел потоков размещениÑ: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:344 #, c-format msgid "Max stage out streams: %i" msgstr "Потоки отгрузки (верхний предел): %i" #: src/hed/libs/compute/ExecutionTarget.cpp:345 #, c-format msgid "Scheduling policy: %s" msgstr "Правила планировки: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:346 #, c-format msgid "Max memory: %i" msgstr "МакÑ. 
памÑть: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:347
#, c-format
msgid "Max virtual memory: %i"
msgstr "Предел виртуальной памяти: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:348
#, c-format
msgid "Max disk space: %i"
msgstr "Предел дискового пространства: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:349
#, c-format
msgid "Default Storage Service: %s"
msgstr "Хранилище по умолчанию: %s"

#: src/hed/libs/compute/ExecutionTarget.cpp:350
msgid "Supports preemption"
msgstr "Поддержка упреждения"

#: src/hed/libs/compute/ExecutionTarget.cpp:351
msgid "Doesn't support preemption"
msgstr "Упреждение не поддерживается"

#: src/hed/libs/compute/ExecutionTarget.cpp:352
#, c-format
msgid "Total jobs: %i"
msgstr "Всего задач: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:353
#, c-format
msgid "Running jobs: %i"
msgstr "Задачи в счёте: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:354
#, c-format
msgid "Local running jobs: %i"
msgstr "Внутренние задачи в счёте: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:355
#, c-format
msgid "Waiting jobs: %i"
msgstr "Задачи в очереди: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:356
#, c-format
msgid "Local waiting jobs: %i"
msgstr "Внутренние задачи в очереди: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:357
#, c-format
msgid "Suspended jobs: %i"
msgstr "Приостановленные задачи: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:358
#, c-format
msgid "Local suspended jobs: %i"
msgstr "Внутренние приостановленные задачи: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:359
#, c-format
msgid "Staging jobs: %i"
msgstr "Задачи, выполняющие размещение данных: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:360
#, c-format
msgid "Pre-LRMS waiting jobs: %i"
msgstr "Задачи в очереди до СУПО: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:361
#, c-format
msgid "Estimated average waiting time: %s"
msgstr "Оценка усреднённого времени ожидания: %s"

#: src/hed/libs/compute/ExecutionTarget.cpp:362
#, c-format
msgid "Estimated worst waiting time: %s"
msgstr "Оценка худшего времени ожидания: %s"

#: src/hed/libs/compute/ExecutionTarget.cpp:363
#, c-format
msgid "Free slots: %i"
msgstr "Свободные ядра: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:365
msgid "Free slots grouped according to time limits (limit: free slots):"
msgstr ""
"Доступные места сгруппированы по предельному времени (предел: доступные "
"места):"

#: src/hed/libs/compute/ExecutionTarget.cpp:368
#, c-format
msgid " %s: %i"
msgstr " %s: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:369
#, c-format
msgid " unspecified: %i"
msgstr " неопределённых: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:372
#, c-format
msgid "Used slots: %i"
msgstr "Использованные ядра: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:373
#, c-format
msgid "Requested slots: %i"
msgstr "Запрошено сегментов ядер: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:374
#, c-format
msgid "Reservation policy: %s"
msgstr "Политика бронирования: %s"

#: src/hed/libs/compute/ExecutionTarget.cpp:381
#, c-format
msgid "Resource manager: %s"
msgstr "Система управления: %s"

#: src/hed/libs/compute/ExecutionTarget.cpp:383
#, c-format
msgid " (%s)"
msgstr " (%s)"

#: src/hed/libs/compute/ExecutionTarget.cpp:387
#, c-format
msgid "Total physical CPUs: %i"
msgstr "Общее количество физических процессоров: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:388
#, c-format
msgid "Total logical CPUs: %i"
msgstr "Общее количество логических процессоров: %i"

#: src/hed/libs/compute/ExecutionTarget.cpp:389
#, c-format
msgid "Total slots: %i"
msgstr
"Общее количеÑтво Ñдер: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:390 msgid "Supports advance reservations" msgstr "Поддержка предварительного бронированиÑ" #: src/hed/libs/compute/ExecutionTarget.cpp:391 msgid "Doesn't support advance reservations" msgstr "Ðет поддержки предварительного бронированиÑ" #: src/hed/libs/compute/ExecutionTarget.cpp:392 msgid "Supports bulk submission" msgstr "Поддерживает групповую заÑылку" #: src/hed/libs/compute/ExecutionTarget.cpp:393 msgid "Doesn't support bulk Submission" msgstr "Ðе поддерживает групповую заÑылку" #: src/hed/libs/compute/ExecutionTarget.cpp:394 msgid "Homogeneous resource" msgstr "Однородный реÑурÑ" #: src/hed/libs/compute/ExecutionTarget.cpp:395 msgid "Non-homogeneous resource" msgstr "Ðеоднородный реÑурÑ" #: src/hed/libs/compute/ExecutionTarget.cpp:397 msgid "Network information:" msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ Ñети:" #: src/hed/libs/compute/ExecutionTarget.cpp:402 msgid "Working area is shared among jobs" msgstr "Рабочее проÑтранÑтво иÑпользуетÑÑ Ñ€Ð°Ð·Ð½Ñ‹Ð¼Ð¸ задачами" #: src/hed/libs/compute/ExecutionTarget.cpp:403 msgid "Working area is not shared among jobs" msgstr "Рабочее проÑтранÑтво иÑпользуетÑÑ Ð¾Ð´Ð½Ð¾Ð¹ задачей" #: src/hed/libs/compute/ExecutionTarget.cpp:404 #, c-format msgid "Working area total size: %i GB" msgstr "Общий объём рабочего проÑтранÑтва: %i GB" #: src/hed/libs/compute/ExecutionTarget.cpp:405 #, c-format msgid "Working area free size: %i GB" msgstr "Свободное рабочее проÑтранÑтво: %i GB" #: src/hed/libs/compute/ExecutionTarget.cpp:406 #, c-format msgid "Working area life time: %s" msgstr "Ð’Ñ€ÐµÐ¼Ñ Ð¶Ð¸Ð·Ð½Ð¸ рабочего проÑтранÑтва: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:407 #, c-format msgid "Cache area total size: %i GB" msgstr "Общий объём проÑтранÑтва кÑша: %i GB" #: src/hed/libs/compute/ExecutionTarget.cpp:408 #, c-format msgid "Cache area free size: %i GB" msgstr "Свободное проÑтранÑтво кÑша: %i GB" #: src/hed/libs/compute/ExecutionTarget.cpp:414 #, c-format msgid "Platform: %s" msgstr "Платформа: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:415 msgid "Execution environment supports inbound connections" msgstr "Среда иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÑ‚ входÑщие ÑоединениÑ" #: src/hed/libs/compute/ExecutionTarget.cpp:416 msgid "Execution environment does not support inbound connections" msgstr "Среда иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½Ðµ поддерживает входÑщие ÑоединениÑ" #: src/hed/libs/compute/ExecutionTarget.cpp:417 msgid "Execution environment supports outbound connections" msgstr "Среда иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÑ‚ иÑходÑщие ÑоединениÑ" #: src/hed/libs/compute/ExecutionTarget.cpp:418 msgid "Execution environment does not support outbound connections" msgstr "Среда иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½Ðµ поддерживает иÑходÑщие ÑоединениÑ" #: src/hed/libs/compute/ExecutionTarget.cpp:419 msgid "Execution environment is a virtual machine" msgstr "Ð Ð°Ð±Ð¾Ñ‡Ð°Ñ Ñреда - Ð²Ð¸Ñ€Ñ‚ÑƒÐ°Ð»ÑŒÐ½Ð°Ñ Ð¼Ð°ÑˆÐ¸Ð½Ð°" #: src/hed/libs/compute/ExecutionTarget.cpp:420 msgid "Execution environment is a physical machine" msgstr "Ð Ð°Ð±Ð¾Ñ‡Ð°Ñ Ñреда - Ñ€ÐµÐ°Ð»ÑŒÐ½Ð°Ñ Ð¼Ð°ÑˆÐ¸Ð½Ð°" #: src/hed/libs/compute/ExecutionTarget.cpp:421 #, c-format msgid "CPU vendor: %s" msgstr "Производитель процеÑÑора: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:422 #, c-format msgid "CPU model: %s" msgstr "Модель процеÑÑора: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:423 #, c-format msgid "CPU version: %s" msgstr "ВерÑÐ¸Ñ Ð¿Ñ€Ð¾Ñ†ÐµÑÑора: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:424 #, c-format msgid "CPU clock speed: %i" 
msgstr "Ð¢Ð°ÐºÑ‚Ð¾Ð²Ð°Ñ Ñ‡Ð°Ñтота процеÑÑора: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:425 #, c-format msgid "Main memory size: %i" msgstr "Объём оÑновной памÑти: %i" #: src/hed/libs/compute/ExecutionTarget.cpp:426 #, c-format msgid "OS family: %s" msgstr "СемейÑтво ОС: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:427 #, c-format msgid "OS name: %s" msgstr "Ðазвание ОС: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:428 #, c-format msgid "OS version: %s" msgstr "ВерÑÐ¸Ñ ÐžÐ¡: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:435 msgid "Computing service:" msgstr "ВычиÑлительный ÑервиÑ:" #: src/hed/libs/compute/ExecutionTarget.cpp:459 #, c-format msgid "%d Endpoints" msgstr "%d точки входа" #: src/hed/libs/compute/ExecutionTarget.cpp:464 msgid "Endpoint Information:" msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ точке входа:" #: src/hed/libs/compute/ExecutionTarget.cpp:476 #, c-format msgid "%d Batch Systems" msgstr "%d ÑиÑтемы ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð¿Ð°ÐºÐµÑ‚Ð½Ð¾Ð¹ обработкой" #: src/hed/libs/compute/ExecutionTarget.cpp:481 msgid "Batch System Information:" msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ СУПО:" #: src/hed/libs/compute/ExecutionTarget.cpp:487 msgid "Installed application environments:" msgstr "УÑтановленные рабочие Ñреды:" #: src/hed/libs/compute/ExecutionTarget.cpp:500 #, c-format msgid "%d Shares" msgstr "%d СовмеÑтные реÑурÑÑ‹" #: src/hed/libs/compute/ExecutionTarget.cpp:505 msgid "Share Information:" msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ ÑовмеÑтном реÑурÑе:" #: src/hed/libs/compute/ExecutionTarget.cpp:511 #, c-format msgid "%d mapping policies" msgstr "%d правила приÑвоениÑ" #: src/hed/libs/compute/ExecutionTarget.cpp:515 msgid "Mapping policy:" msgstr "Правило приÑвоениÑ:" #: src/hed/libs/compute/ExecutionTarget.cpp:531 #, c-format msgid "Execution Target on Computing Service: %s" msgstr "ИÑполнÑющий реÑÑƒÑ€Ñ Ð²Ñ‹Ñ‡Ð¸Ñлительного ÑервиÑа: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:533 #, c-format msgid " Computing endpoint URL: %s" msgstr " URL точки входа Ð´Ð»Ñ Ð²Ñ‹Ñ‡Ð¸Ñлений: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:535 #, c-format msgid " Computing endpoint interface name: %s" msgstr " Ðазвание интерфейÑа точки входа Ð´Ð»Ñ Ð²Ñ‹Ñ‡Ð¸Ñлений: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:537 #: src/hed/libs/compute/Job.cpp:575 #, c-format msgid " Queue: %s" msgstr " Очередь: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:540 #, c-format msgid " Mapping queue: %s" msgstr " Очередь приÑвоениÑ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:543 #, c-format msgid " Health state: %s" msgstr " СоÑтоÑние здоровьÑ: %s" #: src/hed/libs/compute/ExecutionTarget.cpp:548 msgid "Service information:" msgstr "Ð¡Ð²ÐµÐ´ÐµÐ½Ð¸Ñ Ð¾ Ñлужбе:" #: src/hed/libs/compute/ExecutionTarget.cpp:553 msgid " Installed application environments:" msgstr " УÑтановленные рабочие Ñреды:" #: src/hed/libs/compute/ExecutionTarget.cpp:560 msgid "Batch system information:" msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ СУПО:" #: src/hed/libs/compute/ExecutionTarget.cpp:563 msgid "Queue information:" msgstr "Ð¡Ð²ÐµÐ´ÐµÐ½Ð¸Ñ Ð¾Ð± очереди:" #: src/hed/libs/compute/ExecutionTarget.cpp:570 msgid " Benchmark information:" msgstr " Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾Ð± Ñталонных теÑтах:" #: src/hed/libs/compute/GLUE2.cpp:53 msgid "The Service doesn't advertise its Type." msgstr "Служба не Ñообщает о Ñвоём типе." #: src/hed/libs/compute/GLUE2.cpp:58 msgid "The ComputingService doesn't advertise its Quality Level." msgstr "Служба ComputingService не Ñообщает о Ñвоём уровне качеÑтва." #: src/hed/libs/compute/GLUE2.cpp:99 msgid "The ComputingEndpoint has no URL." 
msgstr "У ComputingEndpoint отÑутÑтвует URL." #: src/hed/libs/compute/GLUE2.cpp:104 msgid "The Service advertises no Health State." msgstr "Служба не предоÑтавлÑет информации о ÑоÑтоÑнии здоровьÑ." #: src/hed/libs/compute/GLUE2.cpp:117 msgid "The ComputingEndpoint doesn't advertise its Quality Level." msgstr "Служба ComputingEndpoint не Ñообщает о Ñвоём уровне качеÑтва." #: src/hed/libs/compute/GLUE2.cpp:128 msgid "The ComputingService doesn't advertise its Interface." msgstr "Служба ComputingService не Ñообщает о Ñвоём интерфейÑе." #: src/hed/libs/compute/GLUE2.cpp:160 msgid "The ComputingEndpoint doesn't advertise its Serving State." msgstr "Служба ComputingEndpoint не Ñообщает о Ñвоём ÑоÑтоÑнии обÑлуживаниÑ." #: src/hed/libs/compute/GLUE2.cpp:247 #, c-format msgid "" "The \"FreeSlotsWithDuration\" attribute published by \"%s\" is wrongly " "formatted. Ignoring it." msgstr "" "Ðеверно отформатирован атрибут \"FreeSlotsWithDuration\", публикуемый \"%s" "\", - игнорируетÑÑ." #: src/hed/libs/compute/GLUE2.cpp:420 #, c-format msgid "" "Couldn't parse benchmark XML:\n" "%s" msgstr "" "Ðевозможно разобрать Ñталонный XML:\n" "%s" #: src/hed/libs/compute/Job.cpp:324 msgid "Unable to detect format of job record." msgstr "Ðевозможно определить формат учётной запиÑи о задаче." #: src/hed/libs/compute/Job.cpp:545 #, c-format msgid "Job: %s" msgstr "Задача: %s" #: src/hed/libs/compute/Job.cpp:547 #, c-format msgid " Name: %s" msgstr " ИмÑ: %s" #: src/hed/libs/compute/Job.cpp:548 #, c-format msgid " State: %s" msgstr " СоÑтоÑние: %s" #: src/hed/libs/compute/Job.cpp:551 #, c-format msgid " Specific state: %s" msgstr " СпецифичеÑкое ÑоÑтоÑние: %s" #: src/hed/libs/compute/Job.cpp:555 src/hed/libs/compute/Job.cpp:579 #, c-format msgid " Waiting Position: %d" msgstr " Положение в очереди: %d" #: src/hed/libs/compute/Job.cpp:559 #, c-format msgid " Exit Code: %d" msgstr " Код выхода: %d" #: src/hed/libs/compute/Job.cpp:563 #, c-format msgid " Job Error: %s" msgstr " Ошибка задачи: %s" #: src/hed/libs/compute/Job.cpp:568 #, c-format msgid " Owner: %s" msgstr " Владелец: %s" #: src/hed/libs/compute/Job.cpp:572 #, c-format msgid " Other Messages: %s" msgstr " Другие ÑообщениÑ: %s" #: src/hed/libs/compute/Job.cpp:577 #, c-format msgid " Requested Slots: %d" msgstr " Запрошено ваканÑий: %i" #: src/hed/libs/compute/Job.cpp:582 #, c-format msgid " Stdin: %s" msgstr " Стандартный вход: %s" #: src/hed/libs/compute/Job.cpp:584 #, c-format msgid " Stdout: %s" msgstr " Стандартный выход: %s" #: src/hed/libs/compute/Job.cpp:586 #, c-format msgid " Stderr: %s" msgstr " Ð¡Ñ‚Ð°Ð½Ð´Ð°Ñ€Ñ‚Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ°: %s" #: src/hed/libs/compute/Job.cpp:588 #, c-format msgid " Computing Service Log Directory: %s" msgstr " Каталог, Ñодержащий журнальную запиÑÑŒ вычиÑлительного ÑервиÑа: %s" #: src/hed/libs/compute/Job.cpp:591 #, c-format msgid " Submitted: %s" msgstr " ЗаÑлана: %s" #: src/hed/libs/compute/Job.cpp:594 #, c-format msgid " End Time: %s" msgstr " Ð’Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ: %s" #: src/hed/libs/compute/Job.cpp:597 #, c-format msgid " Submitted from: %s" msgstr " ЗаÑылающий клиент: %s" #: src/hed/libs/compute/Job.cpp:600 #, c-format msgid " Submitting client: %s" msgstr " ВерÑÐ¸Ñ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ð°: %s" #: src/hed/libs/compute/Job.cpp:603 #, c-format msgid " Requested CPU Time: %s" msgstr " Запрошенное процеÑÑорное времÑ: %s" #: src/hed/libs/compute/Job.cpp:608 #, c-format msgid " Used CPU Time: %s (%s per slot)" msgstr " ИÑпользованное процеÑÑорное времÑ: %s (%s на Ñлот)" #: src/hed/libs/compute/Job.cpp:612 #, c-format msgid " 
Used CPU Time: %s" msgstr " ИÑпользованное процеÑÑорное времÑ: %s" #: src/hed/libs/compute/Job.cpp:618 #, c-format msgid " Used Wall Time: %s (%s per slot)" msgstr " ИÑпользованное времÑ: %s (%s на Ñлот)" #: src/hed/libs/compute/Job.cpp:622 #, c-format msgid " Used Wall Time: %s" msgstr " ИÑпользованное времÑ: %s" #: src/hed/libs/compute/Job.cpp:627 #, c-format msgid " Used Memory: %d" msgstr " ИÑпользование ОЗУ: %d" #: src/hed/libs/compute/Job.cpp:631 #, c-format msgid " Results were deleted: %s" msgstr " Результаты были удалены: %s" #: src/hed/libs/compute/Job.cpp:632 #, c-format msgid " Results must be retrieved before: %s" msgstr " Результаты должны быть воÑтребованы до: %s" #: src/hed/libs/compute/Job.cpp:636 #, c-format msgid " Proxy valid until: %s" msgstr " ДоверенноÑть дейÑтвительна до: %s" #: src/hed/libs/compute/Job.cpp:640 #, c-format msgid " Entry valid from: %s" msgstr " ЗапиÑÑŒ дейÑтвительна Ñ: %s" #: src/hed/libs/compute/Job.cpp:643 #, c-format msgid " Entry valid for: %s" msgstr " ЗапиÑÑŒ дейÑтвительна на: %s" #: src/hed/libs/compute/Job.cpp:647 msgid " Old job IDs:" msgstr " Старый Ñрлык задачи:" #: src/hed/libs/compute/Job.cpp:655 #, c-format msgid " ID on service: %s" msgstr " ID ÑервиÑа: %s" #: src/hed/libs/compute/Job.cpp:656 #, c-format msgid " Service information URL: %s (%s)" msgstr " URL информации о ÑервиÑе: %s (%s)" #: src/hed/libs/compute/Job.cpp:657 #, c-format msgid " Job status URL: %s (%s)" msgstr " URL ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: %s (%s)" #: src/hed/libs/compute/Job.cpp:658 #, c-format msgid " Job management URL: %s (%s)" msgstr " URL ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡ÐµÐ¹: %s (%s)" #: src/hed/libs/compute/Job.cpp:659 #, c-format msgid " Stagein directory URL: %s" msgstr " URL каталога Ð´Ð»Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ¸: %s" #: src/hed/libs/compute/Job.cpp:660 #, c-format msgid " Stageout directory URL: %s" msgstr " URL каталога Ð´Ð»Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ·ÐºÐ¸: %s" #: src/hed/libs/compute/Job.cpp:661 #, c-format msgid " Session directory URL: %s" msgstr " URL каталога Грид-ÑеÑии: %s" #: src/hed/libs/compute/Job.cpp:663 msgid " Delegation IDs:" msgstr " Идентификаторы делегированиÑ:" #: src/hed/libs/compute/Job.cpp:845 #, c-format msgid "Unable to handle job (%s), no interface specified." msgstr "Ðевозможно обработать задачу (%s), не указан интерфейÑ." #: src/hed/libs/compute/Job.cpp:850 #, c-format msgid "" "Unable to handle job (%s), no plugin associated with the specified interface " "(%s)" msgstr "" "Ðевозможно обработать задачу (%s), Ð´Ð»Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ð¾Ð³Ð¾ интерфейÑа (%s) нету " "подключаемых модулей" #: src/hed/libs/compute/Job.cpp:872 #, c-format msgid "Invalid download destination path specified (%s)" msgstr "Указан неверный путь к каталогу загрузки (%s)" #: src/hed/libs/compute/Job.cpp:877 #, c-format msgid "" "Unable to download job (%s), no JobControllerPlugin plugin was set to handle " "the job." msgstr "" "Ðевозможно загрузить задачу (%s), не был задан модуль JobControllerPlugin " "Ð´Ð»Ñ Ñ€Ð°Ð±Ð¾Ñ‚Ñ‹ Ñ Ð·Ð°Ð´Ð°Ñ‡ÐµÐ¹." 
#: src/hed/libs/compute/Job.cpp:881
#, c-format
msgid "Downloading job: %s"
msgstr "Загружается задача: %s"

#: src/hed/libs/compute/Job.cpp:885
#, c-format
msgid ""
"Cant retrieve job files for job (%s) - unable to determine URL of stage out "
"directory"
msgstr ""
"Не удалось получить выходные файлы задачи (%s) - невозможно определить URL "
"для записи"

#: src/hed/libs/compute/Job.cpp:890
#, c-format
msgid "Invalid stage out path specified (%s)"
msgstr "Указан неверный путь для отгрузки (%s)"

#: src/hed/libs/compute/Job.cpp:897
#, c-format
msgid "%s directory exist! Skipping job."
msgstr "Каталог %s уже существует! Задача пропускается."

#: src/hed/libs/compute/Job.cpp:903
#, c-format
msgid "Unable to retrieve list of job files to download for job %s"
msgstr "Невозможно получить список загружаемых файлов для задачи %s"

#: src/hed/libs/compute/Job.cpp:908
#, c-format
msgid "No files to retrieve for job %s"
msgstr "Отсутствуют загружаемые файлы для задачи %s"

#: src/hed/libs/compute/Job.cpp:914
#, c-format
msgid "Failed to create directory %s! Skipping job."
msgstr "Сбой создания каталога %s! Задача пропускается."

#: src/hed/libs/compute/Job.cpp:927
#, c-format
msgid "Failed downloading %s to %s, destination already exist"
msgstr "Ошибка загрузки %s в %s, файл-приёмник уже существует"

#: src/hed/libs/compute/Job.cpp:933
#, c-format
msgid "Failed downloading %s to %s, unable to remove existing destination"
msgstr "Ошибка загрузки %s в %s, сбой удаления существующего файла-приёмника"

#: src/hed/libs/compute/Job.cpp:939
#, c-format
msgid "Failed downloading %s to %s"
msgstr "Ошибка загрузки %s в %s"

#: src/hed/libs/compute/Job.cpp:952
#, c-format
msgid "Unable to initialize handler for %s"
msgstr "Невозможно инициализировать обработчик для %s"

#: src/hed/libs/compute/Job.cpp:957
#, c-format
msgid "Unable to list files at %s"
msgstr "Невозможно перечислить файлы на %s"

#: src/hed/libs/compute/Job.cpp:999
msgid "Now copying (from -> to)"
msgstr "Производится копирование (из -> в)"

#: src/hed/libs/compute/Job.cpp:1000
#, c-format
msgid " %s -> %s"
msgstr " %s -> %s"

#: src/hed/libs/compute/Job.cpp:1015
#, c-format
msgid "Unable to initialise connection to source: %s"
msgstr "Невозможно инициализировать соединение с источником: %s"

#: src/hed/libs/compute/Job.cpp:1026
#, c-format
msgid "Unable to initialise connection to destination: %s"
msgstr "Невозможно инициализировать соединение с назначением: %s"

#: src/hed/libs/compute/Job.cpp:1045
#, c-format
msgid "File download failed: %s"
msgstr "Невозможно загрузить файл: %s"

#: src/hed/libs/compute/Job.cpp:1084 src/hed/libs/compute/Job.cpp:1113
#: src/hed/libs/compute/Job.cpp:1145 src/hed/libs/compute/Job.cpp:1178
#, c-format
msgid "Waiting for lock on file %s"
msgstr "Ожидание разблокирования файла %s"

#: src/hed/libs/compute/JobControllerPlugin.cpp:101
#, c-format
msgid "JobControllerPlugin plugin \"%s\" not found."
msgstr "Подключаемый модуль JobControllerPlugin \"%s\" не обнаружен."
#: src/hed/libs/compute/JobControllerPlugin.cpp:110 #, c-format msgid "JobControllerPlugin %s could not be created" msgstr "Подключаемый модуль JobControllerPlugin %s не может быть Ñоздан" #: src/hed/libs/compute/JobControllerPlugin.cpp:115 #, c-format msgid "Loaded JobControllerPlugin %s" msgstr "Подгружен JobControllerPlugin %s" #: src/hed/libs/compute/JobDescription.cpp:22 #, c-format msgid ": %d" msgstr ": %d" #: src/hed/libs/compute/JobDescription.cpp:24 #, c-format msgid ": %s" msgstr ": %s" #: src/hed/libs/compute/JobDescription.cpp:138 msgid " --- DRY RUN --- " msgstr " --- ХОЛОСТÐЯ ПРОГОÐКР--- " #: src/hed/libs/compute/JobDescription.cpp:148 #, c-format msgid " Annotation: %s" msgstr " ÐннотациÑ: %s" #: src/hed/libs/compute/JobDescription.cpp:154 #, c-format msgid " Old activity ID: %s" msgstr " Старый Ñрлык заданиÑ: %s" #: src/hed/libs/compute/JobDescription.cpp:160 #, c-format msgid " Argument: %s" msgstr " Argument: %s" #: src/hed/libs/compute/JobDescription.cpp:171 #, c-format msgid " RemoteLogging (optional): %s (%s)" msgstr " Удалённое журналирование (по выбору): %s (%s)" #: src/hed/libs/compute/JobDescription.cpp:174 #, c-format msgid " RemoteLogging: %s (%s)" msgstr " Удалённое журналирование: %s (%s)" #: src/hed/libs/compute/JobDescription.cpp:182 #, c-format msgid " Environment.name: %s" msgstr " Environment.name: %s" #: src/hed/libs/compute/JobDescription.cpp:183 #, c-format msgid " Environment: %s" msgstr " Environment: %s" #: src/hed/libs/compute/JobDescription.cpp:196 #, c-format msgid " PreExecutable.Argument: %s" msgstr " PreExecutable.Argument: %s" #: src/hed/libs/compute/JobDescription.cpp:199 #: src/hed/libs/compute/JobDescription.cpp:217 #, c-format msgid " Exit code for successful execution: %d" msgstr " Код выхода уÑпешного иÑполнениÑ: %d" #: src/hed/libs/compute/JobDescription.cpp:202 #: src/hed/libs/compute/JobDescription.cpp:220 msgid " No exit code for successful execution specified." msgstr " Код выхода Ð´Ð»Ñ ÑƒÑпешного иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½Ðµ указан." 
#: src/hed/libs/compute/JobDescription.cpp:214 #, c-format msgid " PostExecutable.Argument: %s" msgstr " PostExecutable.Argument: %s" #: src/hed/libs/compute/JobDescription.cpp:230 #, c-format msgid " Access control: %s" msgstr " Контроль доÑтупа: %s" #: src/hed/libs/compute/JobDescription.cpp:234 #, c-format msgid " Processing start time: %s" msgstr " Ð’Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð° обработки: %s" #: src/hed/libs/compute/JobDescription.cpp:237 msgid " Notify:" msgstr " Уведомить:" #: src/hed/libs/compute/JobDescription.cpp:251 #, c-format msgid " Credential service: %s" msgstr " Служба параметров доÑтупа: %s" #: src/hed/libs/compute/JobDescription.cpp:261 msgid " Operating system requirements:" msgstr " Ð¢Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ Ðº операционной ÑиÑтеме:" #: src/hed/libs/compute/JobDescription.cpp:279 msgid " Computing endpoint requirements:" msgstr " Ð¢Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ Ðº вычиÑлительному реÑурÑу:" #: src/hed/libs/compute/JobDescription.cpp:292 msgid " Node access: inbound" msgstr " ДоÑтуп к узлу: входÑщий" #: src/hed/libs/compute/JobDescription.cpp:295 msgid " Node access: outbound" msgstr " ДоÑтуп к узлу: иÑходÑщий" #: src/hed/libs/compute/JobDescription.cpp:298 msgid " Node access: inbound and outbound" msgstr " ДоÑтуп к узлу: входÑщий и иÑходÑщий" #: src/hed/libs/compute/JobDescription.cpp:308 msgid " Job requires exclusive execution" msgstr " Задача требует ÑкÑклюзивного иÑполнениÑ" #: src/hed/libs/compute/JobDescription.cpp:311 msgid " Job does not require exclusive execution" msgstr " Задача не требует ÑкÑклюзивного иÑполнениÑ" #: src/hed/libs/compute/JobDescription.cpp:316 msgid " Run time environment requirements:" msgstr " Ð¢Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ Ñреды выполнениÑ:" #: src/hed/libs/compute/JobDescription.cpp:328 msgid " Inputfile element:" msgstr " Элемент Inputfile:" #: src/hed/libs/compute/JobDescription.cpp:329 #: src/hed/libs/compute/JobDescription.cpp:351 #, c-format msgid " Name: %s" msgstr " Name: %s" #: src/hed/libs/compute/JobDescription.cpp:331 msgid " Is executable: true" msgstr " ИÑполнÑемый: верно" #: src/hed/libs/compute/JobDescription.cpp:335 #, c-format msgid " Sources: %s" msgstr " ИÑточники: %s" #: src/hed/libs/compute/JobDescription.cpp:337 #, c-format msgid " Sources.DelegationID: %s" msgstr " Sources.DelegationID: %s" #: src/hed/libs/compute/JobDescription.cpp:341 #, c-format msgid " Sources.Options: %s = %s" msgstr " Sources.Options: %s = %s" #: src/hed/libs/compute/JobDescription.cpp:350 msgid " Outputfile element:" msgstr " Элемент Outputfile:" #: src/hed/libs/compute/JobDescription.cpp:354 #, c-format msgid " Targets: %s" msgstr " ÐазначениÑ: %s" #: src/hed/libs/compute/JobDescription.cpp:356 #, c-format msgid " Targets.DelegationID: %s" msgstr " Targets.DelegationID: %s" #: src/hed/libs/compute/JobDescription.cpp:360 #, c-format msgid " Targets.Options: %s = %s" msgstr " Targets.Options: %s = %s" #: src/hed/libs/compute/JobDescription.cpp:367 #, c-format msgid " DelegationID element: %s" msgstr " Элемент DelegationID: %s" #: src/hed/libs/compute/JobDescription.cpp:374 #, c-format msgid " Other attributes: [%s], %s" msgstr " Другие атрибуты: [%s], %s" #: src/hed/libs/compute/JobDescription.cpp:440 msgid "Empty job description source string" msgstr "ПуÑтое иÑходное опиÑание задачи" #: src/hed/libs/compute/JobDescription.cpp:473 msgid "No job description parsers available" msgstr "ОтÑутÑтвуют разборщики опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ð½Ð¸Ñ" #: src/hed/libs/compute/JobDescription.cpp:475 #, c-format msgid "" "No job description parsers suitable for handling '%s' language are available" 
msgstr "Ðет разборщиков опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸, подходÑщих Ð´Ð»Ñ Ð¾Ð±Ñ€Ð°Ð±Ð¾Ñ‚ÐºÐ¸ Ñзыка '%s'" #: src/hed/libs/compute/JobDescription.cpp:483 #, c-format msgid "%s parsing error" msgstr "%s ошибка разборки" #: src/hed/libs/compute/JobDescription.cpp:499 msgid "No job description parser was able to interpret job description" msgstr "Ðи один разборщик не Ñмог обработать опиÑание задачи" #: src/hed/libs/compute/JobDescription.cpp:509 msgid "" "Job description language is not specified, unable to output description." msgstr "Язык опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ не указан, невозможно вывеÑти опиÑание." #: src/hed/libs/compute/JobDescription.cpp:521 #, c-format msgid "Generating %s job description output" msgstr "СоздаётÑÑ Ð¾Ð¿Ð¸Ñание задачи в формате %s" #: src/hed/libs/compute/JobDescription.cpp:537 #, c-format msgid "Language (%s) not recognized by any job description parsers." msgstr "Язык (%s) не опознан ни одним из модулей разборки опиÑаний задач." #: src/hed/libs/compute/JobDescription.cpp:550 #, c-format msgid "Two input files have identical name '%s'." msgstr "Два входных файла Ñ Ð¸Ð´ÐµÐ½Ñ‚Ð¸Ñ‡Ð½Ñ‹Ð¼Ð¸ именами '%s'." #: src/hed/libs/compute/JobDescription.cpp:569 #: src/hed/libs/compute/JobDescription.cpp:582 #, c-format msgid "Cannot stat local input file '%s'" msgstr "Ðевозможно определить ÑÑ‚Ð°Ñ‚ÑƒÑ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð³Ð¾ входного файла '%s'" #: src/hed/libs/compute/JobDescription.cpp:602 #, c-format msgid "Cannot find local input file '%s' (%s)" msgstr "Ðевозможно обнаружить локальный входной файл '%s' (%s)" #: src/hed/libs/compute/JobDescription.cpp:644 msgid "Unable to select runtime environment" msgstr "Ðевозможно выбрать Ñреду выполнениÑ" #: src/hed/libs/compute/JobDescription.cpp:651 msgid "Unable to select middleware" msgstr "Ðевозможно выбрать подпрограммное обеÑпечение" #: src/hed/libs/compute/JobDescription.cpp:658 msgid "Unable to select operating system." msgstr "Ðевозможно выбрать операционную ÑиÑтему." #: src/hed/libs/compute/JobDescription.cpp:677 #, c-format msgid "No test-job with ID %d found." msgstr "ТеÑÑ‚Ð¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° под номером %d не найдена." #: src/hed/libs/compute/JobDescription.cpp:689 #, c-format msgid "Test was defined with ID %d, but some error occurred during parsing it." msgstr "" "ТеÑÑ‚ был Ñоздан Ñ Ð¸Ð´ÐµÐ½Ñ‚Ð¸Ñ„Ð¸ÐºÐ°Ñ‚Ð¾Ñ€Ð¾Ð¼ %d, но при обработке возникла ошибка." #: src/hed/libs/compute/JobDescription.cpp:693 #, c-format msgid "No jobdescription resulted at %d test" msgstr "Ð”Ð»Ñ Ñ‚ÐµÑта %d отÑутÑтвует опиÑание задачи" #: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:52 #, c-format msgid "JobDescriptionParserPlugin plugin \"%s\" not found." msgstr "Подключаемый модуль JobDescriptionParserPlugin \"%s\" не обнаружен." 
#: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:59
#, c-format
msgid "JobDescriptionParserPlugin %s could not be created"
msgstr "Подключаемый модуль JobDescriptionParserPlugin %s не может быть создан"

#: src/hed/libs/compute/JobDescriptionParserPlugin.cpp:64
#, c-format
msgid "Loaded JobDescriptionParserPlugin %s"
msgstr "Подгружен JobDescriptionParserPlugin %s"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:328
msgid "Unable to create temporary directory"
msgstr "Не удалось создать временный каталог"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:336
#, c-format
msgid "Unable to create data base environment (%s)"
msgstr "Не удалось создать окружение для базы данных (%s)"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:346
#: src/hed/libs/compute/JobInformationStorageBDB.cpp:350
#: src/hed/libs/compute/JobInformationStorageBDB.cpp:354
#, c-format
msgid "Unable to set duplicate flags for secondary key DB (%s)"
msgstr "Невозможно установить повторяющиеся метки для вторичной базы данных ключей (%s)"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:360
#, c-format
msgid "Unable to create job database (%s)"
msgstr "Не удалось создать базу данных задач (%s)"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:364
#, c-format
msgid "Unable to create DB for secondary name keys (%s)"
msgstr "Не удалось создать базу данных для вторичных ключей имён (%s)"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:368
#, c-format
msgid "Unable to create DB for secondary endpoint keys (%s)"
msgstr "Не удалось создать базу данных для вторичных ключей точек входа (%s)"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:372
#, c-format
msgid "Unable to create DB for secondary service info keys (%s)"
msgstr "Не удалось создать базу данных для вторичных ключей информации о службах (%s)"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:377
#: src/hed/libs/compute/JobInformationStorageBDB.cpp:381
#: src/hed/libs/compute/JobInformationStorageBDB.cpp:385
#, c-format
msgid "Unable to associate secondary DB with primary DB (%s)"
msgstr "Невозможно поставить в соответствие вторичную базу данных первичной (%s)"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:388
#, c-format
msgid "Job database created successfully (%s)"
msgstr "Успешно создана база данных задач (%s)"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:430
#, c-format
msgid "Error from BDB: %s: %s"
msgstr "Ошибка BDB: %s: %s"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:433
#, c-format
msgid "Error from BDB: %s"
msgstr "Ошибка BDB: %s"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:453
#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:221
#: src/hed/libs/compute/JobInformationStorageXML.cpp:27
#, c-format
msgid ""
"Job list file cannot be created: The parent directory (%s) doesn't exist."
msgstr "Файл списка задач не может быть создан: родительский каталог (%s) не существует."
#: src/hed/libs/compute/JobInformationStorageBDB.cpp:457
#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:225
#: src/hed/libs/compute/JobInformationStorageXML.cpp:31
#, c-format
msgid "Job list file cannot be created: %s is not a directory"
msgstr "Файл списка задач не может быть создан: %s не является каталогом"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:464
#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:232
#: src/hed/libs/compute/JobInformationStorageXML.cpp:38
#, c-format
msgid "Job list file (%s) is not a regular file"
msgstr "Список задач (%s) не является стандартным файлом"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:502
#: src/hed/libs/compute/JobInformationStorageBDB.cpp:561
#, c-format
msgid "Unable to write key/value pair to job database (%s): Key \"%s\""
msgstr "Невозможно записать пару ключ/значение в базу данных задач (%s): Ключ \"%s\""

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:728
#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:622
#: src/hed/libs/compute/JobInformationStorageXML.cpp:137
#, c-format
msgid "Unable to truncate job database (%s)"
msgstr "Не удалось укоротить базу данных задач (%s)"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:759
msgid ""
"ENOENT: The file or directory does not exist, Or a nonexistent re_source "
"file was specified."
msgstr "ENOENT: Файл или каталог не существуют, либо указан несуществующий файл re_source."

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:762
msgid ""
"DB_OLD_VERSION: The database cannot be opened without being first upgraded."
msgstr "DB_OLD_VERSION: База данных не может быть открыта без предварительного обновления версии."

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:765
msgid "EEXIST: DB_CREATE and DB_EXCL were specified and the database exists."
msgstr "EEXIST: были заданы DB_CREATE и DB_EXCL, и база данных существует."
#: src/hed/libs/compute/JobInformationStorageBDB.cpp:767
msgid "EINVAL"
msgstr "EINVAL"

#: src/hed/libs/compute/JobInformationStorageBDB.cpp:770
#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:655
#, c-format
msgid "Unable to determine error (%d)"
msgstr "Невозможно распознать ошибку (%d)"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:120
#, c-format
msgid "Unable to create data base (%s)"
msgstr "Не удалось создать базу данных (%s)"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:128
#, c-format
msgid "Unable to create jobs table in data base (%s)"
msgstr "Не удалось создать таблицу задач в базе данных (%s)"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:137
#, c-format
msgid "Unable to create jobs_new table in data base (%s)"
msgstr "Не удалось создать таблицу jobs_new в базе данных (%s)"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:143
#, c-format
msgid "Unable to transfer from jobs to jobs_new in data base (%s)"
msgstr "Не удалось перенести из таблицы jobs в jobs_new в базе данных (%s)"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:149
#, c-format
msgid "Unable to drop jobs in data base (%s)"
msgstr "Не удалось сбросить таблицу jobs в базе данных (%s)"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:155
#, c-format
msgid "Unable to rename jobs table in data base (%s)"
msgstr "Не удалось переименовать таблицу jobs в базе данных (%s)"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:165
#, c-format
msgid "Unable to create index for jobs table in data base (%s)"
msgstr "Не удалось создать индекс для таблицы задач в базе данных (%s)"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:173
#, c-format
msgid "Failed checking database (%s)"
msgstr "Сбой проверки базы данных (%s)"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:175
#, c-format
msgid "Job database connection established successfully (%s)"
msgstr "Связь с базой данных задач успешно установлена (%s)"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:197
#, c-format
msgid "Error from SQLite: %s: %s"
msgstr "Ошибка SQLite: %s: %s"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:200
#, c-format
msgid "Error from SQLite: %s"
msgstr "Ошибка SQLite: %s"

#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:362
#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:369
#: src/hed/libs/compute/JobInformationStorageSQLite.cpp:376
#, c-format
msgid "Unable to write records into job database (%s): Id \"%s\""
msgstr "Не удалось внести записи в базу данных задач (%s): Id \"%s\""

#: src/hed/libs/compute/JobInformationStorageXML.cpp:51
#: src/hed/libs/compute/JobInformationStorageXML.cpp:223
#: src/hed/libs/compute/JobInformationStorageXML.cpp:264
#, c-format
msgid "Waiting for lock on job list file %s"
msgstr "Ожидание разблокирования файла списка задач %s"

#: src/hed/libs/compute/JobInformationStorageXML.cpp:162
#, c-format
msgid "Will remove %s on service %s."
msgstr "Задача %s будет удалена с сервиса %s."
#: src/hed/libs/compute/JobSupervisor.cpp:36 msgid "Ignoring job, the job ID is empty" msgstr "Задача игнорируетÑÑ, так как её Ñрлык пуÑÑ‚" #: src/hed/libs/compute/JobSupervisor.cpp:41 #, c-format msgid "Ignoring job (%s), the management interface name is unknown" msgstr "ИгнорируетÑÑ Ð·Ð°Ð´Ð°Ñ‡Ð° (%s), отÑутÑтвует название интерфейÑа управлениÑ" #: src/hed/libs/compute/JobSupervisor.cpp:46 #, c-format msgid "Ignoring job (%s), the job management URL is unknown" msgstr "ИгнорируетÑÑ Ð·Ð°Ð´Ð°Ñ‡Ð° (%s), отÑутÑтвует URL интерфейÑа управлениÑ" #: src/hed/libs/compute/JobSupervisor.cpp:51 #, c-format msgid "Ignoring job (%s), the status interface name is unknown" msgstr "ИгнорируетÑÑ Ð·Ð°Ð´Ð°Ñ‡Ð° (%s), отÑутÑтвует название интерфейÑа ÑоÑтоÑниÑ" #: src/hed/libs/compute/JobSupervisor.cpp:56 #, c-format msgid "Ignoring job (%s), the job status URL is unknown" msgstr "ИгнорируетÑÑ Ð·Ð°Ð´Ð°Ñ‡Ð° (%s), отÑутÑтвует URL ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #: src/hed/libs/compute/JobSupervisor.cpp:65 #, c-format msgid "Ignoring job (%s), unable to load JobControllerPlugin for %s" msgstr "" "ИгнорируетÑÑ Ð·Ð°Ð´Ð°Ñ‡Ð° (%s), невозможно подгрузить JobControllerPlugin Ð´Ð»Ñ %s" #: src/hed/libs/compute/JobSupervisor.cpp:72 #, c-format msgid "" "Ignoring job (%s), already tried and were unable to load JobControllerPlugin" msgstr "" "ИгнорируетÑÑ Ð·Ð°Ð´Ð°Ñ‡Ð° (%s), Ð¿Ñ€ÐµÐ´Ñ‹Ð´ÑƒÑ‰Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° подгрузить JobControllerPlugin " "завершилаÑÑŒ неудачей" #: src/hed/libs/compute/JobSupervisor.cpp:385 #, c-format msgid "Job resubmission failed: Unable to load broker (%s)" msgstr "ПерезаÑылка задачи оборвана: Ðевозможно подгрузить планировщик (%s)" #: src/hed/libs/compute/JobSupervisor.cpp:400 msgid "Job resubmission aborted because no resource returned any information" msgstr "" "ПерезаÑылка задачи оборвана, Ñ‚.к. ни один из реÑурÑов не предоÑтавил " "информацию" #: src/hed/libs/compute/JobSupervisor.cpp:421 #, c-format msgid "Unable to resubmit job (%s), unable to parse obtained job description" msgstr "" "Ðе удалоÑÑŒ перезаÑлать задачу (%s), Ñ‚.к. невозможно разобрать полученное " "опиÑание задачи" #: src/hed/libs/compute/JobSupervisor.cpp:443 #, c-format msgid "" "Unable to resubmit job (%s), target information retrieval failed for target: " "%s" msgstr "" "Ðевозможно перезапуÑтить задачу (%s), Ñбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ о цели %s" #: src/hed/libs/compute/JobSupervisor.cpp:469 #, c-format msgid "Unable to resubmit job (%s), no targets applicable for submission" msgstr "Ðевозможно перезапуÑтить задачу (%s), нет подходÑщих целей" #: src/hed/libs/compute/JobSupervisor.cpp:504 #, c-format msgid "" "Unable to migrate job (%s), job description could not be retrieved remotely" msgstr "" "Ðевозможно мигрировать задачу (%s), опиÑание задачи не может быть извлечено " "Ñ ÑƒÐ´Ð°Ð»Ñ‘Ð½Ð½Ð¾Ð³Ð¾ иÑточника" #: src/hed/libs/compute/JobSupervisor.cpp:524 msgid "Job migration aborted, no resource returned any information" msgstr "" "Перенаправление задачи оборвано, Ñ‚.к. ни один из реÑурÑов не предоÑтавил " "информацию" #: src/hed/libs/compute/JobSupervisor.cpp:536 #, c-format msgid "Job migration aborted, unable to load broker (%s)" msgstr "" "Перенаправление задачи оборвано, невозможно подгрузить планировщик (%s)" #: src/hed/libs/compute/JobSupervisor.cpp:552 #, c-format msgid "Unable to migrate job (%s), unable to parse obtained job description" msgstr "" "Ðе удалоÑÑŒ перенаправить задачу (%s), Ñ‚.к. 
невозможно разобрать полученное " "опиÑание задачи" #: src/hed/libs/compute/JobSupervisor.cpp:573 #, c-format msgid "Unable to load submission plugin for %s interface" msgstr "" "Ðевозможно погрузить подключаемый модуль Ð´Ð»Ñ Ð·Ð°Ð¿ÑƒÑка задач через Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ %s" #: src/hed/libs/compute/JobSupervisor.cpp:583 #, c-format msgid "Job migration failed for job (%s), no applicable targets" msgstr "Ðе удалоÑÑŒ перенаправить задачу (%s), возможные Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¾Ñ‚ÑутÑтвуют" #: src/hed/libs/compute/Software.cpp:65 src/hed/libs/compute/Software.cpp:94 #: src/hed/libs/compute/Software.cpp:113 #, c-format msgid "%s > %s => false" msgstr "%s > %s => неверно" #: src/hed/libs/compute/Software.cpp:70 src/hed/libs/compute/Software.cpp:83 #: src/hed/libs/compute/Software.cpp:107 #, c-format msgid "%s > %s => true" msgstr "%s > %s => верно" #: src/hed/libs/compute/Software.cpp:90 src/hed/libs/compute/Software.cpp:102 #, c-format msgid "%s > %s => false: %s contains non numbers in the version part." msgstr "%s > %s => неверно: %s Ñодержит нецифровые Ñимволы в номере верÑии." #: src/hed/libs/compute/Software.cpp:199 src/hed/libs/compute/Software.cpp:210 #, c-format msgid "Requirement \"%s %s\" NOT satisfied." msgstr "Требование \"%s %s\" ÐЕ удовлетворено." #: src/hed/libs/compute/Software.cpp:205 #, c-format msgid "Requirement \"%s %s\" satisfied." msgstr "Требование \"%s %s\" удовлетворено." #: src/hed/libs/compute/Software.cpp:214 #, c-format msgid "Requirement \"%s %s\" satisfied by \"%s\"." msgstr "Требование \"%s %s\" удовлетворено \"%s\"." #: src/hed/libs/compute/Software.cpp:219 msgid "All requirements satisfied." msgstr "Ð’Ñе Ñ‚Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ ÑƒÐ´Ð¾Ð²Ð»ÐµÑ‚Ð²Ð¾Ñ€ÐµÐ½Ñ‹." #: src/hed/libs/compute/Submitter.cpp:83 #, c-format msgid "Trying to submit directly to endpoint (%s)" msgstr "Попытка заÑылки задачи напрÑмую к точке входа (%s)" #: src/hed/libs/compute/Submitter.cpp:88 #, c-format msgid "Interface (%s) specified, submitting only to that interface" msgstr "Задан Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ (%s), заÑылка производитÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ через него" #: src/hed/libs/compute/Submitter.cpp:106 msgid "Trying all available interfaces" msgstr "ПробуютÑÑ Ð²Ñе доÑтупные интерфейÑÑ‹" #: src/hed/libs/compute/Submitter.cpp:112 #, c-format msgid "Trying to submit endpoint (%s) using interface (%s) with plugin (%s)." msgstr "" "Попытка заÑылки на точку входа (%s) иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ (%s) Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ñ‹Ð¼ " "модулем (%s)." #: src/hed/libs/compute/Submitter.cpp:116 #, c-format msgid "" "Unable to load plugin (%s) for interface (%s) when trying to submit job " "description." msgstr "" "Ðевозможно подгрузить модуль (%s) Ð´Ð»Ñ Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñа (%s) при попытке заÑылки " "опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸." #: src/hed/libs/compute/Submitter.cpp:130 #, c-format msgid "No more interfaces to try for endpoint %s." msgstr "Опробованы вÑе интерфейÑÑ‹ Ð´Ð»Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ входа %s." #: src/hed/libs/compute/Submitter.cpp:336 #, c-format msgid "Target %s does not match requested interface(s)." msgstr "Ðазначение %s не ÑоответÑтвует запрошенному интерфейÑу." #: src/hed/libs/compute/SubmitterPlugin.cpp:64 msgid "No stagein URL is provided" msgstr "Ðе указан URL Ð´Ð»Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ¸" #: src/hed/libs/compute/SubmitterPlugin.cpp:83 #, c-format msgid "Failed uploading file %s to %s: %s" msgstr "Ðе удалоÑÑŒ отгрузить файл %s в %s: %s" #: src/hed/libs/compute/SubmitterPlugin.cpp:116 #, c-format msgid "Trying to migrate to %s: Migration to a %s interface is not supported." 
msgstr "Попытка миграции на %s: ÐœÐ¸Ð³Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð° Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ %s не поддерживаетÑÑ." #: src/hed/libs/compute/SubmitterPlugin.cpp:172 #, c-format msgid "SubmitterPlugin plugin \"%s\" not found." msgstr "Подключаемый модуль SubmitterPlugin \"%s\" не обнаружен." #: src/hed/libs/compute/SubmitterPlugin.cpp:182 #, c-format msgid "SubmitterPlugin %s could not be created" msgstr "Подключаемый модуль SubmitterPlugin %s не может быть Ñоздан" #: src/hed/libs/compute/SubmitterPlugin.cpp:187 #, c-format msgid "Loaded SubmitterPlugin %s" msgstr "Подгружен SubmitterPlugin %s" #: src/hed/libs/compute/examples/basic_job_submission.cpp:28 msgid "Invalid job description" msgstr "ÐедопуÑтимое опиÑание задачи" #: src/hed/libs/compute/examples/basic_job_submission.cpp:47 msgid "Failed to submit job" msgstr "Ошибка запуÑка задачи" #: src/hed/libs/compute/examples/basic_job_submission.cpp:54 #, c-format msgid "Failed to write to local job list %s" msgstr "Ошибка запиÑи в локальный файл ÑпиÑка задач %s" #: src/hed/libs/compute/test_jobdescription.cpp:20 msgid "[job description ...]" msgstr "[опиÑание задачи...]" #: src/hed/libs/compute/test_jobdescription.cpp:21 msgid "" "This tiny tool can be used for testing the JobDescription's conversion " "abilities." msgstr "" "Эта программулечка может быть иÑпользована Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ ÑпоÑобноÑтей " "Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ JobDescription." #: src/hed/libs/compute/test_jobdescription.cpp:23 msgid "" "The job description also can be a file or a string in ADL or XRSL format." msgstr "" "ОпиÑание задачи может быть также задано файлом или Ñтрокой в формате ADL или " "XRSL." #: src/hed/libs/compute/test_jobdescription.cpp:27 msgid "define the requested format (nordugrid:xrsl, emies:adl)" msgstr "укажите запрашиваемый формат (nordugrid:xrsl, emies:adl)" #: src/hed/libs/compute/test_jobdescription.cpp:28 msgid "format" msgstr "формат" #: src/hed/libs/compute/test_jobdescription.cpp:33 msgid "show the original job description" msgstr "показать изначальное опиÑание задачи" #: src/hed/libs/compute/test_jobdescription.cpp:43 #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:71 msgid "Use --help option for detailed usage information" msgstr "ИÑпользуйте опцию --help Ð´Ð»Ñ Ð¿Ð¾Ð´Ñ€Ð¾Ð±Ð½Ð¾Ð³Ð¾ опиÑаниÑ" #: src/hed/libs/compute/test_jobdescription.cpp:50 msgid " [ JobDescription tester ] " msgstr " [ теÑтировщик JobDescription ] " #: src/hed/libs/compute/test_jobdescription.cpp:74 msgid " [ Parsing the original text ] " msgstr " [ ОбрабатываетÑÑ Ð¸Ñходный текÑÑ‚ ] " #: src/hed/libs/compute/test_jobdescription.cpp:80 msgid "Unable to parse." msgstr "Ðе удалоÑÑŒ обработать." 
#: src/hed/libs/compute/test_jobdescription.cpp:89 msgid " [ emies:adl ] " msgstr " [ emies:adl ] " #: src/hed/libs/compute/test_jobdescription.cpp:91 msgid " [ nordugrid:xrsl ] " msgstr " [ nordugrid:xrsl ] " #: src/hed/libs/credential/ARCProxyUtil.cpp:134 msgid "VOMS command is empty" msgstr "ПуÑÑ‚Ð°Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð° VOMS" #: src/hed/libs/credential/ARCProxyUtil.cpp:254 #, c-format msgid "OpenSSL error -- %s" msgstr "Ошибка OpenSSL -- %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:255 #, c-format msgid "Library : %s" msgstr "Библиотека: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:256 #, c-format msgid "Function : %s" msgstr "ФункциÑ: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:257 #, c-format msgid "Reason : %s" msgstr "Причина: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:313 msgid "User interface error" msgstr "Ошибка интерфейÑа пользователÑ" #: src/hed/libs/credential/ARCProxyUtil.cpp:319 msgid "Aborted!" msgstr "ИÑполнение прервано!" #: src/hed/libs/credential/ARCProxyUtil.cpp:418 #: src/hed/libs/credential/ARCProxyUtil.cpp:1399 msgid "Failed to sign proxy" msgstr "Сбой подпиÑи доверенноÑти" #: src/hed/libs/credential/ARCProxyUtil.cpp:437 #: src/hed/libs/credential/Credential.cpp:878 #, c-format msgid "Error: can't open policy file: %s" msgstr "Ошибка: невозможно открыть файл политик: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:447 #: src/hed/libs/credential/Credential.cpp:891 #, c-format msgid "Error: policy location: %s is not a regular file" msgstr "Ошибка: меÑтонахождение политик: %s не ÑвлÑетÑÑ Ñтандартным файлом" #: src/hed/libs/credential/ARCProxyUtil.cpp:546 #, c-format msgid "VOMS line contains wrong number of tokens (%u expected): \"%s\"" msgstr "" "Строка VOMS Ñодержит неверное количеÑтво токенов (ожидаетÑÑ %u): \"%s\"" #: src/hed/libs/credential/ARCProxyUtil.cpp:590 #, c-format msgid "Cannot get VOMS server %s information from the vomses files" msgstr "Ðевозможно найти информацию о Ñервере VOMS %s из файлов vomses" #: src/hed/libs/credential/ARCProxyUtil.cpp:623 #, c-format msgid "There are %d commands to the same VOMS server %s" msgstr "%d инÑтрукций направлено на один и тот же Ñервер VOMS, %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:677 #, c-format msgid "Try to get attribute from VOMS server with order: %s" msgstr "Попытка получить атрибут Ñ Ñервера VOMS Ñ Ð¿Ð¾Ñ€Ñдком: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:680 #, c-format msgid "Message sent to VOMS server %s is: %s" msgstr "Сообщение, отправленное на Ñервер VOMS %s: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:699 #: src/hed/libs/credential/ARCProxyUtil.cpp:720 #, c-format msgid "" "The VOMS server with the information:\n" "\t%s\n" "can not be reached, please make sure it is available" msgstr "" "Ðевозможно ÑвÑзатьÑÑ Ñ Ñервером VOMS Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÐµÐ¹:\n" "\t%s\n" "пожалуйÑта, проверьте, доÑтупен ли Ñтот Ñервер" #: src/hed/libs/credential/ARCProxyUtil.cpp:703 msgid "No HTTP response from VOMS server" msgstr "Сервер VOMS не отзываетÑÑ Ð¿Ð¾ HTTP" #: src/hed/libs/credential/ARCProxyUtil.cpp:708 #: src/hed/libs/credential/ARCProxyUtil.cpp:734 #, c-format msgid "Returned message from VOMS server: %s" msgstr "Сообщение Ñ Ñервера VOMS: %s" #: src/hed/libs/credential/ARCProxyUtil.cpp:724 msgid "No stream response from VOMS server" msgstr "Сервер VOMS не отзываетÑÑ" #: src/hed/libs/credential/ARCProxyUtil.cpp:746 #, c-format msgid "" "The validity duration of VOMS AC is shortened from %s to %s, due to the " "validity constraint on voms server side.\n" msgstr "" "Срок дейÑÑ‚Ð²Ð¸Ñ 
Ñертификата атрибута VOMS (AC) Ñокращён Ñ %s до %s, в ÑвÑзи Ñ " "ограничением Ñо Ñтороны Ñервера VOMS.\n" #: src/hed/libs/credential/ARCProxyUtil.cpp:749 #, c-format msgid "" "Cannot get any AC or attributes info from VOMS server: %s;\n" " Returned message from VOMS server: %s\n" msgstr "" "Ðевозможно получить Ñертификат атрибута (AC) или информацию об атрибутах Ñ " "Ñервера VOMS: %s;\n" " Сообщение, возвращённое Ñервером VOMS: %s\n" #: src/hed/libs/credential/ARCProxyUtil.cpp:754 #, c-format msgid "Returned message from VOMS server %s is: %s\n" msgstr "Сообщение, полученное Ñ Ñервера VOMS %s: %s\n" #: src/hed/libs/credential/ARCProxyUtil.cpp:776 #, c-format msgid "The attribute information from VOMS server: %s is list as following:" msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾Ð± атрибутах Ñ Ñервера VOMS: %s Ñодержит:" #: src/hed/libs/credential/ARCProxyUtil.cpp:788 #, c-format msgid "" "There are %d servers with the same name: %s in your vomses file, but none of " "them can be reached, or can return valid message. But proxy without VOMS AC " "extension will still be generated." msgstr "" "Ð’ Вашем файле vomses указаны %d Ñерверов Ñ Ð¾Ð´Ð¸Ð½Ð°ÐºÐ¾Ð²Ñ‹Ð¼ именем %s, но не вÑе " "доÑтупны или правильно отзываютÑÑ. ДоверенноÑть без раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ VOMS AC будет " "вÑÑ‘ равно Ñоздана." #: src/hed/libs/credential/ARCProxyUtil.cpp:1321 #: src/hed/libs/credential/ARCProxyUtil.cpp:1428 msgid "Failed to generate X509 request with NSS" msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа X509 Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ NSS" #: src/hed/libs/credential/ARCProxyUtil.cpp:1332 #: src/hed/libs/credential/ARCProxyUtil.cpp:1439 #: src/hed/libs/credential/ARCProxyUtil.cpp:1480 msgid "Failed to create X509 certificate with NSS" msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñертификата X509 Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ NSS" #: src/hed/libs/credential/ARCProxyUtil.cpp:1344 #: src/hed/libs/credential/ARCProxyUtil.cpp:1451 #: src/hed/libs/credential/ARCProxyUtil.cpp:1504 msgid "Failed to export X509 certificate from NSS DB" msgstr "Сбой ÑÐ¾Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ Ñертификата X509 из базы данных NSS" #: src/hed/libs/credential/ARCProxyUtil.cpp:1487 msgid "Failed to import X509 certificate into NSS DB" msgstr "Сбой Ð¸Ð¼Ð¿Ð¾Ñ€Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ñертификата X509 в базу данных NSS" #: src/hed/libs/credential/ARCProxyUtil.cpp:1496 msgid "Failed to initialize the credential configuration" msgstr "Сбой инициализации наÑтроек параметров доÑтупа" #: src/hed/libs/credential/CertUtil.cpp:162 #, c-format msgid "Error number in store context: %i" msgstr "Ðомер ошибки в контекÑте хранилища: %i" #: src/hed/libs/credential/CertUtil.cpp:163 msgid "Self-signed certificate" msgstr "СамоÑтоÑтельно подпиÑанный Ñертификат" #: src/hed/libs/credential/CertUtil.cpp:166 #, c-format msgid "The certificate with subject %s is not valid" msgstr "Сертификат Ñ Ñубъектом %s недейÑтвителен" #: src/hed/libs/credential/CertUtil.cpp:169 #, c-format msgid "" "Can not find issuer certificate for the certificate with subject %s and " "hash: %lu" msgstr "" "Ðевозможно найти Ñертификат агентÑтва, выдавшего Ñертификат Ñ Ñубъектом %s и " "отпечатком: %lu" #: src/hed/libs/credential/CertUtil.cpp:172 #, c-format msgid "Certificate with subject %s has expired" msgstr "Срок дейÑÑ‚Ð²Ð¸Ñ Ñертификата Ñ Ñубъектом %s иÑтёк" #: src/hed/libs/credential/CertUtil.cpp:175 #, c-format msgid "" "Untrusted self-signed certificate in chain with subject %s and hash: %lu" msgstr "" "Цепочка Ñодержит недоверÑемый ÑамоподпиÑанный Ñертификат Ñ Ñубъектом %s и " "отпечатком: %lu" #: src/hed/libs/credential/CertUtil.cpp:177 #, c-format msgid "Certificate 
verification error: %s" msgstr "Ошибка проверки Ñертификата: %s" #: src/hed/libs/credential/CertUtil.cpp:189 msgid "Can not get the certificate type" msgstr "Ðе удалоÑÑŒ определить тип Ñертификата" #: src/hed/libs/credential/CertUtil.cpp:229 msgid "Couldn't verify availability of CRL" msgstr "Ðевозможно подтвердить доÑтупноÑть ÑпиÑков отзыва Ñертификатов (CRL)" #: src/hed/libs/credential/CertUtil.cpp:242 msgid "In the available CRL the lastUpdate field is not valid" msgstr "" "Ð’ доÑтупном ÑпиÑке отзыва Ñертификатов (CRL) значение lastUpdate " "недейÑтвительно" #: src/hed/libs/credential/CertUtil.cpp:249 msgid "The available CRL is not yet valid" msgstr "ДоÑтупный ÑпиÑок отзыва Ñертификатов (CRL) пока недейÑтвителен" #: src/hed/libs/credential/CertUtil.cpp:258 msgid "In the available CRL, the nextUpdate field is not valid" msgstr "" "Ð’ доÑтупном ÑпиÑке отзыва Ñертификатов (CRL) значение nextUpdate " "недейÑтвительно" #: src/hed/libs/credential/CertUtil.cpp:264 msgid "The available CRL has expired" msgstr "ДоÑтупный ÑпиÑок отзыва Ñертификатов (CRL) проÑрочен" #: src/hed/libs/credential/CertUtil.cpp:287 #, c-format msgid "Certificate with serial number %s and subject \"%s\" is revoked" msgstr "Сертификат Ñ Ñерийным номером %s и Ñубъектом \"%s\" отозван" #: src/hed/libs/credential/CertUtil.cpp:305 msgid "" "Directory of trusted CAs is not specified/found; Using current path as the " "CA direcroty" msgstr "" "Каталог доверÑемых агентÑтв не указан/найден; в качеÑтве такового " "иÑпользуетÑÑ Ñ‚ÐµÐºÑƒÑ‰Ð¸Ð¹ путь" #: src/hed/libs/credential/CertUtil.cpp:314 msgid "Can't allocate memory for CA policy path" msgstr "Ðевозможно выделить памÑть Ð´Ð»Ñ Ð¿ÑƒÑ‚Ð¸ к файлу политик агентÑтва" #: src/hed/libs/credential/CertUtil.cpp:360 #, c-format msgid "Certificate has unknown extension with numeric ID %u and SN %s" msgstr "" "Сертификат Ñодержит неизвеÑтное раÑширение Ñ Ñ‡Ð¸Ñленным идентификатором %u и " "именем Ñубъекта %s" #: src/hed/libs/credential/CertUtil.cpp:374 #: src/hed/libs/credential/Credential.cpp:1697 msgid "" "Can not convert DER encoded PROXY_CERT_INFO_EXTENSION extension to internal " "format" msgstr "" "Ðевозможно преобразовать раÑширение PROXY_CERT_INFO_EXTENSION в кодировке " "DER во внутренний формат" #: src/hed/libs/credential/CertUtil.cpp:420 msgid "Trying to check X509 cert with check_cert_type" msgstr "Попытка проверки Ñертификата X509 Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ check_cert_type" #: src/hed/libs/credential/CertUtil.cpp:459 msgid "Can't convert DER encoded PROXYCERTINFO extension to internal format" msgstr "" "Ðевозможно преобразовать раÑширение PROXYCERTINFO в кодировке DER во " "внутренний формат" #: src/hed/libs/credential/CertUtil.cpp:463 msgid "Can't get policy from PROXYCERTINFO extension" msgstr "Ðевозможно извлечь политику из раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ PROXYCERTINFO" #: src/hed/libs/credential/CertUtil.cpp:467 msgid "Can't get policy language from PROXYCERTINFO extension" msgstr "Ðевозможно извлечь Ñзык политики из раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ PROXYCERTINFO" #: src/hed/libs/credential/CertUtil.cpp:499 msgid "The subject does not match the issuer name + proxy CN entry" msgstr "" "Субъект не ÑоответÑтвует имени выдавшего агентÑтва и атрибуту доверенноÑти CN" #: src/hed/libs/credential/Credential.cpp:84 #, c-format msgid "OpenSSL error string: %s" msgstr "Ошибка OpenSSL: %s" #: src/hed/libs/credential/Credential.cpp:205 msgid "Can't get the first byte of input to determine its format" msgstr "" "Ðевозможно получить первый байт Ñертификата, чтобы определить его формат" #: 
src/hed/libs/credential/Credential.cpp:219 msgid "Can't reset the input" msgstr "Ðевозможно ÑброÑить ввод" #: src/hed/libs/credential/Credential.cpp:244 #: src/hed/libs/credential/Credential.cpp:280 msgid "Can't get the first byte of input BIO to get its format" msgstr "" "Ðевозможно получить первый байт Ñертификата, чтобы определить его формат" #: src/hed/libs/credential/Credential.cpp:256 msgid "Can not read certificate/key string" msgstr "Ðевозможно прочеÑть Ñтроку Ñертификата/ключа" #: src/hed/libs/credential/Credential.cpp:460 #, c-format msgid "Can not find certificate file: %s" msgstr "Ðе найден файл Ñертификата: %s" #: src/hed/libs/credential/Credential.cpp:465 #, c-format msgid "Can not read certificate file: %s" msgstr "Ðе удалоÑÑŒ прочитать файл Ñертификата: %s" #: src/hed/libs/credential/Credential.cpp:503 msgid "Can not read certificate string" msgstr "Ðе удалоÑÑŒ прочитать Ñертификат" #: src/hed/libs/credential/Credential.cpp:523 msgid "Certificate format is PEM" msgstr "Сертификат в формате PEM" #: src/hed/libs/credential/Credential.cpp:550 msgid "Certificate format is DER" msgstr "Сертификат в формате DER" #: src/hed/libs/credential/Credential.cpp:579 msgid "Certificate format is PKCS" msgstr "Сертификат в формате PKCS" #: src/hed/libs/credential/Credential.cpp:605 msgid "Certificate format is unknown" msgstr "Формат Ñертификата неизвеÑтен" #: src/hed/libs/credential/Credential.cpp:613 #, c-format msgid "Can not find key file: %s" msgstr "Ðе удалоÑÑŒ обнаружить файл личного ключа: %s" #: src/hed/libs/credential/Credential.cpp:618 #, c-format msgid "Can not open key file %s" msgstr "Ðе удалоÑÑŒ открыть файл личного ключа %s" #: src/hed/libs/credential/Credential.cpp:637 msgid "Can not read key string" msgstr "Ðе удалоÑÑŒ прочитать личный ключ" #: src/hed/libs/credential/Credential.cpp:700 #: src/hed/libs/credential/VOMSUtil.cpp:244 msgid "Failed to lock arccredential library in memory" msgstr "Ðевозможно заблокировать библиотеку arccredential в памÑти" #: src/hed/libs/credential/Credential.cpp:712 msgid "Certificate verification succeeded" msgstr "УÑпешное подтверждение Ñертификата" #: src/hed/libs/credential/Credential.cpp:716 msgid "Certificate verification failed" msgstr "Сертификат не подтверждён" #: src/hed/libs/credential/Credential.cpp:729 #: src/hed/libs/credential/Credential.cpp:747 #: src/hed/libs/credential/Credential.cpp:765 #: src/hed/libs/credential/Credential.cpp:996 #: src/hed/libs/credential/Credential.cpp:2368 #: src/hed/libs/credential/Credential.cpp:2397 msgid "Failed to initialize extensions member for Credential" msgstr "Сбой инициализации раздела раÑширений параметров доÑтупа" #: src/hed/libs/credential/Credential.cpp:808 #, c-format msgid "Unsupported proxy policy language is requested - %s" msgstr "Запрошен неподдерживаемый Ñзык политик доверенноÑти - %s" #: src/hed/libs/credential/Credential.cpp:820 #, c-format msgid "Unsupported proxy version is requested - %s" msgstr "Запрошена Ð½ÐµÐ¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ð°Ñ Ð²ÐµÑ€ÑÐ¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти - %s" #: src/hed/libs/credential/Credential.cpp:831 msgid "If you specify a policy you also need to specify a policy language" msgstr "Ð£ÐºÐ°Ð·Ñ‹Ð²Ð°Ñ Ð¿Ð¾Ð»Ð¸Ñ‚Ð¸ÐºÑƒ, указывайте также её Ñзык" #: src/hed/libs/credential/Credential.cpp:1001 msgid "Certificate/Proxy path is empty" msgstr "Путь к Ñертификату/доверенноÑти не задан" #: src/hed/libs/credential/Credential.cpp:1059 #: src/hed/libs/credential/Credential.cpp:2905 msgid "Failed to duplicate extension" msgstr "Ðе удалоÑÑŒ Ñкопировать раÑширение" #: 
src/hed/libs/credential/Credential.cpp:1063 msgid "Failed to add extension into credential extensions" msgstr "Ðе удалоÑÑŒ добавить раÑширение к раÑширениÑм параметров доÑтупа" #: src/hed/libs/credential/Credential.cpp:1074 msgid "Certificate information collection failed" msgstr "Сбой Ñбора информации о Ñертификате" #: src/hed/libs/credential/Credential.cpp:1113 #: src/hed/libs/credential/Credential.cpp:1118 msgid "Can not convert string into ASN1_OBJECT" msgstr "Ðевозможно преобразовать Ñтроку в ASN1_OBJECT" #: src/hed/libs/credential/Credential.cpp:1125 msgid "Can not create ASN1_OCTET_STRING" msgstr "Ðевозможно Ñоздать ASN1_OCTET_STRING" #: src/hed/libs/credential/Credential.cpp:1134 msgid "Can not allocate memory for extension for proxy certificate" msgstr "Ðевозможно зарезервировать памÑть Ð´Ð»Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти" #: src/hed/libs/credential/Credential.cpp:1144 msgid "Can not create extension for proxy certificate" msgstr "Ðевозможно Ñоздать раÑширение Ð´Ð»Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти" #: src/hed/libs/credential/Credential.cpp:1180 #: src/hed/libs/credential/Credential.cpp:1348 msgid "BN_set_word failed" msgstr "Сбой метода BN_set_word" #: src/hed/libs/credential/Credential.cpp:1189 #: src/hed/libs/credential/Credential.cpp:1357 msgid "RSA_generate_key_ex failed" msgstr "Сбой метода RSA_generate_key_ex" #: src/hed/libs/credential/Credential.cpp:1198 #: src/hed/libs/credential/Credential.cpp:1365 msgid "BN_new || RSA_new failed" msgstr "Сбой метода BN_new или RSA_new" #: src/hed/libs/credential/Credential.cpp:1209 msgid "Created RSA key, proceeding with request" msgstr "Создан ключ RSA, теперь обрабатываетÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñ" #: src/hed/libs/credential/Credential.cpp:1214 msgid "pkey and rsa_key exist!" msgstr "pkey и rsa_key ÑущеÑтвуют!" #: src/hed/libs/credential/Credential.cpp:1217 msgid "Generate new X509 request!" msgstr "Создайте новый Ð·Ð°Ð¿Ñ€Ð¾Ñ X509!" #: src/hed/libs/credential/Credential.cpp:1222 msgid "Setting subject name!" msgstr "ЗадаётÑÑ Ð¸Ð¼Ñ Ñубъекта!" 
#: src/hed/libs/credential/Credential.cpp:1230 #: src/hed/libs/credential/Credential.cpp:1444 msgid "PEM_write_bio_X509_REQ failed" msgstr "Сбой PEM_write_bio_X509_REQ" #: src/hed/libs/credential/Credential.cpp:1260 #: src/hed/libs/credential/Credential.cpp:1301 #: src/hed/libs/credential/Credential.cpp:1476 #: src/hed/libs/credential/Credential.cpp:1496 msgid "Can not create BIO for request" msgstr "Ðевозможно Ñоздать BIO Ð´Ð»Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа" #: src/hed/libs/credential/Credential.cpp:1278 msgid "Failed to write request into string" msgstr "Ðе удалоÑÑŒ запиÑать Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð² Ñтроку" #: src/hed/libs/credential/Credential.cpp:1305 #: src/hed/libs/credential/Credential.cpp:1310 #: src/hed/libs/credential/Credential.cpp:1500 msgid "Can not set writable file for request BIO" msgstr "Ðевозможно Ñоздать запиÑываемый файл Ð´Ð»Ñ BIO запроÑа" #: src/hed/libs/credential/Credential.cpp:1316 #: src/hed/libs/credential/Credential.cpp:1505 msgid "Wrote request into a file" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð·Ð°Ð¿Ð¸Ñан в файл" #: src/hed/libs/credential/Credential.cpp:1318 #: src/hed/libs/credential/Credential.cpp:1508 msgid "Failed to write request into a file" msgstr "Ðе удалоÑÑŒ запиÑать Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð² файл" #: src/hed/libs/credential/Credential.cpp:1338 msgid "The credential's private key has already been initialized" msgstr "Закрытый ключ параметров доÑтупа уже инициализирован" #: src/hed/libs/credential/Credential.cpp:1386 msgid "" "Can not duplicate the subject name for the self-signing proxy certificate " "request" msgstr "" "Ðевозможно дублировать Ð¸Ð¼Ñ Ñубъекта Ð´Ð»Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа ÑамозаверÑющей доверенноÑти" #: src/hed/libs/credential/Credential.cpp:1396 msgid "Can not create a new X509_NAME_ENTRY for the proxy certificate request" msgstr "" "Ðевозможно Ñоздать новую переменную X509_NAME_ENTRY Ð´Ð»Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа доверенноÑти" #: src/hed/libs/credential/Credential.cpp:1414 #: src/hed/libs/credential/Credential.cpp:1421 #: src/hed/libs/credential/Credential.cpp:1999 #: src/hed/libs/credential/Credential.cpp:2007 msgid "" "Can not convert PROXY_CERT_INFO_EXTENSION struct from internal to DER " "encoded format" msgstr "" "Ðевозможно преобразовать Ñтруктуру PROXY_CERT_INFO_EXTENSION из внутреннего " "формата в DER" #: src/hed/libs/credential/Credential.cpp:1451 msgid "Can't convert X509 request from internal to DER encoded format" msgstr "Ðевозможно преобразовать Ð·Ð°Ð¿Ñ€Ð¾Ñ X509 из внутреннего формата в DER" #: src/hed/libs/credential/Credential.cpp:1461 msgid "Can not generate X509 request" msgstr "Ðе удалоÑÑŒ Ñоздать Ð·Ð°Ð¿Ñ€Ð¾Ñ X509" #: src/hed/libs/credential/Credential.cpp:1463 msgid "Can not set private key" msgstr "Ðе удалоÑÑŒ задать закрытый ключ" #: src/hed/libs/credential/Credential.cpp:1561 msgid "Failed to get private key" msgstr "Ðе удалоÑÑŒ получить закрытый ключ" #: src/hed/libs/credential/Credential.cpp:1580 msgid "Failed to get public key from RSA object" msgstr "Ðевозможно извлечь открытый ключ из объекта RSA" #: src/hed/libs/credential/Credential.cpp:1588 msgid "Failed to get public key from X509 object" msgstr "Ðевозможно извлечь открытый ключ из объекта X509" #: src/hed/libs/credential/Credential.cpp:1595 msgid "Failed to get public key" msgstr "Ðе удалоÑÑŒ получить открытый ключ" #: src/hed/libs/credential/Credential.cpp:1633 #, c-format msgid "Certiticate chain number %d" msgstr "Ðомер цепочки Ñертификатов %d" #: src/hed/libs/credential/Credential.cpp:1661 msgid "NULL BIO passed to InquireRequest" msgstr "NULL BIO передан в InquireRequest" #: src/hed/libs/credential/Credential.cpp:1664 
msgid "PEM_read_bio_X509_REQ failed" msgstr "Сбой PEM_read_bio_X509_REQ" #: src/hed/libs/credential/Credential.cpp:1668 msgid "d2i_X509_REQ_bio failed" msgstr "Сбой d2i_X509_REQ_bio" #: src/hed/libs/credential/Credential.cpp:1690 msgid "Missing data in DER encoded PROXY_CERT_INFO_EXTENSION extension" msgstr "" "ÐедоÑтаточно данных в раÑширении PROXY_CERT_INFO_EXTENSION в кодировке DER" #: src/hed/libs/credential/Credential.cpp:1702 msgid "Can not create PROXY_CERT_INFO_EXTENSION extension" msgstr "Ðевозможно Ñоздать раÑширение PROXY_CERT_INFO_EXTENSION" #: src/hed/libs/credential/Credential.cpp:1712 msgid "Can not get policy from PROXY_CERT_INFO_EXTENSION extension" msgstr "Ðевозможно извлечь политику из раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ PROXY_CERT_INFO_EXTENSION" #: src/hed/libs/credential/Credential.cpp:1716 msgid "Can not get policy language from PROXY_CERT_INFO_EXTENSION extension" msgstr "" "Ðевозможно извлечь Ñзык политик из раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ PROXY_CERT_INFO_EXTENSION" #: src/hed/libs/credential/Credential.cpp:1732 #, c-format msgid "Cert Type: %d" msgstr "Тип Ñертификата: %d" #: src/hed/libs/credential/Credential.cpp:1745 #: src/hed/libs/credential/Credential.cpp:1764 msgid "Can not create BIO for parsing request" msgstr "Ðевозможно Ñоздать BIO Ð´Ð»Ñ Ñ€Ð°Ð·Ð±Ð¾Ñ€Ð° запроÑа" #: src/hed/libs/credential/Credential.cpp:1750 msgid "Read request from a string" msgstr "Чтение запроÑа из Ñтроки" #: src/hed/libs/credential/Credential.cpp:1753 msgid "Failed to read request from a string" msgstr "Сбой при чтении запроÑа из Ñтроки" #: src/hed/libs/credential/Credential.cpp:1768 msgid "Can not set readable file for request BIO" msgstr "Ðевозможно открыть на чтение файл Ð´Ð»Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа BIO" #: src/hed/libs/credential/Credential.cpp:1773 msgid "Read request from a file" msgstr "ПрочеÑть Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¸Ð· файла" #: src/hed/libs/credential/Credential.cpp:1776 msgid "Failed to read request from a file" msgstr "Произошёл Ñбой при чтении запроÑа из файла" #: src/hed/libs/credential/Credential.cpp:1816 msgid "Can not convert private key to DER format" msgstr "Ðевозможно преобразовать закрытый ключ в формат DER" #: src/hed/libs/credential/Credential.cpp:1980 msgid "Credential is not initialized" msgstr "Параметры доÑтупа не инициализированы" #: src/hed/libs/credential/Credential.cpp:1986 msgid "Failed to duplicate X509 structure" msgstr "Ðе удалоÑÑŒ Ñкопировать Ñтруктуру X509" #: src/hed/libs/credential/Credential.cpp:1991 msgid "Failed to initialize X509 structure" msgstr "Ðе удалоÑÑŒ инициализировать Ñтруктуру X509" #: src/hed/libs/credential/Credential.cpp:2014 msgid "Can not create extension for PROXY_CERT_INFO" msgstr "Ðевозможно Ñоздать раÑширение Ð´Ð»Ñ PROXY_CERT_INFO" #: src/hed/libs/credential/Credential.cpp:2018 #: src/hed/libs/credential/Credential.cpp:2066 msgid "Can not add X509 extension to proxy cert" msgstr "Ðевозможно добавить раÑширение X509 к доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2034 msgid "Can not convert keyUsage struct from DER encoded format" msgstr "Ðевозможно преобразовать Ñтруктуру keyUsage из формата кодировки DER" #: src/hed/libs/credential/Credential.cpp:2046 #: src/hed/libs/credential/Credential.cpp:2055 msgid "Can not convert keyUsage struct from internal to DER format" msgstr "" "Ðевозможно преобразовать Ñтруктуру keyUsage из внутреннего формата в DER" #: src/hed/libs/credential/Credential.cpp:2062 msgid "Can not create extension for keyUsage" msgstr "Ðевозможно Ñоздать раÑширение Ð´Ð»Ñ keyUsage" #: src/hed/libs/credential/Credential.cpp:2075 msgid "Can not get extended 
KeyUsage extension from issuer certificate" msgstr "" "Ðевозможно получить раÑширенное раÑширение KeyUsage из Ñертификата агентÑтва" #: src/hed/libs/credential/Credential.cpp:2080 msgid "Can not copy extended KeyUsage extension" msgstr "Ðевозможно Ñкопировать раÑширенное раÑширение KeyUsage" #: src/hed/libs/credential/Credential.cpp:2085 msgid "Can not add X509 extended KeyUsage extension to new proxy certificate" msgstr "" "Ðевозможно добавить раÑширенное X509 раÑширение KeyUsage к новой доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2095 msgid "Can not compute digest of public key" msgstr "Ðе удалоÑÑŒ вычиÑлить профиль открытого ключа" #: src/hed/libs/credential/Credential.cpp:2106 msgid "Can not copy the subject name from issuer for proxy certificate" msgstr "Ðевозможно Ñкопировать Ð¸Ð¼Ñ Ñубъекта выдающего агентÑтва в доверенноÑть" #: src/hed/libs/credential/Credential.cpp:2112 msgid "Can not create name entry CN for proxy certificate" msgstr "Ðевозможно Ñоздать компонент Ð½Ð°Ð·Ð²Ð°Ð½Ð¸Ñ CN Ð´Ð»Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти" #: src/hed/libs/credential/Credential.cpp:2117 msgid "Can not set CN in proxy certificate" msgstr "Ðевозможно задать Ñлемент CN в доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2125 msgid "Can not set issuer's subject for proxy certificate" msgstr "Ðевозможно задать Ð¸Ð¼Ñ Ð²Ñ‹Ð´Ð°ÑŽÑ‰ÐµÐ³Ð¾ агентÑтва в доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2130 msgid "Can not set version number for proxy certificate" msgstr "Ðевозможно задать номер верÑии в доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2138 msgid "Can not set serial number for proxy certificate" msgstr "Ðевозможно задать Ñерийный номер в доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2144 msgid "Can not duplicate serial number for proxy certificate" msgstr "Ðевозможно Ñкопировать Ñерийный номер Ð´Ð»Ñ Ñертификата доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2150 msgid "Can not set the lifetime for proxy certificate" msgstr "Ðевозможно задать Ñрок годноÑти доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2154 msgid "Can not set pubkey for proxy certificate" msgstr "Ðевозможно задать открытый ключ доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2170 #: src/hed/libs/credential/Credential.cpp:2795 msgid "The credential to be signed is NULL" msgstr "Параметры доÑтупа Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñи имеют значение NULL" #: src/hed/libs/credential/Credential.cpp:2174 #: src/hed/libs/credential/Credential.cpp:2799 msgid "The credential to be signed contains no request" msgstr "Параметры доÑтупа Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñи не Ñодержат запроÑа" #: src/hed/libs/credential/Credential.cpp:2178 #: src/hed/libs/credential/Credential.cpp:2803 msgid "The BIO for output is NULL" msgstr "BIO Ð´Ð»Ñ Ð²Ñ‹Ñ…Ð¾Ð´Ð°: NULL" #: src/hed/libs/credential/Credential.cpp:2192 #: src/hed/libs/credential/Credential.cpp:2810 msgid "Error when extracting public key from request" msgstr "Ошибка при извлечении открытого ключа из запроÑа" #: src/hed/libs/credential/Credential.cpp:2197 #: src/hed/libs/credential/Credential.cpp:2814 msgid "Failed to verify the request" msgstr "Ðе удалоÑÑŒ подтвердить запроÑ" #: src/hed/libs/credential/Credential.cpp:2201 msgid "Failed to add issuer's extension into proxy" msgstr "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð²Ñ‹Ð´Ð°ÑŽÑ‰ÐµÐ³Ð¾ агентÑтва в доверенноÑть" #: src/hed/libs/credential/Credential.cpp:2225 msgid "Failed to find extension" msgstr "Ðе удалоÑÑŒ найти раÑширение" #: src/hed/libs/credential/Credential.cpp:2237 msgid "Can not get the issuer's private 
key" msgstr "Ðевозможно извлечь закрытый ключ выдающего агентÑтва" #: src/hed/libs/credential/Credential.cpp:2244 #: src/hed/libs/credential/Credential.cpp:2846 msgid "There is no digest in issuer's private key object" msgstr "Ð’ объекте закрытого ключа Ð¸Ð·Ð´Ð°Ñ‚ÐµÐ»Ñ Ð¾Ñ‚ÑутÑтвует профиль" #: src/hed/libs/credential/Credential.cpp:2249 #: src/hed/libs/credential/Credential.cpp:2850 #, c-format msgid "%s is an unsupported digest type" msgstr "%s не ÑвлÑетÑÑ Ð¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ñ‹Ð¼ типом профилÑ" #: src/hed/libs/credential/Credential.cpp:2260 #, c-format msgid "" "The signing algorithm %s is not allowed,it should be SHA1 or SHA2 to sign " "certificate requests" msgstr "" "ÐедопуÑтимый алгоритм подпиÑи %s: запроÑÑ‹ Ñертификата должны подпиÑыватьÑÑ " "SHA1 или SHA2" #: src/hed/libs/credential/Credential.cpp:2266 msgid "Failed to sign the proxy certificate" msgstr "Ðе удалоÑÑŒ подпиÑать доверенноÑть" #: src/hed/libs/credential/Credential.cpp:2268 msgid "Succeeded to sign the proxy certificate" msgstr "ДоверенноÑть уÑпешно подпиÑана" #: src/hed/libs/credential/Credential.cpp:2273 msgid "Failed to verify the signed certificate" msgstr "Сбой проверки подпиÑанного Ñертификата" #: src/hed/libs/credential/Credential.cpp:2275 msgid "Succeeded to verify the signed certificate" msgstr "ПодпиÑанный Ñертификат уÑпешно проверен" #: src/hed/libs/credential/Credential.cpp:2280 #: src/hed/libs/credential/Credential.cpp:2289 msgid "Output the proxy certificate" msgstr "Вывод доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2283 msgid "Can not convert signed proxy cert into PEM format" msgstr "Ðевозможно преобразовать подпиÑанную доверенноÑть в формат PEM" #: src/hed/libs/credential/Credential.cpp:2292 msgid "Can not convert signed proxy cert into DER format" msgstr "Ðевозможно преобразовать подпиÑанную доверенноÑть в формат DER" #: src/hed/libs/credential/Credential.cpp:2308 #: src/hed/libs/credential/Credential.cpp:2331 msgid "Can not create BIO for signed proxy certificate" msgstr "" "Ðевозможно Ñоздать неформатированный ввод/вывод BIO Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñанной " "доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2335 msgid "Can not set writable file for signed proxy certificate BIO" msgstr "" "Ðевозможно открыть на запиÑÑŒ файл Ð´Ð»Ñ Ð½ÐµÑ„Ð¾Ñ€Ð¼Ð°Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð¾Ð³Ð¾ ввода/вывода " "подпиÑанной доверенноÑти" #: src/hed/libs/credential/Credential.cpp:2340 msgid "Wrote signed proxy certificate into a file" msgstr "ПодпиÑÐ°Ð½Ð½Ð°Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñть запиÑана в файл" #: src/hed/libs/credential/Credential.cpp:2343 msgid "Failed to write signed proxy certificate into a file" msgstr "Сбой запиÑи подпиÑанной доверенноÑти в файл" #: src/hed/libs/credential/Credential.cpp:2377 #: src/hed/libs/credential/Credential.cpp:2415 #, c-format msgid "ERROR: %s" msgstr "Ошибка: %s" #: src/hed/libs/credential/Credential.cpp:2423 #, c-format msgid "SSL error: %s, libs: %s, func: %s, reason: %s" msgstr "Ошибка SSL: %s, libs: %s, func: %s, причина: %s" #: src/hed/libs/credential/Credential.cpp:2468 #, c-format msgid "unable to load number from: %s" msgstr "невозможно прочеÑть номер из: %s" #: src/hed/libs/credential/Credential.cpp:2473 msgid "error converting number from bin to BIGNUM" msgstr "ошибка Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ Ñ‡Ð¸Ñла из bin в BIGNUM" #: src/hed/libs/credential/Credential.cpp:2500 msgid "file name too long" msgstr "Ñлишком длинное Ð¸Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð°" #: src/hed/libs/credential/Credential.cpp:2523 msgid "error converting serial to ASN.1 format" msgstr "ошибка Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ 
Ñерийного номера в формат ASN.1" #: src/hed/libs/credential/Credential.cpp:2556 #, c-format msgid "load serial from %s failure" msgstr "Ñбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ñерийного номера из %s" #: src/hed/libs/credential/Credential.cpp:2561 msgid "add_word failure" msgstr "Сбой add_word" #: src/hed/libs/credential/Credential.cpp:2566 #, c-format msgid "save serial to %s failure" msgstr "Ñбой запиÑи Ñерийного номера в %s" #: src/hed/libs/credential/Credential.cpp:2586 msgid "Error initialising X509 store" msgstr "Ошибка при инициализации хранилища X509" #: src/hed/libs/credential/Credential.cpp:2593 msgid "Out of memory when generate random serial" msgstr "ÐедоÑтаточно памÑти Ð´Ð»Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñлучайного Ñерийного номера" #: src/hed/libs/credential/Credential.cpp:2605 msgid "CA certificate and CA private key do not match" msgstr "Сертификат и закрытый ключ агентÑтва не Ñовпадают" #: src/hed/libs/credential/Credential.cpp:2629 #, c-format msgid "Failed to load extension section: %s" msgstr "Сбой загрузки раздела раÑширений: %s" #: src/hed/libs/credential/Credential.cpp:2666 msgid "malloc error" msgstr "ошибка malloc" #: src/hed/libs/credential/Credential.cpp:2670 msgid "Subject does not start with '/'" msgstr "Субъект не начинаетÑÑ Ñ '/'" #: src/hed/libs/credential/Credential.cpp:2686 #: src/hed/libs/credential/Credential.cpp:2707 msgid "escape character at end of string" msgstr "Ñимвол выхода в конце Ñтроки" #: src/hed/libs/credential/Credential.cpp:2698 #, c-format msgid "" "end of string encountered while processing type of subject name element #%d" msgstr "доÑтигнут конец Ñтроки при обработке типа Ñлемента имени Ñубъекта #%d" #: src/hed/libs/credential/Credential.cpp:2735 #, c-format msgid "Subject Attribute %s has no known NID, skipped" msgstr "Ðтрибут Ñубъекта %s не Ñодержит извеÑтного NID, пропуÑкаетÑÑ" #: src/hed/libs/credential/Credential.cpp:2739 #, c-format msgid "No value provided for Subject Attribute %s skipped" msgstr "Ðе задана значение атрибута Ñубъекта %s, пропуÑкаетÑÑ" #: src/hed/libs/credential/Credential.cpp:2780 msgid "Failed to set the pubkey for X509 object by using pubkey from X509_REQ" msgstr "" "Ðе удалоÑÑŒ задать открытый ключ Ð´Ð»Ñ Ð¾Ð±ÑŠÐµÐºÑ‚Ð° X509 иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ñ‹Ð¹ ключ из " "X509_REQ" #: src/hed/libs/credential/Credential.cpp:2790 msgid "The private key for signing is not initialized" msgstr "Закрытый ключ Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñи не инициализирован" #: src/hed/libs/credential/Credential.cpp:2869 #, c-format msgid "Error when loading the extension config file: %s" msgstr "Ошибка при загрузке файла наÑтроек раÑширений: %s" #: src/hed/libs/credential/Credential.cpp:2873 #, c-format msgid "Error when loading the extension config file: %s on line: %d" msgstr "Ошибка при загрузке файла наÑтроек раÑширений: %s в Ñтроке: %d" #: src/hed/libs/credential/Credential.cpp:2921 msgid "Can not sign a EEC" msgstr "Ðевозможно подпиÑать EEC" #: src/hed/libs/credential/Credential.cpp:2925 msgid "Output EEC certificate" msgstr "Вывод Ñертификата EEC" #: src/hed/libs/credential/Credential.cpp:2928 msgid "Can not convert signed EEC cert into DER format" msgstr "Ðевозможно преобразовать подпиÑанный Ñертификат EEC в формат DER" #: src/hed/libs/credential/Credential.cpp:2942 #: src/hed/libs/credential/Credential.cpp:2961 msgid "Can not create BIO for signed EEC certificate" msgstr "" "Ðевозможно Ñоздать неформатированный ввод/вывод BIO Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñанного " "Ñертификата EEC" #: src/hed/libs/credential/Credential.cpp:2965 msgid "Can not set writable file for signed EEC certificate 
BIO" msgstr "" "Ðевозможно открыть на запиÑÑŒ файл Ð´Ð»Ñ Ð½ÐµÑ„Ð¾Ñ€Ð¼Ð°Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð¾Ð³Ð¾ ввода/вывода " "подпиÑанного Ñертификата EEC" #: src/hed/libs/credential/Credential.cpp:2970 msgid "Wrote signed EEC certificate into a file" msgstr "ПодпиÑанный Ñертификат EEC запиÑан в файл" #: src/hed/libs/credential/Credential.cpp:2973 msgid "Failed to write signed EEC certificate into a file" msgstr "Сбой запиÑи подпиÑанного Ñертификата EEC в файл" #: src/hed/libs/credential/NSSUtil.cpp:143 msgid "Error writing raw certificate" msgstr "Ошибка запиÑи иÑходного Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:222 msgid "Failed to add RFC proxy OID" msgstr "Ðе удалоÑÑŒ добавить OID доверенноÑти RFC" #: src/hed/libs/credential/NSSUtil.cpp:225 #, c-format msgid "Succeeded to add RFC proxy OID, tag %d is returned" msgstr "УÑпешно добавлен OID доверенноÑти RFC, возвращена метка %d" #: src/hed/libs/credential/NSSUtil.cpp:231 msgid "Failed to add anyLanguage OID" msgstr "Ðе удалоÑÑŒ добавить anyLanguage OID" #: src/hed/libs/credential/NSSUtil.cpp:234 #, c-format msgid "Succeeded to add anyLanguage OID, tag %d is returned" msgstr "УÑпешно добавлен OID anyLanguage, возвращена метка %d" #: src/hed/libs/credential/NSSUtil.cpp:240 msgid "Failed to add inheritAll OID" msgstr "Ðе удалоÑÑŒ добавить inheritAll OID" #: src/hed/libs/credential/NSSUtil.cpp:243 #, c-format msgid "Succeeded to add inheritAll OID, tag %d is returned" msgstr "УÑпешно добавлен OID inheritAll, возвращена метка %d" #: src/hed/libs/credential/NSSUtil.cpp:249 msgid "Failed to add Independent OID" msgstr "Ðе удалоÑÑŒ добавить Independent OID" #: src/hed/libs/credential/NSSUtil.cpp:252 #, c-format msgid "Succeeded to add Independent OID, tag %d is returned" msgstr "УÑпешно добавлен Independent OID, возвращена метка %d" #: src/hed/libs/credential/NSSUtil.cpp:258 msgid "Failed to add VOMS AC sequence OID" msgstr "Ðе удалоÑÑŒ добавить OID поÑледовательноÑти VOMS AC" #: src/hed/libs/credential/NSSUtil.cpp:261 #, c-format msgid "Succeeded to add VOMS AC sequence OID, tag %d is returned" msgstr "УÑпешно добавлен OID поÑледовательноÑти VOMS AC, возвращена метка %d" #: src/hed/libs/credential/NSSUtil.cpp:290 #, c-format msgid "NSS initialization failed on certificate database: %s" msgstr "Ð˜Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ NSS оборвалаÑÑŒ на базе данных Ñертификатов: %s" #: src/hed/libs/credential/NSSUtil.cpp:301 msgid "Succeeded to initialize NSS" msgstr "NSS уÑпешно инициализирован" #: src/hed/libs/credential/NSSUtil.cpp:323 #, c-format msgid "Failed to read attribute %x from private key." msgstr "Ðе удалоÑÑŒ прочеÑть атрибут %x из закрытого ключа." 
#: src/hed/libs/credential/NSSUtil.cpp:375 msgid "Succeeded to get credential" msgstr "Параметры доÑтупа получены" #: src/hed/libs/credential/NSSUtil.cpp:376 msgid "Failed to get credential" msgstr "Ðе удалоÑÑŒ получить параметры доÑтупа" #: src/hed/libs/credential/NSSUtil.cpp:438 msgid "p12 file is empty" msgstr "Файл Ñертификата p12 пуÑÑ‚" #: src/hed/libs/credential/NSSUtil.cpp:448 msgid "Unable to write to p12 file" msgstr "Сбой запиÑи в файл p12" #: src/hed/libs/credential/NSSUtil.cpp:464 msgid "Failed to open p12 file" msgstr "Сбой при открытии файла pk12" #: src/hed/libs/credential/NSSUtil.cpp:492 msgid "Failed to allocate p12 context" msgstr "Ðе удалоÑÑŒ зарезервировать контекÑÑ‚ p12" #: src/hed/libs/credential/NSSUtil.cpp:1200 msgid "Failed to find issuer certificate for proxy certificate" msgstr "Ðе удалоÑÑŒ обнаружить агентÑтво, выдавшее Ñертификат доверенноÑти" #: src/hed/libs/credential/NSSUtil.cpp:1351 #, c-format msgid "Failed to authenticate to PKCS11 slot %s" msgstr "Сбой проверки подлинноÑти Ð´Ð»Ñ Ñчейки PKCS11 %s" #: src/hed/libs/credential/NSSUtil.cpp:1357 #, c-format msgid "Failed to find certificates by nickname: %s" msgstr "Ðе удалоÑÑŒ обнаружить Ñертификат по краткому имени: %s" #: src/hed/libs/credential/NSSUtil.cpp:1362 #, c-format msgid "No user certificate by nickname %s found" msgstr "Ðе удалоÑÑŒ обнаружить Ñертификат Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ñ ÐºÑ€Ð°Ñ‚ÐºÐ¸Ð¼ именем %s" #: src/hed/libs/credential/NSSUtil.cpp:1375 #: src/hed/libs/credential/NSSUtil.cpp:1411 msgid "Certificate does not have a slot" msgstr "У Ñертификата нет Ñчейки" #: src/hed/libs/credential/NSSUtil.cpp:1381 msgid "Failed to create export context" msgstr "Ðе удалоÑÑŒ Ñоздать контекÑÑ‚ Ð´Ð»Ñ ÑкÑпорта" #: src/hed/libs/credential/NSSUtil.cpp:1396 msgid "PKCS12 output password not provided" msgstr "Ðе задан пароль Ð´Ð»Ñ Ð½Ð¾Ð²Ð¾Ð³Ð¾ Ñертификата PKCS12" #: src/hed/libs/credential/NSSUtil.cpp:1403 msgid "PKCS12 add password integrity failed" msgstr "Ðе удалоÑÑŒ задать ÑпоÑоб проверки целоÑтноÑти PKCS12 и паролÑ" #: src/hed/libs/credential/NSSUtil.cpp:1424 msgid "Failed to create key or certificate safe" msgstr "" "Ðе удалоÑÑŒ Ñоздать безопаÑное хранилище Ð´Ð»Ñ Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¾Ð³Ð¾ ключа или Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:1440 msgid "Failed to add certificate and key" msgstr "Ðе удалоÑÑŒ добавить закрытый ключ и Ñертификат" #: src/hed/libs/credential/NSSUtil.cpp:1449 #, c-format msgid "Failed to initialize PKCS12 file: %s" msgstr "Ðе удалоÑÑŒ инициализировать файл PKCS12: %s" #: src/hed/libs/credential/NSSUtil.cpp:1454 msgid "Failed to encode PKCS12" msgstr "Ðе удалоÑÑŒ шифрование в формат PKCS12" #: src/hed/libs/credential/NSSUtil.cpp:1457 msgid "Succeeded to export PKCS12" msgstr "УдалоÑÑŒ извлечь Ñертификат в формате PKCS12" #: src/hed/libs/credential/NSSUtil.cpp:1485 #, c-format msgid "" "There is no certificate named %s found, the certificate could be removed " "when generating CSR" msgstr "" "Ðе найден Ñертификат Ñ Ð¸Ð¼ÐµÐ½ÐµÐ¼ %s, Ñертификат мог быть удалён при Ñоздании CSR" #: src/hed/libs/credential/NSSUtil.cpp:1491 msgid "Failed to delete certificate" msgstr "Ðе удалоÑÑŒ уничтожить Ñертификат" #: src/hed/libs/credential/NSSUtil.cpp:1505 msgid "The name of the private key to delete is empty" msgstr "Ð˜Ð¼Ñ Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¾Ð³Ð¾ ключа Ð´Ð»Ñ ÑƒÐ½Ð¸Ñ‡Ñ‚Ð¾Ð¶ÐµÐ½Ð¸Ñ Ð¿ÑƒÑто" #: src/hed/libs/credential/NSSUtil.cpp:1510 #: src/hed/libs/credential/NSSUtil.cpp:2939 #: src/hed/libs/credential/NSSUtil.cpp:2956 #, c-format msgid "Failed to authenticate to token %s" msgstr "Ðе удалоÑÑŒ 
аутентифицироватьÑÑ Ðº маркёру %s" #: src/hed/libs/credential/NSSUtil.cpp:1517 #, c-format msgid "No private key with nickname %s exist in NSS database" msgstr "Закрытый ключ Ñ Ð¸Ð¼ÐµÐ½ÐµÐ¼ %s отÑутÑтвует в базе данных NSS" #: src/hed/libs/credential/NSSUtil.cpp:1550 msgid "Failed to delete private key and certificate" msgstr "Ðе удалоÑÑŒ уничтожить закрытый ключ и Ñертификат" #: src/hed/libs/credential/NSSUtil.cpp:1560 msgid "Failed to delete private key" msgstr "Ðе удалоÑÑŒ уничтожить закрытый ключ" #: src/hed/libs/credential/NSSUtil.cpp:1571 #, c-format msgid "Can not find key with name: %s" msgstr "Ðе удалоÑÑŒ найти закрытый ключ по имени: %s" #: src/hed/libs/credential/NSSUtil.cpp:1599 msgid "Can not read PEM private key: probably bad password" msgstr "" "Ðевозможно прочеÑть закрытый ключ PEM: возможно, введён неверный пароль" #: src/hed/libs/credential/NSSUtil.cpp:1601 msgid "Can not read PEM private key: failed to decrypt" msgstr "Сбой при чтении файла личного ключа PEM: не удалоÑÑŒ раÑшифровать" #: src/hed/libs/credential/NSSUtil.cpp:1603 #: src/hed/libs/credential/NSSUtil.cpp:1605 msgid "Can not read PEM private key: failed to obtain password" msgstr "Сбой при чтении файла личного ключа PEM: не был введён пароль" #: src/hed/libs/credential/NSSUtil.cpp:1606 msgid "Can not read PEM private key" msgstr "Ðе удалоÑÑŒ прочеÑть закрытый ключ PEM" #: src/hed/libs/credential/NSSUtil.cpp:1613 msgid "Failed to convert EVP_PKEY to PKCS8" msgstr "Ðе удалоÑÑŒ преобразовать EVP_PKEY в PKCS8" #: src/hed/libs/credential/NSSUtil.cpp:1650 msgid "Failed to load private key" msgstr "Ðе удалоÑÑŒ загрузить закрытый ключ" #: src/hed/libs/credential/NSSUtil.cpp:1651 msgid "Succeeded to load PrivateKeyInfo" msgstr "УÑпешно подгружен PrivateKeyInfo" #: src/hed/libs/credential/NSSUtil.cpp:1654 msgid "Failed to convert PrivateKeyInfo to EVP_PKEY" msgstr "Сбой Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ PrivateKeyInfo в EVP_PKEY" #: src/hed/libs/credential/NSSUtil.cpp:1655 msgid "Succeeded to convert PrivateKeyInfo to EVP_PKEY" msgstr "УÑпешное преобразование PrivateKeyInfo в EVP_PKEY" #: src/hed/libs/credential/NSSUtil.cpp:1692 msgid "Failed to import private key" msgstr "Ðе удалоÑÑŒ получить закрытый ключ" #: src/hed/libs/credential/NSSUtil.cpp:1695 msgid "Succeeded to import private key" msgstr "Закрытый ключ уÑпешно получен" #: src/hed/libs/credential/NSSUtil.cpp:1708 #: src/hed/libs/credential/NSSUtil.cpp:1750 #: src/hed/libs/credential/NSSUtil.cpp:2889 msgid "Failed to authenticate to key database" msgstr "Сбой проверки подлинноÑти на базе данных ключей" #: src/hed/libs/credential/NSSUtil.cpp:1717 msgid "Succeeded to generate public/private key pair" msgstr "УÑпешное Ñоздание пары открытого/закрытого ключей" #: src/hed/libs/credential/NSSUtil.cpp:1719 msgid "Failed to generate public/private key pair" msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¿Ð°Ñ€Ñ‹ открытого/закрытого ключей" #: src/hed/libs/credential/NSSUtil.cpp:1724 msgid "Failed to export private key" msgstr "Ðе удалоÑÑŒ Ñохранить закрытый ключ" #: src/hed/libs/credential/NSSUtil.cpp:1791 msgid "Failed to create subject name" msgstr "Ðе удалоÑÑŒ Ñформировать Ð¸Ð¼Ñ Ñубъекта" #: src/hed/libs/credential/NSSUtil.cpp:1807 msgid "Failed to create certificate request" msgstr "Ðе удалоÑÑŒ Ñоздать Ð·Ð°Ð¿Ñ€Ð¾Ñ Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:1820 msgid "Failed to call PORT_NewArena" msgstr "Ðе удалоÑÑŒ вызвать PORT_NewArena" #: src/hed/libs/credential/NSSUtil.cpp:1828 msgid "Failed to encode the certificate request with DER format" msgstr "Сбой ÑˆÐ¸Ñ„Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ 
Ð·Ð°Ð¿Ñ€Ð¾Ñа Ñертификата в формате DER" #: src/hed/libs/credential/NSSUtil.cpp:1835 msgid "Unknown key or hash type" msgstr "ÐеизвеÑтный ключ или тип хешированиÑ" #: src/hed/libs/credential/NSSUtil.cpp:1841 msgid "Failed to sign the certificate request" msgstr "Ðе удалоÑÑŒ подпиÑать Ð·Ð°Ð¿Ñ€Ð¾Ñ Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:1857 msgid "Failed to output the certificate request as ASCII format" msgstr "Сбой вывода запроÑа Ñертификата в формате ASCII" #: src/hed/libs/credential/NSSUtil.cpp:1866 msgid "Failed to output the certificate request as DER format" msgstr "Сбой вывода запроÑа Ñертификата в формате DER" #: src/hed/libs/credential/NSSUtil.cpp:1875 #, c-format msgid "Succeeded to output the certificate request into %s" msgstr "УÑпешный вывод запроÑа Ñертификата в %s" #: src/hed/libs/credential/NSSUtil.cpp:1914 #: src/hed/libs/credential/NSSUtil.cpp:1951 msgid "Failed to read data from input file" msgstr "Ðевозможно прочитать данные из входного файла" #: src/hed/libs/credential/NSSUtil.cpp:1930 msgid "Input is without trailer\n" msgstr "Входные данные не Ñодержат Ñтроки окончаниÑ\n" #: src/hed/libs/credential/NSSUtil.cpp:1941 msgid "Failed to convert ASCII to DER" msgstr "Ðе удалоÑÑŒ преобразовать ASCII в DER" #: src/hed/libs/credential/NSSUtil.cpp:1992 msgid "Certificate request is invalid" msgstr "ÐедопуÑтимый Ð·Ð°Ð¿Ñ€Ð¾Ñ Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:2212 #, c-format msgid "The policy language: %s is not supported" msgstr "Язык политик %s не поддерживаетÑÑ" #: src/hed/libs/credential/NSSUtil.cpp:2220 #: src/hed/libs/credential/NSSUtil.cpp:2245 #: src/hed/libs/credential/NSSUtil.cpp:2268 #: src/hed/libs/credential/NSSUtil.cpp:2290 msgid "Failed to new arena" msgstr "Сбой Ð²Ñ‹Ð´ÐµÐ»ÐµÐ½Ð¸Ñ Ð½Ð¾Ð²Ð¾Ð¹ облаÑти" #: src/hed/libs/credential/NSSUtil.cpp:2229 #: src/hed/libs/credential/NSSUtil.cpp:2254 msgid "Failed to create path length" msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð»Ð¸Ð½Ñ‹ пути" #: src/hed/libs/credential/NSSUtil.cpp:2232 #: src/hed/libs/credential/NSSUtil.cpp:2257 #: src/hed/libs/credential/NSSUtil.cpp:2277 #: src/hed/libs/credential/NSSUtil.cpp:2299 msgid "Failed to create policy language" msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñзыка политик" #: src/hed/libs/credential/NSSUtil.cpp:2700 #, c-format msgid "Failed to parse certificate request from CSR file %s" msgstr "Сбой обработки запроÑа Ñертификата из файла CSR %s" #: src/hed/libs/credential/NSSUtil.cpp:2707 #, c-format msgid "Can not find certificate with name %s" msgstr "Ðе удалоÑÑŒ найти открытый ключ по имени: %s" #: src/hed/libs/credential/NSSUtil.cpp:2739 msgid "Can not allocate memory" msgstr "Ðе удалоÑÑŒ зарезервировать памÑть" #: src/hed/libs/credential/NSSUtil.cpp:2747 #, c-format msgid "Proxy subject: %s" msgstr "Ð˜Ð¼Ñ Ñубъекта доверенноÑти: %s" #: src/hed/libs/credential/NSSUtil.cpp:2764 msgid "Failed to start certificate extension" msgstr "Сбой начала ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:2769 msgid "Failed to add key usage extension" msgstr "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð¾Ð± иÑпользовании ключа" #: src/hed/libs/credential/NSSUtil.cpp:2774 msgid "Failed to add proxy certificate information extension" msgstr "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð¾Ð± информации Ñертификата доверенноÑти" #: src/hed/libs/credential/NSSUtil.cpp:2778 msgid "Failed to add voms AC extension" msgstr "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ VOMS AC" #: src/hed/libs/credential/NSSUtil.cpp:2798 msgid "Failed to retrieve private key for issuer" 
msgstr "Сбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° закрытого ключа издателÑ" #: src/hed/libs/credential/NSSUtil.cpp:2805 msgid "Unknown key or hash type of issuer" msgstr "ÐеизвеÑтный ключ или тип Ñ…ÐµÑˆÐ¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¸Ð·Ð´Ð°Ñ‚ÐµÐ»Ñ Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:2811 msgid "Failed to set signature algorithm ID" msgstr "Сбой Ð·Ð°Ð´Ð°Ð½Ð¸Ñ ID алгоритма подпиÑи" #: src/hed/libs/credential/NSSUtil.cpp:2823 msgid "Failed to encode certificate" msgstr "Ошибка ÑˆÐ¸Ñ„Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:2829 msgid "Failed to allocate item for certificate data" msgstr "Ðе удалоÑÑŒ зарезервировать Ñлемент Ð´Ð»Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ… о Ñертификате" #: src/hed/libs/credential/NSSUtil.cpp:2835 msgid "Failed to sign encoded certificate data" msgstr "Сбой подпиÑи данных зашифрованного Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:2844 #, c-format msgid "Failed to open file %s" msgstr "Ðе удалоÑÑŒ открыть файл %s" #: src/hed/libs/credential/NSSUtil.cpp:2855 #, c-format msgid "Succeeded to output certificate to %s" msgstr "УÑпешный вывод Ñертификата в %s" #: src/hed/libs/credential/NSSUtil.cpp:2896 #, c-format msgid "Failed to open input certificate file %s" msgstr "Сбой Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° входного Ñертификата %s" #: src/hed/libs/credential/NSSUtil.cpp:2913 msgid "Failed to read input certificate file" msgstr "Сбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° входного Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:2918 msgid "Failed to get certificate from certificate file" msgstr "Сбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ñертификата из файла" #: src/hed/libs/credential/NSSUtil.cpp:2925 msgid "Failed to allocate certificate trust" msgstr "Сбой Ñ€ÐµÐ·ÐµÑ€Ð²Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ñ‹Ñ… отношений Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:2930 msgid "Failed to decode trust string" msgstr "Сбой раÑшифровки опиÑÐ°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ñ‹Ñ… отношений" #: src/hed/libs/credential/NSSUtil.cpp:2944 #: src/hed/libs/credential/NSSUtil.cpp:2961 msgid "Failed to add certificate to token or database" msgstr "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñертификата к маркёру или базе данных" #: src/hed/libs/credential/NSSUtil.cpp:2947 #: src/hed/libs/credential/NSSUtil.cpp:2950 msgid "Succeeded to import certificate" msgstr "УÑпешное импортирование Ñертификата" #: src/hed/libs/credential/NSSUtil.cpp:2964 #: src/hed/libs/credential/NSSUtil.cpp:2967 #, c-format msgid "Succeeded to change trusts to: %s" msgstr "УÑÐ¿ÐµÑˆÐ½Ð°Ñ Ñмена доверительных отношений на: %s" #: src/hed/libs/credential/NSSUtil.cpp:2994 #, c-format msgid "Failed to import private key from file: %s" msgstr "Сбой Ð¸Ð¼Ð¿Ð¾Ñ€Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¾Ð³Ð¾ ключа из файла: %s" #: src/hed/libs/credential/NSSUtil.cpp:2996 #, c-format msgid "Failed to import certificate from file: %s" msgstr "Сбой Ð¸Ð¼Ð¿Ð¾Ñ€Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ñертификата из файла: %s" #: src/hed/libs/credential/VOMSConfig.cpp:142 #, c-format msgid "" "ERROR: VOMS configuration line contains too many tokens. Expecting 5 or 6. " "Line was: %s" msgstr "" "ERROR: Ñтрока наÑтройки VOMS Ñодержит избыточное чиÑло Ñлементов. ОжидаетÑÑ " "5 или 6. Строка: %s" #: src/hed/libs/credential/VOMSConfig.cpp:158 #, c-format msgid "" "ERROR: file tree is too deep while scanning VOMS configuration. Max allowed " "nesting is %i." msgstr "" "ERROR: каталог Ñодержит Ñлишком много уровней Ð´Ð»Ñ ÑÐºÐ°Ð½Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð½Ð°Ñтроек " "VOMS. МакÑимально допуÑтимое чиÑло уровней: %i." 
#: src/hed/libs/credential/VOMSConfig.cpp:176
#, c-format
msgid "ERROR: failed to read file %s while scanning VOMS configuration."
msgstr "ERROR: сбой чтения файла %s при сканировании настроек VOMS."

#: src/hed/libs/credential/VOMSConfig.cpp:181
#, c-format
msgid ""
"ERROR: VOMS configuration file %s contains too many lines. Max supported "
"number is %i."
msgstr ""
"ERROR: файл настроек VOMS %s содержит слишком много строк. Максимально "
"допустимое количество: %i."

#: src/hed/libs/credential/VOMSConfig.cpp:188
#, c-format
msgid ""
"ERROR: VOMS configuration file %s contains too long line(s). Max supported "
"length is %i characters."
msgstr ""
"ERROR: файл настроек VOMS %s содержит слишком длинную строку. Максимально "
"допустимая длина: %i знаков."

#: src/hed/libs/credential/VOMSUtil.cpp:171
#, c-format
msgid "Failed to create OpenSSL object %s %s - %u %s"
msgstr "Сбой создания объекта OpenSSL %s %s - %u %s"

#: src/hed/libs/credential/VOMSUtil.cpp:179
#, c-format
msgid "Failed to obtain OpenSSL identifier for %s"
msgstr "Сбой извлечения идентификатора OpenSSL для %s"

#: src/hed/libs/credential/VOMSUtil.cpp:332
#, c-format
msgid "VOMS: create FQAN: %s"
msgstr "VOMS: составление FQAN: %s"

#: src/hed/libs/credential/VOMSUtil.cpp:370
#, c-format
msgid "VOMS: create attribute: %s"
msgstr "VOMS: создание атрибута: %s"

#: src/hed/libs/credential/VOMSUtil.cpp:651
msgid "VOMS: Can not allocate memory for parsing AC"
msgstr "VOMS: Не удалось зарезервировать память для разбора AC"

#: src/hed/libs/credential/VOMSUtil.cpp:659
msgid "VOMS: Can not allocate memory for storing the order of AC"
msgstr ""
"VOMS: Не удалось зарезервировать память для хранения последовательности AC"

#: src/hed/libs/credential/VOMSUtil.cpp:685
msgid "VOMS: Can not parse AC"
msgstr "VOMS: Не удалось обработать AC"

#: src/hed/libs/credential/VOMSUtil.cpp:715
msgid "VOMS: CA directory or CA file must be provided"
msgstr "VOMS: Необходимо задать каталог или файл сертификационного агентства"

#: src/hed/libs/credential/VOMSUtil.cpp:779
msgid "VOMS: failed to verify AC signature"
msgstr "VOMS: не удалось подтвердить подпись сертификата атрибута"

#: src/hed/libs/credential/VOMSUtil.cpp:848
#, c-format
msgid "VOMS: trust chain to check: %s "
msgstr "VOMS: подтверждается цепочка сертификатов: %s "

#: src/hed/libs/credential/VOMSUtil.cpp:856
#, c-format
msgid ""
"VOMS: the DN in certificate: %s does not match that in trusted DN list: %s"
msgstr ""
"VOMS: Отличительное имя (DN) в сертификате %s не соответствует таковому в "
"доверяемом списке: %s"

#: src/hed/libs/credential/VOMSUtil.cpp:862
#, c-format
msgid ""
"VOMS: the Issuer identity in certificate: %s does not match that in trusted "
"DN list: %s"
msgstr ""
"VOMS: Отличительный признак агентства, выдавшего сертификат %s, не "
"соответствует таковому в доверяемом списке: %s"

#: src/hed/libs/credential/VOMSUtil.cpp:897
#, c-format
msgid "VOMS: The lsc file %s does not exist"
msgstr "VOMS: Файл lsc %s не существует"

#: src/hed/libs/credential/VOMSUtil.cpp:903
#, c-format
msgid "VOMS: The lsc file %s can not be open"
msgstr "VOMS: Файл lsc %s не может быть открыт"

#: src/hed/libs/credential/VOMSUtil.cpp:951
msgid ""
"VOMS: there is no constraints of trusted voms DNs, the certificates stack in "
"AC will not be checked."
msgstr ""
"VOMS: отсутствуют ограничения по отличительным признакам доверяемых VOMS, "
"цепочка сертификатов в сертификате атрибута (AC) не будет проверена."

#: src/hed/libs/credential/VOMSUtil.cpp:984 msgid "VOMS: unable to match certificate chain against VOMS trusted DNs" msgstr "" "VOMS: невозможно найти цепочку Ñертификатов, ÑоответÑтвующую доверÑемым " "отличительным признакам VOMS" #: src/hed/libs/credential/VOMSUtil.cpp:1004 msgid "VOMS: AC signature verification failed" msgstr "VOMS: Ñбой Ð¿Ð¾Ð´Ñ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñи Ñертификата атрибута" #: src/hed/libs/credential/VOMSUtil.cpp:1013 msgid "VOMS: unable to verify certificate chain" msgstr "VOMS: невозможно подтвердить цепочку Ñертификатов" #: src/hed/libs/credential/VOMSUtil.cpp:1019 #, c-format msgid "VOMS: cannot validate AC issuer for VO %s" msgstr "" "VOMS: невозможно удоÑтоверить лицо, выдавшее Ñертификат атрибута Ð´Ð»Ñ " "виртуальной организации %s" #: src/hed/libs/credential/VOMSUtil.cpp:1042 #, c-format msgid "VOMS: directory for trusted service certificates: %s" msgstr "VOMS: директориÑ, ÑÐ¾Ð´ÐµÑ€Ð¶Ð°Ñ‰Ð°Ñ Ñертификаты доверÑемых Ñлужб: %s" #: src/hed/libs/credential/VOMSUtil.cpp:1068 #, c-format msgid "VOMS: Cannot find certificate of AC issuer for VO %s" msgstr "" "VOMS: невозможно найти Ñертификат лица, выдавшего Ñертификат атрибута Ð´Ð»Ñ " "виртуальной организации %s" #: src/hed/libs/credential/VOMSUtil.cpp:1090 msgid "VOMS: Can not find AC_ATTR with IETFATTR type" msgstr "VOMS: Ðевозможно найти AC_ATTR типа IETFATTR" #: src/hed/libs/credential/VOMSUtil.cpp:1097 msgid "VOMS: case of multiple IETFATTR attributes not supported" msgstr "VOMS: иÑпользование множеÑтвенных атрибутов IETFATTR не поддерживаетÑÑ" #: src/hed/libs/credential/VOMSUtil.cpp:1107 msgid "VOMS: case of multiple policyAuthority not supported" msgstr "" "VOMS: иÑпользование множеÑтвенных атрибутов policyAuthority не поддерживаетÑÑ" #: src/hed/libs/credential/VOMSUtil.cpp:1123 msgid "VOMS: the format of policyAuthority is unsupported - expecting URI" msgstr "VOMS: недопуÑтимый формат атрибута policyAuthority - ожидаетÑÑ URI" #: src/hed/libs/credential/VOMSUtil.cpp:1132 msgid "" "VOMS: the format of IETFATTRVAL is not supported - expecting OCTET STRING" msgstr "VOMS: недопуÑтимый формат IETFATTRVAL - ожидаетÑÑ OCTET STRING" #: src/hed/libs/credential/VOMSUtil.cpp:1189 msgid "VOMS: the grantor attribute is empty" msgstr "VOMS: атрибут grantor пуÑÑ‚" #: src/hed/libs/credential/VOMSUtil.cpp:1207 msgid "VOMS: the attribute name is empty" msgstr "VOMS: отÑутÑтвует Ð¸Ð¼Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð°" #: src/hed/libs/credential/VOMSUtil.cpp:1213 #, c-format msgid "VOMS: the attribute value for %s is empty" msgstr "VOMS: отÑутÑвует значение атрибута Ð´Ð»Ñ %s" #: src/hed/libs/credential/VOMSUtil.cpp:1218 msgid "VOMS: the attribute qualifier is empty" msgstr "VOMS: атрибут qualifier пуÑÑ‚" #: src/hed/libs/credential/VOMSUtil.cpp:1250 msgid "" "VOMS: both idcenoRevAvail and authorityKeyIdentifier certificate extensions " "must be present" msgstr "" "VOMS: должны приÑутÑтвовать оба раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ñертификата, idcenoRevAvail и " "authorityKeyIdentifier" #: src/hed/libs/credential/VOMSUtil.cpp:1284 #, c-format msgid "VOMS: FQDN of this host %s does not match any target in AC" msgstr "" "VOMS: FQDN узла %s не ÑоответÑтвует ни одному из назначений в Ñертификате " "атрибута (AC)" #: src/hed/libs/credential/VOMSUtil.cpp:1289 msgid "VOMS: the only supported critical extension of the AC is idceTargets" msgstr "" "VOMS: единÑтвенным поддерживаемым критичеÑким раÑширением атрибута " "Ñертификата (AC) ÑвлÑетÑÑ idceTargets" #: src/hed/libs/credential/VOMSUtil.cpp:1304 msgid "VOMS: failed to parse attributes from AC" msgstr "VOMS: Ñбой 
при разборе атрибутов в Ñертификате атрибута (AC)" #: src/hed/libs/credential/VOMSUtil.cpp:1348 msgid "VOMS: authorityKey is wrong" msgstr "VOMS: неверный authorityKey" #: src/hed/libs/credential/VOMSUtil.cpp:1376 msgid "VOMS: missing AC parts" msgstr "VOMS: отÑутÑтвуют чаÑти AC" #: src/hed/libs/credential/VOMSUtil.cpp:1393 msgid "VOMS: unsupported time format in AC - expecting GENERALIZED TIME" msgstr "" "VOMS: неверный формат времени в Ñертификате атрибута (AC) - ожидаетÑÑ " "GENERALIZED TIME" #: src/hed/libs/credential/VOMSUtil.cpp:1399 msgid "VOMS: AC is not yet valid" msgstr "VOMS: Ñертификат атрибута ещё не дейÑтвителен" #: src/hed/libs/credential/VOMSUtil.cpp:1406 msgid "VOMS: AC has expired" msgstr "VOMS: Ñрок годноÑти AC вышел" #: src/hed/libs/credential/VOMSUtil.cpp:1421 msgid "VOMS: AC is not complete - missing Serial or Issuer information" msgstr "" "VOMS: Сертификат атрибута (AC) неполон - отÑутÑтвует Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾Ð± атрибутах " "Serial и/или Issuer" #: src/hed/libs/credential/VOMSUtil.cpp:1426 #, c-format msgid "VOMS: the holder serial number is: %lx" msgstr "VOMS: Ñерийный номер владельца: %lx" #: src/hed/libs/credential/VOMSUtil.cpp:1427 #, c-format msgid "VOMS: the serial number in AC is: %lx" msgstr "VOMS: Ñерийный номер в Ñертификате атрибута (AC): %lx" #: src/hed/libs/credential/VOMSUtil.cpp:1430 #, c-format msgid "" "VOMS: the holder serial number %lx is not the same as the serial number in " "AC %lx, the holder certificate that is used to create a voms proxy could be " "a proxy certificate with a different serial number as the original EEC cert" msgstr "" "VOMS: Ñерийный номер владельца %lx не Ñовпадает Ñ Ñ‚Ð°ÐºÐ¾Ð²Ñ‹Ð¼ в Ñертификате " "атрибута (AC) %lx; Ñертификат, иÑпользуемый Ð´Ð»Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти VOMS, " "может быть доверенноÑтью Ñ Ñерийным номером, отличным от изначального " "Ñертификата" #: src/hed/libs/credential/VOMSUtil.cpp:1439 msgid "VOMS: the holder information in AC is wrong" msgstr "VOMS: Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ владельце в Ñертификате атрибута (AC)" #: src/hed/libs/credential/VOMSUtil.cpp:1461 #, c-format msgid "VOMS: DN of holder in AC: %s" msgstr "VOMS: DN владельца в Ñертификате атрибута (AC): %s" #: src/hed/libs/credential/VOMSUtil.cpp:1462 #, c-format msgid "VOMS: DN of holder: %s" msgstr "VOMS: DN владельца: %s" #: src/hed/libs/credential/VOMSUtil.cpp:1463 #, c-format msgid "VOMS: DN of issuer: %s" msgstr "VOMS: DN Ñмитента: %s" #: src/hed/libs/credential/VOMSUtil.cpp:1470 msgid "" "VOMS: the holder name in AC is not related to the distinguished name in " "holder certificate" msgstr "" "VOMS: Ð¸Ð¼Ñ Ð²Ð»Ð°Ð´ÐµÐ»ÑŒÑ†Ð° в Ñертификате атрибута (AC) не имеет Ð¾Ñ‚Ð½Ð¾ÑˆÐµÐ½Ð¸Ñ Ðº " "отличительному имени в Ñертификате владельца" #: src/hed/libs/credential/VOMSUtil.cpp:1482 #: src/hed/libs/credential/VOMSUtil.cpp:1489 msgid "VOMS: the holder issuerUID is not the same as that in AC" msgstr "" "VOMS: атрибут issuerUID в Ñертификате владельца не Ñовпадает Ñ Ñ‚Ð°ÐºÐ¾Ð²Ñ‹Ð¼ в " "Ñертификате атрибута (AC)" #: src/hed/libs/credential/VOMSUtil.cpp:1502 msgid "VOMS: the holder issuer name is not the same as that in AC" msgstr "" "VOMS: Ð¸Ð¼Ñ Ð°Ð³ÐµÐ½Ñ‚Ñтва, выдавшего Ñертификат, не Ñовпадает Ñ Ñ‚Ð°ÐºÐ¾Ð²Ñ‹Ð¼ в " "Ñертификате атрибута (AC)" #: src/hed/libs/credential/VOMSUtil.cpp:1512 msgid "VOMS: the issuer information in AC is wrong" msgstr "" "VOMS: Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾Ð± агентÑтве, выдавшем Ñертификат, в Ñертификате " "атрибута (AC)" #: src/hed/libs/credential/VOMSUtil.cpp:1520 #, c-format msgid 
"VOMS: the issuer name %s is not the same as that in AC - %s" msgstr "" "VOMS: Ð¸Ð¼Ñ Ð°Ð³ÐµÐ½Ñ‚Ñтва, выдавшего Ñертификат - %s - не Ñовпадает Ñ Ñ‚Ð°ÐºÐ¾Ð²Ñ‹Ð¼ в " "Ñертификате атрибута (AC) - %s" #: src/hed/libs/credential/VOMSUtil.cpp:1528 msgid "" "VOMS: the serial number of AC INFO is too long - expecting no more than 20 " "octets" msgstr "" "VOMS: Ñлишком длинный Ñерийный номер AC INFO - ожидаетÑÑ Ð½Ðµ более 20-и " "октетов" #: src/hed/libs/credential/VOMSUtil.cpp:1558 #: src/hed/libs/credential/VOMSUtil.cpp:1566 #: src/hed/libs/credential/VOMSUtil.cpp:1574 #: src/hed/libs/credential/VOMSUtil.cpp:1582 #: src/hed/libs/credential/VOMSUtil.cpp:1605 msgid "VOMS: unable to extract VO name from AC" msgstr "" "VOMS: невозможно извлечь название виртуальной организации из Ñертификата " "атрибута (AC)" #: src/hed/libs/credential/VOMSUtil.cpp:1596 #, c-format msgid "VOMS: unable to determine hostname of AC from VO name: %s" msgstr "" "VOMS: невозможно определить название узла Ñертификата атрибута (AC) из " "Ð½Ð°Ð·Ð²Ð°Ð½Ð¸Ñ Ð²Ð¸Ñ€Ñ‚ÑƒÐ°Ð»ÑŒÐ½Ð¾Ð¹ организации: %s" #: src/hed/libs/credential/VOMSUtil.cpp:1615 msgid "VOMS: can not verify the signature of the AC" msgstr "VOMS: не удалоÑÑŒ подтвердить подпиÑÑŒ Ñертификата атрибута" #: src/hed/libs/credential/VOMSUtil.cpp:1621 msgid "VOMS: problems while parsing information in AC" msgstr "VOMS: проблемы при разборке информации в AC" #: src/hed/libs/credential/test/VOMSUtilTest.cpp:128 #, c-format msgid "Line %d.%d of the attributes returned: %s" msgstr "Строка %d.%d атрибутов выдала: %s" #: src/hed/libs/credentialstore/ClientVOMS.cpp:149 msgid "voms" msgstr "VOMS" #: src/hed/libs/credentialstore/CredentialStore.cpp:194 #: src/hed/libs/credentialstore/CredentialStore.cpp:245 #: src/hed/libs/credentialstore/CredentialStore.cpp:273 #: src/hed/libs/credentialstore/CredentialStore.cpp:336 #: src/hed/libs/credentialstore/CredentialStore.cpp:376 #: src/hed/libs/credentialstore/CredentialStore.cpp:406 #, c-format msgid "MyProxy failure: %s" msgstr "Сбой MyProxy: %s" #: src/hed/libs/crypto/OpenSSL.cpp:71 #, c-format msgid "SSL error: %d - %s:%s:%s" msgstr "Ошибка SSL: %d - %s:%s:%s" #: src/hed/libs/crypto/OpenSSL.cpp:84 msgid "SSL locks not initialized" msgstr "Блокировка SSL не инициализирована" #: src/hed/libs/crypto/OpenSSL.cpp:88 #, c-format msgid "wrong SSL lock requested: %i of %i: %i - %s" msgstr "Запрошена Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð±Ð»Ð¾ÐºÐ¸Ñ€Ð¾Ð²ÐºÐ° SSL: %i из %i: %i - %s" #: src/hed/libs/crypto/OpenSSL.cpp:111 msgid "Failed to lock arccrypto library in memory" msgstr "Ðевозможно заблокировать библиотеку arccrypto в памÑти" #: src/hed/libs/crypto/OpenSSL.cpp:116 src/hed/libs/crypto/OpenSSL.cpp:130 msgid "Failed to initialize OpenSSL library" msgstr "Ошибка инициализации библиотеки OpenSSL" #: src/hed/libs/crypto/OpenSSL.cpp:152 msgid "Number of OpenSSL locks changed - reinitializing" msgstr "ИзменилоÑÑŒ чиÑло блокировок OpenSSL - Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¸Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ" #: src/hed/libs/data/DataExternalHelper.cpp:157 msgid "failed to read data tag" msgstr "Ñбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð¼ÐµÑ‚ÐºÐ¸ данных" #: src/hed/libs/data/DataExternalHelper.cpp:161 msgid "waiting for data chunk" msgstr "ожидание куÑка данных" #: src/hed/libs/data/DataExternalHelper.cpp:163 msgid "failed to read data chunk" msgstr "Ñбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ ÐºÑƒÑка данных" #: src/hed/libs/data/DataExternalHelper.cpp:171 #, c-format msgid "data chunk: %llu %llu" msgstr "куÑок данных: %llu %llu" #: src/hed/libs/data/DataExternalHelper.cpp:242 #, c-format msgid "DataMove::Transfer: using supplied checksum %s" 
msgstr "DataMove::Transfer: иÑпользуетÑÑ Ð·Ð°Ð´Ð°Ð½Ð½Ð°Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %s" #: src/hed/libs/data/DataExternalHelper.cpp:361 msgid "Expecting Module, Command and URL provided" msgstr "ОжидаетÑÑ ÑƒÐºÐ°Ð·Ð°Ð½Ð¸Ðµ модулÑ, команды и URL" #: src/hed/libs/data/DataExternalHelper.cpp:368 msgid "Expecting Command module path among arguments" msgstr "Одним из аргументов должен быть путь к Command module" #: src/hed/libs/data/DataExternalHelper.cpp:372 msgid "Expecting Command module name among arguments" msgstr "Одним из аргументов должно быть название Command module" #: src/hed/libs/data/DataMover.cpp:115 msgid "No locations found - probably no more physical instances" msgstr "Ðе найдено раÑположений - возможно, копий больше нет" #: src/hed/libs/data/DataMover.cpp:121 src/hed/libs/data/FileCache.cpp:552 #: src/libs/data-staging/Processor.cpp:443 #: src/libs/data-staging/Processor.cpp:457 #, c-format msgid "Removing %s" msgstr "УдалÑетÑÑ %s" #: src/hed/libs/data/DataMover.cpp:134 msgid "This instance was already deleted" msgstr "Эта ÐºÐ¾Ð¿Ð¸Ñ ÑƒÐ¶Ðµ удалена" #: src/hed/libs/data/DataMover.cpp:140 msgid "Failed to delete physical file" msgstr "Сбой при удалении физичеÑкого файла" #: src/hed/libs/data/DataMover.cpp:151 #, c-format msgid "Removing metadata in %s" msgstr "УдалÑÑŽÑ‚ÑÑ Ð¼ÐµÑ‚Ð°Ð´Ð°Ð½Ð½Ñ‹Ðµ в %s" #: src/hed/libs/data/DataMover.cpp:155 msgid "Failed to delete meta-information" msgstr "Сбой при удалении мета-информации" #: src/hed/libs/data/DataMover.cpp:169 msgid "Failed to remove all physical instances" msgstr "Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð²Ñех фактичеÑких копий" #: src/hed/libs/data/DataMover.cpp:173 #, c-format msgid "Removing logical file from metadata %s" msgstr "УдалÑетÑÑ Ð»Ð¾Ð³Ð¸Ñ‡ÐµÑкий файл из метаданных %s" #: src/hed/libs/data/DataMover.cpp:176 msgid "Failed to delete logical file" msgstr "Сбой при удалении логичеÑкого файла" #: src/hed/libs/data/DataMover.cpp:183 msgid "Failed to remove instance" msgstr "Ðе удалоÑÑŒ удалить копию" #: src/hed/libs/data/DataMover.cpp:232 msgid "DataMover::Transfer : starting new thread" msgstr "DataMover::Transfer : запуÑк нового потока" #: src/hed/libs/data/DataMover.cpp:260 #, c-format msgid "Transfer from %s to %s" msgstr "Передача из %s в %s" #: src/hed/libs/data/DataMover.cpp:262 msgid "Not valid source" msgstr "ÐедейÑтвительный иÑточник" #: src/hed/libs/data/DataMover.cpp:267 msgid "Not valid destination" msgstr "Цель недейÑтвительна" #: src/hed/libs/data/DataMover.cpp:287 src/services/candypond/CandyPond.cpp:304 #, c-format msgid "Couldn't handle certificate: %s" msgstr "Ðе удалоÑÑŒ иÑпользовать Ñертификат: %s" #: src/hed/libs/data/DataMover.cpp:296 src/hed/libs/data/DataMover.cpp:590 #: src/libs/data-staging/Processor.cpp:133 #, c-format msgid "File %s is cached (%s) - checking permissions" msgstr "Файл %s приÑутÑтвует в кÑше (%s) - проверÑетÑÑ Ð´Ð¾Ð¿ÑƒÑк" #: src/hed/libs/data/DataMover.cpp:300 src/hed/libs/data/DataMover.cpp:609 #: src/hed/libs/data/DataMover.cpp:667 src/libs/data-staging/Processor.cpp:152 msgid "Permission checking passed" msgstr "Проверка допуÑка пройдена" #: src/hed/libs/data/DataMover.cpp:301 src/hed/libs/data/DataMover.cpp:628 #: src/hed/libs/data/DataMover.cpp:1144 msgid "Linking/copying cached file" msgstr "Подцепление/копирование файла из кÑша" #: src/hed/libs/data/DataMover.cpp:325 #, c-format msgid "No locations for source found: %s" msgstr "Ðе найдено раÑположений Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° иÑточника: %s" #: src/hed/libs/data/DataMover.cpp:329 #, c-format msgid "Failed to resolve source: %s" msgstr "Ðе удалоÑÑŒ 
определить иÑточник: %s" #: src/hed/libs/data/DataMover.cpp:341 src/hed/libs/data/DataMover.cpp:409 #, c-format msgid "No locations for destination found: %s" msgstr "Ðе найдено физичеÑких адреÑов Ð´Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ: %s" #: src/hed/libs/data/DataMover.cpp:346 src/hed/libs/data/DataMover.cpp:413 #, c-format msgid "Failed to resolve destination: %s" msgstr "Ðе удалоÑÑŒ определить назначение: %s" #: src/hed/libs/data/DataMover.cpp:361 #, c-format msgid "No locations for destination different from source found: %s" msgstr "Ðе найдено раÑположений Ð´Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ, отличающихÑÑ Ð¾Ñ‚ иÑточника: %s" #: src/hed/libs/data/DataMover.cpp:382 #, c-format msgid "DataMover::Transfer: trying to destroy/overwrite destination: %s" msgstr "DataMover::Transfer: попытка Ñтереть/перезапиÑать назначение: %s" #: src/hed/libs/data/DataMover.cpp:393 #, c-format msgid "Failed to delete %s but will still try to copy" msgstr "Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ %s, вÑÑ‘ равно попытаемÑÑ Ñкопировать" #: src/hed/libs/data/DataMover.cpp:396 #, c-format msgid "Failed to delete %s" msgstr "Сбой при удалении %s" #: src/hed/libs/data/DataMover.cpp:423 #, c-format msgid "Deleted but still have locations at %s" msgstr "Удалён, но оÑталиÑÑŒ копии в %s" #: src/hed/libs/data/DataMover.cpp:435 msgid "DataMover: cycle" msgstr "DataMover: цикл" #: src/hed/libs/data/DataMover.cpp:437 msgid "DataMover: no retries requested - exit" msgstr "DataMover: не запрошено повторных попыток, выход" #: src/hed/libs/data/DataMover.cpp:442 msgid "DataMover: source out of tries - exit" msgstr "DataMover: закончилиÑÑŒ попытки поиÑка иÑточника - завершение" #: src/hed/libs/data/DataMover.cpp:444 msgid "DataMover: destination out of tries - exit" msgstr "DataMover: закончилиÑÑŒ попытки поиÑка назначений - завершение" #: src/hed/libs/data/DataMover.cpp:452 #, c-format msgid "Real transfer from %s to %s" msgstr "ФактичеÑÐºÐ°Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð° из %s в %s" #: src/hed/libs/data/DataMover.cpp:478 #, c-format msgid "Creating buffer: %lli x %i" msgstr "СоздаётÑÑ Ð±ÑƒÑ„ÐµÑ€: %lli x %i" #: src/hed/libs/data/DataMover.cpp:494 #, c-format msgid "DataMove::Transfer: no checksum calculation for %s" msgstr "DataMove::Transfer: ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма Ð´Ð»Ñ %s не будет вычиÑлена" #: src/hed/libs/data/DataMover.cpp:499 #, c-format msgid "DataMove::Transfer: using supplied checksum %s:%s" msgstr "DataMove::Transfer: иÑпользуетÑÑ Ð·Ð°Ð´Ð°Ð½Ð½Ð°Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %s:%s" #: src/hed/libs/data/DataMover.cpp:523 #, c-format msgid "DataMove::Transfer: will calculate %s checksum" msgstr "DataMove::Transfer: будет вычиÑлена ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма Ð´Ð»Ñ %s" #: src/hed/libs/data/DataMover.cpp:528 msgid "Buffer creation failed !" msgstr "Ðевозможно Ñоздать буфер!" 
#: src/hed/libs/data/DataMover.cpp:551 #, c-format msgid "URL is mapped to: %s" msgstr "URL поÑтавлен в ÑоответÑтвие к: %s" #: src/hed/libs/data/DataMover.cpp:579 src/hed/libs/data/DataMover.cpp:637 #: src/libs/data-staging/Processor.cpp:88 msgid "Cached file is locked - should retry" msgstr "Файл в кÑше заблокирован - попытаемÑÑ Ð·Ð°Ð½Ð¾Ð²Ð¾" #: src/hed/libs/data/DataMover.cpp:584 src/libs/data-staging/Processor.cpp:106 msgid "Failed to initiate cache" msgstr "Сбой при инициализации кÑша" #: src/hed/libs/data/DataMover.cpp:601 src/services/candypond/CandyPond.cpp:379 #, c-format msgid "Permission checking failed: %s" msgstr "Проверка прав доÑтупа не удалаÑÑŒ: %s" #: src/hed/libs/data/DataMover.cpp:603 src/hed/libs/data/DataMover.cpp:661 #: src/hed/libs/data/DataMover.cpp:681 src/hed/libs/data/DataMover.cpp:692 msgid "source.next_location" msgstr "source.next_location" #: src/hed/libs/data/DataMover.cpp:617 src/libs/data-staging/Processor.cpp:157 #, c-format msgid "Source modification date: %s" msgstr "Дата Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ Ð¸Ñточника: %s" #: src/hed/libs/data/DataMover.cpp:618 src/libs/data-staging/Processor.cpp:158 #, c-format msgid "Cache creation date: %s" msgstr "Дата ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÐºÑша: %s" #: src/hed/libs/data/DataMover.cpp:624 src/libs/data-staging/Processor.cpp:163 msgid "Cached file is outdated, will re-download" msgstr "Файл в кÑше уÑтарел, будет загружен заново" #: src/hed/libs/data/DataMover.cpp:627 src/libs/data-staging/Processor.cpp:168 msgid "Cached copy is still valid" msgstr "ÐšÐ¾Ð¿Ð¸Ñ Ð² кÑше ещё дейÑтвительна" #: src/hed/libs/data/DataMover.cpp:654 msgid "URL is mapped to local access - checking permissions on original URL" msgstr "" "URL ÑопоÑтавлен локальному файлу - проверка прав доÑтупа к иÑходному URL" #: src/hed/libs/data/DataMover.cpp:658 #, c-format msgid "Permission checking on original URL failed: %s" msgstr "Сбой проверки прав доÑтупа к иÑходному URL: %s" #: src/hed/libs/data/DataMover.cpp:669 msgid "Linking local file" msgstr "ПодцеплÑетÑÑ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ñ‹Ð¹ файл" #: src/hed/libs/data/DataMover.cpp:689 #, c-format msgid "Failed to make symbolic link %s to %s : %s" msgstr "Сбой при Ñоздании ÑимволичеÑкой ÑÑылки %s на %s : %s" #: src/hed/libs/data/DataMover.cpp:698 #, c-format msgid "Failed to change owner of symbolic link %s to %i" msgstr "Ðевозможно заменить владельца Ñимвольной ÑÑылки %s на %i" #: src/hed/libs/data/DataMover.cpp:709 #, c-format msgid "cache file: %s" msgstr "кÑш-файл: %s" #: src/hed/libs/data/DataMover.cpp:735 #, c-format msgid "Failed to stat source %s" msgstr "Сбой проверки ÑтатуÑа иÑточника %s" #: src/hed/libs/data/DataMover.cpp:737 src/hed/libs/data/DataMover.cpp:750 #: src/hed/libs/data/DataMover.cpp:781 src/hed/libs/data/DataMover.cpp:800 #: src/hed/libs/data/DataMover.cpp:822 src/hed/libs/data/DataMover.cpp:839 #: src/hed/libs/data/DataMover.cpp:996 src/hed/libs/data/DataMover.cpp:1028 #: src/hed/libs/data/DataMover.cpp:1038 src/hed/libs/data/DataMover.cpp:1111 msgid "(Re)Trying next source" msgstr "Следующий иÑточник" #: src/hed/libs/data/DataMover.cpp:748 #, c-format msgid "Meta info of source and location do not match for %s" msgstr "Мета-Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¸Ñточника и Ð°Ð´Ñ€ÐµÑ Ð½Ðµ ÑоответÑтвуют друг другу Ð´Ð»Ñ %s" #: src/hed/libs/data/DataMover.cpp:760 #, c-format msgid "" "Replica %s has high latency, but no more sources exist so will use this one" msgstr "" "ÐšÐ¾Ð¿Ð¸Ñ %s доÑтупна Ñ Ð±Ð¾Ð»ÑŒÑˆÐ¾Ð¹ задержкой, но вÑÑ‘ равно будет иÑпользоватьÑÑ Ð² " "ÑвÑзи Ñ Ð¾Ñ‚ÑутÑтвием других иÑточников" #: 
src/hed/libs/data/DataMover.cpp:764 #, c-format msgid "Replica %s has high latency, trying next source" msgstr "ÐšÐ¾Ð¿Ð¸Ñ %s доÑтупна Ñ Ð±Ð¾Ð»ÑŒÑˆÐ¾Ð¹ задержкой, пробуетÑÑ Ð´Ñ€ÑƒÐ³Ð¾Ð¹ иÑточник" #: src/hed/libs/data/DataMover.cpp:776 src/hed/libs/data/DataMover.cpp:796 #: src/libs/data-staging/DataStagingDelivery.cpp:344 #: src/libs/data-staging/DataStagingDelivery.cpp:367 #, c-format msgid "Using internal transfer method of %s" msgstr "ИÑпользуетÑÑ Ð²Ð½ÑƒÑ‚Ñ€ÐµÐ½Ð½Ð¸Ð¹ метод передачи данных %s" #: src/hed/libs/data/DataMover.cpp:788 src/hed/libs/data/DataMover.cpp:805 #: src/libs/data-staging/DataStagingDelivery.cpp:360 #: src/libs/data-staging/DataStagingDelivery.cpp:381 #, c-format msgid "Internal transfer method is not supported for %s" msgstr "Внутренний метод передачи данных не поддерживаетÑÑ Ð´Ð»Ñ %s" #: src/hed/libs/data/DataMover.cpp:812 msgid "Using buffered transfer method" msgstr "ИÑпользуетÑÑ Ð±ÑƒÑ„ÐµÑ€Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ñ‹Ð¹ метод передачи данных" #: src/hed/libs/data/DataMover.cpp:816 #, c-format msgid "Failed to prepare source: %s" msgstr "Ðе удалоÑÑŒ подготовить иÑточник: %s" #: src/hed/libs/data/DataMover.cpp:830 #, c-format msgid "Failed to start reading from source: %s" msgstr "Ðе удалоÑÑŒ начать чтение из иÑточника: %s" #: src/hed/libs/data/DataMover.cpp:849 msgid "Metadata of source and destination are different" msgstr "Метаданные иÑточника и Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð½Ðµ Ñовпадают" #: src/hed/libs/data/DataMover.cpp:868 #, c-format msgid "Failed to preregister destination: %s" msgstr "Ðе удалоÑÑŒ предварительно зарегиÑтрировать назначение: %s" #: src/hed/libs/data/DataMover.cpp:873 src/hed/libs/data/DataMover.cpp:1135 msgid "destination.next_location" msgstr "destination.next_location" #: src/hed/libs/data/DataMover.cpp:884 #, c-format msgid "Failed to prepare destination: %s" msgstr "Ðе удалоÑÑŒ подготовить назначение: %s" #: src/hed/libs/data/DataMover.cpp:891 src/hed/libs/data/DataMover.cpp:914 #: src/hed/libs/data/DataMover.cpp:1132 #, c-format msgid "" "Failed to unregister preregistered lfn. You may need to unregister it " "manually: %s" msgstr "" "Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¿Ñ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ð¹ запиÑи LFN. Возможно, необходимо удалить её " "вручную: %s" #: src/hed/libs/data/DataMover.cpp:895 src/hed/libs/data/DataMover.cpp:917 #: src/hed/libs/data/DataMover.cpp:1005 src/hed/libs/data/DataMover.cpp:1021 #: src/hed/libs/data/DataMover.cpp:1044 src/hed/libs/data/DataMover.cpp:1089 msgid "(Re)Trying next destination" msgstr "Следующее назначение" #: src/hed/libs/data/DataMover.cpp:906 #, c-format msgid "Failed to start writing to destination: %s" msgstr "Сбой начала запиÑи в назначение: %s" #: src/hed/libs/data/DataMover.cpp:929 msgid "Failed to start writing to cache" msgstr "Сбой начала запиÑи в кÑш" #: src/hed/libs/data/DataMover.cpp:937 src/hed/libs/data/DataMover.cpp:983 #: src/hed/libs/data/DataMover.cpp:1156 msgid "" "Failed to unregister preregistered lfn. You may need to unregister it " "manually" msgstr "" "Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¿Ñ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ð¹ запиÑи LFN. 
Возможно, необходимо удалить её " "вручную" #: src/hed/libs/data/DataMover.cpp:944 msgid "Waiting for buffer" msgstr "Ожидание буфера" #: src/hed/libs/data/DataMover.cpp:951 #, c-format msgid "Failed updating timestamp on cache lock file %s for file %s: %s" msgstr "" "Сбой Ð¾Ð±Ð½Ð¾Ð²Ð»ÐµÐ½Ð¸Ñ Ð¼ÐµÑ‚ÐºÐ¸ времени файла блокировки кÑша %s Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° %s: %s" #: src/hed/libs/data/DataMover.cpp:956 #, c-format msgid "buffer: read EOF : %s" msgstr "буфер: чтение конца файла : %s" #: src/hed/libs/data/DataMover.cpp:957 #, c-format msgid "buffer: write EOF: %s" msgstr "буфер: запиÑÑŒ конца файла: %s" #: src/hed/libs/data/DataMover.cpp:958 #, c-format msgid "buffer: error : %s, read: %s, write: %s" msgstr "буфер: ошибка: %s, чтение: %s, запиÑÑŒ: %s" #: src/hed/libs/data/DataMover.cpp:959 msgid "Closing read channel" msgstr "ЗакрываетÑÑ ÐºÐ°Ð½Ð°Ð» чтениÑ" #: src/hed/libs/data/DataMover.cpp:966 msgid "Closing write channel" msgstr "ЗакрываетÑÑ ÐºÐ°Ð½Ð°Ð» передачи" #: src/hed/libs/data/DataMover.cpp:974 msgid "Failed to complete writing to destination" msgstr "Сбой Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ð·Ð°Ð¿Ð¸Ñи в назначение" #: src/hed/libs/data/DataMover.cpp:988 msgid "Transfer cancelled successfully" msgstr "Передача файлов уÑпешно отменена" #: src/hed/libs/data/DataMover.cpp:1033 msgid "Cause of failure unclear - choosing randomly" msgstr "Причина ÑÐ±Ð¾Ñ Ð½Ðµ уÑтановлена - выбираетÑÑ ÑÐ»ÑƒÑ‡Ð°Ð¹Ð½Ð°Ñ ÐºÐ¾Ð¿Ð¸Ñ" #: src/hed/libs/data/DataMover.cpp:1076 #, c-format msgid "" "Checksum mismatch between checksum given as meta option (%s:%s) and " "calculated checksum (%s)" msgstr "" "ÐеÑовпадение контрольной Ñуммы, указанной в метаданных (%s:%s), Ñ " "вычиÑленной (%s)" #: src/hed/libs/data/DataMover.cpp:1082 msgid "" "Failed to unregister preregistered lfn, You may need to unregister it " "manually" msgstr "" "Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¿Ñ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ð¹ запиÑи LFN. 
Возможно, необходимо удалить её " "вручную" #: src/hed/libs/data/DataMover.cpp:1086 msgid "Failed to delete destination, retry may fail" msgstr "" "Ðе удалоÑÑŒ уничтожить назначение, новые попытки могут быть безуÑпешными" #: src/hed/libs/data/DataMover.cpp:1096 msgid "Cannot compare empty checksum" msgstr "Ðевозможно Ñравнить пуÑтую контрольную Ñумму" #: src/hed/libs/data/DataMover.cpp:1103 #: src/libs/data-staging/DataStagingDelivery.cpp:538 msgid "Checksum type of source and calculated checksum differ, cannot compare" msgstr "" "Тип контрольной Ñуммы иÑточника отличаетÑÑ Ð¾Ñ‚ вычиÑленной, Ñравнение " "невозможно" #: src/hed/libs/data/DataMover.cpp:1105 #, c-format msgid "Checksum mismatch between calcuated checksum %s and source checksum %s" msgstr "" "ÐеÑовпадение вычиÑленной контрольной Ñуммы %s и контрольной Ñуммы иÑточника " "%s" #: src/hed/libs/data/DataMover.cpp:1116 #: src/libs/data-staging/DataStagingDelivery.cpp:554 #, c-format msgid "Calculated transfer checksum %s matches source checksum" msgstr "" "ВычиÑÐ»ÐµÐ½Ð½Ð°Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма передачи %s Ñовпадает Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð¾Ð¹ Ñуммой " "иÑточника" #: src/hed/libs/data/DataMover.cpp:1122 #: src/libs/data-staging/DataStagingDelivery.cpp:557 msgid "Checksum not computed" msgstr "ÐšÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма не вычиÑлена" #: src/hed/libs/data/DataMover.cpp:1128 #, c-format msgid "Failed to postregister destination %s" msgstr "Ðе удалоÑÑŒ зарегиÑтрировать назначение: %s" #: src/hed/libs/data/DataPoint.cpp:84 #, c-format msgid "Invalid URL option: %s" msgstr "ÐедопуÑÑ‚Ð¸Ð¼Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ URL: %s" #: src/hed/libs/data/DataPoint.cpp:263 #, c-format msgid "Skipping invalid URL option %s" msgstr "ПропуÑкаетÑÑ Ð½ÐµÐ´Ð¾Ð¿ÑƒÑÑ‚Ð¸Ð¼Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ URL %s" #: src/hed/libs/data/DataPoint.cpp:278 msgid "" "Third party transfer was requested but the corresponding plugin could\n" " not be loaded. Is the GFAL plugin installed? If not, please install " "the\n" " packages 'nordugrid-arc-plugins-gfal' and 'gfal2-all'. Depending on\n" " your type of installation the package names might differ." msgstr "" "Запрошена переÑылка файла третьим лицом, но необходимый\n" " подключаемый модуль не был подгружен. УÑтанавливали ли\n" " Ð’Ñ‹ модуль GFAL? ЕÑли нет, пожалуйÑта, уÑтановите пакеты\n" " 'nordugrid-arc-plugins-gfal' и 'gfal2-all'. Эти Ð½Ð°Ð·Ð²Ð°Ð½Ð¸Ñ Ð¼Ð¾Ð³ÑƒÑ‚ " "завиÑеть\n" " от типа вашего диÑтрибутива." 
#: src/hed/libs/data/DataPoint.cpp:296 #, c-format msgid "Failed to load plugin for URL %s" msgstr "Сбой подгрузки подключаемого Ð¼Ð¾Ð´ÑƒÐ»Ñ Ð´Ð»Ñ URL %s" #: src/hed/libs/data/DataPointDelegate.cpp:75 #: src/hed/libs/data/DataPointDelegate.cpp:76 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2011 #, c-format msgid "Starting helper process: %s" msgstr "ЗапуÑкаетÑÑ Ð²Ñпомогательный процеÑÑ: %s" #: src/hed/libs/data/DataPointDelegate.cpp:180 msgid "start_reading" msgstr "start_reading" #: src/hed/libs/data/DataPointDelegate.cpp:189 msgid "start_reading: helper start failed" msgstr "start_reading: Ñбой запуÑка аÑÑиÑтента" #: src/hed/libs/data/DataPointDelegate.cpp:197 msgid "start_reading: thread create failed" msgstr "start_reading: Ñбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¿Ð¾Ñ‚Ð¾ÐºÐ°" #: src/hed/libs/data/DataPointDelegate.cpp:213 msgid "StopReading: aborting connection" msgstr "StopReading: прерывание ÑвÑзи" #: src/hed/libs/data/DataPointDelegate.cpp:218 msgid "stop_reading: waiting for transfer to finish" msgstr "stop_reading: ожидание Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ð¿ÐµÑ€ÐµÑылки" #: src/hed/libs/data/DataPointDelegate.cpp:221 #, c-format msgid "stop_reading: exiting: %s" msgstr "stop_reading: выход: %s" #: src/hed/libs/data/DataPointDelegate.cpp:231 msgid "read_thread: get and register buffers" msgstr "read_thread: получение и региÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð±ÑƒÑ„ÐµÑ€Ð¾Ð²" #: src/hed/libs/data/DataPointDelegate.cpp:239 #, c-format msgid "read_thread: for_read failed - aborting: %s" msgstr "read_thread: Ñбой for_read - прерывание: %s" #: src/hed/libs/data/DataPointDelegate.cpp:247 #, c-format msgid "read_thread: non-data tag '%c' from external process - leaving: %s" msgstr "" "read_thread: неÑоответÑÑ‚Ð²ÑƒÑŽÑ‰Ð°Ñ Ð´Ð°Ð½Ð½Ñ‹Ð¼ метка '%c' из внешнего процеÑÑа - " "выход: %s" #: src/hed/libs/data/DataPointDelegate.cpp:256 #, c-format msgid "read_thread: data read error from external process - aborting: %s" msgstr "" "read_thread: ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ… из внешнего процеÑÑа - прерывание: %s" #: src/hed/libs/data/DataPointDelegate.cpp:264 msgid "read_thread: exiting" msgstr "read_thread: выход" #: src/hed/libs/data/DataPointDelegate.cpp:285 msgid "start_writing_ftp: helper start failed" msgstr "start_writing_ftp: Ñбой запуÑка аÑÑиÑтента" #: src/hed/libs/data/DataPointDelegate.cpp:293 msgid "start_writing_ftp: thread create failed" msgstr "start_writing_ftp: Ñбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¿Ð¾Ñ‚Ð¾ÐºÐ°" #: src/hed/libs/data/DataPointDelegate.cpp:343 msgid "No checksum information possible" msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ контрольных Ñуммах недоÑтупна" #: src/hed/libs/data/DataPointDelegate.cpp:359 msgid "write_thread: get and pass buffers" msgstr "write_thread: получение и передача буферов" #: src/hed/libs/data/DataPointDelegate.cpp:366 msgid "write_thread: for_write failed - aborting" msgstr "write_thread: Ñбой for_write - прерывание" #: src/hed/libs/data/DataPointDelegate.cpp:370 msgid "write_thread: for_write eof" msgstr "write_thread: конец файла for_write" #: src/hed/libs/data/DataPointDelegate.cpp:384 msgid "write_thread: out failed - aborting" msgstr "write_thread: Ñбой вывода - прерывание" #: src/hed/libs/data/DataPointDelegate.cpp:392 msgid "write_thread: exiting" msgstr "write_thread: выход" #: src/hed/libs/data/DataPointIndex.cpp:91 #, c-format msgid "Can't handle location %s" msgstr "Ðевозможно иÑпользовать Ð°Ð´Ñ€ÐµÑ %s" #: src/hed/libs/data/DataPointIndex.cpp:183 msgid "Sorting replicas according to URL map" msgstr "Копии ÑортируютÑÑ Ð² ÑоответÑтвии Ñ Ñ€Ð°Ñположением URL" #: src/hed/libs/data/DataPointIndex.cpp:187 #, 
c-format msgid "Replica %s is mapped" msgstr "ÐšÐ¾Ð¿Ð¸Ñ %s локализована" #: src/hed/libs/data/DataPointIndex.cpp:195 #, c-format msgid "Sorting replicas according to preferred pattern %s" msgstr "Копии ÑортируютÑÑ Ð² ÑоответÑтвии Ñ Ð¿Ñ€ÐµÐ´Ð¿Ð¾Ñ‡Ð¸Ñ‚Ð°ÐµÐ¼Ñ‹Ð¼ шаблоном %s" #: src/hed/libs/data/DataPointIndex.cpp:218 #: src/hed/libs/data/DataPointIndex.cpp:236 #, c-format msgid "Excluding replica %s matching pattern !%s" msgstr "ОтбраÑываетÑÑ ÐºÐ¾Ð¿Ð¸Ñ %s ÑоответÑÑ‚Ð²ÑƒÑŽÑ‰Ð°Ñ ÑˆÐ°Ð±Ð»Ð¾Ð½Ñƒ !%s" #: src/hed/libs/data/DataPointIndex.cpp:229 #, c-format msgid "Replica %s matches host pattern %s" msgstr "ÐšÐ¾Ð¿Ð¸Ñ %s ÑоответÑтвует шаблону узла %s" #: src/hed/libs/data/DataPointIndex.cpp:247 #, c-format msgid "Replica %s matches pattern %s" msgstr "ÐšÐ¾Ð¿Ð¸Ñ %s ÑоответÑтвует шаблону %s" #: src/hed/libs/data/DataPointIndex.cpp:263 #, c-format msgid "Replica %s doesn't match preferred pattern or URL map" msgstr "ÐšÐ¾Ð¿Ð¸Ñ %s не ÑоответÑтвует предпочитаемому шаблону или раÑположению URL" #: src/hed/libs/data/DataStatus.cpp:12 msgid "Operation completed successfully" msgstr "ÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ñ Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð° уÑпешно" #: src/hed/libs/data/DataStatus.cpp:13 msgid "Source is invalid URL" msgstr "ÐедопуÑтимый URL иÑточника" #: src/hed/libs/data/DataStatus.cpp:14 msgid "Destination is invalid URL" msgstr "ÐедопуÑтимый URL цели" #: src/hed/libs/data/DataStatus.cpp:15 msgid "Resolving of index service for source failed" msgstr "Сбой Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð° Ð´Ð»Ñ Ð¸Ñточника" #: src/hed/libs/data/DataStatus.cpp:16 msgid "Resolving of index service for destination failed" msgstr "Сбой Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð° Ð´Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ" #: src/hed/libs/data/DataStatus.cpp:17 msgid "Can't read from source" msgstr "Ðе удалоÑÑŒ Ñчитать Ñ Ð¸Ñточника" #: src/hed/libs/data/DataStatus.cpp:18 msgid "Can't write to destination" msgstr "Ðе удалоÑÑŒ запиÑать в цель" #: src/hed/libs/data/DataStatus.cpp:19 msgid "Failed while reading from source" msgstr "Ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð¸Ð· иÑточника" #: src/hed/libs/data/DataStatus.cpp:20 msgid "Failed while writing to destination" msgstr "Ошибка при запиÑи в цель" #: src/hed/libs/data/DataStatus.cpp:21 msgid "Failed while transferring data" msgstr "Сбой при передаче данных" #: src/hed/libs/data/DataStatus.cpp:22 msgid "Failed while finishing reading from source" msgstr "Сбой Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð¸Ð· иÑточника" #: src/hed/libs/data/DataStatus.cpp:23 msgid "Failed while finishing writing to destination" msgstr "Сбой при завершении запиÑи в назначение" #: src/hed/libs/data/DataStatus.cpp:24 msgid "First stage of registration to index service failed" msgstr "Сбой первого шага региÑтрации в каталоге" #: src/hed/libs/data/DataStatus.cpp:25 msgid "Last stage of registration to index service failed" msgstr "Сбой поÑледнего шага региÑтрации в каталоге" #: src/hed/libs/data/DataStatus.cpp:26 msgid "Unregistering from index service failed" msgstr "Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ñ€ÐµÐ³Ð¸Ñтрации из каталога" #: src/hed/libs/data/DataStatus.cpp:27 msgid "Error in caching procedure" msgstr "Ошибка кÑшированиÑ" #: src/hed/libs/data/DataStatus.cpp:28 msgid "Error due to expiration of provided credentials" msgstr "" "Ошибка в ÑвÑзи Ñ Ð¸Ñтечением Ñрока годноÑти предоÑтавленных параметров доÑтупа" #: src/hed/libs/data/DataStatus.cpp:29 msgid "Delete error" msgstr "Ошибка удалениÑ" #: src/hed/libs/data/DataStatus.cpp:30 msgid "No valid location available" msgstr "Ðет допуÑтимых адреÑов" #: src/hed/libs/data/DataStatus.cpp:31 msgid "Location already exists" msgstr "Такой 
файл уже ÑущеÑтвует" #: src/hed/libs/data/DataStatus.cpp:32 msgid "Operation not supported for this kind of URL" msgstr "Эта Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ñ Ð½Ðµ поддерживаетÑÑ Ð´Ð»Ñ Ð´Ð°Ð½Ð½Ð¾Ð³Ð¾ типа URL" #: src/hed/libs/data/DataStatus.cpp:33 msgid "Feature is not implemented" msgstr "Эта Ñ„ÑƒÐ½ÐºÑ†Ð¸Ñ Ð½Ðµ реализована" #: src/hed/libs/data/DataStatus.cpp:34 msgid "Already reading from source" msgstr "Ð§Ñ‚ÐµÐ½Ð¸Ñ Ð¸Ð· иÑточника уже в процеÑÑе" #: src/hed/libs/data/DataStatus.cpp:35 msgid "Already writing to destination" msgstr "ЗапиÑÑŒ в цель уже в процеÑÑе" #: src/hed/libs/data/DataStatus.cpp:36 msgid "Read access check failed" msgstr "Ðе удалоÑÑŒ подтвердить наличие доÑтупа на чтение" #: src/hed/libs/data/DataStatus.cpp:37 msgid "Directory listing failed" msgstr "Ðе удалоÑÑŒ вывеÑти ÑпиÑок каталога" #: src/hed/libs/data/DataStatus.cpp:38 msgid "Object is not suitable for listing" msgstr "Объект не подходит Ð´Ð»Ñ Ð¿ÐµÑ€ÐµÑ‡Ð¸ÑлениÑ" #: src/hed/libs/data/DataStatus.cpp:39 msgid "Failed to obtain information about file" msgstr "Сбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ о файле" #: src/hed/libs/data/DataStatus.cpp:40 msgid "No such file or directory" msgstr "Ðет такого файла или каталога" #: src/hed/libs/data/DataStatus.cpp:41 msgid "Object not initialized (internal error)" msgstr "Объект не инициализирован (внутреннÑÑ Ð¾ÑˆÐ¸Ð±ÐºÐ°)" #: src/hed/libs/data/DataStatus.cpp:42 msgid "Operating System error" msgstr "Ошибка операционной ÑиÑтемы" #: src/hed/libs/data/DataStatus.cpp:43 msgid "Failed to stage file(s)" msgstr "Ðе удалоÑÑŒ размеÑтить файл(Ñ‹)" #: src/hed/libs/data/DataStatus.cpp:44 msgid "Inconsistent metadata" msgstr "Противоречивые метаданные" #: src/hed/libs/data/DataStatus.cpp:45 msgid "Failed to prepare source" msgstr "Ðе удалоÑÑŒ подготовить иÑточник" #: src/hed/libs/data/DataStatus.cpp:46 msgid "Should wait for source to be prepared" msgstr "Следует подождать, когда иÑточник будет готов" #: src/hed/libs/data/DataStatus.cpp:47 msgid "Failed to prepare destination" msgstr "Ðе удалоÑÑŒ подготовить назначение" #: src/hed/libs/data/DataStatus.cpp:48 msgid "Should wait for destination to be prepared" msgstr "Следует подождать, когда назначение будет готово" #: src/hed/libs/data/DataStatus.cpp:49 msgid "Failed to finalize reading from source" msgstr "Сбой Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð¸Ð· иÑточника" #: src/hed/libs/data/DataStatus.cpp:50 msgid "Failed to finalize writing to destination" msgstr "Сбой Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ð·Ð°Ð¿Ð¸Ñи в цель" #: src/hed/libs/data/DataStatus.cpp:51 msgid "Failed to create directory" msgstr "Ðе удалоÑÑŒ Ñоздать каталог" #: src/hed/libs/data/DataStatus.cpp:52 msgid "Failed to rename URL" msgstr "Ðе удалоÑÑŒ переименовать URL" #: src/hed/libs/data/DataStatus.cpp:53 msgid "Data was already cached" msgstr "Данные уже запиÑаны в кÑщ" #: src/hed/libs/data/DataStatus.cpp:54 msgid "Operation cancelled successfully" msgstr "ÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ñ ÑƒÑпешно прервана" #: src/hed/libs/data/DataStatus.cpp:55 msgid "Generic error" msgstr "ÐеÑпецифичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ°" #: src/hed/libs/data/DataStatus.cpp:56 src/hed/libs/data/DataStatus.cpp:69 msgid "Unknown error" msgstr "ÐеизвеÑÑ‚Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ°" #: src/hed/libs/data/DataStatus.cpp:60 msgid "No error" msgstr "Ðет ошибок" #: src/hed/libs/data/DataStatus.cpp:61 msgid "Transfer timed out" msgstr "ИÑтечение времени Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÑоединениÑ" #: src/hed/libs/data/DataStatus.cpp:62 msgid "Checksum mismatch" msgstr "ÐеÑовпадение контрольной Ñумм" #: src/hed/libs/data/DataStatus.cpp:63 msgid "Bad logic" msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð»Ð¾Ð³Ð¸ÐºÐ°" 
#: src/hed/libs/data/DataStatus.cpp:64 msgid "All results obtained are invalid" msgstr "Ð’Ñе полученные результаты неверны" #: src/hed/libs/data/DataStatus.cpp:65 msgid "Temporary service error" msgstr "ПреходÑÑ‰Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° Ñлужбы" #: src/hed/libs/data/DataStatus.cpp:66 msgid "Permanent service error" msgstr "ХроничеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° Ñлужбы" #: src/hed/libs/data/DataStatus.cpp:67 msgid "Error switching uid" msgstr "Ошибка Ñмены uid" #: src/hed/libs/data/DataStatus.cpp:68 msgid "Request timed out" msgstr "ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа" #: src/hed/libs/data/FileCache.cpp:111 msgid "No cache directory specified" msgstr "Ðе указан каталог кÑша" #: src/hed/libs/data/FileCache.cpp:128 msgid "No usable caches" msgstr "Ðет подходÑщих кÑшей" #: src/hed/libs/data/FileCache.cpp:137 msgid "No draining cache directory specified" msgstr "Ðе указан каталог кÑша Ð´Ð»Ñ Ð¾Ð¿Ð¾Ñ€Ð¾Ð¶Ð½ÐµÐ½Ð¸Ñ" #: src/hed/libs/data/FileCache.cpp:155 msgid "No read-only cache directory specified" msgstr "Ðе указан доÑтупный по чтению каталог кÑша" #: src/hed/libs/data/FileCache.cpp:184 #, c-format msgid "Failed to create cache directory for file %s: %s" msgstr "Ðе удалоÑÑŒ Ñоздать каталог кÑша Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° %s: %s" #: src/hed/libs/data/FileCache.cpp:194 #, c-format msgid "Failed to create any cache directories for %s" msgstr "Ðе удалоÑÑŒ Ñоздать каталоги кÑша Ð´Ð»Ñ %s" #: src/hed/libs/data/FileCache.cpp:201 #, c-format msgid "Failed to change permissions on %s: %s" msgstr "Ðевозможно изменить права доÑтупа к %s: %s" #: src/hed/libs/data/FileCache.cpp:213 #, c-format msgid "Failed to delete stale cache file %s: %s" msgstr "Ðе удалоÑÑŒ удалить уÑтаревший файл кÑша %s: %s" #: src/hed/libs/data/FileCache.cpp:216 #, c-format msgid "Failed to release lock on file %s" msgstr "Ðевозможно разблокировать файл %s" #: src/hed/libs/data/FileCache.cpp:234 #, c-format msgid "Failed looking up attributes of cached file: %s" msgstr "Ошибка поиÑка атрибутов кÑшированного файла: %s" #: src/hed/libs/data/FileCache.cpp:240 #, c-format msgid "Failed to obtain lock on cache file %s" msgstr "Ðевозможно заблокировать файл в кÑше %s" #: src/hed/libs/data/FileCache.cpp:249 src/hed/libs/data/FileCache.cpp:309 #, c-format msgid "Error removing cache file %s: %s" msgstr "Ошибка ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ ÐºÑшированного файла %s: %s" #: src/hed/libs/data/FileCache.cpp:251 src/hed/libs/data/FileCache.cpp:262 #, c-format msgid "Failed to remove lock on %s. Some manual intervention may be required" msgstr "" "Сбой Ñ€Ð°Ð·Ð±Ð»Ð¾ÐºÐ¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° на %s. Возможно, необходимо ручное вмешательÑтво" #: src/hed/libs/data/FileCache.cpp:281 src/hed/libs/data/FileCache.cpp:315 #, c-format msgid "Failed to unlock file %s: %s. Manual intervention may be required" msgstr "" "Ðе удалоÑÑŒ разблокировать файл %s: %s. 
Возможно, необходимо ручное " "вмешательÑтво" #: src/hed/libs/data/FileCache.cpp:298 #, c-format msgid "Invalid lock on file %s" msgstr "ÐедопуÑÑ‚Ð¸Ð¼Ð°Ñ Ð±Ð»Ð¾ÐºÐ¸Ñ€Ð¾Ð²ÐºÐ° файла %s" #: src/hed/libs/data/FileCache.cpp:304 #, c-format msgid "Failed to remove .meta file %s: %s" msgstr "Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° .meta %s: %s" #: src/hed/libs/data/FileCache.cpp:369 #, c-format msgid "Cache not found for file %s" msgstr "Ðе обнаружен кÑш файла %s" #: src/hed/libs/data/FileCache.cpp:379 #, c-format msgid "" "Cache file %s was modified in the last second, sleeping 1 second to avoid " "race condition" msgstr "" "КÑшированный файл %s был изменён в поÑледний момент, приоÑтановка процеÑÑа " "на 1 Ñекунду Ð´Ð»Ñ Ð¿Ñ€ÐµÐ´Ð¾Ñ‚Ð²Ñ€Ð°Ñ‰ÐµÐ½Ð¸Ñ Ð³Ð¾Ð½ÐºÐ¸" #: src/hed/libs/data/FileCache.cpp:384 src/hed/libs/data/FileCache.cpp:689 #, c-format msgid "Cache file %s does not exist" msgstr "КÑшированный файл %s не ÑущеÑтвует" #: src/hed/libs/data/FileCache.cpp:389 src/hed/libs/data/FileCache.cpp:691 #, c-format msgid "Error accessing cache file %s: %s" msgstr "Ошибка доÑтупа к кÑшированному файлу %s: %s" #: src/hed/libs/data/FileCache.cpp:395 #, c-format msgid "Cannot create directory %s for per-job hard links" msgstr "Ðевозможно Ñоздать каталог %s Ð´Ð»Ñ Ð¶Ñ‘Ñтких ÑÑылок задач" #: src/hed/libs/data/FileCache.cpp:400 #, c-format msgid "Cannot change permission of %s: %s " msgstr "Ðе удалоÑÑŒ изменить права доÑтупа к %s: %s " #: src/hed/libs/data/FileCache.cpp:404 #, c-format msgid "Cannot change owner of %s: %s " msgstr "Ðевозможно изменить владельца %s: %s " #: src/hed/libs/data/FileCache.cpp:418 #, c-format msgid "Failed to remove existing hard link at %s: %s" msgstr "Ðевозможно удалить ÑущеÑтвующую жёÑткую ÑÑылку на %s: %s" #: src/hed/libs/data/FileCache.cpp:422 src/hed/libs/data/FileCache.cpp:433 #, c-format msgid "Failed to create hard link from %s to %s: %s" msgstr "Ðевозможно Ñоздать жёÑткую ÑÑылку Ñ %s на %s: %s" #: src/hed/libs/data/FileCache.cpp:428 #, c-format msgid "Cache file %s not found" msgstr "Ðе обнаружен кÑшированый файл %s" #: src/hed/libs/data/FileCache.cpp:443 #, c-format msgid "Failed to change permissions or set owner of hard link %s: %s" msgstr "Ðе удалоÑÑŒ Ñменить права доÑтупа или владельца жёÑткой ÑÑылки %s: %s" #: src/hed/libs/data/FileCache.cpp:451 #, c-format msgid "Failed to release lock on cache file %s" msgstr "Ðевозможно разблокировать файл в кÑше %s" #: src/hed/libs/data/FileCache.cpp:462 #, c-format msgid "Cache file %s was locked during link/copy, must start again" msgstr "" "КÑшированный файл %s был заблокирован во Ð²Ñ€ÐµÐ¼Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑÑылки или копии, " "Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°" #: src/hed/libs/data/FileCache.cpp:467 #, c-format msgid "Cache file %s was deleted during link/copy, must start again" msgstr "" "КÑшированный файл %s был удалён во Ð²Ñ€ÐµÐ¼Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑÑылки или копии, Ð½Ð¾Ð²Ð°Ñ " "попытка" #: src/hed/libs/data/FileCache.cpp:472 #, c-format msgid "Cache file %s was modified while linking, must start again" msgstr "" "КÑшированный файл %s был изменён во Ð²Ñ€ÐµÐ¼Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑÑылки или копии, Ð½Ð¾Ð²Ð°Ñ " "попытка" #: src/hed/libs/data/FileCache.cpp:490 #, c-format msgid "Failed to copy file %s to %s: %s" msgstr "Сбой ÐºÐ¾Ð¿Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° %s в %s: %s" #: src/hed/libs/data/FileCache.cpp:496 #, c-format msgid "Failed to set executable bit on file %s" msgstr "Ðевозможно выÑтавить иÑполнÑемый бит Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° %s" #: src/hed/libs/data/FileCache.cpp:501 #, c-format msgid "Failed to set executable bit on file %s: %s" msgstr "Ðевозможно 
выставить исполняемый бит для файла %s: %s" #: src/hed/libs/data/FileCache.cpp:515 #, c-format msgid "Failed to remove existing symbolic link at %s: %s" msgstr "Невозможно удалить существующую символьную ссылку на %s: %s" #: src/hed/libs/data/FileCache.cpp:519 src/hed/libs/data/FileCache.cpp:524 #, c-format msgid "Failed to create symbolic link from %s to %s: %s" msgstr "Невозможно создать символьную ссылку с %s на %s: %s" #: src/hed/libs/data/FileCache.cpp:554 #, c-format msgid "Failed to remove cache per-job dir %s: %s" msgstr "Сбой удаления каталога кэша задач %s: %s" #: src/hed/libs/data/FileCache.cpp:573 src/hed/libs/data/FileCache.cpp:641 #, c-format msgid "Error reading meta file %s: %s" msgstr "Ошибка чтения мета-файла %s: %s" #: src/hed/libs/data/FileCache.cpp:578 src/hed/libs/data/FileCache.cpp:646 #, c-format msgid "Error opening meta file %s" msgstr "Ошибка открытия мета-файла %s" #: src/hed/libs/data/FileCache.cpp:583 src/hed/libs/data/FileCache.cpp:650 #, c-format msgid "meta file %s is empty" msgstr "Мета-файл %s пуст" #: src/hed/libs/data/FileCache.cpp:593 #, c-format msgid "" "File %s is already cached at %s under a different URL: %s - will not add DN " "to cached list" msgstr "" "Файл %s уже кэширован в %s с другим URL: %s - выделенное имя не будет " "добавлено в кэшированный список" #: src/hed/libs/data/FileCache.cpp:604 #, c-format msgid "Bad format detected in file %s, in line %s" msgstr "Обнаружен недопустимый формат в файле %s, строке %s" #: src/hed/libs/data/FileCache.cpp:620 #, c-format msgid "Could not acquire lock on meta file %s" msgstr "Невозможно установить блокировку на мета-файл %s" #: src/hed/libs/data/FileCache.cpp:624 #, c-format msgid "Error opening meta file for writing %s" msgstr "Ошибка открытия мета-файла для записи %s" #: src/hed/libs/data/FileCache.cpp:660 #, c-format msgid "DN %s is cached and is valid until %s for URL %s" msgstr "Выделенное имя %s для URL %s кэшировано, и действительно до %s" #: src/hed/libs/data/FileCache.cpp:664 #, c-format msgid "DN %s is cached but has expired for URL %s" msgstr "Выделенное имя %s для URL %s кэшировано, но уже просрочено" #: src/hed/libs/data/FileCache.cpp:715 #, c-format msgid "Failed to acquire lock on cache meta file %s" msgstr "Сбой установки блокировки на кэшированный мета-файл %s" #: src/hed/libs/data/FileCache.cpp:720 #, c-format msgid "Failed to create cache meta file %s" msgstr "Сбой создания мета-файла кэша %s" #: src/hed/libs/data/FileCache.cpp:735 #, c-format msgid "Failed to read cache meta file %s" msgstr "Сбой чтения мета-файла кэша %s" #: src/hed/libs/data/FileCache.cpp:740 #, c-format msgid "Cache meta file %s is empty, will recreate" msgstr "Мета-файл кэша %s пуст, будет воссоздан" #: src/hed/libs/data/FileCache.cpp:745 #, c-format msgid "Cache meta file %s possibly corrupted, will recreate" msgstr "Мета-файл кэша %s, возможно, повреждён, будет воссоздан" #: src/hed/libs/data/FileCache.cpp:749 #, c-format msgid "" "File %s is already cached at %s under a different URL: %s - this file will " "not be cached" msgstr "" "Файл %s уже находится в кэше %s с другим URL: %s - этот файл не будет " "кэширован" #: src/hed/libs/data/FileCache.cpp:759 #, c-format msgid "Error looking up attributes of cache meta file %s: %s" msgstr "Ошибка поиска атрибутов мета-файла кэша %s: %s" #: src/hed/libs/data/FileCache.cpp:830 #, c-format msgid "Using cache %s" msgstr "Используется кэш %s" #:
src/hed/libs/data/FileCache.cpp:844 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:79 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:112 #, c-format msgid "Error getting info from statvfs for the path %s: %s" msgstr "Ошибка получения информации от statvfs для пути %s: %s" #: src/hed/libs/data/FileCache.cpp:850 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:118 #, c-format msgid "Cache %s: Free space %f GB" msgstr "Кэш %s: Свободное пространство %f GB" #: src/hed/libs/data/URLMap.cpp:33 #, c-format msgid "Can't use URL %s" msgstr "Невозможно использовать URL %s" #: src/hed/libs/data/URLMap.cpp:39 #, c-format msgid "file %s is not accessible" msgstr "файл %s недоступен" #: src/hed/libs/data/URLMap.cpp:49 #, c-format msgid "Mapping %s to %s" msgstr "%s ставится в соответствие %s" #: src/hed/libs/data/examples/simple_copy.cpp:17 msgid "Usage: copy source destination" msgstr "Использование: copy источник назначение" #: src/hed/libs/data/examples/simple_copy.cpp:42 #, c-format msgid "Copy failed: %s" msgstr "Сбой копирования: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:41 #, c-format msgid "Failed to read proxy file: %s" msgstr "Сбой при чтении файла доверенности: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:49 #, c-format msgid "Failed to read certificate file: %s" msgstr "Сбой при чтении файла сертификата: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:56 #, c-format msgid "Failed to read private key file: %s" msgstr "Сбой при чтении файла личного ключа: %s" #: src/hed/libs/globusutils/GSSCredential.cpp:82 #, c-format msgid "" "Failed to convert GSI credential to GSS credential (major: %d, minor: %d):%s:" "%s" msgstr "" "Не удалось преобразовать параметры доступа GSI в GSS (major: %d, minor: " "%d)%s:%s" #: src/hed/libs/globusutils/GSSCredential.cpp:94 #, c-format msgid "Failed to release GSS credential (major: %d, minor: %d):%s:%s" msgstr "" "Не удалось освободить параметры доступа GSS (major: %d, minor: %d):%s:%s" #: src/hed/libs/loader/ModuleManager.cpp:28 msgid "Module Manager Init" msgstr "Запуск управления модулями" #: src/hed/libs/loader/ModuleManager.cpp:71 msgid "" "Busy plugins found while unloading Module Manager. Waiting for them to be " "released." msgstr "" "В процессе отключения менеджера модулей обнаружены занятые подключаемые " "модули. Ожидается их завершение."
#: src/hed/libs/loader/ModuleManager.cpp:205 #, c-format msgid "Found %s in cache" msgstr "%s обнаружен в кэше" #: src/hed/libs/loader/ModuleManager.cpp:212 #, c-format msgid "Could not locate module %s in following paths:" msgstr "Невозможно найти модуль %s в следующих местах:" #: src/hed/libs/loader/ModuleManager.cpp:216 #, c-format msgid "\t%s" msgstr "\t%s" #: src/hed/libs/loader/ModuleManager.cpp:230 #, c-format msgid "Loaded %s" msgstr "Подгружен модуль %s" #: src/hed/libs/loader/ModuleManager.cpp:274 msgid "Module Manager Init by ModuleManager::setCfg" msgstr "Инициализация менеджера модулей в ModuleManager::setCfg" #: src/hed/libs/loader/ModuleManager.cpp:310 #: src/hed/libs/loader/ModuleManager.cpp:323 #, c-format msgid "%s made persistent" msgstr "Модуль %s сброшен на диск" #: src/hed/libs/loader/ModuleManager.cpp:314 #, c-format msgid "Not found %s in cache" msgstr "Модуль %s не найден в кэше" #: src/hed/libs/loader/ModuleManager.cpp:328 msgid "Specified module not found in cache" msgstr "Указанные модули не найдены в кэше" #: src/hed/libs/loader/Plugin.cpp:364 src/hed/libs/loader/Plugin.cpp:557 #, c-format msgid "Could not find loadable module descriptor by name %s" msgstr "Не удалось найти дескриптор подгружаемого модуля по имени %s" #: src/hed/libs/loader/Plugin.cpp:372 src/hed/libs/loader/Plugin.cpp:567 #, c-format msgid "Could not find loadable module by name %s (%s)" msgstr "Не удалось найти подгружаемый модуль %s (%s)" #: src/hed/libs/loader/Plugin.cpp:378 src/hed/libs/loader/Plugin.cpp:480 #: src/hed/libs/loader/Plugin.cpp:572 #, c-format msgid "Module %s is not an ARC plugin (%s)" msgstr "Модуль %s не является подключаемым модулем ARC (%s)" #: src/hed/libs/loader/Plugin.cpp:395 src/hed/libs/loader/Plugin.cpp:490 #: src/hed/libs/loader/Plugin.cpp:598 #, c-format msgid "Module %s failed to reload (%s)" msgstr "Не удалось перезагрузить модуль %s (%s)" #: src/hed/libs/loader/Plugin.cpp:417 #, c-format msgid "Module %s contains no plugin %s" msgstr "Модуль %s не содержит подключаемый модуль %s" #: src/hed/libs/loader/Plugin.cpp:462 #, c-format msgid "Could not find loadable module descriptor by name %s or kind %s" msgstr "" "Не удалось найти дескрипторы подгружаемых модулей по имени %s или типу %s" #: src/hed/libs/loader/Plugin.cpp:467 #, c-format msgid "Loadable module %s contains no requested plugin %s of kind %s" msgstr "Подгружаемый модуль %s не содержит запрашиваемого модуля %s типа %s" #: src/hed/libs/loader/Plugin.cpp:474 #, c-format msgid "Could not find loadable module by names %s and %s (%s)" msgstr "Невозможно найти подгружаемые модули по имени %s и %s (%s)" #: src/hed/libs/loader/Plugin.cpp:503 #, c-format msgid "Module %s contains no requested plugin %s of kind %s" msgstr "Модуль %s не содержит запрашиваемого подключаемого модуля %s типа %s" #: src/hed/libs/loader/Plugin.cpp:588 #, c-format msgid "Module %s does not contain plugin(s) of specified kind(s)" msgstr "Модуль %s не содержит подключаемых модулей указанных типов" #: src/hed/libs/message/MCC.cpp:76 src/hed/libs/message/Service.cpp:25 #, c-format msgid "No security processing/check requested for '%s'" msgstr "Обработка/проверка параметров доступа не запрошена для '%s'" #: src/hed/libs/message/MCC.cpp:85 #, c-format msgid "Security processing/check failed: %s" msgstr "Сбой обработки/проверки безопасности: %s" #: src/hed/libs/message/MCC.cpp:90 msgid "Security processing/check passed" msgstr "Обработка/проверка параметров доступа завершилась успехом"
#: src/hed/libs/message/MCCLoader.cpp:16 msgid "Chain(s) configuration failed" msgstr "Не удалось настроить цепочку/и" #: src/hed/libs/message/MCCLoader.cpp:133 msgid "SecHandler configuration is not defined" msgstr "Настройки SecHandler не заданы" #: src/hed/libs/message/MCCLoader.cpp:156 msgid "SecHandler has no configuration" msgstr "Настройки SecHandler отсутствуют" #: src/hed/libs/message/MCCLoader.cpp:162 msgid "SecHandler has no name attribute defined" msgstr "Не задан атрибут name для SecHandler" #: src/hed/libs/message/MCCLoader.cpp:172 #, c-format msgid "Security Handler %s(%s) could not be created" msgstr "Обработчик безопасности %s(%s) не может быть создан" #: src/hed/libs/message/MCCLoader.cpp:176 #, c-format msgid "SecHandler: %s(%s)" msgstr "SecHandler: %s(%s)" #: src/hed/libs/message/MCCLoader.cpp:188 msgid "Component has no name attribute defined" msgstr "Для компонента не задан атрибут name" #: src/hed/libs/message/MCCLoader.cpp:193 msgid "Component has no ID attribute defined" msgstr "Для компонента не задан атрибут ID" #: src/hed/libs/message/MCCLoader.cpp:202 #, c-format msgid "Component %s(%s) could not be created" msgstr "Компонента %s(%s) не может быть создана" #: src/hed/libs/message/MCCLoader.cpp:232 #, c-format msgid "Component's %s(%s) next has no ID attribute defined" msgstr "Для компонента %s(%s) отсутствует атрибут ID следующей цели" #: src/hed/libs/message/MCCLoader.cpp:287 #, c-format msgid "Loaded MCC %s(%s)" msgstr "Подгружен MCC %s(%s)" #: src/hed/libs/message/MCCLoader.cpp:305 #, c-format msgid "Plexer's (%s) next has no ID attribute defined" msgstr "Для следующего после %s компонента Plexer не задан атрибут ID" #: src/hed/libs/message/MCCLoader.cpp:315 #, c-format msgid "Loaded Plexer %s" msgstr "Подгружен Plexer %s" #: src/hed/libs/message/MCCLoader.cpp:323 msgid "Service has no Name attribute defined" msgstr "Для службы не задан атрибут Name" #: src/hed/libs/message/MCCLoader.cpp:329 msgid "Service has no ID attribute defined" msgstr "Для службы не задан атрибут ID" #: src/hed/libs/message/MCCLoader.cpp:338 #, c-format msgid "Service %s(%s) could not be created" msgstr "Служба %s(%s) не может быть создана" #: src/hed/libs/message/MCCLoader.cpp:345 #, c-format msgid "Loaded Service %s(%s)" msgstr "Подгружена служба %s(%s)" #: src/hed/libs/message/MCCLoader.cpp:387 #, c-format msgid "Linking MCC %s(%s) to MCC (%s) under %s" msgstr "Подцепление MCC %s(%s) к MCC (%s) в %s" #: src/hed/libs/message/MCCLoader.cpp:398 #, c-format msgid "Linking MCC %s(%s) to Service (%s) under %s" msgstr "Подцепление MCC %s(%s) к службе (%s) в %s" #: src/hed/libs/message/MCCLoader.cpp:407 #, c-format msgid "Linking MCC %s(%s) to Plexer (%s) under %s" msgstr "Подцепление MCC %s(%s) к коммутатору (%s) в %s" #: src/hed/libs/message/MCCLoader.cpp:412 #, c-format msgid "MCC %s(%s) - next %s(%s) has no target" msgstr "MCC %s(%s) - следующий %s(%s) не содержит назначения" #: src/hed/libs/message/MCCLoader.cpp:431 #, c-format msgid "Linking Plexer %s to MCC (%s) under %s" msgstr "Подцепление коммутатора %s к MCC (%s) в %s" #: src/hed/libs/message/MCCLoader.cpp:442 #, c-format msgid "Linking Plexer %s to Service (%s) under %s" msgstr "Подцепление коммутатора %s к службе (%s) в %s" #: src/hed/libs/message/MCCLoader.cpp:451 #, c-format msgid "Linking Plexer %s to Plexer (%s) under %s" msgstr "Подцепление коммутатора %s к коммутатору (%s) в %s" #: src/hed/libs/message/MCCLoader.cpp:457 #, c-format msgid "Plexer (%s) - next %s(%s) has no
target" msgstr "Коммутатор (%s) - Ñледующий %s(%s) не Ñодержит назначениÑ" #: src/hed/libs/message/Plexer.cpp:31 #, c-format msgid "Bad label: \"%s\"" msgstr "ÐŸÐ»Ð¾Ñ…Ð°Ñ Ð¼ÐµÑ‚ÐºÐ°: \"%s\"" #: src/hed/libs/message/Plexer.cpp:47 #, c-format msgid "Operation on path \"%s\"" msgstr "ДейÑтвие над путём \"%s\"" #: src/hed/libs/message/Plexer.cpp:60 #, c-format msgid "No next MCC or Service at path \"%s\"" msgstr "Ðе найдено больше MCC или Ñлужб в пути \"%s\"" #: src/hed/libs/message/Service.cpp:35 #, c-format msgid "Security processing/check for '%s' failed: %s" msgstr "Сбой обработки/проверки безопаÑноÑти Ð´Ð»Ñ '%s': %s" #: src/hed/libs/message/Service.cpp:41 #, c-format msgid "Security processing/check for '%s' passed" msgstr "Обработка/проверка параметров доÑтупа '%s' завершилаÑÑŒ уÑпехом" #: src/hed/libs/otokens/jwse.cpp:55 #, c-format msgid "JWSE::Input: token: %s" msgstr "JWSE::Input: токен: %s" #: src/hed/libs/otokens/jwse.cpp:75 #, c-format msgid "JWSE::Input: header: %s" msgstr "JWSE::Input: заголовок: %s" #: src/hed/libs/otokens/jwse.cpp:101 #, c-format msgid "JWSE::Input: JWS content: %s" msgstr "JWSE::Input: Ñодержимое JWS: %s" #: src/hed/libs/otokens/jwse.cpp:111 msgid "JWSE::Input: JWS: token too young" msgstr "JWSE::Input: JWS: токен Ñлишком Ñвежий" #: src/hed/libs/otokens/jwse.cpp:120 msgid "JWSE::Input: JWS: token too old" msgstr "JWSE::Input: JWS: токен Ñлишком Ñтарый" #: src/hed/libs/otokens/jwse.cpp:131 #, c-format msgid "JWSE::Input: JWS: signature algorithm: %s" msgstr "JWSE::Input: JWS: алгоритм подпиÑи: %s" #: src/hed/libs/otokens/jwse.cpp:190 msgid "JWSE::Input: JWS: signature verification failed" msgstr "JWSE::Input: JWS: Ñбой Ð¿Ð¾Ð´Ñ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñи" #: src/hed/libs/otokens/jwse.cpp:196 msgid "JWSE::Input: JWE: not supported yet" msgstr "JWSE::Input: JWE: пока не поддерживаетÑÑ" #: src/hed/libs/otokens/jwse_keys.cpp:271 msgid "JWSE::ExtractPublicKey: x5c key" msgstr "JWSE::ExtractPublicKey: ключ x5c" #: src/hed/libs/otokens/jwse_keys.cpp:279 msgid "JWSE::ExtractPublicKey: jwk key" msgstr "JWSE::ExtractPublicKey: ключ jwk" #: src/hed/libs/otokens/jwse_keys.cpp:286 msgid "JWSE::ExtractPublicKey: external jwk key" msgstr "JWSE::ExtractPublicKey: внешний ключ jwk" #: src/hed/libs/otokens/jwse_keys.cpp:303 #, c-format msgid "JWSE::ExtractPublicKey: fetching jwl key from %s" msgstr "JWSE::ExtractPublicKey: извлечение ключа jwk из: %s" #: src/hed/libs/otokens/jwse_keys.cpp:316 msgid "JWSE::ExtractPublicKey: no supported key" msgstr "JWSE::ExtractPublicKey: нет поддерживаемого ключа" #: src/hed/libs/otokens/jwse_keys.cpp:319 msgid "JWSE::ExtractPublicKey: key parsing error" msgstr "JWSE::ExtractPublicKey: ошибка разбора ключа" #: src/hed/libs/otokens/openid_metadata.cpp:40 #: src/hed/libs/otokens/openid_metadata.cpp:45 #, c-format msgid "Input: metadata: %s" msgstr "Ввод: метаданные: %s" #: src/hed/libs/otokens/openid_metadata.cpp:414 #, c-format msgid "Fetch: response code: %u %s" msgstr "Извлечение: код отклика: %u %s" #: src/hed/libs/otokens/openid_metadata.cpp:416 #, c-format msgid "Fetch: response body: %s" msgstr "Извлечение: тело отклика: %s" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:136 #, c-format msgid "Can not load ARC evaluator object: %s" msgstr "Ðевозможно подгрузить объект интерпретатора ARC : %s" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:187 #, c-format msgid "Can not load ARC request object: %s" msgstr "Ðевозможно подгрузить объект запроÑа ARC: %s" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:228 #, c-format msgid 
"Can not load policy object: %s" msgstr "Ðевозможно подгрузить объект политик: %s" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:276 msgid "Can not load policy object" msgstr "Ðевозможно подгрузить объект политик" #: src/hed/libs/security/ArcPDP/EvaluatorLoader.cpp:324 msgid "Can not load request object" msgstr "Ðевозможно подгрузить объект запроÑа" #: src/hed/libs/security/ArcPDP/PolicyParser.cpp:119 msgid "Can not generate policy object" msgstr "Ðевозможно Ñоздать объект правил доÑтупа" #: src/hed/libs/security/ArcPDP/attr/RequestAttribute.cpp:37 #, c-format msgid "Id= %s,Type= %s,Issuer= %s,Value= %s" msgstr "Id= %s,Тип= %s,Издатель= %s,Значение= %s" #: src/hed/libs/security/ArcPDP/attr/RequestAttribute.cpp:40 #, c-format msgid "No Attribute exists, which can deal with type: %s" msgstr "Ðе ÑущеÑтвует атрибутов, ÑпоÑобных трактовать Ñтот тип: %s" #: src/hed/mcc/http/MCCHTTP.cpp:168 #, c-format msgid "HTTP Error: %d %s" msgstr "Ошибка HTTP: %d %s" #: src/hed/mcc/http/MCCHTTP.cpp:241 msgid "Cannot create http payload" msgstr "Ðе удалоÑÑŒ Ñоздать нагрузку http" #: src/hed/mcc/http/MCCHTTP.cpp:311 msgid "No next element in the chain" msgstr "ОтÑутÑтвует Ñледующий Ñлемент цепи" #: src/hed/mcc/http/MCCHTTP.cpp:320 #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:253 msgid "next element of the chain returned error status" msgstr "Ñледующий Ñлемент цепи возвратил ÑÑ‚Ð°Ñ‚ÑƒÑ Ð¾ÑˆÐ¸Ð±ÐºÐ¸" #: src/hed/mcc/http/MCCHTTP.cpp:329 msgid "next element of the chain returned no payload" msgstr "Ñледующий Ñлемент в цепочке возвратил пуÑтую нагрузку" #: src/hed/mcc/http/MCCHTTP.cpp:341 msgid "next element of the chain returned invalid/unsupported payload" msgstr "" "Ñледующий Ñлемент в цепи возвратил недопуÑтимую или неподдерживаемую нагрузку" #: src/hed/mcc/http/MCCHTTP.cpp:423 msgid "Error to flush output payload" msgstr "Ошибка ÑброÑа иÑходÑщей нагрузки" #: src/hed/mcc/http/PayloadHTTP.cpp:305 #, c-format msgid "<< %s" msgstr "<< %s" #: src/hed/mcc/http/PayloadHTTP.cpp:354 src/hed/mcc/http/PayloadHTTP.cpp:456 #, c-format msgid "< %s" msgstr "< %s" #: src/hed/mcc/http/PayloadHTTP.cpp:575 msgid "Failed to parse HTTP header" msgstr "Сбой разбора заголовка HTTP" #: src/hed/mcc/http/PayloadHTTP.cpp:836 msgid "Invalid HTTP object can't produce result" msgstr "ÐедопуÑтимый объект HTTP не может дать результат" #: src/hed/mcc/http/PayloadHTTP.cpp:949 #, c-format msgid "> %s" msgstr "> %s" #: src/hed/mcc/http/PayloadHTTP.cpp:974 msgid "Failed to write header to output stream" msgstr "Сбой при запиÑи заголовка в выходной поток" #: src/hed/mcc/http/PayloadHTTP.cpp:999 src/hed/mcc/http/PayloadHTTP.cpp:1005 #: src/hed/mcc/http/PayloadHTTP.cpp:1011 src/hed/mcc/http/PayloadHTTP.cpp:1021 #: src/hed/mcc/http/PayloadHTTP.cpp:1033 src/hed/mcc/http/PayloadHTTP.cpp:1038 #: src/hed/mcc/http/PayloadHTTP.cpp:1043 src/hed/mcc/http/PayloadHTTP.cpp:1051 #: src/hed/mcc/http/PayloadHTTP.cpp:1058 msgid "Failed to write body to output stream" msgstr "Сбой при запиÑи тела в выходной поток" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:31 msgid "Skipping service: no ServicePath found!" msgstr "Ð¡ÐµÑ€Ð²Ð¸Ñ Ð¿Ñ€Ð¾Ð¿ÑƒÑкаетÑÑ: отÑутÑтвует ServicePath!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:37 msgid "Skipping service: no SchemaPath found!" msgstr "Ð¡ÐµÑ€Ð²Ð¸Ñ Ð¿Ñ€Ð¾Ð¿ÑƒÑкаетÑÑ: отÑутÑтвует SchemaPath!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:89 msgid "Parser Context creation failed!" msgstr "Ðе удалоÑÑŒ Ñоздать контекÑÑ‚ анализатора!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:98 msgid "Cannot parse schema!" 
msgstr "Ðевозможно интерпретировать Ñхему!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:110 msgid "Empty payload!" msgstr "ПуÑÑ‚Ð°Ñ Ð½Ð°Ð³Ñ€ÑƒÐ·ÐºÐ°!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:119 msgid "Could not convert payload!" msgstr "Ðевозможно преобразовать нагрузку!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:125 #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:212 msgid "Could not create PayloadSOAP!" msgstr "Ðе удалоÑÑŒ Ñоздать PayloadSOAP!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:196 msgid "Empty input payload!" msgstr "ПуÑÑ‚Ð°Ñ Ð½Ð°Ð³Ñ€ÑƒÐ·ÐºÐ° на входе!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:205 msgid "Could not convert incoming payload!" msgstr "Ðе удалоÑÑŒ преобразовать входную информацию!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:232 msgid "Missing schema! Skipping validation..." msgstr "Схема отÑутÑтвует! Сверка пропуÑкаетÑÑ..." #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:237 msgid "Could not validate message!" msgstr "Ðе удалоÑÑŒ подтвердить доÑтоверноÑть ÑообщениÑ!" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:245 #: src/hed/mcc/soap/MCCSOAP.cpp:222 src/hed/mcc/soap/MCCSOAP.cpp:236 #: src/hed/mcc/soap/MCCSOAP.cpp:266 msgid "empty next chain element" msgstr "Ñледующий Ñлемент в цепи пуÑтой" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:257 #: src/hed/mcc/soap/MCCSOAP.cpp:282 msgid "next element of the chain returned empty payload" msgstr "Ñледующий Ñлемент в цепи возвратил пуÑтую нагрузку" #: src/hed/mcc/msgvalidator/MCCMsgValidator.cpp:265 msgid "next element of the chain returned invalid payload" msgstr "Ñледующий Ñлемент в цепи возвратил пуÑтую нагрузку" #: src/hed/mcc/soap/MCCSOAP.cpp:207 msgid "empty input payload" msgstr "пуÑÑ‚Ð°Ñ Ð½Ð°Ð³Ñ€ÑƒÐ·ÐºÐ° на входе" #: src/hed/mcc/soap/MCCSOAP.cpp:217 #, c-format msgid "MIME is not suitable for SOAP: %s" msgstr "MIME не подходит Ð´Ð»Ñ SOAP: %s" #: src/hed/mcc/soap/MCCSOAP.cpp:231 msgid "incoming message is not SOAP" msgstr "входÑщее Ñообщение не в формате SOAP" #: src/hed/mcc/soap/MCCSOAP.cpp:258 #, c-format msgid "Security check failed in SOAP MCC for incoming message: %s" msgstr "Сбой проверки безопаÑноÑти в SOAP MCC Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ñщего ÑообщениÑ: %s" #: src/hed/mcc/soap/MCCSOAP.cpp:274 #, c-format msgid "next element of the chain returned error status: %s" msgstr "Ñледующий Ñлемент цепи возвратил ÑÑ‚Ð°Ñ‚ÑƒÑ Ð¾ÑˆÐ¸Ð±ÐºÐ¸: %s" #: src/hed/mcc/soap/MCCSOAP.cpp:293 msgid "next element of the chain returned unknown payload - passing through" msgstr "" "Ñледующий Ñлемент в цепи возвратил неопознанную нагрузку - пропуÑкаетÑÑ" #: src/hed/mcc/soap/MCCSOAP.cpp:298 src/hed/mcc/soap/MCCSOAP.cpp:314 #, c-format msgid "Security check failed in SOAP MCC for outgoing message: %s" msgstr "Сбой проверки безопаÑноÑти в SOAP MCC Ð´Ð»Ñ Ð¸ÑходÑщего ÑообщениÑ: %s" #: src/hed/mcc/soap/MCCSOAP.cpp:368 msgid "Security check failed in SOAP MCC for outgoing message" msgstr "Ðе прошла проверка безопаÑноÑти в SOAP MCC Ð´Ð»Ñ Ð¸ÑходÑщего ÑообщениÑ" #: src/hed/mcc/soap/MCCSOAP.cpp:421 msgid "Security check failed in SOAP MCC for incoming message" msgstr "Ðе прошла проверка безопаÑноÑти в SOAP MCC Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ñщего ÑообщениÑ" #: src/hed/mcc/tcp/MCCTCP.cpp:82 msgid "Missing Port in Listen element" msgstr "Ð’ Ñлементе Listen отÑутÑтвует номер порта (Port)" #: src/hed/mcc/tcp/MCCTCP.cpp:91 msgid "Version in Listen element can't be recognized" msgstr "ВерÑÐ¸Ñ Ð² Ñлементе Listen не опознана" #: src/hed/mcc/tcp/MCCTCP.cpp:100 #, c-format msgid "Failed to obtain local address for port %s - %s" msgstr "Ðе удалоÑÑŒ 
получить локальный адрес для порта %s - %s" #: src/hed/mcc/tcp/MCCTCP.cpp:102 #, c-format msgid "Failed to obtain local address for %s:%s - %s" msgstr "Не удалось получить локальный адрес для %s:%s - %s" #: src/hed/mcc/tcp/MCCTCP.cpp:109 #, c-format msgid "Trying to listen on TCP port %s(%s)" msgstr "Попытка прослушать порт TCP %s(%s)" #: src/hed/mcc/tcp/MCCTCP.cpp:111 #, c-format msgid "Trying to listen on %s:%s(%s)" msgstr "Попытка прослушать %s:%s(%s)" #: src/hed/mcc/tcp/MCCTCP.cpp:117 #, c-format msgid "Failed to create socket for listening at TCP port %s(%s): %s" msgstr "Не удалось создать сокет для прослушки порта TCP %s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:119 #, c-format msgid "Failed to create socket for listening at %s:%s(%s): %s" msgstr "Не удалось создать сокет для прослушки %s:%s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:134 #, c-format msgid "" "Failed to limit socket to IPv6 at TCP port %s - may cause errors for IPv4 at " "same port" msgstr "" "Не удалось ограничить сокет под IPv6 на порте TCP %s - может привести к " "ошибкам для IPv4 по этому же порту" #: src/hed/mcc/tcp/MCCTCP.cpp:136 #, c-format msgid "" "Failed to limit socket to IPv6 at %s:%s - may cause errors for IPv4 at same " "port" msgstr "" "Не удалось ограничить сокет под IPv6 на %s:%s - может привести к ошибкам для " "IPv4 по этому же порту" #: src/hed/mcc/tcp/MCCTCP.cpp:144 #, c-format msgid "Failed to bind socket for TCP port %s(%s): %s" msgstr "Не удалось связать сокет с портом TCP %s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:146 #, c-format msgid "Failed to bind socket for %s:%s(%s): %s" msgstr "Не удалось связать сокет с %s:%s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:161 #, c-format msgid "Failed to listen at TCP port %s(%s): %s" msgstr "Не удалось прослушать порт TCP %s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:163 #, c-format msgid "Failed to listen at %s:%s(%s): %s" msgstr "Не удалось прослушать %s:%s(%s): %s" #: src/hed/mcc/tcp/MCCTCP.cpp:180 #, c-format msgid "Listening on TCP port %s(%s)" msgstr "Прослушивается порт TCP %s(%s)" #: src/hed/mcc/tcp/MCCTCP.cpp:182 #, c-format msgid "Listening on %s:%s(%s)" msgstr "Прослушивается %s:%s(%s)" #: src/hed/mcc/tcp/MCCTCP.cpp:189 #, c-format msgid "Failed to start listening on any address for %s:%s" msgstr "Не удалось начать прослушивание ни по какому адресу для %s:%s" #: src/hed/mcc/tcp/MCCTCP.cpp:191 #, c-format msgid "Failed to start listening on any address for %s:%s(IPv%s)" msgstr "Не удалось начать прослушивание ни по какому адресу для %s:%s(IPv%s)" #: src/hed/mcc/tcp/MCCTCP.cpp:197 msgid "No listening ports initiated" msgstr "Не инициализированы прослушивающие порты" #: src/hed/mcc/tcp/MCCTCP.cpp:208 msgid "dropped" msgstr "игнорируется" #: src/hed/mcc/tcp/MCCTCP.cpp:208 msgid "put on hold" msgstr "приостановлен" #: src/hed/mcc/tcp/MCCTCP.cpp:208 #, c-format msgid "Setting connections limit to %i, connections over limit will be %s" msgstr "" "Предельное количество соединений выставляется на %i, соединения сверх " "предела будут переведены в состояние %s" #: src/hed/mcc/tcp/MCCTCP.cpp:212 msgid "Failed to start thread for listening" msgstr "Не удалось запустить поток для прослушивания" #: src/hed/mcc/tcp/MCCTCP.cpp:245 msgid "Failed to start thread for communication" msgstr "Не удалось запустить поток для обмена информацией" #: src/hed/mcc/tcp/MCCTCP.cpp:271 msgid "Failed while waiting for connection request" msgstr "Сбой при ожидании запроса на соединение" #:
src/hed/mcc/tcp/MCCTCP.cpp:293 msgid "Failed to accept connection request" msgstr "Не удалось принять запрос на соединение" #: src/hed/mcc/tcp/MCCTCP.cpp:302 msgid "Too many connections - dropping new one" msgstr "Слишком много соединений - новое отклонено" #: src/hed/mcc/tcp/MCCTCP.cpp:309 msgid "Too many connections - waiting for old to close" msgstr "Слишком много соединений - ожидание закрытия старых" #: src/hed/mcc/tcp/MCCTCP.cpp:533 msgid "next chain element called" msgstr "вызван следующий элемент в цепи" #: src/hed/mcc/tcp/MCCTCP.cpp:548 msgid "Only Raw Buffer payload is supported for output" msgstr "Для вывода поддерживается только неформатированный буфер" #: src/hed/mcc/tcp/MCCTCP.cpp:556 src/hed/mcc/tcp/MCCTCP.cpp:655 #: src/hed/mcc/tls/MCCTLS.cpp:542 msgid "Failed to send content of buffer" msgstr "Не удалось отправить содержимое буфера" #: src/hed/mcc/tcp/MCCTCP.cpp:568 msgid "TCP executor is removed" msgstr "Исполнитель TCP удалён" #: src/hed/mcc/tcp/MCCTCP.cpp:570 #, c-format msgid "Sockets do not match on exit %i != %i" msgstr "Несовпадение сокетов при завершении %i != %i" #: src/hed/mcc/tcp/MCCTCP.cpp:591 msgid "No Connect element specified" msgstr "Не задан элемент Connect" #: src/hed/mcc/tcp/MCCTCP.cpp:597 msgid "Missing Port in Connect element" msgstr "В элементе Connect отсутствует номер порта (Port)" #: src/hed/mcc/tcp/MCCTCP.cpp:603 msgid "Missing Host in Connect element" msgstr "В элементе Connect отсутствует название узла (Host)" #: src/hed/mcc/tcp/MCCTCP.cpp:631 msgid "TCP client process called" msgstr "Вызван процесс TCP клиента" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:67 #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:83 #, c-format msgid "Failed to resolve %s (%s)" msgstr "Сбой при разрешении %s (%s)" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:93 #, c-format msgid "Trying to connect %s(%s):%d" msgstr "Попытка соединения с %s(%s):%d" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:97 #, c-format msgid "Failed to create socket for connecting to %s(%s):%d - %s" msgstr "Не удалось создать сокет для соединения с %s(%s):%d - %s" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:109 #, c-format msgid "" "Failed to get TCP socket options for connection to %s(%s):%d - timeout won't " "work - %s" msgstr "" "Не удалось получить параметры TCP-сокета для соединения с %s(%s):%d - " "прерывание по времени не будет работать - %s" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:116 #, c-format msgid "Failed to connect to %s(%s):%i - %s" msgstr "Не удалось установить соединение с %s(%s):%i - %s" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:126 #, c-format msgid "Timeout connecting to %s(%s):%i - %i s" msgstr "Истекло время ожидания соединения с %s(%s):%i - %i с" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:134 #, c-format msgid "Failed while waiting for connection to %s(%s):%i - %s" msgstr "Сбой при ожидании соединения с %s(%s):%i - %s" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:144 #, c-format msgid "Failed to connect to %s(%s):%i" msgstr "Не удалось установить соединение с %s(%s):%i" #: src/hed/mcc/tcp/PayloadTCPSocket.cpp:200 msgid "" "Received message out-of-band (not critical, ERROR level is just for " "debugging purposes)" msgstr "" "Получено сообщение вне полосы (некритично, уровень ERROR лишь для отладки)" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:293 #, c-format msgid "Using DH parameters from file: %s" msgstr "Используются параметры DH из файла %s" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:296 msgid "Failed to open file
with DH parameters for reading" msgstr "Не удалось открыть на чтение файл с параметрами DH" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:301 msgid "Failed to read file with DH parameters" msgstr "Сбой при чтении файла с параметрами DH" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:304 msgid "Failed to apply DH parameters" msgstr "Не удалось применить параметры DH" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:306 msgid "DH parameters applied" msgstr "Применены параметры DH" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:321 #, c-format msgid "Using curve with NID: %u" msgstr "Используется кривая с NID %u" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:324 msgid "Failed to generate EC key" msgstr "Сбой при создании ключа EC" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:327 msgid "Failed to apply ECDH parameters" msgstr "Сбой применения параметров ECDH" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:329 msgid "ECDH parameters applied" msgstr "Применены параметры ECDH" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:336 #, c-format msgid "Using cipher list: %s" msgstr "Используемый список шифров: %s" #: src/hed/mcc/tls/ConfigTLSMCC.cpp:352 #, c-format msgid "Using protocol options: 0x%x" msgstr "Используемые опции протокола: 0x%x" #: src/hed/mcc/tls/DelegationCollector.cpp:39 msgid "Independent proxy - no rights granted" msgstr "Независимая доверенность - права не выделены" #: src/hed/mcc/tls/DelegationCollector.cpp:43 msgid "Proxy with all rights inherited" msgstr "Доверенность со всеми унаследованными правами" #: src/hed/mcc/tls/DelegationCollector.cpp:51 msgid "Proxy with empty policy - fail on unrecognized policy" msgstr "Доверенность с незаполненной политикой - отказ по неизвестной политике" #: src/hed/mcc/tls/DelegationCollector.cpp:56 #, c-format msgid "Proxy with specific policy: %s" msgstr "Доверенность с ограниченной политикой: %s" #: src/hed/mcc/tls/DelegationCollector.cpp:60 msgid "Proxy with ARC Policy" msgstr "Доверенность с политикой ARC" #: src/hed/mcc/tls/DelegationCollector.cpp:62 msgid "Proxy with unknown policy - fail on unrecognized policy" msgstr "Доверенность с неизвестной политикой - отказ по неизвестной политике" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:77 #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:128 #, c-format msgid "Was expecting %s at the beginning of \"%s\"" msgstr "Ожидалось %s в начале \"%s\"" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:82 #, c-format msgid "We only support CAs in Globus signing policy - %s is not supported" msgstr "" "Мы поддерживаем только CA в Globus signing policy - %s не поддерживается" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:87 #, c-format msgid "We only support X509 CAs in Globus signing policy - %s is not supported" msgstr "" "Мы поддерживаем только центры сертификации X509 в политике подписи Globus - " "%s не поддерживается" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:92 msgid "Missing CA subject in Globus signing policy" msgstr "Субъект центра сертификации отсутствует в политике подписи Globus" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:102 msgid "Negative rights are not supported in Globus signing policy" msgstr "Отрицательные права не поддерживаются политикой подписи Globus" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:106 #, c-format msgid "Unknown rights in Globus signing policy - %s" msgstr "Неизвестные права в политике подписи Globus - %s" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:111 #, c-format msgid "" "Only globus rights are supported in Globus signing policy - %s is not " "supported" msgstr ""
"Мы поддерживаем только права globus в политике подпиÑи Globus - %s не " "поддерживаетÑÑ" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:116 #, c-format msgid "" "Only signing rights are supported in Globus signing policy - %s is not " "supported" msgstr "" "Мы поддерживаем только права подпиÑи в политике подпиÑи Globus - %s не " "поддерживаетÑÑ" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:133 #, c-format msgid "" "We only support subjects conditions in Globus signing policy - %s is not " "supported" msgstr "" "Мы поддерживаем только уÑÐ»Ð¾Ð²Ð¸Ñ Ñубъекта в политике подпиÑи Globus - %s не " "поддерживаетÑÑ" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:138 #, c-format msgid "" "We only support globus conditions in Globus signing policy - %s is not " "supported" msgstr "" "Мы поддерживаем только уÑÐ»Ð¾Ð²Ð¸Ñ globus в политике подпиÑи Globus - %s не " "поддерживаетÑÑ" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:144 msgid "Missing condition subjects in Globus signing policy" msgstr "УÑÐ»Ð¾Ð²Ð¸Ñ Ñубъекта отÑутÑтвуют в политике подпиÑи Globus" #: src/hed/mcc/tls/GlobusSigningPolicy.cpp:220 msgid "Unknown element in Globus signing policy" msgstr "ÐеизвеÑтный Ñлемент в политике подпиÑи Globus" #: src/hed/mcc/tls/MCCTLS.cpp:218 msgid "Critical VOMS attribute processing failed" msgstr "Сбой обработки критичеÑкого атрибута VOMS" #: src/hed/mcc/tls/MCCTLS.cpp:226 msgid "VOMS attribute validation failed" msgstr "Сбой проверки атрибутов VOMS" #: src/hed/mcc/tls/MCCTLS.cpp:228 msgid "VOMS attribute is ignored due to processing/validation error" msgstr "Ðтрибут VOMS игнорируетÑÑ Ð¸Ð·-за ошибки обработки или проверки" #: src/hed/mcc/tls/MCCTLS.cpp:420 src/hed/mcc/tls/MCCTLS.cpp:559 #: src/hed/mcc/tls/MCCTLS.cpp:578 #, c-format msgid "Failed to establish connection: %s" msgstr "Сбой уÑÑ‚Ð°Ð½Ð¾Ð²Ð»ÐµÐ½Ð¸Ñ ÑоединениÑ: %s" #: src/hed/mcc/tls/MCCTLS.cpp:439 src/hed/mcc/tls/MCCTLS.cpp:521 #, c-format msgid "Peer name: %s" msgstr "Ð˜Ð¼Ñ ÐºÐ¾Ð½Ñ‚Ð°ÐºÑ‚Ð°: %s" #: src/hed/mcc/tls/MCCTLS.cpp:441 src/hed/mcc/tls/MCCTLS.cpp:523 #, c-format msgid "Identity name: %s" msgstr "Выделенное имÑ: %s" #: src/hed/mcc/tls/MCCTLS.cpp:443 src/hed/mcc/tls/MCCTLS.cpp:525 #, c-format msgid "CA name: %s" msgstr "Ð˜Ð¼Ñ Ñертификационного агентÑтва: %s" #: src/hed/mcc/tls/MCCTLS.cpp:450 msgid "Failed to process security attributes in TLS MCC for incoming message" msgstr "" "Ðе удалоÑÑŒ обработать атрибуты безопаÑноÑти в TLS MCC Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ñщего ÑообщениÑ" #: src/hed/mcc/tls/MCCTLS.cpp:458 msgid "Security check failed in TLS MCC for incoming message" msgstr "Ðе прошла проверка безопаÑноÑти в TLS MCC Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ñщего ÑообщениÑ" #: src/hed/mcc/tls/MCCTLS.cpp:531 msgid "Security check failed for outgoing TLS message" msgstr "Ðе прошла проверка безопаÑноÑти Ð´Ð»Ñ Ð¸ÑходÑщего ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ TLS" #: src/hed/mcc/tls/MCCTLS.cpp:563 msgid "Security check failed for incoming TLS message" msgstr "Ðе прошла проверка безопаÑноÑти Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ñщего ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ TLS" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:93 msgid "" "Failed to allocate memory for certificate subject while matching policy." msgstr "" "Ðе удалоÑÑŒ зарезервировать памÑть Ð´Ð»Ñ Ð¸Ð¼ÐµÐ½Ð¸ Ñубъекта Ñертификата при Ñверке " "Ñ Ð¿Ð¾Ð»Ð¸Ñ‚Ð¸ÐºÐ°Ð¼Ð¸." #: src/hed/mcc/tls/PayloadTLSMCC.cpp:98 msgid "" "Failed to retrieve link to TLS stream. Additional policy matching is skipped." msgstr "" "Ðе удалоÑÑŒ получить ÑÑылку на поток TLS. Ð”Ð¾Ð¿Ð¾Ð»Ð½Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ñверка политики " "пропуÑкаетÑÑ." 
#: src/hed/mcc/tls/PayloadTLSMCC.cpp:128 #, c-format msgid "Certificate %s already expired" msgstr "Срок действия сертификата %s уже истёк" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:136 #, c-format msgid "Certificate %s will expire in %s" msgstr "Срок действия сертификата %s истечёт через %s" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:157 msgid "Failed to store application data" msgstr "Не удалось записать данные приложения" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:185 msgid "Failed to retrieve application data from OpenSSL" msgstr "Не удалось получить данные о приложении через OpenSSL" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:257 src/hed/mcc/tls/PayloadTLSMCC.cpp:356 msgid "Can not create the SSL Context object" msgstr "Не удалось создать объект SSL Context" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:270 src/hed/mcc/tls/PayloadTLSMCC.cpp:376 msgid "Can't set OpenSSL verify flags" msgstr "Не удалось выставить метки подтверждения OpenSSL" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:289 src/hed/mcc/tls/PayloadTLSMCC.cpp:390 msgid "Can not create the SSL object" msgstr "Не удалось создать объект SSL" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:299 msgid "Faile to assign hostname extension" msgstr "Не удалось присвоить расширение hostname" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:313 msgid "Failed to establish SSL connection" msgstr "Не удалось установить соединение SSL" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:316 src/hed/mcc/tls/PayloadTLSMCC.cpp:405 #, c-format msgid "Using cipher: %s" msgstr "Используется алгоритм шифрования %s" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:402 msgid "Failed to accept SSL connection" msgstr "Не удалось принять соединение SSL" #: src/hed/mcc/tls/PayloadTLSMCC.cpp:455 #, c-format msgid "Failed to shut down SSL: %s" msgstr "Не удалось прервать соединение SSL: %s" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:47 msgid "" "ArcAuthZ: failed to initiate all PDPs - this instance will be non-functional" msgstr "" "ArcAuthZ: не удалось инициализировать все PDP - этот процесс будет нерабочим" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:70 msgid "PDP: missing name attribute" msgstr "PDP: отсутствует атрибут имени" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:74 #, c-format msgid "PDP: %s (%s)" msgstr "PDP: %s (%s)" #: src/hed/shc/arcauthzsh/ArcAuthZ.cpp:79 #, c-format msgid "PDP: %s (%s) can not be loaded" msgstr "PDP: %s (%s) не может быть подгружен" #: src/hed/shc/arcpdp/ArcEvaluationCtx.cpp:251 #, c-format msgid "There are %d RequestItems" msgstr "Обнаружено %d элементов запроса" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:60 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:55 msgid "Can not parse classname for FunctionFactory from configuration" msgstr "Не удалось определить имя класса для FunctionFactory из настроек" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:68 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:63 msgid "Can not parse classname for AttributeFactory from configuration" msgstr "Не удалось определить имя класса для AttributeFactory из настроек" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:76 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:71 msgid "" "Can not parse classname for CombiningAlgorithmFactory from configuration" msgstr "" "Не удалось определить имя класса для CombiningAlgorithmFactory из настроек" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:84 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:79 msgid "Can not parse classname for Request from configuration" msgstr "Не удалось определить имя класса для Request из настроек" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:93 #:
src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:88 msgid "Can not parse classname for Policy from configuration" msgstr "Не удалось определить имя класса для Policy из настроек" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:105 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:100 msgid "Can not dynamically produce AttributeFactory" msgstr "Не удалось динамически создать AttributeFactory" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:110 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:105 msgid "Can not dynamically produce FnFactory" msgstr "Не удалось динамически создать FnFactory" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:115 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:110 msgid "Can not dynamically produce AlgFacroty" msgstr "Не удалось динамически создать AlgFacroty" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:126 #: src/hed/shc/gaclpdp/GACLEvaluator.cpp:31 #: src/hed/shc/gaclpdp/GACLEvaluator.cpp:37 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:121 msgid "Can not create PolicyStore object" msgstr "Не удалось создать объект PolicyStore" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:177 src/hed/shc/test.cpp:183 #: src/hed/shc/testinterface_arc.cpp:102 src/hed/shc/testinterface_xacml.cpp:54 #: src/hed/shc/xacmlpdp/XACMLEvaluator.cpp:172 msgid "Can not dynamically produce Request" msgstr "Не удалось динамически создать Request" #: src/hed/shc/arcpdp/ArcEvaluator.cpp:261 #, c-format msgid "Result value (0=Permit, 1=Deny, 2=Indeterminate, 3=Not_Applicable): %d" msgstr "Результат (0=Допуск, 1=Отказ, 2=Неопределённый, 3=Неприменим): %d" #: src/hed/shc/arcpdp/ArcPDP.cpp:110 msgid "Can not find ArcPDPContext" msgstr "Не обнаружен ArcPDPContext" #: src/hed/shc/arcpdp/ArcPDP.cpp:139 src/hed/shc/xacmlpdp/XACMLPDP.cpp:117 msgid "Evaluator does not support loadable Combining Algorithms" msgstr "Обработчик не поддерживает подгружаемые алгоритмы комбинирования" #: src/hed/shc/arcpdp/ArcPDP.cpp:143 src/hed/shc/xacmlpdp/XACMLPDP.cpp:121 #, c-format msgid "Evaluator does not support specified Combining Algorithm - %s" msgstr "Обработчик не поддерживает указанный алгоритм комбинирования - %s" #: src/hed/shc/arcpdp/ArcPDP.cpp:155 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:84 #: src/hed/shc/gaclpdp/GACLPDP.cpp:118 src/hed/shc/test.cpp:94 #: src/hed/shc/testinterface_arc.cpp:37 src/hed/shc/testinterface_xacml.cpp:37 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:133 msgid "Can not dynamically produce Evaluator" msgstr "Не удалось динамически создать анализатор" #: src/hed/shc/arcpdp/ArcPDP.cpp:158 msgid "Evaluator for ArcPDP was not loaded" msgstr "Обработчик для ArcPDP не был загружен" #: src/hed/shc/arcpdp/ArcPDP.cpp:165 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:57 #: src/hed/shc/gaclpdp/GACLPDP.cpp:128 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:87 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:143 src/tests/echo/echo.cpp:108 msgid "Missing security object in message" msgstr "В сообщении отсутствует объект авторизации" #: src/hed/shc/arcpdp/ArcPDP.cpp:173 src/hed/shc/arcpdp/ArcPDP.cpp:181 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:137 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:143 #: src/hed/shc/gaclpdp/GACLPDP.cpp:136 src/hed/shc/gaclpdp/GACLPDP.cpp:144 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:95 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:103 #: src/tests/echo/echo.cpp:116 src/tests/echo/echo.cpp:123 msgid "Failed to convert security information to ARC request" msgstr "Не удалось преобразовать информацию о защите в запрос ARC" #: src/hed/shc/arcpdp/ArcPDP.cpp:189 #:
src/hed/shc/delegationpdp/DelegationPDP.cpp:150 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:111 #, c-format msgid "ARC Auth. request: %s" msgstr "Запрос авторизации ARC: %s" #: src/hed/shc/arcpdp/ArcPDP.cpp:192 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:153 #: src/hed/shc/gaclpdp/GACLPDP.cpp:155 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:114 #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:170 msgid "No requested security information was collected" msgstr "Не удалось собрать запрошенную информацию о безопасности" #: src/hed/shc/arcpdp/ArcPDP.cpp:199 msgid "Not authorized by arc.pdp - failed to get response from Evaluator" msgstr "arc.pdp запретил доступ - не удалось получить отклик обработчика" #: src/hed/shc/arcpdp/ArcPDP.cpp:245 msgid "Authorized by arc.pdp" msgstr "Допущен через arc.pdp" #: src/hed/shc/arcpdp/ArcPDP.cpp:246 msgid "" "Not authorized by arc.pdp - some of the RequestItem elements do not satisfy " "Policy" msgstr "" "Нет допуска от arc.pdp - некоторые элементы RequestItem не удовлетворяют " "политике" #: src/hed/shc/arcpdp/ArcPolicy.cpp:56 src/hed/shc/arcpdp/ArcPolicy.cpp:70 #: src/hed/shc/gaclpdp/GACLPolicy.cpp:46 src/hed/shc/gaclpdp/GACLPolicy.cpp:59 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:48 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:64 msgid "Policy is empty" msgstr "Пустые правила" #: src/hed/shc/arcpdp/ArcPolicy.cpp:114 #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:115 #, c-format msgid "PolicyId: %s Alg inside this policy is:-- %s" msgstr "PolicyId: %s Внутренний алгоритм политики:-- %s" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:75 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:129 msgid "No delegation policies in this context and message - passing through" msgstr "" "В данном контексте и сообщении отсутствуют политики делегирования - " "пропускается" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:95 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:109 msgid "Failed to convert security information to ARC policy" msgstr "Не удалось преобразовать информацию о безопасности в политику ARC" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:116 #: src/hed/shc/delegationpdp/DelegationPDP.cpp:123 #, c-format msgid "ARC delegation policy: %s" msgstr "Политика делегирования ARC: %s" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:161 msgid "No authorization response was returned" msgstr "Не получен ответ о допуске" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:164 #, c-format msgid "There are %d requests, which satisfy at least one policy" msgstr "Обнаружены %d запроса, удовлетворяющих хотя бы одной политике" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:183 msgid "Delegation authorization passed" msgstr "Авторизация делегирования пройдена" #: src/hed/shc/delegationpdp/DelegationPDP.cpp:185 msgid "Delegation authorization failed" msgstr "Авторизация на делегирование не выдана" #: src/hed/shc/delegationsh/DelegationSH.cpp:63 msgid "" "Missing CertificatePath element or ProxyPath element, or " " is missing" msgstr "" "Отсутствует элемент CertificatePath или ProxyPath element, или " "" #: src/hed/shc/delegationsh/DelegationSH.cpp:68 msgid "" "Missing or empty KeyPath element, or is missing" msgstr "" "Элемент KeyPath отсутствует или пуст, либо отсутствует " "" #: src/hed/shc/delegationsh/DelegationSH.cpp:74 msgid "Missing or empty CertificatePath or CACertificatesDir element" msgstr "Элемент CertificatePath или CACertificatesDir отсутствует или пуст" #:
src/hed/shc/delegationsh/DelegationSH.cpp:81 #, c-format msgid "Delegation role not supported: %s" msgstr "Неподдерживаемая роль делегирования: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:90 #, c-format msgid "Delegation type not supported: %s" msgstr "Неподдерживаемый тип делегирования: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:115 msgid "Failed to acquire delegation context" msgstr "Не удалось извлечь контекст делегирования" #: src/hed/shc/delegationsh/DelegationSH.cpp:143 #: src/hed/shc/delegationsh/DelegationSH.cpp:254 msgid "Can't create delegation context" msgstr "Не удалось создать контекст делегирования" #: src/hed/shc/delegationsh/DelegationSH.cpp:149 msgid "Delegation handler with delegatee role starts to process" msgstr "Запущен обработчик делегирования с ролью делегата" #: src/hed/shc/delegationsh/DelegationSH.cpp:152 #: src/services/a-rex/arex.cpp:592 src/services/candypond/CandyPond.cpp:526 #: src/services/data-staging/DataDeliveryService.cpp:624 msgid "process: POST" msgstr "процесс: POST" #: src/hed/shc/delegationsh/DelegationSH.cpp:159 #: src/services/a-rex/arex.cpp:599 src/services/candypond/CandyPond.cpp:535 #: src/services/data-staging/DataDeliveryService.cpp:633 #: src/services/wrappers/python/pythonwrapper.cpp:416 msgid "input is not SOAP" msgstr "ввод не в формате SOAP" #: src/hed/shc/delegationsh/DelegationSH.cpp:166 #, c-format msgid "Delegation service: %s" msgstr "Служба делегирования: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:181 #: src/hed/shc/delegationsh/DelegationSH.cpp:188 #: src/tests/client/test_ClientX509Delegation_ARC.cpp:55 #, c-format msgid "Can not get the delegation credential: %s from delegation service: %s" msgstr "" "Не удалось получить делегированные параметры доступа: %s от службы " "делегирования:%s" #: src/hed/shc/delegationsh/DelegationSH.cpp:204 #: src/hed/shc/delegationsh/DelegationSH.cpp:268 #, c-format msgid "Delegated credential identity: %s" msgstr "Отличительные признаки делегированных параметров доступа: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:205 #, c-format msgid "" "The delegated credential got from delegation service is stored into path: %s" msgstr "" "Делегированные параметры доступа полученные от службы делегирования записаны " "в каталоге: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:218 msgid "The endpoint of delegation service should be configured" msgstr "Конечная точка сервиса делегирования должна быть настроена" #: src/hed/shc/delegationsh/DelegationSH.cpp:228 #: src/hed/shc/delegationsh/DelegationSH.cpp:340 msgid "Delegation handler with delegatee role ends" msgstr "Завершена обработка делегирования с ролью делегата" #: src/hed/shc/delegationsh/DelegationSH.cpp:260 msgid "Delegation handler with delegator role starts to process" msgstr "Запущен обработчик делегирования с ролью поручителя" #: src/hed/shc/delegationsh/DelegationSH.cpp:269 #, c-format msgid "The delegated credential got from path: %s" msgstr "Делегированные параметры доступа извлечены из каталога: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:290 #, c-format msgid "Can not create delegation crendential to delegation service: %s" msgstr "Невозможно создать делегируемый документ для службы делегирования: %s" #: src/hed/shc/delegationsh/DelegationSH.cpp:328 msgid "output is not SOAP" msgstr "вывод не в формате SOAP" #: src/hed/shc/delegationsh/DelegationSH.cpp:339 #, c-format msgid "" "Succeeded to send
DelegationService: %s and DelegationID: %s info to peer " "service" msgstr "" "Информация о DelegationService: %s и DelegationID: %s успешно отправлена " "партнёрскому сервису" #: src/hed/shc/delegationsh/DelegationSH.cpp:345 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:220 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:101 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:94 msgid "Incoming Message is not SOAP" msgstr "Входящее сообщение не в формате SOAP" #: src/hed/shc/delegationsh/DelegationSH.cpp:352 #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:341 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:123 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:108 msgid "Outgoing Message is not SOAP" msgstr "Исходящее сообщение не является сообщением SOAP" #: src/hed/shc/delegationsh/DelegationSH.cpp:356 msgid "Delegation handler is not configured" msgstr "Обработчик делегирования не настроен" #: src/hed/shc/gaclpdp/GACLPDP.cpp:121 msgid "Evaluator for GACLPDP was not loaded" msgstr "Обработчик для GACLPDP не был загружен" #: src/hed/shc/gaclpdp/GACLPDP.cpp:152 #, c-format msgid "GACL Auth. request: %s" msgstr "Запрос авторизации GACL: %s" #: src/hed/shc/gaclpdp/GACLPolicy.cpp:50 src/hed/shc/gaclpdp/GACLPolicy.cpp:63 msgid "Policy is not gacl" msgstr "Политика не в формате GACL" #: src/hed/shc/legacy/ConfigParser.cpp:13 msgid "Configuration file not specified" msgstr "Не указан файл настроек" #: src/hed/shc/legacy/ConfigParser.cpp:18 #: src/hed/shc/legacy/ConfigParser.cpp:28 #: src/hed/shc/legacy/ConfigParser.cpp:33 msgid "Configuration file can not be read" msgstr "Невозможно прочесть файл настроек" #: src/hed/shc/legacy/ConfigParser.cpp:43 #, c-format msgid "Configuration file is broken - block name is too short: %s" msgstr "Файл настроек испорчен - слишком короткое название блока: %s" #: src/hed/shc/legacy/ConfigParser.cpp:47 #, c-format msgid "Configuration file is broken - block name does not end with ]: %s" msgstr "Файл настроек испорчен - название блока не заканчивается ]: %s" #: src/hed/shc/legacy/LegacyMap.cpp:39 src/hed/shc/legacy/LegacyPDP.cpp:119 msgid "Configuration file not specified in ConfigBlock" msgstr "Не указан файл настроек в ConfigBlock" #: src/hed/shc/legacy/LegacyMap.cpp:48 src/hed/shc/legacy/LegacyPDP.cpp:128 msgid "BlockName is empty" msgstr "Не указан BlockName" #: src/hed/shc/legacy/LegacyMap.cpp:108 #, c-format msgid "Failed processing user mapping command: %s %s" msgstr "Сбой работы команды соответствия пользователя: %s %s" #: src/hed/shc/legacy/LegacyMap.cpp:114 #: src/services/gridftpd/fileroot_config.cpp:320 #, c-format msgid "Failed to change mapping stack processing policy in: %s = %s" msgstr "Не удалось изменить политики обработки стека соответствий в: %s = %s" #: src/hed/shc/legacy/LegacyMap.cpp:174 msgid "LegacyMap: no configurations blocks defined" msgstr "LegacyMap: не заданы группы настроек" #: src/hed/shc/legacy/LegacyMap.cpp:196 src/hed/shc/legacy/LegacyPDP.cpp:239 #, c-format msgid "" "LegacyPDP: there is no %s Sec Attribute defined. Probably ARC Legacy Sec " "Handler is not configured or failed." msgstr "" "LegacyPDP: атрибут безопасности %s не задан. Возможно, обработчик " "безопасности ARC Legacy не настроен, или претерпел сбой." #: src/hed/shc/legacy/LegacyMap.cpp:201 src/hed/shc/legacy/LegacyPDP.cpp:244 msgid "LegacyPDP: ARC Legacy Sec Attribute not recognized." msgstr "LegacyPDP: атрибут безопасности ARC Legacy не опознан."
#: src/hed/shc/legacy/LegacyPDP.cpp:138 #, c-format msgid "Failed to parse configuration file %s" msgstr "Сбой при разборе файла настроек %s" #: src/hed/shc/legacy/LegacyPDP.cpp:144 #, c-format msgid "Block %s not found in configuration file %s" msgstr "Блок %s не обнаружен в файле настроек %s" #: src/hed/shc/legacy/LegacySecHandler.cpp:40 #: src/hed/shc/legacy/LegacySecHandler.cpp:118 msgid "LegacySecHandler: configuration file not specified" msgstr "LegacySecHandler: не указан файл настроек" #: src/hed/shc/legacy/arc_lcas.cpp:146 src/hed/shc/legacy/arc_lcmaps.cpp:161 #, c-format msgid "" "Failed to convert GSI credential to GSS credential (major: %d, minor: %d)" msgstr "" "Не удалось преобразовать параметры доступа GSI в GSS (major: %d, minor: %d)" #: src/hed/shc/legacy/arc_lcas.cpp:171 src/hed/shc/legacy/arc_lcmaps.cpp:186 msgid "Missing subject name" msgstr "Отсутствует имя субъекта" #: src/hed/shc/legacy/arc_lcas.cpp:176 src/hed/shc/legacy/arc_lcmaps.cpp:191 msgid "Missing path of credentials file" msgstr "Отсутствует путь к файлу параметров доступа" #: src/hed/shc/legacy/arc_lcas.cpp:182 msgid "Missing name of LCAS library" msgstr "Отсутствует имя библиотеки LCAS" #: src/hed/shc/legacy/arc_lcas.cpp:199 #, c-format msgid "Can't load LCAS library %s: %s" msgstr "Невозможно загрузить библиотеку LCAS %s: %s" #: src/hed/shc/legacy/arc_lcas.cpp:209 #, c-format msgid "Can't find LCAS functions in a library %s" msgstr "Не удалось обнаружить функции LCAS в библиотеке %s" #: src/hed/shc/legacy/arc_lcas.cpp:219 msgid "Failed to initialize LCAS" msgstr "Сбой инициализации LCAS" #: src/hed/shc/legacy/arc_lcas.cpp:234 msgid "Failed to terminate LCAS" msgstr "Сбой остановки LCAS" #: src/hed/shc/legacy/arc_lcmaps.cpp:197 msgid "Missing name of LCMAPS library" msgstr "Отсутствует имя библиотеки LCMAPS" #: src/hed/shc/legacy/arc_lcmaps.cpp:211 msgid "Can't read policy names" msgstr "Невозможно прочесть названия политик" #: src/hed/shc/legacy/arc_lcmaps.cpp:222 #, c-format msgid "Can't load LCMAPS library %s: %s" msgstr "Невозможно загрузить библиотеку LCMAPS %s: %s" #: src/hed/shc/legacy/arc_lcmaps.cpp:234 #, c-format msgid "Can't find LCMAPS functions in a library %s" msgstr "Не удалось обнаружить функции LCMAPS в библиотеке %s" #: src/hed/shc/legacy/arc_lcmaps.cpp:246 msgid "LCMAPS has lcmaps_run" msgstr "LCMAPS содержит lcmaps_run" #: src/hed/shc/legacy/arc_lcmaps.cpp:247 msgid "LCMAPS has getCredentialData" msgstr "LCMAPS содержит getCredentialData" #: src/hed/shc/legacy/arc_lcmaps.cpp:251 msgid "Failed to initialize LCMAPS" msgstr "Сбой инициализации LCMAPS" #: src/hed/shc/legacy/arc_lcmaps.cpp:291 #, c-format msgid "LCMAPS returned invalid GID: %u" msgstr "LCMAPS возвратил недопустимый GID: %u" #: src/hed/shc/legacy/arc_lcmaps.cpp:294 msgid "LCMAPS did not return any GID" msgstr "LCMAPS не возвратил никакого GID" #: src/hed/shc/legacy/arc_lcmaps.cpp:297 #, c-format msgid "LCMAPS returned UID which has no username: %u" msgstr "LCMAPS возвратил UID не соответствующий учётной записи: %u" #: src/hed/shc/legacy/arc_lcmaps.cpp:300 #, c-format msgid "LCMAPS returned invalid UID: %u" msgstr "LCMAPS возвратил недопустимый UID: %u" #: src/hed/shc/legacy/arc_lcmaps.cpp:303 msgid "LCMAPS did not return any UID" msgstr "LCMAPS не возвратил никакого UID" #: src/hed/shc/legacy/arc_lcmaps.cpp:312 msgid "Failed to terminate LCMAPS" msgstr "Сбой остановки LCMAPS" #: src/hed/shc/legacy/auth.cpp:35 src/services/gridftpd/auth/auth.cpp:35 #, c-format msgid "Unexpected
argument for 'all' rule - %s" msgstr "ÐепредуÑмотренный аргумент Ð´Ð»Ñ Ð¿Ñ€Ð°Ð²Ð¸Ð»Ð° 'all' - %s" #: src/hed/shc/legacy/auth.cpp:337 #, c-format msgid "Credentials stored in temporary file %s" msgstr "Параметры доÑтупа Ñохранены во временном файле %s" #: src/hed/shc/legacy/auth.cpp:346 #, c-format msgid "Assigned to authorization group %s" msgstr "ПрипиÑан к группе допуÑка %s" #: src/hed/shc/legacy/auth.cpp:351 #, c-format msgid "Assigned to userlist %s" msgstr "ПрипиÑан к ÑпиÑку пользователей %s" #: src/hed/shc/legacy/auth_file.cpp:22 #: src/services/gridftpd/auth/auth_file.cpp:22 #, c-format msgid "Failed to read file %s" msgstr "Сбой при чтении файла %s" #: src/hed/shc/legacy/auth_otokens.cpp:30 msgid "Missing subject in configuration" msgstr "Ð’ наÑтройках отÑутÑтвует Ñубъект" #: src/hed/shc/legacy/auth_otokens.cpp:35 msgid "Missing issuer in configuration" msgstr "Ð’ наÑтройках отÑутÑтвует издатель" #: src/hed/shc/legacy/auth_otokens.cpp:40 msgid "Missing audience in configuration" msgstr "Ð’ наÑтройках отÑутÑтвуют получатели" #: src/hed/shc/legacy/auth_otokens.cpp:45 msgid "Missing scope in configuration" msgstr "Ð’ наÑтройках отÑутÑтвует контекÑÑ‚" #: src/hed/shc/legacy/auth_otokens.cpp:50 src/hed/shc/legacy/auth_voms.cpp:47 #: src/services/gridftpd/auth/auth_voms.cpp:51 msgid "Missing group in configuration" msgstr "Ð’ наÑтройках отÑутÑтвует группа" #: src/hed/shc/legacy/auth_otokens.cpp:53 #, c-format msgid "Rule: subject: %s" msgstr "Правило: Ñубъект: %s" #: src/hed/shc/legacy/auth_otokens.cpp:54 #, c-format msgid "Rule: issuer: %s" msgstr "Правило: издатель: %s" #: src/hed/shc/legacy/auth_otokens.cpp:55 #, c-format msgid "Rule: audience: %s" msgstr "Правило: получатели: %s" #: src/hed/shc/legacy/auth_otokens.cpp:56 #, c-format msgid "Rule: scope: %s" msgstr "Правило: контекÑÑ‚: %s" #: src/hed/shc/legacy/auth_otokens.cpp:57 src/hed/shc/legacy/auth_voms.cpp:66 #: src/services/gridftpd/auth/auth_voms.cpp:68 #, c-format msgid "Rule: group: %s" msgstr "Правило: группа: %s" #: src/hed/shc/legacy/auth_otokens.cpp:60 #, c-format msgid "Match issuer: %s" msgstr "СоответÑтвующий издатель: %s" #: src/hed/shc/legacy/auth_otokens.cpp:66 #, c-format msgid "Matched: %s %s %s" msgstr "СоответÑтвие: %s %s %s" #: src/hed/shc/legacy/auth_otokens.cpp:80 src/hed/shc/legacy/auth_voms.cpp:93 #: src/services/gridftpd/auth/auth_voms.cpp:98 msgid "Matched nothing" msgstr "Совпадений нет" #: src/hed/shc/legacy/auth_plugin.cpp:45 src/hed/shc/legacy/unixmap.cpp:215 #: src/services/gridftpd/auth/auth_plugin.cpp:70 #: src/services/gridftpd/auth/unixmap.cpp:214 #, c-format msgid "Plugin %s returned: %u" msgstr "Подключаемый модуль %s ответил: %u" #: src/hed/shc/legacy/auth_plugin.cpp:49 src/hed/shc/legacy/unixmap.cpp:219 #, c-format msgid "Plugin %s timeout after %u seconds" msgstr "Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ð¾Ð³Ð¾ Ð¼Ð¾Ð´ÑƒÐ»Ñ %s иÑтекло поÑле %u Ñекунд" #: src/hed/shc/legacy/auth_plugin.cpp:52 src/hed/shc/legacy/unixmap.cpp:222 #, c-format msgid "Plugin %s failed to start" msgstr "Подключаемый модуль %s не Ñмог запуÑтитьÑÑ" #: src/hed/shc/legacy/auth_plugin.cpp:54 src/hed/shc/legacy/unixmap.cpp:224 #, c-format msgid "Plugin %s printed: %s" msgstr "Подключаемый модуль %s вывел на печать: %s" #: src/hed/shc/legacy/auth_plugin.cpp:55 src/hed/shc/legacy/unixmap.cpp:212 #: src/hed/shc/legacy/unixmap.cpp:225 #, c-format msgid "Plugin %s error: %s" msgstr "Ошибка подключаемого Ð¼Ð¾Ð´ÑƒÐ»Ñ %s: %s" #: src/hed/shc/legacy/auth_voms.cpp:42 #: src/services/gridftpd/auth/auth_voms.cpp:45 msgid "Missing 
VO in configuration" msgstr "Ð’ наÑтройках отÑутÑтвует ВО" #: src/hed/shc/legacy/auth_voms.cpp:52 #: src/services/gridftpd/auth/auth_voms.cpp:57 msgid "Missing role in configuration" msgstr "Ð’ наÑтройках отÑутÑтвует роль" #: src/hed/shc/legacy/auth_voms.cpp:57 #: src/services/gridftpd/auth/auth_voms.cpp:63 msgid "Missing capabilities in configuration" msgstr "Ð’ наÑтройках отÑутÑтвуют возможноÑти" #: src/hed/shc/legacy/auth_voms.cpp:62 msgid "Too many arguments in configuration" msgstr "Слишком много аргументов в наÑтройках" #: src/hed/shc/legacy/auth_voms.cpp:65 #: src/services/gridftpd/auth/auth_voms.cpp:67 #, c-format msgid "Rule: vo: %s" msgstr "Правило: ВО: %s" #: src/hed/shc/legacy/auth_voms.cpp:67 #: src/services/gridftpd/auth/auth_voms.cpp:69 #, c-format msgid "Rule: role: %s" msgstr "Правило: роль: %s" #: src/hed/shc/legacy/auth_voms.cpp:68 #: src/services/gridftpd/auth/auth_voms.cpp:70 #, c-format msgid "Rule: capabilities: %s" msgstr "Правило: возможноÑти: %s" #: src/hed/shc/legacy/auth_voms.cpp:71 #: src/services/gridftpd/auth/auth_voms.cpp:77 #, c-format msgid "Match vo: %s" msgstr "Совпадение ВО: %s" #: src/hed/shc/legacy/auth_voms.cpp:78 #, c-format msgid "Matched: %s %s %s %s" msgstr "СоответÑтвие: %s %s %s %s" #: src/hed/shc/legacy/simplemap.cpp:70 #: src/services/gridftpd/auth/simplemap.cpp:68 #, c-format msgid "SimpleMap: acquired new unmap time of %u seconds" msgstr "SimpleMap: получено новое Ð²Ñ€ÐµÐ¼Ñ Ñ€Ð°ÑÑоглаÑÐ¾Ð²Ð°Ð½Ð¸Ñ Ð½Ð° %u Ñекунд" #: src/hed/shc/legacy/simplemap.cpp:72 #: src/services/gridftpd/auth/simplemap.cpp:70 msgid "SimpleMap: wrong number in unmaptime command" msgstr "SimpleMap: недопуÑтимое значение в команде unmaptime" #: src/hed/shc/legacy/simplemap.cpp:85 src/hed/shc/legacy/simplemap.cpp:90 #: src/services/gridftpd/auth/simplemap.cpp:83 #: src/services/gridftpd/auth/simplemap.cpp:88 #, c-format msgid "SimpleMap: %s" msgstr "SimpleMap: %s" #: src/hed/shc/legacy/unixmap.cpp:65 src/hed/shc/legacy/unixmap.cpp:70 #: src/services/gridftpd/auth/unixmap.cpp:63 #: src/services/gridftpd/auth/unixmap.cpp:68 msgid "Mapping policy option has empty value" msgstr "Значение параметра политики приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð¿ÑƒÑто" #: src/hed/shc/legacy/unixmap.cpp:80 src/services/gridftpd/auth/unixmap.cpp:78 #, c-format msgid "Unsupported mapping policy action: %s" msgstr "Ðеподдерживаемое дейÑтвие политики ÑоответÑтвиÑ: %s" #: src/hed/shc/legacy/unixmap.cpp:91 src/services/gridftpd/auth/unixmap.cpp:89 #, c-format msgid "Unsupported mapping policy option: %s" msgstr "ÐÐµÐ¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð¿Ð¾Ð»Ð¸Ñ‚Ð¸ÐºÐ¸ ÑоответÑтвиÑ: %s" #: src/hed/shc/legacy/unixmap.cpp:103 src/hed/shc/legacy/unixmap.cpp:108 #: src/services/gridftpd/auth/unixmap.cpp:100 #: src/services/gridftpd/auth/unixmap.cpp:105 msgid "User name mapping command is empty" msgstr "ПуÑÑ‚Ð°Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð° в приÑвоении имени пользователÑ" #: src/hed/shc/legacy/unixmap.cpp:116 #: src/services/gridftpd/auth/unixmap.cpp:113 #, c-format msgid "User name mapping has empty authgroup: %s" msgstr "ПуÑÑ‚Ð°Ñ Ð³Ñ€ÑƒÐ¿Ð¿Ð° authgroup в приÑвоении имени пользователÑ: %s" #: src/hed/shc/legacy/unixmap.cpp:147 #: src/services/gridftpd/auth/unixmap.cpp:147 #, c-format msgid "Unknown user name mapping rule %s" msgstr "ÐеизвеÑтное правило приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð¸Ð¼ÐµÐ½Ð¸ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %s" #: src/hed/shc/legacy/unixmap.cpp:156 src/hed/shc/legacy/unixmap.cpp:161 #: src/hed/shc/legacy/unixmap.cpp:177 src/hed/shc/legacy/unixmap.cpp:183 #: src/services/gridftpd/auth/unixmap.cpp:175 #: src/services/gridftpd/auth/unixmap.cpp:180 
#: src/services/gridftpd/auth/unixmap.cpp:196 msgid "Plugin (user mapping) command is empty" msgstr "ПуÑÑ‚Ð°Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð° в подключаемом модуле (приÑвоение имени пользователÑ)" #: src/hed/shc/legacy/unixmap.cpp:167 #: src/services/gridftpd/auth/unixmap.cpp:186 #, c-format msgid "Plugin (user mapping) timeout is not a number: %s" msgstr "" "Ðецифровое значение времени Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð² подключаемом модуле (приÑвоение имени " "пользователÑ): %s" #: src/hed/shc/legacy/unixmap.cpp:171 #: src/services/gridftpd/auth/unixmap.cpp:190 #, c-format msgid "Plugin (user mapping) timeout is wrong number: %s" msgstr "" "Ðеприемлемое значение времени Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð² подключаемом модуле (приÑвоение " "имени пользователÑ): %s" #: src/hed/shc/legacy/unixmap.cpp:203 #, c-format msgid "Plugin %s returned no username" msgstr "Подключаемый модуль %s не выдал имени пользователÑ" #: src/hed/shc/legacy/unixmap.cpp:208 #: src/services/gridftpd/auth/unixmap.cpp:211 #, c-format msgid "Plugin %s returned too much: %s" msgstr "Подключаемый модуль %s ответил Ñлишком длинно: %s" #: src/hed/shc/legacy/unixmap.cpp:211 #, c-format msgid "Plugin %s returned no mapping" msgstr "Подключаемый модуль %s не выдал привÑзки" #: src/hed/shc/legacy/unixmap.cpp:234 msgid "User subject match is missing user subject." msgstr "ОтÑутÑтвует Ñубъект Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð´Ð»Ñ ÑоответÑÑ‚Ð²Ð¸Ñ Ñубъекта." #: src/hed/shc/legacy/unixmap.cpp:238 #: src/services/gridftpd/auth/unixmap.cpp:230 #, c-format msgid "Mapfile at %s can't be opened." msgstr "Ðевозможно открыть пул пользователей в %s." #: src/hed/shc/legacy/unixmap.cpp:262 #: src/services/gridftpd/auth/unixmap.cpp:255 msgid "User pool mapping is missing user subject." msgstr "ОтÑутÑтвует Ñубъект Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð² приÑвоении пула пользователей." #: src/hed/shc/legacy/unixmap.cpp:267 #: src/services/gridftpd/auth/unixmap.cpp:260 #, c-format msgid "User pool at %s can't be opened." msgstr "Ðевозможно открыть пул пользователей в %s." #: src/hed/shc/legacy/unixmap.cpp:272 #: src/services/gridftpd/auth/unixmap.cpp:265 #, c-format msgid "User pool at %s failed to perform user mapping." msgstr "" "Пул пользователей в %s не Ñмог уÑтановить ÑоответÑтвие Ð´Ð»Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ." #: src/hed/shc/legacy/unixmap.cpp:290 #: src/services/gridftpd/auth/unixmap.cpp:283 #, c-format msgid "User name direct mapping is missing user name: %s." msgstr "ОтÑутÑтвует Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð² прÑмом приÑвоении имени: %s." 
#: src/hed/shc/otokens/OTokensSH.cpp:63 msgid "OTokens: Attr: message" msgstr "OTokens: Attr: Ñообщение" #: src/hed/shc/otokens/OTokensSH.cpp:68 #, c-format msgid "OTokens: Attr: %s = %s" msgstr "OTokens: Attr: %s = %s" #: src/hed/shc/otokens/OTokensSH.cpp:73 #, c-format msgid "OTokens: Attr: token: %s" msgstr "OTokens: Attr: токен: %s" #: src/hed/shc/otokens/OTokensSH.cpp:76 #, c-format msgid "OTokens: Attr: token: bearer: %s" msgstr "OTokens: Attr: токен: ноÑитель: %s" #: src/hed/shc/otokens/OTokensSH.cpp:146 msgid "OTokens: Handle" msgstr "OTokens: Handle" #: src/hed/shc/otokens/OTokensSH.cpp:148 msgid "OTokens: Handle: message" msgstr "OTokens: Handle: Ñообщение" #: src/hed/shc/otokens/OTokensSH.cpp:151 msgid "Failed to create OTokens security attributes" msgstr "Ðе удалоÑÑŒ Ñоздать атрибуты безопаÑноÑти OTokens" #: src/hed/shc/otokens/OTokensSH.cpp:155 #, c-format msgid "OTokens: Handle: attributes created: subject = %s" msgstr "OTokens: Handle: Ñозданы атрибуты: Ñубъект = %s" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:48 msgid "Creating a pdpservice client" msgstr "СоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ pdpservice" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:80 msgid "Arc policy can not been carried by SAML2.0 profile of XACML" msgstr "Политика ARC не может быть задана в профиле SAML2.0 XACML" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:152 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:185 msgid "Policy Decision Service invocation failed" msgstr "Ðе удалоÑÑŒ запуÑтить Ñлужбу принÑÑ‚Ð¸Ñ Ñ€ÐµÑˆÐµÐ½Ð¸Ð¹ по политикам" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:155 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:188 #: src/tests/client/test_ClientInterface.cpp:88 #: src/tests/client/test_ClientSAML2SSO.cpp:81 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:100 #: src/tests/echo/test_clientinterface.cpp:82 #: src/tests/echo/test_clientinterface.cpp:149 #: src/tests/echo/test_clientinterface.py:32 msgid "There was no SOAP response" msgstr "Ðет ответа SOAP" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:170 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:204 msgid "Authorized from remote pdp service" msgstr "Допущен удалённой Ñлужбой PDP" #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:171 #: src/hed/shc/pdpserviceinvoker/PDPServiceInvoker.cpp:205 msgid "Unauthorized from remote pdp service" msgstr "Ðе допущен удалённой Ñлужбой PDP" #: src/hed/shc/saml2sso_assertionconsumersh/SAML2SSO_AssertionConsumerSH.cpp:69 msgid "Can not get SAMLAssertion SecAttr from message context" msgstr "Ðевозможно извлечь SAMLAssertion SecAttr из контекÑта ÑообщениÑ" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:152 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:44 msgid "Missing or empty CertificatePath element" msgstr "Элемент CertificatePath отÑутÑтвует или пуÑÑ‚" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:157 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:49 msgid "Missing or empty KeyPath element" msgstr "Элемент KeyPath отÑутÑтвует или пуÑÑ‚" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:163 msgid "" "Both of CACertificatePath and CACertificatesDir elements missing or empty" msgstr "" "Оба Ñлемента CACertificatePath and CACertificatesDir отÑутÑтвуют или пуÑты" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:175 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:61 msgid "" "Missing or empty CertificatePath or CACertificatesDir element; will only " "check the signature, will not do message authentication" msgstr "" "Элемент CertificatePath или 
CACertificatesDir отÑутÑтвует или пуÑÑ‚; будет " "выполнена лишь проверка подпиÑи, а не удоÑтоверение подлинноÑти ÑообщениÑ" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:179 #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:65 #: src/hed/shc/x509tokensh/X509TokenSH.cpp:65 #, c-format msgid "Processing type not supported: %s" msgstr "Ðеподдерживаемый тип обработки: %s" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:199 msgid "Failed to parse SAML Token from incoming SOAP" msgstr "Ðе удалоÑÑŒ разобрать токен SAML из входÑщего документа SOAP" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:209 msgid "Failed to authenticate SAML Token inside the incoming SOAP" msgstr "" "Ðе удалоÑÑŒ уÑтановить подлинноÑть токена SAML во входÑщем документе SOAP" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:212 msgid "Succeeded to authenticate SAMLToken" msgstr "УÑÐ¿ÐµÑˆÐ½Ð°Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ° подлинноÑти токена SAML" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:275 #, c-format msgid "No response from AA service %s" msgstr "Ðет ответа от Ñервера AA %s" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:279 #, c-format msgid "SOAP Request to AA service %s failed" msgstr "Ошибка запроÑа SOAP к Ñерверу AA %s" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:287 msgid "Cannot find content under response soap message" msgstr "Ðе удалоÑÑŒ найти Ñодержание ответного ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ SOAP" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:291 msgid "Cannot find under response soap message:" msgstr "Ðе удалоÑÑŒ найти Ñлемент в ответном Ñообщении SOAP:" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:308 msgid "The Response is not going to this end" msgstr "Отклик доÑюда не дошёл" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:315 msgid "The StatusCode is Success" msgstr "StatusCode: Success" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:321 msgid "Succeeded to verify the signature under " msgstr "ПодпиÑÑŒ уÑпешно подтверждена" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:324 msgid "Failed to verify the signature under " msgstr "ПодпиÑÑŒ не подтверждена" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:335 msgid "Failed to generate SAML Token for outgoing SOAP" msgstr "Ðе удалоÑÑŒ Ñоздать токен SAML Ð´Ð»Ñ Ð¸ÑходÑщего ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ SOAP" #: src/hed/shc/samltokensh/SAMLTokenSH.cpp:345 msgid "SAML Token handler is not configured" msgstr "Обработчик токена SAML не наÑтроен" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:29 #, c-format msgid "Access list location: %s" msgstr "МеÑтонахождение ÑпиÑка доÑтупа: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:39 msgid "" "No policy file or DNs specified for simplelist.pdp, please set location " "attribute or at least one DN element for simplelist PDP node in " "configuration." msgstr "" "Ð”Ð»Ñ simplelist.pdp не задан файл политик или DN; пожалуйÑта, задайте в " "наÑтройках атрибут location или Ñ…Ð¾Ñ‚Ñ Ð±Ñ‹ один Ñлемент DN Ð´Ð»Ñ ÑƒÐ·Ð»Ð° PDP " "simplelist." 
#: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:42 #, c-format msgid "Subject to match: %s" msgstr "Субъект Ð´Ð»Ñ Ñверки: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:45 #, c-format msgid "Policy subject: %s" msgstr "Субъект политики: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:47 #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:73 #, c-format msgid "Authorized from simplelist.pdp: %s" msgstr "Допущен через simplelist.pdp: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:54 msgid "" "The policy file setup for simplelist.pdp does not exist, please check " "location attribute for simplelist PDP node in service configuration" msgstr "" "Ð”Ð»Ñ simplelist.pdp не задан файл наÑтройки политик; пожалуйÑта, проверьте " "атрибут location в наÑтройках Ñлужбы узла PDP simplelist" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:61 #, c-format msgid "Policy line: %s" msgstr "Строка политики: %s" #: src/hed/shc/simplelistpdp/SimpleListPDP.cpp:79 #, c-format msgid "Not authorized from simplelist.pdp: %s" msgstr "Ðе допущен через simplelist.pdp: %s" #: src/hed/shc/test.cpp:27 src/hed/shc/testinterface_arc.cpp:26 #: src/hed/shc/testinterface_xacml.cpp:26 msgid "Start test" msgstr "Ðачать теÑÑ‚" #: src/hed/shc/test.cpp:101 msgid "Input request from a file: Request.xml" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð²Ð²Ð¾Ð´Ð° из файла: Request.xml" #: src/hed/shc/test.cpp:107 src/hed/shc/test.cpp:197 #: src/hed/shc/testinterface_arc.cpp:124 #, c-format msgid "There is %d subjects, which satisfy at least one policy" msgstr "Обнаружены %d Ñубъекта, удовлетворÑющих Ñ…Ð¾Ñ‚Ñ Ð±Ñ‹ одной политике" #: src/hed/shc/test.cpp:121 #, c-format msgid "Attribute Value (1): %s" msgstr "Значение атрибута (1): %s" #: src/hed/shc/test.cpp:132 msgid "Input request from code" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð²Ð²Ð¾Ð´Ð° из программы" #: src/hed/shc/test.cpp:211 #, c-format msgid "Attribute Value (2): %s" msgstr "Значение атрибута (2): %s" #: src/hed/shc/testinterface_arc.cpp:75 src/hed/shc/testinterface_xacml.cpp:46 msgid "Can not dynamically produce Policy" msgstr "Ðе удалоÑÑŒ динамичеÑки Ñоздать Policy" #: src/hed/shc/testinterface_arc.cpp:138 #, c-format msgid "Attribute Value inside Subject: %s" msgstr "Значение атрибута в Ñубъекте: %s" #: src/hed/shc/testinterface_arc.cpp:148 msgid "The request has passed the policy evaluation" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ñ€Ð¾ÑˆÑ‘Ð» Ñверку Ñ Ð¿Ð¾Ð»Ð¸Ñ‚Ð¸ÐºÐ¾Ð¹" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:43 msgid "Missing or empty PasswordSource element" msgstr "Элемент PasswordSource отÑутÑтвует или пуÑÑ‚" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:54 #, c-format msgid "Password encoding type not supported: %s" msgstr "Тип ÑˆÐ¸Ñ„Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¿Ð°Ñ€Ð¾Ð»Ñ Ð½Ðµ поддерживаетÑÑ: %s" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:59 msgid "Missing or empty Username element" msgstr "Элемент Username отÑутÑтвует или пуÑÑ‚" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:79 msgid "The payload of incoming message is empty" msgstr "Во входÑщем Ñообщении отÑутÑтвует Ð¿Ð¾Ð»ÐµÐ·Ð½Ð°Ñ Ð½Ð°Ð³Ñ€ÑƒÐ·ÐºÐ°" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:84 msgid "Failed to cast PayloadSOAP from incoming payload" msgstr "Ðе удалоÑÑŒ Ñоздать PayloadSOAP из входÑщей нагрузки" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:89 msgid "Failed to parse Username Token from incoming SOAP" msgstr "Ðе удалоÑÑŒ разобрать токен Username из входÑщего документа SOAP" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:95 msgid "Failed to authenticate Username Token inside the incoming SOAP" msgstr "" "Ðе удалоÑÑŒ уÑтановить 
подлинноÑть токена Username во входÑщем документе SOAP" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:98 msgid "Succeeded to authenticate UsernameToken" msgstr "УÑÐ¿ÐµÑˆÐ½Ð°Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ° подлинноÑти UsernameToken" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:108 msgid "The payload of outgoing message is empty" msgstr "Ð’ иÑходÑщем Ñообщении отÑутÑтвует Ð¿Ð¾Ð»ÐµÐ·Ð½Ð°Ñ Ð½Ð°Ð³Ñ€ÑƒÐ·ÐºÐ°" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:113 msgid "Failed to cast PayloadSOAP from outgoing payload" msgstr "Ðе удалоÑÑŒ Ñоздать PayloadSOAP из иÑходÑщей нагрузки" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:119 msgid "Failed to generate Username Token for outgoing SOAP" msgstr "" "Ðе удалоÑÑŒ Ñоздать токен имени Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð´Ð»Ñ Ð¸ÑходÑщего ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ SOAP" #: src/hed/shc/usernametokensh/UsernameTokenSH.cpp:127 msgid "Username Token handler is not configured" msgstr "Обработчик токена Username не наÑтроен" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:81 msgid "Failed to parse X509 Token from incoming SOAP" msgstr "Ðе удалоÑÑŒ разобрать токен X509 из входÑщего документа SOAP" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:85 msgid "Failed to verify X509 Token inside the incoming SOAP" msgstr "Ðе удалоÑÑŒ подтвердить токен X509 во входÑщем документе SOAP" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:89 msgid "Failed to authenticate X509 Token inside the incoming SOAP" msgstr "" "Ðе удалоÑÑŒ уÑтановить подлинноÑть токена X509 во входÑщем документе SOAP" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:92 msgid "Succeeded to authenticate X509Token" msgstr "УÑпешное подтверждение подлинноÑти токена X509" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:102 msgid "Failed to generate X509 Token for outgoing SOAP" msgstr "Ðе удалоÑÑŒ Ñоздать токен X509 Ð´Ð»Ñ Ð¸ÑходÑщего ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ SOAP" #: src/hed/shc/x509tokensh/X509TokenSH.cpp:112 msgid "X509 Token handler is not configured" msgstr "Обработчик токена X509 не наÑтроен" #: src/hed/shc/xacmlpdp/XACMLApply.cpp:29 msgid "Can not create function: FunctionId does not exist" msgstr "Ðевозможно Ñоздать функцию: FunctionId не ÑущеÑтвует" #: src/hed/shc/xacmlpdp/XACMLApply.cpp:33 #: src/hed/shc/xacmlpdp/XACMLTarget.cpp:40 #, c-format msgid "Can not create function %s" msgstr "Ðевозможно Ñоздать функцию %s" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:88 msgid "Can not find XACMLPDPContext" msgstr "Ðевозможно найти XACMLPDPContext" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:136 msgid "Evaluator for XACMLPDP was not loaded" msgstr "Обработчик Ð´Ð»Ñ XACMLPDP не был загружен" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:151 src/hed/shc/xacmlpdp/XACMLPDP.cpp:159 msgid "Failed to convert security information to XACML request" msgstr "Ðе удалоÑÑŒ преобразовать информацию о защите в Ð·Ð°Ð¿Ñ€Ð¾Ñ XACML" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:167 #, c-format msgid "XACML request: %s" msgstr "Ð·Ð°Ð¿Ñ€Ð¾Ñ XACML: %s" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:179 msgid "Authorized from xacml.pdp" msgstr "Допущен через xacml.pdp" #: src/hed/shc/xacmlpdp/XACMLPDP.cpp:180 msgid "UnAuthorized from xacml.pdp" msgstr "Ðе допущен через xacml.pdp" #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:55 msgid "Can not find element with proper namespace" msgstr "Ðевозможно найти Ñлемент Ñ Ð½ÑƒÐ¶Ð½Ñ‹Ð¼ проÑтранÑтвом имён" #: src/hed/shc/xacmlpdp/XACMLPolicy.cpp:132 msgid "No target available inside the policy" msgstr "Политика не Ñодержит назначений" #: src/hed/shc/xacmlpdp/XACMLRequest.cpp:34 msgid "Request is empty" msgstr "ПуÑтой запроÑ" #: src/hed/shc/xacmlpdp/XACMLRequest.cpp:39 msgid "Can not 
find element with proper namespace" msgstr "Ðевозможно найти Ñлемент Ñ Ð½ÑƒÐ¶Ð½Ñ‹Ð¼ проÑтранÑтвом имён" #: src/hed/shc/xacmlpdp/XACMLRule.cpp:35 msgid "Invalid Effect" msgstr "ÐедопуÑтимый Ñффект" #: src/hed/shc/xacmlpdp/XACMLRule.cpp:48 msgid "No target available inside the rule" msgstr "Правило не Ñодержит назначений" #: src/libs/data-staging/DTR.cpp:82 src/libs/data-staging/DTR.cpp:86 #, c-format msgid "Could not handle endpoint %s" msgstr "Ðевозможно обработать точку входа %s" #: src/libs/data-staging/DTR.cpp:96 msgid "Source is the same as destination" msgstr "ИÑточник идентичен назначению" #: src/libs/data-staging/DTR.cpp:175 #, c-format msgid "Invalid ID: %s" msgstr "Ðеверный ID: %s" #: src/libs/data-staging/DTR.cpp:212 #, c-format msgid "%s->%s" msgstr "%s->%s" #: src/libs/data-staging/DTR.cpp:320 #, c-format msgid "No callback for %s defined" msgstr "Ðе определена Ñ„ÑƒÐ½ÐºÑ†Ð¸Ñ Ð¾Ð±Ñ€Ð°Ñ‚Ð½Ð¾Ð³Ð¾ вызова Ð´Ð»Ñ %s" #: src/libs/data-staging/DTR.cpp:335 #, c-format msgid "NULL callback for %s" msgstr "Ðулевой обратный вызов Ð´Ð»Ñ %s" #: src/libs/data-staging/DTR.cpp:338 #, c-format msgid "Request to push to unknown owner - %u" msgstr "Попытка передачи неизвеÑтному владельцу - %u" #: src/libs/data-staging/DTRList.cpp:216 #, c-format msgid "Boosting priority from %i to %i due to incoming higher priority DTR" msgstr "" "Увеличение приоритета %i до %i в ÑвÑзи Ñ Ð²Ñ…Ð¾Ð´Ñщим DTR более выÑокого " "приоритета" #: src/libs/data-staging/DataDelivery.cpp:48 #: src/libs/data-staging/DataDelivery.cpp:72 msgid "Received invalid DTR" msgstr "ПринÑÑ‚ неверный Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR" #: src/libs/data-staging/DataDelivery.cpp:54 #, c-format msgid "Delivery received new DTR %s with source: %s, destination: %s" msgstr "" "Служба доÑтавки получила новый Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR %s Ñ Ð¸Ñточником %s и назначением %s" #: src/libs/data-staging/DataDelivery.cpp:68 msgid "Received no DTR" msgstr "Ðе получено запроÑов DTR" #: src/libs/data-staging/DataDelivery.cpp:80 #, c-format msgid "Cancelling DTR %s with source: %s, destination: %s" msgstr "ОтменÑетÑÑ DTR %s Ñ Ð¸Ñточником: %s, назначением: %s" #: src/libs/data-staging/DataDelivery.cpp:91 #, c-format msgid "DTR %s requested cancel but no active transfer" msgstr "DTR %s запроÑил прерывание, но активные передачи отÑутÑтвуют" #: src/libs/data-staging/DataDelivery.cpp:147 #, c-format msgid "Cleaning up after failure: deleting %s" msgstr "ОчиÑтка поÑле ÑбоÑ: уничтожаетÑÑ %s" #: src/libs/data-staging/DataDelivery.cpp:188 #: src/libs/data-staging/DataDelivery.cpp:263 #: src/libs/data-staging/DataDelivery.cpp:303 #: src/libs/data-staging/DataDelivery.cpp:323 msgid "Failed to delete delivery object or deletion timed out" msgstr "Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¾Ð±ÑŠÐµÐºÑ‚Ð° доÑтавки, или иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ" #: src/libs/data-staging/DataDelivery.cpp:254 #, c-format msgid "Transfer finished: %llu bytes transferred %s" msgstr "Передача завершена: %llu байтов передано %s" #: src/libs/data-staging/DataDelivery.cpp:329 msgid "Data delivery loop exited" msgstr "Прерван цикл Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:45 msgid "No source defined" msgstr "ИÑточник не задан" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:49 msgid "No destination defined" msgstr "Ðазначение не задано" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:139 #, c-format msgid "Bad checksum format %s" msgstr "Ðеверный формат контрольной Ñуммы %s" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:178 #, c-format msgid "Failed to run command: %s" 
msgstr "Сбой иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ñ‹ %s" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:213 #, c-format msgid "DataDelivery: %s" msgstr "DataDelivery: %s" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:225 #, c-format msgid "DataStagingDelivery exited with code %i" msgstr "ПроцеÑÑ DataStagingDelivery завершилÑÑ Ñ ÐºÐ¾Ð´Ð¾Ð¼ %i" #: src/libs/data-staging/DataDeliveryLocalComm.cpp:244 #, c-format msgid "Transfer killed after %i seconds without communication" msgstr "ПереÑылка оборвана поÑле %i Ñекунд бездейÑтвиÑ" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:67 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:315 #, c-format msgid "Connecting to Delivery service at %s" msgstr "СоединÑемÑÑ Ñо Ñлужбой доÑтавки на %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:94 #, c-format msgid "Failed to set up credential delegation with %s" msgstr "Сбой уÑтановки Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¿Ñ€Ð°Ð² доÑтупа Ñ %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:100 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:174 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:240 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:326 #, c-format msgid "" "Request:\n" "%s" msgstr "" "ЗапроÑ:\n" "%s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:106 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:332 #, c-format msgid "Could not connect to service %s: %s" msgstr "Ðе удалоÑÑŒ ÑоединитьÑÑ Ñо Ñлужбой %s: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:114 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:340 #, c-format msgid "No SOAP response from Delivery service %s" msgstr "Ðет ответа SOAP от Ñлужбы доÑтавки %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:119 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:193 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:267 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:346 #, c-format msgid "" "Response:\n" "%s" msgstr "" "Отзыв:\n" "%s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:128 #, c-format msgid "Failed to start transfer request: %s" msgstr "Сбой запуÑка запроÑа на передачу: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:135 #, c-format msgid "Bad format in XML response from service at %s: %s" msgstr "ÐедопуÑтимый формат отзыва XML от ÑервиÑа в %s: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:143 #, c-format msgid "Could not make new transfer request: %s: %s" msgstr "Ðевозможно Ñоздать новый Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¿ÐµÑ€ÐµÑылки: %s: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:148 #, c-format msgid "Started remote Delivery at %s" msgstr "Запущена ÑƒÐ´Ð°Ð»Ñ‘Ð½Ð½Ð°Ñ Ñлужба доÑтавки на %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:181 #, c-format msgid "Failed to send cancel request: %s" msgstr "Сбой отправки запроÑа на прерывание: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:188 msgid "Failed to cancel: No SOAP response" msgstr "Сбой прерываниÑ: нет ответа SOAP" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:202 #, c-format msgid "Failed to cancel transfer request: %s" msgstr "Сбой Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа на передачу: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:209 #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:290 #, c-format msgid "Bad format in XML response: %s" msgstr "Ðеверный формат отклика XML: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:216 #, c-format msgid "Failed to cancel: %s" msgstr "Ошибка отмены: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:260 msgid "No SOAP response from 
delivery service" msgstr "Ðет ответа SOAP от Ñлужбы доÑтавки" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:281 #, c-format msgid "Failed to query state: %s" msgstr "Сбой опроÑа ÑоÑтоÑниÑ: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:355 #, c-format msgid "SOAP fault from delivery service at %s: %s" msgstr "Ошибка SOAP Ñлужбы доÑтавки на %s: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:363 #, c-format msgid "Bad format in XML response from delivery service at %s: %s" msgstr "Ðеверный формат отклика XML Ñлужбы доÑтавки на %s: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:371 #, c-format msgid "Error pinging delivery service at %s: %s: %s" msgstr "Ошибка ÑвÑзи Ñо Ñлужбой доÑтавки на %s: %s: %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:379 #, c-format msgid "Dir %s allowed at service %s" msgstr "Каталог %s допуÑкаетÑÑ Ð´Ð»Ñ Ñлужбы %s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:473 #, c-format msgid "" "DataDelivery log tail:\n" "%s" msgstr "" "ПоÑледние запиÑи журнала DataDelivery:\n" "%s" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:486 msgid "Failed locating credentials" msgstr "Сбой Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð¾Ð² доÑтупа" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:491 msgid "Failed to initiate client connection" msgstr "Сбой запуÑка ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ð¾Ð¼" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:497 msgid "Client connection has no entry point" msgstr "ОтÑутÑтвует точка входа в клиентÑкую цепь" #: src/libs/data-staging/DataDeliveryRemoteComm.cpp:507 msgid "Failed to initiate delegation credentials" msgstr "Сбой инициализации параметров доÑтупа Ð´Ð»Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ" #: src/libs/data-staging/DataStagingDelivery.cpp:97 #, c-format msgid "%5u s: %10.1f kB %8.1f kB/s" msgstr "%5u Ñ: %10.1f кБ %8.1f кБ/Ñ" #: src/libs/data-staging/DataStagingDelivery.cpp:152 msgid "Unexpected arguments" msgstr "ÐепредуÑмотренные аргументы" #: src/libs/data-staging/DataStagingDelivery.cpp:155 msgid "Source URL missing" msgstr "ОтÑутÑтвует URL иÑточника" #: src/libs/data-staging/DataStagingDelivery.cpp:158 msgid "Destination URL missing" msgstr "ОтÑутÑтвует URL назначениÑ" #: src/libs/data-staging/DataStagingDelivery.cpp:162 #, c-format msgid "Source URL not valid: %s" msgstr "ÐедейÑтвительный URL иÑточника: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:166 #, c-format msgid "Destination URL not valid: %s" msgstr "ÐедейÑтвительный URL назначениÑ: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:223 #, c-format msgid "Unknown transfer option: %s" msgstr "ÐеизвеÑÑ‚Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð¸ файлов: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:248 #, c-format msgid "Source URL not supported: %s" msgstr "Ðеподдерживаемый URL иÑточника: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:253 #: src/libs/data-staging/DataStagingDelivery.cpp:272 msgid "No credentials supplied" msgstr "Ðе указаны параметры доÑтупа" #: src/libs/data-staging/DataStagingDelivery.cpp:267 #, c-format msgid "Destination URL not supported: %s" msgstr "Ðеподдерживаемый URL назначениÑ: %s" #: src/libs/data-staging/DataStagingDelivery.cpp:316 #, c-format msgid "Will calculate %s checksum" msgstr "Будет вычиÑлена ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма Ð´Ð»Ñ %s" #: src/libs/data-staging/DataStagingDelivery.cpp:327 msgid "Cannot use supplied --size option" msgstr "Ðевозможно иÑпользовать заÑвленную опцию --size" #: src/libs/data-staging/DataStagingDelivery.cpp:540 #, c-format msgid "Checksum mismatch between 
calculated checksum %s and source checksum %s" msgstr "" "ÐеÑовпадение вычиÑленной контрольной Ñуммы %s и контрольной Ñуммы иÑточника " "%s" #: src/libs/data-staging/DataStagingDelivery.cpp:550 #, c-format msgid "Failed cleaning up destination %s" msgstr "Ошибка очиÑтки цели %s" #: src/libs/data-staging/Processor.cpp:59 #: src/services/candypond/CandyPond.cpp:117 msgid "Error creating cache" msgstr "Ошибка при Ñоздании кÑша" #: src/libs/data-staging/Processor.cpp:83 #, c-format msgid "Forcing re-download of file %s" msgstr "ÐŸÑ€Ð¸Ð½ÑƒÐ´Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð¿ÐµÑ€ÐµÐ·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ° файла %s" #: src/libs/data-staging/Processor.cpp:100 #, c-format msgid "Will wait around %is" msgstr "Ожидание порÑдка %i Ñек" #: src/libs/data-staging/Processor.cpp:119 #, c-format msgid "Force-checking source of cache file %s" msgstr "ÐŸÑ€Ð¸Ð½ÑƒÐ´Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ° иÑточника кÑшированного файла %s" #: src/libs/data-staging/Processor.cpp:122 #, c-format msgid "Source check requested but failed: %s" msgstr "Проверка иÑточника запрошена, но не прошла: %s" #: src/libs/data-staging/Processor.cpp:142 msgid "Permission checking failed, will try downloading without using cache" msgstr "Сбой проверки прав доÑтупа, попытка загрузки без иÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð¸Ñ ÐºÑша" #: src/libs/data-staging/Processor.cpp:172 #, c-format msgid "Will download to cache file %s" msgstr "Будет произведена загрузка в файл кÑша %s" #: src/libs/data-staging/Processor.cpp:193 msgid "Looking up source replicas" msgstr "ПоиÑк копий файла-иÑточника" #: src/libs/data-staging/Processor.cpp:210 #: src/libs/data-staging/Processor.cpp:317 #, c-format msgid "Skipping replica on local host %s" msgstr "ПропуÑкаетÑÑ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ ÐºÐ¾Ð¿Ð¸Ñ %s" #: src/libs/data-staging/Processor.cpp:218 #: src/libs/data-staging/Processor.cpp:325 #, c-format msgid "No locations left for %s" msgstr "Ðе оÑталоÑÑŒ раÑположений Ð´Ð»Ñ %s" #: src/libs/data-staging/Processor.cpp:239 #: src/libs/data-staging/Processor.cpp:481 msgid "Resolving destination replicas" msgstr "Обнаружение копий назначениÑ" #: src/libs/data-staging/Processor.cpp:256 msgid "No locations for destination different from source found" msgstr "Ðе найдено раÑположений Ð´Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ, отличающихÑÑ Ð¾Ñ‚ иÑточника" #: src/libs/data-staging/Processor.cpp:267 msgid "Pre-registering destination in index service" msgstr "ÐŸÑ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ñ€ÐµÐ³Ð¸ÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð² каталоге" #: src/libs/data-staging/Processor.cpp:293 msgid "Resolving source replicas in bulk" msgstr "МаÑÑовое обнаружение копий иÑточника" #: src/libs/data-staging/Processor.cpp:307 #, c-format msgid "No replicas found for %s" msgstr "Ðе найдено копий Ð´Ð»Ñ %s" #: src/libs/data-staging/Processor.cpp:348 #, c-format msgid "Checking %s" msgstr "Проверка %s" #: src/libs/data-staging/Processor.cpp:357 #: src/libs/data-staging/Processor.cpp:415 msgid "Metadata of replica and index service differ" msgstr "Метаданные копии отличаютÑÑ Ð¾Ñ‚ тех, что в каталоге" #: src/libs/data-staging/Processor.cpp:365 #, c-format msgid "Failed checking source replica %s: %s" msgstr "Сбой проверки копии иÑточника %s: %s" #: src/libs/data-staging/Processor.cpp:391 msgid "Querying source replicas in bulk" msgstr "МаÑÑовый Ð¾Ð¿Ñ€Ð¾Ñ ÐºÐ¾Ð¿Ð¸Ð¹ иÑточника" #: src/libs/data-staging/Processor.cpp:403 #, c-format msgid "Failed checking source replica: %s" msgstr "Сбой проверки копии иÑточника: %s" #: src/libs/data-staging/Processor.cpp:409 msgid "Failed checking source replica" msgstr "Сбой проверки копии иÑточника" #: 
src/libs/data-staging/Processor.cpp:449 msgid "Finding existing destination replicas" msgstr "Обнаружение ÑущеÑтвующих копий назначениÑ" #: src/libs/data-staging/Processor.cpp:461 #, c-format msgid "Failed to delete replica %s: %s" msgstr "Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ ÐºÐ¾Ð¿Ð¸Ð¸ %s: %s" #: src/libs/data-staging/Processor.cpp:475 #, c-format msgid "Unregistering %s" msgstr "УдалÑетÑÑ Ð·Ð°Ð¿Ð¸ÑÑŒ о %s" #: src/libs/data-staging/Processor.cpp:486 msgid "Pre-registering destination" msgstr "ÐŸÑ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ñ€ÐµÐ³Ð¸ÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ" #: src/libs/data-staging/Processor.cpp:492 #, c-format msgid "Failed to pre-clean destination: %s" msgstr "Сбой предварительной очиÑтки назначениÑ: %s" #: src/libs/data-staging/Processor.cpp:515 msgid "Preparing to stage source" msgstr "Подготовка к размещению файла-иÑточника" #: src/libs/data-staging/Processor.cpp:528 #, c-format msgid "Source is not ready, will wait %u seconds" msgstr "ИÑточник неготов, ÑÐ»ÐµÐ´ÑƒÑŽÑ‰Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° через %u Ñек" #: src/libs/data-staging/Processor.cpp:534 msgid "No physical files found for source" msgstr "Ðе найдено реальных файлов иÑточника" #: src/libs/data-staging/Processor.cpp:552 msgid "Preparing to stage destination" msgstr "Подготовка к размещению назначениÑ" #: src/libs/data-staging/Processor.cpp:565 #, c-format msgid "Destination is not ready, will wait %u seconds" msgstr "Ðазначение неготово, ÑÐ»ÐµÐ´ÑƒÑŽÑ‰Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° через %u Ñек" #: src/libs/data-staging/Processor.cpp:571 msgid "No physical files found for destination" msgstr "Ðе найдено реальных файлов Ð´Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ" #: src/libs/data-staging/Processor.cpp:597 msgid "Releasing source" msgstr "Ð¡Ð±Ñ€Ð¾Ñ Ð¸Ñточника" #: src/libs/data-staging/Processor.cpp:601 #, c-format msgid "There was a problem during post-transfer source handling: %s" msgstr "Обнаружена проблема при обÑлуживании иÑточника поÑле переÑылки: %s" #: src/libs/data-staging/Processor.cpp:606 msgid "Releasing destination" msgstr "Ð¡Ð±Ñ€Ð¾Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ" #: src/libs/data-staging/Processor.cpp:610 #, c-format msgid "" "There was a problem during post-transfer destination handling after error: %s" msgstr "" "Обнаружена проблема при обÑлуживании Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¿Ð¾Ñле переÑылки поÑле ÑбоÑ: " "%s" #: src/libs/data-staging/Processor.cpp:614 #, c-format msgid "Error with post-transfer destination handling: %s" msgstr "Ошибка обÑÐ»ÑƒÐ¶Ð¸Ð²Ð°Ð½Ð¸Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¿Ð¾Ñле переÑылки: %s" #: src/libs/data-staging/Processor.cpp:640 msgid "Removing pre-registered destination in index service" msgstr "Отмена предварительной региÑтрации Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð² каталоге" #: src/libs/data-staging/Processor.cpp:643 #, c-format msgid "" "Failed to unregister pre-registered destination %s: %s. You may need to " "unregister it manually" msgstr "" "Ðе удалоÑÑŒ отменить предварительную региÑтрацию Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ %s: %s. Возможно, " "Вам придётÑÑ Ñделать Ñто вручную" #: src/libs/data-staging/Processor.cpp:649 msgid "Registering destination replica" msgstr "РегиÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ ÐºÐ¾Ð¿Ð¸Ð¸ назначениÑ" #: src/libs/data-staging/Processor.cpp:652 #, c-format msgid "Failed to register destination replica: %s" msgstr "Сбой региÑтрации копии назначениÑ: %s" #: src/libs/data-staging/Processor.cpp:655 #, c-format msgid "" "Failed to unregister pre-registered destination %s. You may need to " "unregister it manually" msgstr "" "Ðе удалоÑÑŒ отменить предварительную региÑтрацию Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ %s. 
Возможно, Вам " "придётÑÑ Ñделать Ñто вручную" #: src/libs/data-staging/Processor.cpp:685 msgid "Error creating cache. Stale locks may remain." msgstr "Ошибка про Ñоздании кÑша. Возможно, оÑталиÑÑŒ Ñтарые блокировки." #: src/libs/data-staging/Processor.cpp:718 #, c-format msgid "Linking/copying cached file to %s" msgstr "Создание ÑÑылки/копирование файла из кÑша в %s" #: src/libs/data-staging/Processor.cpp:739 #, c-format msgid "Failed linking cache file to %s" msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑÑылки на файл из кÑша из %s" #: src/libs/data-staging/Processor.cpp:743 #, c-format msgid "Error linking cache file to %s." msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑÑылки на файл из кÑша из %s." #: src/libs/data-staging/Processor.cpp:764 #: src/libs/data-staging/Processor.cpp:771 msgid "Adding to bulk request" msgstr "Добавление к маÑÑовому запроÑу" #: src/libs/data-staging/Scheduler.cpp:174 #: src/libs/data-staging/Scheduler.cpp:181 msgid "source" msgstr "иÑточник" #: src/libs/data-staging/Scheduler.cpp:174 #: src/libs/data-staging/Scheduler.cpp:181 msgid "destination" msgstr "назначение" #: src/libs/data-staging/Scheduler.cpp:174 #, c-format msgid "Using next %s replica" msgstr "ИÑпользуетÑÑ ÑÐ»ÐµÐ´ÑƒÑŽÑ‰Ð°Ñ ÐºÐ¾Ð¿Ð¸Ñ (%s)" #: src/libs/data-staging/Scheduler.cpp:181 #, c-format msgid "No more %s replicas" msgstr "Больше копий нет (%s)" #: src/libs/data-staging/Scheduler.cpp:183 msgid "Will clean up pre-registered destination" msgstr "Предварительное назначение будет Ñброшено" #: src/libs/data-staging/Scheduler.cpp:187 msgid "Will release cache locks" msgstr "Будет отменены блокировки в кÑше" #: src/libs/data-staging/Scheduler.cpp:190 msgid "Moving to end of data staging" msgstr "ЗаканчиваетÑÑ Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ðµ данных" #: src/libs/data-staging/Scheduler.cpp:199 #, c-format msgid "Source is mapped to %s" msgstr "ИÑточник поÑтавлен в ÑоответÑтвие %s" #: src/libs/data-staging/Scheduler.cpp:203 msgid "Cannot link to source which can be modified, will copy instead" msgstr "" "Ðевозможно Ñоздать ÑÑылку на иÑточник, который может изменитьÑÑ; будет " "Ñделана копиÑ" #: src/libs/data-staging/Scheduler.cpp:212 msgid "Cannot link to a remote destination. Will not use mapped URL" msgstr "" "Ðевозможно Ñоздать ÑÑылку на удалённое назначение. ПрипиÑанный URL не будет " "иÑпользован" #: src/libs/data-staging/Scheduler.cpp:215 msgid "Linking mapped file" msgstr "СоздаётÑÑ ÑимволичеÑÐºÐ°Ñ ÑÑылка на ÑоответÑтвующий файл" #: src/libs/data-staging/Scheduler.cpp:222 #, c-format msgid "Failed to create link: %s. Will not use mapped URL" msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑÑылки %s. 
ПрипиÑанный URL не будет иÑпользован" #: src/libs/data-staging/Scheduler.cpp:247 #, c-format msgid "" "Scheduler received new DTR %s with source: %s, destination: %s, assigned to " "transfer share %s with priority %d" msgstr "" "Планировщик получил новый Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR %s Ñ Ð¸Ñточником: %s, назначением: %s, " "припиÑан к доле %s Ñ Ð¿Ñ€Ð¸Ð¾Ñ€Ð¸Ñ‚ÐµÑ‚Ð¾Ð¼ %d" #: src/libs/data-staging/Scheduler.cpp:255 msgid "" "File is not cacheable, was requested not to be cached or no cache available, " "skipping cache check" msgstr "" "Файл либо не может быть кÑширован, либо кÑширование не было запрошено, либо " "кÑша нет; пропуÑкаетÑÑ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ° кÑша" #: src/libs/data-staging/Scheduler.cpp:261 msgid "File is cacheable, will check cache" msgstr "Файл может быть кÑширован, проверÑетÑÑ ÐºÑш" #: src/libs/data-staging/Scheduler.cpp:264 #: src/libs/data-staging/Scheduler.cpp:289 #, c-format msgid "File is currently being cached, will wait %is" msgstr "Файл ещё кÑшируетÑÑ, ожидание %i Ñек" #: src/libs/data-staging/Scheduler.cpp:283 msgid "Timed out while waiting for cache lock" msgstr "ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð±Ð»Ð¾ÐºÐ¸Ñ€Ð¾Ð²ÐºÐ¸ кÑша" #: src/libs/data-staging/Scheduler.cpp:293 msgid "Checking cache again" msgstr "КÑш проверÑетÑÑ Ñнова" #: src/libs/data-staging/Scheduler.cpp:313 msgid "Destination file is in cache" msgstr "Файл Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð·Ð°Ð¿Ð¸Ñан в кÑш" #: src/libs/data-staging/Scheduler.cpp:317 msgid "Source and/or destination is index service, will resolve replicas" msgstr "" "ИÑточник и/или назначение ÑвлÑетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼, будет произведён поиÑк копий" #: src/libs/data-staging/Scheduler.cpp:320 msgid "" "Neither source nor destination are index services, will skip resolving " "replicas" msgstr "" "Ðи иÑточник, ни назначение не ÑвлÑÑŽÑ‚ÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼, поиÑк копий не будет " "произведён" #: src/libs/data-staging/Scheduler.cpp:331 msgid "Problem with index service, will release cache lock" msgstr "Проблема Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼, кÑш будет разблокирован" #: src/libs/data-staging/Scheduler.cpp:335 msgid "Problem with index service, will proceed to end of data staging" msgstr "Проблема Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼, переходим к завершению Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…" #: src/libs/data-staging/Scheduler.cpp:345 msgid "Checking source file is present" msgstr "Проверка Ð½Ð°Ð»Ð¸Ñ‡Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð°-иÑточника" #: src/libs/data-staging/Scheduler.cpp:353 msgid "Error with source file, moving to next replica" msgstr "Ошибка в файле иÑточника, пробуем другую копию" #: src/libs/data-staging/Scheduler.cpp:375 #, c-format msgid "Replica %s has long latency, trying next replica" msgstr "У копии %s Ð´Ð¾Ð»Ð³Ð°Ñ Ð·Ð°Ð´ÐµÑ€Ð¶ÐºÐ°, пробуем Ñледующую копию" #: src/libs/data-staging/Scheduler.cpp:377 #, c-format msgid "No more replicas, will use %s" msgstr "Больше копий нет, будет иÑпользован файл %s" #: src/libs/data-staging/Scheduler.cpp:380 #, c-format msgid "Checking replica %s" msgstr "ПроверÑетÑÑ ÐºÐ¾Ð¿Ð¸Ñ %s" #: src/libs/data-staging/Scheduler.cpp:390 msgid "Overwrite requested - will pre-clean destination" msgstr "Запрошена перезапиÑÑŒ - назначение будет предварительно очищено" #: src/libs/data-staging/Scheduler.cpp:393 msgid "No overwrite requested or allowed, skipping pre-cleaning" msgstr "" "ПерезапиÑÑŒ не запрошена или не разрешена, Ð¿Ñ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð¾Ñ‡Ð¸Ñтка " "пропуÑкаетÑÑ" #: src/libs/data-staging/Scheduler.cpp:401 msgid "Pre-clean failed, will still try to copy" msgstr "Сбой предварительной очиÑтки, вÑÑ‘ же попытаемÑÑ Ñкопировать" #: 
src/libs/data-staging/Scheduler.cpp:408 msgid "Source or destination requires staging" msgstr "ИÑточник или назначение требуют Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ñ Ð»ÐµÐ½Ñ‚Ð¾Ñ‡Ð½Ð¾Ð³Ð¾ накопителÑ" #: src/libs/data-staging/Scheduler.cpp:412 msgid "No need to stage source or destination, skipping staging" msgstr "" "Ðе требуетÑÑ Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ðµ Ñ Ð»ÐµÐ½Ñ‚Ð¾Ñ‡Ð½Ð¾Ð³Ð¾ Ð½Ð°ÐºÐ¾Ð¿Ð¸Ñ‚ÐµÐ»Ñ Ð½Ð¸ иÑточника, ни назначениÑ; " "размещение пропуÑкаетÑÑ" #: src/libs/data-staging/Scheduler.cpp:442 msgid "Staging request timed out, will release request" msgstr "ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа на размещение, Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð±ÑƒÐ´ÐµÑ‚ отозван" #: src/libs/data-staging/Scheduler.cpp:446 msgid "Querying status of staging request" msgstr "ÐžÐ¿Ñ€Ð¾Ñ ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа на размещение" #: src/libs/data-staging/Scheduler.cpp:455 msgid "Releasing requests" msgstr "Ð¡Ð±Ñ€Ð¾Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñов" #: src/libs/data-staging/Scheduler.cpp:472 msgid "DTR is ready for transfer, moving to delivery queue" msgstr "DTR готов к переÑылке, переводитÑÑ Ð² очередь на доÑтавку" #: src/libs/data-staging/Scheduler.cpp:487 #, c-format msgid "Transfer failed: %s" msgstr "Сбой передачи: %s" #: src/libs/data-staging/Scheduler.cpp:497 msgid "Releasing request(s) made during staging" msgstr "Отзыв запроÑов, Ñделанных при размещении" #: src/libs/data-staging/Scheduler.cpp:500 msgid "Neither source nor destination were staged, skipping releasing requests" msgstr "" "Ðи иÑточник, ни назначение не были размещены Ñ Ð»ÐµÐ½Ñ‚Ð¾Ñ‡Ð½Ð¾Ð³Ð¾ накопителÑ, " "пропуÑкаетÑÑ Ð¾Ñ‚Ð¼ÐµÐ½Ð° запроÑов" #: src/libs/data-staging/Scheduler.cpp:512 msgid "Trying next replica" msgstr "Пробуем Ñледующую копию" #: src/libs/data-staging/Scheduler.cpp:517 msgid "unregister" msgstr "дерегиÑтрациÑ" #: src/libs/data-staging/Scheduler.cpp:517 msgid "register" msgstr "региÑтрациÑ" #: src/libs/data-staging/Scheduler.cpp:516 #, c-format msgid "Will %s in destination index service" msgstr "Будет выполнена Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ñ %s в каталоге назначениÑ" #: src/libs/data-staging/Scheduler.cpp:520 msgid "Destination is not index service, skipping replica registration" msgstr "Ðазначение не ÑвлÑетÑÑ ÑƒÐºÐ°Ð·Ð°Ñ‚ÐµÐ»ÐµÐ¼, пропуÑкаетÑÑ Ñ€ÐµÐ³Ð¸ÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ ÐºÐ¾Ð¿Ð¸Ð¸" #: src/libs/data-staging/Scheduler.cpp:533 msgid "Error registering replica, moving to end of data staging" msgstr "Ошибка региÑтрации копии, переход к завершению размещениÑ" #: src/libs/data-staging/Scheduler.cpp:542 msgid "Will process cache" msgstr "Будет обработан кÑш" #: src/libs/data-staging/Scheduler.cpp:546 msgid "File is not cacheable, skipping cache processing" msgstr "Файл не может быть кÑширован, пропуÑкаетÑÑ Ð¾Ð±Ñ€Ð°Ð±Ð¾Ñ‚ÐºÐ° кÑша" #: src/libs/data-staging/Scheduler.cpp:560 msgid "Cancellation complete" msgstr "Отмена завершена" #: src/libs/data-staging/Scheduler.cpp:574 msgid "Will wait 10s" msgstr "Ожидание 10 Ñекунд" #: src/libs/data-staging/Scheduler.cpp:580 msgid "Error in cache processing, will retry without caching" msgstr "Ошибка при обработке кÑша, попытаемÑÑ Ð±ÐµÐ· кÑшированиÑ" #: src/libs/data-staging/Scheduler.cpp:589 msgid "Will retry without caching" msgstr "Будет произведена Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° без кÑшированиÑ" #: src/libs/data-staging/Scheduler.cpp:607 msgid "Proxy has expired" msgstr "Срок дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти иÑтёк" #: src/libs/data-staging/Scheduler.cpp:618 #, c-format msgid "%i retries left, will wait until %s before next attempt" msgstr "ОÑталоÑÑŒ %i попыток, Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° в %s" #: src/libs/data-staging/Scheduler.cpp:634 msgid "Out of retries" 
msgstr "ДоÑтигнут предел количеÑтва попыток" #: src/libs/data-staging/Scheduler.cpp:636 msgid "Permanent failure" msgstr "УÑтойчивый Ñбой" #: src/libs/data-staging/Scheduler.cpp:642 msgid "Finished successfully" msgstr "УÑпешное завершение" #: src/libs/data-staging/Scheduler.cpp:652 msgid "Returning to generator" msgstr "Возврат в генератор" #: src/libs/data-staging/Scheduler.cpp:818 #, c-format msgid "File is smaller than %llu bytes, will use local delivery" msgstr "Файл меньше %llu байт, будет иÑпользована Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ Ð´Ð¾Ñтавка" #: src/libs/data-staging/Scheduler.cpp:872 #, c-format msgid "Delivery service at %s can copy to %s" msgstr "Служба доÑтавки в %s может копировать в %s" #: src/libs/data-staging/Scheduler.cpp:880 #, c-format msgid "Delivery service at %s can copy from %s" msgstr "Служба доÑтавки в %s может копировать из %s" #: src/libs/data-staging/Scheduler.cpp:893 msgid "Could not find any useable delivery service, forcing local transfer" msgstr "" "Ðе удалоÑÑŒ обнаружить подходÑщую Ñлужбу доÑтавки, вынужденно иÑпользуетÑÑ " "Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ Ð¿ÐµÑ€ÐµÑылка" #: src/libs/data-staging/Scheduler.cpp:909 #, c-format msgid "Not using delivery service at %s because it is full" msgstr "Служба доÑтавки на %s не иÑпользуетÑÑ Ð² ÑвÑзи Ñ Ð¿ÐµÑ€ÐµÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸ÐµÐ¼" #: src/libs/data-staging/Scheduler.cpp:936 #, c-format msgid "Not using delivery service %s due to previous failure" msgstr "Служба доÑтавки %s не иÑпользуетÑÑ Ð² ÑвÑзи Ñ Ð¿Ñ€ÐµÐ´Ñ‹Ð´ÑƒÑ‰Ð¸Ð¼ Ñбоем" #: src/libs/data-staging/Scheduler.cpp:946 msgid "No remote delivery services are useable, forcing local delivery" msgstr "" "Ðи одна из удалённых Ñлужб доÑтавки не подходит, вынужденно иÑпользуетÑÑ " "Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ Ð´Ð¾Ñтавка" #: src/libs/data-staging/Scheduler.cpp:1150 msgid "Cancelling active transfer" msgstr "Отмена активных передач" #: src/libs/data-staging/Scheduler.cpp:1160 msgid "Processing thread timed out. Restarting DTR" msgstr "Вышло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ð¾Ñ‚Ð¾ÐºÐ° обработки. 
DTR перезапуÑкаетÑÑ" #: src/libs/data-staging/Scheduler.cpp:1228 msgid "Will use bulk request" msgstr "Будет иÑпользован маÑÑовый запроÑ" #: src/libs/data-staging/Scheduler.cpp:1250 msgid "No delivery endpoints available, will try later" msgstr "Ðет доÑтупных назначений Ð´Ð»Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ·ÐºÐ¸, попытаемÑÑ Ð¿Ð¾Ð·Ð¶Ðµ" #: src/libs/data-staging/Scheduler.cpp:1269 msgid "Scheduler received NULL DTR" msgstr "Планировщик получил пуÑтой Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR" #: src/libs/data-staging/Scheduler.cpp:1279 msgid "Scheduler received invalid DTR" msgstr "Планировщик получил недопуÑтимый Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR" #: src/libs/data-staging/Scheduler.cpp:1368 msgid "Scheduler starting up" msgstr "ЗапуÑк планировщика" #: src/libs/data-staging/Scheduler.cpp:1369 msgid "Scheduler configuration:" msgstr "ÐšÐ¾Ð½Ñ„Ð¸Ð³ÑƒÑ€Ð°Ñ†Ð¸Ñ Ð¿Ð»Ð°Ð½Ð¸Ñ€Ð¾Ð²Ñ‰Ð¸ÐºÐ°:" #: src/libs/data-staging/Scheduler.cpp:1370 #, c-format msgid " Pre-processor slots: %u" msgstr " МеÑÑ‚ Ð´Ð»Ñ Ð¿Ñ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ð¹ обработки: %u" #: src/libs/data-staging/Scheduler.cpp:1371 #, c-format msgid " Delivery slots: %u" msgstr " МеÑÑ‚ Ð´Ð»Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ·ÐºÐ¸: %u" #: src/libs/data-staging/Scheduler.cpp:1372 #, c-format msgid " Post-processor slots: %u" msgstr " МеÑÑ‚ Ð´Ð»Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ð¹ обработки: %u" #: src/libs/data-staging/Scheduler.cpp:1373 #, c-format msgid " Emergency slots: %u" msgstr " МеÑÑ‚ Ð´Ð»Ñ Ñрочной обработки: %u" #: src/libs/data-staging/Scheduler.cpp:1374 #, c-format msgid " Prepared slots: %u" msgstr " Подготовленных меÑÑ‚: %u" #: src/libs/data-staging/Scheduler.cpp:1375 #, c-format msgid "" " Shares configuration:\n" "%s" msgstr "" " ÐšÐ¾Ð½Ñ„Ð¸Ð³ÑƒÑ€Ð°Ñ†Ð¸Ñ ÐºÐ²Ð¾Ñ‚:\n" "%s" #: src/libs/data-staging/Scheduler.cpp:1378 msgid " Delivery service: LOCAL" msgstr " Служба доÑтавки: LOCAL" #: src/libs/data-staging/Scheduler.cpp:1379 #, c-format msgid " Delivery service: %s" msgstr " Служба доÑтавки: %s" #: src/libs/data-staging/Scheduler.cpp:1384 msgid "Failed to create DTR dump thread" msgstr "Ðе удалоÑÑŒ Ñоздать поток ÑброÑа DTR" #: src/libs/data-staging/Scheduler.cpp:1401 #: src/services/data-staging/DataDeliveryService.cpp:507 #, c-format msgid "DTR %s cancelled" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ DTR %s отменён" #: src/libs/data-staging/examples/Generator.cpp:15 msgid "Shutting down scheduler" msgstr "Планировщик оÑтанавливаетÑÑ" #: src/libs/data-staging/examples/Generator.cpp:17 msgid "Scheduler stopped, exiting" msgstr "Планировщик оÑтановлен, выход" #: src/libs/data-staging/examples/Generator.cpp:23 #, c-format msgid "Received DTR %s back from scheduler in state %s" msgstr "Планировщик вернул Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR %s в ÑоÑтоÑнии %s" #: src/libs/data-staging/examples/Generator.cpp:30 msgid "Generator started" msgstr "Генератор запущен" #: src/libs/data-staging/examples/Generator.cpp:31 msgid "Starting DTR threads" msgstr "ЗапуÑкаютÑÑ Ð¿Ð¾Ñ‚Ð¾ÐºÐ¸ DTR" #: src/libs/data-staging/examples/Generator.cpp:44 msgid "No valid credentials found, exiting" msgstr "Ðе найдены дейÑтвительные параметры доÑтупа, выход" #: src/libs/data-staging/examples/Generator.cpp:55 #, c-format msgid "Problem creating dtr (source %s, destination %s)" msgstr "Проблема при Ñоздании DTR (иÑточник %s, назначение %s)" #: src/services/a-rex/arex.cpp:452 src/services/candypond/CandyPond.cpp:569 #: src/services/data-staging/DataDeliveryService.cpp:681 #, c-format msgid "SOAP operation is not supported: %s" msgstr "ÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ñ SOAP не поддерживаетÑÑ: %s" #: src/services/a-rex/arex.cpp:471 src/services/a-rex/arex.cpp:517 #, c-format msgid "Security Handlers processing failed: %s" 
msgstr "Сбой в процеÑÑе обработки прав доÑтупа: %s" #: src/services/a-rex/arex.cpp:485 msgid "Can't obtain configuration. Public information is disabled." msgstr "Ðе удалоÑÑŒ получить конфигурацию. ÐžÑ‚ÐºÑ€Ñ‹Ñ‚Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð½ÐµÐ´Ð¾Ñтупна." #: src/services/a-rex/arex.cpp:495 msgid "" "Can't obtain configuration. Public information is disallowed for this user." msgstr "" "Ðе удалоÑÑŒ получить конфигурацию. ÐžÑ‚ÐºÑ€Ñ‹Ñ‚Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð½ÐµÐ´Ð¾Ñтупна Ð´Ð»Ñ Ñтого " "пользователÑ." #: src/services/a-rex/arex.cpp:502 msgid "Can't obtain configuration. Only public information is provided." msgstr "Ðе удалоÑÑŒ получить конфигурацию. Только Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð´Ð¾Ñтупна." #: src/services/a-rex/arex.cpp:530 src/services/a-rex/rest/rest.cpp:674 #, c-format msgid "Connection from %s: %s" msgstr "Соединение Ñ %s: %s" #: src/services/a-rex/arex.cpp:533 src/services/a-rex/rest/rest.cpp:678 #, c-format msgid "process: method: %s" msgstr "процеÑÑ: метод: %s" #: src/services/a-rex/arex.cpp:534 src/services/a-rex/rest/rest.cpp:679 #, c-format msgid "process: endpoint: %s" msgstr "процеÑÑ: ÐºÐ¾Ð½ÐµÑ‡Ð½Ð°Ñ Ñ‚Ð¾Ñ‡ÐºÐ°: %s" #: src/services/a-rex/arex.cpp:559 #, c-format msgid "process: id: %s" msgstr "процеÑÑ: идентификатор: %s" #: src/services/a-rex/arex.cpp:560 #, c-format msgid "process: subop: %s" msgstr "процеÑÑ: подопциÑ: %s" #: src/services/a-rex/arex.cpp:567 #, c-format msgid "process: subpath: %s" msgstr "процеÑÑ: подкаталог: %s" #: src/services/a-rex/arex.cpp:605 src/services/candypond/CandyPond.cpp:543 #: src/services/data-staging/DataDeliveryService.cpp:641 #: src/tests/echo/echo.cpp:98 #, c-format msgid "process: request=%s" msgstr "процеÑÑ: запроÑ=%s" #: src/services/a-rex/arex.cpp:610 src/services/candypond/CandyPond.cpp:548 #: src/services/data-staging/DataDeliveryService.cpp:646 #: src/tests/count/count.cpp:69 msgid "input does not define operation" msgstr "не задана Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ñ Ð½Ð° вводе" #: src/services/a-rex/arex.cpp:613 src/services/candypond/CandyPond.cpp:551 #: src/services/data-staging/DataDeliveryService.cpp:649 #: src/tests/count/count.cpp:72 #, c-format msgid "process: operation: %s" msgstr "процеÑÑ: операциÑ: %s" #: src/services/a-rex/arex.cpp:640 msgid "POST request on special path is not supported" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ POST на ÑпецифичеÑкий путь не поддерживаетÑÑ" #: src/services/a-rex/arex.cpp:645 msgid "process: factory endpoint" msgstr "процеÑÑ: ÐºÐ¾Ð½ÐµÑ‡Ð½Ð°Ñ Ñ‚Ð¾Ñ‡ÐºÐ° фабрики" #: src/services/a-rex/arex.cpp:788 src/services/candypond/CandyPond.cpp:580 #: src/services/data-staging/DataDeliveryService.cpp:692 #: src/tests/echo/echo.cpp:158 #, c-format msgid "process: response=%s" msgstr "процеÑÑ: отзыв=%s" #: src/services/a-rex/arex.cpp:794 msgid "Per-job POST/SOAP requests are not supported" msgstr "ЗапроÑÑ‹ POST/SOAP предварÑющие задачу не поддерживаютÑÑ" #: src/services/a-rex/arex.cpp:803 msgid "process: GET" msgstr "процеÑÑ: GET" #: src/services/a-rex/arex.cpp:804 #, c-format msgid "GET: id %s path %s" msgstr "GET: идентификатор %s путь %s" #: src/services/a-rex/arex.cpp:837 msgid "process: HEAD" msgstr "процеÑÑ: HEAD" #: src/services/a-rex/arex.cpp:838 #, c-format msgid "HEAD: id %s path %s" msgstr "HEAD: идентификатор %s путь %s" #: src/services/a-rex/arex.cpp:871 msgid "process: PUT" msgstr "процеÑ: PUT" #: src/services/a-rex/arex.cpp:904 msgid "process: DELETE" msgstr "процеÑÑ: DELETE" #: src/services/a-rex/arex.cpp:937 #, c-format msgid "process: method %s is not supported" msgstr "процеÑÑ: метод %s не поддерживаетÑÑ" #: 
src/services/a-rex/arex.cpp:940 msgid "process: method is not defined" msgstr "процеÑÑ: неопределённый метод" #: src/services/a-rex/arex.cpp:1050 msgid "Failed to run Grid Manager thread" msgstr "Сбой запуÑка потока Grid Manager" #: src/services/a-rex/arex.cpp:1109 #, c-format msgid "Failed to process configuration in %s" msgstr "Ðе удалоÑÑŒ обработать наÑтройки в %s" #: src/services/a-rex/arex.cpp:1114 msgid "No control directory set in configuration" msgstr "Ðе найден контрольный каталог в файле наÑтроек" #: src/services/a-rex/arex.cpp:1118 msgid "No session directory set in configuration" msgstr "Ðе найден каталог ÑеÑÑии в файле наÑтроек" #: src/services/a-rex/arex.cpp:1122 msgid "No LRMS set in configuration" msgstr "Ðе найдена СУПО в файле наÑтроек" #: src/services/a-rex/arex.cpp:1127 #, c-format msgid "Failed to create control directory %s" msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð¾Ð³Ð¾ каталога %s" #: src/services/a-rex/cachecheck.cpp:37 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:658 #, c-format msgid "Error with cache configuration: %s" msgstr "Ошибка при наÑтройке кÑша: %s" #: src/services/a-rex/cachecheck.cpp:53 #: src/services/candypond/CandyPond.cpp:318 msgid "Error with cache configuration" msgstr "Ошибка при наÑтройке кÑша" #: src/services/a-rex/cachecheck.cpp:78 #: src/services/candypond/CandyPond.cpp:146 #: src/services/candypond/CandyPond.cpp:343 #, c-format msgid "Looking up URL %s" msgstr "ПоиÑк URL %s" #: src/services/a-rex/cachecheck.cpp:80 #: src/services/candypond/CandyPond.cpp:155 #, c-format msgid "Cache file is %s" msgstr "Файл кÑша: %s" #: src/services/a-rex/change_activity_status.cpp:55 #: src/services/a-rex/change_activity_status.cpp:59 #, c-format msgid "EMIES:PauseActivity: job %s - %s" msgstr "EMIES:PauseActivity: задача %s - %s" #: src/services/a-rex/change_activity_status.cpp:104 #: src/services/a-rex/change_activity_status.cpp:108 #, c-format msgid "EMIES:ResumeActivity: job %s - %s" msgstr "EMIES:ResumeActivity: задача %s - %s" #: src/services/a-rex/change_activity_status.cpp:153 #: src/services/a-rex/change_activity_status.cpp:158 #, c-format msgid "EMIES:CancelActivity: job %s - %s" msgstr "EMIES:CancelActivity: задача %s - %s" #: src/services/a-rex/change_activity_status.cpp:166 #, c-format msgid "job %s cancelled successfully" msgstr "задача %s уÑпешно прервана" #: src/services/a-rex/change_activity_status.cpp:212 #: src/services/a-rex/change_activity_status.cpp:227 #, c-format msgid "EMIES:WipeActivity: job %s - %s" msgstr "EMIES:WipeActivity: задача %s - %s" #: src/services/a-rex/change_activity_status.cpp:231 #, c-format msgid "job %s (will be) cleaned successfully" msgstr "задача %s (будет) уÑпешно очищена" #: src/services/a-rex/change_activity_status.cpp:277 #: src/services/a-rex/change_activity_status.cpp:282 #, c-format msgid "EMIES:RestartActivity: job %s - %s" msgstr "EMIES:RestartActivity: задача %s - %s" #: src/services/a-rex/change_activity_status.cpp:286 #, c-format msgid "job %s restarted successfully" msgstr "задача %s уÑпешно перезапущена" #: src/services/a-rex/change_activity_status.cpp:301 #: src/services/a-rex/put.cpp:163 src/services/a-rex/put.cpp:204 #, c-format msgid "%s: there is no such job: %s" msgstr "%s: задача отÑутÑтвует: %s" #: src/services/a-rex/change_activity_status.cpp:309 #, c-format msgid "%s: put log %s: there is no payload" msgstr "%s: запиÑÑŒ журнала %s: отÑутÑтвуют полезные файлы" #: src/services/a-rex/change_activity_status.cpp:315 #, c-format msgid "%s: put log %s: unrecognized payload" 
msgstr "%s: запиÑÑŒ журнала %s: неопознанные полезные файлы" #: src/services/a-rex/change_activity_status.cpp:354 msgid "A-REX REST: Failed to resume job" msgstr "A-REX REST: Сбой Ð²Ð¾Ð·Ð¾Ð±Ð½Ð¾Ð²Ð»ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #: src/services/a-rex/change_activity_status.cpp:358 #, c-format msgid "A-REX REST: State change not allowed: from %s to %s" msgstr "A-REX REST: ÐедопуÑтимое изменение ÑоÑтоÑниÑ: Ñ %s на %s" #: src/services/a-rex/create_activity.cpp:52 #, c-format msgid "" "EMIES:CreateActivity: request = \n" "%s" msgstr "" "EMIES:CreateActivity: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" "%s" #: src/services/a-rex/create_activity.cpp:58 msgid "EMIES:CreateActivity: too many activity descriptions" msgstr "EMIES:CreateActivity: обнаружено Ñлишком много опиÑаний задач" #: src/services/a-rex/create_activity.cpp:68 msgid "EMIES:CreateActivity: no job description found" msgstr "EMIES:CreateActivity: опиÑание задачи не обнаружено" #: src/services/a-rex/create_activity.cpp:75 msgid "EMIES:CreateActivity: max jobs total limit reached" msgstr "EMIES:CreateActivity: доÑтигнут предел общего чиÑла задач" #: src/services/a-rex/create_activity.cpp:101 #, c-format msgid "ES:CreateActivity: Failed to create new job: %s" msgstr "ES:CreateActivity: Ðе удалоÑÑŒ Ñоздать новую задачу: %s" #: src/services/a-rex/create_activity.cpp:117 msgid "EMIES:CreateActivity finished successfully" msgstr "EMIES:CreateActivity уÑпешно завершено" #: src/services/a-rex/create_activity.cpp:118 #, c-format msgid "New job accepted with id %s" msgstr "ÐÐ¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° принÑта Ñ Ð¸Ð´ÐµÐ½Ñ‚Ð¸Ñ„Ð¸ÐºÐ°Ñ‚Ð¾Ñ€Ð¾Ð¼ %s" #: src/services/a-rex/create_activity.cpp:122 #, c-format msgid "" "EMIES:CreateActivity: response = \n" "%s" msgstr "" "EMIES:CreateActivity: ответ = \n" "%s" #: src/services/a-rex/create_activity.cpp:137 msgid "NEW: put new job: there is no payload" msgstr "NEW: запиÑÑŒ новой задачи: отÑутÑтвуют полезные файлы" #: src/services/a-rex/create_activity.cpp:141 msgid "NEW: put new job: max jobs total limit reached" msgstr "" "NEW: запиÑÑŒ новой задачи: доÑтигнут макÑимальный предел общего количеÑтва " "задач" #: src/services/a-rex/delegation/DelegationStore.cpp:51 msgid "Wiping and re-creating whole storage" msgstr "Уничтожение и воÑÑоздание вÑего хранилища" #: src/services/a-rex/delegation/DelegationStore.cpp:214 #: src/services/a-rex/delegation/DelegationStore.cpp:316 #, c-format msgid "DelegationStore: TouchConsumer failed to create file %s" msgstr "DelegationStore: TouchConsumer не Ñмог Ñоздать файл %s" #: src/services/a-rex/delegation/DelegationStore.cpp:276 msgid "DelegationStore: PeriodicCheckConsumers failed to resume iterator" msgstr "" "DelegationStore: Ñбой Ð²Ð¾Ð·Ð¾Ð±Ð½Ð¾Ð²Ð»ÐµÐ½Ð¸Ñ Ð¸Ñ‚ÐµÑ€Ð°Ñ‚Ð¾Ñ€Ð° процеÑÑом " "PeriodicCheckConsumers" #: src/services/a-rex/delegation/DelegationStore.cpp:296 #, c-format msgid "" "DelegationStore: PeriodicCheckConsumers failed to remove old delegation %s - " "%s" msgstr "" "DelegationStore: Ñбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¿Ñ€Ð¾Ñ†ÐµÑÑом PeriodicCheckConsumers уÑтаревшего " "Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ %s - %s" #: src/services/a-rex/get.cpp:174 src/services/a-rex/get.cpp:229 #: src/services/a-rex/get.cpp:313 #, c-format msgid "Get: there is no job %s - %s" msgstr "Get: отÑутÑвует задача %s - %s" #: src/services/a-rex/get.cpp:380 #, c-format msgid "Head: there is no job %s - %s" msgstr "Head: отÑутÑвует задача %s - %s" #: src/services/a-rex/get.cpp:436 msgid "Failed to extract credential information" msgstr "Сбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ о параметрах доÑтупа" #: src/services/a-rex/get.cpp:439 #, 
c-format msgid "Checking cache permissions: DN: %s" msgstr "Проверка прав доÑтупа к кÑшу: DN: %s" #: src/services/a-rex/get.cpp:440 #, c-format msgid "Checking cache permissions: VO: %s" msgstr "Проверка прав доÑтупа к кÑшу: ВО: %s" #: src/services/a-rex/get.cpp:442 #, c-format msgid "Checking cache permissions: VOMS attr: %s" msgstr "Checking cache permissions: атрибуты VOMS: %s" #: src/services/a-rex/get.cpp:452 #, c-format msgid "Cache access allowed to %s by DN %s" msgstr "ДоÑтуп к кÑшу разрешён Ð´Ð»Ñ %s пользователю Ñ DN %s" #: src/services/a-rex/get.cpp:455 #, c-format msgid "DN %s doesn't match %s" msgstr "DN %s не Ñовпадает Ñ %s" #: src/services/a-rex/get.cpp:458 #, c-format msgid "Cache access allowed to %s by VO %s" msgstr "ДоÑтуп к кÑшу разрешён Ð´Ð»Ñ %s Ð´Ð»Ñ Ð’Ðž %s" #: src/services/a-rex/get.cpp:461 #, c-format msgid "VO %s doesn't match %s" msgstr "ВО %s не Ñовпадает Ñ %s" #: src/services/a-rex/get.cpp:467 src/services/a-rex/get.cpp:486 #, c-format msgid "Bad credential value %s in cache access rules" msgstr "ÐедопуÑтимое значение параметра доÑтупа %s в правилах доÑтупа к кÑшу" #: src/services/a-rex/get.cpp:475 src/services/a-rex/get.cpp:494 #, c-format msgid "VOMS attr %s matches %s" msgstr "Ðтрибут VOMS %s Ñовпадает Ñ %s" #: src/services/a-rex/get.cpp:476 #, c-format msgid "Cache access allowed to %s by VO %s and role %s" msgstr "ДоÑтуп к кÑшу разрешён Ð´Ð»Ñ %s Ð´Ð»Ñ Ð’Ðž %s и роли %s" #: src/services/a-rex/get.cpp:479 src/services/a-rex/get.cpp:498 #, c-format msgid "VOMS attr %s doesn't match %s" msgstr "Ðтрибут VOMS %s не Ñовпадает Ñ %s" #: src/services/a-rex/get.cpp:495 #, c-format msgid "Cache access allowed to %s by VO %s and group %s" msgstr "ДоÑтуп к кÑшу разрешён Ð´Ð»Ñ %s Ð´Ð»Ñ Ð’Ðž %s и группы %s" #: src/services/a-rex/get.cpp:501 #, c-format msgid "Unknown credential type %s for URL pattern %s" msgstr "ÐеизвеÑтный тип параметра доÑтупа %s Ð´Ð»Ñ ÑˆÐ°Ð±Ð»Ð¾Ð½Ð° URL %s" #: src/services/a-rex/get.cpp:507 #, c-format msgid "No match found in cache access rules for %s" msgstr "Ðе найдено ÑоответÑÑ‚Ð²Ð¸Ñ Ð´Ð»Ñ %s в правилах доÑтупа к кÑшу" #: src/services/a-rex/get.cpp:517 #, c-format msgid "Get from cache: Looking in cache for %s" msgstr "Получение из кÑша: ПоиÑк %s в кÑше" #: src/services/a-rex/get.cpp:520 #, c-format msgid "Get from cache: Invalid URL %s" msgstr "Получение из кÑша: ÐедопуÑтимый URL %s" #: src/services/a-rex/get.cpp:537 msgid "Get from cache: Error in cache configuration" msgstr "Получение из кÑша: Ошибка наÑтроек кÑша" #: src/services/a-rex/get.cpp:546 msgid "Get from cache: File not in cache" msgstr "Получение из кÑша: Файла в кÑше нет" #: src/services/a-rex/get.cpp:549 #, c-format msgid "Get from cache: could not access cached file: %s" msgstr "" "Получение из кÑша: не удалоÑÑŒ получить доÑтуп к кÑшированному файлу: %s" #: src/services/a-rex/get.cpp:559 msgid "Get from cache: Cached file is locked" msgstr "Получение из кÑша: КÑшированный файл забклокирован" #: src/services/a-rex/get_activity_statuses.cpp:214 #: src/services/a-rex/get_activity_statuses.cpp:320 #, c-format msgid "EMIES:GetActivityStatus: job %s - %s" msgstr "EMIES:GetActivityStatus: задача %s - %s" #: src/services/a-rex/get_activity_statuses.cpp:455 #, c-format msgid "EMIES:GetActivityInfo: job %s - failed to retrieve GLUE2 information" msgstr "" "EMIES:GetActivityInfo: задача %s - не удалоÑÑŒ получить информацию по формату " "GLUE2" #: src/services/a-rex/get_activity_statuses.cpp:507 #: src/services/a-rex/get_activity_statuses.cpp:514 #, c-format msgid "EMIES:NotifyService: job %s 
- %s" msgstr "EMIES:NotifyService: задача %s - %s" #: src/services/a-rex/grid-manager/GridManager.cpp:98 #, c-format msgid "" "Cannot create directories for log file %s. Messages will be logged to this " "log" msgstr "" "Ðе удалоÑÑŒ Ñоздать каталоги Ð´Ð»Ñ Ð¶ÑƒÑ€Ð½Ð°Ð»ÑŒÐ½Ð¾Ð³Ð¾ файла %s. Ð¡Ð¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ Ð±ÑƒÐ´ÑƒÑ‚ " "запиÑыватьÑÑ Ð² Ñтот журнал" #: src/services/a-rex/grid-manager/GridManager.cpp:104 #, c-format msgid "" "Cannot open cache log file %s: %s. Cache cleaning messages will be logged to " "this log" msgstr "" "Ðе удалоÑÑŒ открыть журнальный файл кÑша %s: %s. Ð¡Ð¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ Ð¾Ð± очиÑтке кÑша " "будут запиÑыватьÑÑ Ð² Ñтот журнал" #: src/services/a-rex/grid-manager/GridManager.cpp:114 msgid "Failed to start cache clean script" msgstr "Ðе удалоÑÑŒ запуÑтить Ñкрипт очиÑтки кÑша" #: src/services/a-rex/grid-manager/GridManager.cpp:115 msgid "Cache cleaning script failed" msgstr "Сбой в работе Ñкрипта очиÑтки кÑша" #: src/services/a-rex/grid-manager/GridManager.cpp:183 #, c-format msgid "External request for attention %s" msgstr "Внешний Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° обÑлуживание %s" #: src/services/a-rex/grid-manager/GridManager.cpp:201 #, c-format msgid "Failed to open heartbeat file %s" msgstr "Ðе удалоÑÑŒ открыть мониторинговый файл %s" #: src/services/a-rex/grid-manager/GridManager.cpp:223 msgid "Starting jobs processing thread" msgstr "ЗапуÑкаетÑÑ Ð¿Ð¾Ñ‚Ð¾Ðº обработки задачи" #: src/services/a-rex/grid-manager/GridManager.cpp:224 #, c-format msgid "Used configuration file %s" msgstr "ИÑпользуетÑÑ Ñ„Ð°Ð¹Ð» наÑтроек %s" #: src/services/a-rex/grid-manager/GridManager.cpp:232 #, c-format msgid "" "Error initiating delegation database in %s. Maybe permissions are not " "suitable. Returned error is: %s." msgstr "" "Сбой при Ñоздании базы данных Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð² %s. Возможно, отÑутÑтвует " "доÑтуп к директории. Возвращена ошибка %s." #: src/services/a-rex/grid-manager/GridManager.cpp:244 msgid "Failed to start new thread: cache won't be cleaned" msgstr "Ðе удалоÑÑŒ запуÑтить новый поток: кÑш не будет очищен" #: src/services/a-rex/grid-manager/GridManager.cpp:251 msgid "Failed to activate Jobs Processing object, exiting Grid Manager thread" msgstr "" "Ðе удалоÑÑŒ активировать объект обработки задач, закрываетÑÑ Ð¿Ð¾Ñ‚Ð¾Ðº Grid " "Manager" #: src/services/a-rex/grid-manager/GridManager.cpp:260 #, c-format msgid "" "Error adding communication interface in %s. Maybe another instance of A-REX " "is already running." msgstr "" "Сбой при добавлении интерфейÑа ÑвÑзи в %s. Возможно, уже запущен другой " "процеÑÑ A-REX." #: src/services/a-rex/grid-manager/GridManager.cpp:263 #, c-format msgid "" "Error adding communication interface in %s. Maybe permissions are not " "suitable." msgstr "" "Сбой при добавлении интерфейÑа ÑвÑзи в %s. Возможно, отÑутÑтвует доÑтуп к " "директории." 
#: src/services/a-rex/grid-manager/GridManager.cpp:270 msgid "Failed to start new thread for monitoring job requests" msgstr "Ðе удалоÑÑŒ запуÑтить поток Ð´Ð»Ñ Ð¾Ñ‚ÑÐ»ÐµÐ¶Ð¸Ð²Ð°Ð½Ð¸Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñов на задачи" #: src/services/a-rex/grid-manager/GridManager.cpp:276 msgid "Picking up left jobs" msgstr "Обработка оÑтавшихÑÑ Ð·Ð°Ð´Ð°Ñ‡" #: src/services/a-rex/grid-manager/GridManager.cpp:279 msgid "Starting data staging threads" msgstr "ЗапуÑкаютÑÑ Ð¿Ð¾Ñ‚Ð¾ÐºÐ¸ Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…" #: src/services/a-rex/grid-manager/GridManager.cpp:283 msgid "Starting jobs' monitoring" msgstr "ЗапуÑк мониторинга задач" #: src/services/a-rex/grid-manager/GridManager.cpp:291 #, c-format msgid "" "SSHFS mount point of session directory (%s) is broken - waiting for " "reconnect ..." msgstr "" "Точка Ð¼Ð¾Ð½Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ SSHFS каталога ÑеÑÑии (%s) недоÑтупна - ожидаетÑÑ " "повторное Ñоединение ..." #: src/services/a-rex/grid-manager/GridManager.cpp:295 #, c-format msgid "" "SSHFS mount point of runtime directory (%s) is broken - waiting for " "reconnect ..." msgstr "" "Точка Ð¼Ð¾Ð½Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ SSHFS каталога иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ (%s) недоÑтупна - ожидаетÑÑ " "повторное Ñоединение ..." #: src/services/a-rex/grid-manager/GridManager.cpp:300 #, c-format msgid "" "SSHFS mount point of cache directory (%s) is broken - waiting for " "reconnect ..." msgstr "" "Точка Ð¼Ð¾Ð½Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ SSHFS каталога кÑша (%s) недоÑтупна - ожидаетÑÑ Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð¾Ðµ " "Ñоединение ..." #: src/services/a-rex/grid-manager/GridManager.cpp:349 #, c-format msgid "Orphan delegation lock detected (%s) - cleaning" msgstr "Обнаружен неиÑпользуемый блок Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ (%s) - очиÑтка" #: src/services/a-rex/grid-manager/GridManager.cpp:354 msgid "Failed to obtain delegation locks for cleaning orphaned locks" msgstr "" "Ðе удалоÑÑŒ получить блоки Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð´Ð»Ñ Ð¾Ñ‡Ð¸Ñтки неиÑпользуемых блоков" #: src/services/a-rex/grid-manager/GridManager.cpp:368 msgid "Waking up" msgstr "ÐктивизациÑ" #: src/services/a-rex/grid-manager/GridManager.cpp:371 msgid "Stopping jobs processing thread" msgstr "ОÑтанавливаетÑÑ Ð¿Ð¾Ñ‚Ð¾Ðº обработки задачи" #: src/services/a-rex/grid-manager/GridManager.cpp:373 msgid "Exiting jobs processing thread" msgstr "ОÑтанавливаетÑÑ Ð¿Ð¾Ñ‚Ð¾Ðº обработки задачи" #: src/services/a-rex/grid-manager/GridManager.cpp:391 msgid "Requesting to stop job processing" msgstr "ЗапрашиваетÑÑ Ð¿Ñ€ÐµÐºÑ€Ð°Ñ‰ÐµÐ½Ð¸Ðµ обработки задачи" #: src/services/a-rex/grid-manager/GridManager.cpp:399 msgid "Waiting for main job processing thread to exit" msgstr "Ожидание Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ð¾Ñновного потока обработки задачи" #: src/services/a-rex/grid-manager/GridManager.cpp:401 msgid "Stopped job processing" msgstr "Обработка задачи завершена" #: src/services/a-rex/grid-manager/accounting/AAR.cpp:73 msgid "Cannot find information abouto job submission endpoint" msgstr "Ðевозможно обнаружить информацию о меÑте Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:53 #, c-format msgid "Failed to read database schema file at %s" msgstr "Сбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° Ñхемы базы данных в %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:63 msgid "Accounting database initialized succesfully" msgstr "УÑпешно инициализирована база данных учёта задач" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:65 msgid "Accounting database connection has been established" msgstr "УÑтановлено Ñоединение Ñ ÑƒÑ‡Ñ‘Ñ‚Ð½Ð¾Ð¹ базой данных" #: 
src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:75 #, c-format msgid "%s. SQLite database error: %s" msgstr "%s. ошибка базы данных SQLite: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:77 #, c-format msgid "SQLite database error: %s" msgstr "Ошибка базы даных SQLite: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:105 #, c-format msgid "Directory %s to store accounting database has been created." msgstr "Создан каталог %s Ð´Ð»Ñ Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ ÑƒÑ‡Ñ‘Ñ‚Ð½Ð¾Ð¹ базы данных." #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:107 #, c-format msgid "" "Accounting database cannot be created. Faile to create parent directory %s." msgstr "" "База данных учёта задач не может быть Ñоздана. Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ€Ð¾Ð´Ð¸Ñ‚ÐµÐ»ÑŒÑкого " "каталога %s." #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:111 #, c-format msgid "Accounting database cannot be created: %s is not a directory" msgstr "" "База данных учёта задач не может быть Ñоздана: %s не ÑвлÑетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:118 msgid "Failed to initialize accounting database" msgstr "Сбой инициализации базы данных учёта задач" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:125 #, c-format msgid "Accounting database file (%s) is not a regular file" msgstr "Файл базы данных учёта задач (%s) не ÑвлÑетÑÑ Ñтандартным файлом" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:131 msgid "Error opening accounting database" msgstr "Ошибка Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ð±Ð°Ð·Ñ‹ данных учёта задач" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:149 msgid "Closing connection to SQLite accounting database" msgstr "ЗакрываетÑÑ Ñоединение Ñ ÑƒÑ‡Ñ‘Ñ‚Ð½Ð¾Ð¹ базой данных SQLite" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:238 #, c-format msgid "Failed to fetch data from %s accounting database table" msgstr "Сбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ… из таблицы %s базы данных учёта задач" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:255 #, c-format msgid "Failed to add '%s' into the accounting database %s table" msgstr "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ '%s' в таблицу %s базы данных учёта задач" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:314 msgid "Failed to fetch data from accounting database Endpoints table" msgstr "Сбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ… из таблицы Endpoints базы данных учёта задач" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:331 #, c-format msgid "" "Failed to add '%s' URL (interface type %s) into the accounting database " "Endpoints table" msgstr "" "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ '%s' URL (Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ñ‚Ð¸Ð¿Ð° %s) в таблицу Endpoints базы " "данных учёта задач" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:357 #, c-format msgid "Failed to query AAR database ID for job %s" msgstr "Ðе удалоÑÑŒ опроÑить базу данных о AAR Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:412 #, c-format msgid "Failed to insert AAR into the database for job %s" msgstr "Ðе удалоÑÑŒ добавить AAR в базу данных Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:413 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:460 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:491 #: 
src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:507 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:523 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:544 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:560 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:575 #, c-format msgid "SQL statement used: %s" msgstr "ИÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð½Ð°Ñ Ð¸Ð½ÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ SQL: %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:418 #, c-format msgid "Failed to write authtoken attributes for job %s" msgstr "Сбой запиÑи атрибутов authtoken Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:422 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:477 #, c-format msgid "Failed to write event records for job %s" msgstr "Сбой запиÑи информации о ÑобытиÑÑ… Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:433 #, c-format msgid "" "Cannot to update AAR. Cannot find registered AAR for job %s in accounting " "database." msgstr "" "Ðевозможно обновить AAR. Ðе удалоÑÑŒ обнаружить зарегиÑтрированную запиÑÑŒ AAR " "Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s в учётной базе данных." #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:459 #, c-format msgid "Failed to update AAR in the database for job %s" msgstr "Ðе удалоÑÑŒ обновить AAR в базе данных Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:465 #, c-format msgid "Failed to write RTEs information for the job %s" msgstr "Сбой запиÑи информации о RTE Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:469 #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:473 #, c-format msgid "Failed to write data transfers information for the job %s" msgstr "Сбой запиÑи информации о передаче данных Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #: src/services/a-rex/grid-manager/accounting/AccountingDBSQLite.cpp:569 #, c-format msgid "Unable to add event: cannot find AAR for job %s in accounting database." msgstr "" "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ ÑобытиÑ: не обнаружена запиÑÑŒ AAR Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s в базе " "данных учёта задач." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:73 #, c-format msgid "Unknown option %s" msgstr "ÐеизвеÑтный параметр %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:80 msgid "Job ID argument is required." msgstr "ТребуетÑÑ Ð°Ñ€Ð³ÑƒÐ¼ÐµÐ½Ñ‚ - идентификатор задачи." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:86 msgid "Path to user's proxy file should be specified." msgstr "Должен быть указан путь к Ñертификату доверенноÑти пользователÑ." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:92 msgid "User name should be specified." msgstr "Должно быть указано Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:98 msgid "Path to .local job status file is required." msgstr "ТребуетÑÑ Ð¿ÑƒÑ‚ÑŒ к файлу ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡ .local." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:106 msgid "Generating ceID prefix from hostname automatically" msgstr "ÐвтоматичеÑкое Ñоздание префикÑа ceID из имени узла" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:109 msgid "" "Cannot determine hostname from gethostname() to generate ceID automatically." msgstr "" "Ðевозможно определить hostname из gethostname() Ð´Ð»Ñ Ð°Ð²Ñ‚Ð¾Ð¼Ð°Ñ‚Ð¸Ñ‡ÐµÑкого ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ " "ceID." 
#: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:118 #, c-format msgid "ceID prefix is set to %s" msgstr "ÐŸÑ€ÐµÑ„Ð¸ÐºÑ ceID задан как %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:126 #, c-format msgid "Getting currect timestamp for BLAH parser log: %s" msgstr "Создание текущей метки времени Ð´Ð»Ñ Ð¶ÑƒÑ€Ð½Ð°Ð»Ð° программы разбора BLAH: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:135 msgid "Parsing .local file to obtain job-specific identifiers and info" msgstr "" "РазбираетÑÑ Ñ„Ð°Ð¹Ð» .local Ñ Ñ†ÐµÐ»ÑŒÑŽ Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ ÑпецифичеÑких Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ " "идентификаторов и информации" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:145 #, c-format msgid "globalid is set to %s" msgstr "globalid задан как %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:148 #, c-format msgid "headnode is set to %s" msgstr "Головной узел задан как %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:151 #, c-format msgid "interface is set to %s" msgstr "Ð˜Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð·Ð°Ð´Ð°Ð½ как %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:155 msgid "There is no local LRMS ID. Message will not be written to BLAH log." msgstr "" "ОтÑутÑтвует идентификатор СУПО. Сообщение не будет запиÑано в журнал BLAH." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:158 #, c-format msgid "localid is set to %s" msgstr "localid задан как %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:161 #, c-format msgid "queue name is set to %s" msgstr "Ð˜Ð¼Ñ Ð¾Ñ‡ÐµÑ€ÐµÐ´Ð¸ задано как %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:164 #, c-format msgid "owner subject is set to %s" msgstr "Ð˜Ð¼Ñ Ñубъекта владельца задано как %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:166 msgid "" "Job did not finished successfully. Message will not be written to BLAH log." msgstr "" "Задача не завершилаÑÑŒ уÑпехом. Сообщение не будет запиÑано в журнал BLAH." #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:174 #, c-format msgid "Job timestamp successfully parsed as %s" msgstr "Ð’Ñ€ÐµÐ¼ÐµÐ½Ð½Ð°Ñ Ð¼ÐµÑ‚ÐºÐ° задачи уÑпешно разобрана как %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:178 msgid "Can not read information from the local job status file" msgstr "Ðевозможно прочеÑть информацию из файла ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:194 #, c-format msgid "" "Unsupported submission interface %s. Seems arc-blahp-logger need to be " "updated accordingly. Please submit the bug to bugzilla." msgstr "" "Ð˜Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð·Ð°Ñылки %s не поддерживаетÑÑ. Похоже, arc-blahp-logger пора " "обновить. ПожалуйÑта, опишите проблему в bugzill-е." 
#: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:204 msgid "Parsing VOMS AC to get FQANs information" msgstr "Разборка VOMS AC Ñ Ñ†ÐµÐ»ÑŒÑŽ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ о FQAN" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:217 #, c-format msgid "Found VOMS AC attribute: %s" msgstr "Обнаружен атрибут VOMS AC: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:230 msgid "VOMS AC attribute is a tag" msgstr "Ðтрибут VOMS AC ÑвлÑетÑÑ Ñ‚ÐµÐ³Ð¾Ð¼" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:237 msgid "Skipping policyAuthority VOMS AC attribute" msgstr "ПропуÑкаетÑÑ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚ VOMS AC policyAuthority" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:241 msgid "VOMS AC attribute is the FQAN" msgstr "Ðтрибут VOMS AC ÑвлÑетÑÑ FQAN" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:249 msgid "No FQAN found. Using None as userFQAN value" msgstr "" "FQAN не обнаружен. Ð’ качеÑтве Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ userFQAN будет иÑпользоватьÑÑ None" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:263 #, c-format msgid "Assembling BLAH parser log entry: %s" msgstr "Формирование запиÑи журнала программы разбора BLAH: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:268 #, c-format msgid "Writing the info to the BLAH parser log: %s" msgstr "ЗапиÑÑŒ информации в журнал программы разбора BLAH: %s" #: src/services/a-rex/grid-manager/arc_blahp_logger.cpp:276 #, c-format msgid "Cannot open BLAH log file '%s'" msgstr "Ðе удалоÑÑŒ открыть журнальный файл BLAH '%s'" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:34 #, c-format msgid "Missing cancel-%s-job - job cancellation may not work" msgstr "Ðе найден Ñкрипт cancel-%s-job - прерывание задачи может не работать" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:38 #, c-format msgid "Missing submit-%s-job - job submission to LRMS may not work" msgstr "" "Ðе найден Ñкрипт submit-%s-job - заÑылка задачи в СУПО может не работать" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:42 #, c-format msgid "Missing scan-%s-job - may miss when job finished executing" msgstr "Ðе найден Ñкрипт scan-%s-job - окончание задачи может быть незамеченым" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:56 #, c-format msgid "Wrong option in %s" msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð² %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:67 #, c-format msgid "Can't read configuration file at %s" msgstr "Ðевозможно прочеÑть файл наÑтроек в %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:77 #, c-format msgid "Can't recognize type of configuration file at %s" msgstr "Ðевозможно определить тип файла наÑтроек в %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:80 msgid "Could not determine configuration type or configuration is empty" msgstr "Ðевозможно определить тип файла наÑтроек, или же он пуÑÑ‚" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:161 msgid "lrms is empty" msgstr "пуÑтое значение lrms" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:194 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:203 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:212 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:221 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:230 msgid "Missing number in maxjobs" msgstr "ÐедоÑтающее чиÑло в maxjobs" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:197 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:206 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:215 #: 
src/services/a-rex/grid-manager/conf/CoreConfig.cpp:224 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:233 #, c-format msgid "Wrong number in maxjobs: %s" msgstr "ÐедопуÑтимое чиÑло в maxjobs: %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:243 #, c-format msgid "Wrong number in wakeupperiod: %s" msgstr "ÐедопуÑтимое чиÑло в wakeupperiod: %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:249 msgid "mail parameter is empty" msgstr "Параметр mail пуÑÑ‚" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:255 #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:259 msgid "Wrong number in defaultttl command" msgstr "ÐедопуÑтимое чиÑло в команде defaultttl" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:265 msgid "Wrong number in maxrerun command" msgstr "ÐедопуÑтимое чиÑло в команде maxrerun" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:272 msgid "State name for plugin is missing" msgstr "ОтÑутÑтвует наименование ÑоÑтоÑÐ½Ð¸Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ð¾Ð³Ð¾ модулÑ" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:276 msgid "Options for plugin are missing" msgstr "Этот модуль не имеет наÑтраиваемых параметров" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:279 #, c-format msgid "Failed to register plugin for state %s" msgstr "Сбой региÑтрации подключаемого Ð¼Ð¾Ð´ÑƒÐ»Ñ Ð´Ð»Ñ ÑоÑтоÑÐ½Ð¸Ñ %s" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:285 msgid "Session root directory is missing" msgstr "ОтÑутÑтвует ÐºÐ¾Ñ€Ð½ÐµÐ²Ð°Ñ Ð´Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ ÑеÑÑии" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:288 msgid "Junk in sessiondir command" msgstr "БеÑÑмыÑлица в команде sessiondir" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:300 msgid "Missing directory in controldir command" msgstr "Ð’ команде controldir пропущен каталог" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:305 msgid "" "'control' configuration option is no longer supported, please use " "'controldir' instead" msgstr "" "ÐžÐ¿Ñ†Ð¸Ñ Ð½Ð°Ñтроек 'control' теперь называетÑÑ 'controldir'; пожалуйÑта, " "иÑпользуйте новое название" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:310 msgid "User for helper program is missing" msgstr "ОтÑутÑтвует пользователь Ð´Ð»Ñ Ð²Ñпомогательной программы" #: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:313 msgid "Only user '.' 
for helper program is supported"
msgstr "Для вспомогательной программы поддерживается только пользователь '.'"

#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:316
msgid "Helper program is missing"
msgstr "Отсутствует вспомогательная программа"

#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:337
msgid "Wrong option in fixdirectories"
msgstr "Неверная опция в fixdirectories"

#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:364
msgid "Wrong option in delegationdb"
msgstr "Неверная опция для базы данных delegationdb"

#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:370
#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:556
msgid "forcedefaultvoms parameter is empty"
msgstr "Параметр forcedefaultvoms пуст"

#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:445
msgid "Wrong number in maxjobdesc command"
msgstr "Недопустимое число в команде maxjobdesc"

#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:495
msgid "Missing file name in [arex/jura] logfile"
msgstr "Отсутствует имя файла в журнальном файле [arex/jura]"

#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:506
#, c-format
msgid "Wrong number in urdelivery_frequency: %s"
msgstr "Недопустимое значение в urdelivery_frequency: %s"

#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:549
msgid "No queue name given in queue block name"
msgstr "Не указано название очереди в названии блока queue"

#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:565
#: src/services/a-rex/grid-manager/conf/CoreConfig.cpp:600
msgid "advertisedvo parameter is empty"
msgstr "Параметр advertisedvo пуст"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:117
#, c-format
msgid "\tSession root dir : %s"
msgstr "\tКорневой каталог сессии: %s"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:118
#, c-format
msgid "\tControl dir : %s"
msgstr "\tКонтрольный каталог: %s"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:119
#, c-format
msgid "\tdefault LRMS : %s"
msgstr "\tСУПО по умолчанию : %s"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:120
#, c-format
msgid "\tdefault queue : %s"
msgstr "\tочередь по умолчанию : %s"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:121
#, c-format
msgid "\tdefault ttl : %u"
msgstr "\tВремя жизни по умолчанию : %u"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:126
msgid "No valid caches found in configuration, caching is disabled"
msgstr ""
"В настройках не обнаружено ни одного приемлемого кэша, кэширование отключено"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:131
#, c-format
msgid "\tCache : %s"
msgstr "\tКэш : %s"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:133
#, c-format
msgid "\tCache link dir : %s"
msgstr "\tКаталог с кэшем ссылок: %s"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:136
#, c-format
msgid "\tCache (read-only): %s"
msgstr "\tПапка кэша (только чтение): %s"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:138
msgid "\tCache cleaning enabled"
msgstr "\tОчистка кэша включена"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:139
msgid "\tCache cleaning disabled"
msgstr "\tОчистка кэша отключена"

#: src/services/a-rex/grid-manager/conf/GMConfig.cpp:327
msgid ""
"Globus location variable substitution is not supported anymore. Please "
"specify path directly."
msgstr ""
"Переменная, указывающая на расположение Globus, больше не поддерживается. "
"Пожалуйста, укажите полный путь."
#: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:35 msgid "Can't read configuration file" msgstr "Ðе удалоÑÑŒ прочеÑть файл наÑтроек" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:41 #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:29 msgid "Can't recognize type of configuration file" msgstr "Ðевозможно определить тип файла наÑтроек" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:47 msgid "Configuration error" msgstr "Ошибка наÑтройки" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:77 msgid "Bad number in maxdelivery" msgstr "ÐедопуÑтимое значение maxdelivery" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:83 msgid "Bad number in maxemergency" msgstr "ÐедопуÑтимое значение maxemergency" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:89 msgid "Bad number in maxprocessor" msgstr "ÐедопуÑтимое значение maxprocessor" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:95 msgid "Bad number in maxprepared" msgstr "ÐедопуÑтимое значение maxprepared" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:101 msgid "Bad number in maxtransfertries" msgstr "недопуÑтимое чиÑло в maxtransfertries" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:112 msgid "Bad number in speedcontrol" msgstr "ÐедопуÑтимое значение speedcontrol" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:123 #, c-format msgid "Bad number in definedshare %s" msgstr "ÐедопуÑтимое значение definedshare %s" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:132 #, c-format msgid "Bad URL in deliveryservice: %s" msgstr "ÐедопуÑтимый URL в deliveryservice: %s" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:143 msgid "Bad number in remotesizelimit" msgstr "ÐедопуÑтимое значение remotesizelimit" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:168 msgid "Bad value for loglevel" msgstr "ÐедопуÑтимое значение Ð´Ð»Ñ loglevel" #: src/services/a-rex/grid-manager/conf/StagingConfig.cpp:182 msgid "Bad URL in acix_endpoint" msgstr "ÐедопуÑтимый URL в acix_endpoint" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:24 msgid "Can't open configuration file" msgstr "Ðе удалоÑÑŒ открыть файл наÑтроек" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:45 msgid "Not enough parameters in copyurl" msgstr "ÐедоÑтаточное количеÑтво параметров в copyurl" #: src/services/a-rex/grid-manager/conf/UrlMapConfig.cpp:54 msgid "Not enough parameters in linkurl" msgstr "ÐедоÑтаточное количеÑтво параметров в linkurl" #: src/services/a-rex/grid-manager/files/ControlFileContent.cpp:179 #, c-format msgid "Wrong directory in %s" msgstr "Ðеверный каталог в %s" #: src/services/a-rex/grid-manager/files/ControlFileHandling.cpp:102 #, c-format msgid "Failed setting file owner: %s" msgstr "Ðе удалоÑÑŒ задать владельца файла: %s" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:33 msgid "gm-delegations-converter changes format of delegation database." msgstr "" "gm-delegations-converter преобразовывает формат базы данных делегированиÑ." 
#: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:38 #: src/services/a-rex/grid-manager/gm_jobs.cpp:110 #: src/services/a-rex/grid-manager/gm_kick.cpp:24 msgid "use specified configuration file" msgstr "иÑпользовать указанный файл наÑтроек" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:39 #: src/services/a-rex/grid-manager/gm_jobs.cpp:111 #: src/services/a-rex/grid-manager/gm_kick.cpp:25 msgid "file" msgstr "файл" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:43 #: src/services/a-rex/grid-manager/gm_jobs.cpp:115 msgid "read information from specified control directory" msgstr "читать информацию из указанного контрольного каталога" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:44 #: src/services/a-rex/grid-manager/gm_jobs.cpp:116 msgid "dir" msgstr "каталог" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:48 msgid "convert from specified input database format [bdb|sqlite]" msgstr "преобразовать из указанного иÑходного формата базы данных [bdb|sqlite]" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:49 #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:54 msgid "database format" msgstr "формат базы данных" #: src/services/a-rex/grid-manager/gm_delegations_converter.cpp:53 msgid "convert into specified output database format [bdb|sqlite]" msgstr "преобразовать в указанный выходной формат базы данных [bdb|sqlite]" #: src/services/a-rex/grid-manager/gm_jobs.cpp:36 #, c-format msgid "Could not read data staging configuration from %s" msgstr "Ðе удалоÑÑŒ прочитать наÑтройки Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ… в %s" #: src/services/a-rex/grid-manager/gm_jobs.cpp:44 #, c-format msgid "Can't read transfer states from %s. Perhaps A-REX is not running?" msgstr "" "Ðе удалоÑÑŒ прочеÑть ÑоÑтоÑÐ½Ð¸Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡ Ñ %s. Возможно, A-REX не запущен?" #: src/services/a-rex/grid-manager/gm_jobs.cpp:100 msgid "gm-jobs displays information on current jobs in the system." msgstr "gm-jobs выводит информацию о текущих заданиÑÑ… в ÑиÑтеме." 
#: src/services/a-rex/grid-manager/gm_jobs.cpp:105 msgid "display more information on each job" msgstr "вывеÑти больше информации о каждом задании" #: src/services/a-rex/grid-manager/gm_jobs.cpp:120 msgid "print summary of jobs in each transfer share" msgstr "вывеÑти Ñводку о задачах в каждой из транÑферных квот" #: src/services/a-rex/grid-manager/gm_jobs.cpp:125 msgid "do not print list of jobs" msgstr "не выводить ÑпиÑок задач" #: src/services/a-rex/grid-manager/gm_jobs.cpp:130 msgid "do not print number of jobs in each state" msgstr "не выводить количеÑтво задач в каждом ÑоÑтоÑнии" #: src/services/a-rex/grid-manager/gm_jobs.cpp:135 msgid "print state of the service" msgstr "вывеÑти ÑоÑтоÑние ÑервиÑа" #: src/services/a-rex/grid-manager/gm_jobs.cpp:140 msgid "show only jobs of user(s) with specified subject name(s)" msgstr "" "показать задачи, принадлежащие пользователÑм Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¼Ð¸ именами Ñубъекта" #: src/services/a-rex/grid-manager/gm_jobs.cpp:141 #: src/services/a-rex/grid-manager/gm_jobs.cpp:151 #: src/services/a-rex/grid-manager/gm_jobs.cpp:161 msgid "dn" msgstr "DN" #: src/services/a-rex/grid-manager/gm_jobs.cpp:145 msgid "request to cancel job(s) with specified ID(s)" msgstr "запроÑить обрыв задач Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¼Ð¸ Ñрлыками" #: src/services/a-rex/grid-manager/gm_jobs.cpp:146 #: src/services/a-rex/grid-manager/gm_jobs.cpp:156 #: src/services/a-rex/grid-manager/gm_jobs.cpp:166 #: src/services/a-rex/grid-manager/gm_jobs.cpp:176 #: src/services/a-rex/grid-manager/gm_kick.cpp:30 msgid "id" msgstr "ID" #: src/services/a-rex/grid-manager/gm_jobs.cpp:150 msgid "" "request to cancel jobs belonging to user(s) with specified subject name(s)" msgstr "" "запроÑить обрыв задач, принадлежащих пользователÑм Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¼Ð¸ именами " "Ñубъекта" #: src/services/a-rex/grid-manager/gm_jobs.cpp:155 msgid "request to clean job(s) with specified ID(s)" msgstr "запроÑить удаление задач Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¼Ð¸ Ñрлыками" #: src/services/a-rex/grid-manager/gm_jobs.cpp:160 msgid "" "request to clean jobs belonging to user(s) with specified subject name(s)" msgstr "" "запроÑить удаление задач, принадлежащих пользователÑм Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¼Ð¸ именами " "Ñубъекта" #: src/services/a-rex/grid-manager/gm_jobs.cpp:165 msgid "show only jobs with specified ID(s)" msgstr "показать задачи Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¼Ð¸ Ñрлыками" #: src/services/a-rex/grid-manager/gm_jobs.cpp:170 msgid "print list of available delegation IDs" msgstr "вывеÑти ÑпиÑок доÑтупных идентификаторов делегированиÑ" #: src/services/a-rex/grid-manager/gm_jobs.cpp:175 msgid "print delegation token of specified ID(s)" msgstr "вывеÑти токен Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ð¾Ð³Ð¾ идентификатора" #: src/services/a-rex/grid-manager/gm_jobs.cpp:180 msgid "print main delegation token of specified Job ID(s)" msgstr "вывеÑти оÑновной токен Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ð¾Ð³Ð¾ идентификатора" #: src/services/a-rex/grid-manager/gm_jobs.cpp:181 msgid "job id" msgstr "ID заданиÑ" #: src/services/a-rex/grid-manager/gm_jobs.cpp:185 msgid "" "output requested elements (jobs list, delegation ids and tokens) to file" msgstr "" "запиÑать указанные Ñлементы (ÑпиÑок задач, идентификаторы и токены " "делегированиÑ) в файл" #: src/services/a-rex/grid-manager/gm_jobs.cpp:186 msgid "file name" msgstr "Ðазвание файла" #: src/services/a-rex/grid-manager/gm_jobs.cpp:209 #, c-format msgid "Using configuration at %s" msgstr "ИÑпользуютÑÑ Ð½Ð°Ñтройки в %s" #: src/services/a-rex/grid-manager/gm_jobs.cpp:232 #, c-format msgid "Failed to open output 
file '%s'" msgstr "Ðе удалоÑÑŒ открыть выходной файл '%s'" #: src/services/a-rex/grid-manager/gm_jobs.cpp:241 msgid "Looking for current jobs" msgstr "ПоиÑк текущих задач" #: src/services/a-rex/grid-manager/gm_jobs.cpp:278 #, c-format msgid "Job: %s : ERROR : Unrecognizable state" msgstr "Задача: %s : ERROR : Ðеопознанное ÑоÑтоÑние" #: src/services/a-rex/grid-manager/gm_jobs.cpp:287 #, c-format msgid "Job: %s : ERROR : No local information." msgstr "Задача: %s : ERROR : ОтÑутÑтвует Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ." #: src/services/a-rex/grid-manager/gm_jobs.cpp:461 #, c-format msgid "Job: %s : ERROR : Failed to put cancel mark" msgstr "Задача: %s : ERROR : Сбой запиÑи метки прерываниÑ" #: src/services/a-rex/grid-manager/gm_jobs.cpp:465 #, c-format msgid "Job: %s : Cancel request put but failed to communicate to service" msgstr "" "Задача: %s : Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° отмену отправлен, но ÑвÑзь Ñо Ñлужбой отÑутÑтвует" #: src/services/a-rex/grid-manager/gm_jobs.cpp:467 #, c-format msgid "Job: %s : Cancel request put and communicated to service" msgstr "Задача: %s : Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° отмену отправлен и Ñообщён Ñлужбе" #: src/services/a-rex/grid-manager/gm_jobs.cpp:478 #, c-format msgid "Job: %s : ERROR : Failed to put clean mark" msgstr "Задача: %s : ERROR : Сбой запиÑи отметки об очиÑтке" #: src/services/a-rex/grid-manager/gm_jobs.cpp:482 #, c-format msgid "Job: %s : Clean request put but failed to communicate to service" msgstr "Job: %s : Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° очиÑтку отправлен, но ÑвÑзь Ñо Ñлужбой отÑутÑтвует" #: src/services/a-rex/grid-manager/gm_jobs.cpp:484 #, c-format msgid "Job: %s : Clean request put and communicated to service" msgstr "Задача: %s : Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° очиÑтку отправлен и Ñообщён Ñлужбе" #: src/services/a-rex/grid-manager/gm_kick.cpp:18 msgid "" "gm-kick wakes up the A-REX corresponding to the given control file. If no " "file is given it uses the control directory found in the configuration file." msgstr "" "gm-kick принудительно запуÑкает цикл A-REX в ÑоответÑтвии Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¼ " "управлÑющим файлом. ЕÑли файл не указан, иÑпользуетÑÑ ÑƒÐ¿Ñ€Ð°Ð²Ð»Ñющий каталог из " "файла наÑтроек." #: src/services/a-rex/grid-manager/gm_kick.cpp:29 msgid "inform about changes in particular job (can be used multiple times)" msgstr "" "информировать об изменениÑÑ… в заданной задаче (допуÑкаетÑÑ Ð¼Ð½Ð¾Ð³Ð¾ÐºÑ€Ð°Ñ‚Ð½Ð¾Ðµ " "иÑпользование)" #: src/services/a-rex/grid-manager/inputcheck.cpp:39 #, c-format msgid "Failed to acquire source: %s" msgstr "Ðе удалоÑÑŒ получить иÑточник: %s" #: src/services/a-rex/grid-manager/inputcheck.cpp:44 #, c-format msgid "Failed to resolve %s" msgstr "Ðе удалоÑÑŒ разрешить %s" #: src/services/a-rex/grid-manager/inputcheck.cpp:61 #, c-format msgid "Failed to check %s" msgstr "Ðе удалоÑÑŒ проверить %s" #: src/services/a-rex/grid-manager/inputcheck.cpp:75 msgid "job_description_file [proxy_file]" msgstr "job_description_file [proxy_file]" #: src/services/a-rex/grid-manager/inputcheck.cpp:76 msgid "" "inputcheck checks that input files specified in the job description are " "available and accessible using the credentials in the given proxy file." msgstr "" "inputcheck проверÑет, доÑтупны ли входные файлы, указанные в опиÑании " "задачи, иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ñ‹ доÑтупа в указанном файле доверенноÑти." 
#: src/services/a-rex/grid-manager/inputcheck.cpp:88 msgid "Wrong number of arguments given" msgstr "Указано неверное количеÑтво аргументов" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:143 #, c-format msgid "Unsupported value for allownew: %s" msgstr "Ðеподдерживаемое значение Ð´Ð»Ñ allownew: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:154 msgid "Wrong number in maxjobdesc" msgstr "ÐедопуÑтимое чиÑло в maxjobdesc" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:162 #: src/services/gridftpd/fileplugin/fileplugin.cpp:186 #, c-format msgid "Unsupported configuration command: %s" msgstr "ÐÐµÐ¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ð°Ñ Ð¸Ð½ÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ Ð½Ð°Ñтроек: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:171 #, c-format msgid "Mapped user:group (%s:%s) not found" msgstr "СоответÑтвующие user:group (%s:%s) не обнаружены" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:174 msgid "Job submission user can't be root" msgstr "Пользователь, заÑылающий задачи, не может быть Ñуперпользователем" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:177 msgid "Failed processing A-REX configuration" msgstr "Ðе удалоÑÑŒ обработать наÑтройки A-REX" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:201 msgid "This user is denied to submit new jobs." msgstr "Этому пользователю отказано в праве запуÑка новых задач." #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:206 msgid "No control or session directories defined in configuration" msgstr "Ð’ наÑтройках не заданы контрольные директории или каталоги ÑеÑÑий" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:210 #, c-format msgid "Job submission user: %s (%i:%i)" msgstr "Пользователь, отправивший задачу: %s (%i:%i)" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:213 msgid "Job plugin was not initialised" msgstr "Модуль обработки задач не был запущен" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:231 msgid "No delegated credentials were passed" msgstr "Делегированные параметры доÑтупа не переданы" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:305 #, c-format msgid "Cancelling job %s" msgstr "Прерывание задачи %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:360 #, c-format msgid "Cleaning job %s" msgstr "УдалÑетÑÑ Ð·Ð°Ð´Ð°Ñ‡Ð° %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:400 msgid "Request to open file with storing in progress" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° в процеÑÑе запиÑи" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:434 #: src/services/gridftpd/fileplugin/fileplugin.cpp:344 #, c-format msgid "Retrieving file %s" msgstr "Получение файла %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:484 #, c-format msgid "Accepting submission of new job or modification request: %s" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° заÑылку новой задачи или изменение Ñтарой принÑÑ‚: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:506 #: src/services/gridftpd/fileplugin/fileplugin.cpp:384 #: src/services/gridftpd/fileplugin/fileplugin.cpp:421 #, c-format msgid "Storing file %s" msgstr "ЗапиÑываетÑÑ Ñ„Ð°Ð¹Ð» %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:527 #, c-format msgid "Unknown open mode %i" msgstr "ÐеизвеÑтный режим Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ %i" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:653 #, c-format msgid "action(%s) != request" msgstr "action(%s) != request" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:704 msgid "Failed writing job 
description" msgstr "Ðе удалоÑÑŒ запиÑать опиÑание задачи" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:920 msgid "Failed writing local description" msgstr "Сбой запиÑи локального опиÑаниÑ" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:930 msgid "Failed writing ACL" msgstr "Ðе удалоÑÑŒ запиÑать ACL" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:946 #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:953 #: src/services/a-rex/job.cpp:819 #, c-format msgid "Failed to run external plugin: %s" msgstr "Ðе удалоÑÑŒ запуÑтить внешний подключаемый модуль: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:957 #: src/services/a-rex/job.cpp:823 #, c-format msgid "Plugin response: %s" msgstr "Ответ подключаемого модулÑ: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:959 msgid "Failed to run external plugin" msgstr "Ðе удалоÑÑŒ запуÑтить внешний подключаемый модуль" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:969 #, c-format msgid "Failed to create session directory %s" msgstr "Ðе удалоÑÑŒ Ñоздать каталог ÑеÑÑии %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:979 msgid "Failed writing status" msgstr "Ðе удалоÑÑŒ запиÑать ÑоÑтоÑние" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:993 #, c-format msgid "Failed to lock delegated credentials: %s" msgstr "Ðевозможно заблокировать делегированные параметры доÑтупа: %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1235 #, c-format msgid "Renewing proxy for job %s" msgstr "ОбновлÑетÑÑ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñть Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1251 #, c-format msgid "New proxy expires at %s" msgstr "Срок дейÑÑ‚Ð²Ð¸Ñ Ð½Ð¾Ð²Ð¾Ð¹ доверенноÑти иÑтекает в %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1254 msgid "Failed to write 'local' information" msgstr "Ðе удалоÑÑŒ запиÑать 'локальную' информацию" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1257 msgid "Failed to renew proxy" msgstr "Ðе удалоÑÑŒ обновить доверенноÑть" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1260 msgid "New proxy expiry time is not later than old proxy, not renewing proxy" msgstr "" "Срок дейÑÑ‚Ð²Ð¸Ñ Ð½Ð¾Ð²Ð¾Ð¹ доверенноÑти не дольше Ñтарой, доверенноÑть не " "обновлÑетÑÑ" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1302 #, c-format msgid "Checking file %s" msgstr "Проверка файла %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1349 msgid "ID contains forbidden characters" msgstr "ID Ñодержит недопуÑтимые Ñимволы" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1383 #: src/services/a-rex/job.cpp:1023 #, c-format msgid "Failed to create file in %s" msgstr "Ðе удалоÑÑŒ Ñоздать файл в %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1393 msgid "Out of tries while allocating new job ID" msgstr "ЗакончилиÑÑŒ попытки приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð½Ð¾Ð²Ð¾Ð³Ð¾ Ñрлыка задачи" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1473 #, c-format msgid "Failed to read job's local description for job %s from %s" msgstr "Ðе удалоÑÑŒ прочеÑть локальное опиÑание Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s из %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1562 msgid "No non-draining session directories available" msgstr "Ðет каталогов ÑеÑÑий не в ÑоÑтоÑнии разгрузки" #: src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1568 #, c-format msgid "Using control directory %s" msgstr "ИÑпользуетÑÑ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ñ‹Ð¹ каталог %s" #: 
src/services/a-rex/grid-manager/jobplugin/jobplugin.cpp:1569 #, c-format msgid "Using session directory %s" msgstr "ИÑпользуетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³ ÑеÑÑии %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:26 #, c-format msgid "Failed to read job's ACL for job %s from %s" msgstr "Ðе удалоÑÑŒ прочеÑть правила доÑтупа Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s из %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:70 #, c-format msgid "Failed to parse user policy for job %s" msgstr "Сбой разбора правил допуÑка Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:75 #, c-format msgid "Failed to load policy evaluator for policy of job %s" msgstr "Ðе удалоÑÑŒ подгрузить анализатор Ð´Ð»Ñ Ð¿Ñ€Ð°Ð²Ð¸Ð» допуÑка задачи %s" #: src/services/a-rex/grid-manager/jobplugin/jobplugin_acl.cpp:129 #, c-format msgid "Unknown ACL policy %s for job %s" msgstr "ÐеизвеÑтное правило доÑтупа %s Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:73 #, c-format msgid "" "DTR Generator waiting to process: %d jobs to cancel, %d DTRs, %d new jobs" msgstr "" "DTRGenerator ожидает обработки: %d отменённых задач, %d DTR, %d новых задач" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:87 #, c-format msgid "%s: Job cancel request from DTR generator to scheduler" msgstr "%s: Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¾ прерывании задачи от генератора DTR к планировщику" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:92 #, c-format msgid "%s: Returning canceled job from DTR generator" msgstr "%s: Возврат прерванной задачи из генератора DTR" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:133 #, c-format msgid "%s: Re-requesting attention from DTR generator" msgstr "%s: Повторный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± обÑлуживании к генератору DTR" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:143 #, c-format msgid "DTR Generator processed: %d jobs to cancel, %d DTRs, %d new jobs" msgstr "DTRGenerator обработал: %d отменённых задач, %d DTR, %d новых задач" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:162 msgid "Exiting Generator thread" msgstr "ОÑтанавливаетÑÑ Ð¿Ð¾Ñ‚Ð¾Ðº Generator" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:234 msgid "Shutting down data staging threads" msgstr "ЗакрываютÑÑ Ð¿Ð¾Ñ‚Ð¾ÐºÐ¸ Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:244 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:257 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:285 msgid "DTRGenerator is not running!" msgstr "DTRGenerator не запущен!" 
#: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:247 #, c-format msgid "Received DTR %s during Generator shutdown - may not be processed" msgstr "" "Запрос DTR %s получен в процессе закрытия генератора - не может быть " "обработан" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:261 msgid "DTRGenerator was sent null job" msgstr "DTRGenerator получил ноль задач" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:270 #, c-format msgid "%s: Received job in DTR generator" msgstr "%s: Получена задача в DTRGenerator" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:273 #, c-format msgid "%s: Failed to receive job in DTR generator" msgstr "%s: Сбой получения задачи в DTRGenerator" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:280 msgid "DTRGenerator got request to cancel null job" msgstr "DTRGenerator получил запрос отменить ноль задач" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:295 msgid "DTRGenerator is queried about null job" msgstr "DTRGenerator опрошен о нуле задач" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:325 msgid "DTRGenerator is asked about null job" msgstr "DTRGenerator запрошен о нуле задач" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:353 msgid "DTRGenerator is requested to remove null job" msgstr "DTRGenerator получил запрос удалить ноль задач" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:360 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:368 #, c-format msgid "%s: Trying to remove job from data staging which is still active" msgstr "%s: Попытка удалить задание из активного процесса размещения данных" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:376 #, c-format msgid "%s: Trying remove job from data staging which does not exist" msgstr "" "%s: Попытка удалить задание из несуществующего процесса размещения данных" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:387 #, c-format msgid "%s: Invalid DTR" msgstr "%s: Недействительный запрос DTR" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:404 #, c-format msgid "%s: Received DTR %s to copy file %s in state %s" msgstr "%s: Получен запрос DTR %s на копирование файла %s в состоянии %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:408 #, c-format msgid "%s: Received DTR belongs to inactive job" msgstr "%s: Полученный DTR принадлежит неактивной задаче" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:425 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1067 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:459 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:517 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:631 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:841 #, c-format msgid "%s: Failed reading local information" msgstr "%s: Не удалось прочесть локальную информацию" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:434 #, c-format msgid "%s: DTR %s to copy file %s failed" msgstr "%s: Сбой запроса DTR %s на копирование файла %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:440 #, c-format msgid "%s: Cancelling other DTRs" msgstr "%s: Прерывание остальных запросов DTR" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:450 #, c-format msgid "%s: DTR %s to copy to %s failed but is not mandatory" msgstr "%s: копирование DTR %s в %s не удалось, но не было обязательным" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:460 #:
src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:722 #, c-format msgid "%s: Failed to read list of output files" msgstr "%s: Не удалось прочесть список выходных файлов" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:474 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:615 #, c-format msgid "%s: Failed to read dynamic output files in %s" msgstr "%s: Не удалось прочесть динамический список выходных файлов в %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:476 #, c-format msgid "%s: Going through files in list %s" msgstr "%s: Обрабатываются файлы в списке %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:480 #, c-format msgid "%s: Removing %s from dynamic output file %s" msgstr "%s: Удаляется %s из динамического списка выходных файлов %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:484 #, c-format msgid "%s: Failed to write back dynamic output files in %s" msgstr "%s: Не удалось записать динамические выходные файлы обратно в %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:500 #, c-format msgid "%s: Failed to write list of output files" msgstr "%s: Не удалось записать список выходных файлов" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:504 #, c-format msgid "%s: Failed to write list of output status files" msgstr "%s: Не удалось вывести список состояний выходных файлов" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:516 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:734 #, c-format msgid "%s: Failed to read list of input files" msgstr "%s: Не удалось прочесть список входных файлов" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:535 #, c-format msgid "%s: Failed to write list of input files" msgstr "%s: Не удалось записать список входных файлов" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:547 #, c-format msgid "%s: Received DTR with two remote endpoints!" msgstr "%s: Получен запрос DTR с двумя удалёнными адресами!"
#: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:559 #: src/services/candypond/CandyPondGenerator.cpp:105 #, c-format msgid "No active job id %s" msgstr "Нет активной задачи с ярлыком %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:603 #, c-format msgid "%s: Failed to read list of output files, can't clean up session dir" msgstr "" "%s: Не удалось прочесть список выходных файлов, невозможно очистить каталог " "сессии" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:629 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:648 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:772 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:895 #, c-format msgid "%s: Failed to clean up session dir" msgstr "%s: Не удалось очистить каталог сессии" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:639 #, c-format msgid "%s: Failed to read list of input files, can't clean up session dir" msgstr "" "%s: Не удалось прочесть список входных файлов, невозможно очистить каталог " "сессии" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:661 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:665 msgid "uploads" msgstr "отгрузок" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:661 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:665 msgid "downloads" msgstr "загрузок" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:662 msgid "cancelled" msgstr "отменено" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:662 msgid "finished" msgstr "готово" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:660 #, c-format msgid "%s: All %s %s successfully" msgstr "%s: Все процессы %s успешно завершились (%s)" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:664 #, c-format msgid "%s: Some %s failed" msgstr "%s: Некоторые процессы %s дали сбой" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:668 #, c-format msgid "%s: Requesting attention from DTR generator" msgstr "%s: Запрос об обслуживании к генератору DTR" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:679 msgid "DTRGenerator is requested to process null job" msgstr "DTRGenerator получил запрос на обработку нуля задач" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:685 msgid "download" msgstr "передача" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:685 msgid "upload" msgstr "отгрузка" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:684 #, c-format msgid "%s: Received data staging request to %s files" msgstr "%s: Получен запрос на размещение файлов (%s)" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:743 #, c-format msgid "%s: Duplicate file in list of input files: %s" msgstr "%s: Повторяющееся имя файла в списке входных файлов: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:792 #, c-format msgid "%s: Reading output files from user generated list in %s" msgstr "%s: Чтение выходных файлов в списке пользователя %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:794 #, c-format msgid "%s: Error reading user generated output file list in %s" msgstr "%s: Ошибка чтения списка выходных файлов пользователя в %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:821 #, c-format msgid "%s: Failed to list output directory %s: %s" msgstr "%s: Сбой вывода содержимого каталога назначения %s: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:839 #, c-format msgid "%s: Adding new output file %s: %s" msgstr "%s:
Добавление нового файла выхода %s: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:861 #, c-format msgid "%s: Two identical output destinations: %s" msgstr "%s: Два одинаковых Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð²Ñ‹Ð´Ð°Ñ‡Ð¸: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:874 #, c-format msgid "%s: Cannot upload two different files %s and %s to same LFN: %s" msgstr "%s: Ðевозможно запиÑать два разных файла %s и %s Ñ Ð¾Ð´Ð½Ð¸Ð¼ LFN: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:906 #, c-format msgid "%s: Received job in a bad state: %s" msgstr "%s: Задача получена в плохом ÑоÑтоÑнии: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:914 #, c-format msgid "%s: Session directory processing takes too long - %u.%06u seconds" msgstr "" "%s: Обработка каталога ÑеÑÑий продолжаетÑÑ Ñлишком долго - %u.%06u Ñекунд" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:981 #, c-format msgid "" "%s: Destination file %s was possibly left unfinished from previous A-REX " "run, will overwrite" msgstr "" "%s: Файл Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ %s вероÑтно оÑталÑÑ Ð½ÐµÐ´Ð¾Ð¿Ð¸Ñанным поÑле предыдущего " "запуÑка A-REX, перезапиÑÑŒ" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1073 #, c-format msgid "%s: Failed writing local information" msgstr "%s: Ðе удалоÑÑŒ запиÑать локальную информацию" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1091 #, c-format msgid "%s: Cancelling active DTRs" msgstr "%s: Прерывание активных запроÑов" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1098 msgid "DTRGenerator is asked to check files for null job" msgstr "DTRGenerator получил Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€Ð¸Ñ‚ÑŒ файлы Ð½ÑƒÐ»Ñ Ð·Ð°Ð´Ð°Ñ‡" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1118 #, c-format msgid "%s: Can't read list of input files" msgstr "%s: Ðевозможно прочеÑть ÑпиÑок входных файлов" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1133 #, c-format msgid "%s: Checking user uploadable file: %s" msgstr "%s: Проверка отгружаемого файла пользователÑ: %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1138 #, c-format msgid "%s: User has uploaded file %s" msgstr "%s: Пользователь отгрузил файл %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1145 #, c-format msgid "%s: Failed writing changed input file." msgstr "%s: Ðе удалоÑÑŒ запиÑать изменившийÑÑ Ð²Ñ…Ð¾Ð´Ð½Ð¾Ð¹ файл." 
#: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1149 #, c-format msgid "%s: Critical error for uploadable file %s" msgstr "%s: КритичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° Ð´Ð»Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ¶Ð°ÐµÐ¼Ð¾Ð³Ð¾ файла %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1155 #, c-format msgid "%s: User has NOT uploaded file %s" msgstr "%s: Пользователь ÐЕ отгрузил файл %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1167 #, c-format msgid "%s: Uploadable files timed out" msgstr "%s: ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ¶Ð°ÐµÐ¼Ñ‹Ñ… файлов" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1223 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1249 #, c-format msgid "%s: Can't convert checksum %s to int for %s" msgstr "%s: Ðевозможно преобразовать контрольную Ñумму файла %s в целое Ð´Ð»Ñ %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1230 #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1244 #, c-format msgid "%s: Can't convert filesize %s to int for %s" msgstr "%s: Ðевозможно преобразовать размер файла %s в целое Ð´Ð»Ñ %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1239 #, c-format msgid "%s: Invalid size/checksum information (%s) for %s" msgstr "%s: ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ размере/контрольной Ñумме (%s) Ð´Ð»Ñ %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1261 #, c-format msgid "%s: Invalid file: %s is too big." msgstr "%s: Ðеверный файл: %s Ñлишком велик." #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1277 #, c-format msgid "%s: Failed to switch user ID to %d/%d to read file %s" msgstr "" "%s: Ðе удалоÑÑŒ изменить идентификатор Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½Ð° %d/%d Ð´Ð»Ñ Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° " "%s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1283 #, c-format msgid "%s: Failed to open file %s for reading" msgstr "%s: Ðе удалоÑÑŒ открыть файл %s на чтение" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1291 #, c-format msgid "%s: Error accessing file %s" msgstr "%s: Ошибка доÑтупа к файлу %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1303 #, c-format msgid "%s: Error reading file %s" msgstr "%s: Ошибка при чтении файла %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1318 #, c-format msgid "%s: File %s has wrong checksum: %llu. Expected %lli" msgstr "%s: У файла %s Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма: %llu. ОжидалаÑÑŒ %lli" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1324 #, c-format msgid "%s: Checksum %llu verified for %s" msgstr "%s: ÐŸÑ€Ð¾Ð²ÐµÑ€Ð¾Ñ‡Ð½Ð°Ñ Ñумма %llu подтверждена Ð´Ð»Ñ %s" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1336 msgid "" "Found unfinished DTR transfers. It is possible the previous A-REX process " "did not shut down normally" msgstr "" "Ðайдены незаконченные процеÑÑÑ‹ DTR. 
ВероÑтно, предыдущий процеÑÑ A-REX " "завершилÑÑ Ñбоем" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1343 #, c-format msgid "Found DTR %s for file %s left in transferring state from previous run" msgstr "" "Ðайден Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR %s Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° %s, оÑтавшийÑÑ Ð² ÑоÑтоÑнии передачи поÑле " "предыдущего запуÑка" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1352 msgid "DTRGenerator is requested to clean links for null job" msgstr "DTRGenerator получил Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ñ‡Ð¸Ñтить ÑÑылки Ð½ÑƒÐ»Ñ Ð·Ð°Ð´Ð°Ñ‡" #: src/services/a-rex/grid-manager/jobs/DTRGenerator.cpp:1368 #, c-format msgid "%s: Cache cleaning takes too long - %u.%06u seconds" msgstr "%s: ОчиÑтка кÑша продолжаетÑÑ Ñлишком долго - %u.%06u Ñекунд" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:108 #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:190 #, c-format msgid "%s: Job monitoring counter is broken" msgstr "%s: Счётчик ÑÐ»ÐµÐ¶ÐµÐ½Ð¸Ñ Ð·Ð° задачей Ñбит" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:115 #, c-format msgid "%s: Job monitoring is unintentionally lost" msgstr "%s: Слежение за задачей непреднамеренно прервано" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:124 #, c-format msgid "%s: Job monitoring stop success" msgstr "%s: Слежение за задачей уÑпешно прекращено" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:129 #, c-format msgid "" "%s: Job monitoring stop requested with %u active references and %s queue " "associated" msgstr "" "%s: Запрошено прекращение ÑÐ»ÐµÐ¶ÐµÐ½Ð¸Ñ Ð·Ð° задачей Ñ %u активными ÑÑылками и " "аÑÑоциированной очередью %s" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:131 #, c-format msgid "%s: Job monitoring stop requested with %u active references" msgstr "%s: Запрошено прекращение ÑÐ»ÐµÐ¶ÐµÐ½Ð¸Ñ Ð·Ð° задачей Ñ %u активными ÑÑылками" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:195 #, c-format msgid "%s: Job monitoring is lost due to removal from queue" msgstr "%s: Слежение за задачей прервано в ÑвÑзи Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸ÐµÐ¼ из очереди" #: src/services/a-rex/grid-manager/jobs/GMJob.cpp:278 #, c-format msgid "%s: PushSorted failed to find job where expected" msgstr "%s: PushSorted не Ñмог обнаружить задачу в ожидаемом меÑте" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:132 #, c-format msgid "Replacing queue '%s' with '%s'" msgstr "Очередь '%s' заменÑетÑÑ Ð½Ð° '%s'" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:226 #, c-format msgid "Bad name for stdout: %s" msgstr "ÐедопуÑтимое Ð¸Ð¼Ñ Ð´Ð»Ñ stdout: %s" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:234 #, c-format msgid "Bad name for stderr: %s" msgstr "ÐедопуÑтимое Ð¸Ð¼Ñ Ð´Ð»Ñ stderr: %s" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:297 #, c-format msgid "Bad name for runtime environment: %s" msgstr "ÐедопуÑтимое название Ñреды выполнениÑ: %s" #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:342 msgid "Job description file could not be read." msgstr "Ðевозможно прочеÑть файл Ñ Ð¾Ð¿Ð¸Ñанием задачи." 
#: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:393 #: src/services/a-rex/grid-manager/jobs/JobDescriptionHandler.cpp:407 #, c-format msgid "Bad name for executable: %s" msgstr "ÐедопуÑтимое Ð¸Ð¼Ñ Ð´Ð»Ñ Ð¸ÑполнÑемого файла: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:89 msgid "Failed to start data staging threads" msgstr "Ðе удалоÑÑŒ запуÑтить потоки Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:190 #, c-format msgid "" "%s: Failed reading .local and changing state, job and A-REX may be left in " "an inconsistent state" msgstr "" "%s: Ошибка при чтении .local и изменении ÑоÑтоÑниÑ, задачи и A-REX могут " "оказатьÑÑ Ð² противоречивом ÑоÑтоÑнии" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:195 #, c-format msgid "%s: unexpected failed job add request: %s" msgstr "%s: непредуÑмотренный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð½ÐµÑƒÑпешной задачи: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:206 #, c-format msgid "%s: unexpected job add request: %s" msgstr "%s: непредуÑмотренный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:259 #, c-format msgid "%s: job for attention" msgstr "%s: задача Ð´Ð»Ñ Ð¾Ð±ÑлуживаниÑ" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:269 msgid "all for attention" msgstr "вÑе Ð´Ð»Ñ Ð¾Ð±ÑлуживаниÑ" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:286 #, c-format msgid "%s: job found while scanning" msgstr "%s: задача обнаружена при Ñканировании" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:314 #, c-format msgid "%s: job will wait for external process" msgstr "%s: задача будет ожидать внешнего процеÑÑа" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:331 #, c-format msgid "%s: job assigned for slow polling" msgstr "%s: задача назначена Ð´Ð»Ñ Ð¼ÐµÐ´Ð»ÐµÐ½Ð½Ð¾Ð³Ð¾ опроÑа" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:349 #, c-format msgid "%s: job being processed" msgstr "%s: задача обрабатываетÑÑ" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:384 #, c-format msgid "Current jobs in system (PREPARING to FINISHING) per-DN (%i entries)" msgstr "" "Текущие задачи в ÑиÑтеме (от PREPARING до FINISHING) на DN (%i запиÑей)" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:386 #, c-format msgid "%s: %i" msgstr "%s: %i" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:398 #, c-format msgid "%s: Failed storing failure reason: %s" msgstr "%s: Сбой запиÑи причины ÑбоÑ: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:404 #, c-format msgid "%s: Failed reading job description: %s" msgstr "%s: Сбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð¾Ð¿Ð¸ÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:416 #, c-format msgid "%s: Failed parsing job request." msgstr "%s: Сбой разборки запроÑа задачи." 
#: src/services/a-rex/grid-manager/jobs/JobsList.cpp:451 #, c-format msgid "%s: Failed writing list of output files: %s" msgstr "%s: Ðе удалоÑÑŒ запиÑать ÑпиÑок выходных файлов: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:477 #, c-format msgid "%s: Failed obtaining lrms id" msgstr "%s: Ðе удалоÑÑŒ получить номер из СУПО" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:491 #, c-format msgid "%s: Failed writing local information: %s" msgstr "%s: Ðе удалоÑÑŒ запиÑать локальную информацию: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:523 #, c-format msgid "%s: Failed creating grami file" msgstr "%s: Ðе удалоÑÑŒ Ñоздать файл grami" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:527 #, c-format msgid "%s: Failed setting executable permissions" msgstr "%s: Ðе удалоÑÑŒ уÑтановить права на иÑполнение" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:535 #, c-format msgid "%s: state SUBMIT: starting child: %s" msgstr "%s: ÑоÑтоÑние SUBMIT: запуÑк дочернего процеÑÑа: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:542 #, c-format msgid "%s: Failed running submission process" msgstr "%s: Ðе удалоÑÑŒ выполнить процедуру запуÑка" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:547 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:654 #, c-format msgid "%s: LRMS scripts limit of %u is reached - suspending submit/cancel" msgstr "" "%s: доÑтигнут предел Ñкрипта СУПО %u - приоÑтанавливаетÑÑ Ð·Ð°Ð¿ÑƒÑк/ÑнÑтие" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:563 #, c-format msgid "" "%s: Job submission to LRMS takes too long, but ID is already obtained. " "Pretending submission is done." msgstr "" "%s: ЗаÑылка задачи в СУПО проиÑходит Ñлишком медленно, но идентификатор уже " "доÑтупен. Будем Ñчитать, что заÑылка произведена." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:570 #, c-format msgid "%s: Job submission to LRMS takes too long. Failing." msgstr "%s: ЗаÑылка задачи в СУПО проиÑходит Ñлишком долго. Сбой." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:579 #, c-format msgid "%s: state SUBMIT: child exited with code %i" msgstr "%s: ÑоÑтоÑние: SUBMIT: дочерний процеÑÑ Ð·Ð°Ð²ÐµÑ€ÑˆÐ¸Ð»ÑÑ Ñ ÐºÐ¾Ð´Ð¾Ð¼ выхода: %i" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:584 #, c-format msgid "%s: Job submission to LRMS failed" msgstr "%s: Ðе удалоÑÑŒ направить задачу в СУПО" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:605 #, c-format msgid "%s: state CANCELING: timeout waiting for cancellation" msgstr "%s: ÑоÑтоÑние CANCELING: Ñрок Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ Ð¸Ñтёк" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:611 #, c-format msgid "%s: state CANCELING: job diagnostics collected" msgstr "%s: ÑоÑтоÑние CANCELING: ÑобираетÑÑ Ð´Ð¸Ð°Ð³Ð½Ð¾Ñтика задачи" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:639 #, c-format msgid "%s: state CANCELING: starting child: %s" msgstr "%s: ÑоÑтоÑние CANCELING: запуÑк дочернего процеÑÑа: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:641 #, c-format msgid "%s: Job has completed already. No action taken to cancel" msgstr "%s:Задача уже завершилаÑÑŒ. ДейÑÑ‚Ð²Ð¸Ñ Ð¿Ð¾ прерыванию не применÑÑŽÑ‚ÑÑ" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:649 #, c-format msgid "%s: Failed running cancellation process" msgstr "%s: Ðе удалоÑÑŒ выполнить процедуру прерываниÑ" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:668 #, c-format msgid "" "%s: Job cancellation takes too long, but diagnostic collection seems to be " "done. Pretending cancellation succeeded." 
msgstr "" "%s: Прерывание задачи проиÑходит Ñлишком медленно, но диагноÑтика уже " "доÑтупна. Будем Ñчитать, что прерывание произошло." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:674 #, c-format msgid "%s: Job cancellation takes too long. Failing." msgstr "%s: Прерывание задачи проиÑходит Ñлишком долго. Сбой." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:684 #, c-format msgid "%s: state CANCELING: child exited with code %i" msgstr "%s: ÑоÑтоÑние CANCELING: дочерний процеÑÑ Ð·Ð°Ð²ÐµÑ€ÑˆÐ¸Ð»ÑÑ Ñ ÐºÐ¾Ð´Ð¾Ð¼ выхода %i" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:690 #, c-format msgid "%s: Failed to cancel running job" msgstr "%s: Ðе удалоÑÑŒ оборвать иÑполнÑющуюÑÑ Ð·Ð°Ð´Ð°Ñ‡Ñƒ" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:709 #, c-format msgid "%s: State: %s: data staging finished" msgstr "%s: СоÑтоÑние: %s: размещение данных завершено" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:744 #, c-format msgid "%s: State: %s: still in data staging" msgstr "%s: СоÑтоÑние: %s: вÑÑ‘ ещё в процеÑÑе переноÑа данных" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:757 #, c-format msgid "%s: Job is not allowed to be rerun anymore" msgstr "%s: Задачу Ð½ÐµÐ»ÑŒÐ·Ñ Ð±Ð¾Ð»ÑŒÑˆÐµ перезапуÑкать" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:767 #, c-format msgid "%s: Job failed in unknown state. Won't rerun." msgstr "" "%s: Сбой иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в неизвеÑтном ÑоÑтоÑнии. ПерезапуÑка не будет." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:788 #, c-format msgid "%s: Reprocessing job description failed" msgstr "%s: Сбой повторной обработки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:795 #, c-format msgid "%s: Failed to read reprocessed list of output files" msgstr "%s: Ðе удалоÑÑŒ прочеÑть переработанный ÑпиÑок выходных файлов" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:799 #, c-format msgid "%s: Failed to read reprocessed list of input files" msgstr "%s: Ðе удалоÑÑŒ прочеÑть переработанный ÑпиÑок входных файлов" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:883 #, c-format msgid "%s: Reading status of new job failed" msgstr "%s: Ðе удалоÑÑŒ прочеÑть ÑоÑтоÑние новой задачи" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:896 #, c-format msgid "%s: State: ACCEPTED: parsing job description" msgstr "%s: СоÑтоÑние: ACCEPTED: обрабатываетÑÑ Ð¾Ð¿Ð¸Ñание задачи" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:898 #, c-format msgid "%s: Processing job description failed" msgstr "%s: Ðе удалоÑÑŒ обработать опиÑание задачи" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:936 #, c-format msgid "%s: new job is accepted" msgstr "%s: Ð½Ð¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° принÑта" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:948 #, c-format msgid "%s: %s: New job belongs to %i/%i" msgstr "%s: %s: ÐÐ¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° принадлежит %i/%i" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:953 #, c-format msgid "%s: old job is accepted" msgstr "%s: ÑÑ‚Ð°Ñ€Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° принÑта" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:964 #, c-format msgid "%s: State: ACCEPTED" msgstr "%s: СоÑтоÑние: ACCEPTED" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:970 #, c-format msgid "%s: State: ACCEPTED: dryrun" msgstr "%s: СоÑтоÑние: ACCEPTED: dryrun" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:993 #, c-format msgid "%s: State: ACCEPTED: has process time %s" msgstr "%s: СоÑтоÑние: ACCEPTED: Ð²Ñ€ÐµÐ¼Ñ Ð½Ð° иÑполнение %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:999 #, c-format 
msgid "%s: State: ACCEPTED: moving to PREPARING" msgstr "%s: ÑоÑтоÑние ACCEPTED: переход в PREPARING" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1015 #, c-format msgid "%s: State: PREPARING" msgstr "%s: СоÑтоÑние: PREPARING" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1022 #, c-format msgid "%s: Failed obtaining local job information." msgstr "%s: Ðе удалоÑÑŒ извлечь информацию о локальном ÑоÑтоÑнии задачи." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1075 #, c-format msgid "%s: State: SUBMIT" msgstr "%s: СоÑтоÑние: SUBMIT" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1095 #, c-format msgid "%s: State: CANCELING" msgstr "%s: СоÑтоÑние: CANCELING" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1115 #, c-format msgid "%s: State: INLRMS" msgstr "%s: СоÑтоÑние: INLRMS" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1120 #, c-format msgid "%s: State: INLRMS - checking for pending(%u) and mark" msgstr "%s: СоÑтоÑние: INLRMS - проверка приоÑтановки(%u) и метка" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1122 #, c-format msgid "%s: State: INLRMS - checking for not pending" msgstr "%s: СоÑтоÑние: INLRMS - проверка отÑутÑÑ‚Ð²Ð¸Ñ Ð¿Ñ€Ð¸Ð¾Ñтановки" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1124 #, c-format msgid "%s: Job finished" msgstr "%s: Задача завершена" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1128 #, c-format msgid "%s: State: INLRMS: exit message is %i %s" msgstr "%s: ÑоÑтоÑние INLRMS: Ñообщение на выходе %i %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1141 #, c-format msgid "%s: State: INLRMS - no mark found" msgstr "%s: СоÑтоÑние: INLRMS - метки не найдены" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1153 #, c-format msgid "%s: State: FINISHING" msgstr "%s: СоÑтоÑние: FINISHING" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1174 #, c-format msgid "%s: Job is requested to clean - deleting" msgstr "%s: ПоÑтупил Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° удаление задачи - удалÑетÑÑ" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1191 #, c-format msgid "%s: restarted PREPARING job" msgstr "%s: перезапущена задача из PREPARING" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1207 #, c-format msgid "%s: restarted INLRMS job" msgstr "%s: перезапущена задача из INLRMS" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1216 #, c-format msgid "%s: restarted FINISHING job" msgstr "%s: перезапущена задача из FINISHING" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1221 #, c-format msgid "%s: Can't rerun on request" msgstr "%s: ПерезапуÑк по требованию невозможен" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1223 #, c-format msgid "%s: Can't rerun on request - not a suitable state" msgstr "%s: ПерезапуÑк по запроÑу невозможен - неподходÑщее ÑоÑтоÑние" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1234 #, c-format msgid "%s: Job is too old - deleting" msgstr "%s: Задача Ñлишком ÑÑ‚Ð°Ñ€Ð°Ñ - удалÑетÑÑ" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1279 #, c-format msgid "%s: Job is ancient - delete rest of information" msgstr "%s: Задача уÑтарела - удалÑетÑÑ Ð¾ÑтавшаÑÑÑ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1297 #, c-format msgid "%s: Canceling job because of user request" msgstr "%s: Прерывание задачи по запроÑу пользователÑ" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1311 #, c-format msgid "%s: Failed to turn job into failed during cancel processing." 
msgstr "%s: Сбой приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ðµ ÑоÑтоÑÐ½Ð¸Ñ ÑÐ±Ð¾Ñ Ð¿Ñ€Ð¸ обрыве иÑполнениÑ." #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1343 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1351 #, c-format msgid "%s: Plugin at state %s : %s" msgstr "%s: Подключаемый модуль в ÑоÑтоÑнии %s : %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1357 #, c-format msgid "%s: Plugin execution failed" msgstr "%s: Сбой при иÑполнении подключаемого модулÑ" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1464 #, c-format msgid "%s: State: %s from %s" msgstr "%s: СоÑтоÑние: %s поÑле %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1515 #, c-format msgid "Failed to get DN information from .local file for job %s" msgstr "Ðе удалоÑÑŒ извлечь информацию о DN из файла .local задачи %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1542 #, c-format msgid "%s: Delete request due to internal problems" msgstr "%s: Удаление запроÑа в ÑвÑзи Ñ Ð²Ð½ÑƒÑ‚Ñ€ÐµÐ½Ð½Ð¸Ð¼Ð¸ неполадками" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1577 #, c-format msgid "%s: Job failure detected" msgstr "%s: Обнаружен Ñбой задачи" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1630 #, c-format msgid "Failed to move file %s to %s" msgstr "Ðе удалоÑÑŒ перемеÑтить файл %s в %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1638 #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1748 #, c-format msgid "Failed reading control directory: %s" msgstr "Сбой при чтении управлÑющего каталога: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:1708 #, c-format msgid "Failed reading control directory: %s: %s" msgstr "Сбой при чтении управлÑющего каталога: %s: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2022 #, c-format msgid "Helper process start failed: %s" msgstr "Сбой при запуÑке вÑпомогательного процеÑÑа: %s" #: src/services/a-rex/grid-manager/jobs/JobsList.cpp:2029 #, c-format msgid "Stopping helper process %s" msgstr "ОÑтанавливаетÑÑ Ð²Ñпомогательный процеÑÑ %s" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:61 #, c-format msgid "Error with hearbeatfile: %s" msgstr "Ошибка в файле такта: %s" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:73 #: src/services/a-rex/grid-manager/log/JobsMetrics.cpp:139 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:136 #, c-format msgid ": Metrics tool returned error code %i: %s" msgstr ": СредÑтво Ð¸Ð·Ð¼ÐµÑ€ÐµÐ½Ð¸Ñ Ñ…Ð°Ñ€Ð°ÐºÑ‚ÐµÑ€Ð¸Ñтик выдало ошибку %i: %s" #: src/services/a-rex/grid-manager/log/HeartBeatMetrics.cpp:107 #: src/services/a-rex/grid-manager/log/JobsMetrics.cpp:186 #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:178 msgid "" "gmetric_bin_path empty in arc.conf (should never happen the default value " "should be used)" msgstr "" "Значение gmetric_bin_path пуÑто в arc.conf (никогда не должно ÑлучатьÑÑ, " "должно иÑпользоватьÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ по умолчанию)" #: src/services/a-rex/grid-manager/log/JobLog.cpp:114 msgid ": Accounting records reporter tool is not specified" msgstr ": Ðе указано ÑредÑтво ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ñ‡Ñ‘Ñ‚Ð¾Ð² об учётных запиÑÑÑ…" #: src/services/a-rex/grid-manager/log/JobLog.cpp:130 msgid ": Failure creating slot for accounting reporter child process" msgstr ": Сбой подготовки дочернего процеÑÑа ÑредÑтва ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ñ‡Ñ‘Ñ‚Ð¾Ð²" #: src/services/a-rex/grid-manager/log/JobLog.cpp:143 msgid ": Failure starting accounting reporter child process" msgstr ": Сбой запуÑка дочернего процеÑÑа ÑредÑтва ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ñ‡Ñ‘Ñ‚Ð¾Ð²" #: 
src/services/a-rex/grid-manager/log/JobLog.cpp:176 msgid ": Failure creating accounting database connection" msgstr ": Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ðº базе данных учёта задач" #: src/services/a-rex/grid-manager/log/JobLog.cpp:202 #, c-format msgid ": writing accounting record took %llu ms" msgstr ": запиÑÑŒ учётной запиÑи занÑла %llu mÑ" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:74 #, c-format msgid "Session dir '%s' contains user specific substitutions - skipping it" msgstr "Каталог ÑеÑÑии '%s' Ñодержит пользовательÑкие замены - пропуÑкаетÑÑ" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:86 #, c-format msgid "Sessiondir %s: Free space %f GB" msgstr "Рабочий каталог %s: Свободное проÑтранÑтво %f ГБ" #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:94 msgid "No session directories found in configuration." msgstr "Ðе найдены каталоги ÑеÑÑий в файле наÑтроек." #: src/services/a-rex/grid-manager/log/SpaceMetrics.cpp:125 msgid "No cachedirs found/configured for calculation of free space." msgstr "" "Каталоги кÑша не найдены или не наÑтроены при вычиÑлении Ñвободного " "проÑтранÑтва." #: src/services/a-rex/grid-manager/mail/send_mail.cpp:29 msgid "Failed reading local information" msgstr "Ðе удалоÑÑŒ прочеÑть локальную информацию" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:79 #, c-format msgid "Running mailer command (%s)" msgstr "Выполнение команды раÑÑылки (%s)" #: src/services/a-rex/grid-manager/mail/send_mail.cpp:81 msgid "Failed running mailer" msgstr "Ðе удалоÑÑŒ запуÑтить Ñлужбу раÑÑылки" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:33 #, c-format msgid "%s: Job's helper exited" msgstr "%s: ÐÑÑиÑтент задачи прерван" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:70 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:24 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:56 #, c-format msgid "%s: Failure creating slot for child process" msgstr "%s: Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¾Ð±Ð»Ð°Ñти памÑти Ð´Ð»Ñ Ð´Ð¾Ñ‡ÐµÑ€Ð½ÐµÐ³Ð¾ процеÑÑа" #: src/services/a-rex/grid-manager/run/RunParallel.cpp:119 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:41 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:73 #, c-format msgid "%s: Failure starting child process" msgstr "%s: Сбой при запуÑке дочернего процеÑÑа" #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:30 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:62 #, c-format msgid "%s: Failure creating data storage for child process" msgstr "%s: Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ…Ñ€Ð°Ð½Ð¸Ð»Ð¸Ñ‰Ð° данных Ð´Ð»Ñ Ð´Ð¾Ñ‡ÐµÑ€Ð½ÐµÐ³Ð¾ процеÑÑа" #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:46 #: src/services/a-rex/grid-manager/run/RunRedirected.cpp:78 #, c-format msgid "%s: Failure waiting for child process to finish" msgstr "%s: Сбой Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ Ð´Ð¾Ñ‡ÐµÑ€Ð½ÐµÐ³Ð¾ процеÑÑа" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:46 msgid "[job description input]" msgstr "[ввод опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸]" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:47 msgid "" "Tool for writing the grami file representation of a job description file." msgstr "Утилита Ð´Ð»Ñ Ð¿Ñ€ÐµÐ´ÑÑ‚Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в виде файла grami." 
#: src/services/a-rex/grid-manager/test_write_grami_file.cpp:51 msgid "Name of grami file" msgstr "Ð˜Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð° grami" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:56 msgid "Configuration file to load" msgstr "ИÑпользуемый файл конфигурации" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:57 msgid "arc.conf" msgstr "arc.conf" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:61 msgid "Session directory to use" msgstr "ИÑпользуемый каталог ÑеÑÑии" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:62 msgid "directory" msgstr "каталог" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:78 msgid "No job description file name provided." msgstr "Ðе указан файл Ñ Ð¾Ð¿Ð¸Ñанием задачи." #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:84 #, c-format msgid "Unable to parse job description input: %s" msgstr "Ðевозможно разобрать введённое опиÑание задачи: %s" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:90 msgid "Unable to load ARC configuration file." msgstr "Ðе удалоÑÑŒ загрузить файл конфигурации ARC." #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:107 #, c-format msgid "Unable to write grami file: %s" msgstr "Ðе удалоÑÑŒ запиÑать файл grami: %s" #: src/services/a-rex/grid-manager/test_write_grami_file.cpp:112 #, c-format msgid "Unable to write 'output' file: %s" msgstr "Ðе удалоÑÑŒ запиÑать файл 'output': %s" #: src/services/a-rex/information_collector.cpp:53 #, c-format msgid "Resource information provider: %s" msgstr "Сборщик информации о реÑурÑе: %s" #: src/services/a-rex/information_collector.cpp:56 msgid "Resource information provider failed to start" msgstr "Сбой запуÑка Ñборщика информации о реÑурÑе" #: src/services/a-rex/information_collector.cpp:59 msgid "Resource information provider failed to run" msgstr "Сбой работы Ñборщика информации о реÑурÑе" #: src/services/a-rex/information_collector.cpp:63 #, c-format msgid "" "Resource information provider failed with exit status: %i\n" "%s" msgstr "" "Сбой Ñборщика информации о реÑурÑе Ñ Ð²Ñ‹Ñ…Ð¾Ð´Ð½Ñ‹Ð¼ ÑтатуÑом: %i\n" "%s" #: src/services/a-rex/information_collector.cpp:65 #, c-format msgid "" "Resource information provider log:\n" "%s" msgstr "" "Журнал Ñборщика информации о реÑурÑе:\n" "%s" #: src/services/a-rex/information_collector.cpp:71 msgid "No new informational document assigned" msgstr "Ðе приÑвоено новых информационных документов" #: src/services/a-rex/information_collector.cpp:73 #, c-format msgid "Obtained XML: %s" msgstr "Полученный XML: %s" #: src/services/a-rex/information_collector.cpp:87 msgid "Informational document is empty" msgstr "ПуÑтой информационный документ" #: src/services/a-rex/information_collector.cpp:212 msgid "OptimizedInformationContainer failed to create temporary file" msgstr "OptimizedInformationContainer не Ñмог Ñоздать временный файл" #: src/services/a-rex/information_collector.cpp:215 #, c-format msgid "OptimizedInformationContainer created temporary file: %s" msgstr "OptimizedInformationContainer Ñоздал временный файл: %s" #: src/services/a-rex/information_collector.cpp:221 msgid "" "OptimizedInformationContainer failed to store XML document to temporary file" msgstr "" "OptimizedInformationContainer не Ñмог запиÑать документ XML во временный файл" #: src/services/a-rex/information_collector.cpp:230 msgid "OptimizedInformationContainer failed to parse XML" msgstr "OptimizedInformationContainer не Ñмог разобрать XML" #: src/services/a-rex/information_collector.cpp:242 msgid 
"OptimizedInformationContainer failed to rename temprary file" msgstr "OptimizedInformationContainer не Ñмог переименовать временный файл" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:36 msgid "Default INTERNAL client contructor" msgstr "КонÑтруктор по умолчанию клиента INTERNAL" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:39 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:59 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:81 msgid "Failed to load grid-manager configfile" msgstr "Ðе удалоÑÑŒ подгрузить файл наÑтроек grid-manager" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:44 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:64 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:86 msgid "Failed to set INTERNAL endpoint" msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ входа INTERNAL" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:129 msgid "Failed to identify grid-manager config file" msgstr "Ðе удалоÑÑŒ обнаружить файл наÑтроек grid-manager" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:148 #, c-format msgid "Failed to run configuration parser at %s." msgstr "Сбой запуÑка разборщика файла наÑтроек %s." #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:152 #, c-format msgid "Parser failed with error code %i." msgstr "Сбой разборщика Ñ ÐºÐ¾Ð´Ð¾Ð¼ ошибки %i." #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:158 #, c-format msgid "No pid file is found at '%s'. Probably A-REX is not running." msgstr "Ðе обнаружен файл pid в '%s'. Возможно, A-REX не запущен." #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:173 #, c-format msgid "Failed to load grid-manager config file from %s" msgstr "Ðе удалоÑÑŒ подгрузить файл наÑтроек grid-manager из %s" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:257 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:363 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:396 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:442 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:496 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:548 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:566 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:616 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:646 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:664 #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:682 msgid "INTERNALClient is not initialized" msgstr "Клиент INTERNALClient не запущен" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:447 msgid "Submitting job " msgstr "ЗапуÑк задачи " #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:508 #, c-format msgid "Failed to copy input file: %s to path: %s" msgstr "Сбой ÐºÐ¾Ð¿Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð²Ñ…Ð¾Ð´Ð½Ð¾Ð³Ð¾ файла: %s в размещение: %s" #: src/services/a-rex/internaljobplugin/INTERNALClient.cpp:514 #, c-format msgid "Failed to set permissions on: %s" msgstr "Сбой Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ Ð¿Ñ€Ð°Ð² доÑтупа к %s" #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:51 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:92 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:119 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:145 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:184 #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:246 msgid "Failed to load grid-manager config 
file" msgstr "Ðе удалоÑÑŒ подгрузить файл наÑтроек grid-manager" #: src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp:324 msgid "Retrieving job description of INTERNAL jobs is not supported" msgstr "Получение опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡ INTERNAL не поддерживаетÑÑ" #: src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp:67 #, c-format msgid "Listing localjobs succeeded, %d localjobs found" msgstr "Локальные задачи уÑпешно перечиÑлены, обнаружено %d задач(и)" #: src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp:130 msgid "Failed submitting job description" msgstr "Сбой заÑылки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #: src/services/a-rex/job.cpp:66 #, c-format msgid "Using cached local account '%s'" msgstr "ИÑпользуетÑÑ ÐºÑÑˆÐ¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð°Ñ Ð¼ÐµÑÑ‚Ð½Ð°Ñ ÑƒÑ‡Ñ‘Ñ‚Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ '%s'" #: src/services/a-rex/job.cpp:77 msgid "Will not map to 'root' account by default" msgstr "По умолчанию привÑзки к учётной запиÑи 'root' не будет" #: src/services/a-rex/job.cpp:90 msgid "No local account name specified" msgstr "Ðе указано Ð¸Ð¼Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð¹ учётной запиÑи" #: src/services/a-rex/job.cpp:93 #, c-format msgid "Using local account '%s'" msgstr "ИÑпользуетÑÑ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ ÑƒÑ‡Ñ‘Ñ‚Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ '%s'" #: src/services/a-rex/job.cpp:97 msgid "TLS provides no identity, going for OTokens" msgstr "TLS не передал идентификацию, переход к OTokens" #: src/services/a-rex/job.cpp:155 msgid "Failed to acquire A-REX's configuration" msgstr "Ðе удалоÑÑŒ получить наÑтройки A-REX" #: src/services/a-rex/job.cpp:227 #, c-format msgid "Cannot handle local user %s" msgstr "Ðевозможно обÑлужить локального Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %s" #: src/services/a-rex/job.cpp:275 #, c-format msgid "%s: Failed to parse user policy" msgstr "%s: Сбой при разборе правил допуÑка пользователÑ" #: src/services/a-rex/job.cpp:280 #, c-format msgid "%s: Failed to load evaluator for user policy " msgstr "%s: Ðе удалоÑÑŒ подгрузить анализатор Ð´Ð»Ñ Ð¿Ñ€Ð°Ð²Ð¸Ð» допуÑка пользователей " #: src/services/a-rex/job.cpp:385 #, c-format msgid "%s: Unknown user policy '%s'" msgstr "%s: ÐеизвеÑтное правило допуÑка Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ '%s'" #: src/services/a-rex/job.cpp:707 src/services/a-rex/job.cpp:731 #, c-format msgid "Credential expires at %s" msgstr "Срок дейÑÑ‚Ð²Ð¸Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð¾Ð² доÑтупа иÑтекает в %s" #: src/services/a-rex/job.cpp:709 src/services/a-rex/job.cpp:733 #, c-format msgid "Credential handling exception: %s" msgstr "Прерывание при обработке параметров доÑтупа: %s" #: src/services/a-rex/job.cpp:1031 #, c-format msgid "Out of tries while allocating new job ID in %s" msgstr "ЗакончилиÑÑŒ попытки приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð½Ð¾Ð²Ð¾Ð³Ð¾ Ñрлыка задачи в %s" #: src/services/a-rex/job.cpp:1270 msgid "No non-draining session dirs available" msgstr "Ðет каталогов ÑеÑÑий не в ÑоÑтоÑнии разгрузки" #: src/services/a-rex/put.cpp:150 #, c-format msgid "%s: put file %s: there is no payload" msgstr "%s: запиÑÑŒ файла %s: отÑутÑтвуют полезные файлы" #: src/services/a-rex/put.cpp:156 #, c-format msgid "%s: put file %s: unrecognized payload" msgstr "%s: запиÑÑŒ файла %s: неопознанные полезные файлы" #: src/services/a-rex/put.cpp:172 src/services/a-rex/rest/rest.cpp:1595 #, c-format msgid "%s: put file %s: failed to create file: %s" msgstr "%s: запиÑÑŒ файла %s: Ñбой при Ñоздании файла: %s" #: src/services/a-rex/put.cpp:188 #, c-format msgid "%s: put file %s: %s" msgstr "%s: запиÑÑŒ файла %s: %s" #: src/services/a-rex/put.cpp:210 #, c-format msgid "%s: delete file %s: failed to obtain file path: %s" msgstr 
"%s: удаление файла %s: Ñбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¿ÑƒÑ‚Ð¸ к файлу: %s" #: src/services/a-rex/put.cpp:221 #, c-format msgid "%s: delete file %s: failed to open file/dir: %s" msgstr "%s: удаление файла %s: Ñбой Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð°/каталога: %s" #: src/services/a-rex/rest/rest.cpp:683 #, c-format msgid "REST: process %s at %s" msgstr "REST: обработка %s в %s" #: src/services/a-rex/rest/rest.cpp:726 src/services/a-rex/rest/rest.cpp:742 #: src/services/a-rex/rest/rest.cpp:797 src/services/a-rex/rest/rest.cpp:876 #: src/services/a-rex/rest/rest.cpp:1104 src/services/a-rex/rest/rest.cpp:1696 #, c-format msgid "process: method %s is not supported for subpath %s" msgstr "обработка: метод %s не поддерживаетÑÑ Ð´Ð»Ñ Ñ‡Ð°Ñти пути %s" #: src/services/a-rex/rest/rest.cpp:748 #, c-format msgid "process: schema %s is not supported for subpath %s" msgstr "обработка: Ñхема %s не поддерживаетÑÑ Ð´Ð»Ñ Ñ‡Ð°Ñти пути %s" #: src/services/a-rex/rest/rest.cpp:873 src/services/a-rex/rest/rest.cpp:1101 #, c-format msgid "process: action %s is not supported for subpath %s" msgstr "обработка: дейÑтвие %s не поддерживаетÑÑ Ð´Ð»Ñ Ñ‡Ð°Ñти пути %s" #: src/services/a-rex/rest/rest.cpp:1113 src/services/a-rex/rest/rest.cpp:1182 #: src/services/a-rex/rest/rest.cpp:1542 src/services/a-rex/rest/rest.cpp:1685 #, c-format msgid "REST:GET job %s - %s" msgstr "REST:GET задачи %s - %s" #: src/services/a-rex/rest/rest.cpp:1229 src/services/a-rex/rest/rest.cpp:1237 #, c-format msgid "REST:KILL job %s - %s" msgstr "REST:KILL задачи %s - %s" #: src/services/a-rex/rest/rest.cpp:1254 src/services/a-rex/rest/rest.cpp:1262 #, c-format msgid "REST:CLEAN job %s - %s" msgstr "REST:CLEAN задачи %s - %s" #: src/services/a-rex/rest/rest.cpp:1279 src/services/a-rex/rest/rest.cpp:1287 #: src/services/a-rex/rest/rest.cpp:1304 #, c-format msgid "REST:RESTART job %s - %s" msgstr "REST:RESTART задачи %s - %s" #: src/services/a-rex/rest/rest.cpp:1588 #, c-format msgid "REST:PUT job %s: file %s: there is no payload" msgstr "REST:PUT задачи %s: файл %s: отÑутÑтвует нагрузка" #: src/services/a-rex/rest/rest.cpp:1608 #, c-format msgid "HTTP:PUT %s: put file %s: %s" msgstr "HTTP:PUT %s: запиÑÑŒ файла %s: %s" #: src/services/a-rex/test_cache_check.cpp:24 #: src/tests/count/test_client.cpp:20 #: src/tests/echo/echo_test4axis2c/test_client.cpp:20 #: src/tests/echo/test_client.cpp:21 msgid "Creating client side chain" msgstr "Создание цепи на Ñтороне клиента" #: src/services/a-rex/update_credentials.cpp:29 #, c-format msgid "" "UpdateCredentials: request = \n" "%s" msgstr "" "UpdateCredentials: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" "%s" #: src/services/a-rex/update_credentials.cpp:35 msgid "UpdateCredentials: missing Reference" msgstr "UpdateCredentials: отÑутÑтвует ÑÑылка" #: src/services/a-rex/update_credentials.cpp:43 msgid "UpdateCredentials: wrong number of Reference" msgstr "UpdateCredentials: недопуÑтимое количеÑтво ÑÑылок" #: src/services/a-rex/update_credentials.cpp:51 msgid "UpdateCredentials: wrong number of elements inside Reference" msgstr "UpdateCredentials: недопуÑтимое чиÑло Ñлементов внутри Reference" #: src/services/a-rex/update_credentials.cpp:60 msgid "UpdateCredentials: EPR contains no JobID" msgstr "UpdateCredentials: EPR не Ñодержит JobID" #: src/services/a-rex/update_credentials.cpp:70 #, c-format msgid "UpdateCredentials: no job found: %s" msgstr "UpdateCredentials: задача не обнаружена: %s" #: src/services/a-rex/update_credentials.cpp:77 msgid "UpdateCredentials: failed to update credentials" msgstr "UpdateCredentials: невозможно обновить параметры 
доÑтупа" #: src/services/a-rex/update_credentials.cpp:85 #, c-format msgid "" "UpdateCredentials: response = \n" "%s" msgstr "" "UpdateCredentials: отзыв = \n" "%s" #: src/services/candypond/CandyPond.cpp:52 msgid "No A-REX config file found in candypond configuration" msgstr "Файл наÑтроек A-REX в наÑтройках candypond не обнаружен" #: src/services/candypond/CandyPond.cpp:56 #, c-format msgid "Using A-REX config file %s" msgstr "ИÑпользуетÑÑ Ñ„Ð°Ð¹Ð» наÑтроек A-REX %s" #: src/services/candypond/CandyPond.cpp:60 #, c-format msgid "Failed to process A-REX configuration in %s" msgstr "Ðе удалоÑÑŒ обработать наÑтройки A-REX в %s" #: src/services/candypond/CandyPond.cpp:65 msgid "No caches defined in configuration" msgstr "КÑш не опиÑан в файле наÑтроек" #: src/services/candypond/CandyPond.cpp:150 msgid "Empty filename returned from FileCache" msgstr "FileCache возвратил пуÑтое Ð¸Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð°" #: src/services/candypond/CandyPond.cpp:162 #, c-format msgid "Problem accessing cache file %s: %s" msgstr "Проблема при доÑтупе к кÑшированному файлу %s: %s" #: src/services/candypond/CandyPond.cpp:210 #: src/services/candypond/CandyPond.cpp:474 msgid "No job ID supplied" msgstr "Ðе указан Ñрлык задачи" #: src/services/candypond/CandyPond.cpp:219 #, c-format msgid "Bad number in priority element: %s" msgstr "ÐедопуÑтимый приоритет: %s" #: src/services/candypond/CandyPond.cpp:228 msgid "No username supplied" msgstr "Ðе указано Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ" #: src/services/candypond/CandyPond.cpp:235 #, c-format msgid "Supplied username %s does not match mapped username %s" msgstr "" "Указанное Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %s не Ñовпадает Ñ ÑопоÑтавленным именем " "Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %s" #: src/services/candypond/CandyPond.cpp:249 msgid "No session directory found" msgstr "Ðе найден каталог ÑеÑÑии" #: src/services/candypond/CandyPond.cpp:253 #, c-format msgid "Using session dir %s" msgstr "ИÑпользуетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³ ÑеÑÑии %s" #: src/services/candypond/CandyPond.cpp:257 #, c-format msgid "Failed to stat session dir %s" msgstr "Ðе удалоÑÑŒ проверить ÑоÑтоÑние каталога ÑеÑÑии %s" #: src/services/candypond/CandyPond.cpp:262 #, c-format msgid "Session dir %s is owned by %i, but current mapped user is %i" msgstr "Каталог ÑеÑÑии %s принадлежит %i, но текущий пользователь - %i" #: src/services/candypond/CandyPond.cpp:289 #, c-format msgid "Failed to access proxy of given job id %s at %s" msgstr "Сбой доÑтупа к доверенноÑти указанной задачи %s в %s" #: src/services/candypond/CandyPond.cpp:307 #, c-format msgid "DN is %s" msgstr "DN: %s" #: src/services/candypond/CandyPond.cpp:385 #, c-format msgid "Permission checking passed for url %s" msgstr "Проверка прав доÑтупа пройдена Ð´Ð»Ñ URL %s" #: src/services/candypond/CandyPond.cpp:410 #: src/services/candypond/CandyPondGenerator.cpp:135 #, c-format msgid "Failed to move %s to %s: %s" msgstr "Ðе удалоÑÑŒ перемеÑтить %s в %s: %s" #: src/services/candypond/CandyPond.cpp:441 #, c-format msgid "Starting new DTR for %s" msgstr "ЗапуÑкаетÑÑ Ð½Ð¾Ð²Ñ‹Ð¹ Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR Ð´Ð»Ñ %s" #: src/services/candypond/CandyPond.cpp:443 #, c-format msgid "Failed to start new DTR for %s" msgstr "Ðе удалоÑÑŒ запуÑтить новый Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR Ð´Ð»Ñ %s" #: src/services/candypond/CandyPond.cpp:487 #, c-format msgid "Job %s: all files downloaded successfully" msgstr "Задача %s: вÑе файлы уÑпешно загружены" #: src/services/candypond/CandyPond.cpp:494 #, c-format msgid "Job %s: Some downloads failed" msgstr "Задача %s: Сбой некоторых загрузок" #: src/services/candypond/CandyPond.cpp:499 #, 
c-format msgid "Job %s: files still downloading" msgstr "Задача %s: файлы вÑÑ‘ ещё загружаютÑÑ" #: src/services/candypond/CandyPond.cpp:511 msgid "CandyPond: Unauthorized" msgstr "CandyPond: ДоÑтуп закрыт" #: src/services/candypond/CandyPond.cpp:520 msgid "No local user mapping found" msgstr "Пользователь не припиÑан ни к одному локальному имени" #: src/services/candypond/CandyPond.cpp:527 #: src/services/data-staging/DataDeliveryService.cpp:625 #, c-format msgid "Identity is %s" msgstr "Личные данные: %s" #: src/services/candypond/CandyPond.cpp:585 #: src/services/data-staging/DataDeliveryService.cpp:697 msgid "Security Handlers processing failed" msgstr "Сбой в процеÑÑе обработки прав доÑтупа" #: src/services/candypond/CandyPond.cpp:592 msgid "Only POST is supported in CandyPond" msgstr "CandyPond поддерживает только POST" #: src/services/candypond/CandyPondGenerator.cpp:88 #, c-format msgid "DTR %s finished with state %s" msgstr "DTR %s завершилÑÑ Ð² ÑоÑтоÑнии %s" #: src/services/candypond/CandyPondGenerator.cpp:124 #, c-format msgid "Could not determine session directory from filename %s" msgstr "Ðе удалоÑÑŒ определить каталог ÑеÑÑии из имени файла %s" #: src/services/candypond/CandyPondGenerator.cpp:164 #, c-format msgid "Invalid DTR for source %s, destination %s" msgstr "ÐедопуÑтимый DTR Ð´Ð»Ñ Ð¸Ñточника %s, Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ %s" #: src/services/candypond/CandyPondGenerator.cpp:206 #, c-format msgid "DTRs still running for job %s" msgstr "ЗапроÑÑ‹ DTR Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s вÑÑ‘ ещё иÑполнÑÑŽÑ‚ÑÑ" #: src/services/candypond/CandyPondGenerator.cpp:215 #, c-format msgid "All DTRs finished for job %s" msgstr "Ð’Ñе запроÑÑ‹ DTR Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s завершены" #: src/services/candypond/CandyPondGenerator.cpp:222 #, c-format msgid "Job %s not found" msgstr "Задача %s не обнаружена" #: src/services/data-staging/DataDeliveryService.cpp:58 #, c-format msgid "Archiving DTR %s, state ERROR" msgstr "Ðрхивирование запроÑа DTR %s, ÑоÑтоÑние ERROR" #: src/services/data-staging/DataDeliveryService.cpp:62 #, c-format msgid "Archiving DTR %s, state %s" msgstr "Ðрхивирование запроÑа DTR %s, ÑоÑтоÑние %s" #: src/services/data-staging/DataDeliveryService.cpp:164 msgid "No delegation token in request" msgstr "Ð’ запроÑе отÑутÑтвует токен делегированиÑ" #: src/services/data-staging/DataDeliveryService.cpp:172 msgid "Failed to accept delegation" msgstr "Ðе удалоÑÑŒ принÑть делегирование" #: src/services/data-staging/DataDeliveryService.cpp:201 #: src/services/data-staging/DataDeliveryService.cpp:208 msgid "ErrorDescription" msgstr "ОпиÑание ошибки" #: src/services/data-staging/DataDeliveryService.cpp:213 #, c-format msgid "All %u process slots used" msgstr "Квота на процеÑÑÑ‹ (%u) иÑпользована" #: src/services/data-staging/DataDeliveryService.cpp:228 #, c-format msgid "Received retry for DTR %s still in transfer" msgstr "" "Получена Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° запроÑа DTR %s, вÑÑ‘ ещё в ÑоÑтоÑнии передачи" #: src/services/data-staging/DataDeliveryService.cpp:235 #, c-format msgid "Replacing DTR %s in state %s with new request" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ DTR %s в ÑоÑтоÑнии %s заменÑетÑÑ Ð½Ð¾Ð²Ñ‹Ð¼ запроÑом" #: src/services/data-staging/DataDeliveryService.cpp:245 #, c-format msgid "Storing temp proxy at %s" msgstr "Сохранение временной доверенноÑти в %s" #: src/services/data-staging/DataDeliveryService.cpp:253 #, c-format msgid "Failed to create temp proxy at %s: %s" msgstr "Ðе удалоÑÑŒ Ñоздать временную доверенноÑть в %s: %s" #: src/services/data-staging/DataDeliveryService.cpp:260 #, c-format msgid "Failed 
to change owner of temp proxy at %s to %i:%i: %s" msgstr "Ðе удалоÑÑŒ поменÑть владельца временной доверенноÑти в %s на %i:%i: %s" #: src/services/data-staging/DataDeliveryService.cpp:285 msgid "Invalid DTR" msgstr "ÐедейÑтвительный Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR" #: src/services/data-staging/DataDeliveryService.cpp:289 #, c-format msgid "Failed to remove temporary proxy %s: %s" msgstr "Ðе удалоÑÑŒ удалить временную доверенноÑть %s: %s" #: src/services/data-staging/DataDeliveryService.cpp:390 #, c-format msgid "No such DTR %s" msgstr "Ðет такого запроÑа DTR %s" #: src/services/data-staging/DataDeliveryService.cpp:401 #, c-format msgid "DTR %s failed: %s" msgstr "Сбой запроÑа DTR %s: %s" #: src/services/data-staging/DataDeliveryService.cpp:412 #, c-format msgid "DTR %s finished successfully" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ DTR %s уÑпешно завершён" #: src/services/data-staging/DataDeliveryService.cpp:422 #, c-format msgid "DTR %s still in progress (%lluB transferred)" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ DTR %s ещё в процеÑÑе (передано %lluB)" #: src/services/data-staging/DataDeliveryService.cpp:482 #, c-format msgid "No active DTR %s" msgstr "Ðет активных запроÑов DTR %s" #: src/services/data-staging/DataDeliveryService.cpp:492 #, c-format msgid "DTR %s was already cancelled" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ DTR %s уже был прерван" #: src/services/data-staging/DataDeliveryService.cpp:501 #, c-format msgid "DTR %s could not be cancelled" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ DTR %s не может быть прерван" #: src/services/data-staging/DataDeliveryService.cpp:545 #, c-format msgid "Failed to get load average: %s" msgstr "Сбой вычиÑÐ»ÐµÐ½Ð¸Ñ ÑƒÑреднённой загруженноÑти: %s" #: src/services/data-staging/DataDeliveryService.cpp:569 msgid "Invalid configuration - no allowed IP address specified" msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð½Ð°Ñтройка - не указано ни одного допуÑтимого IP-адреÑа" #: src/services/data-staging/DataDeliveryService.cpp:573 msgid "Invalid configuration - no transfer dirs specified" msgstr "ÐедопуÑÑ‚Ð¸Ð¼Ð°Ñ Ð½Ð°Ñтройка - не указано ни одного каталога Ð´Ð»Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡" #: src/services/data-staging/DataDeliveryService.cpp:584 msgid "Failed to start archival thread" msgstr "Ðе удалоÑÑŒ запуÑтить поток архивированиÑ" #: src/services/data-staging/DataDeliveryService.cpp:609 msgid "Shutting down data delivery service" msgstr "ЗакрываетÑÑ Ñлужба Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…" #: src/services/data-staging/DataDeliveryService.cpp:618 msgid "Unauthorized" msgstr "ДоÑтуп закрыт" #: src/services/data-staging/DataDeliveryService.cpp:704 msgid "Only POST is supported in DataDeliveryService" msgstr "DataDeliveryService поддерживает только POST" #: src/services/examples/echo_python/EchoService.py:12 msgid "EchoService (python) constructor called" msgstr "Вызван Python-конÑтруктор EchoService" #: src/services/examples/echo_python/EchoService.py:17 #, python-format msgid "EchoService (python) has prefix %(prefix)s and suffix %(suffix)s" msgstr "" "EchoService (Python) Ñодержит приÑтавку %(prefix)s и ÑÑƒÑ„Ñ„Ð¸ÐºÑ %(suffix)s" #: src/services/examples/echo_python/EchoService.py:24 msgid "EchoService (python) destructor called" msgstr "Вызван Python-деÑтруктор EchoService" #: src/services/examples/echo_python/EchoService.py:54 msgid "EchoService (python) thread test starting" msgstr "ЗапуÑк теÑта потоков Ñлужбы EchoService (python)" #: src/services/examples/echo_python/EchoService.py:65 #, python-format msgid "EchoService (python) thread test, iteration %(iteration)s %(status)s" msgstr "" "ЗапуÑк теÑта потоков Ñлужбы EchoService (python), Ð¸Ñ‚ÐµÑ€Ð°Ñ†Ð¸Ñ %(iteration)s " 
"%(status)s" #: src/services/examples/echo_python/EchoService.py:82 msgid "EchoService (python) 'Process' called" msgstr "Вызван 'Process' EchoService (Python)" #: src/services/examples/echo_python/EchoService.py:86 #, python-format msgid "inmsg.Auth().Export(arc.SecAttr.ARCAuth) = %s" msgstr "inmsg.Auth().Export(arc.SecAttr.ARCAuth) = %s" #: src/services/examples/echo_python/EchoService.py:87 #, python-format msgid "inmsg.Attributes().getAll() = %s " msgstr "inmsg.Attributes().getAll() = %s " #: src/services/examples/echo_python/EchoService.py:88 #, python-format msgid "EchoService (python) got: %s " msgstr "EchoService (python) получил: %s " #: src/services/examples/echo_python/EchoService.py:93 #, python-format msgid "EchoService (python) request_namespace: %s" msgstr "EchoService (python) request_namespace: %s" #: src/services/examples/echo_python/EchoService.py:99 #: src/services/examples/echo_python/EchoService.py:171 #, python-format msgid "outpayload %s" msgstr "outpayload %s" #: src/services/examples/echo_python/EchoService.py:128 msgid "Calling https://localhost:60000/Echo using ClientSOAP" msgstr "ВызываетÑÑ https://localhost:60000/Echo иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ ClientSOAP" #: src/services/examples/echo_python/EchoService.py:131 msgid "Calling http://localhost:60000/Echo using ClientSOAP" msgstr "ВызываетÑÑ http://localhost:60000/Echo иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ ClientSOAP" #: src/services/examples/echo_python/EchoService.py:137 #: src/services/examples/echo_python/EchoService.py:155 #, python-format msgid "new_payload %s" msgstr "new_payload %s" #: src/services/examples/echo_python/EchoService.py:149 msgid "Calling http://localhost:60000/Echo using httplib" msgstr "ВызываетÑÑ http://localhost:60000/Echo иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ httplib" #: src/services/examples/echo_python/EchoService.py:165 msgid "Start waiting 10 sec..." msgstr "Ждём 10 Ñекунд..." #: src/services/examples/echo_python/EchoService.py:167 msgid "Waiting ends." msgstr "Ожидание завершено." 
#: src/services/gridftpd/auth/auth.cpp:328
#, c-format
msgid "Unknown authorization command %s"
msgstr "Неизвестная команда допуска %s"

#: src/services/gridftpd/auth/auth.cpp:347
#, c-format
msgid ""
"The [vo] section labeled '%s' has no file associated and can't be used for "
"matching"
msgstr ""
"Разделу [vo] с названием '%s' не поставлен в соответствие файл, и он не "
"может быть использован для авторизации"

#: src/services/gridftpd/auth/auth_plugin.cpp:73
#: src/services/gridftpd/auth/unixmap.cpp:217
#, c-format
msgid "Plugin %s failed to run"
msgstr "Подключаемый модуль %s не смог запуститься"

#: src/services/gridftpd/auth/auth_plugin.cpp:75
#: src/services/gridftpd/auth/unixmap.cpp:219
#, c-format
msgid "Plugin %s printed: %u"
msgstr "Подключаемый модуль %s вывел на печать: %u"

#: src/services/gridftpd/auth/auth_plugin.cpp:76
#: src/services/gridftpd/auth/unixmap.cpp:220
#, c-format
msgid "Plugin %s error: %u"
msgstr "Ошибка подключаемого модуля %s: %u"

#: src/services/gridftpd/auth/auth_voms.cpp:28
#, c-format
msgid "VOMS proxy processing returns: %i - %s"
msgstr "Обработка доверенности VOMS выдаёт: %i - %s"

#: src/services/gridftpd/auth/auth_voms.cpp:120
#, c-format
msgid "VOMS trust chains: %s"
msgstr "Цепочки доверия VOMS: %s"

#: src/services/gridftpd/auth/unixmap.cpp:126
msgid "User name mapping has empty command"
msgstr "Пустая команда в присвоении имени пользователя"

#: src/services/gridftpd/auth/unixmap.cpp:154
#, c-format
msgid "User name mapping has empty name: %s"
msgstr "Пустое имя в присвоении имени пользователя: %s"

#: src/services/gridftpd/commands.cpp:46
#, c-format
msgid "response: %s"
msgstr "ответ: %s"

#: src/services/gridftpd/commands.cpp:50
#, c-format
msgid "Send response failed: %s"
msgstr "Сбой отсылки отклика: %s"

#: src/services/gridftpd/commands.cpp:80
msgid "Response sending error"
msgstr "Ошибка отсылки отклика"

#: src/services/gridftpd/commands.cpp:93
msgid "Closed connection"
msgstr "Соединение закрыто"

#: src/services/gridftpd/commands.cpp:131
#, c-format
msgid "Socket conversion failed: %s"
msgstr "Ошибка преобразования сокета: %s"

#: src/services/gridftpd/commands.cpp:141
#, c-format
msgid "Failed to obtain own address: %s"
msgstr "Сбой получения собственного адреса: %s"

#: src/services/gridftpd/commands.cpp:149
#, c-format
msgid "Failed to recognize own address type (IPv4 or IPv6) - %u"
msgstr "Сбой распознавания типа собственного адреса (IPv4 или IPv6) - %u"

#: src/services/gridftpd/commands.cpp:159
#, c-format
msgid "Accepted connection on [%s]:%u"
msgstr "Принято соединение на [%s]:%u"

#: src/services/gridftpd/commands.cpp:161
#, c-format
msgid "Accepted connection on %u.%u.%u.%u:%u"
msgstr "Принято соединение на %u.%u.%u.%u:%u"

#: src/services/gridftpd/commands.cpp:196
msgid "Accept failed"
msgstr "Сбой принятия"

#: src/services/gridftpd/commands.cpp:204
#: src/services/gridftpd/listener.cpp:415
#, c-format
msgid "Accept failed: %s"
msgstr "Сбой принятия: %s"

#: src/services/gridftpd/commands.cpp:219
#, c-format
msgid "Accepted connection from [%s]:%u"
msgstr "Принято соединение с [%s]:%u"

#: src/services/gridftpd/commands.cpp:221
#, c-format
msgid "Accepted connection from %u.%u.%u.%u:%u"
msgstr "Принято соединение с %u.%u.%u.%u:%u"

#: src/services/gridftpd/commands.cpp:230
msgid "Authenticate in commands failed"
msgstr "Сбой проверки подлинности при исполнении инструкций"

#: src/services/gridftpd/commands.cpp:239
msgid "Authentication failure"
msgstr 
"Сбой при проверке подлинноÑти" #: src/services/gridftpd/commands.cpp:247 #, c-format msgid "User subject: %s" msgstr "Субъект Ñертификата: %s" #: src/services/gridftpd/commands.cpp:248 #, c-format msgid "Encrypted: %s" msgstr "Зашифрован: %s" #: src/services/gridftpd/commands.cpp:254 msgid "User has no proper configuration associated" msgstr "Пользователь не аÑÑоциирован Ñ Ð¿Ð¾Ð´Ñ…Ð¾Ð´Ñщей наÑтройкой" #: src/services/gridftpd/commands.cpp:262 msgid "" "User has empty virtual directory tree.\n" "Either user has no authorised plugins or there are no plugins configured at " "all." msgstr "" "Дерево виртуального каталога Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð¿ÑƒÑто.\n" "Либо у Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½ÐµÑ‚ допущенных раÑширений, либо раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð²Ð¾Ð¾Ð±Ñ‰Ðµ не " "наÑтроены." #: src/services/gridftpd/commands.cpp:279 msgid "Read commands in authenticate failed" msgstr "Сбой команд Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð² проверке подлинноÑти" #: src/services/gridftpd/commands.cpp:411 msgid "Control connection (probably) closed" msgstr "Контрольное Ñоединение (наверное) закрыто" #: src/services/gridftpd/commands.cpp:445 #: src/services/gridftpd/commands.cpp:724 msgid "Command EPRT" msgstr "Команда EPRT" #: src/services/gridftpd/commands.cpp:446 #, c-format msgid "Failed to parse remote address %s" msgstr "Сбой разборки удалённого адреÑа %s" #: src/services/gridftpd/commands.cpp:468 #, c-format msgid "Command USER %s" msgstr "Команда USER %s" #: src/services/gridftpd/commands.cpp:475 msgid "Command CDUP" msgstr "Команда CDUP" #: src/services/gridftpd/commands.cpp:481 #, c-format msgid "Command CWD %s" msgstr "Команда CWD %s" #: src/services/gridftpd/commands.cpp:497 #, c-format msgid "Command MKD %s" msgstr "Команда MKD %s" #: src/services/gridftpd/commands.cpp:517 #, c-format msgid "Command SIZE %s" msgstr "Команда SIZE %s" #: src/services/gridftpd/commands.cpp:532 #, c-format msgid "Command SBUF: %i" msgstr "Команда SBUF: %i" #: src/services/gridftpd/commands.cpp:553 #, c-format msgid "Command MLST %s" msgstr "Команда MLST %s" #: src/services/gridftpd/commands.cpp:576 #, c-format msgid "Command DELE %s" msgstr "Команда DELE %s" #: src/services/gridftpd/commands.cpp:591 #, c-format msgid "Command RMD %s" msgstr "Команда RMD %s" #: src/services/gridftpd/commands.cpp:605 #, c-format msgid "Command TYPE %c" msgstr "Команда TYPE %c" #: src/services/gridftpd/commands.cpp:616 #, c-format msgid "Command MODE %c" msgstr "Команда MODE %c" #: src/services/gridftpd/commands.cpp:628 msgid "Command ABOR" msgstr "Команда ABOR" #: src/services/gridftpd/commands.cpp:641 #, c-format msgid "Command REST %s" msgstr "Команда REST %s" #: src/services/gridftpd/commands.cpp:654 #, c-format msgid "Command EPSV %s" msgstr "Команда EPSV %s" #: src/services/gridftpd/commands.cpp:656 msgid "Command SPAS" msgstr "Команда SPAS" #: src/services/gridftpd/commands.cpp:658 msgid "Command PASV" msgstr "Команда PASV" #: src/services/gridftpd/commands.cpp:679 msgid "local_pasv failed" msgstr "Сбой local_pasv" #: src/services/gridftpd/commands.cpp:703 msgid "local_spas failed" msgstr "Сбой local_spas" #: src/services/gridftpd/commands.cpp:726 msgid "Command PORT" msgstr "Команда PORT" #: src/services/gridftpd/commands.cpp:729 msgid "active_data is disabled" msgstr "active_data отключено" #: src/services/gridftpd/commands.cpp:738 msgid "local_port failed" msgstr "Сбой local_port" #: src/services/gridftpd/commands.cpp:751 #, c-format msgid "Command MLSD %s" msgstr "Команда MLSD %s" #: src/services/gridftpd/commands.cpp:753 #, c-format msgid "Command NLST %s" msgstr 
"Команда NLST %s" #: src/services/gridftpd/commands.cpp:755 #, c-format msgid "Command LIST %s" msgstr "Команда LIST %s" #: src/services/gridftpd/commands.cpp:806 #, c-format msgid "Command ERET %s" msgstr "Команда ERET %s" #: src/services/gridftpd/commands.cpp:836 #, c-format msgid "Command RETR %s" msgstr "Команда RETR %s" #: src/services/gridftpd/commands.cpp:865 #, c-format msgid "Command STOR %s" msgstr "Команда STOR %s" #: src/services/gridftpd/commands.cpp:893 #, c-format msgid "Command ALLO %i" msgstr "Команда ALLO %i" #: src/services/gridftpd/commands.cpp:916 msgid "Command OPTS" msgstr "Команда OPTS" #: src/services/gridftpd/commands.cpp:919 msgid "Command OPTS RETR" msgstr "Команда OPTS RETR" #: src/services/gridftpd/commands.cpp:929 #, c-format msgid "Option: %s" msgstr "ОпциÑ: %s" #: src/services/gridftpd/commands.cpp:973 msgid "Command NOOP" msgstr "Команда NOOP" #: src/services/gridftpd/commands.cpp:977 msgid "Command QUIT" msgstr "Команда QUIT" #: src/services/gridftpd/commands.cpp:987 msgid "Failed to close, deleting client" msgstr "Ðе удалоÑÑŒ закрыть, уничтожаетÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚" #: src/services/gridftpd/commands.cpp:1001 #, c-format msgid "Command DCAU: %i '%s'" msgstr "Команда DCAU: %i '%s'" #: src/services/gridftpd/commands.cpp:1029 #, c-format msgid "Command PBZS: %s" msgstr "Команда PBZS: %s" #: src/services/gridftpd/commands.cpp:1037 #, c-format msgid "Setting pbsz to %lu" msgstr "ПоÑылаетÑÑ pbsz на %lu" #: src/services/gridftpd/commands.cpp:1053 #, c-format msgid "Command PROT: %s" msgstr "Команда PROT: %s" #: src/services/gridftpd/commands.cpp:1078 #, c-format msgid "Command MDTM %s" msgstr "Команда MDTM %s" #: src/services/gridftpd/commands.cpp:1100 #, c-format msgid "Raw command: %s" msgstr "ÐÐµÐ¾Ð±Ñ€Ð°Ð±Ð¾Ñ‚Ð°Ð½Ð½Ð°Ñ Ð¸Ð½ÑтрукциÑ: %s" #: src/services/gridftpd/commands.cpp:1148 msgid "Failed to allocate memory for buffer" msgstr "Ðе удалоÑÑŒ зарезервировать памÑть под буфер" #: src/services/gridftpd/commands.cpp:1155 #, c-format msgid "Allocated %u buffers %llu bytes each." msgstr "Выделено %u буферов по %llu байт каждый." 
#: src/services/gridftpd/commands.cpp:1162 msgid "abort_callback: start" msgstr "abort_callback: запуÑк" #: src/services/gridftpd/commands.cpp:1165 #, c-format msgid "abort_callback: Globus error: %s" msgstr "abort_callback: ошибка Globus: %s" #: src/services/gridftpd/commands.cpp:1179 msgid "make_abort: start" msgstr "make_abort: запуÑк" #: src/services/gridftpd/commands.cpp:1191 msgid "Failed to abort data connection - ignoring and recovering" msgstr "" "Ðе удалоÑÑŒ оборвать Ñоединение Ð´Ð»Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ… - игнорируем и воÑÑтанавливаемÑÑ" #: src/services/gridftpd/commands.cpp:1199 msgid "make_abort: wait for abort flag to be reset" msgstr "make_abort: ожидание ÑброÑа Ñемафора прерываниÑ" #: src/services/gridftpd/commands.cpp:1209 msgid "make_abort: leaving" msgstr "make_abort: выход" #: src/services/gridftpd/commands.cpp:1224 msgid "check_abort: have Globus error" msgstr "check_abort: получена ошибка Globus" #: src/services/gridftpd/commands.cpp:1225 msgid "Abort request caused by transfer error" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° прерывание по причине ошибки передачи" #: src/services/gridftpd/commands.cpp:1228 msgid "check_abort: sending 426" msgstr "check_abort: поÑылаетÑÑ 426" #: src/services/gridftpd/commands.cpp:1249 msgid "Abort request caused by error in transfer function" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° прерывание по причине ошибки в функции передачи" #: src/services/gridftpd/commands.cpp:1331 msgid "Failed to start timer thread - timeout won't work" msgstr "" "Ðе удалоÑÑŒ запуÑтить поток таймера - прерывание по времени не будет работать" #: src/services/gridftpd/commands.cpp:1383 msgid "Killing connection due to timeout" msgstr "Прерывание ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ð² ÑвÑзи Ñ Ð¸Ñтёкшим лимитом времени" #: src/services/gridftpd/conf/conf_vo.cpp:22 #: src/services/gridftpd/conf/conf_vo.cpp:48 msgid "Configuration section [userlist] is missing name." msgstr "Раздел наÑтроек [userlist] не Ñодержит названиÑ." 
#: src/services/gridftpd/conf/daemon.cpp:58 #: src/services/gridftpd/conf/daemon.cpp:138 #, c-format msgid "No such user: %s" msgstr "Ðет такого пользователÑ: %s" #: src/services/gridftpd/conf/daemon.cpp:70 #: src/services/gridftpd/conf/daemon.cpp:150 #, c-format msgid "No such group: %s" msgstr "Ðет такой группы: %s" #: src/services/gridftpd/conf/daemon.cpp:83 #: src/services/gridftpd/conf/daemon.cpp:163 #, c-format msgid "Improper debug level '%s'" msgstr "ÐедопуÑтимый уровень отладки '%s'" #: src/services/gridftpd/conf/daemon.cpp:120 msgid "Missing option for command logreopen" msgstr "ОтÑутÑÑ‚Ð²ÑƒÑŽÑ‰Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð´Ð»Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ñ‹ logreopen" #: src/services/gridftpd/conf/daemon.cpp:125 msgid "Wrong option in logreopen" msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð´Ð»Ñ logreopen" #: src/services/gridftpd/conf/daemon.cpp:209 #, c-format msgid "Failed to open log file %s" msgstr "Ðе удалоÑÑŒ открыть журнальный файл %s" #: src/services/gridftpd/datalist.cpp:101 msgid "Closing channel (list)" msgstr "ЗакрываетÑÑ ÐºÐ°Ð½Ð°Ð» (перечиÑление)" #: src/services/gridftpd/datalist.cpp:157 msgid "Data channel connected (list)" msgstr "Канал передачи данных подÑоединён (перечиÑление)" #: src/services/gridftpd/dataread.cpp:24 msgid "data_connect_retrieve_callback" msgstr "data_connect_retrieve_callback" #: src/services/gridftpd/dataread.cpp:30 msgid "Data channel connected (retrieve)" msgstr "Канал передачи данных подÑоединён (получение)" #: src/services/gridftpd/dataread.cpp:37 msgid "data_connect_retrieve_callback: allocate_data_buffer" msgstr "data_connect_retrieve_callback: allocate_data_buffer" #: src/services/gridftpd/dataread.cpp:40 msgid "data_connect_retrieve_callback: allocate_data_buffer failed" msgstr "data_connect_retrieve_callback: Ñбой в allocate_data_buffer" #: src/services/gridftpd/dataread.cpp:48 #, c-format msgid "data_connect_retrieve_callback: check for buffer %u" msgstr "data_connect_retrieve_callback: проверка буфера %u" #: src/services/gridftpd/dataread.cpp:61 src/services/gridftpd/dataread.cpp:158 #, c-format msgid "Closing channel (retrieve) due to local read error: %s" msgstr "ЗакрываетÑÑ ÐºÐ°Ð½Ð°Ð» (загрузки) в ÑвÑзи Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð¹ ошибкой: %s" #: src/services/gridftpd/dataread.cpp:75 src/services/gridftpd/dataread.cpp:172 msgid "Buffer registration failed" msgstr "Сбой региÑтрации буфера" #: src/services/gridftpd/dataread.cpp:88 msgid "data_retrieve_callback" msgstr "data_retrieve_callback" #: src/services/gridftpd/dataread.cpp:96 #, c-format msgid "Data channel (retrieve) %i %i %i" msgstr "Канал передачи данных (получение) %i %i %i" #: src/services/gridftpd/dataread.cpp:104 msgid "Closing channel (retrieve)" msgstr "Канал закрываетÑÑ (получение)" #: src/services/gridftpd/dataread.cpp:110 #: src/services/gridftpd/datawrite.cpp:128 #, c-format msgid "Time spent waiting for network: %.3f ms" msgstr "ВремÑ, проведённое в ожидании ÑвÑзи: %.3f мÑ" #: src/services/gridftpd/dataread.cpp:111 #: src/services/gridftpd/datawrite.cpp:129 #, c-format msgid "Time spent waiting for disc: %.3f ms" msgstr "ВремÑ, проведённое в ожидании диÑка: %.3f мÑ" #: src/services/gridftpd/dataread.cpp:122 msgid "data_retrieve_callback: lost buffer" msgstr "data_retrieve_callback: буфер потерÑн" #: src/services/gridftpd/datawrite.cpp:24 msgid "data_connect_store_callback" msgstr "data_connect_store_callback" #: src/services/gridftpd/datawrite.cpp:30 msgid "Data channel connected (store)" msgstr "Канал передачи данных подÑоединён (запиÑÑŒ)" #: src/services/gridftpd/datawrite.cpp:57 msgid "Failed to 
register any buffer" msgstr "Ðе удалоÑÑŒ зарегиÑтрировать ни одного буфера" #: src/services/gridftpd/datawrite.cpp:76 #, c-format msgid "Data channel (store) %i %i %i" msgstr "Канал передачи данных (запиÑÑŒ) %i %i %i" #: src/services/gridftpd/datawrite.cpp:89 msgid "data_store_callback: lost buffer" msgstr "data_store_callback: буфер потерÑн" #: src/services/gridftpd/datawrite.cpp:105 #, c-format msgid "Closing channel (store) due to error: %s" msgstr "Прерывание канала (запиÑÑŒ) в ÑвÑзи Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ¾Ð¹: %s" #: src/services/gridftpd/datawrite.cpp:115 msgid "Closing channel (store)" msgstr "ЗакрываетÑÑ ÐºÐ°Ð½Ð°Ð» (запиÑÑŒ)" #: src/services/gridftpd/fileplugin/fileplugin.cpp:55 msgid "Can't parse access rights in configuration line" msgstr "Ðе удалоÑÑŒ разобрать права доÑтупа в Ñтроке наÑтроек" #: src/services/gridftpd/fileplugin/fileplugin.cpp:61 msgid "Can't parse user:group in configuration line" msgstr "Ðе удалоÑÑŒ разобрать user:group в Ñтроке наÑтроек" #: src/services/gridftpd/fileplugin/fileplugin.cpp:68 msgid "Can't recognize user in configuration line" msgstr "Ðе удалоÑÑŒ определить Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð² Ñтроке наÑтроек" #: src/services/gridftpd/fileplugin/fileplugin.cpp:77 msgid "Can't recognize group in configuration line" msgstr "Ðе удалоÑÑŒ определить группу в Ñтроке наÑтроек" #: src/services/gridftpd/fileplugin/fileplugin.cpp:84 #: src/services/gridftpd/fileplugin/fileplugin.cpp:89 msgid "Can't parse or:and in configuration line" msgstr "Ðе удалоÑÑŒ разобрать or:and в Ñтроке наÑтроек" #: src/services/gridftpd/fileplugin/fileplugin.cpp:116 msgid "Can't parse configuration line" msgstr "Ðе удалоÑÑŒ разобрать Ñтроку наÑтроек" #: src/services/gridftpd/fileplugin/fileplugin.cpp:120 #, c-format msgid "Bad directory name: %s" msgstr "Ðеверное Ð¸Ð¼Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð°: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:137 msgid "Can't parse create arguments in configuration line" msgstr "Ðе удалоÑÑŒ обработать аргументы create в файле конфигурации" #: src/services/gridftpd/fileplugin/fileplugin.cpp:146 msgid "Can't parse mkdir arguments in configuration line" msgstr "Ðе удалоÑÑŒ обработать аргументы mkdir в файле конфигурации" #: src/services/gridftpd/fileplugin/fileplugin.cpp:163 #, c-format msgid "Bad subcommand in configuration line: %s" msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¸Ð½ÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ Ð² Ñтроке наÑтроек: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:175 msgid "Bad mount directory specified" msgstr "Указан неподходÑщий каталог Ð´Ð»Ñ Ð¼Ð¾Ð½Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ" #: src/services/gridftpd/fileplugin/fileplugin.cpp:177 #, c-format msgid "Mount point %s" msgstr "Точка Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡ÐµÐ½Ð¸Ñ %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:215 #: src/services/gridftpd/fileplugin/fileplugin.cpp:274 #, c-format msgid "mkdir failed: %s" msgstr "Ñбой mkdir: %s" #: src/services/gridftpd/fileplugin/fileplugin.cpp:226 #, c-format msgid "Warning: mount point %s creation failed." msgstr "Предупреждение: не удалоÑÑŒ Ñоздать точку Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡ÐµÐ½Ð¸Ñ %s." 
#: src/services/gridftpd/fileplugin/fileplugin.cpp:330
#, c-format
msgid "plugin: open: %s"
msgstr "подключаемый модуль: открытие: %s"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:378
#: src/services/gridftpd/fileplugin/fileplugin.cpp:415
msgid "Not enough space to store file"
msgstr "Недостаточно свободного места для записи файла"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:430
#, c-format
msgid "open: changing owner for %s, %i, %i"
msgstr "открытие: смена владельца для %s, %i, %i"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:437
#, c-format
msgid "open: owner: %i %i"
msgstr "открытие: владелец: %i %i"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:446
#: src/services/gridftpd/fileplugin/fileplugin.cpp:486
#, c-format
msgid "Unknown open mode %s"
msgstr "Неизвестный режим открытия %s"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:451
msgid "plugin: close"
msgstr "подключаемый модуль: закрытие"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:492
msgid "plugin: read"
msgstr "подключаемый модуль: чтение"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:498
msgid "Error while reading file"
msgstr "Ошибка чтения файла"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:508
msgid "plugin: write"
msgstr "подключаемый модуль: запись"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:519
msgid "Zero bytes written to file"
msgstr "В файл записано ноль байтов"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:727
#, c-format
msgid "plugin: checkdir: %s"
msgstr "подключаемый модуль: проверка каталога: %s"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:730
#, c-format
msgid "plugin: checkdir: access: %s"
msgstr "подключаемый модуль: проверка каталога: доступ: %s"

#: src/services/gridftpd/fileplugin/fileplugin.cpp:739
#, c-format
msgid "plugin: checkdir: access: allowed: %s"
msgstr "подключаемый модуль: проверка каталога: доступ: открыт: %s"

#: src/services/gridftpd/fileroot.cpp:14
#, c-format
msgid "No plugin is configured or authorised for requested path %s"
msgstr ""
"Отсутствуют настроенные или допущенные расширения по заданному адресу %s"

#: src/services/gridftpd/fileroot.cpp:19
msgid "FilePlugin: more unload than load"
msgstr "FilePlugin: разгрузок больше, чем загрузок"

#: src/services/gridftpd/fileroot.cpp:34
#, c-format
msgid "Can't load plugin %s for access point %s"
msgstr "Невозможно загрузить подключаемый модуль %s для точки доступа %s"

#: src/services/gridftpd/fileroot.cpp:39 src/services/gridftpd/fileroot.cpp:43
#, c-format
msgid "Plugin %s for access point %s is broken."
msgstr "Расширение %s для точки доступа %s неисправно."

#: src/services/gridftpd/fileroot.cpp:47
#, c-format
msgid "Plugin %s for access point %s acquire failed (should never happen)."
msgstr ""
"Расширение %s для точки доступа %s недоступно (никогда не должно случаться)."
#: src/services/gridftpd/fileroot.cpp:54 #, c-format msgid "Destructor with dlclose (%s)" msgstr "ДеÑтруктор Ñ dlclose (%s)" #: src/services/gridftpd/fileroot.cpp:77 #, c-format msgid "FileNode: operator= (%s <- %s) %lu <- %lu" msgstr "FileNode: operator= (%s <- %s) %lu <- %lu" #: src/services/gridftpd/fileroot.cpp:80 msgid "Copying with dlclose" msgstr "Копирование Ñ dlclose" #: src/services/gridftpd/fileroot_config.cpp:31 #: src/services/gridftpd/fileroot_config.cpp:405 msgid "configuration file not found" msgstr "файл наÑтроек не найден" #: src/services/gridftpd/fileroot_config.cpp:54 msgid "Wrong port number in configuration" msgstr "Ðеприемлемый номер порта в наÑтройках" #: src/services/gridftpd/fileroot_config.cpp:63 msgid "Wrong maxconnections number in configuration" msgstr "Ðеприемлемое значение maxconnections в наÑтройках" #: src/services/gridftpd/fileroot_config.cpp:72 msgid "Wrong defaultbuffer number in configuration" msgstr "Ðеприемлемое значение defaultbuffer в наÑтройках" #: src/services/gridftpd/fileroot_config.cpp:81 msgid "Wrong maxbuffer number in configuration" msgstr "Ðеприемлемое значение maxbuffer в наÑтройках" #: src/services/gridftpd/fileroot_config.cpp:113 #: src/services/gridftpd/fileroot_config.cpp:121 #, c-format msgid "Can't resolve host %s" msgstr "Ðе удалоÑÑŒ найти Ñервер %s" #: src/services/gridftpd/fileroot_config.cpp:173 msgid "Could not determine hostname from gethostname()" msgstr "Ðевозможно определить Ð¸Ð¼Ñ ÑƒÐ·Ð»Ð° иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ gethostname()" #: src/services/gridftpd/fileroot_config.cpp:190 msgid "unnamed group" msgstr "группа без имени" #: src/services/gridftpd/fileroot_config.cpp:199 msgid "undefined plugin name" msgstr "неизвеÑтное Ð¸Ð¼Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ð¾Ð³Ð¾ модулÑ" #: src/services/gridftpd/fileroot_config.cpp:203 msgid "undefined virtual plugin path" msgstr "не задан путь к виртуальному раÑширению" #: src/services/gridftpd/fileroot_config.cpp:208 #, c-format msgid "bad directory for plugin: %s" msgstr "неверный каталог Ð´Ð»Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ð¾Ð³Ð¾ модулÑ: %s" #: src/services/gridftpd/fileroot_config.cpp:220 #, c-format msgid "Already have directory: %s" msgstr "Каталог %s уже ÑущеÑтвует" #: src/services/gridftpd/fileroot_config.cpp:223 #, c-format msgid "Registering directory: %s with plugin: %s" msgstr "РегиÑтрируетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³: %s Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ñ‹Ð¼ модулем: %s" #: src/services/gridftpd/fileroot_config.cpp:236 #, c-format msgid "file node creation failed: %s" msgstr "Ñбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑƒÐ·Ð»Ð° файла: %s" #: src/services/gridftpd/fileroot_config.cpp:286 #, c-format msgid "improper attribute for allowencryption command: %s" msgstr "недопуÑтимый атрибут команды allowencryption: %s" #: src/services/gridftpd/fileroot_config.cpp:300 #, c-format msgid "improper attribute for allowactvedata command: %s" msgstr "недопуÑтимый атрибут команды allowactvedata: %s" #: src/services/gridftpd/fileroot_config.cpp:314 #, c-format msgid "failed while processing configuration command: %s %s" msgstr "Ñбой при обработке команды наÑтройки: %s %s" #: src/services/gridftpd/fileroot_config.cpp:339 #, c-format msgid "Failed processing authorization group %s" msgstr "Ðе удалоÑÑŒ обработать группу допуÑка %s" #: src/services/gridftpd/fileroot_config.cpp:352 msgid "Missing authgroup name in allowaccess" msgstr "ОтÑутÑтвует название authgroup в allowaccess" #: src/services/gridftpd/fileroot_config.cpp:369 msgid "Missing authgroup name in denyaccess" msgstr "ОтÑутÑтвует название authgroup в denyaccess" #: 
src/services/gridftpd/fileroot_config.cpp:419 msgid "failed to process client identification" msgstr "Ðе удалоÑÑŒ обработать личные данные клиента" #: src/services/gridftpd/fileroot_config.cpp:426 msgid "failed to identify plugins path" msgstr "Ñбой Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ Ð¿ÑƒÑ‚Ð¸ к модулÑм" #: src/services/gridftpd/fileroot_config.cpp:453 #, c-format msgid "Registering dummy directory: %s" msgstr "РегиÑтрируетÑÑ Ð²Ñпомогательный каталог: %s" #: src/services/gridftpd/listener.cpp:57 src/services/gridftpd/listener.cpp:466 msgid "Activation failed" msgstr "Ошибка активации" #: src/services/gridftpd/listener.cpp:66 src/services/gridftpd/listener.cpp:172 msgid "Child exited" msgstr "Потомок завершил работу" #: src/services/gridftpd/listener.cpp:78 msgid "Globus connection error" msgstr "Ошибка ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Globus" #: src/services/gridftpd/listener.cpp:80 src/services/gridftpd/listener.cpp:424 msgid "New connection" msgstr "Ðовое Ñоединение" #: src/services/gridftpd/listener.cpp:87 msgid "Server stopped" msgstr "Сервер оÑтановлен" #: src/services/gridftpd/listener.cpp:157 msgid "Error: failed to set handler for SIGTERM" msgstr "Ошибка: не удалоÑÑŒ уÑтановить обработчик SIGTERM" #: src/services/gridftpd/listener.cpp:161 msgid "Starting controlled process" msgstr "ЗапуÑкаетÑÑ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»Ð¸Ñ€ÑƒÐµÐ¼Ñ‹Ð¹ процеÑÑ" #: src/services/gridftpd/listener.cpp:164 msgid "fork failed" msgstr "ошибка при выполнении ÑиÑтемного вызова fork" #: src/services/gridftpd/listener.cpp:169 msgid "wait failed - killing child" msgstr "ошибка Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ - прерывание процеÑÑа-потомка" #: src/services/gridftpd/listener.cpp:174 msgid "Killed with signal: " msgstr "Прерван Ñигналом: " #: src/services/gridftpd/listener.cpp:176 msgid "Restarting after segmentation violation." msgstr "ПерезапуÑк поÑле Ð½Ð°Ñ€ÑƒÑˆÐµÐ½Ð¸Ñ Ñегментации." 
#: src/services/gridftpd/listener.cpp:177 msgid "Waiting 1 minute" msgstr "Ожидание: 1 минута" #: src/services/gridftpd/listener.cpp:239 msgid "Error: failed to set handler for SIGCHLD" msgstr "Ошибка: не удалоÑÑŒ уÑтановить обработчик SIGCHLD" #: src/services/gridftpd/listener.cpp:256 msgid "Missing argument" msgstr "ОтÑутÑтвует аргумент" #: src/services/gridftpd/listener.cpp:257 msgid "Unknown option" msgstr "ÐеизвеÑтный параметр" #: src/services/gridftpd/listener.cpp:264 msgid "Wrong port number" msgstr "ÐедопуÑтимый номер порта" #: src/services/gridftpd/listener.cpp:274 msgid "Wrong number of connections" msgstr "ÐедопуÑтимое количеÑтво подключений" #: src/services/gridftpd/listener.cpp:281 msgid "Wrong buffer size" msgstr "ÐедопуÑтимый размер буфера" #: src/services/gridftpd/listener.cpp:288 msgid "Wrong maximal buffer size" msgstr "ÐедопуÑтимый макÑимальный размер буфера" #: src/services/gridftpd/listener.cpp:300 msgid "Failed reading configuration" msgstr "Сбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° наÑтроек" #: src/services/gridftpd/listener.cpp:331 #, c-format msgid "Failed to obtain local address: %s" msgstr "Ðе удалоÑÑŒ получить локальный адреÑ: %s" #: src/services/gridftpd/listener.cpp:338 #, c-format msgid "Failed to create socket(%s): %s" msgstr "Ðе удалоÑÑŒ Ñоздать Ñокет (%s): %s" #: src/services/gridftpd/listener.cpp:352 #, c-format msgid "Failed to limit socket to IPv6: %s" msgstr "Ðе удалоÑÑŒ ограничить Ñокет до IPv6: %s" #: src/services/gridftpd/listener.cpp:359 #, c-format msgid "Failed to bind socket(%s): %s" msgstr "Ðе удалоÑÑŒ ÑвÑзать Ñокет (%s): %s" #: src/services/gridftpd/listener.cpp:364 #, c-format msgid "Failed to listen on socket(%s): %s" msgstr "Ðе удалоÑÑŒ проÑлушать Ñокет (%s): %s" #: src/services/gridftpd/listener.cpp:371 msgid "Not listening to anything" msgstr "Ðичего не проÑлушиваетÑÑ" #: src/services/gridftpd/listener.cpp:374 #, c-format msgid "Some addresses failed. Listening on %u of %u." msgstr "Ðекоторые адреÑа недоÑтупны. ПроÑлушиваетÑÑ %u из %u." 
#: src/services/gridftpd/listener.cpp:382 #: src/services/gridftpd/listener.cpp:477 msgid "Listen started" msgstr "ПроÑлушивание началоÑÑŒ" #: src/services/gridftpd/listener.cpp:395 msgid "No valid handles left for listening" msgstr "Ðе оÑталоÑÑŒ допуÑтимых деÑкрипторов Ð´Ð»Ñ Ð¿Ñ€Ð¾ÑлушиваниÑ" #: src/services/gridftpd/listener.cpp:401 #, c-format msgid "Select failed: %s" msgstr "Выбор не удалÑÑ: %s" #: src/services/gridftpd/listener.cpp:422 #, c-format msgid "Have connections: %i, max: %i" msgstr "СущеÑтвующих Ñоединений: %i, макÑимально: %i" #: src/services/gridftpd/listener.cpp:427 #, c-format msgid "Fork failed: %s" msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ñ‡ÐµÑ€Ð½ÐµÐ³Ð¾ процеÑа: %s" #: src/services/gridftpd/listener.cpp:445 msgid "Refusing connection: Connection limit exceeded" msgstr "Отказано в Ñоединении: Превышен предел Ñоединений" #: src/services/gridftpd/listener.cpp:471 msgid "Init failed" msgstr "Сбой инициализации" #: src/services/gridftpd/listener.cpp:474 msgid "Listen failed" msgstr "Сбой проÑлушиваниÑ" #: src/services/gridftpd/listener.cpp:488 msgid "Listen finished" msgstr "ПроÑлушивание завершено" #: src/services/gridftpd/listener.cpp:493 msgid "Stopping server" msgstr "ОÑтанавливаетÑÑ Ñервер" #: src/services/gridftpd/listener.cpp:497 msgid "Destroying handle" msgstr "ОпиÑатель уничтожаетÑÑ" #: src/services/gridftpd/listener.cpp:500 msgid "Deactivating modules" msgstr "Выгрузка модулей" #: src/services/gridftpd/listener.cpp:508 msgid "Exiting" msgstr "ЗавершаетÑÑ" #: src/services/gridftpd/misc/ldapquery.cpp:253 #, c-format msgid "%s: %s:%i" msgstr "%s: %s:%i" #: src/services/gridftpd/misc/ldapquery.cpp:390 #: src/services/gridftpd/misc/ldapquery.cpp:467 #, c-format msgid "%s %s" msgstr "%s %s" #: src/services/gridftpd/misc/ldapquery.cpp:394 #, c-format msgid " %s: %s" msgstr " %s: %s" #: src/services/gridftpd/misc/ldapquery.cpp:396 #, c-format msgid " %s:" msgstr " %s:" #: src/services/gridftpd/userspec.cpp:83 src/services/gridftpd/userspec.cpp:133 msgid "No proxy provided" msgstr "ОтÑутÑтвует доверенноÑть" #: src/services/gridftpd/userspec.cpp:85 #, c-format msgid "Proxy/credentials stored at %s" msgstr "ДоверенноÑть/параметры доÑтупа Ñохранены в %s" #: src/services/gridftpd/userspec.cpp:91 src/services/gridftpd/userspec.cpp:141 msgid "Running user has no name" msgstr "Текущий пользователь не имеет имени" #: src/services/gridftpd/userspec.cpp:94 src/services/gridftpd/userspec.cpp:144 #, c-format msgid "Mapped to running user: %s" msgstr "ПривÑзка к текущему пользователю: %s" #: src/services/gridftpd/userspec.cpp:104 #: src/services/gridftpd/userspec.cpp:154 #, c-format msgid "Mapped to local id: %i" msgstr "ПривÑзка к локальному идентификатору: %i" #: src/services/gridftpd/userspec.cpp:109 #: src/services/gridftpd/userspec.cpp:159 #, c-format msgid "No group %i for mapped user" msgstr "Группа %i Ð´Ð»Ñ Ð¿Ñ€Ð¸Ð²Ñзанного Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð¾Ñ‚ÑутÑтвует" #: src/services/gridftpd/userspec.cpp:113 #: src/services/gridftpd/userspec.cpp:163 #, c-format msgid "Mapped to local group id: %i" msgstr "ПривÑзка к локальной группе Ñ Ð¸Ð´ÐµÐ½Ñ‚Ð¸Ñ„Ð¸ÐºÐ°Ñ‚Ð¾Ñ€Ð¾Ð¼: %i" #: src/services/gridftpd/userspec.cpp:114 #: src/services/gridftpd/userspec.cpp:164 #, c-format msgid "Mapped to local group name: %s" msgstr "ПривÑзка к локальной группе Ñ Ð¸Ð¼ÐµÐ½ÐµÐ¼: %s" #: src/services/gridftpd/userspec.cpp:115 #: src/services/gridftpd/userspec.cpp:165 #, c-format msgid "Mapped user's home: %s" msgstr "Домашний каталог привÑзанного пользователÑ: %s" #: src/services/gridftpd/userspec.cpp:135 #, 
c-format msgid "Proxy stored at %s" msgstr "ДоверенноÑть запиÑана в %s" #: src/services/gridftpd/userspec.cpp:195 #, c-format msgid "Undefined control sequence: %%%s" msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ ÑƒÐ¿Ñ€Ð°Ð²Ð»ÑÑŽÑ‰Ð°Ñ Ð¿Ð¾ÑледовательноÑть: %%%s" #: src/services/gridftpd/userspec.cpp:218 #, c-format msgid "Local user %s does not exist" msgstr "Локальный пользователь %s не ÑущеÑтвует" #: src/services/gridftpd/userspec.cpp:227 #, c-format msgid "Local group %s does not exist" msgstr "Ð›Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ Ð³Ñ€ÑƒÐ¿Ð¿Ð° %s не ÑущеÑтвует" #: src/services/gridftpd/userspec.cpp:232 #, c-format msgid "Remapped to local user: %s" msgstr "ПерепривÑзка к локальному пользователю: %s" #: src/services/gridftpd/userspec.cpp:233 #, c-format msgid "Remapped to local id: %i" msgstr "ПерепривÑзка к локальному идентификатору: %i" #: src/services/gridftpd/userspec.cpp:234 #, c-format msgid "Remapped to local group id: %i" msgstr "ПерепривÑзка к локальной группе Ñ Ð¸Ð´ÐµÐ½Ñ‚Ð¸Ñ„Ð¸ÐºÐ°Ñ‚Ð¾Ñ€Ð¾Ð¼: %i" #: src/services/gridftpd/userspec.cpp:235 #, c-format msgid "Remapped to local group name: %s" msgstr "ПерепривÑзка к локальной группе Ñ Ð¸Ð¼ÐµÐ½ÐµÐ¼: %s" #: src/services/gridftpd/userspec.cpp:236 #, c-format msgid "Remapped user's home: %s" msgstr "Домашний каталог перепривÑзанного пользователÑ: %s" #: src/services/wrappers/python/pythonwrapper.cpp:103 #, c-format msgid "Loading %u-th Python service" msgstr "ЗагружаетÑÑ %u-Ñ Ñлужба Python" #: src/services/wrappers/python/pythonwrapper.cpp:107 #, c-format msgid "Initialized %u-th Python service" msgstr "Запущена %u-Ñ Ñлужба Python" #: src/services/wrappers/python/pythonwrapper.cpp:142 msgid "Invalid class name" msgstr "Ðеверное название клаÑÑа" #: src/services/wrappers/python/pythonwrapper.cpp:147 #, c-format msgid "class name: %s" msgstr "название клаÑÑа: %s" #: src/services/wrappers/python/pythonwrapper.cpp:148 #, c-format msgid "module name: %s" msgstr "название модулÑ: %s" #: src/services/wrappers/python/pythonwrapper.cpp:205 msgid "Cannot find ARC Config class" msgstr "Ðе удалоÑÑŒ обнаружить клаÑÑ ARC Config" #: src/services/wrappers/python/pythonwrapper.cpp:212 msgid "Config class is not an object" msgstr "КлаÑÑ Config не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼" #: src/services/wrappers/python/pythonwrapper.cpp:220 msgid "Cannot get dictionary of module" msgstr "Ошибка доÑтупа к Ñловарю модулÑ" #: src/services/wrappers/python/pythonwrapper.cpp:229 msgid "Cannot find service class" msgstr "Ðе удалоÑÑŒ найти клаÑÑ ÑервиÑа" #: src/services/wrappers/python/pythonwrapper.cpp:238 msgid "Cannot create config argument" msgstr "Ðе удалоÑÑŒ Ñоздать аргумент наÑтроек" #: src/services/wrappers/python/pythonwrapper.cpp:245 msgid "Cannot convert config to Python object" msgstr "Ðе удалоÑÑŒ преобразовать наÑтройки в объект Python" #: src/services/wrappers/python/pythonwrapper.cpp:268 #, c-format msgid "%s is not an object" msgstr "%s не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼" #: src/services/wrappers/python/pythonwrapper.cpp:274 msgid "Message class is not an object" msgstr "КлаÑÑ Message не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼" #: src/services/wrappers/python/pythonwrapper.cpp:280 msgid "Python Wrapper constructor succeeded" msgstr "КонÑтруктор надÑтройки Python отработал уÑпешно" #: src/services/wrappers/python/pythonwrapper.cpp:295 #, c-format msgid "Python Wrapper destructor (%d)" msgstr "ДеÑтруктор оболочки Python (%d)" #: src/services/wrappers/python/pythonwrapper.cpp:328 msgid "Python interpreter locked" msgstr "Интерпретатор Python заблокирован" #: src/services/wrappers/python/pythonwrapper.cpp:332 msgid "Python 
interpreter released" msgstr "Интерпретатор Python разблокирован" #: src/services/wrappers/python/pythonwrapper.cpp:403 msgid "Python wrapper process called" msgstr "Вызван процеÑÑ Python wrapper" #: src/services/wrappers/python/pythonwrapper.cpp:412 msgid "Failed to create input SOAP container" msgstr "Ðе удалоÑÑŒ Ñоздать входной контейнер SOAP" #: src/services/wrappers/python/pythonwrapper.cpp:422 msgid "Cannot create inmsg argument" msgstr "Ðе удалоÑÑŒ Ñоздать аргумент inmsg" #: src/services/wrappers/python/pythonwrapper.cpp:436 msgid "Cannot find ARC Message class" msgstr "Ðе удалоÑÑŒ обнаружить клаÑÑ ARC Message" #: src/services/wrappers/python/pythonwrapper.cpp:442 msgid "Cannot convert inmsg to Python object" msgstr "Ðе удалоÑÑŒ преобразовать inmsg в объект Python" #: src/services/wrappers/python/pythonwrapper.cpp:451 msgid "Failed to create SOAP containers" msgstr "Ðе удалоÑÑŒ Ñоздать контейеры SOAP" #: src/services/wrappers/python/pythonwrapper.cpp:457 msgid "Cannot create outmsg argument" msgstr "Ðе удалоÑÑŒ Ñоздать аргумент outmsg" #: src/services/wrappers/python/pythonwrapper.cpp:463 msgid "Cannot convert outmsg to Python object" msgstr "Ðе удалоÑÑŒ преобразовать outmsg в объект Python" #: src/tests/client/test_ClientInterface.cpp:36 #: src/tests/client/test_ClientSAML2SSO.cpp:68 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:78 #: src/tests/echo/test_clientinterface.cpp:41 #: src/tests/echo/test_clientinterface.cpp:132 #: src/tests/echo/test_clientinterface.py:12 msgid "Creating a soap client" msgstr "СоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ SOAP" #: src/tests/client/test_ClientInterface.cpp:73 #: src/tests/client/test_ClientSAML2SSO.cpp:47 #: src/tests/client/test_ClientSAML2SSO.cpp:71 #: src/tests/count/test_client.cpp:61 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:85 #: src/tests/echo/echo_test4axis2c/test_client.cpp:56 #: src/tests/echo/test.cpp:62 src/tests/echo/test_client.cpp:72 #: src/tests/echo/test_clientinterface.cpp:67 #: src/tests/echo/test_clientinterface.cpp:107 #: src/tests/echo/test_clientinterface.cpp:136 #: src/tests/echo/test_clientinterface.py:22 msgid "Creating and sending request" msgstr "Создание и заÑылка запроÑа" #: src/tests/client/test_ClientInterface.cpp:84 #: src/tests/delegation/test_client_with_delegation_sechandler.cpp:97 #: src/tests/echo/test_clientinterface.cpp:78 #: src/tests/echo/test_clientinterface.py:30 msgid "SOAP invocation failed" msgstr "Ðе удалаÑÑŒ Ð°ÐºÑ‚Ð¸Ð²Ð¸Ð·Ð°Ñ†Ð¸Ñ SOAP" #: src/tests/client/test_ClientSAML2SSO.cpp:44 #: src/tests/echo/test_clientinterface.cpp:100 msgid "Creating a http client" msgstr "СоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ HTTP" #: src/tests/client/test_ClientSAML2SSO.cpp:55 #: src/tests/echo/test_clientinterface.cpp:117 msgid "HTTP with SAML2SSO invocation failed" msgstr "ÐÐºÑ‚Ð¸Ð²Ð¸Ð·Ð°Ñ†Ð¸Ñ HTTP Ñ SAML2SSO не выполнена" #: src/tests/client/test_ClientSAML2SSO.cpp:59 #: src/tests/echo/test_clientinterface.cpp:121 msgid "There was no HTTP response" msgstr "Ðет ответа HTTP" #: src/tests/client/test_ClientSAML2SSO.cpp:77 #: src/tests/echo/test_clientinterface.cpp:145 msgid "SOAP with SAML2SSO invocation failed" msgstr "ÐÐºÑ‚Ð¸Ð²Ð¸Ð·Ð°Ñ†Ð¸Ñ SOAP Ñ SAML2SSO не выполнена" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:37 #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:38 #: src/tests/delegation/test_delegation_client.cpp:45 #: src/tests/delegation/test_delegation_client.cpp:76 #: src/tests/echo/test_clientinterface.cpp:172 #: src/tests/echo/test_clientinterface.cpp:194 msgid "Creating a 
delegation soap client" msgstr "Создание клиента SOAP Ð´Ð»Ñ Ð´ÐµÐ»ÐµÐ³Ð°Ñ†Ð¸Ð¸" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:46 #: src/tests/delegation/test_delegation_client.cpp:51 #: src/tests/echo/test_clientinterface.cpp:178 msgid "Delegation to ARC delegation service failed" msgstr "Сбой Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ñлужбе Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ ARC" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:50 #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:49 #: src/tests/delegation/test_delegation_client.cpp:56 #: src/tests/delegation/test_delegation_client.cpp:88 #: src/tests/echo/test_clientinterface.cpp:182 #: src/tests/echo/test_clientinterface.cpp:205 #, c-format msgid "Delegation ID: %s" msgstr "ID делегированиÑ: %s" #: src/tests/client/test_ClientX509Delegation_ARC.cpp:58 #, c-format msgid "Delegated credential from delegation service: %s" msgstr "Делегированные параметры доÑтупа от Ñлужбы делегации: %s" #: src/tests/client/test_ClientX509Delegation_GridSite.cpp:45 #: src/tests/delegation/test_delegation_client.cpp:83 #: src/tests/echo/test_clientinterface.cpp:201 msgid "Delegation to gridsite delegation service failed" msgstr "Сбой Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ñлужбе Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Gridsite" #: src/tests/count/count.cpp:58 msgid "Input is not SOAP" msgstr "Ввод не в формате SOAP" #: src/tests/count/count.cpp:89 src/tests/echo/echo.cpp:83 msgid "echo: Unauthorized" msgstr "echo: ДоÑтуп закрыт" #: src/tests/count/count.cpp:98 src/tests/count/count.cpp:104 #, c-format msgid "Request is not supported - %s" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ðµ поддерживаетÑÑ - %s" #: src/tests/count/test_client.cpp:50 #: src/tests/echo/echo_test4axis2c/test_client.cpp:43 #: src/tests/echo/test_client.cpp:59 msgid "Failed to load client configuration" msgstr "Ðе удалоÑÑŒ загрузить наÑтройки клиента" #: src/tests/count/test_client.cpp:54 #: src/tests/echo/echo_test4axis2c/test_client.cpp:47 #: src/tests/echo/test.cpp:58 src/tests/echo/test_client.cpp:63 msgid "Client side MCCs are loaded" msgstr "Подгружены клиентÑкие компоненты цепи Ñообщений" #: src/tests/count/test_client.cpp:57 #: src/tests/echo/echo_test4axis2c/test_client.cpp:50 #: src/tests/echo/test_client.cpp:66 msgid "Client chain does not have entry point" msgstr "ОтÑутÑтует точка входа в клиентÑкую цепь" #: src/tests/count/test_client.cpp:84 #: src/tests/echo/echo_test4axis2c/test_client.cpp:74 #: src/tests/echo/test.cpp:74 src/tests/echo/test_client.cpp:90 msgid "Request failed" msgstr "Ошибка при выполнении запроÑа" #: src/tests/count/test_client.cpp:90 #: src/tests/echo/echo_test4axis2c/test_client.cpp:80 #: src/tests/echo/test.cpp:79 src/tests/echo/test_client.cpp:96 msgid "There is no response" msgstr "Ðет ответа" #: src/tests/count/test_client.cpp:97 #: src/tests/echo/echo_test4axis2c/test_client.cpp:87 #: src/tests/echo/test_client.cpp:103 msgid "Response is not SOAP" msgstr "Ответ не в формате SOAP" #: src/tests/count/test_service.cpp:22 src/tests/echo/test.cpp:23 #: src/tests/echo/test_service.cpp:22 msgid "Creating service side chain" msgstr "Создание цепи на Ñтороне ÑервиÑа" #: src/tests/count/test_service.cpp:25 src/tests/echo/test.cpp:26 #: src/tests/echo/test_service.cpp:25 msgid "Failed to load service configuration" msgstr "Ðе удалоÑÑŒ загрузить наÑтройки ÑервиÑа" #: src/tests/count/test_service.cpp:30 src/tests/echo/test_service.cpp:30 msgid "Service is waiting for requests" msgstr "Ð¡ÐµÑ€Ð²Ð¸Ñ Ð² ожидании запроÑов" #: src/tests/echo/test.cpp:32 msgid "Creating client interface" msgstr "СоздаётÑÑ 
Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ð°" #: src/tests/echo/test.cpp:82 msgid "Request succeed!!!" msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ ÑƒÐ´Ð°Ð»ÑÑ!!!" #~ msgid "Multi-request job description not allowed in GRIDMANAGER dialect" #~ msgstr "" #~ "МножеÑтвенное опиÑание заданий не допуÑкаетÑÑ Ð² диалекте GRIDMANAGER" #~ msgid "%s: Failed to run plugin" #~ msgstr "%s: Сбой при запуÑке подключаемого модулÑ" #~ msgid "%s: Plugin failed" #~ msgstr "%s: Сбой подключаемого модулÑ" #~ msgid "Empty registration collector" #~ msgstr "ПуÑтой Ñборщик региÑтраций" #~ msgid "Passing service's information from collector to registrator" #~ msgstr "Идёт передача информации о Ñлужбе от Ñборщика к региÑтратору" #~ msgid "" #~ "Registered static information: \n" #~ " doc: %s" #~ msgstr "" #~ "ЗарегиÑтрирована ÑтатичеÑÐºÐ°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ: \n" #~ " документ: %s" #~ msgid "" #~ "Information registered without static attributes: \n" #~ " doc: %s" #~ msgstr "" #~ "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð·Ð°Ñ€ÐµÐ³Ð¸Ñтрирована без ÑтатичеÑких аттрибутов: \n" #~ " документ: %s" #~ msgid "Failed to create XMLNode container" #~ msgstr "Ðе удалоÑÑŒ Ñоздать контейнер XMLNode" #~ msgid "Cannot find ARC XMLNode class" #~ msgstr "КлаÑÑ ARC XMLNode не найден" #~ msgid "Cannot create doc argument" #~ msgstr "Ðе удалоÑÑŒ Ñоздать аргумент документации" #~ msgid "Cannot convert doc to Python object" #~ msgstr "Ðе удалоÑÑŒ преобразовать doc в объект Python" #~ msgid "Can't obtain configuration" #~ msgstr "Ðе удалоÑÑŒ получить наÑтройки" #~ msgid "require the specified endpoint type for job submission" #~ msgstr "потребовать указанный тип точки входа Ð´Ð»Ñ Ð·Ð°Ñылки задачи" #~ msgid "Failed to cancel job: %s" #~ msgstr "Ошибка отмены задачи: %s" #~ msgid "Failed retrieving job IDs: Unsupported url (%s) given" #~ msgstr "Сбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ñрлыков задач: задан неподдерживаемый URL (%s)" #~ msgid "Failed retrieving job IDs" #~ msgstr "Сбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ñрлыков задач" #~ msgid "" #~ "Error encoutered during job ID retrieval. All job IDs might not have been " #~ "retrieved" #~ msgstr "" #~ "Сбой в процеÑÑе Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ñрлыков задач: возможно, не вÑе Ñрлыки " #~ "извлечены" #~ msgid "Service access is not allowed for this user" #~ msgstr "ДоÑтуп к Ñлужбе Ð´Ð»Ñ Ñтого Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð·Ð°ÐºÑ€Ñ‹Ñ‚" #~ msgid "ServiceURL missing" #~ msgstr "ОтÑутÑтвует ServiceURL" #~ msgid "" #~ "Protocol is %s. It is recommended to use secure connection with https." #~ msgstr "Протокол %s. РекомендуетÑÑ Ð±ÐµÐ·Ð¾Ð¿Ð°Ñное Ñоединение по https." #~ msgid "Ignoring incomplete log file \"%s\"" #~ msgstr "ИгнорируетÑÑ Ð½ÐµÐ¿Ð¾Ð»Ð½Ñ‹Ð¹ журнальный файл \"%s\"" #~ msgid "Logging UR set of %d URs." #~ msgstr "ЗапиÑываетÑÑ Ð½Ð°Ð±Ð¾Ñ€ UR из %d запиÑей UR." #~ msgid "UR set dump: %s" #~ msgstr "Выведен набор запиÑей UR: %s" #~ msgid "Backup file (%s) created." #~ msgstr "Создан резервный файл (%s)." #~ msgid "APEL message file (%s) created." #~ msgstr "Создан файл Ñообщений APEL (%s)." #~ msgid "Running SSM client using: %s" #~ msgstr "Запущен клиент SSM иÑпользующий: %s" #~ msgid "SSM client exit code: %d" #~ msgstr "Код выхода клиента SSM: %d" #~ msgid "Aggregation record (%s) not exist, initialize it..." #~ msgstr "" #~ "ÐÐ³Ñ€ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ (%s) не ÑущеÑтвует, производитÑÑ Ð¸Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ..." #~ msgid "Aggregation record (%s) initialization successful." #~ msgstr "ÐÐ³Ñ€ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ (%s) уÑпешно инициализирована." #~ msgid "" #~ "Some error happens during the aggregation record (%s) initialization." 
#~ msgstr "ÐеизвеÑÑ‚Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° при инициализации агрегированной запиÑи (%s)." #~ msgid "Aggregation record (%s) read from file successful." #~ msgstr "ÐÐ³Ñ€ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ (%s) уÑпешно прочитана из файла." #~ msgid "Aggregation record (%s) stored successful." #~ msgstr "ÐÐ³Ñ€ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ (%s) уÑпешно Ñохранена." #~ msgid "Some error happens during the aggregation record (%s) storing." #~ msgstr "ÐеизвеÑÑ‚Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° при Ñохранении агрегированной запиÑи (%s)." #~ msgid "APEL aggregation message file (%s) created." #~ msgstr "Создан файл агрегированного ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ APEL (%s)." #~ msgid "SSM client return value: %d" #~ msgstr "Код возврата клиента SSM: %d" #~ msgid "year: %s" #~ msgstr "год: %s" #~ msgid "month: %s" #~ msgstr "меÑÑц: %s" #~ msgid "queue: %s" #~ msgstr "очередь: %s" #~ msgid "query: %s" #~ msgstr "запроÑ: %s" #~ msgid "list size: %d" #~ msgstr "длина ÑпиÑка: %d" #~ msgid "XML: %s" #~ msgstr "XML: %s" #~ msgid "UPDATE Aggregation Record called." #~ msgstr "Вызов метода UPDATE агрегированной запиÑи." #~ msgid "Does not sending empty aggregation/synch message." #~ msgstr "" #~ "ОтÑылка пуÑтого агрегированного/Ñинхронизационного ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ Ð½Ðµ " #~ "производитÑÑ." #~ msgid "synch message: %s" #~ msgstr "Ñинхронизационное Ñообщение: %s" #~ msgid "Protocol is %s, should be https" #~ msgstr "Указан протокол %s, а должен быть https" #~ msgid "Wrong loglevel (%s) config value given!" #~ msgstr "Задано недопуÑтимое значение наÑтройки loglevel (%s)!" #~ msgid "Wrong urdelivery_keepfailed (%s) config value given!" #~ msgstr "Задано недопуÑтимое значение наÑтройки urdelivery_keepfailed (%s)!" #~ msgid "Wrong urdelivery_frequency (%s) config value given!" #~ msgstr "" #~ "Задано недопуÑтимое значение urdelivery_frequency (%s) в наÑтройках!" #~ msgid "Name part is missing by SGAS section!" #~ msgstr "ОтÑутÑтвует подраздел Ñ Ð½Ð°Ð·Ð²Ð°Ð½Ð¸ÐµÐ¼ в разделе SGAS!" #~ msgid "Targeturl config value is missing by SGAS!" #~ msgstr "Значение наÑтройки Targeturl отÑутÑтвует Ð´Ð»Ñ SGAS!" #~ msgid "Block %s is not marked for legacy fallback processing. Skipping." #~ msgstr "" #~ "Блок %s не помечен Ð´Ð»Ñ Ð°Ð»ÑŒÑ‚ÐµÑ€Ð½Ð°Ñ‚Ð¸Ð²Ð½Ð¾Ð¹ обработки уÑтаревших запиÑей. " #~ "ПропуÑкаетÑÑ." #~ msgid "Wrong urbatchsize (%s) config value given by SGAS!" #~ msgstr "Задано недопуÑтимое значение наÑтройки urbatchsize (%s) в SGAS!" #~ msgid "Name part is missing by APEL section!" #~ msgstr "ОтÑутÑтвует подраздел Ñ Ð½Ð°Ð·Ð²Ð°Ð½Ð¸ÐµÐ¼ в разделе APEL!" #~ msgid "Targeturl config value is missing by APEL!" #~ msgstr "Значение наÑтройки Targeturl отÑутÑтвует Ð´Ð»Ñ APEL!" #~ msgid "Wrong benchmark_value (%s) config value given by APEL!" #~ msgstr "Задано недопуÑтимое значение наÑтройки benchmark_value (%s) в APEL!" #~ msgid "Wrong urbatchsize (%s) config value given by APEL!" #~ msgstr "Задано недопуÑтимое значение наÑтройки urbatchsize (%s) в APEL!" #~ msgid "Sent jobIDs: (nr. of job(s) %d)" #~ msgstr "Отправленные jobID: (вÑего %d задач(и))" #~ msgid "Unable to create adapter for the specific reporting destination type" #~ msgstr "Ðевозможно Ñоздать адаптер Ð´Ð»Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ð¾Ð³Ð¾ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¾Ñ‚Ñ‡Ñ‘Ñ‚Ð½Ð¾Ñти" #~ msgid "Insert filter element: <%s,%s>" #~ msgstr "Ð’Ñтавка Ñлемента фильтра: <%s,%s>" #~ msgid "Not set filter for this URL (%s)." #~ msgstr "Ð”Ð»Ñ Ñтого URL (%s) фильтр не назначен." 
#~ msgid "Current job's VO name: %s" #~ msgstr "Ð˜Ð¼Ñ Ð’Ðž текущей задачи: %s" #~ msgid "VO filter for host: %s" #~ msgstr "Фильтр ВО Ð´Ð»Ñ ÑƒÐ·Ð»Ð°: %s" #~ msgid "Read archive file %s" #~ msgstr "ЧитаетÑÑ Ð°Ñ€Ñ…Ð¸Ð²Ð½Ñ‹Ð¹ файл %s" #~ msgid "" #~ "Could not read archive file %s for job log file %s (%s), generating new " #~ "Usage Record" #~ msgstr "" #~ "Ðевозможно прочеÑть архивный файл %s Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° журнала задач %s (%s), " #~ "ÑоздаётÑÑ Ð½Ð¾Ð²Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ Usage Record" #~ msgid "" #~ "Missing required Usage Record element \"RecordIdentity\", in job log file " #~ "%s" #~ msgstr "" #~ "ОтÑутÑтвует обÑзательный Ñлемент Usage Record \"RecordIdentity\", в файле " #~ "журнала задач %s" #~ msgid "VO (%s) not set for this (%s) SGAS server by VO filter." #~ msgstr "Фильтр ВО (%s) не наÑтроен Ð´Ð»Ñ Ñтого Ñервера SGAS (%s)." #~ msgid "[VO filter] Job log will be not send. %s." #~ msgstr "[VO filter] запиÑÑŒ о задаче не будет отправлена. %s." #~ msgid "Missing required element \"Status\" in job log file %s" #~ msgstr "" #~ "ОтÑутÑтвует обÑзательный Ñлемент \"Status\" в файле журнала задач %s" #~ msgid "Failed to create archive directory %s: %s" #~ msgstr "Ðе удалоÑÑŒ Ñоздать архивный каталог %s: %s" #~ msgid "Archiving Usage Record to file %s" #~ msgstr "Ðрхивирование запиÑи Usage Record в файл %s" #~ msgid "Failed to write file %s: %s" #~ msgstr "Сбой при запиÑи файла %s: %s" #~ msgid "Missing required element \"CpuDuration\" in job log file %s" #~ msgstr "" #~ "ОтÑутÑтвует обÑзательный Ñлемент \"CpuDuration\" в файле журнала задач %s" #~ msgid "Set non standard benchmark type: %s" #~ msgstr "Задан неÑтандартный тип Ñталонного теÑта: %s" #~ msgid "Ignored incoming benchmark value: %s, Use float value!" #~ msgstr "" #~ "ИгнорируетÑÑ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð½Ð¾Ðµ значение Ñталонного теÑта: %s, иÑпользуйте " #~ "значение Ñ Ð¿Ð»Ð°Ð²Ð°ÑŽÑ‰ÐµÐ¹ запÑтой!" #~ msgid "Failed to delete file %s:%s" #~ msgstr "Ðе удалоÑÑŒ удалить файл %s: %s" #~ msgid "UsageRecords registration response: %s" #~ msgstr "Отклик региÑтрации запиÑи UsageRecords: %s" #~ msgid "Initialised, archived job log dir: %s" #~ msgstr "" #~ "Инициализирован каталог архивного Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ ÑƒÑ‡Ñ‘Ñ‚Ð½Ñ‹Ñ… запиÑей о задачах: %s" #~ msgid "Incoming time range: %s" #~ msgstr "Промежуток времени заÑылки: %s" #~ msgid "Requested time range: %d.%d.%d. 0:00 - %d.%d.%d. %d:%d " #~ msgstr "Запрошенный промежуток времени: %d.%d.%d. 0:00 - %d.%d.%d. %d:%d " #~ msgid "Interactive mode." #~ msgstr "Интерактивный режим." 
#~ msgid "Could not open log directory \"%s\": %s" #~ msgstr "Ðевозможно открыть каталог Ñ Ð¶ÑƒÑ€Ð½Ð°Ð»Ð°Ð¼Ð¸ \"%s\": %s" #~ msgid "Error reading log directory \"%s\": %s" #~ msgstr "Ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð° журналов \"%s\": %s" #~ msgid "Finished, job log dir: %s" #~ msgstr "Завершено, каталог журнала задач: %s" #~ msgid "Initialised, job log dir: %s" #~ msgstr "Запущено, каталог журнала задач: %s" #~ msgid "Expiration time: %d seconds" #~ msgstr "Ð’Ñ€ÐµÐ¼Ñ Ð¸ÑÑ‚ÐµÑ‡ÐµÐ½Ð¸Ñ Ð´ÐµÐ¹ÑтвительноÑти: %d Ñекунд" #~ msgid "Could not open output directory \"%s\": %s" #~ msgstr "Ðевозможно открыть выходной каталог \"%s\": %s" #~ msgid "Creating the output directory \"%s\"" #~ msgstr "СоздаетÑÑ Ð²Ñ‹ÑŒÐ¾Ð´Ð½Ð¾Ð¹ каталог %s" #~ msgid "Failed to create output directory \"%s\": %s" #~ msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð²Ñ‹Ñ…Ð¾Ð´Ð½Ð¾Ð³Ð¾ каталога %s: %s" #~ msgid "Removing outdated job log file %s" #~ msgstr "УдалÑетÑÑ ÑƒÑтаревший файл журнала задач %s" #~ msgid "Missing option argument" #~ msgstr "ОтÑутÑтвует аргумент опции" #~ msgid "Unrecognized option" #~ msgstr "ÐÐµÐ¾Ð¿Ð¾Ð·Ð½Ð°Ð½Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ" #~ msgid "Add URL value before a topic. (for example: -u [...] -t [...])" #~ msgstr "Добавьте значение URL перед темой. (например: -u [...] -t [...])" #~ msgid "Force resend all aggregation records." #~ msgstr "Принудительно отправить заново вÑе агрегированные запиÑи." #~ msgid "Sync message(s) will be send..." #~ msgstr "Ð¡Ð¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ Ñинхронизации будут отправлены..." #~ msgid "Options processing error" #~ msgstr "Ошибка при обработке опций" #~ msgid "Failed processing configuration file %s" #~ msgstr "Ðе удалоÑÑŒ обработать файл наÑтроек %s" #~ msgid "Topic missing for a (%s) host." #~ msgstr "ОтÑутÑтвует тема Ð´Ð»Ñ Ñервера (%s)." #~ msgid "Aggregation record(s) sending to %s" #~ msgstr "ÐÐ³Ñ€ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ отправлÑетÑÑ Ð½Ð° %s" #~ msgid "resend opt: %s" #~ msgstr "Ð¾Ð¿Ñ†Ð¸Ñ Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð¾Ð¹ отправки: %s" #~ msgid " Use arclean to remove retrieved jobs from job list" #~ msgstr "" #~ " ИÑпользуйте arclean Ð´Ð»Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð½Ñ‹Ñ… задач из ÑпиÑка" #~ msgid "No execuable path specified in GRIDMANAGER dialect" #~ msgstr "Ðе задан путь к иÑполнÑемому файлу в диалекте GRIDMANAGER" #~ msgid "Executable path not specified ('executable' attribute)" #~ msgstr "Ðе задан путь к иÑполнÑемому файлу (атрибут 'executable')" #~ msgid "Missing executable" #~ msgstr "ОтÑутÑтвует Ñлемент Executable" #~ msgid "Error evaulating profile" #~ msgstr "Ошибка проверки профилÑ" #~ msgid "Adding resoure-id value: %s" #~ msgstr "ДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ resoure-id: %s" #~ msgid "" #~ "Can not access CA certificates directory: %s. The certificates will not " #~ "be verified." #~ msgstr "" #~ "Ðе удалоÑÑŒ открыть каталог Ñертификатов CA: %s. Сертификаты не будут " #~ "проверены." #~ msgid "" #~ "Unable to locate the \"%s\" plugin. Please refer to installation " #~ "instructions and check if package providing support for %s plugin is " #~ "installed" #~ msgstr "" #~ "Ðе удалоÑÑŒ обнаружить подключаемый модуль \"%s\". ПожалуйÑта, " #~ "проконÑультируйтеÑÑŒ Ñ Ð¸Ð½Ñтрукцией по уÑтановке и проверьте, уÑтановлен ли " #~ "пакет, Ñодержащий модуль \"%s\"." 
#~ msgid "" #~ "The VOMS server with the information:\n" #~ "\t%s\"\n" #~ "can not be reached, please make sure it is available" #~ msgstr "" #~ "Ðевозможно ÑвÑзатьÑÑ Ñ Ñервером VOMS Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÐµÐ¹:\n" #~ "\t%s\"\n" #~ "пожалуйÑта, проверьте, доÑтупен ли Ñтот Ñервер" #~ msgid "Please choose the NSS database you would use (1-%d): " #~ msgstr "ПожалуйÑта, выберите базу данных NSS Ð´Ð»Ñ Ð¸ÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð¸Ñ (1-%d): " #~ msgid "Failed to authenticate to token %s." #~ msgstr "Ðе удалоÑÑŒ аутентифицироватьÑÑ Ðº маркёру %s." #~ msgid "Starting hepler process: %s" #~ msgstr "ЗапуÑкаетÑÑ Ð¿Ñ€Ð¾Ñ†ÐµÑÑ Ð°ÑÑиÑтента: %s" #~ msgid "Running command %s" #~ msgstr "ВыполнÑетÑÑ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð° %s" #~ msgid "Bad name for executable: " #~ msgstr "ÐедопуÑтимое Ð¸Ð¼Ñ Ð´Ð»Ñ Ð¸ÑполнÑемого файла: " #~ msgid "Error getting info from statvfs for the path %s:" #~ msgstr "Ошибка Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ от statvfs Ð´Ð»Ñ Ð¿ÑƒÑ‚Ð¸ %s:" #~ msgid "Closing channel (retrieve) due to local read error :%s" #~ msgstr "" #~ "Прерывание канала (получение) в ÑвÑзи Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð¹ ошибкой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ :%s" #~ msgid "SOAP invokation failed" #~ msgstr "Ðе удалоÑÑŒ инициализировать SOAP" #~ msgid "Can not get the delegation credential: %s from delegation service:%s" #~ msgstr "" #~ "Ðе удалоÑÑŒ получить делегированные параметры доÑтупа %s от Ñлужбы " #~ "делегированиÑ:%s" #~ msgid "Application Options:" #~ msgstr "Параметры приложениÑ:" #~ msgid "Failed to delete private key that attaches to certificate: %s" #~ msgstr "Сбой ÑƒÐ½Ð¸Ñ‡Ñ‚Ð¾Ð¶ÐµÐ½Ð¸Ñ Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¾Ð³Ð¾ ключа, прикреплÑемого к Ñертификату: %s" #~ msgid "Missing file name in [arex/jura/archiving] logfile" #~ msgstr "ОтÑутÑтвует Ð¸Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð° в журнальном файле [arex/jura/archiving] " #~ msgid "Wrong number in manage_frequency: %s" #~ msgstr "ÐедопуÑтимое значение в smanage_frequency: %s" #~ msgid ": Accounting archive management tool is not specified" #~ msgstr ": Ðе указано ÑредÑтво ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð°Ñ€Ñ…Ð¸Ð²Ð¾Ð¼ учётных запиÑей" #~ msgid ": Failure creating slot for accounting archive manager child process" #~ msgstr "" #~ ": Сбой подготовки дочернего процеÑÑа ÑредÑтва ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð°Ñ€Ñ…Ð¸Ð²Ð¾Ð¼ учётных " #~ "запиÑей" #~ msgid ": Failure starting accounting archive manager child process" #~ msgstr "" #~ ": Сбой запуÑка дочернего процеÑÑа ÑредÑтва ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð°Ñ€Ñ…Ð¸Ð²Ð¾Ð¼ учётных " #~ "запиÑей" #~ msgid " [ egee:jdl ] " #~ msgstr " [ egee:jdl ] " #~ msgid " [ nordugrid:jsdl ] " #~ msgstr " [ nordugrid:jsdl ] " #~ msgid "" #~ "Cannot use multiple session directories and remotegmdirs at the same time" #~ msgstr "" #~ "ÐедопуÑтимо одновременное иÑпользование неÑкольких каталогов ÑеанÑов и " #~ "remotegmdirs" #~ msgid "No non-draining control or session directories available" #~ msgstr "" #~ "Ðет контрольных каталогов или каталогов ÑеÑÑий не в ÑоÑтоÑнии разгрузки" #~ msgid "" #~ "Supported constraints are:\n" #~ " validityStart=time (e.g. 2008-05-29T10:20:30Z; if not specified, start " #~ "from now)\n" #~ " validityEnd=time\n" #~ " validityPeriod=time (e.g. 43200 or 12h or 12H; if both validityPeriod " #~ "and validityEnd\n" #~ " not specified, the default is 12 hours for local proxy, and 168 hours " #~ "for delegated\n" #~ " proxy on myproxy server)\n" #~ " vomsACvalidityPeriod=time (e.g. 43200 or 12h or 12H; if not specified, " #~ "the default\n" #~ " is the minimum value of 12 hours and validityPeriod)\n" #~ " myproxyvalidityPeriod=time (lifetime of proxies delegated by myproxy " #~ "server,\n" #~ " e.g. 
43200 or 12h or 12H; if not specified, the default is the minimum " #~ "value of\n" #~ " 12 hours and validityPeriod (which is lifetime of the delegated proxy " #~ "on myproxy server))\n" #~ " proxyPolicy=policy content\n" #~ " proxyPolicyFile=policy file" #~ msgstr "" #~ "Поддерживаемые ограничениÑ:\n" #~ " validityStart=Ð²Ñ€ÐµÐ¼Ñ (например, 2008-05-29T10:20:30Z; еÑли не указано, " #~ "то начинаетÑÑ Ð½ÐµÐ¼ÐµÐ´Ð»ÐµÐ½Ð½Ð¾)\n" #~ " validityEnd=времÑ\n" #~ " validityPeriod=Ð²Ñ€ÐµÐ¼Ñ (например, 43200, или 12h, или 12H; еÑли не " #~ "указаны ни validityPeriod,\n" #~ " ни validityEnd, то Ñрок дейÑÑ‚Ð²Ð¸Ñ Ð¿Ð¾ умолчанию ÑоÑтавлÑет 12 чаÑов Ð´Ð»Ñ " #~ "локальной доверенноÑти,\n" #~ " и 168 чаÑов Ð´Ð»Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð¾Ð¹ доверенноÑти на Ñервере MyProxy)\n" #~ " vomsACvalidityPeriod=Ð²Ñ€ÐµÐ¼Ñ (например, 43200, или 12h, или 12H; еÑли не " #~ "указано, то иÑпользуетÑÑ\n" #~ " наименьшее между 12 чаÑами и значением validityPeriod)\n" #~ " myproxyvalidityPeriod=Ð²Ñ€ÐµÐ¼Ñ (Ñрок годноÑти доверенноÑти, делегированной " #~ "через Ñервер MyProxy\n" #~ " например, 43200, или 12h, или 12H; еÑли не указано, то иÑпользуетÑÑ " #~ "наименьшее между 12 чаÑами\n" #~ " и значением validityPeriod - Ñроком годноÑти доверенноÑти, " #~ "делегированной через Ñервер MyProxy)\n" #~ " proxyPolicy=Ñодержимое политики\n" #~ " proxyPolicyFile=файл политики" #~ msgid "" #~ "print all information about this proxy. \n" #~ " In order to show the Identity (DN without CN as suffix for " #~ "proxy) \n" #~ " of the certificate, the 'trusted certdir' is needed." #~ msgstr "" #~ "вывеÑти вÑÑŽ информацию о данной доверенноÑти. \n" #~ " Ð”Ð»Ñ Ð²Ñ‹Ð²Ð¾Ð´Ð° перÑональной информации (DN без CN как ÑÑƒÑ„Ñ„Ð¸ÐºÑ " #~ "доверенноÑти) \n" #~ " из Ñертификата, необходим 'trusted certdir'." #~ msgid "username to MyProxy server" #~ msgstr "Ð˜Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ñервера MyProxy" #~ msgid "" #~ "command to MyProxy server. The command can be PUT or GET.\n" #~ " PUT/put/Put -- put a delegated credential to the MyProxy " #~ "server; \n" #~ " GET/get/Get -- get a delegated credential from the MyProxy " #~ "server, \n" #~ " credential (certificate and key) is not needed in this " #~ "case. \n" #~ " MyProxy functionality can be used together with VOMS\n" #~ " functionality.\n" #~ msgstr "" #~ "инÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ Ñерверу MyProxy. 
Возможны две инÑтрукции: PUT и GET:\n" #~ " PUT/put/Put -- Ñохранить делегированный Ñертификат на " #~ "Ñервере MyProxy;\n" #~ " GET/get/Get -- получить делегированный Ñертификат Ñ Ñервера " #~ "MyProxy,\n" #~ " в Ñтом Ñлучае не требуютÑÑ Ð»Ð¸Ñ‡Ð½Ñ‹Ðµ " #~ "Ñертификаты и ключи.\n" #~ " ИнÑтрукции MyProxy и VOMS могут иÑпользоватьÑÑ " #~ "одновременно.\n" #~ msgid "use NSS credential database in the Firefox profile" #~ msgstr "иÑпользовать базу данных параметров доÑтупа NSS из Ð¿Ñ€Ð¾Ñ„Ð¸Ð»Ñ Firefox" #~ msgid "Failed to add extension: %s" #~ msgstr "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ€Ð°ÑширениÑ: %s" #~ msgid "" #~ "$X509_VOMS_FILE, and $X509_VOMSES are not set;\n" #~ "User has not specify the location for vomses information;\n" #~ "There is also not vomses location information in user's configuration " #~ "file;\n" #~ "Cannot find vomses in default locations: ~/.arc/vomses, ~/.voms/vomses, " #~ "$ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/" #~ "vomses, /etc/vomses, /etc/grid-security/vomses, and the location at the " #~ "corresponding sub-directory" #~ msgstr "" #~ "$X509_VOMS_FILE и $X509_VOMSES не наÑтроены;\n" #~ "Пользователь не указал раÑположение файла vomses;\n" #~ "РаÑположение файла vomses не найдено в файле наÑтроек пользователÑ;\n" #~ "Файл vomses не обнаружен в ~/.arc/vomses, ~/.voms/vomses, $ARC_LOCATION/" #~ "etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/vomses, /etc/" #~ "vomses, /etc/grid-security/vomses, а также в ÑоответÑтвующих подкаталогах" #~ msgid "No stream response" #~ msgstr "Ðе получен ответ Ñ Ñервера" #~ msgid "Returned msg from myproxy server: %s %d" #~ msgstr "Сервер myproxy возвратил Ñледующее Ñообщение: %s %d" #~ msgid "There are %d certificates in the returned msg" #~ msgstr "Ответное Ñообщение Ñодержит %d Ñертификатов" #~ msgid "Delegate proxy failed" #~ msgstr "Ðе удалоÑÑŒ делегирование доверенноÑти" #~ msgid "Returned msg from voms server: %s " #~ msgstr "Сообщение Ñ Ñервера VOMS: %s " #~ msgid "service message" #~ msgstr "Ñообщение Ñлужбы" #~ msgid "The arcecho command is a client for the ARC echo service." #~ msgstr "Команда arcecho ÑвлÑетÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ñким приложением Ñлужбы ARC echo." #~ msgid "" #~ "The service argument is a URL to an ARC echo service.\n" #~ "The message argument is the message the service should return." #~ msgstr "" #~ "Ðргументом Ñлужбы должен быть URL Ñхо-Ñервера ARC.\n" #~ "Ðргументом ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ Ð´Ð¾Ð»Ð¶Ð½Ð¾ быть Ñообщение, которое Ñтот Ñервер должен " #~ "возвратить." #~ msgid "service_url" #~ msgstr "service_url" #~ msgid "path to config file" #~ msgstr "путь к файлу наÑтроек" #~ msgid "SOAP Request failed: No response" #~ msgstr "Сбой запроÑа SOAP: Ðет ответа" #~ msgid "SOAP Request failed: Error" #~ msgstr "Сбой запроÑа SOAP: Ошибка" #~ msgid "No in SOAP response" #~ msgstr "Отзыв SOAP не Ñодержит " #~ msgid "No in SAML response" #~ msgstr "Ð’ отклике SAML отÑутÑтвует " #~ msgid "URL [query]" #~ msgstr "URL [запроÑ]" #~ msgid "" #~ "The arcwsrf command is used for obtaining the WS-ResourceProperties of\n" #~ "services." #~ msgstr "" #~ "Команда arcwsrf иÑпользуетÑÑ Ð´Ð»Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ð¹ WS-" #~ "ResourceProperties\n" #~ "различных Ñлужб." 
#~ msgid "Request for specific Resource Property" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð·Ð°Ð´Ð°Ð½Ð½Ð¾Ð³Ð¾ ÑвойÑтва реÑурÑа" #~ msgid "[-]name" #~ msgstr "[-]адреÑ" #~ msgid "Missing URL" #~ msgstr "ОтÑутÑтвует URL" #~ msgid "Too many parameters" #~ msgstr "Слишком много параметров" #~ msgid "Query is not a valid XML" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ðµ ÑвлÑетÑÑ ÐºÐ¾Ñ€Ñ€ÐµÐºÑ‚Ð½Ñ‹Ð¼ XML" #~ msgid "Failed to create WSRP request" #~ msgstr "Ðе удалоÑÑŒ Ñоздать корректный Ð·Ð°Ð¿Ñ€Ð¾Ñ WSRP" #~ msgid "Specified URL is not valid" #~ msgstr "Указанный Ð°Ð´Ñ€ÐµÑ Ð½ÐµÐ´ÐµÐ¹Ñтвителен" #~ msgid "Failed to send request" #~ msgstr "Ðе удалоÑÑŒ отправить запроÑ" #~ msgid "Failed to obtain SOAP response" #~ msgstr "Ðе удалоÑÑŒ получить отзыв SOAP" #~ msgid "SOAP fault received" #~ msgstr "Получена ошибка SOAP" #~ msgid "Creating an A-REX client" #~ msgstr "СоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ A-REX" #~ msgid "Unable to create SOAP client used by AREXClient." #~ msgstr "Ðе удалоÑÑŒ Ñоздать клиент SOAP иÑпользующийÑÑ AREXClient." #~ msgid "Failed locating credentials." #~ msgstr "Сбой Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð¾Ð² доÑтупа." #~ msgid "Failed initiate client connection." #~ msgstr "Сбой инициализации ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ð¾Ð¼." #~ msgid "Client connection has no entry point." #~ msgstr "ОтÑутÑтвует точка входа в клиентÑкую цепь." #~ msgid "Re-creating an A-REX client" #~ msgstr "ВоÑÑоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ A-REX" #~ msgid "AREXClient was not created properly." #~ msgstr "AREXClient не был Ñоздан надлежащим образом." #~ msgid "%s request to %s failed. No expected response." #~ msgstr "Сбой запроÑа %s к %s. ОтÑутÑтвует ожидаемый отклик." #~ msgid "Creating and sending submit request to %s" #~ msgstr "СоздаётÑÑ Ð¸ отправлÑетÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° заÑылку к %s" #~ msgid "Unable to retrieve status of job (%s)" #~ msgstr "Ошибка при получении информации о ÑоÑтоÑнии задачи (%s)" #~ msgid "Creating and sending ISIS information query request to %s" #~ msgstr "Создание и отправка запроÑа об информации ISIS на %s" #~ msgid "Service %s of type %s ignored" #~ msgstr "ИгнорируетÑÑ ÑÐµÑ€Ð²Ð¸Ñ %s типа %s" #~ msgid "No execution services registered in the index service" #~ msgstr "Ðи одна Ñлужба иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½Ðµ зарегиÑтрирована в Ñлужбе региÑтрации" #~ msgid "Creating and sending terminate request to %s" #~ msgstr "Создание и отправка запроÑа о прерывании задачи на %s" #~ msgid "Job termination failed" #~ msgstr "Ошибка Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #~ msgid "Creating and sending clean request to %s" #~ msgstr "" #~ "Создание и отправка запроÑа об удалении результатов работы задачи на %s" #~ msgid "Creating and sending job description retrieval request to %s" #~ msgstr "Создание и отправка запроÑа на получение опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ на %s" #~ msgid "Creating and sending job migrate request to %s" #~ msgstr "Создание и отправка запроÑа о миграции задачи на %s" #~ msgid "Renewal of ARC1 jobs is not supported" #~ msgstr "Возобновление задач ARC1 не поддерживаетÑÑ" #~ msgid "Failed retrieving job status information" #~ msgstr "Ðе удалоÑÑŒ извлечь информацию о ÑоÑтоÑнии задачи" #~ msgid "Cleaning of BES jobs is not supported" #~ msgstr "ОчиÑтка результатов задач BES не поддерживаетÑÑ" #~ msgid "Renewal of BES jobs is not supported" #~ msgstr "Возобновление задач BES не поддерживаетÑÑ" #~ msgid "Resuming BES jobs is not supported" #~ msgstr "ПерезапуÑк задач BES не поддерживаетÑÑ" #~ msgid "Collecting Job (A-REX jobs) information." 
#~ msgstr "СобираетÑÑ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ задачах (задачи на A-REX)" #~ msgid "No job identifier returned by BES service" #~ msgstr "Служба BES не возвратила ни одного Ñрлыка задачи" #~ msgid "Failed adapting job description to target resources" #~ msgstr "Сбой Ð°Ð´Ð°Ð¿Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¾Ð¿Ð¸ÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ Ð´Ð»Ñ Ð·Ð°Ñылки по назначению" #~ msgid "" #~ "Unable to migrate job. Job description is not valid in the %s format: %s" #~ msgstr "" #~ "Ðевозможно мигрировать задачу. ОпиÑание задачи в формате %s " #~ "недейÑтвительно: %s" #~ msgid "No job identifier returned by A-REX" #~ msgstr "A-REX не возвратил ни одного Ñрлыка задачи" #~ msgid "The Service doesn't advertise its Quality Level." #~ msgstr "Служба не Ñообщает о Ñвоём уровне качеÑтва." #~ msgid "Generating A-REX target: %s" #~ msgstr "СоздаётÑÑ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ A-REX: %s" #~ msgid "The Service doesn't advertise its Interface." #~ msgstr "Служба не Ñообщает о Ñвоём интерфейÑе." #~ msgid "The Service doesn't advertise its Serving State." #~ msgstr "Служба не Ñообщает о Ñвоём ÑоÑтоÑнии обÑлуживаниÑ." #~ msgid "Creating a CREAM client" #~ msgstr "СоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ CREAM" #~ msgid "Unable to create SOAP client used by CREAMClient." #~ msgstr "Ðе удалоÑÑŒ Ñоздать клиент SOAP иÑпользующийÑÑ CREAMClient." #~ msgid "CREAMClient not created properly" #~ msgstr "CREAMClient не был Ñоздан надлежащим образом" #~ msgid "Empty response" #~ msgstr "ПуÑтой ответ" #~ msgid "Request failed: %s" #~ msgstr "Сбой запроÑа: %s" #~ msgid "Creating and sending a status request" #~ msgstr "Создание и отправка запроÑа о ÑоÑтоÑнии" #~ msgid "Unable to retrieve job status." #~ msgstr "Ðевозможно извлечь информацию о ÑоÑтоÑнии задачи." #~ msgid "Creating and sending request to terminate a job" #~ msgstr "Создание и отправка запроÑа о прерывании задачи" #~ msgid "Creating and sending request to clean a job" #~ msgstr "Создание и отправка запроÑа об удалении результатов работы задачи" #~ msgid "Creating and sending request to resume a job" #~ msgstr "Создание и отправка запроÑа о возобновлении задачи" #~ msgid "Creating and sending request to list jobs" #~ msgstr "Создание и отправка запроÑа о проÑмотре ÑпиÑка задач" #~ msgid "Creating and sending job register request" #~ msgstr "Создание и отправка запроÑа о региÑтрации задачи" #~ msgid "No job ID in response" #~ msgstr "Отзыв не Ñодержит Ñрлыка задачи" #~ msgid "Creating and sending job start request" #~ msgstr "Создание и отправка запроÑа о начале задачи" #~ msgid "Creating delegation" #~ msgstr "Создание делегированиÑ" #~ msgid "Malformed response: missing getProxyReqReturn" #~ msgstr "ИÑкажённый отзыв: отÑутÑтвует getProxyReqReturn" #~ msgid "Delegatable credentials expired: %s" #~ msgstr "Срок дейÑÑ‚Ð²Ð¸Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€ÑƒÐµÐ¼Ñ‹Ñ… параметров доÑтупа иÑтек: %s" #~ msgid "Failed signing certificate request" #~ msgstr "Сбой подпиÑи запроÑа Ñертификата" #~ msgid "Failed putting signed delegation certificate to service" #~ msgstr "Сбой при передаче подпиÑанного Ñертификата Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð½Ð° ÑервиÑ" #~ msgid "Failed cleaning job: %s" #~ msgstr "Сбой очиÑтки задачи: %s" #~ msgid "Failed canceling job: %s" #~ msgstr "Сбой Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: %s" #~ msgid "Renewal of CREAM jobs is not supported" #~ msgstr "Возобновление задач CREAM не поддерживаетÑÑ" #~ msgid "Failed resuming job: %s" #~ msgstr "Сбой Ð²Ð¾Ð·Ð¾Ð±Ð½Ð¾Ð²Ð»ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #~ msgid "Failed creating signed delegation certificate" #~ msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñанного Ñертификата делегированиÑ" 
#~ msgid "Unable to submit job. Job description is not valid in the %s format" #~ msgstr "" #~ "Ðевозможно заÑлать задачу. ОпиÑание задачи в формате %s недейÑтвительно" #~ msgid "Failed registering job" #~ msgstr "Сбой региÑтрации задачи" #~ msgid "Failed starting job" #~ msgstr "Сбой запуÑка задачи" #~ msgid "Failed creating singed delegation certificate" #~ msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñанного Ñертификата делегированиÑ" #~ msgid "Unknown operator '%s' in attribute require in Version element" #~ msgstr "ÐеизвеÑтный оператор '%s' в атрибуте require Ñлемента Version" #~ msgid "Multiple '%s' elements are not supported." #~ msgstr "МножеÑтвенные Ñлементы '%s' не поддерживаютÑÑ." #~ msgid "The 'exclusiveBound' attribute to the '%s' element is not supported." #~ msgstr "Ðтрибут 'exclusiveBound' Ñлемента '%s' не поддерживаетÑÑ." #~ msgid "The 'epsilon' attribute to the 'Exact' element is not supported." #~ msgstr "Ðтрибут 'epsilon' Ñлемента 'Exact' не поддерживаетÑÑ." #~ msgid "Parsing error: Value of %s element can't be parsed as number" #~ msgstr "" #~ "Ошибка разбора: Значение Ñлемента %s не может быть разобрано как чиÑло" #~ msgid "" #~ "Parsing error: Elements (%s) representing upper range have different " #~ "values" #~ msgstr "Ошибка разбора: Элементы (%s) задающие верхнюю границу различаютÑÑ" #~ msgid "" #~ "Parsing error: Elements (%s) representing lower range have different " #~ "values" #~ msgstr "Ошибка разбора: Элементы (%s) задающие нижнюю границу различаютÑÑ" #~ msgid "" #~ "Parsing error: Value of lower range (%s) is greater than value of upper " #~ "range (%s)" #~ msgstr "" #~ "Ошибка разбора: Значение нижней границы (%s) превышает значение верхней " #~ "(%s)" #~ msgid "[ARCJSDLParser] Not a JSDL - missing JobDescription element" #~ msgstr "[ARCJSDLParser] Это не JSDL - отÑутÑтвует Ñлемент JobDescription" #~ msgid "" #~ "[ARCJSDLParser] Error during the parsing: missed the name attributes of " #~ "the \"%s\" Environment" #~ msgstr "" #~ "[ARCJSDLParser] Ошибка при разборе: отÑутÑтвует атрибут name в Ñлементе " #~ "Environment \"%s\"" #~ msgid "[ARCJSDLParser] RemoteLogging URL is wrongly formatted." #~ msgstr "[ARCJSDLParser] Ðеверный формат RemoteLogging URL." #~ msgid "[ARCJSDLParser] priority is too large - using max value 100" #~ msgstr "" #~ "[ARCJSDLParser] Ñлишком выÑокий приоритет - иÑпользуетÑÑ Ð¼Ð°ÐºÑимальное " #~ "значение 100" #~ msgid "" #~ "Lower bounded range is not supported for the 'TotalCPUCount' element." #~ msgstr "" #~ "Интервал Ñ Ð½Ð¸Ð¶Ð½ÐµÐ¹ границей не поддерживаетÑÑ Ð´Ð»Ñ Ñлемента 'TotalCPUCount'." #~ msgid "" #~ "Parsing the \"require\" attribute of the \"QueueName\" nordugrid-JSDL " #~ "element failed. An invalid comparison operator was used, only \"ne\" or " #~ "\"eq\" are allowed." #~ msgstr "" #~ "Сбой разбора атрибута \"require\" Ñлемента \"QueueName\" из nordugrid-" #~ "JSDL. ИÑпользуетÑÑ Ð½ÐµÐ´Ð¾Ð¿ÑƒÑтимый оператор ÑравнениÑ, допуÑкаютÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ " #~ "\"ne\" или \"eq\"." #~ msgid "No URI element found in Location for file %s" #~ msgstr "Ðе обнаружено Ñлементов URI в Location Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° %s" #~ msgid "[JDLParser] Semicolon (;) is not allowed inside brackets, at '%s;'." #~ msgstr "" #~ "[JDLParser] Точка Ñ Ð·Ð°Ð¿Ñтой (;) не допуÑкаетÑÑ Ð²Ð½ÑƒÑ‚Ñ€Ð¸ Ñкобок, Ñтрока " #~ "'%s;'." 
#~ msgid "[JDLParser] This kind of JDL descriptor is not supported yet: %s" #~ msgstr "[JDLParser] Этот тип деÑкриптора JDL пока не поддерживаетÑÑ: %s" #~ msgid "[JDLParser] Attribute named %s has unknown value: %s" #~ msgstr "[JDLParser] У атрибута %s недейÑтвительное значение: %s" #~ msgid "Not enough outputsandboxdesturi elements!" #~ msgstr "ÐедоÑтаточно Ñлементов outputsandboxdesturi!" #~ msgid "" #~ "[JDLParser] Environment variable has been defined without any equals sign." #~ msgstr "" #~ "[JDLParser] ÐŸÐµÑ€ÐµÐ¼ÐµÐ½Ð½Ð°Ñ Ñреды задана без иÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð¸Ñ Ð·Ð½Ð°ÐºÐ¾Ð² равенÑтва." #~ msgid "[JDLParser]: Unknown attribute name: '%s', with value: %s" #~ msgstr "[JDLParser]: ÐеизвеÑтное название атрибута: '%s', значение: %s" #~ msgid "The inputsandboxbaseuri JDL attribute specifies an invalid URL." #~ msgstr "Ðтрибут JDL inputsandboxbaseuri задаёт недопуÑтимый URL." #~ msgid "[JDLParser] Syntax error found during the split function." #~ msgstr "" #~ "[JDLParser] Обнаружена ÑинтакÑичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° при выполнении разбиениÑ." #~ msgid "[JDLParser] Lines count is zero or other funny error has occurred." #~ msgstr "[JDLParser] Ðулевое количеÑтво Ñтрок, или Ð´Ñ€ÑƒÐ³Ð°Ñ Ð½ÐµÐ¿Ð¾Ð½ÑÑ‚Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ°." #~ msgid "" #~ "[JDLParser] JDL syntax error. There is at least one equals sign missing " #~ "where it would be expected." #~ msgstr "" #~ "[JDLParser] СинтакÑичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° JDL. По крайней мере один из ожидаемых " #~ "знаков равенÑтва отÑутÑтвует." #~ msgid "Found %u service endpoints from the index service at %s" #~ msgstr "Обнаружено %u точек входа Ñлужб в каталоге на %s" #~ msgid "Cleaning of UNICORE jobs is not supported" #~ msgstr "Удаление задач UNICORE не поддерживаетÑÑ" #~ msgid "Canceling of UNICORE jobs is not supported" #~ msgstr "Прерывание задач UNICORE не поддерживаетÑÑ" #~ msgid "Renewal of UNICORE jobs is not supported" #~ msgstr "Возобновление задач UNICORE не поддерживаетÑÑ" #~ msgid "Resumation of UNICORE jobs is not supported" #~ msgstr "ПерезапуÑк задач UNICORE не поддерживаетÑÑ" #~ msgid "Creating a UNICORE client" #~ msgstr "СоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ UNICORE" #~ msgid "Failed to find delegation credentials in client configuration" #~ msgstr "" #~ "Сбой Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð¾Ð² доÑтупа Ð´Ð»Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð² наÑтройках клиента" #~ msgid "Failed to initiate delegation" #~ msgstr "Сбой инициализации делегирование" #~ msgid "Submission request failed" #~ msgstr "Сбой запроÑа отправки задачи" #~ msgid "Submission request succeed" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¾ заÑылке Ð·Ð°Ð´Ð°Ð½Ð¸Ñ ÑƒÐ´Ð°Ð»ÑÑ" #~ msgid "There was no response to a submission request" #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± отправке задачи" #~ msgid "A response to a submission request was not a SOAP message" #~ msgstr "Ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ заÑылке не ÑвлÑетÑÑ Ñообщением SOAP" #~ msgid "There is no connection chain configured" #~ msgstr "Ðе наÑтроена цепочка ÑвÑзи" #~ msgid "Submission returned failure: %s" #~ msgstr "Сбой при запуÑке: %s" #~ msgid "Submission failed, service returned: %s" #~ msgstr "Сбой при запуÑке, ÑÐµÑ€Ð²Ð¸Ñ Ð²Ð¾Ð·Ð²Ñ€Ð°Ñ‚Ð¸Ð» ошибку: %s" #~ msgid "Creating and sending a start job request" #~ msgstr "Создание и отправка запроÑа о запуÑке задачи" #~ msgid "A start job request failed" #~ msgstr "Ошибка запроÑа о запуÑке задачи" #~ msgid "A start job request succeeded" #~ msgstr "УÑпешный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ запуÑке задачи" #~ msgid "There was no response to a start job request" #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ запуÑке задачи" #~ 
msgid "The response of a start job request was not a SOAP message" #~ msgstr "Ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ Ñоздании задачи не ÑвлÑетÑÑ Ñообщением SOAP" #~ msgid "A status request failed" #~ msgstr "Сбой запроÑа о ÑоÑтоÑнии" #~ msgid "A status request succeed" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии удалÑÑ" #~ msgid "There was no response to a status request" #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии" #~ msgid "The response of a status request was not a SOAP message" #~ msgstr "Ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии не ÑвлÑетÑÑ Ñообщением SOAP" #~ msgid "The job status could not be retrieved" #~ msgstr "Ðе удалоÑÑŒ определить ÑоÑтоÑние задачи" #~ msgid "Creating and sending an index service query" #~ msgstr "Создание и отправка запроÑа в каталог реÑурÑов" #~ msgid "Creating and sending a service status request" #~ msgstr "Создание и отправка запроÑа о ÑоÑтоÑнии Ñлужбы" #~ msgid "A service status request failed" #~ msgstr "Ошибка запроÑа о ÑоÑтоÑнии Ñлужбы" #~ msgid "A service status request succeeded" #~ msgstr "УÑпешный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии Ñлужбы" #~ msgid "There was no response to a service status request" #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии Ñлужбы" #~ msgid "The response of a service status request was not a SOAP message" #~ msgstr "Ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии Ñервера не ÑвлÑетÑÑ Ñообщением SOAP" #~ msgid "The service status could not be retrieved" #~ msgstr "Ðе удалоÑÑŒ определить ÑоÑтоÑние Ñлужбы" #~ msgid "A job termination request failed" #~ msgstr "Ошибка запроÑа об обрыве иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #~ msgid "A job termination request succeed" #~ msgstr "УÑпешный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± обрыве иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #~ msgid "There was no response to a job termination request" #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± обрыве иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #~ msgid "The response of a job termination request was not a SOAP message" #~ msgstr "Ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ прерывании задачи не ÑвлÑетÑÑ Ñообщением SOAP" #~ msgid "A job cleaning request failed" #~ msgstr "Ошибка запроÑа об удалении результатов работы задачи" #~ msgid "A job cleaning request succeed" #~ msgstr "УÑпешный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± удалении результатов работы задачи" #~ msgid "There was no response to a job cleaning request" #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± удалении результатов работы задачи" #~ msgid "The response of a job cleaning request was not a SOAP message" #~ msgstr "Ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± удалении задачи не ÑвлÑетÑÑ Ñообщением SOAP" #~ msgid "Adding CREAM computing service" #~ msgstr "ДобавлÑетÑÑ Ð²Ñ‹Ñ‡Ð¸ÑÐ»Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ñлужба типа CREAM" #~ msgid "" #~ "checingBartenderURL: Response:\n" #~ "%s" #~ msgstr "" #~ "checingBartenderURL: Ответ:\n" #~ "%s" #~ msgid "Hostname is not implemented for arc protocol" #~ msgstr "Hostname не поддерживаетÑÑ Ð¿Ñ€Ð¾Ñ‚Ð¾ÐºÐ¾Ð»Ð¾Ð¼ arc" #~ msgid "" #~ "nd:\n" #~ "%s" #~ msgstr "" #~ "nd:\n" #~ "%s" #~ msgid "Not a collection" #~ msgstr "Это не коллекциÑ" #~ msgid "Recieved transfer URL: %s" #~ msgstr "Получен транÑпортный URL: %s" #~ msgid "Calculated checksum: %s" #~ msgstr "ВычиÑÐ»ÐµÐ½Ð½Ð°Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñума: %s" #~ msgid "Check" #~ msgstr "Проверка" #~ msgid "Deleted %s" #~ msgstr "Удалён %s" #~ msgid "Found file %s in remote cache at %s" #~ msgstr "Файл %s обнаружен в удалённом кÑше %s" #~ msgid "Failed to delete stale remote cache file %s: %s" #~ msgstr "Ðе удалоÑÑŒ удалить уÑтаревший удалённо кÑшированный файл %s: %s" #~ msgid "Failed to release lock on remote cache file %s" #~ msgstr "Ðевозможно 
разблокировать удалённо кÑшированный файл %s" #~ msgid "Replicating file %s to local cache file %s" #~ msgstr "Копирование файла %s в локальный кÑш %s" #~ msgid "" #~ "Replicating file %s from remote cache failed due to source being deleted " #~ "or modified" #~ msgstr "" #~ "Копирование файла %s из удалённого кÑша не удалоÑÑŒ, Ñ‚.к. иÑточник был " #~ "удалён или изменён" #~ msgid "Failed to delete bad copy of remote cache file %s at %s: %s" #~ msgstr "" #~ "Ðе удалоÑÑŒ удалить иÑпорченную копию удалённо кÑшированного файла %s в " #~ "%s: %s" #~ msgid "Cache file for %s not found in any local or remote cache" #~ msgstr "" #~ "КÑшированный файл Ð´Ð»Ñ %s не был обнаружен ни в локальном, ни в удалённом " #~ "кÑшах" #~ msgid "Using remote cache file %s for url %s" #~ msgstr "ИÑпользуетÑÑ ÑƒÐ´Ð°Ð»Ñ‘Ð½Ð½Ð¾ кÑшированный файл %s Ð´Ð»Ñ URL %s" #~ msgid "Initialize ISIS handler" #~ msgstr "Ð˜Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð¾Ð±Ñ€Ð°Ð±Ð¾Ñ‚Ñ‡Ð¸ÐºÐ° ISIS" #~ msgid "Can't recognize URL: %s" #~ msgstr "Ðеприемлемый URL: %s" #~ msgid "Initialize ISIS handler succeeded" #~ msgstr "УÑÐ¿ÐµÑˆÐ½Ð°Ñ Ð¸Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð¾Ð±Ñ€Ð°Ð±Ð¾Ñ‚Ñ‡Ð¸ÐºÐ° ISIS" #~ msgid "Remove ISIS (%s) from list" #~ msgstr "Удаление ISIS (%s) из ÑпиÑка" #~ msgid "getISISList from %s" #~ msgstr "getISISList из %s" #~ msgid "Key %s, Cert: %s, CA: %s" #~ msgstr "Ключ %s, Ñертификат: %s, CA: %s" #~ msgid "ISIS (%s) is not available or not valid response. (%d. reconnection)" #~ msgstr "" #~ "ISIS (%s) недоÑтупен, или получен недопуÑтимый отклик. (%d. Повторное " #~ "Ñоединение)" #~ msgid "Connection to the ISIS (%s) is success and get the list of ISIS." #~ msgstr "УÑпешное Ñоединение Ñ ISIS (%s), получение ÑпиÑка ÑервиÑов ISIS." #~ msgid "GetISISList add this (%s) ISIS into the list." #~ msgstr "GetISISList добавлÑет Ñтот ÑÐµÑ€Ð²Ð¸Ñ (%s) ISIS в ÑпиÑок." #~ msgid "Chosen ISIS for communication: %s" #~ msgstr "Выбранный Ð´Ð»Ñ ÑвÑзи ISIS: %s" #~ msgid "Get ISIS from list of ISIS handler" #~ msgstr "Извлечение ÑервиÑа ISIS из ÑпиÑка обработчиков ISIS" #~ msgid "Here is the end of the infinite calling loop." #~ msgstr "ЗдеÑÑŒ и заканчиваетÑÑ Ð±ÐµÑконечный цикл запроÑов." #~ msgid "" #~ "There is no more ISIS available. The list of ISIS's is already empty." #~ msgstr "ДоÑтупных ÑевриÑов ISIS больше нет. СпиÑок ISIS-ов уже опуÑтел." #~ msgid "cannot create directory: %s" #~ msgstr "не удалоÑÑŒ Ñоздать каталог: %s" #~ msgid "Cache configuration: %s" #~ msgstr "ÐаÑтройки кÑша: %s" #~ msgid "Missing cache root in configuration" #~ msgstr "Ð’ наÑтройках кÑша отÑутÑтвует корневой каталог" #~ msgid "Missing service ID" #~ msgstr "ОтÑутÑтвует Ñрлык ÑервиÑа" #~ msgid "Cache root: %s" #~ msgstr "ÐšÐ¾Ñ€Ð½ÐµÐ²Ð°Ñ Ð¿Ð°Ð¿ÐºÐ° кÑша: %s" #~ msgid "InfoCache object is not set up" #~ msgstr "Объект InfoCache не Ñоздан" #~ msgid "Invalid path in Set(): %s" #~ msgstr "ÐедопуÑтимый путь в Set(): %s" #~ msgid "Invalid path in Get(): %s" #~ msgstr "ÐедопуÑтимый путь в Get(): %s" #~ msgid "" #~ "InfoRegistrar thread waiting %d seconds for the all Registers elements " #~ "creation." #~ msgstr "" #~ "Поток InfoRegistrar ожидает %d Ñекунд, пока ÑоздадутÑÑ Ð²Ñе Ñлементы " #~ "Registers." #~ msgid "" #~ "InfoRegister created with config:\n" #~ "%s" #~ msgstr "" #~ "InfoRegister Ñоздан Ñ Ð½Ð°Ñтройками:\n" #~ "%s" #~ msgid "InfoRegister to be registered in Registrar %s" #~ msgstr "InfoRegister будет занеÑён в Registrar %s" #~ msgid "" #~ "Discarding Registrar because the \"URL\" element is missing or empty." 
#~ msgstr "" #~ "Registrar игнорируетÑÑ, так как Ñлемент \"URL\" отÑутÑтвует, либо пуÑÑ‚." #~ msgid "InfoRegistrar id \"%s\" has been found." #~ msgstr "Обнаружен InfoRegistrar id \"%s\"." #~ msgid "InfoRegistrar id \"%s\" was not found. New registrar created" #~ msgstr "InfoRegistrar id \"%s\" не был обнаружен. Создан новый рееÑтр" #~ msgid "" #~ "Configuration error. Retry: \"%s\" is not a valid value. Default value " #~ "will be used." #~ msgstr "" #~ "Ошибйка наÑтроек. Retry: \"%s\" не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым значением. Будет " #~ "иÑпользовано значение по умолчанию." #~ msgid "Retry: %d" #~ msgstr "ÐŸÐ¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°: %d" #~ msgid "Key: %s, cert: %s" #~ msgstr "Ключ: %s, Ñертификат: %s" #~ msgid "The service won't be registered." #~ msgstr "Ð¡ÐµÑ€Ð²Ð¸Ñ Ð½Ðµ будет зарегиÑтрирован." #~ msgid "Configuration error. Missing mandatory \"Period\" element." #~ msgstr "Ошибка конфигурации. ОтÑутÑтвует обÑзательный Ñлемент \"Period\"." #~ msgid "Configuration error. Missing mandatory \"Endpoint\" element." #~ msgstr "Ошибка конфигурации. ОтÑутÑтвует обÑзательный Ñлемент \"Endpoint\"." #~ msgid "Configuration error. Missing mandatory \"Expiration\" element." #~ msgstr "" #~ "Ошибка конфигурации. ОтÑутÑтвует обÑзательный Ñлемент \"Expiration\"." #~ msgid "" #~ "Service was already registered to the InfoRegistrar connecting to infosys " #~ "%s." #~ msgstr "" #~ "Ð¡ÐµÑ€Ð²Ð¸Ñ Ð±Ñ‹Ð» уже занеÑён в InfoRegistrar, подключённый к информационной " #~ "ÑиÑтеме %s." #~ msgid "" #~ "Service is successfully added to the InfoRegistrar connecting to infosys " #~ "%s." #~ msgstr "" #~ "Ð¡ÐµÑ€Ð²Ð¸Ñ ÑƒÑпешно добавлен в InfoRegistrar, подключённый к информационной " #~ "ÑиÑтеме %s." #~ msgid "Unregistred Service can not be removed." #~ msgstr "ÐезарегиÑÑ‚Ñ€Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð°Ñ Ñлужба не может быть удалена." #~ msgid "Key: %s, Cert: %s, Proxy: %s, CADir: %s CAPath" #~ msgstr "Ключ: %s, Сертификат: %s, ДоверенноÑть: %s, Каталог CA: %s, путь CA" #~ msgid "Response from the ISIS: %s" #~ msgstr "Отклик из ISIS: %s" #~ msgid "Failed to remove registration from %s ISIS" #~ msgstr "Ðе удалоÑÑŒ удалить учётную запиÑÑŒ Ñ Ñервера ISIS %s" #~ msgid "Successfuly removed registration from ISIS (%s)" #~ msgstr "УÑпешное удаление учётной запиÑи Ñ Ñервера ISIS (%s)" #~ msgid "Failed to remove registration from ISIS (%s) - %s" #~ msgstr "Ðе удалоÑÑŒ удалить учётную запиÑÑŒ Ñ Ñервера ISIS (%s) - %s" #~ msgid "Retry connecting to the ISIS (%s) %d time(s)." #~ msgstr "Повторные попытки ÑвÑзи Ñ Ñервером ISIS (%s) %d раз." #~ msgid "ISIS (%s) is not available." #~ msgstr "Сервер ISIS (%s) недоÑтупен." #~ msgid "Service removed from InfoRegistrar connecting to infosys %s." #~ msgstr "" #~ "Ð¡ÐµÑ€Ð²Ð¸Ñ ÑƒÐ´Ð°Ð»Ñ‘Ð½ из InfoRegistrar, подключённый к информационной ÑиÑтеме %s." #~ msgid "Failed to remove registration from %s EMIRegistry" #~ msgstr "Ðе удалоÑÑŒ удалить учётную запиÑÑŒ Ñ Ñервера EMIRegistry %s" #~ msgid "Successfuly removed registration from EMIRegistry (%s)" #~ msgstr "УÑпешное удаление учётной запиÑи Ñ Ñервера EMIRegistry (%s)" #~ msgid "Retry connecting to the EMIRegistry (%s) %d time(s)." #~ msgstr "Попытка повторного ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñ EMIRegistry (%s) %d раз(а)." #~ msgid "EMIRegistry (%s) is not available." #~ msgstr "ÐедоÑтупен ÑÐµÑ€Ð²Ð¸Ñ EMIRegistry (%s)." #~ msgid "Registration starts: %s" #~ msgstr "РегиÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð°Ñ‡Ð¸Ð½Ð°ÐµÑ‚ÑÑ: %s" #~ msgid "reg_.size(): %d" #~ msgstr "reg_.size(): %d" #~ msgid "Registrant has no proper URL specified. Registration end." 
#~ msgstr "Registrant не Ñодержит дейÑтвительного URL. РегиÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð¾ÐºÐ¾Ð½Ñ‡ÐµÐ½Ð°." #~ msgid "Create RegEntry XML element" #~ msgstr "Создание Ñлемента XML RegEntry" #~ msgid "ServiceID attribute calculated from Endpoint Reference" #~ msgstr "Ðтрибут ServiceID получен из опиÑÐ°Ð½Ð¸Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ входа" #~ msgid "Generation Time attribute calculated from current time" #~ msgstr "Ðтрибут Generation Time получен из текущего времени" #~ msgid "ServiceID stored: %s" #~ msgstr "Сохранён ServiceID: %s" #~ msgid "Missing service document provided by the service %s" #~ msgstr "ОтÑутÑтвует документ Ñлужбы, публикуемый ÑервиÑом %s" #~ msgid "" #~ "Missing MetaServiceAdvertisment or Expiration values provided by the " #~ "service %s" #~ msgstr "" #~ "ОтÑутÑтвуют Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð² MetaServiceAdvertisment или Expiration, " #~ "публикуемые ÑервиÑом %s" #~ msgid "Missing Type value provided by the service %s" #~ msgstr "ОтÑутÑтвует значение атрибута Type, публикуемое ÑервиÑом %s" #~ msgid "Missing Endpoint Reference value provided by the service %s" #~ msgstr "" #~ "ОтÑутÑтвует значение атрибута Endpoint Reference, публикуемое ÑервиÑом %s" #~ msgid "Registering to %s ISIS" #~ msgstr "РегиÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð° Ñервере ISIS %s" #~ msgid "Key: %s, Cert: %s, Proxy: %s, CADir: %s, CAFile" #~ msgstr "Ключ: %s, Сертификат: %s, ДоверенноÑть: %s, Каталог CA: %s, файл CA" #~ msgid "Sent RegEntries: %s" #~ msgstr "ПоÑланы RegEntries: %s" #~ msgid "Error during registration to %s ISIS" #~ msgstr "Ошибка при региÑтрации в ÑÐµÑ€Ð²Ð¸Ñ ISIS %s" #~ msgid "Successful registration to ISIS (%s)" #~ msgstr "УÑÐ¿ÐµÑˆÐ½Ð°Ñ Ñ€ÐµÐ³Ð¸ÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð² ÑÐµÑ€Ð²Ð¸Ñ ISIS (%s)" #~ msgid "Failed to register to ISIS (%s) - %s" #~ msgstr "Сбой региÑтрации в ÑÐµÑ€Ð²Ð¸Ñ ISIS (%s) - %s" #~ msgid "Registration ends: %s" #~ msgstr "Конец региÑтрации: %s" #~ msgid "Waiting period is %d second(s)." #~ msgstr "Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ - %d Ñекунд(Ñ‹)." 
#~ msgid "Registration exit: %s" #~ msgstr "Выход из региÑтрации: %s" #~ msgid "Registering to %s EMIRegistry" #~ msgstr "РегиÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð° Ñервере EMIRegistry %s" #~ msgid "Sent entry: %s" #~ msgstr "Отправлена запиÑÑŒ: %s" #~ msgid "Error during %s to %s EMIRegistry" #~ msgstr "Сбой операции %s по отношению к ÑервиÑу EMIRegistry %s" #~ msgid "Successful %s to EMIRegistry (%s)" #~ msgstr "" #~ "УÑпешное завершение операции %s по отношению к ÑервиÑу EMIRegistry (%s)" #~ msgid "Failed to %s to EMIRegistry (%s) - %d" #~ msgstr "" #~ "Ðе удалоÑÑŒ выполнить операцию %s по отношению к ÑервиÑу EMIRegistry (%s) " #~ "- %d" #~ msgid "Cannot initialize winsock library" #~ msgstr "Ðе удалоÑÑŒ инициализировать библиотеку winsock" #~ msgid "Failed processing user mapping command: unixmap %s" #~ msgstr "Сбой работы команды ÑоответÑÑ‚Ð²Ð¸Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ: unixmap %s" #~ msgid "Failed processing user mapping command: unixgroup %s" #~ msgstr "Сбой работы команды припиÑки пользователей: unixgroup %s" #~ msgid "LDAP authorization is not supported anymore" #~ msgstr "ÐÐ²Ñ‚Ð¾Ñ€Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð´Ð»Ñ LDAP больше не поддерживаетÑÑ" #~ msgid "User name mapping has empty VO: %s" #~ msgstr "ПуÑÑ‚Ð°Ñ VO в приÑвоении имени пользователÑ: %s" #~ msgid "Linking mapped file - can't link on Windows" #~ msgstr "" #~ "СоздаётÑÑ ÑимволичеÑÐºÐ°Ñ ÑÑылка на ÑоответÑтвующий файл - невыполнимо на " #~ "Windows" #~ msgid "process: response is not SOAP" #~ msgstr "процеÑÑ: ответ не ÑвлÑетÑÑ Ð´Ð¾ÐºÑƒÐ¼ÐµÐ½Ñ‚Ð¾Ð¼ SOAP" #~ msgid "Storing configuration in temporary file %s" #~ msgstr "ЗапиÑÑŒ наÑтроек во временный файл %s" #~ msgid "Failed to process service configuration" #~ msgstr "Ðе удалоÑÑŒ обработать наÑтройки ÑервиÑа" #~ msgid "Provided LRMSName is not a valid URL: %s" #~ msgstr "Указанное значение LRMSName не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым URL: %s" #~ msgid "" #~ "No LRMSName is provided. This is needed if you wish to completely comply " #~ "with the BES specifications." #~ msgstr "" #~ "Ðе задан атрибут LRMSName. Он необходим Ð´Ð»Ñ Ð¿Ð¾Ð»Ð½Ð¾Ð³Ð¾ ÑоответÑÑ‚Ð²Ð¸Ñ " #~ "Ñпецификации интерфейÑа BES." 
#~ msgid "" #~ "ChangeActivityStatus: request = \n" #~ "%s" #~ msgstr "" #~ "ChangeActivityStatus: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "ChangeActivityStatus: no ActivityIdentifier found" #~ msgstr "ChangeActivityStatus: не найден ActivityIdentifier" #~ msgid "ChangeActivityStatus: EPR contains no JobID" #~ msgstr "ChangeActivityStatus: EPR не Ñодержит JobID" #~ msgid "ChangeActivityStatus: no job found: %s" #~ msgstr "ChangeActivityStatus: задача не найдена: %s" #~ msgid "ChangeActivityStatus: missing NewStatus element" #~ msgstr "ChangeActivityStatus: отÑутÑтвует Ñлемент NewStatus" #~ msgid "ChangeActivityStatus: Failed to accept delegation" #~ msgstr "ChangeActivityStatus: невозможно принÑть делегирование" #~ msgid "ChangeActivityStatus: old BES state does not match" #~ msgstr "ChangeActivityStatus: не найдено ÑоответÑÑ‚Ð²Ð¸Ñ Ñтарому ÑоÑтоÑнию BES" #~ msgid "ChangeActivityStatus: old A-REX state does not match" #~ msgstr "" #~ "ChangeActivityStatus: не найдено ÑоответÑÑ‚Ð²Ð¸Ñ Ñтарому ÑоÑтоÑнию A-REX" #~ msgid "ChangeActivityStatus: Failed to update credentials" #~ msgstr "ChangeActivityStatus: невозможно обновить параметры доÑтупа" #~ msgid "ChangeActivityStatus: Failed to resume job" #~ msgstr "ChangeActivityStatus: невозможно возобновить задачу" #~ msgid "ChangeActivityStatus: State change not allowed: from %s/%s to %s/%s" #~ msgstr "" #~ "ChangeActivityStatus: недопуÑтимое изменение ÑоÑтоÑниÑ: Ñ %s/%s на %s/%s" #~ msgid "" #~ "ChangeActivityStatus: response = \n" #~ "%s" #~ msgstr "" #~ "ChangeActivityStatus: ответ = \n" #~ "%s" #~ msgid "" #~ "CreateActivity: request = \n" #~ "%s" #~ msgstr "" #~ "CreateActivity: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "CreateActivity: no job description found" #~ msgstr "CreateActivity: ОпиÑание задачи не найдено" #~ msgid "CreateActivity: max jobs total limit reached" #~ msgstr "" #~ "CreateActivity: доÑтигнут макÑимальный предел общего количеÑтва задач" #~ msgid "CreateActivity: Failed to accept delegation" #~ msgstr "CreateActivity: Сбой при принÑтии делегированиÑ" #~ msgid "CreateActivity: Failed to create new job: %s" #~ msgstr "CreateActivity: Ðе удалоÑÑŒ Ñоздать новую задачу: %s" #~ msgid "CreateActivity: Failed to create new job" #~ msgstr "CreateActivity: Ðе удалоÑÑŒ Ñоздать новую задачу" #~ msgid "CreateActivity finished successfully" #~ msgstr "CreateActivity закончилоÑÑŒ уÑпешно" #~ msgid "" #~ "CreateActivity: response = \n" #~ "%s" #~ msgstr "" #~ "CreateActivity: ответ = \n" #~ "%s" #~ msgid "Get: can't process file %s" #~ msgstr "Get: невозможно обработать файл %s" #~ msgid "Head: can't process file %s" #~ msgstr "Head: невозможно обработать файл %s" #~ msgid "http_get: start=%llu, end=%llu, burl=%s, hpath=%s" #~ msgstr "http_get: start=%llu, end=%llu, burl=%s, hpath=%s" #~ msgid "" #~ "GetActivityDocuments: request = \n" #~ "%s" #~ msgstr "" #~ "GetActivityDocuments: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "GetActivityDocuments: non-AREX job requested" #~ msgstr "GetActivityDocuments: Ð·Ð°Ð¿Ñ€Ð¾ÑˆÐµÐ½Ð½Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° не контролируетÑÑ AREX" #~ msgid "GetActivityDocuments: job %s - %s" #~ msgstr "GetActivityDocuments: задача %s - %s" #~ msgid "" #~ "GetActivityDocuments: response = \n" #~ "%s" #~ msgstr "" #~ "GetActivityDocuments: ответ = \n" #~ "%s" #~ msgid "" #~ "GetActivityStatuses: request = \n" #~ "%s" #~ msgstr "" #~ "GetActivityStatuses: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "GetActivityStatuses: unknown verbosity level requested: %s" #~ msgstr "GetActivityStatuses: запрошен неизвеÑтный уровень отладки: %s" #~ msgid 
"GetActivityStatuses: job %s - can't understand EPR" #~ msgstr "GetActivityStatuses: задание %s - невозможно интерпретировать EPR" #~ msgid "GetActivityStatuses: job %s - %s" #~ msgstr "GetActivityStatuses: задача %s - %s" #~ msgid "" #~ "GetActivityStatuses: response = \n" #~ "%s" #~ msgstr "" #~ "GetActivityStatuses: ответ = \n" #~ "%s" #~ msgid "" #~ "GetFactoryAttributesDocument: request = \n" #~ "%s" #~ msgstr "" #~ "GetFactoryAttributesDocument: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "" #~ "GetFactoryAttributesDocument: response = \n" #~ "%s" #~ msgstr "" #~ "GetFactoryAttributesDocument: ответ = \n" #~ "%s" #~ msgid "" #~ "Usage: %s -I -U -P -L [-c " #~ "] [-p ] [-d ]" #~ msgstr "" #~ "ИÑпользование: %s -I <задача> -U <пользователь> -P <доверенноÑть> -L " #~ "<файл ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸> [-c <Ð¿Ñ€ÐµÑ„Ð¸ÐºÑ ceID>] [-p <Ð¿Ñ€ÐµÑ„Ð¸ÐºÑ Ð¶ÑƒÑ€Ð½Ð°Ð»Ð°> ] [-d " #~ "<отладка>]" #~ msgid "" #~ "Usage: %s [-N] -P -L [-c ] [-d " #~ "]" #~ msgstr "" #~ "ИÑпользование: %s [-N] -P <доверенноÑть> -L <файл ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸> [-c " #~ "<файл наÑтроек>] [-d <отладка>]" #~ msgid "User proxy file is required but is not specified" #~ msgstr "Файл доверенноÑти Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½ÐµÐ¾Ð±Ñ…Ð¾Ð´Ð¸Ð¼, но не указан" #~ msgid "Local job status file is required" #~ msgstr "Ðеобходимо указать файл ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #~ msgid "Making the decision for the queue %s" #~ msgstr "ПринимаетÑÑ Ñ€ÐµÑˆÐµÐ½Ð¸Ðµ Ð´Ð»Ñ Ð¾Ñ‡ÐµÑ€ÐµÐ´Ð¸ %s" #~ msgid "Can not parse the configuration file %s" #~ msgstr "Ðевозможно обработать файл наÑтроек %s" #~ msgid "Can not find queue '%s' in the configuration file" #~ msgstr "Ðе удалоÑÑŒ обнаружить очередь '%s' в файле наÑтроек" #~ msgid "No access policy to check, returning success" #~ msgstr "Ðет политик доÑтупа, нуждающихÑÑ Ð² Ñверке, уÑпешное завершение" #~ msgid "CA certificates directory %s does not exist" #~ msgstr "Каталог Ñертификатов агентÑтв CA %s не ÑущеÑтвует" #~ msgid "User proxy certificate is not valid" #~ msgstr "ДоверенноÑть Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½ÐµÐ´ÐµÐ¹Ñтвительна" #~ msgid "Getting VOMS AC for: %s" #~ msgstr "Извлечение Ñертификата атрибутов VOMS AC длÑ: %s" #~ msgid "Checking a match for '%s'" #~ msgstr "Проверка ÑÐ¾Ð²Ð¿Ð°Ð´ÐµÐ½Ð¸Ñ Ð´Ð»Ñ '%s'" #~ msgid "FQAN '%s' IS a match to '%s'" #~ msgstr "Полный атрибут '%s' СОВПÐДÐЕТ Ñ '%s'" #~ msgid "" #~ "Queue '%s' usage is prohibited to FQAN '%s' by the site access policy" #~ msgstr "" #~ "ИÑпользование очереди '%s' запрещено Ð´Ð»Ñ Ð¿Ð¾Ð»Ð½Ð¾Ð³Ð¾ атрибута '%s' в " #~ "ÑоответÑтвии Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð¹ политикой доÑтупа" #~ msgid "FQAN '%s' IS NOT a match to '%s'" #~ msgstr "Полный атрибут '%s' ÐЕ СОВПÐДÐЕТ Ñ '%s'" #~ msgid "" #~ "Queue '%s' usage with provided FQANs is prohibited by the site access " #~ "policy" #~ msgstr "" #~ "ИÑпользование очереди '%s' запрещено Ð´Ð»Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ñ… полных атрибутов в " #~ "ÑоответÑтвии Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð¹ политикой доÑтупа" #~ msgid "Can't interpret configuration file %s as XML" #~ msgstr "Ðе удалоÑÑŒ разобрать файл наÑтроек %s как XML" #~ msgid "Wrong number in jobreport_period: %s" #~ msgstr "ÐедопуÑтимое чиÑло в jobreport_period: %s" #~ msgid "Wrong number in jobreport_period: %d, minimal value: %s" #~ msgstr "ÐедопуÑтимое чиÑло в jobreport_period: %d, наименьшее значение: %s" #~ msgid "defaultlrms is empty" #~ msgstr "пуÑтое значение defaultlrms" #~ msgid "Wrong number for timeout in plugin command" #~ msgstr "" #~ "ÐедопуÑтимое значение времени Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð² инÑтрукции подключаемого модулÑ" #~ msgid "Value for maxJobsTracked is incorrect number" #~ 
msgstr "Значение maxJobsTracked не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым чиÑлом" #~ msgid "Value for maxJobsRun is incorrect number" #~ msgstr "Значение maxJobsRun не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым чиÑлом" #~ msgid "Value for maxJobsTotal is incorrect number" #~ msgstr "Значение maxJobsTotal не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым чиÑлом" #~ msgid "Value for maxJobsPerDN is incorrect number" #~ msgstr "Значение maxJobsPerDN не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым чиÑлом" #~ msgid "Value for wakeupPeriod is incorrect number" #~ msgstr "Значение wakeupPeriod не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым чиÑлом" #~ msgid "Value for maxScripts is incorrect number" #~ msgstr "Значение maxScripts не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым чиÑлом" #~ msgid "serviceMail is empty" #~ msgstr "пуÑтой serviceMail" #~ msgid "Type in LRMS is missing" #~ msgstr "ОтÑутÑтвует тип в СУПО" #~ msgid "LRMS is missing" #~ msgstr "ОтÑутÑтвует СУПО" #~ msgid "State name for authPlugin is missing" #~ msgstr "ОтÑутÑтвует наименование ÑоÑтоÑÐ½Ð¸Ñ Ð¼Ð¾Ð´ÑƒÐ»Ñ authPlugin" #~ msgid "Command for authPlugin is missing" #~ msgstr "ОтÑутÑтвует команда Ð´Ð»Ñ Ð¼Ð¾Ð´ÑƒÐ»Ñ authPlugin" #~ msgid "Registering plugin for state %s; options: %s; command: %s" #~ msgstr "" #~ "РегиÑтрируетÑÑ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ñ‹Ð¹ модуль Ð´Ð»Ñ ÑоÑтоÑÐ½Ð¸Ñ %s; опции: %s; команда: " #~ "%s" #~ msgid "Command for localCred is missing" #~ msgstr "ОтÑутÑтвует команда Ð´Ð»Ñ Ð¼Ð¾Ð´ÑƒÐ»Ñ localCred" #~ msgid "Timeout for localCred is missing" #~ msgstr "ОтÑутÑтвует тайм-аут Ð´Ð»Ñ Ð¼Ð¾Ð´ÑƒÐ»Ñ localCred" #~ msgid "Timeout for localCred is incorrect number" #~ msgstr "ÐедопуÑтимое значение тайм-аута Ð´Ð»Ñ Ð¼Ð¾Ð´ÑƒÐ»Ñ localCred" #~ msgid "Control element must be present" #~ msgstr "Элемент Control должен приÑутÑтвовать" #~ msgid "controlDir is missing" #~ msgstr "ОтÑутÑтвует controlDir" #~ msgid "sessionRootDir is missing" #~ msgstr "ОтÑутÑтвует sessionRootDir" #~ msgid "Attribute drain for sessionRootDir is incorrect boolean" #~ msgstr "" #~ "Значение атрибута drain Ð´Ð»Ñ sessionRootDir не ÑвлÑетÑÑ Ð²ÐµÑ€Ð½Ñ‹Ð¼ булевÑким" #~ msgid "The fixDirectories element is incorrect value" #~ msgstr "Значение Ñлемента fixDirectories неверно" #~ msgid "The delegationDB element is incorrect value" #~ msgstr "Значение Ñлемента delegationDB неверно" #~ msgid "The maxReruns element is incorrect number" #~ msgstr "Значение Ñлемента maxReruns не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым чиÑлом" #~ msgid "The noRootPower element is incorrect number" #~ msgstr "Значение Ñлемента noRootPower не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым чиÑлом" #~ msgid "The defaultTTL element is incorrect number" #~ msgstr "Значение Ñлемента defaultTTL не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым чиÑлом" #~ msgid "The defaultTTR element is incorrect number" #~ msgstr "Значение Ñлемента defaultTTR не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым чиÑлом" #~ msgid "Command in helperUtility is missing" #~ msgstr "ОтÑутÑтвует команда в модуле helperUtility" #~ msgid "Username in helperUtility is empty" #~ msgstr "Ðе указано Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð² модуле helperUtility" #~ msgid "\tRemote cache : %s" #~ msgstr "\tУдалённый кÑш : %s" #~ msgid "\tRemote cache link: %s" #~ msgstr "\tСÑылка на удалённый кÑш: %s" #~ msgid "wrong boolean in %s" #~ msgstr "Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð±ÑƒÐ»ÐµÐ²Ð° Ð¿ÐµÑ€ÐµÐ¼ÐµÐ½Ð½Ð°Ñ Ð² %s" #~ msgid "wrong number in %s" #~ msgstr "неверное чиÑло в %s" #~ msgid "Can't interpret configuration file as XML" #~ msgstr "Ðе удалоÑÑŒ разобрать файл наÑтроек как XML" #~ msgid "Bad value for debug" #~ msgstr "ÐедопуÑтимое значение debug" #~ msgid "Bad URL in deliveryService: %s" #~ msgstr "ÐедопуÑтимый URL в deliveryService: %s" #~ msgid "Value 
for 'link' element in mapURL is incorrect" #~ msgstr "Значение Ñлемента 'link' в mapURL неверно" #~ msgid "Missing 'from' element in mapURL" #~ msgstr "ОтÑутÑтвующий Ñлемент 'from' в mapURL" #~ msgid "Missing 'to' element in mapURL" #~ msgstr "ОтÑутÑтвующий Ñлемент 'to' в mapURL" #~ msgid "Failed to run plugin" #~ msgstr "Ошибка иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð¼Ð¾Ð´ÑƒÐ»Ñ" #~ msgid "Plugin failed: %s" #~ msgstr "Сбой модулÑ: %s" #~ msgid "empty argument to remotegmdirs" #~ msgstr "не задан аргумент remotegmdirs" #~ msgid "bad arguments to remotegmdirs" #~ msgstr "неверные аргументы remotegmdirs" #~ msgid "Failed processing grid-manager configuration" #~ msgstr "Ðе удалоÑÑŒ обработать наÑтройки grid-manager" #~ msgid "%s: Destroying" #~ msgstr "%s: УничтожаетÑÑ" #~ msgid "%s: Can't read state - no comments, just cleaning" #~ msgstr "" #~ "%s: Ðевозможно прочеÑть ÑоÑтоÑние - никаких комментариев, проÑто чиÑтка" #~ msgid "%s: Cleaning control and session directories" #~ msgstr "%s: ОчиÑтка управлÑющей" #~ msgid "%s: This job may be still running - canceling" #~ msgstr "%s: Эта задача, возможно, ещё иÑполнÑетÑÑ - прерывание" #~ msgid "%s: Cancellation failed (probably job finished) - cleaning anyway" #~ msgstr "" #~ "%s: Прерывание не удалоÑÑŒ (вероÑтно, задача закончилаÑÑŒ) - вÑÑ‘ равно " #~ "удалÑем" #~ msgid "%s: Cancellation probably succeeded - cleaning" #~ msgstr "%s: Прерывание, вероÑтно, удалоÑÑŒ - удаление" #~ msgid "Invalid checksum in %s for %s" #~ msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма в %s Ð´Ð»Ñ %s" #~ msgid "Invalid file size in %s for %s " #~ msgstr "ÐедопуÑтимый размер файла в %s Ð´Ð»Ñ %s " #~ msgid "Invalid file: %s is too big." #~ msgstr "Ðеверный файл: %s Ñлишком велик." #~ msgid "Error accessing file %s" #~ msgstr "Ошибка доÑтупа к файлу %s" #~ msgid "Error reading file %s" #~ msgstr "Ошибка при чтении файла %s" #~ msgid "File %s has wrong CRC." #~ msgstr "У файла %s Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма CRC." 
#~ msgid "Failed downloading file %s - %s" #~ msgstr "Сбой загрузки файла %s - %s" #~ msgid "Retrying" #~ msgstr "Повтор" #~ msgid "Downloaded file %s" #~ msgstr "Загружен файл %s" #~ msgid "Wrong number of threads: %s" #~ msgstr "Ðеверное чиÑло потоков: %s" #~ msgid "Wrong number of files: %s" #~ msgstr "Ðеверное количеÑтво файлов: %s" #~ msgid "Bad number: %s" #~ msgstr "Ðеверное чиÑло: %s" #~ msgid "Specified user can't be handled" #~ msgstr "Указанный пользователь не может быть обработан" #~ msgid "Missing parameter for option %c" #~ msgstr "ОтÑутÑтвует параметр Ð´Ð»Ñ Ð¾Ð¿Ñ†Ð¸Ð¸ %c" #~ msgid "Undefined processing error" #~ msgstr "ÐÐµÐ¾Ð¿Ñ€ÐµÐ´ÐµÐ»Ñ‘Ð½Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° при обработке" #~ msgid "Missing job id" #~ msgstr "ОтÑутÑтвует Ñрлык задачи" #~ msgid "Missing control directory" #~ msgstr "ОтÑутÑтвует каталог контролÑ" #~ msgid "Missing session directory" #~ msgstr "ОтÑутÑтвует каталог ÑеанÑов" #~ msgid "Minimal speed: %llu B/s during %i s" #~ msgstr "ÐœÐ¸Ð½Ð¸Ð¼Ð°Ð»ÑŒÐ½Ð°Ñ ÑкороÑть: %llu Б/Ñ Ð² течение %i Ñ" #~ msgid "Minimal average speed: %llu B/s" #~ msgstr "ÐœÐ¸Ð½Ð¸Ð¼Ð°Ð»ÑŒÐ½Ð°Ñ ÑреднÑÑ ÑкороÑть: %llu B/s" #~ msgid "Maximal inactivity time: %i s" #~ msgstr "МакÑимальное Ð²Ñ€ÐµÐ¼Ñ Ð±ÐµÐ·Ð´ÐµÐ¹ÑтвиÑ: %i s" #~ msgid "Won't use more than 10 threads" #~ msgstr "Будет иÑпользовано не более 10-и потоков" #~ msgid "Downloader started" #~ msgstr "Загрузчик запущен" #~ msgid "Can't read list of input files" #~ msgstr "Ðевозможно прочеÑть ÑпиÑок входных файлов" #~ msgid "Error: duplicate file in list of input files: %s" #~ msgstr "Ошибка: дублированное Ð¸Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð° в ÑпиÑке входных файлов: %s" #~ msgid "Can't read list of output files" #~ msgstr "Ðевозможно прочеÑть ÑпиÑок выходных файлов" #~ msgid "Can't remove junk files" #~ msgstr "Ðевозможно удалить ненужные файлы" #~ msgid "Can't read job local description" #~ msgstr "Ðевозможно прочеÑть локальное опиÑание задачи" #~ msgid "Local source for download: %s" #~ msgstr "Локальный иÑточник загрузки: %s" #~ msgid "Can't accept URL: %s" #~ msgstr "Ðеприемлемый URL: %s" #~ msgid "Failed to initiate file transfer: %s - %s" #~ msgstr "Ðевозможно запуÑтить передачу файлов: %s - %s" #~ msgid "Downloaded %s" #~ msgstr "Загружен %s" #~ msgid "Failed to download (but may be retried) %s" #~ msgstr "Ðе удалоÑÑŒ загрузить (возможна Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°) %s" #~ msgid "Failed to download %s" #~ msgstr "Ðе удалоÑÑŒ загрузить %s" #~ msgid "Some downloads failed" #~ msgstr "Ðекоторые загрузки не удалиÑÑŒ" #~ msgid "Some downloads failed, but may be retried" #~ msgstr "Ðекоторые загрузки не удалиÑÑŒ (возможна Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°)" #~ msgid "Failed writing changed input file" #~ msgstr "Ðе удалоÑÑŒ запиÑать изменившийÑÑ Ð²Ñ…Ð¾Ð´Ð½Ð¾Ð¹ файл" #~ msgid "Checking user uploadable file: %s" #~ msgstr "Проверка отгружаемого файла пользователÑ: %s" #~ msgid "User has uploaded file %s" #~ msgstr "Пользователь отгрузил файл %s" #~ msgid "Failed writing changed input file." #~ msgstr "Ðе удалоÑÑŒ запиÑать изменившийÑÑ Ð²Ñ…Ð¾Ð´Ð½Ð¾Ð¹ файл." 
#~ msgid "Critical error for uploadable file %s" #~ msgstr "КритичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° Ð´Ð»Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ¶Ð°ÐµÐ¼Ð¾Ð³Ð¾ файла %s" #~ msgid "No changes in uploadable files for %u seconds" #~ msgstr "Ðикаких изменений в отгружаемых файлах в течение %u Ñек" #~ msgid "Uploadable files timed out" #~ msgstr "ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ¶Ð°ÐµÐ¼Ñ‹Ñ… файлов" #~ msgid "Leaving downloader (%i)" #~ msgstr "Выход из загрузчика (%i)" #~ msgid "Failed uploading file %s - %s" #~ msgstr "Ðе удалоÑÑŒ отгрузить файл %s - %s" #~ msgid "Uploaded file %s" #~ msgstr "Закачан файл %s" #~ msgid "Uploader started" #~ msgstr "Отгрузчик запущен" #~ msgid "Reading output files from user generated list in %s" #~ msgstr "Чтение выходных файлов в ÑпиÑке Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %s" #~ msgid "Error reading user generated output file list in %s" #~ msgstr "Ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ ÑпиÑка выходных файлов Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð² %s" #~ msgid "Two identical output destinations: %s" #~ msgstr "Два одинаковых Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð²Ñ‹Ð´Ð°Ñ‡Ð¸: %s" #~ msgid "Cannot upload two different files %s and %s to same LFN: %s" #~ msgstr "Ðевозможно запиÑать два разных файла %s и %s Ñ Ð¾Ð´Ð½Ð¸Ð¼ LFN: %s" #~ msgid "Local destination for uploader %s" #~ msgstr "Локальный файл-приёмник Ð´Ð»Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ·Ñ‡Ð¸ÐºÐ° %s" #~ msgid "Uploaded %s" #~ msgstr "Отгружен %s" #~ msgid "Failed writing output status file" #~ msgstr "Ðе удалоÑÑŒ запиÑать выходной файл ÑоÑтоÑниÑ" #~ msgid "Failed to upload (but may be retried) %s" #~ msgstr "Ðе удалоÑÑŒ выгрузить (возможна Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°) %s" #~ msgid "Failed to upload %s" #~ msgstr "Ðе удалоÑÑŒ отгрузить %s" #~ msgid "Some uploads failed" #~ msgstr "Ðекоторые отгрузки не удалиÑÑŒ" #~ msgid "Writing back dynamic output file %s" #~ msgstr "ЗапиÑÑŒ динамичеÑкого ÑпиÑка выходных файлов %s" #~ msgid "Failed to rewrite output file list %s. Job resuming may not work" #~ msgstr "" #~ "Ðе удалоÑÑŒ перезапиÑать ÑпиÑок выходных файлов %s. 
ПерезапуÑк задач может " #~ "не работать" #~ msgid "Some uploads failed, but (some) may be retried" #~ msgstr "" #~ "Ðекоторые выгрузки не удалиÑÑŒ (Ð´Ð»Ñ Ð½ÐµÐºÐ¾Ñ‚Ð¾Ñ€Ñ‹Ñ… возможна Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°)" #~ msgid "Failed writing changed output file" #~ msgstr "Ðе удалоÑÑŒ запиÑать изменившийÑÑ Ð²Ñ‹Ñ…Ð¾Ð´Ð½Ð¾Ð¹ файл" #~ msgid "Leaving uploader (%i)" #~ msgstr "Отгрузчик покидаетÑÑ (%i)" #~ msgid "system retval: %d" #~ msgstr "СиÑтемное значение retval: %d" #~ msgid "" #~ "MigrateActivity: request = \n" #~ "%s" #~ msgstr "" #~ "MigrateActivity: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "MigrateActivitys: no ActivityIdentifier found" #~ msgstr "MigrateActivitys: не обнаружен ActivityIdentifier" #~ msgid "MigrateActivity: EPR contains no JobID" #~ msgstr "MigrateActivity: EPR не Ñодержит JobID" #~ msgid "MigrateActivity: Failed to accept delegation" #~ msgstr "MigrateActivity: невозможно принÑть делегирование" #~ msgid "MigrateActivity: no job description found" #~ msgstr "MigrateActivity: не обнаружено опиÑание задачи" #~ msgid "Migration XML sent to AREXJob: %s" #~ msgstr "Миграционный документ XML поÑлан к AREXJob: %s" #~ msgid "MigrateActivity: Failed to migrate new job: %s" #~ msgstr "MigrateActivity: невозможно мигрировать новую задачу: %s" #~ msgid "MigrateActivity: Failed to migrate new job" #~ msgstr "MigrateActivity: невозможно мигрировать новую задачу" #~ msgid "MigrateActivity finished successfully" #~ msgstr "MigrateActivity уÑпешно завершён" #~ msgid "" #~ "MigrateActivity: response = \n" #~ "%s" #~ msgstr "" #~ "MigrateActivity: отзыв = \n" #~ "%s" #~ msgid "Put: there is no job: %s - %s" #~ msgstr "Put: задача отÑутÑтвует: %s - %s" #~ msgid "Put: there is no payload for file %s in job: %s" #~ msgstr "Put: отÑутÑтвует Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ файле %s в задании: %s" #~ msgid "Put: unrecognized payload for file %s in job: %s" #~ msgstr "Put: Ð½ÐµÐ¿Ñ€Ð¸ÐµÐ¼Ð»ÐµÐ¼Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ файле %s в задании: %s" #~ msgid "Put: failed to create file %s for job %s - %s" #~ msgstr "Put: не удалоÑÑŒ Ñоздать файл %s Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ð½Ð¸Ñ %s - %s" #~ msgid "Put: failed to set position of file %s for job %s to %Lu - %s" #~ msgstr "" #~ "Put: не удалоÑÑŒ уÑтановить позицию файла %s Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ð½Ð¸Ñ %s на %Lu - %s" #~ msgid "Put: failed to allocate memory for file %s in job %s" #~ msgstr "Put: не удалоÑÑŒ зарезервировать памÑть Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° %s в задании %s" #~ msgid "" #~ "TerminateActivities: request = \n" #~ "%s" #~ msgstr "" #~ "TerminateActivities: Ð·Ð°Ð¿Ñ€Ð¾Ñ = \n" #~ "%s" #~ msgid "TerminateActivities: non-AREX job requested" #~ msgstr "TerminateActivities: запрошена задача, неÑовмеÑÑ‚Ð¸Ð¼Ð°Ñ Ñ AREX" #~ msgid "TerminateActivities: job %s - %s" #~ msgstr "TerminateActivities: задача %s - %s" #~ msgid "" #~ "TerminateActivities: response = \n" #~ "%s" #~ msgstr "" #~ "TerminateActivities: ответ = \n" #~ "%s" #~ msgid "Response is not expected WS-RP" #~ msgstr "Отзыв не ÑвлÑетÑÑ Ð¾Ð¶Ð¸Ð´Ð°ÐµÐ¼Ñ‹Ð¼ WS-RP" #~ msgid "CacheService: Unauthorized" #~ msgstr "CacheService: Ðет допуÑка" #~ msgid "Only POST is supported in CacheService" #~ msgstr "CacheService поддерживает только POST" #~ msgid "Connecting to %s:%i" #~ msgstr "Соединение Ñ %s:%i" #~ msgid "Querying at %s" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ðº %s" #~ msgid "Failed to get results from LDAP server %s" #~ msgstr "Ðе удалоÑÑŒ получить информацию Ñ Ñервера LDAP %s" #~ msgid "LDAP authorization is not supported" #~ msgstr "ÐÐ²Ñ‚Ð¾Ñ€Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð´Ð»Ñ LDAP не поддерживаетÑÑ" #~ msgid "" #~ "Configuration section [vo] is missing name. 
Check for presence of name= " #~ "or vo= option." #~ msgstr "" #~ "Раздел наÑтроек [vo] не Ñодержит имени. УбедитеÑÑŒ в наличии опций name= " #~ "или vo= ." #~ msgid "Missing option for command daemon" #~ msgstr "Пропущены наÑтраиваемые параметры Ð´Ð»Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð½Ð¾Ð³Ð¾ демона" #~ msgid "Wrong option in daemon" #~ msgstr "Ðеверные опции в демоне" #~ msgid "Improper size of log '%s'" #~ msgstr "ÐедопуÑтимый размер журнала '%s'" #~ msgid "Improper number of logs '%s'" #~ msgstr "ÐедопуÑтимое количеÑтво журналов '%s'" #~ msgid "Improper argument for logsize '%s'" #~ msgstr "ÐедопуÑтимый аргумент Ð´Ð»Ñ Ñ€Ð°Ð·Ð¼ÐµÑ€Ð° журнала '%s'" #~ msgid "" #~ "Central configuration file is missing at guessed location:\n" #~ " /etc/arc.conf\n" #~ "Use ARC_CONFIG variable for non-standard location" #~ msgstr "" #~ "Общий файл наÑтроек отÑутÑтвует в обычном меÑте:\n" #~ " /etc/arc.conf\n" #~ "ИÑпользуйте переменную Ñреды ARC_CONFIG Ð´Ð»Ñ Ð½ÐµÐ¾Ð±Ñ‹Ñ‡Ð½Ñ‹Ñ… меÑÑ‚" #~ msgid "couldn't open file %s" #~ msgstr "не удалоÑÑŒ открыть файл %s" #~ msgid "unknown (non-gridmap) user is not allowed" #~ msgstr "неизвеÑтный (не занеÑённый в gridmap) пользователь не допуÑкаетÑÑ" #~ msgid "couldn't process VO configuration" #~ msgstr "не удалоÑÑŒ обработать наÑтройки ВО" #~ msgid "can't parse configuration line: %s %s %s %s" #~ msgstr "невозможно разобрать Ñтроку наÑтроек: %s %s %s %s" #~ msgid "bad directory in plugin command: %s" #~ msgstr "неверный каталог в команде подключаемого модулÑ: %s" #~ msgid "unsupported configuration command: %s" #~ msgstr "Ð½ÐµÐ¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ð°Ñ Ð¸Ð½ÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ Ð½Ð°Ñтроек: %s" #~ msgid "improper attribute for allowunknown command: %s" #~ msgstr "недопуÑтимый атрибут команды allowunknown: %s" #~ msgid "Mapfile is missing at %s" #~ msgstr "Файл припиÑки пользователей отÑутÑтвует в %s" #~ msgid "There is no local mapping for user" #~ msgstr "Пользователь не припиÑан ни к одному локальному имени" #~ msgid "There is no local name for user" #~ msgstr "Локальное Ð¸Ð¼Ñ Ð¿Ñ€Ð¸Ð¿Ð¸Ñки Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½Ðµ указано" #~ msgid "Initially mapped to local user: %s" #~ msgstr "ÐÐ°Ñ‡Ð°Ð»ÑŒÐ½Ð°Ñ Ð¿Ñ€Ð¸Ð¿Ð¸Ñка к локальному имени пользователÑ: %s" #~ msgid "Initially mapped to local group: %s" #~ msgstr "ÐŸÑ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð¿Ñ€Ð¸Ð²Ñзка к локальной группе: %s" #~ msgid "Local user does not exist" #~ msgstr "Локальное Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½Ðµ ÑущеÑтвует" #~ msgid "config: %s, class name: %s" #~ msgstr "наÑтройки: %s, клаÑÑ: %s" #~ msgid "libjvm.so not loadable - check your LD_LIBRARY_PATH" #~ msgstr "libjvm.so не можетр быть подгружена - проверьте LD_LIBRARY_PATH" #~ msgid "libjvm.so does not contain the expected symbols" #~ msgstr "libjvm.so не Ñодержит ожидаемых Ñимволов" #~ msgid "JVM started" #~ msgstr "Запущена JVM" #~ msgid "There is no service: %s in your Java class search path" #~ msgstr "ОтÑутÑтвие уÑлуги %s в пути поиÑка клаÑÑов Java" #~ msgid "There is no constructor function" #~ msgstr "ОтÑутÑтвует конÑтруктор" #~ msgid "%s constructed" #~ msgstr "%s Ñоздан" #~ msgid "Destroy JVM" #~ msgstr "Уничтожение JVM" #~ msgid "Cannot find MCC_Status object" #~ msgstr "Ðе удалоÑÑŒ обнаружить объект MCC_Status" #~ msgid "Java object returned NULL status" #~ msgstr "Объект Java возвратил ÑÑ‚Ð°Ñ‚ÑƒÑ NULL" #~ msgid "" #~ "The 'remote_host' attribute value is empty - a host name was expected" #~ msgstr "Значение Ñлемента 'remote_host' пуÑто - ожидалоÑÑŒ название реÑурÑа" #~ msgid "The 'remoteHost' element value is empty - a host name was expected" #~ msgstr "Значение Ñлемента 
'remoteHost' пуÑто - ожидалоÑÑŒ название реÑурÑа" #, fuzzy #~ msgid "Failed processing user mapping command: unixlistmap %s" #~ msgstr "Сбой работы команды ÑоответÑÑ‚Ð²Ð¸Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ: unixmap %s" #, fuzzy #~ msgid "failed to initialize environment variables" #~ msgstr "Сбой инициализации параметров доÑтупа Ð´Ð»Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ" #~ msgid "Using non-RFC proxy so only local delivery can be used" #~ msgstr "" #~ "ИÑпользуетÑÑ Ð½ÐµÑовмеÑÑ‚Ð¸Ð¼Ð°Ñ Ñ RFC доверенноÑть, поÑтому возможна только " #~ "Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ Ð´Ð¾Ñтавка" #~ msgid "Using non-RFC proxy so forcing local delivery" #~ msgstr "" #~ "ИÑпользуетÑÑ Ð½ÐµÑовмеÑÑ‚Ð¸Ð¼Ð°Ñ Ñ RFC доверенноÑть, вынужденно иÑпользуетÑÑ " #~ "Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ Ð´Ð¾Ñтавка" #~ msgid "use GSI proxy (RFC 3820 compliant proxy is default)" #~ msgstr "" #~ "иÑпользовать доверенноÑть GSI (по умолчанию иÑпользуетÑÑ\n" #~ " RFC 3820-ÑовмеÑÑ‚Ð¸Ð¼Ð°Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñть)" #~ msgid "Can not set the STORE_CTX for chain verification" #~ msgstr "Ðе удалоÑÑŒ задать STORE_CTX Ð´Ð»Ñ Ð¿Ð¾Ð´Ñ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸Ñ Ñ†ÐµÐ¿Ð¸" #~ msgid "X509_V_ERR_PATH_LENGTH_EXCEEDED" #~ msgstr "X509_V_ERR_PATH_LENGTH_EXCEEDED" #~ msgid "X509_V_ERR_PATH_LENGTH_EXCEEDED --- with proxy" #~ msgstr "X509_V_ERR_PATH_LENGTH_EXCEEDED --- Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñтью" #~ msgid "X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION" #~ msgstr "X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION" #~ msgid "" #~ "The proxy to be signed should be compatible with the signing certificate: " #~ "(%s) -> (%s)" #~ msgstr "" #~ "ПодпиÑÑ‹Ð²Ð°ÐµÐ¼Ð°Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñть должна быть ÑовмеÑтима Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñывающим " #~ "Ñертификатом: (%s) -> (%s)" #~ msgid "The proxy depth %i is out of maximum limit %i" #~ msgstr "Глубина доверенноÑти %i превышает предел %i" #~ msgid "proxy_depth: %i, path_length: %i" #~ msgstr "proxy_depth: %i, path_length: %i" #~ msgid "" #~ "Can not convert DER encoded PROXYCERTINFO extension to internal format" #~ msgstr "" #~ "Ðевозможно преобразовать раÑширение PROXYCERTINFO в кодировке DER во " #~ "внутренний формат" #~ msgid "Found more than one PCI extension" #~ msgstr "Обнаружено более одного раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ PCI" #~ msgid "" #~ "Globus legacy proxies can not carry policy data or path length constraints" #~ msgstr "" #~ "УÑтаревшие доверенноÑти Globus не могут Ñодержать данные о политиках или " #~ "Ð¾Ð³Ñ€Ð°Ð½Ð¸Ñ‡ÐµÐ½Ð¸Ñ Ð¿Ð¾ длине пути" #~ msgid "RSA_generate_key failed" #~ msgstr "Сбой метода RSA_generate_key" #~ msgid "Can not get X509V3_EXT_METHOD for %s" #~ msgstr "Ðевозможно извлечь X509V3_EXT_METHOD Ð´Ð»Ñ %s" #, fuzzy #~ msgid "Can not get policy from PROXYCERTINFO extension" #~ msgstr "Ðевозможно извлечь политику из раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ PROXYCERTINFO" #, fuzzy #~ msgid "Can not get policy language from PROXYCERTINFO extension" #~ msgstr "Ðевозможно извлечь Ñзык политики из раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ PROXYCERTINFO" #~ msgid "Can't get X509V3_EXT_METHOD for %s" #~ msgstr "Ðевозможно извлечь X509V3_EXT_METHOD Ð´Ð»Ñ %s" #~ msgid "Can not get extension from issuer certificate" #~ msgstr "Ðевозможно извлечь раÑширение из Ñертификата агентÑтва" #~ msgid "Failed to add extension into proxy" #~ msgstr "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð² доверенноÑть" #~ msgid "" #~ "The signing algorithm %s is not allowed,it should be SHA1/SHA2 to sign " #~ "certificate requests" #~ msgstr "" #~ "ÐедопуÑтимый алгоритм подпиÑи %s: запроÑÑ‹ Ñертификата должны " #~ "подпиÑыватьÑÑ SHA1 или SHA2" #~ msgid "Failed to add extension into EEC certificate" #~ msgstr "Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð² Ñертификат 
EEC" #~ msgid "" #~ "Resource information provider timed out: %u seconds. Using heartbeat file " #~ "from now on... Consider increasing infoproviders_timeout in arc.conf" #~ msgstr "" #~ "ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð½Ð° Ñбор информации о реÑурÑе: %u Ñекунд. ПроверÑетÑÑ " #~ "контрольный файл... Попробуйте увеличить значение infoproviders_timeout в " #~ "arc.conf" #~ msgid "" #~ "Resource information provider timed out: %u seconds. Checking heartbeat " #~ "file..." #~ msgstr "" #~ "ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð½Ð° Ñбор информации о реÑурÑе: %u Ñекунд. ПроверÑетÑÑ " #~ "контрольный файл..." #~ msgid "" #~ "Cannot stat %s. Are infoproviders running? This message will not be " #~ "repeated." #~ msgstr "" #~ "Ðевозможно проверить %s. Запущены ли Ñборщики информации? Это Ñообщение " #~ "не будет больше повторÑтьÑÑ." #~ msgid "" #~ "Cannot stat %s. Are infoproviders running? It happened already %d times." #~ msgstr "" #~ "Ðевозможно проверить %s. Запущены ли Ñборщики информации? Это уже %d-й " #~ "раз." #~ msgid "" #~ "Checked time: %d | Heartbeat file stat: %d | %s has not beed touched " #~ "before timeout (%d). \n" #~ " The performance is too low, infoproviders will be killed. A-REX " #~ "functionality is not ensured." #~ msgstr "" #~ "Ð’Ñ€ÐµÐ¼Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸: %d | ПоÑледнее контрольное обновление: %d | %s не " #~ "обновилÑÑ Ð² Ñрок (%d). \n" #~ " ÐÐ¸Ð·ÐºÐ°Ñ Ð¿Ñ€Ð¾Ð¸Ð·Ð²Ð¾Ð´Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ñть, Ñборщики информации будут оÑтановлены. " #~ "РаботоÑпоÑобноÑть A-REX под угрозой." #~ msgid "Found recent heartbeat file %s , waiting other %d seconds" #~ msgstr "" #~ "Обнаружен недавно обновлённый контрольный файл %s , ожидание ещё %d Ñекунд" #~ msgid "Submit: Failed to disconnect after submission" #~ msgstr "ЗаÑылка: Сбой отÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ð¿Ð¾Ñле заÑылки" #~ msgid "EMIES:WipeActivity: job %s - state is %s, not terminal" #~ msgstr "EMIES:WipeActivity: задача %s - ÑоÑтоÑние %s, не конечное" #~ msgid "Unable to copy %s: No valid credentials found" #~ msgstr "" #~ "Ðевозможно Ñкопировать %s: Ðе обнаружено дейÑтвительных параметров доÑтупа" #~ msgid "Unable to list content of %s: No valid credentials found" #~ msgstr "" #~ "Ðевозможно проÑмотреть Ñодержимое %s: Ðе обнаружено дейÑтвительных " #~ "параметров доÑтупа" #~ msgid "Unable to create directory %s: No valid credentials found" #~ msgstr "" #~ "Ðевозможно Ñоздать директорию %s: Ðе обнаружено дейÑтвительных параметров " #~ "доÑтупа" #~ msgid "Unable to rename %s: No valid credentials found" #~ msgstr "" #~ "Ðевозможно переименовать %s: Ðе обнаружено дейÑтвительных параметров " #~ "доÑтупа" #~ msgid "Unable to remove file %s: No valid credentials found" #~ msgstr "" #~ "Ðевозможно Ñтереть %s: Ðе обнаружено дейÑтвительных параметров доÑтупа" #~ msgid "year" #~ msgid_plural "years" #~ msgstr[0] "год" #~ msgstr[1] "года" #~ msgstr[2] "лет" #~ msgid "month" #~ msgid_plural "months" #~ msgstr[0] "меÑÑц" #~ msgstr[1] "меÑÑца" #~ msgstr[2] "меÑÑцев" #~ msgid "day" #~ msgid_plural "days" #~ msgstr[0] "день" #~ msgstr[1] "днÑ" #~ msgstr[2] "дней" #~ msgid "arc_to_voms - %u attributes" #~ msgstr "arc_to_voms - %u атрибут(а)" #~ msgid "arc_to_voms: attribute: %s" #~ msgstr "arc_to_voms: атрибут: %s" #~ msgid "%s: Failed switching user" #~ msgstr "%s: Сбой при Ñмене пользователÑ" #~ msgid "Job could have died due to expired proxy: restarting" #~ msgstr "" #~ "Возможно, иÑполнение задачи прервалоÑÑŒ из-за проÑроченной доверенноÑти: " #~ "перезапуÑк" #~ msgid "Failed to report renewed proxy to job" #~ msgstr "Ðе удалоÑÑŒ Ñообщить задаче о новой доверенноÑти" #~ msgid "" #~ "Proxy 
certificate path was not explicitly set or does not exist or has\n" #~ "improper permissions/ownership and not found at default location.\n" #~ "Key/certificate paths were not explicitly set or do not exist or have\n" #~ "improper permissions/ownership and usercert.pem/userkey.pem not found\n" #~ "at default locations:\n" #~ "~/.arc/, ~/.globus/, %s/etc/arc, and ./.\n" #~ "If the proxy or certificate/key does exist, please manually specify the " #~ "locations via env\n" #~ "X509_USER_CERT/X509_USER_KEY or X509_USER_PROXY, or the certificatepath/" #~ "keypath or proxypath\n" #~ "item in client.conf\n" #~ "If the certificate/key does exist, and proxy is needed to be generated, " #~ "please\n" #~ "use arcproxy utility to create a proxy certificate." #~ msgstr "" #~ "МеÑтонахождение доверенноÑти не задано Ñвно, либо не ÑущеÑтвует,\n" #~ "либо у Ð’Ð°Ñ Ð½ÐµÐ´Ð¾Ñтаточные привилегии, а в Ñтандартном меÑте её нет.\n" #~ "МеÑÑ‚Ð¾Ð½Ð°Ñ…Ð¾Ð¶Ð´ÐµÐ½Ð¸Ñ Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¾Ð³Ð¾/открытого ключей не заданы Ñвно, либо их нет,\n" #~ "либо у Ð’Ð°Ñ Ð½ÐµÐ´Ð¾Ñтаточные привилегии, а файлов usercert.pem/userkey.pem " #~ "нет\n" #~ "в Ñтандартных меÑтах:\n" #~ "~/.arc/, ~/.globus/, %s/etc/arc, и ./.\n" #~ "ЕÑли у Ð’Ð°Ñ ÐµÑть Ñти файлы, пожалуйÑта, укажите вручную их раÑÐ¿Ð¾Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ Ñ " #~ "помощью\n" #~ "переменных X509_USER_CERT/X509_USER_KEY и/или X509_USER_PROXY, либо задав " #~ "значениÑ\n" #~ "certificatepath/keypath or proxypath в файле наÑтроек клиента client." #~ "conf\n" #~ "ЕÑли у Ð²Ð°Ñ ÐµÑть ключи, но нет доверенноÑти, иÑпользуйте ÑредÑтво arcproxy " #~ "Ð´Ð»Ñ ÐµÑ‘ ÑозданиÑ." #~ msgid "LDAP authorization is not implemented yet" #~ msgstr "ÐÐ²Ñ‚Ð¾Ñ€Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð´Ð»Ñ LDAP ещё не реализована" #~ msgid "Match group: %s" #~ msgstr "Совпадение группы: %s" #~ msgid "Match capabilities: %s" #~ msgstr "Совпадение возможноÑти: %s" #~ msgid "Failed writing RSL" #~ msgstr "Сбой запиÑи RSL" #~ msgid "" #~ "Parsing error:\n" #~ "%s" #~ msgstr "" #~ "Ошибка разбора:\n" #~ "%s" #~ msgid "Parsing string using ADLParser" #~ msgstr "Разбор Ñтроки Ñ Ð¸Ñпользованием ADLParser" #~ msgid "[ADLParser] Parsing error: %s\n" #~ msgstr "[ADLParser] Ошибка разбора: %s\n" #~ msgid "[ADLParser] Wrong XML structure! " #~ msgstr "[ADLParser] ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ñтруктура XML! " #~ msgid "Parsing string using ARCJSDLParser" #~ msgstr "Обработка Ñтроки Ñ Ð¸Ñпользованием ARCJSDLParser" #~ msgid "[ARCJSDLParser] XML parsing error: %s\n" #~ msgstr "[ARCJSDLParser] Ошибка разбора XML: %s\n" #~ msgid "[ARCJSDLParser] Wrong XML structure! " #~ msgstr "[ARCJSDLParser] ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ñтруктура XML! " #~ msgid "Parsing string using JDLParser" #~ msgstr "Разбор Ñтроки Ñ Ð¸Ñпользованием JDLParser" #~ msgid "" #~ "[JDLParser] There is at least one necessary square bracket missing or " #~ "their order is incorrect. ('[' or ']')" #~ msgstr "" #~ "[JDLParser] По крайней мере одна ÐºÐ²Ð°Ð´Ñ€Ð°Ñ‚Ð½Ð°Ñ Ñкобка отÑутÑтвует, или их " #~ "порÑдок неверен ('[' или ']')." 
#~ msgid "Can't evaluate left operand for RSL concatenation: %s" #~ msgstr "" #~ "Ðевозможно определить значение левого операнда Ð´Ð»Ñ Ð¿Ð¾Ð´Ñ†ÐµÐ¿Ð»ÐµÐ½Ð¸Ñ RSL: %s" #~ msgid "Can't evaluate right operand for RSL concatenation: %s" #~ msgstr "" #~ "Ðевозможно определить значение правого операнда Ð´Ð»Ñ Ð¿Ð¾Ð´Ñ†ÐµÐ¿Ð»ÐµÐ½Ð¸Ñ RSL: %s" #~ msgid "Can't evaluate RSL list member: %s" #~ msgstr "Ðевозможно определить значение Ñлемента ÑпиÑка RSL: %s" #~ msgid "Can't evaluate RSL sequence member: %s" #~ msgstr "Ðевозможно определить значение члена поÑледовательноÑти RSL: %s" #~ msgid "Unknown RSL value type - should not happen" #~ msgstr "ÐеизвеÑтный тип Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ RSL - не должно ÑлучатьÑÑ" #~ msgid "RSL (inside multi) could not be evaluated: %s" #~ msgstr "RSL (внутри множеÑтвенного опиÑаниÑ) не может быть обработан: %s" #~ msgid "RSL could not be evaluated: %s" #~ msgstr "RSL не может быть обработан: %s" #~ msgid "Can't evaluate RSL fragment: %s" #~ msgstr "Ðевозможно обработать фрагмент RSL: %s" #~ msgid "Can't evaluate RSL substitution variable name: %s" #~ msgstr "Ðевозможно определить Ð¸Ð¼Ñ Ð¿ÐµÑ€ÐµÐ¼ÐµÐ½Ð½Ð¾Ð¹ Ð´Ð»Ñ Ð·Ð°Ð¼ÐµÐ½Ñ‹ RSL: %s" #~ msgid "Can't evaluate RSL substitution variable value: %s" #~ msgstr "Ðевозможно определить значение переменной Ð´Ð»Ñ Ð·Ð°Ð¼ÐµÐ½Ñ‹ RSL: %s" #~ msgid "Can't evaluate RSL condition value: %s" #~ msgstr "Ðевозможно определить значение уÑÐ»Ð¾Ð²Ð¸Ñ RSL: %s" #~ msgid "Unknown RSL type - should not happen" #~ msgstr "ÐеизвеÑтный тип RSL - Ñто не должно ÑлучатьÑÑ" #~ msgid "RSL parsing failed at position %ld" #~ msgstr "Сбой обработки RSL на позиции %ld" #~ msgid "Expected ) at position %ld" #~ msgstr "ОжидаетÑÑ ) на позиции %ld" #~ msgid "Expected ( at position %ld" #~ msgstr "ОжидаетÑÑ ( на позиции %ld" #~ msgid "Expected variable name at position %ld" #~ msgstr "ОжидаетÑÑ Ð½Ð°Ð·Ð²Ð°Ð½Ð¸Ðµ переменной на позиции %ld" #~ msgid "Broken string at position %ld" #~ msgstr "ÐŸÐ¾Ð²Ñ€ÐµÐ¶Ð´Ñ‘Ð½Ð½Ð°Ñ Ñтрока на позиции %ld" #~ msgid "RSL parsing error at position %ld" #~ msgstr "Ошибка обработки RSL на позиции %ld" #~ msgid "Expected attribute name at position %ld" #~ msgstr "ОжидаетÑÑ Ð½Ð°Ð·Ð²Ð°Ð½Ð¸Ðµ атрибута на позиции %ld" #~ msgid "Expected relation operator at position %ld" #~ msgstr "ОжидаетÑÑ Ð¾Ð¿ÐµÑ€Ð°Ñ‚Ð¾Ñ€ ÑÑ€Ð°Ð²Ð½ÐµÐ½Ð¸Ñ Ð½Ð° позиции %ld" #~ msgid "Xrsl attribute join is set but attribute stdout is not set" #~ msgstr "Задан атрибут xRSL join, но атрибут stdout пропущен" #~ msgid "Xrsl attribute join is set but attribute stderr is also set" #~ msgstr "Задан атрибут xRSL join, но также задан атрибут stderr" #~ msgid "Parsing string using XRSLParser" #~ msgstr "Обработка Ñтроки Ñ Ð¸Ñпользованием XRSLParser" #~ msgid "XRSL parsing error" #~ msgstr "Ошибка обработки XRSL" #~ msgid "filename cannot be empty." #~ msgstr "Ð˜Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð° не может быть пуÑтым." #~ msgid "" #~ "Parsing the queue xrsl attribute failed. An invalid comparison operator " #~ "was used, only \"=\" is allowed." #~ msgstr "" #~ "Ошибка разбора атрибута XRSL queue. ИÑпользуетÑÑ Ð½ÐµÐ´Ð¾Ð¿ÑƒÑтимый оператор " #~ "ÑравнениÑ, допуÑкаетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ \"=\"." #~ msgid "%d Queues" #~ msgstr "%d Очереди" #~ msgid "Queue Information:" #~ msgstr "Ð¡Ð²ÐµÐ´ÐµÐ½Ð¸Ñ Ð¾Ð± очереди:" #~ msgid "" #~ "Localtransfer is deprecated, but turned on in arc.conf. Job will be " #~ "submitted with localtransfer=no." #~ msgstr "" #~ "ÐžÐ¿Ñ†Ð¸Ñ localtransfer более не поддерживаетÑÑ, но указана в arc.conf. " #~ "Задача будет запущена Ñ Ð¾Ð¿Ñ†Ð¸ÐµÐ¹ localtransfer=no." #~ msgid "Localtransfer deprecated. 
Localtransfer has been turned off." #~ msgstr "ÐžÐ¿Ñ†Ð¸Ñ localtransfer более не поддерживаетÑÑ Ð¸ отключена." #~ msgid "Permission checking failed" #~ msgstr "Проверка прав доÑтупа не удалаÑÑŒ" #~ msgid "Cache file valid until: %s" #~ msgstr "Файл в кÑше дейÑтвителен до: %s" #~ msgid "Changing old validity time format to new in %s" #~ msgstr "ЗаменÑетÑÑ Ñтарый формат Ñрока годноÑти на новый в %s" #~ msgid "%s: adding to transfer share %s" #~ msgstr "%s: добавлÑетÑÑ Ðº транÑферной доле %s" #~ msgid "%s: state: %s: starting new child" #~ msgstr "%s: СоÑтоÑние: %s: запуÑкаетÑÑ Ð½Ð¾Ð²Ñ‹Ð¹ дочерний процеÑÑ" #~ msgid "%s: State %s: starting child: %s" #~ msgstr "%s: СоÑтоÑние %s: запуÑкаетÑÑ Ð´Ð¾Ñ‡ÐµÑ€Ð½Ð¸Ð¹ процеÑÑ: %s" #~ msgid "%s: Failed to run uploader process" #~ msgstr "%s: Ðе удалоÑÑŒ запуÑтить процеÑÑ Ð¾Ñ‚Ð³Ñ€ÑƒÐ·Ñ‡Ð¸ÐºÐ°" #~ msgid "%s: Failed to run downloader process" #~ msgstr "%s: Ðе удалоÑÑŒ запуÑтить процеÑÑ Ð·Ð°Ð³Ñ€ÑƒÐ·Ñ‡Ð¸ÐºÐ°" #~ msgid "%s: State: PREPARING/FINISHING: child is running" #~ msgstr "%s: ÑоÑтоÑние PREPARING/FINISHING: дочерний процеÑÑ Ð¸ÑполнÑетÑÑ" #~ msgid "%s: State: PREPARING: child exited with code: %i" #~ msgstr "" #~ "%s: ÑоÑтоÑние PREPARING: дочерний процеÑÑ Ð·Ð°Ð²ÐµÑ€ÑˆÐ¸Ð»ÑÑ Ñ ÐºÐ¾Ð´Ð¾Ð¼ выхода: %i" #~ msgid "%s: State: FINISHING: child exited with code: %i" #~ msgstr "" #~ "%s: ÑоÑтоÑние: FINISHING: дочерний процеÑÑ Ð·Ð°Ð²ÐµÑ€ÑˆÐ¸Ð»ÑÑ Ñ ÐºÐ¾Ð´Ð¾Ð¼ выхода: %i" #~ msgid "%s: State: FINISHING: unrecoverable error detected (exit code 1)" #~ msgstr "" #~ "%s: ÑоÑтоÑние FINISHING: обнаружена неиÑÐ¿Ñ€Ð°Ð²Ð¸Ð¼Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° (код выхода 1)" #~ msgid "%s: State: PREPARING: unrecoverable error detected (exit code 1)" #~ msgstr "" #~ "%s: ÑоÑтоÑние PREPARING: обнаружена неиÑÐ¿Ñ€Ð°Ð²Ð¸Ð¼Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° (код выхода 1)" #~ msgid "%s: State: PREPARING/FINISHING: retryable error" #~ msgstr "%s: ÑоÑтоÑние PREPARING/FINISHING: иÑÐ¿Ñ€Ð°Ð²Ð¸Ð¼Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ°" #~ msgid "%s: State: %s: credentials probably expired (exit code %i)" #~ msgstr "" #~ "%s: ÑоÑтоÑние: %s: вероÑтно, иÑтёк Ñрок дейÑÑ‚Ð²Ð¸Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð¾Ð² доÑтупа (код " #~ "выхода %i)" #~ msgid "%s: State: %s: trying to renew credentials" #~ msgstr "%s: СоÑтоÑние: %s: попытка обновить параметры доÑтупа" #~ msgid "%s: State: %s: failed to renew credentials" #~ msgstr "%s: СоÑтоÑние: %s: невозможно обновить параметры доÑтупа" #~ msgid "%s: State: %s: failed to create temporary proxy for renew: %s" #~ msgstr "" #~ "%s: СоÑтоÑние: %s: не удалоÑÑŒ Ñоздать временную доверенноÑть Ð´Ð»Ñ " #~ "обновлениÑ: %s" #~ msgid "" #~ "%s: State: %s: some error detected (exit code %i). Recover from such type " #~ "of errors is not supported yet." #~ msgstr "" #~ "%s: СоÑтоÑние: %s:обнаружена ошибка (код выхода %i). ВоÑÑтановление поÑле " #~ "такой ошибки пока что не поддерживаетÑÑ." #~ msgid "%s: Data staging failed. No retries left." #~ msgstr "%s: Сбой Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…. Ð’Ñе попытки вышли." #~ msgid "" #~ "%s: Download failed. %d retries left. Will wait for %ds before retrying" #~ msgstr "%s: Сбой загрузки. ОÑталоÑÑŒ %d попыток. ÐŸÐ¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° через %dÑ" #~ msgid "%s: Upload failed. No retries left." #~ msgstr "%s: Сбой отгрузки. Ð’Ñе попытки вышли." #~ msgid "" #~ "%s: Upload failed. %d retries left. Will wait for %ds before retrying." #~ msgstr "%s: Сбой отгрузки. ОÑталоÑÑŒ %d попыток. 
ÐŸÐ¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° через %dÑ" #~ msgid "Wrong number in speedcontrol: %s" #~ msgstr "ÐедопуÑтимое чиÑло в speedcontrol: %s" #~ msgid "Wrong number in maxtransfertries" #~ msgstr "ÐедопуÑтимое чиÑло в maxtransfertries" #~ msgid "Empty root directory for GACL plugin" #~ msgstr "Корневой каталог раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ GACL пуÑÑ‚" #~ msgid "Failed to parse default GACL document" #~ msgstr "Ðе удалоÑÑŒ разобрать документ GACL по умолчанию" #~ msgid "Mount point %s creation failed." #~ msgstr "Ðе удалоÑÑŒ Ñоздать точку Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡ÐµÐ½Ð¸Ñ %s." #~ msgid "Creation of top level ACL %s failed." #~ msgstr "Ðе удалоÑÑŒ Ñоздать правила доÑтупа выÑшего ÑƒÑ€Ð¾Ð²Ð½Ñ %s." #~ msgid "plugin(gacl): open: %s" #~ msgstr "Подключаемый модуль(gacl): открытие: %s" #~ msgid "Failed to parse GACL" #~ msgstr "Ðевозможно обработать GACL" #~ msgid "GACL without is not allowed" #~ msgstr "ИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ GACL без недопуÑтима" #~ msgid "Failed to save GACL" #~ msgstr "Ðевозможно Ñохранить GACL" #~ msgid "GACL file %s is not an ordinary file" #~ msgstr "Файл GACL %s не ÑвлÑетÑÑ Ð¾Ð±Ñ‹Ñ‡Ð½Ñ‹Ð¼ файлом" #~ msgid "GACL description for file %s could not be loaded" #~ msgstr "Правила GACL Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° %s не могут быть загружены" #~ msgid "Request failed: No response" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ðµ удалÑÑ: нет ответа" #~ msgid "Request failed: Error" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ðµ удалÑÑ: ошибка." #~ msgid "Request succeeded!!!" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ ÑƒÐ´Ð°Ð»ÑÑ!!!" #~ msgid "SP Service name is %s" #~ msgstr "Ð˜Ð¼Ñ Ñлужбы провайдера уÑлуг: %s" #~ msgid "SAML Metadata is from %s" #~ msgstr "Метаданные SAML из %s" #~ msgid "saml2SP: Unauthorized" #~ msgstr "SAML2SP: ДоÑтуп закрыт" #~ msgid "no input payload" #~ msgstr "пуÑÑ‚Ð°Ñ Ð½Ð°Ð³Ñ€ÑƒÐ·ÐºÐ° на входе" #~ msgid "Using private key file to sign: %s" #~ msgstr "ИÑпользуетÑÑ Ñ„Ð°Ð¹Ð» личного ключа Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñи: %s" #~ msgid "After signature: %s" #~ msgstr "ПоÑле подпиÑи: %s" #~ msgid "Encrypted SAML assertion: %s" #~ msgstr "Зашифрованное утверждение SAML: %s" #~ msgid "Can not decrypt the EncryptedAssertion from SAML response" #~ msgstr "Ðе удалоÑÑŒ раÑшифровать EncryptedAssertion из отзыва SAML" #~ msgid "Decrypted SAML Assertion: %s" #~ msgstr "РаÑшифрованное утверждение SAML: %s" #~ msgid "Encrypted name ID: %s" #~ msgstr "Зашифрованный идентификатор имени: %s" #~ msgid "Can not decrypt the EncryptedID from SAML assertion" #~ msgstr "Ðе удалоÑÑŒ раÑшифровать EncryptedID из ÑƒÑ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸Ñ SAML" #~ msgid "Decrypted SAML name ID: %s" #~ msgstr "РаÑшифрованный идентификатор имени SAML: %s" #~ msgid "saml:Conditions, current time: %s is before the start time: %s" #~ msgstr "saml:Conditions, текущее времÑ: %s раньше времени начала: %s" #~ msgid "saml:Conditions, current time: %s is after the end time: %s" #~ msgstr "saml:Conditions, текущее времÑ: %s позже времени окончаниÑ: %s" #~ msgid "saml:Subject, current time is before the start time" #~ msgstr "SAML:Subject, текущее Ð²Ñ€ÐµÐ¼Ñ Ñ€Ð°Ð½ÑŒÑˆÐµ времени начала" #~ msgid "saml:Subject, current time is after the end time" #~ msgstr "SAML:Subject, текущее Ð²Ñ€ÐµÐ¼Ñ Ð¿Ð¾Ð·Ð¶Ðµ времени окончаниÑ" #~ msgid "Can not get saml:Assertion or saml:EncryptedAssertion from IdP" #~ msgstr "" #~ "Ðевозможно получить SAML:Assertion или SAML:EncryptedAssertion от IdP" #~ msgid "Succeeded to verify the signature under " #~ msgstr "ПодпиÑÑŒ уÑпешно подтверждена" #~ msgid "Failed to verify the signature under " #~ msgstr "ПодпиÑÑŒ не подтверждена" #~ msgid "" #~ "The NameID inside request is the same as the NameID from the tls " #~ 
"authentication: %s" #~ msgstr "" #~ "Параметр NameID в запроÑе идентичен NameID при проверке подлинноÑти TLS: " #~ "%s" #~ msgid "" #~ "Access database %s from server %s port %s, with user %s and password %s" #~ msgstr "" #~ "ДоÑтуп к базе данных %s на Ñервере %s по порту %s, как пользователь %s Ñ " #~ "паролем %s" #~ msgid "Can't establish connection to mysql database" #~ msgstr "Ðе удалоÑÑŒ уÑтановить ÑвÑзь Ñ Ð±Ð°Ð·Ð¾Ð¹ данных mysql" #~ msgid "Is connected to database? %s" #~ msgstr "ЕÑть ли ÑвÑзь Ñ Ð±Ð°Ð·Ð¾Ð¹ данных? %s" #~ msgid "Query: %s" #~ msgstr "ЗапроÑ: %s" #~ msgid "Get result array with %d rows" #~ msgstr "Получен маÑÑив результатов из %d Ñтрок" #~ msgid "Can not find StatusCode" #~ msgstr "Ðе обнаружен StatusCode" #~ msgid "" #~ "SAML Assertion parsed from SP Service:\n" #~ "%s" #~ msgstr "" #~ "Утверждение SAML выделенное из ÑервиÑа SP:\n" #~ "%s" #~ msgid "Can not get SAMLAssertion SecAttr from outgoing message AuthContext" #~ msgstr "" #~ "Ðевозможно получить SAMLAssertion SecAttr из иÑходÑщего ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ " #~ "AuthContext" #~ msgid "MessageAuthContext can not be parsed from outgoing message" #~ msgstr "Ðевозможно выделить MessageAuthContext из иÑходÑщего ÑообщениÑ" #~ msgid "Process: POST" #~ msgstr "ПроцеÑÑ: POST" #~ msgid "SOAP body does not include any request node" #~ msgstr "Тело SOAP не Ñодержит запроÑов" #~ msgid "Request: %s" #~ msgstr "ЗапроÑ: %s" #~ msgid "There is no X509Request node in the request message" #~ msgstr "Ð’ запроÑе отÑутÑтвует Ñлемент X509Request" #~ msgid "Composed DN: %s" #~ msgstr "Создан DN: %s" #~ msgid "get|put [object ...]" #~ msgstr "get|put [объект ...]" #~ msgid "" #~ "The arcacl command retrieves/sets permissions (ACL) of data or computing " #~ "objects." #~ msgstr "" #~ "Команда arcacl иÑпользуетÑÑ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ и приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð¿Ñ€Ð°Ð² доÑтупа (ACL) " #~ "данным или вычиÑлительному заданию." #~ msgid "Unsupported command %s." #~ msgstr "ÐÐµÐ¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ð°Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð° %s" #~ msgid "Cannot read specified jobID file: %s" #~ msgstr "Ðе удаётÑÑ Ð¿Ñ€Ð¾Ñ‡ÐµÑть указанный файл, Ñодержащий Ñрлыки задач: %s" #~ msgid "No objects given" #~ msgstr "Объекты не указаны" #~ msgid "Processing data object %s" #~ msgstr "ОбрабатываетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚ данных %s" #~ msgid "Data object %s is not valid URL." #~ msgstr "Файловый объект %s не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым URL." #~ msgid "" #~ "Data object %s is not supported. Only GACL-enabled GridFTP servers are " #~ "supported yet." #~ msgstr "" #~ "Тип %s не поддерживаетÑÑ. Пока что поддерживаютÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ GridFTP Ñерверы " #~ "Ñ GACL." #~ msgid "URL %s is not supported." #~ msgstr "URL %s не поддерживаетÑÑ." #~ msgid "Object for stdout handling failed." #~ msgstr "Сбой обработки объекта stdout." #~ msgid "Object for stdin handling failed." #~ msgstr "Сбой обработки объекта stdin." #~ msgid "ACL transfer FAILED: %s" #~ msgstr "Сбой переÑылки ACL: %s" #~ msgid "" #~ "The arcmigrate command is used for migrating queued jobs to another " #~ "resource.\n" #~ "Note that migration is only supported between A-REX powered resources." #~ msgstr "" #~ "Команда arcmigrate иÑпользуетÑÑ Ð´Ð»Ñ Ð¼Ð¸Ð³Ñ€Ð°Ñ†Ð¸Ð¸ ожидающих задач на другой " #~ "реÑурÑ.\n" #~ "ÐœÐ¸Ð³Ñ€Ð°Ñ†Ð¸Ñ Ð¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÑ‚ÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ между Ñлужбами A-REX." 
#~ msgid "Cannot write job IDs of submitted jobs to file (%s)" #~ msgstr "Ðевозможно запиÑать Ñрлыки запущенных задач в файл (%s) " #~ msgid "" #~ "Migration of job (%s) succeeded, but killing the job failed - it will " #~ "still appear in the job list" #~ msgstr "" #~ "УÑпешно завершена Ð¼Ð¸Ð³Ñ€Ð°Ñ†Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ (%s), но прервать задачу не удалоÑÑŒ - " #~ "она будет приÑутÑтвовать в ÑпиÑке задач" #~ msgid "" #~ "Migration of job (%s) succeeded, but cleaning the job failed - it will " #~ "still appear in the job list" #~ msgstr "" #~ "УÑпешно завершена Ð¼Ð¸Ð³Ñ€Ð°Ñ†Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ (%s), но очиÑтить задачу не удалоÑÑŒ - " #~ "она будет приÑутÑтвовать в ÑпиÑке задач" #~ msgid "Job migration summary:" #~ msgstr "Сводка перезаÑылки задач:" #~ msgid "%d of %d jobs were migrated" #~ msgstr "%d из %d задач были перезаÑланы" #~ msgid "The following %d were not migrated" #~ msgstr "Следующие %d не были перезаÑланы" #~ msgid "OpenSSL Error -- %s" #~ msgstr "Ошибка OpenSSL -- %s" #~ msgid "Creating and sending soap request" #~ msgstr "Создание и отправка запроÑа SOAP" #~ msgid "URL of SLCS service" #~ msgstr "URL Ñлужбы SLCS" #~ msgid "Identity provider name" #~ msgstr "Ð˜Ð¼Ñ Ð¿Ñ€Ð¾Ð²Ð°Ð¹Ð´ÐµÑ€Ð° идентификационной информации" #~ msgid "User account to identity provider" #~ msgstr "" #~ "Ð£Ñ‡Ñ‘Ñ‚Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ñƒ провайдера идентификационной информации" #~ msgid "Password for user account to identity provider" #~ msgstr "Пароль учётной запиÑи у провайдера идентификационной информации" #~ msgid "Key size of the private key (512, 1024, 2048)" #~ msgstr "Длина Ñекретного ключа (512, 1024, 2048)" #~ msgid "Private key passphrase" #~ msgstr "Пароль Ñекретного ключа:" #~ msgid "passphrase" #~ msgstr "пароль" #~ msgid "Lifetime of the certificate, start with current time, hour as unit" #~ msgstr "Период дейÑÑ‚Ð²Ð¸Ñ Ñертификата, Ð½Ð°Ñ‡Ð¸Ð½Ð°Ñ Ñ Ñ‚ÐµÐºÑƒÑ‰ÐµÐ³Ð¾ момента, в чаÑах" #~ msgid "period" #~ msgstr "период" #~ msgid "Store directory for key and signed certificate" #~ msgstr "МеÑто Ð´Ð»Ñ Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¾Ð³Ð¾ ключа и подпиÑанного Ñертификата" #~ msgid "" #~ "The VOMS server with the information:\n" #~ "\t%s\"\n" #~ "can not be reached, please make sure it is available." #~ msgstr "" #~ "Ðевозможно ÑвÑзатьÑÑ Ñ Ñервером VOMS Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÐµÐ¹:\n" #~ "\t%s\"\n" #~ "ПожалуйÑта, проверьте, доÑтупен ли Ñтот Ñервер." #~ msgid "Error: can't read policy file: %s" #~ msgstr "Ошибка: невозможно прочеÑть файл политик: %s" #~ msgid "" #~ "One of the elements 'Exact', 'UpperBoundedRange', 'LowerBoundedRange', " #~ "'Range', 'Min' or 'Max' was expected." #~ msgstr "" #~ "ОжидалÑÑ Ð¾Ð´Ð¸Ð½ из Ñлементов 'Exact', 'UpperBoundedRange', " #~ "'LowerBoundedRange', 'Range', 'Min' или 'Max'." #~ msgid "" #~ "Combinations of 'Exact', 'Range', 'UpperBoundedRange'/'LowerBoundedRange' " #~ "and 'Max'/'Min' are not supported." #~ msgstr "" #~ "Комбинации 'Exact', 'Range', 'UpperBoundedRange'/'LowerBoundedRange' и " #~ "'Max'/'Min' не поддерживаютÑÑ." #~ msgid "Called SAML2SSOHTTPClient constructor" #~ msgstr "Вызван конÑтруктор SAML2SSOHTTPClient" #~ msgid "Relaystate %s" #~ msgstr "Значение RelayState: %s" #~ msgid "Performing SSO with %s " #~ msgstr "ВыполнÑетÑÑ SSO Ñ %s " #~ msgid "The IdP login is %s" #~ msgstr "Ð˜Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ IdP: %s" #~ msgid "Retrieving the remote SimpleSAMLphp installation failed!" #~ msgstr "Сбой доÑтупа к удалённой Ñлужбе SimpleSAMLphp!" #~ msgid "Getting from Confusa to the IdP page failed!" 
#~ msgstr "Сбой перехода Ñ Confusa на Ñтраницу IdP!" #~ msgid "Successfully redirected from Confusa to the IdP login!" #~ msgstr "УÑпешное перенаправление Ñ Confusa на вход в IdP!" #~ msgid "Getting the user consent for SSO failed!" #~ msgstr "Сбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ ÑоглаÑÐ¸Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½Ð° SSO!" #~ msgid "Successfully logged in to the IdP!" #~ msgstr "УÑпешный вход в IdP!" #~ msgid "Directing back from the IdP to Confusa failed!" #~ msgstr "Сбой обратного Ð¿ÐµÑ€ÐµÐ½Ð°Ð¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ IdP на Confusa!" #~ msgid "Successfully redirected back from the IdP to Confusa!" #~ msgstr "УÑпешное обратное перенаправление Ñ IdP на Confusa!" #~ msgid "The used session cookies for the about page is %s" #~ msgstr "ИÑпользуемые куки Ð´Ð»Ñ Ñтраницы Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÐµÐ¹: %s" #~ msgid "The retrieved DN is %s" #~ msgstr "Полученное выделенное Ð¸Ð¼Ñ (DN): %s" #~ msgid "The location to which the GET is performed is %s" #~ msgstr "Ðазначение операции GET: %s" #~ msgid "Approving CSR on Confusa's approve page %s" #~ msgstr "Одобрение запроÑа CSR на Ñтранице Ð¾Ð´Ð¾Ð±Ñ€ÐµÐ½Ð¸Ñ Confusa %s" #~ msgid "The cookie sent with approve is %s" #~ msgstr "Куки-файл, поÑланный Ñ Ð¾Ð´Ð¾Ð±Ñ€ÐµÐ½Ð¸ÐµÐ¼: %s" #~ msgid "The server location is %s " #~ msgstr "Сервер раÑположен на %s " #~ msgid "The request URL is %s" #~ msgstr "URL запроÑа: %s" #~ msgid "Sending OAuth request to signed URL %s" #~ msgstr "Отправка запроÑа OAuth на подпиÑанный URL %s" #~ msgid "Please login at the following URL " #~ msgstr "ПожалуйÑта, войдите в ÑиÑтему по данному URL " #~ msgid "Press enter to continue\n" #~ msgstr "Ðажмите enter, чтобы продолжить\n" #~ msgid "The about-you request URL is %s" #~ msgstr "URL запроÑа о данных пользователÑ: %s" #~ msgid "Approving the certificate signing request at %s" #~ msgstr "ОдобрÑетÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñи Ñертификата на %s" #~ msgid "The OAuth request URL is %s" #~ msgstr "URL запроÑа OAuth: %s" #~ msgid "The request is NULL!" #~ msgstr "ОтÑутÑтвует Ñубъект запроÑа!" #~ msgid "No characters were read from the BIO in public key extraction" #~ msgstr "" #~ "Ðи одного Ñимвола не было Ñчитано Ñ BIO при извлечении открытого ключа" #~ msgid "Could not find any digest for the given name" #~ msgstr "Ðевозможно найти Ñводку Ð´Ð»Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ð¾Ð³Ð¾ имени" #~ msgid "SHA1Sum appears to be empty!" #~ msgstr "Похоже, отÑутÑтвует SHA1Sum!" #~ msgid "Could not create a certificate request for subject %s" #~ msgstr "Ðевозможно Ñоздать Ð·Ð°Ð¿Ñ€Ð¾Ñ Ñертификата Ð´Ð»Ñ Ñубъекта %s" #~ msgid "Trying to get content %s from XML element, size %d" #~ msgstr "Попытка извлечь Ñодержимое %s из Ñлемента XML, размер %d" #~ msgid "Failed to parse XML file!" #~ msgstr "Сбой при разборе файла формата XML!" 
#~ msgid "extract_body_information(): Body elements not found in passed string" #~ msgstr "" #~ "extract_body_information(): Элемент Body не обнаружен в переданной Ñтроке" #~ msgid "post_2_ssoservice_redirect URL is %s" #~ msgstr "URL post_2_ssoservice_redirect: %s" #~ msgid "The consent_page is %s" #~ msgstr "consent_page: %s" #~ msgid "SAML2SSOHTTPClient::processConsent()" #~ msgstr "SAML2SSOHTTPClient::processConsent()" #~ msgid "Trying to open confirm site %s" #~ msgstr "Попытка открыть подтверждённый Ñайт %s" #~ msgid "Found action is %s" #~ msgstr "Обнаруженное дейÑтвие: %s" #~ msgid "Post-IdP-authentication action is %s" #~ msgstr "ДейÑтвие проверки подлинноÑти поÑле IdP: %s" #~ msgid "Used session cookies for the assertion consumer are %s" #~ msgstr "ИÑпользованные маркёры Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ утверждений: %s" #~ msgid "Got over the actual IP login 2 to %s, cookie %s " #~ msgstr "Подключение ÑобÑтвенно через вход IP к %s, куки-файл %s " #~ msgid "Posting username/password with the following session cookie %s to %s" #~ msgstr "Передача имени/Ð¿Ð°Ñ€Ð¾Ð»Ñ Ñ‡ÐµÑ€ÐµÐ· Ñледующий куки-файл ÑеÑÑии: %s на %s" #~ msgid "The idp_login_post_info cookie is %s, while the sent cookie was %s" #~ msgstr "" #~ "Куки-файл idp_login_post_info cookie ÑвлÑетÑÑ %s, тогда как отправленный " #~ "куки-файл был %s" #~ msgid "Getting SAML response" #~ msgstr "Ожидание отклика SAML" #~ msgid "Calling post-IdP site %s with relay state %s" #~ msgstr "ВызываетÑÑ Ñервер post-IdP %s Ñо ÑтатуÑом передачи %s" #~ msgid "Cookies %s" #~ msgstr "Куки %s" #~ msgid "Called HakaClient::processConsent()" #~ msgstr "Вызван HakaClient::processConsent()" #~ msgid "Checking if consent is necessary" #~ msgstr "ПроверÑем, необходимо ли ÑоглаÑие пользователÑ" #~ msgid "User consent to attribute transfer is necessary" #~ msgstr "Ðеобходимо ÑоглаÑие Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½Ð° передачу атрибутов" #~ msgid "" #~ "Your identity provider will send the following information to the SLCS " #~ "service:" #~ msgstr "" #~ "Ð¡Ð»ÐµÐ´ÑƒÑŽÑ‰Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð±ÑƒÐ´ÐµÑ‚ поÑлана Вашим провайдером идентификации на " #~ "Ñервер SLCS:" #~ msgid "==============================================================================" #~ msgstr "==============================================================================" #~ msgid "Do you consent to the release of that information? (y/n) " #~ msgstr "СоглаÑны ли Ð’Ñ‹ на передачу Ñтой информации? 
(y/n)" #~ msgid "Consent confirm redirection URL is %s, cookies %s" #~ msgstr "URL Ð¿ÐµÑ€ÐµÐ½Ð°Ð¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð¿Ñ€Ð¸ подтверждённом ÑоглаÑии - %s, куки-файлы %s" #~ msgid "LFC resolve timed out" #~ msgstr "ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ñ€Ð°Ð·Ð±Ð¾Ñ€Ð° LFC" #~ msgid "Error finding replicas: %s" #~ msgstr "Ошибка Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ ÐºÐ¾Ð¿Ð¸Ð¹: %s" #~ msgid "LFC resolve returned no entries" #~ msgstr "Разбор LFC не выдал запиÑей" #~ msgid "File does not exist in LFC" #~ msgstr "Этот файл не занеÑён в LFC" #~ msgid "Skipping invalid location: %s - %s" #~ msgstr "ПропуÑкаетÑÑ Ð½ÐµÐ²ÐµÑ€Ð½Ñ‹Ð¹ адреÑ: %s - %s" #~ msgid "Replica %s already exists for LFN %s" #~ msgstr "Реплика %s уже ÑущеÑтвует Ð´Ð»Ñ LFN %s" #~ msgid "Duplicate replica location: %s" #~ msgstr "Идентичное меÑтонахождение реплики: %s" #~ msgid "Resolve: checksum: %s" #~ msgstr "Разбор: ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма: %s" #~ msgid "Resolve: size: %llu" #~ msgstr "Разбор: размер: %llu" #~ msgid "Resolve: modified: %s" #~ msgstr "Разбор: Ð²Ñ€ÐµÐ¼Ñ Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ: %s" #~ msgid "LFN is missing in LFC (needed for replication)" #~ msgstr "Ð’ LFC отÑутÑтвует LFN (необходимо Ð´Ð»Ñ Ñ‚Ð¸Ñ€Ð°Ð¶Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ)" #~ msgid "LFN already exists in LFC" #~ msgstr "LFN уже зарегиÑтрирован в LFC" #~ msgid "Error starting session: %s" #~ msgstr "Ошибка запуÑка ÑеÑÑии: %s" #~ msgid "Using supplied guid %s" #~ msgstr "ИÑпользуетÑÑ Ð¿Ñ€ÐµÐ´Ð¾Ñтавленный guid %s" #~ msgid "Error creating LFC entry: %s" #~ msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð·Ð°Ð¿Ð¸Ñи каталога LFC: %s" #~ msgid "Error finding info on LFC entry %s which should exist: %s" #~ msgstr "" #~ "Ошибка Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ о запиÑи LFC %s, ÐºÐ¾Ñ‚Ð¾Ñ€Ð°Ñ Ð´Ð¾Ð»Ð¶Ð½Ð° " #~ "ÑущеÑтвовать: %s" #~ msgid "Error creating LFC entry %s, guid %s: %s" #~ msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð·Ð°Ð¿Ð¸Ñи каталога LFC %s, guid %s: %s" #~ msgid "Error entering metadata: %s" #~ msgstr "Ошибка при вводе метаданных: %s" #~ msgid "Warning: only md5 and adler32 checksums are supported by LFC" #~ msgstr "" #~ "Предупреждение: LFC поддерживает только проверочные Ñуммы типа md5 и " #~ "adler32" #~ msgid "No GUID defined for LFN - probably not preregistered" #~ msgstr "" #~ "Ð”Ð»Ñ LFN не задан GUID - возможно, не пройдена Ð¿Ñ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ñ€ÐµÐ³Ð¸ÑтрациÑ" #~ msgid "Error adding replica: %s" #~ msgstr "Ошибка Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ€ÐµÐ¿Ð»Ð¸ÐºÐ¸: %s" #~ msgid "Entering checksum type %s, value %s, file size %llu" #~ msgstr "" #~ "ЗаноÑитÑÑ Ð¿Ñ€Ð¾Ð²ÐµÑ€Ð¾Ñ‡Ð½Ð°Ñ Ñумма типа %s, Ñо значением %s, размер файла %llu" #~ msgid "Failed to remove LFN in LFC - You may need to do it by hand" #~ msgstr "" #~ "Ðе удалоÑÑŒ Ñтереть LFN в LFC - возможно, Вам придётÑÑ Ð´ÐµÐ»Ð°Ñ‚ÑŒ Ñто вручную" #~ msgid "Location is missing" #~ msgstr "ОтÑутÑтвует раÑположение" #~ msgid "Error getting replicas: %s" #~ msgstr "Ошибка Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ñ€ÐµÐ¿Ð»Ð¸Ðº: %s" #~ msgid "Failed to remove location from LFC: %s" #~ msgstr "Ошибка при удалении меÑÑ‚Ð¾Ð½Ð°Ñ…Ð¾Ð¶Ð´ÐµÐ½Ð¸Ñ Ð¸Ð· LFC: %s" #~ msgid "Failed to remove LFC directory: directory is not empty" #~ msgstr "Ðе удалоÑÑŒ Ñтереть директорию LFC: Ð´Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ Ð½Ðµ пуÑта" #~ msgid "Failed to remove LFC directory: %s" #~ msgstr "Ошибка при удалении каталога LFC: %s" #~ msgid "Failed to remove LFN in LFC: %s" #~ msgstr "Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ LFN из LFC: %s" #~ msgid "Error listing file or directory: %s" #~ msgstr "Ошибка вывода файла или каталога: %s" #~ msgid "Not a directory" #~ msgstr "Ðе ÑвлÑетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼" #~ msgid "Error opening directory: %s" #~ msgstr "Ошибка 
при открытии каталога: %s" #~ msgid "Error listing directory: %s" #~ msgstr "Ошибка вывода каталога: %s" #~ msgid "Error listing replicas: %s" #~ msgstr "Ошибка перечиÑÐ»ÐµÐ½Ð¸Ñ Ñ€ÐµÐ¿Ð»Ð¸Ðº: %s" #~ msgid "Creating LFC directory %s" #~ msgstr "Создание каталога LFC %s" #~ msgid "Error creating required LFC dirs: %s" #~ msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ‚Ñ€ÐµÐ±ÑƒÐµÐ¼Ñ‹Ñ… директорий LFC: %s" #~ msgid "Cannot rename to root directory" #~ msgstr "Ðевозможно переименовать в корневой каталог" #~ msgid "Error renaming %s to %s: %s" #~ msgstr "Ошибка Ð¿ÐµÑ€ÐµÐ¸Ð¼ÐµÐ½Ð¾Ð²Ñ‹Ð²Ð°Ð½Ð¸Ñ %s в %s: %s" #~ msgid "Error finding LFN from GUID %s: %s" #~ msgstr "Ошибка Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ LFN Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ GUID %s: %s" #~ msgid "GUID %s resolved to LFN %s" #~ msgstr "GUID %s принадлежит LFN %s" #~ msgid "Mismatching protocol/host in bulk resolve!" #~ msgstr "ÐеÑовпадающий протокол/Ñервер в маÑÑовом разборе!" #~ msgid "Cannot use a mixture of GUIDs and LFNs in bulk resolve" #~ msgstr "Ð’ маÑÑовом разборе Ð½ÐµÐ»ÑŒÐ·Ñ Ð¸Ñпользовать ÑмеÑÑŒ GUID-ов и LFN-ов" #~ msgid "Bulk resolve returned no entries" #~ msgstr "МаÑÑовый разбор не обнаружил запиÑей" #~ msgid "GUID %s, SFN %s" #~ msgstr "GUID %s, SFN %s" #~ msgid "LFC returned more results than we asked for!" #~ msgstr "LFC выдаёт больше результатов, чем надо!" #~ msgid "Invalid dataset name: %s" #~ msgstr "Ðеверное название набора данных: %s" #~ msgid "Invalid DQ2 URL %s" #~ msgstr "ÐедопуÑтимый URL DQ2: %s" #~ msgid "Could not obtain information from AGIS" #~ msgstr "Ðе удалоÑÑŒ получить информацию из AGIS" #~ msgid "No suitable endpoints found in AGIS" #~ msgstr "Ðе обнаружено подходÑщих точек входа в AGIS" #~ msgid "Proxy certificate does not have ATLAS VO extension" #~ msgstr "У Ñертификата доверенноÑти нет раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð’Ðž ATLAS" #~ msgid "Locations of dataset %s are cached" #~ msgstr "МеÑÑ‚Ð¾Ð¿Ð¾Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ Ð½Ð°Ð±Ð¾Ñ€Ð° данных %s в кÑше" #~ msgid "No such dataset: %s" #~ msgstr "Ðет такого набора данных: %s" #~ msgid "Malformed DQ2 response: %s" #~ msgstr "ИÑкажённый отзыв DQ2: %s" #~ msgid "Dataset %s: DUID %s" #~ msgstr "Ðабор данных %s: DUID %s" #~ msgid "Location: %s" #~ msgstr "РаÑположение: %s" #~ msgid "DQ2 returned %s" #~ msgstr "DQ2 ответил %s" #~ msgid "Duplicate location of file %s" #~ msgstr "ДублирующееÑÑ Ñ€Ð°Ñположение файла %s" #~ msgid "Site %s is not deterministic and cannot be used" #~ msgstr "Узел %s не определён однозначно и не может быть иÑпользован" #~ msgid "Site %s not found in AGIS info" #~ msgstr "Узел %s не обнаружен в информации AGIS" #~ msgid "Reading cached AGIS data from %s" #~ msgstr "Чтение кÑшрованных данных AGIS Ñ %s" #~ msgid "Cannot read cached AGIS info from %s, will re-download: %s" #~ msgstr "" #~ "Ðе удалоÑÑŒ прочеÑть информацию AGIS Ñ %s, будет произведена перезагрузка: " #~ "%s" #~ msgid "Cached AGIS info is out of date, will re-download" #~ msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ AGIS в кÑше уÑтарела, будет загружена заново" #~ msgid "Could not refresh AGIS info, cached version will be used: %s" #~ msgstr "" #~ "Ðе удалоÑÑŒ обновить информацию AGIS, будет иÑпользована кÑÑˆÐ¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð°Ñ " #~ "верÑиÑ: %s" #~ msgid "Could not download AGIS info: %s" #~ msgstr "Ðе удалоÑÑŒ загрузить информацию AGIS: %s" #~ msgid "AGIS returned %s" #~ msgstr "AGIS ответил %s " #~ msgid "Could not create file %s" #~ msgstr "Ðе удалоÑÑŒ Ñоздать файл %s" #~ msgid "Badly formatted output from AGIS" #~ msgstr "Ðеверно Ñформированный отзыв AGIS" #~ msgid "%s -> %s" #~ msgstr "%s -> %s" #~ msgid "Recieved token length: %i" #~ msgstr "Длина 
полученного токена: %i" #~ msgid "GSS accept security context failed: %i/%i%s" #~ msgstr "Сбой принÑÑ‚Ð¸Ñ ÐºÐ¾Ð½Ñ‚ÐµÐºÑта безопаÑноÑти GSS: %i/%i%s" #~ msgid "GSS accept security context: %i/%i" #~ msgstr "ПринÑтие контекÑта безопаÑноÑти GSS: %i/%i" #~ msgid "Returned token length: %i" #~ msgstr "Длина выданного токена: %i" #~ msgid "GSS unwrap failed: %i/%i%s" #~ msgstr "Сбой Ñ€Ð°Ð·Ð²Ñ‘Ñ€Ñ‚Ñ‹Ð²Ð°Ð½Ð¸Ñ GSS: %i/%i%s" #~ msgid "GSS unwrap: %i/%i" #~ msgstr "Развёртывание GSS: %i/%i" #~ msgid "Sent token length: %i" #~ msgstr "Длина отправленного токена: %i" #~ msgid "Security check failed in GSI MCC for incoming message" #~ msgstr "Ðе прошла проверка безопаÑноÑти в GSI MCC Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ñщего ÑообщениÑ" #~ msgid "Security check failed in GSI MCC for outgoing message" #~ msgstr "Ðе прошла проверка безопаÑноÑти в GSI MCC Ð´Ð»Ñ Ð¸ÑходÑщего ÑообщениÑ" #~ msgid "GSS wrap failed: %i/%i%s" #~ msgstr "Сбой ÑÐ²Ñ‘Ñ€Ñ‚Ñ‹Ð²Ð°Ð½Ð¸Ñ GSS: %i/%i%s" #~ msgid "GSS wrap: %i/%i" #~ msgstr "Свёртывание GSS: %i/%i" #~ msgid "Could not resolve peer side's hostname" #~ msgstr "Ðевозможно разобрать доменное Ð¸Ð¼Ñ ÑƒÐ·Ð»Ð° партнёра" #~ msgid "Peer host name to which this client will access: %s" #~ msgstr "Доменное Ð¸Ð¼Ñ ÑƒÐ·Ð»Ð°, к которому будет Ñовершён доÑтуп: %s" #~ msgid "GSS import name failed: %i/%i%s" #~ msgstr "Сбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð¼ÐµÐ½Ð¸ GSS: %i/%i%s" #~ msgid "GSS init security context failed: %i/%i%s" #~ msgstr "Сбой инициализации контекÑта безопаÑноÑти GSS: %i/%i%s" #~ msgid "GSS init security context: %i/%i" #~ msgstr "Ð˜Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ ÐºÐ¾Ð½Ñ‚ÐµÐºÑта безопаÑноÑти GSS: %i/%i" #~ msgid "No payload during GSI context initialisation" #~ msgstr "ОтÑутÑтвует Ð¿Ð¾Ð»ÐµÐ·Ð½Ð°Ñ Ð½Ð°Ð³Ñ€ÑƒÐ·ÐºÐ° при инициализации контекÑта GSI" #~ msgid "Transfer protocol is TLS or SSL3" #~ msgstr "Протокол передачи TLS или SSL3" #~ msgid "Transfer protocol is GLOBUS SSL" #~ msgstr "Протокол передачи GLOBUS SSL" #~ msgid "Transfer protocol is SSL2" #~ msgstr "Протокол передачи SSL2" #~ msgid "Transfer protocol is GSI" #~ msgstr "Протокол передачи GSI" #~ msgid "input token length: %i" #~ msgstr "Длина входного токена: %i" #~ msgid "GSS wrap/unwrap failed: %i/%i%s" #~ msgstr "Сбой ÑвёртываниÑ/Ñ€Ð°Ð·Ð²Ñ‘Ñ€Ñ‚Ñ‹Ð²Ð°Ð½Ð¸Ñ GSS: %i/%i%s" #~ msgid "Output token length: %i" #~ msgstr "Длина выходного токена: %i" #~ msgid "password sources" #~ msgstr "иÑточники паролÑ" #~ msgid "" #~ "There are %d NSS base directories where the certificate, key, and module " #~ "datbases live" #~ msgstr "" #~ "Обнаружено %d оÑновных директорий NSS, Ñодержащих базы данных " #~ "Ñертификатов, ключей и модулей" #~ msgid "Writing to xrootd is not (yet) supported" #~ msgstr "ЗапиÑÑŒ по протоколу xrootd (пока) не поддерживаетÑÑ" #~ msgid "Cannot (yet) remove files through xrootd" #~ msgstr "Ðевозможно (пока) удалить файл через xrootd" #~ msgid "Cannot (yet) create directories through xrootd" #~ msgstr "Ðевозможно (пока) Ñоздать каталог через xrootd" #~ msgid "Cannot (yet) rename files through xrootd" #~ msgstr "Ðевозможно (пока) переименовать файл через xrootd" #~ msgid "Rucio returned malormed xml: %s" #~ msgstr "Rucio возвратил некорректный XML: %s" #~ msgid "" #~ "Matchmaking, MaxDiskSpace*1024 >= DiskSpace - CacheDiskSpace problem, " #~ "ExecutionTarget: %d MB (MaxDiskSpace); JobDescription: %d MB (DiskSpace), " #~ "%d MB (CacheDiskSpace)" #~ msgstr "" #~ "Сравнение; MaxDiskSpace*1024 >= DiskSpace - неÑовпадение CacheDiskSpace, " #~ "у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB (MaxDiskSpace); в опиÑании задачи: %d " #~ "MB (DiskSpace), %d 
MB (CacheDiskSpace)" #~ msgid "" #~ "Matchmaking, WorkingAreaFree*1024 >= DiskSpace - CacheDiskSpace problem, " #~ "ExecutionTarget: %d MB (MaxDiskSpace); JobDescription: %d MB (DiskSpace), " #~ "%d MB (CacheDiskSpace)" #~ msgstr "" #~ "Сравнение; WorkingAreaFree*1024 >= DiskSpace - неÑовпадение " #~ "CacheDiskSpace, у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB (MaxDiskSpace); в " #~ "опиÑании задачи: %d MB (DiskSpace), %d MB (CacheDiskSpace)" #~ msgid " State: %s (%s)" #~ msgstr " СоÑтоÑние: %s (%s)" #~ msgid "Renewal of EMI ES jobs is not supported" #~ msgstr "Возобновление задач EMI ES не поддерживаетÑÑ" #~ msgid "" #~ "Could not convert the bartender attribute value (%s) to an URL instance " #~ "in configuration file (%s)" #~ msgstr "" #~ "Ðе удалоÑÑŒ преобразовать значение атрибута bartender (%s) в файле " #~ "наÑтроек в URL (%s)" #~ msgid "Command PASV/SPAS" #~ msgstr "Команда PASV/SPAS" #~ msgid "Wrong number in maxload: %s" #~ msgstr "ÐедопуÑтимое чиÑло в maxload: %s" #~ msgid "Wrong number in maxloadshare: %s" #~ msgstr "ÐедопуÑтимое чиÑло в maxloadshare: %s" #~ msgid "The type of share is not set in maxloadshare" #~ msgstr "Тип квоты не указан в maxloadshare" #~ msgid "share_limit should be located after maxloadshare" #~ msgstr "share_limit должен раÑполагатьÑÑ Ð¿Ð¾Ñле maxloadshare" #~ msgid "The name of share is not set in share_limit" #~ msgstr "Ðазвание квоты не указано в share_limit" #~ msgid "" #~ "'newdatastaging' configuration option is deprecated, 'enable_dtr' should " #~ "be used instead" #~ msgstr "" #~ "ÐžÐ¿Ñ†Ð¸Ñ Ð½Ð°Ñтроек 'newdatastaging' теперь называетÑÑ 'enable_dtr'; " #~ "пожалуйÑта, иÑпользуйте новое название" #~ msgid "Resume of EMI ES jobs is not supported" #~ msgstr "Продолжение задач EMI ES не поддерживаетÑÑ" #~ msgid "Failed to read input passphrase" #~ msgstr "Ðе удалоÑÑŒ прочеÑть пароль" #~ msgid "Input phrase is too short (at least %d char)" #~ msgstr "Пароль Ñлишком короткий (иÑпользуйте по крайней мере %d Ñимволов)" #~ msgid "Password is too short, need at least %u charcters" #~ msgstr "Пароль Ñлишком короткий, иÑпользуйте Ñ…Ð¾Ñ‚Ñ Ð±Ñ‹ %u Ñимволов." #~ msgid "Password is too long, need at most %u characters" #~ msgstr "Слишком длинный пароль, требуетÑÑ Ð½Ðµ более %u Ñимволов" #~ msgid "" #~ "ERROR: A computing resource using the GridFTP interface was requested, but" #~ msgstr "" #~ "ОШИБКÐ: Был запрошен вычиÑлительный реÑÑƒÑ€Ñ Ñ Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñом GridFTP, но" #~ msgid "" #~ " the corresponding plugin could not be loaded. Is the plugin " #~ "installed?" #~ msgstr "" #~ " ÑоответÑтвующий модуль не может быть подгружен. Ð’Ñ‹ уÑтановили Ñтот " #~ "модуль?" #~ msgid "" #~ " If not, please install the package 'nordugrid-arc-plugins-globus'." #~ msgstr "" #~ " ЕÑли нет, пожалуйÑта, уÑтановите пакет 'nordugrid-arc-plugins-" #~ "globus'." #~ msgid "" #~ " Depending on your type of installation the package name might " #~ "differ. " #~ msgstr "" #~ " Ð˜Ð¼Ñ Ð¿Ð°ÐºÐµÑ‚Ð° может отличатьÑÑ, в завиÑимоÑти от типа вашей ÑиÑтемы. " #~ msgid "" #~ "Error: Unable to parse limit in VectorLimitExceededFault response from " #~ "service to an 'int': %s" #~ msgstr "" #~ "Ошибка: Ðевозможно разобрать предел в отзыве ÑервиÑа " #~ "VectorLimitExceededFault как 'int': %s" #~ msgid "" #~ "%s is not a directory, it is needed for the client to function correctly" #~ msgstr "" #~ "%s не ÑвлÑетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼. 
Он необходим Ð´Ð»Ñ Ð½Ð¾Ñ€Ð¼Ð°Ð»ÑŒÐ½Ð¾Ð¹ работы клиента" #~ msgid "Created empty ARC job list file: %s" #~ msgstr "Создан пуÑтой файл Ð´Ð»Ñ Ð·Ð°Ð¿Ð¸Ñи задач ARC: %s" #~ msgid "ERROR: Failed to lock job list file %s" #~ msgstr "Ошибка: Ðе удалоÑÑŒ заблокировать файл ÑпиÑка задач %s" #~ msgid "Please try again later, or manually clean up lock file" #~ msgstr "" #~ "ПожалуйÑта, попытайтеÑÑŒ заново попозже, или удалите файл блокировки " #~ "вручную" #~ msgid "Could not write meta file %s" #~ msgstr "Ðе удалоÑÑŒ запиÑать мета-файл %s" #~ msgid "DTR %s: Transfer failed: %s" #~ msgstr "DTR %s: Сбой передачи: %s" #~ msgid "DTR %s: No locations defined for %s" #~ msgstr "DTR %s: Ðе определены раÑÐ¿Ð¾Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ %s" #~ msgid "" #~ "DTR %s: Request:\n" #~ "%s" #~ msgstr "" #~ "DTR %s: ЗапроÑ:\n" #~ "%s" #~ msgid "" #~ "DTR %s: Response:\n" #~ "%s" #~ msgstr "" #~ "DTR %s: Отклик:\n" #~ "%s" #~ msgid "DTR %s: %s" #~ msgstr "DTR %s: %s" #~ msgid "DTR %s: Failed locating credentials" #~ msgstr "DTR %s: Сбой Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð¾Ð² доÑтупа" #~ msgid "DTR %s: Failed to initiate client connection" #~ msgstr "DTR %s: Сбой запуÑка ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ð¾Ð¼" #~ msgid "DTR %s: Client connection has no entry point" #~ msgstr "DTR %s: ОтÑутÑтвует точка входа Ð´Ð»Ñ ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ð¾Ð¼" #~ msgid "DTR %s: Initiating delegation procedure" #~ msgstr "DTR %s: Ð˜Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð¿Ñ€Ð¾Ñ†ÐµÐ´ÑƒÑ€Ñ‹ делегированиÑ" #~ msgid "DTR %s: Failed to initiate delegation credentials" #~ msgstr "DTR %s: Сбой инициализации делегируемых прав доÑтупа" #~ msgid "DTR %s: Running command: %s" #~ msgstr "DTR %s: ВыполнÑетÑÑ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð° %s" #~ msgid "DTR %s: Error creating cache" #~ msgstr "DTR %s: Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÐºÑша" #~ msgid "DTR %s: Forcing re-download of file %s" #~ msgstr "DTR %s: ÐŸÑ€Ð¸Ð½ÑƒÐ´Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð¿ÐµÑ€ÐµÐ·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ° файла %s" #~ msgid "DTR %s: Cached file is locked - should retry" #~ msgstr "DTR %s: КÑшированный файл заблокирован - должна быть Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°" #~ msgid "DTR %s: Failed to initiate cache" #~ msgstr "DTR %s: Сбой инициализации кÑша" #~ msgid "DTR %s: File %s is cached (%s) - checking permissions" #~ msgstr "DTR %s: Файл %s занеÑён в кÑш (%s) - проверка прав доÑтупа" #~ msgid "DTR %s: Permission checking failed" #~ msgstr "DTR %s: Проверка доÑтупа не пройдена" #~ msgid "DTR %s: Permission checking passed" #~ msgstr "DTR %s: Проверка доÑтупа пройдена уÑпешно" #~ msgid "DTR %s: Source modification date: %s" #~ msgstr "DTR %s: Ð’Ñ€ÐµÐ¼Ñ Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ Ð¸Ñточника: %s" #~ msgid "DTR %s: Cache creation date: %s" #~ msgstr "DTR %s: Ð’Ñ€ÐµÐ¼Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÐºÑша: %s" #~ msgid "DTR %s: Cache file valid until: %s" #~ msgstr "DTR %s: Файл в кÑше дейÑтвителен до: %s" #~ msgid "DTR %s: Cached file is outdated, will re-download" #~ msgstr "DTR %s: КÑшированный файл уÑтарел, будет перезагружен" #~ msgid "DTR %s: Cached copy is still valid" #~ msgstr "DTR %s: КÑÑˆÐ¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð°Ñ ÐºÐ¾Ð¿Ð¸Ñ Ð²ÑÑ‘ ещё дейÑтвительна" #~ msgid "DTR %s: No locations for destination different from source found" #~ msgstr "DTR %s: Ðе обнаружено адреÑов цели, отличных от иÑточника" #~ msgid "DTR %s: Checking %s" #~ msgstr "DTR %s: ПроверÑетÑÑ %s" #~ msgid "DTR %s: Removing %s" #~ msgstr "DTR %s: УдалÑетÑÑ %s" #~ msgid "DTR %s: Linking/copying cached file to %s" #~ msgstr "DTR %s: Создание ÑÑылки/копирование файла из кÑша в %s" #~ msgid "Bad number in logsize: %s" #~ msgstr "ÐедопуÑтимое значение logsize: %s" #~ msgid "Starting grid-manager thread" #~ msgstr 
"ЗапуÑкаетÑÑ Ð¿Ð¾Ñ‚Ð¾Ðº Грид-менеджера" #~ msgid "Destroying jobs and waiting for underlying processes to finish" #~ msgstr "Уничтожение задач и ожидание Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ ÑоответÑтвующих им процеÑÑов" #~ msgid "Cannot open database" #~ msgstr "Ðе удалоÑÑŒ открыть базу данных" #~ msgid "Cannot abort transaction %s" #~ msgstr "Ðевозможно прервать транзакцию %s" #~ msgid "put: deadlock handling: try again" #~ msgstr "put: обработка взаимоблокировки, Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°" #~ msgid "put: cannot abort transaction: %s" #~ msgstr "put: невозможно оборвать транзакцию: %s" #~ msgid "put: %s" #~ msgstr "put: %s" #~ msgid "get: deadlock handling, try again" #~ msgstr "get: обработка взаимоблокировки, Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°" #~ msgid "get: cannot abort transaction: %s" #~ msgstr "get: невозможно оборвать транзакцию: %s" #~ msgid "get: %s" #~ msgstr "get: %s" #~ msgid "del: deadlock handling, try again" #~ msgstr "del: обработка взаимоблокировки, Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°" #~ msgid "del: cannot abort transaction: %s" #~ msgstr "del: невозможно оборвать транзакцию: %s" #~ msgid "del: %s" #~ msgstr "del: %s" #~ msgid "get_doc_name: deadlock handling, try again" #~ msgstr "get_doc_name: обработка взаимоблокировки, Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°" #~ msgid "get_doc_names: cannot abort transaction: %s" #~ msgstr "get_doc_names: невозможно прервать транзакцию: %s" #~ msgid "Error during the transaction: %s" #~ msgstr "Ошибка при транзакции: %s" #~ msgid "checkpoint: %s" #~ msgstr "ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñ‚Ð¾Ñ‡ÐºÐ°: %s" #~ msgid "Failed to create dir %s for temp proxies: %s" #~ msgstr "Ðе удалоÑÑŒ Ñоздать каталог %s Ð´Ð»Ñ Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ñ‹Ñ… доверенноÑтей: %s" #~ msgid "Could not write temporary file: %s" #~ msgstr "Ðе удалоÑÑŒ запиÑать временный файл: %s" #~ msgid "Error creating file %s with mkstemp(): %s" #~ msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° %s Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ mkstemp(): %s" #~ msgid "Error writing to tmp lock file %s: %s" #~ msgstr "Ошибка запиÑи во временный файл блокировки %s: %s" #~ msgid "Warning: closing tmp lock file %s failed" #~ msgstr "Предупреждение: Ñбой Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ð¾Ð³Ð¾ файла блокировки %s" #~ msgid "Source probably does not exist" #~ msgstr "ИÑточник Ñкорее вÑего не ÑущеÑтвует" #~ msgid "Problems resolving destination" #~ msgstr "Проблемы Ñ Ñ€Ð°Ð·Ð±Ð¾Ñ€Ð¾Ð¼ направлениÑ" #~ msgid "%s: Reprocessing RSL failed" #~ msgstr "%s: Сбой переобработки RSL" #~ msgid "" #~ "Dumping job description aborted because no resource returned any " #~ "information" #~ msgstr "" #~ "Обрыв раÑпечатки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸, Ñ‚.к. ни один из реÑурÑов не предоÑтавил " #~ "информацию" #~ msgid "Creating a PDP client" #~ msgstr "СоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ PDP" #~ msgid "job.Resources.QueueName = %s" #~ msgstr "job.Resources.QueueName = %s" #, fuzzy #~ msgid "PrepareToGet request timed out after %i seconds" #~ msgstr "Ðе удаётÑÑ ÑоединитьÑÑ Ñ %s:%s (%s), connection timed out" #, fuzzy #~ msgid "Bring online request timed out after %i seconds" #~ msgstr "Ðе удаётÑÑ ÑоединитьÑÑ Ñ %s:%s (%s), connection timed out" #, fuzzy #~ msgid "PrepareToPut request timed out after %i seconds" #~ msgstr "Ðе удаётÑÑ ÑоединитьÑÑ Ñ %s:%s (%s), connection timed out" #, fuzzy #~ msgid "Ls request timed out after %i seconds" #~ msgstr "Ðе удаётÑÑ ÑоединитьÑÑ Ñ %s:%s (%s), connection timed out" #, fuzzy #~ msgid "copy request timed out after %i seconds" #~ msgstr "Ðе удаётÑÑ ÑоединитьÑÑ Ñ %s:%s (%s), connection timed out" #~ msgid "Could not load GFAL DMC. 
Please check that this plugin is installed" #~ msgstr "" #~ "Ðе удалоÑÑŒ подгрузить GFAL DMC. ПожалуйÑта, убедитеÑÑŒ, что Ñтот " #~ "подключаемый модуль уÑтановлен." #~ msgid "Unable to remove file %s: No valid proxy found" #~ msgstr "Ðевозможно удалить файл %s: Ðе обнаружено приемлемой доверенноÑти" #~ msgid "Unable to transfer file %s: No valid credentials found" #~ msgstr "" #~ "Ðевозможно переÑлать файл %s: Ðе обнаружено дейÑтвительных параметров " #~ "доÑтупа" #~ msgid "Unable to register file %s: No valid credentials found" #~ msgstr "" #~ "Ðевозможно зарегиÑтрировать файл %s: Ðе обнаружено дейÑтвительных " #~ "параметров доÑтупа" #~ msgid "Unable to copy from %s: No valid credentials found" #~ msgstr "" #~ "Ðевозможно Ñкопировать из %s: Ðе обнаружено дейÑтвительных параметров " #~ "доÑтупа" #, fuzzy #~ msgid "arrayOfFileStatuses" #~ msgstr "arrayOfFileStatuses" #, fuzzy #~ msgid "Failed to create reading thread" #~ msgstr "Ðе удалоÑÑŒ Ñоздать поток Ð´Ð»Ñ Ð¿Ñ€Ð¸Ð²Ñзки к LDAP (%s)" #, fuzzy #~ msgid "Failed to create writing thread" #~ msgstr "Ðе удалоÑÑŒ Ñоздать поток Ð´Ð»Ñ Ð¿Ñ€Ð¸Ð²Ñзки к LDAP (%s)" #, fuzzy #~ msgid "DTR %s: Failed to resolve any source replicas" #~ msgstr "Ðе удалоÑÑŒ загрузить иÑточник \"%s\": %s" #, fuzzy #~ msgid "DTR %s: Failed to resolve destination replicas" #~ msgstr "Ðе удалоÑÑŒ зарегиÑтрировать назначение: %s" #, fuzzy #~ msgid "DTR %s: Failed to pre-register destination" #~ msgstr "Сбой при региÑтрации нового файла/направлениÑ" #, fuzzy #~ msgid "DTR %s: Failed checking source replica" #~ msgstr "Ошибка проверки иÑходного раздела %1." #, fuzzy #~ msgid "DTR %s: Error resolving destination replicas" #~ msgstr "DTR %s: Обнаружение ÑущеÑтвующих копий назначениÑ" #, fuzzy #~ msgid "DTR %s: Failed to prepare source" #~ msgstr "Сбой при доÑтупе к иÑточнику(-ам) конфигурации: %s\n" #, fuzzy #~ msgid "DTR %s: Failed to prepare destination" #~ msgstr "Ошибка при запиÑи в цель" #, fuzzy #~ msgid "The retrieved dn is %s" #~ msgstr "принимаетÑÑ Ð·Ð° 1 Гц " #~ msgid "xacml authz request: %s" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° допуÑк XACML: %s" #~ msgid "xacml authz response: %s" #~ msgstr "Отклик допуÑка XACML: %s" #, fuzzy #~ msgid "Failed initing handle" #~ msgstr "Ðевозможно инициализировать мутекÑ" #~ msgid "Bad authentication information" #~ msgstr "ÐÐµÐ¿Ñ€Ð¸ÐµÐ¼Ð»ÐµÐ¼Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ подлинноÑти" #~ msgid "nss db to be accesses: %s\n" #~ msgstr "будет иÑпользована база данных NSS %s\n" #~ msgid "Removing temp proxy %s" #~ msgstr "Удаление временной доверенноÑти %s" #~ msgid "Failed to create temporary file in %s - %s" #~ msgstr "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ð¾Ð³Ð¾ файла в %s - %s" #~ msgid "Failed to create control (%s) or session (%s) directories" #~ msgstr "Ðе удалоÑÑŒ Ñоздать контрольный каталог (%s) или каталог ÑеÑÑии (%s)" #~ msgid "Failed to store configuration into temporary file: %s" #~ msgstr "Сбой запиÑи наÑтроек во временный файл: %s" #, fuzzy #~ msgid "Failed to create/detect control (%s) or session (%s) directories" #~ msgstr "Ðе удалоÑÑŒ Ñоздать контрольный каталог (%s) или каталог ÑеÑÑии (%s)" #~ msgid "pretend utility is run by user with given name" #~ msgstr "Ñделать вид, что утилита запущена пользователем Ñ Ð´Ñ€ÑƒÐ³Ð¸Ð¼ именем" #~ msgid "pretend utility is run by user with given UID" #~ msgstr "Ñделать вид, что утилита запущена пользователем Ñ Ð´Ñ€ÑƒÐ³Ð¸Ð¼ UID" #~ msgid "Error processing configuration - EXITING" #~ msgstr "Сбой при обработке наÑтроек - ВЫХОД" #~ msgid "No suitable users found in configuration - 
EXITING" #~ msgstr "Ð’ наÑтройках не указано, от чьего имени производить запуÑк - ВЫХОД" #~ msgid "Can't recognize own username - EXITING" #~ msgstr "Попытка запуÑка от имени неизвеÑтного Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ - ВЫХОД" #, fuzzy #~ msgid "Processing grid-manager configuration" #~ msgstr "Ðе удалоÑÑŒ обработать наÑтройки grid-manager" #~ msgid "Usage: inputcheck [-h] [-d debug_level] RSL_file [proxy_file]" #~ msgstr "" #~ "ИÑпользование: inputcheck [-h] [-d уровень_отладки] RSL_file " #~ "[файл_доверенноÑти]" #~ msgid "Environment could not be set up" #~ msgstr "Ðе удалоÑÑŒ наÑтроить Ñреду" #~ msgid "User %s is not valid" #~ msgstr "ÐедейÑтвительный пользователь %s" #~ msgid "No configuration file found" #~ msgstr "Ðе найден файл наÑтроек" #~ msgid "" #~ "Gridmap user list feature is not supported anymore. Plase use @filename " #~ "to specify user list." #~ msgstr "" #~ "ПерехиÑление пользователей в gridmap больше не поддерживаетÑÑ. " #~ "ПожалуйÑта, укажите @файл Ñо ÑпиÑком пользователей." #~ msgid "Can't read users in specified file %s" #~ msgstr "Ðевозможно прочеÑть пользователей в указанном файле %s" #~ msgid "Wrong number in speedcontrol: " #~ msgstr "ÐедопуÑтимое чиÑло в speedcontrol: " #~ msgid "Wrong option in securetransfer" #~ msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð² securetransfer" #~ msgid "Wrong option in passivetransfer" #~ msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð² passivetransfer" #~ msgid "Wrong option in norootpower" #~ msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð² norootpower" #~ msgid "Wrong option in localtransfer" #~ msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð² localtransfer" #~ msgid "Junk in defaultttl command" #~ msgstr "БеÑÑмыÑлица в команде defaultttl" #~ msgid "Junk in maxrerun command" #~ msgstr "БеÑÑмыÑлица в команде maxrerun" #~ msgid "diskspace is empty" #~ msgstr "пуÑтое значение diskspace" #~ msgid "junk in diskspace command" #~ msgstr "беÑÑмыÑлица в команде diskspace" #~ msgid "Wrong number in diskspace command" #~ msgstr "ÐедопуÑтимое чиÑло в команде diskspace" #~ msgid "Junk in defaultlrms command" #~ msgstr "БеÑÑмыÑлица в команде defaultlrms" #~ msgid "Timeout for plugin is missing" #~ msgstr "ОтÑутÑтвует тайм-аут Ð´Ð»Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ð¾Ð³Ð¾ модулÑ" #~ msgid "preferredpattern value is missing" #~ msgstr "ОтÑутÑтвует значение preferredpattern" #~ msgid "Wrong option in newdatastaging" #~ msgstr "ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð² newdatastaging" #~ msgid "Bad URL in delivery_service: %s" #~ msgstr "ÐедопуÑтимый URL в delivery_service: %s" #~ msgid "Could not add file:/local to delivery services" #~ msgstr "Ðевозможно добавить file:/local к Ñлужбам доÑтавки" #~ msgid "Can't read user list in specified file %s" #~ msgstr "Ðевозможно прочеÑть ÑпиÑок пользователей в указанном файле %s" #~ msgid "Warning: creation of user \"%s\" failed" #~ msgstr "Предупреждение: не удалоÑÑŒ Ñоздать Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ \"%s\"" #~ msgid "" #~ "Gridmap user list feature is not supported anymore. Please use @filename " #~ "to specify user list." #~ msgstr "" #~ "СпиÑок пользователей в Gridmap больше не поддерживаетÑÑ. ПожалуйÑта, " #~ "иÑпользуйте @filename Ð´Ð»Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð¸Ñ ÑпиÑка пользователей." 
#~ msgid "No username entries in control directory" #~ msgstr "ОтÑутÑтвуют Ñлементы имени Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð² контрольном каталоге" #~ msgid "User %s for helperUtility is not configured" #~ msgstr "Ðе Ñконфигурирован пользователь %s Ð´Ð»Ñ Ð¼Ð¾Ð´ÑƒÐ»Ñ helperUtility" #~ msgid "Added user : %s" #~ msgstr "Добавлен пользователь : %s" #~ msgid "%s: No configured user found for uid %i" #~ msgstr "%s: Ðе обнаружено Ñконфигурированных пользователей Ð´Ð»Ñ uid %i" #~ msgid "%s: Added" #~ msgstr "%s: Добавлено" #~ msgid "Error with cache configuration: %s. Cannot clean up files for job %s" #~ msgstr "" #~ "Ошибка в наÑтройке кÑша: %s. Ðевозможно очиÑтить файлы Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #~ msgid "Wrong user name" #~ msgstr "Ðеверное Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ" #~ msgid "No configuration found for user %s in A-REX configuration" #~ msgstr "Ðе найдено наÑтроек Ð´Ð»Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %s в наÑтройках A-REX" #~ msgid "Peer certificate cannot be extracted" #~ msgstr "Ðевозможно извлечь Ñертификат контакта" #~ msgid "Peer cert verification fail" #~ msgstr "Ðе удалоÑÑŒ подтвердить дейÑтвительноÑть Ñертификата узла партнёра" #~ msgid "" #~ "Certificate cannot be extracted, make sure it is the case where client " #~ "side authentication is turned off" #~ msgstr "" #~ "Ðевозможно извлечь Ñертификат - убедитеÑÑŒ, что проверка подлинноÑти на " #~ "Ñтороне клиента отключена " #~ msgid "Peer certificate chain cannot be extracted" #~ msgstr "Ðевозможно извлечь цепочку Ñертификатов узла партнёра" #~ msgid "Can not read file %s with list of trusted VOMS DNs" #~ msgstr "" #~ "Ðе удалоÑÑŒ прочеÑть файл %s Ñо ÑпиÑком уникальных имён доверÑемых " #~ "Ñерверов VOMS" #~ msgid "Can not assign CA location - %s" #~ msgstr "Ðе удалоÑÑŒ припиÑать меÑтонахождение агентÑтва - %s" #~ msgid "Can not load certificate file - %s" #~ msgstr "Ðевозможно подгрузить файл Ñертификата - %s" #~ msgid "Can not load key file - %s" #~ msgstr "Ðе удалоÑÑŒ подгрузить файл Ñекретного ключа - %s" #~ msgid "Private key %s does not match certificate %s" #~ msgstr "Секретный ключ %s не Ñовпадает Ñ Ñертификатом %s" #~ msgid "Certificate %s failed Globus signing policy" #~ msgstr "Сертификат %s не ÑоответÑтвует политике подпиÑи Globus" #~ msgid "Resumation of CREAM jobs is not supported" #~ msgstr "ПерезапуÑк задач CREAM не поддерживаетÑÑ" #~ msgid "EMIESClient was not created properly." #~ msgstr "EMIESClient не был Ñоздан надлежащим образом." #~ msgid "Missing ActivityManager in response from %s" #~ msgstr "ОтÑутÑтвует Ñлемент ActivityManager в отзыве Ñ %s" #~ msgid "Current transfer FAILED: %s - %s" #~ msgstr "Ð¢ÐµÐºÑƒÑ‰Ð°Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð° ÐЕ СОСТОЯЛÐСЬ: %s - %s" #~ msgid "" #~ "The inputsandbox JDL attribute is referencing a non-regular file (%s)." #~ msgstr "атрибут JDL inputsandbox JDL ÑÑылаетÑÑ Ð½Ð° необычный файл (%s)." 
#~ msgid "NSS set domestic policy failed (%s) on certificate database %s" #~ msgstr "" #~ "Сбой уÑтановки локальной политики NSS (%s) Ð´Ð»Ñ Ð±Ð°Ð·Ñ‹ данных Ñертификатов %s" #~ msgid "Failed while transferring data (mostly timeout)" #~ msgstr "Сбой при передаче данных (обычно иÑтечение Ñрока ожиданиÑ)" #~ msgid "Cannot create directory %s/%s for cache: %s" #~ msgstr "Ðе удалоÑÑŒ Ñоздать каталог %s/%s Ð´Ð»Ñ ÐºÑша: %s" #~ msgid "Failed uploading file: %s - %s" #~ msgstr "Ðе удалоÑÑŒ отгрузить файл %s - %s" #~ msgid "Failed uploading file: %s" #~ msgstr "Ðе удалоÑÑŒ отгрузить файл: %s" #, fuzzy #~ msgid " Cluster: %s" #~ msgstr "КлаÑтер" #, fuzzy #~ msgid " Management Interface: %s" #~ msgstr "Ð˜Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð³Ð¾Ñ€Ñчими клавишами" #~ msgid "File download failed: %s - %s" #~ msgstr "Ошибка загрузи файла: %s - %s" #, fuzzy #~ msgid "" #~ "Ignoring job (%s), the Job::InterfaceName attribute must be specified" #~ msgstr "Задача (%s) игнорируетÑÑ, необходимо указывать атрибут Job::Flavour" #~ msgid "Broker %s could not be created" #~ msgstr "Брокер %s не может быть Ñоздан" #~ msgid "Loaded Broker %s" #~ msgstr "Подгружен брокер %s" #~ msgid "" #~ "Will not query endpoint (%s) because another thread is already querying it" #~ msgstr "" #~ "Точка доÑтупа (%s) не будет опрошена, так как её уже опрашивает другой " #~ "поток" #, fuzzy #~ msgid " Local information system URL: %s" #~ msgstr "Канал информации о ÑиÑтеме" #, fuzzy #~ msgid " Submission interface name: %s" #~ msgstr "Ðе найден Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ñ Ð¸Ð¼ÐµÐ½ÐµÐ¼ %s" #~ msgid "Location information:" #~ msgstr "Ð¡Ð²ÐµÐ´ÐµÐ½Ð¸Ñ Ð¾ раÑположении:" #~ msgid "Domain information:" #~ msgstr "Ð¡Ð²ÐµÐ´ÐµÐ½Ð¸Ñ Ð¾ домене:" #~ msgid " Service name: %s" #~ msgstr " Ð˜Ð¼Ñ Ñлужбы: %s" #~ msgid "Manager information:" #~ msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ ÑиÑтеме ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ€ÐµÑурÑом:" #~ msgid " Resource manager version: %s" #~ msgstr " ВерÑÐ¸Ñ ÑиÑтемы управлениÑ: %s" #~ msgid "Execution environment information:" #~ msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ рабочих Ñредах:" #~ msgid "Check: obtained creation date: %s" #~ msgstr "Проверка: получена дата ÑозданиÑ: %s" #~ msgid "meta_get_data: checksum: %s" #~ msgstr "meta_get_data: ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма: %s" #~ msgid "meta_get_data: size: %llu" #~ msgstr "meta_get_data: размер: %llu" #~ msgid "meta_get_data: created: %s" #~ msgstr "meta_get_data: Ñоздан: %s" #~ msgid "Failed to remove location from LFC" #~ msgstr "Ошибка при удалении меÑÑ‚Ð¾Ð½Ð°Ñ…Ð¾Ð¶Ð´ÐµÐ½Ð¸Ñ Ð¸Ð· LFC" #~ msgid "Contacting %s" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ %s" #, fuzzy #~ msgid "Warning: can't connect to RLS server %s: %s" #~ msgstr "" #~ "Ðевозможно подключитÑÑ Ðº Ñерверу. «%s» не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым адреÑом." #~ msgid "" #~ "Missing reference to factory and/or module. It is unsafe to use Globus in " #~ "non-persistent mode - RLS code is disabled. Report to developers." #~ msgstr "" #~ "ОтÑутÑтвует указание на фабрику и/или модуль. ИÑпользование Globus в " #~ "неопределённом режиме небезопаÑно - Обращение к RLS заблокировано. " #~ "СвÑжитеÑÑŒ Ñ Ñ€Ð°Ð·Ñ€Ð°Ð±Ð¾Ñ‚Ñ‡Ð¸ÐºÐ°Ð¼Ð¸." 
#, fuzzy #~ msgid "Warning: Failed to obtain attributes from %s: %s" #~ msgstr "VOMS: Ñбой при разборе атрибутов в Ñертификате атрибута (AC)" #~ msgid "Attribute: %s - %s" #~ msgstr "Ðтрибут: %s - %s" #~ msgid "RLS URL must contain host" #~ msgstr "RLS URL должен Ñодержать Ð¸Ð¼Ñ ÑƒÐ·Ð»Ð°" #, fuzzy #~ msgid "Source must contain LFN" #~ msgstr "RLS URL должен Ñодержать Ð¸Ð¼Ñ ÑƒÐ·Ð»Ð°" #, fuzzy #~ msgid "Destination must contain LFN" #~ msgstr "" #~ " Ðазначение \"%s\" должно быть каталогом \n" #~ " %s " #, fuzzy #~ msgid "No locations found for destination" #~ msgstr "Ðе найдено физичеÑких адреÑов Ð´Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ: %s" #~ msgid "LFN already exists in replica" #~ msgstr "LFN уже зарегиÑтрирован Ð´Ð»Ñ Ñ€ÐµÐ¿Ð»Ð¸ÐºÐ¸" #~ msgid "Failed to create GUID in RLS: %s" #~ msgstr "Ðе удалоÑÑŒ Ñоздать GUID в RLS: %s" #, fuzzy #~ msgid "There is same LFN in %s" #~ msgstr "Ðет подпроекта %1 в SUBDIRS" #~ msgid "Failed to add LFN-GUID to RLS: %s" #~ msgstr "Ðе удалоÑÑŒ добавить LFN-GUID в RLS: %s" #, fuzzy #~ msgid "Warning: failed to add attribute to RLS: %s" #~ msgstr "Ðе удалоÑÑŒ добавить LFN-GUID в RLS: %s" #, fuzzy #~ msgid "Failed to retrieve LFN/LRC: %s" #~ msgstr "Ðе могу получить ÑпиÑок каталогов!" #~ msgid "No LFNs found in %s" #~ msgstr "Ð’ %s не обнаружено логичеÑких имён файлов" #~ msgid "lfn: %s(%s) - %s" #~ msgstr "LFN: %s(%s) - %s" #~ msgid "lfn: %s - pfn: %s" #~ msgstr "LFN: %s - PFN: %s" #, fuzzy #~ msgid "Rename: failed to rename file" #~ msgstr "" #~ "Ðе удаетÑÑ Ð¿ÐµÑ€ÐµÐ¸Ð¼ÐµÐ½Ð¾Ð²Ð°Ñ‚ÑŒ файл '%s' в '%s': Ñбой функции g_rename(): %s" #, fuzzy #~ msgid "DTR %s: No SOAP response" #~ msgstr "Ðет ответа SOAP" #, fuzzy #~ msgid "DTR %s: Starting bulk request" #~ msgstr "Принимать _непрÑмые запроÑÑ‹" #~ msgid "Cancelling all DTRs" #~ msgstr "Прерывание вÑех запроÑов DTR" #, fuzzy #~ msgid "Received back DTR %s" #~ msgstr "Получен Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR %s, ÑоÑтоÑние %s" #~ msgid "Job submission failed, no more possible targets" #~ msgstr "Ðе удалоÑÑŒ заÑлать задачу, возможные Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¾Ñ‚ÑутÑтвуют" #~ msgid "Unable to print job description: No matching target found." #~ msgstr "" #~ "Ðевозможно вывеÑти опиÑание задачи: Ðе найдено ни одного подходÑщего " #~ "назначениÑ." #~ msgid "Fileset copy for this kind of source is not supported" #~ msgstr "" #~ "Копирование набора файлов из иÑточника данного типа не поддерживаетÑÑ" #~ msgid "Failed listing metafiles" #~ msgstr "ПеречиÑление метафайлов не удалоÑÑŒ" #~ msgid "Failed listing files" #~ msgstr "ПеречиÑление файлов не удалоÑÑŒ" #~ msgid "%s%s" #~ msgstr "%s%s" #~ msgid "Delete failed: %s" #~ msgstr "Сбой при удалении: %s" #, fuzzy #~ msgid "Rename failed: %s" #~ msgstr "переименовать не удалоÑÑŒ, %s (%s -> %s)." #, fuzzy #~ msgid "Rename failed: %s (%s)" #~ msgstr "переименовать не удалоÑÑŒ, %s (%s -> %s)." #~ msgid "service" #~ msgstr "Ñлужба" #~ msgid "The arcsrmping command is a ping client for the SRM service." #~ msgstr "Команда arcsrmping ÑвлÑетÑÑ Ð°Ð½Ð°Ð»Ð¾Ð³Ð¾Ð¼ утилиты ping Ð´Ð»Ñ Ñлужб SRM." #~ msgid "The service argument is a URL to an SRM service." #~ msgstr "Ðргументом Ñлужбы должен быть URL Ñервера SRM" #, fuzzy #~ msgid "ExecutionTarget %s added to ExecutionTargetSet" #~ msgstr "Файл добавлен в проект" #~ msgid "AuthRequest(inmsg) = " #~ msgstr "AuthRequest(inmsg) = " #~ msgid "Starting:" #~ msgstr "ЗапуÑк:" #~ msgid "Stopping:" #~ msgstr "ОÑтановка:" #~ msgid "%(sn)s.%(rn)s called" #~ msgstr "вызов %(sn)s.%(rn)s" #, fuzzy #~ msgid "No URLs to connect to (in %s)" #~ msgstr "Ð’ выделении нет клонов." 
#~ msgid "ERROR connecting to" #~ msgstr "Ошибка ÑвÑзи Ñ" #~ msgid "ERROR connecting to all of these:" #~ msgstr "Ошибка ÑвÑзи Ñ ÐºÐ°Ð¶Ð´Ñ‹Ð¼ из:" #~ msgid "ID" #~ msgstr "Идентификатор объекта" #~ msgid "ZODBStore constructor called" #~ msgstr "Вызван конÑтруктор ZODBStore" #~ msgid "datadir:" #~ msgstr "datadir:" #~ msgid "TransDBStore constructor called" #~ msgstr "Вызван конÑтруктор TransDBStore" #~ msgid "db environment opened" #~ msgstr "Окружение базы данных открыто" #~ msgid "couldn't find DeadlockRetries, using 5 as default" #~ msgstr "не удалоÑÑŒ найти DeadlockRetries, по умолчанию иÑпользуетÑÑ 5" #~ msgid "couldn't find SleepTime, using %d as default" #~ msgstr "не удалоÑÑŒ найти SleepTime, по умолчанию иÑпользуетÑÑ %d" #~ msgid "got deadlock - retrying" #~ msgstr "взаимоблокировка - Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°" #~ msgid "Got deadlock error" #~ msgstr "Ошибка взаимоблокировки" #~ msgid "Got rep_dead_handle error" #~ msgstr "Получена ошибка rep_dead_handle" #~ msgid "Error getting %s" #~ msgstr "Ошибка Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ %s" #~ msgid "got DBLockDeadlockError" #~ msgstr "получена ошибка DBLockDeadlockError" #~ msgid "retrying transaction" #~ msgstr "Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° транзакции" #~ msgid "Deadlock exception, giving up..." #~ msgstr "Ошибка взаимоблокировки - ÑдаюÑÑŒ..." #, fuzzy #~ msgid "Read-only db. I'm not a master." #~ msgstr "umount: невозможно перемонтировать %s только Ð´Ð»Ñ Ñ‡Ñ‚ÐµÐ½Ð¸Ñ\n" #~ msgid "cannot delete non-existing entries" #~ msgstr "невозможно удалить неÑущеÑтвующие запиÑи" #~ msgid "Error setting %s" #~ msgstr "Ошибка приÑÐ²Ð¾ÐµÐ½Ð¸Ñ %s" #~ msgid "db environment closed" #~ msgstr "Окружение базы данных закрыто" #~ msgid "error closing environment" #~ msgstr "ошибка при закрытии Ñреды" #~ msgid "PickleStore constructor called" #~ msgstr "Вызван конÑтруктор PickleStore" #~ msgid "filename:" #~ msgstr "файл:" #~ msgid "StringStore constructor called" #~ msgstr "Вызван конÑтруктор StringStore" #~ msgid "CachedStringStore constructor called" #~ msgstr "Вызван конÑтруктор CachedStringStore" #, fuzzy #~ msgid "Failed to create parent directory, continuing anyway: %s" #~ msgstr "Ðе удалоÑÑŒ Ñоздать архивный каталог %s: %s" #, fuzzy #~ msgid "Failed to output the cert req as ascii format" #~ msgstr "Ðе удалоÑÑŒ запуÑтить Ñкрипт очиÑтки кÑша" #~ msgid "Not invoking janitor because it's not enabled in the config file" #~ msgstr "Janitor не будет запущен, Ñ‚.к. 
он не активирован в наÑтройках" #~ msgid "Janitor not enabled and job contains non-deployed RTEs" #~ msgstr "Janitor не запущен, а задача требует отÑутÑтвующую Ñреду иÑполнениÑ" #~ msgid "Janitor not installed and job contains non-deployed RTEs" #~ msgstr "" #~ "Janitor не уÑтановлен, а задача требует отÑутÑтвующую Ñреду иÑполнениÑ" #~ msgid "Janitor timeout while deploying Dynamic RTE(s)" #~ msgstr "" #~ "Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Janitor вышло при уÑтановке динамичеÑкой Ñреды иÑполнениÑ" #~ msgid "Janitor not enabled and there are missing RTE(s)" #~ msgstr "Janitor не запущен, а Ñреда иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð¾Ñ‚ÑутÑтвует" #~ msgid "Janitor failed to deploy Dynamic RTE(s)" #~ msgstr "Janitor не Ñмог уÑтановить динамичеÑкую Ñреду иÑполнениÑ" #~ msgid "" #~ "Janitor timeout while removing Dynamic RTE(s) associations (ignoring)" #~ msgstr "" #~ "Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Janitor вышло при удалении ÑвÑзей динамичеÑкой Ñреды " #~ "иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ (игнорируетÑÑ)" #~ msgid "Janitor failed to remove Dynamic RTE(s) associations (ignoring)" #~ msgstr "" #~ "Janitor не Ñмог удалить ÑвÑзи динамичеÑкой Ñреды иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ (игнорируетÑÑ)" #~ msgid "Janitor executable not found at %s" #~ msgstr "ИÑполнÑемый файл Janitor не найден в %s" #~ msgid "Can't run %s" #~ msgstr "Ðевозможно выполнить %s" #~ msgid "Can't start %s" #~ msgstr "Ðевозможно запуÑтить %s" #~ msgid "Stopping Master Thread." #~ msgstr "ОÑтанавливаетÑÑ Ð¾Ñновной поток" #~ msgid "Master Thread is deleting threads." #~ msgstr "Головной поток уничтожает потоки." #~ msgid "Master Thread stopped." #~ msgstr "ОÑновной поток оÑтановлен" #~ msgid "Thread %d, Pipes failed" #~ msgstr "Поток %d, Ñбой Ð¿ÐµÑ€ÐµÐ½Ð°Ð¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð¿Ð¾Ñ‚Ð¾ÐºÐ¾Ð²" #~ msgid "Thread %d, Fork failed" #~ msgstr "Поток %d, Ñбой почкованиÑ" #~ msgid "Thread %d, child is terminating." #~ msgstr "Поток %d, обрываетÑÑ Ð´Ð¾Ñ‡ÐµÑ€Ð½Ð¸Ð¹ поток" #~ msgid "Thread %d is ready." #~ msgstr "Поток %d готов" #~ msgid "Thread %d got Task %d." #~ msgstr "Поток %d получил задание %d." #~ msgid "Thread %d, Input is not SOAP" #~ msgstr "Поток %d, вход не в формате SOAP" #~ msgid "" #~ "Thread %d: Task %d Result:\n" #~ "%s\n" #~ msgstr "" #~ "Поток %d: Задание %d Результат:\n" #~ "%s\n" #~ msgid "Thread %d, TaskQueue returned empty Task." #~ msgstr "Поток %d, TaskQueue вернул пуÑтое задание." #~ msgid " Deconstructing Web Service" #~ msgstr "ЛиквидируетÑÑ Ð²ÐµÐ±-Ñлужба" #~ msgid " Flushing set and queue" #~ msgstr "СбраÑывютÑÑ Ð·Ð°Ð´Ð°Ð½Ð¸Ñ Ð¸ очередь" #~ msgid " Deconstructing is waiting for PerlProcessor" #~ msgstr "Ð›Ð¸ÐºÐ²Ð¸Ð´Ð°Ñ†Ð¸Ñ Ð¾Ð¶Ð¸Ð´Ð°ÐµÑ‚ PerlProcessor" #~ msgid " Deconstructing is waiting for TaskQueue" #~ msgstr "Ð›Ð¸ÐºÐ²Ð¸Ð´Ð°Ñ†Ð¸Ñ Ð¾Ð¶Ð¸Ð´Ð°ÐµÑ‚ TaskQueue" #~ msgid " Deconstructing is waiting for TaskSet" #~ msgstr "Ð›Ð¸ÐºÐ²Ð¸Ð´Ð°Ñ†Ð¸Ñ Ð¾Ð¶Ð¸Ð´Ð°ÐµÑ‚ TaskSet" #~ msgid " Deconstructing Web Service ... done" #~ msgstr "ЛиквидируетÑÑ Ð²ÐµÐ±-Ñлужба ... готово" #~ msgid "Creating fault! Reason: \"%s\"" #~ msgstr "СоздаётÑÑ Ð¾Ñ‚Ñ‡Ñ‘Ñ‚ о Ñбое! Причина: \"%s\"" #~ msgid "DREWEBSERVICE 1 %d" #~ msgstr "DREWEBSERVICE 1 %d" #~ msgid "DREWEBSERVICE 2 %d" #~ msgstr "DREWEBSERVICE 2 %d" #~ msgid "TaskSet is waiting for objects (%d) still using the set." #~ msgstr "TaskSet ожидает объекты (%d) вÑÑ‘ ещё иÑпользующие поÑтановку." #~ msgid "Added Task %d to the set. " #~ msgstr "Задание %d добавлено в группу. " #~ msgid "Removed Task %d out of to the set. " #~ msgstr "Задание %d удалено из поÑтановки. " #~ msgid "TaskSet is waiting for objects still using the set." 
#~ msgstr "TaskSet ожидает объекты вÑÑ‘ ещё иÑпользующие поÑтановку." #~ msgid "Pushed Task %d into the queue. " #~ msgstr "Задание %d переведено в очередь. " #~ msgid "Shifted Task %d out of to the queue. " #~ msgstr "Задание %d передвинуто в очередь. " #~ msgid "Chunk %u: %u - %u" #~ msgstr "Фрагмент %u: %u - %u" #~ msgid "Hopi SlaveMode is active, PUT is only allowed to existing files" #~ msgstr "" #~ "Ðктивирован подчинённый режим Хопи, Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ñ PUT разрешена только Ð´Ð»Ñ " #~ "ÑущеÑтвующих файлов" #~ msgid "Removing complete file in slave mode" #~ msgstr "УдалÑетÑÑ Ð²ÐµÑÑŒ файл в подчинённом режиме" #~ msgid "Hopi Initialized" #~ msgstr "Hopi запущен" #~ msgid "Hopi DocumentRoot is " #~ msgstr "Hopi DocumentRoot:" #~ msgid "Hopi SlaveMode is on!" #~ msgstr "Включён режим Hopi SlaveMode!" #~ msgid "Hopi shutdown" #~ msgstr "Hopi оÑтанавливаетÑÑ" #~ msgid "PUT called" #~ msgstr "Вызван метод PUT" #~ msgid "File size is %u" #~ msgstr "Размер файла: %u" #~ msgid "error reading from HTTP stream" #~ msgstr "Ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð¿Ð¾Ñ‚Ð¾ÐºÐ° HTTP" #~ msgid "error on write" #~ msgstr "ошибка запиÑи" #~ msgid "Input for PUT operation is neither stream nor buffer" #~ msgstr "Вход операции PUT не ÑвлÑетÑÑ Ð½Ð¸ потоком, ни буфером" #~ msgid "method=%s, path=%s, url=%s, base=%s" #~ msgstr "метод=%s, путь=%s, URL-адреÑ=%s, база=%s" #~ msgid "No content provided for PUT operation" #~ msgstr "Ðе указано Ñодержимое Ð´Ð»Ñ Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ð¸ PUT" #~ msgid "Not supported operation" #~ msgstr "ÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ñ Ð½Ðµ поддерживаетÑÑ" #~ msgid "request node is empty" #~ msgstr "ПуÑтой узел запроÑа" #~ msgid "Evaluator is not initialized" #~ msgstr "Обработчик не запущен" #~ msgid "Policy(ies) modified - reloading evaluator" #~ msgstr "Ðорматив(Ñ‹) изменен(Ñ‹) - перезагрузка анализатора" #~ msgid "NULL response" #~ msgstr "Ответ NULL" #~ msgid "Authorized from Charon service" #~ msgstr "Допущен Ñлужбой Charon" #~ msgid "" #~ "Not authorized from Charon service; Some of the RequestItem does not " #~ "satisfy Policy" #~ msgstr "" #~ "Ðе допущен Ñлужбой Charon; некоторые пункты RequestItem не удовлетворÑÑŽÑ‚ " #~ "нормативам" #~ msgid "process: %s: not supported" #~ msgstr "процеÑÑ: %s: не поддерживаетÑÑ" #~ msgid "Evaluator: %s" #~ msgstr "Обработчик: %s" #~ msgid "Policy location: %s" #~ msgstr "РаÑположение правил доÑтупа: %s" #~ msgid "Loading policy from %s" #~ msgstr "Загрузка правил из %s" #~ msgid "Failed loading policy from %s" #~ msgstr "Сбой загрузки правил из %s" #~ msgid "Checking policy modification: %s" #~ msgstr "Проверка изменений в правилах: %s" #~ msgid "Policy removed: %s" #~ msgstr "Правила удалены: %s" #~ msgid "Old policy times: %u/%u" #~ msgstr "Ð’Ñ€ÐµÐ¼Ñ Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ/ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñтарых правил: %u/%u" #~ msgid "New policy times: %u/%u" #~ msgstr "Ð’Ñ€ÐµÐ¼Ñ Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ/ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð½Ð¾Ð²Ñ‹Ñ… правил: %u/%u" #~ msgid "Policy Decision Request failed" #~ msgstr "Ошибка запроÑа Ñ€ÐµÑˆÐµÐ½Ð¸Ñ Ð¾ доÑтупе" #~ msgid "Policy Decision Request succeeded!!!" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¾ принÑтии Ñ€ÐµÑˆÐµÐ½Ð¸Ñ Ñоздан!!!" #~ msgid "ES:CreateActivities: Failed to create new job: %s" #~ msgstr "ES: CreateActivity: Ðе удалоÑÑŒ Ñоздать новую задачу: %s" #~ msgid "Not all jobs are cleaned yet" #~ msgstr "Ещё не вÑе задачи вычищены" #~ msgid "Trying again" #~ msgstr "ÐÐ¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°" #~ msgid "Jobs cleaned" #~ msgstr "задач очищено" #~ msgid "Preparing directories" #~ msgstr "Подготовка каталогов" #~ msgid "Empty URL list add to the thread." 
#~ msgstr "ПуÑтой ÑпиÑок URL добавлен к потоку." #~ msgid "Empty message add to the thread." #~ msgstr "Ð’ поток добавлено пуÑтое Ñообщение." #~ msgid "Status (%s): Failed" #~ msgstr "СоÑтоÑние (%s): Сбой" #~ msgid "Status (%s): OK" #~ msgstr "СоÑтоÑние (%s): УÑпех" #~ msgid "Empty message won't be send to the neighbors." #~ msgstr "ПуÑтое Ñообщение не будет разоÑлано ÑоÑедÑм." #~ msgid "%s: %d seconds to the next database cleaning." #~ msgstr "%s: %d Ñекунд(Ñ‹) до Ñледующей очиÑтки базы данных." #~ msgid "Parsing configuration parameters" #~ msgstr "Обработка параметров наÑтройки" #~ msgid "" #~ "The Endpoint element is defined multiple time in ISIS configuration. The " #~ "'%s' value will be used." #~ msgstr "" #~ "Элемент Endpoint задан неÑколько раз в наÑтройках ISIS. Будет " #~ "иÑпользовано значение '%s'." #~ msgid "Empty endpoint element in the configuration!" #~ msgstr "ПуÑтой Ñлемент endpoint в наÑтройках!" #~ msgid "KeyPath: %s" #~ msgstr "KeyPath: %s" #~ msgid "CertificatePath: %s" #~ msgstr "CertificatePath: %s" #~ msgid "CACertificatesDir: %s" #~ msgstr "CACertificatesDir: %s" #~ msgid "CACertficatePath: %s" #~ msgstr "CACertficatePath: %s" #~ msgid "Missing or empty KeyPath element in the configuration!" #~ msgstr "ПуÑтой или отÑутÑтвующий Ñлемент KeyPath в наÑтройках!" #~ msgid "Misisng or empty CertificatePath element in the configuration!" #~ msgstr "ПуÑтой или отÑутÑтвующий Ñлемент CertificatePath в наÑтройках!" #~ msgid "Missing or empty ProxyPath element in the configuration!" #~ msgstr "ПуÑтой или отÑутÑтвующий Ñлемент ProxyPath в наÑтройках!" #~ msgid "Missing or empty CACertificatesDir element in the configuration!" #~ msgstr "ПуÑтой или отÑутÑтвующий Ñлемент CACertificatesDir в наÑтройках!" #~ msgid "Missing or empty CACertificatePath element in the configuration!" #~ msgstr "ПуÑтой или отÑутÑтвующий Ñлемент CACertificatePath в наÑтройках!" #~ msgid "" #~ "Configuration error. Retry: \"%d\" is not a valid value. Default value " #~ "will be used." #~ msgstr "" #~ "Ошибйка наÑтроек. Retry: \"%d\" не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым значением. Будет " #~ "иÑпользовано значение по умолчанию." #~ msgid "" #~ "The Retry element is defined multiple time in ISIS configuration. The " #~ "'%d' value will be used." #~ msgstr "" #~ "Элемент Retry задан неÑколько раз в наÑтройках ISIS. Будет иÑпользовано " #~ "значение '%d'." #~ msgid "" #~ "Configuration error. Sparsity: \"%d\" is not a valid value. Default value " #~ "will be used." #~ msgstr "" #~ "Ошибйка наÑтроек. Sparsity: \"%d\" не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым значением. " #~ "Будет иÑпользовано значение по умолчанию." #~ msgid "" #~ "The Sparsity element is defined multiple time in ISIS configuration. The " #~ "'%d' value will be used." #~ msgstr "" #~ "Элемент Sparsity задан неÑколько раз в наÑтройках ISIS. Будет " #~ "иÑпользовано значение '%d'." #~ msgid "Sparsity: %d" #~ msgstr "Sparsity: %d" #~ msgid "" #~ "Configuration error. ETValid: \"%s\" is not a valid value. Default value " #~ "will be used." #~ msgstr "" #~ "Ошибйка наÑтроек. ETValid: \"%s\" не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым значением. Будет " #~ "иÑпользовано значение по умолчанию." #~ msgid "Configuration error. ETValid is empty. Default value will be used." #~ msgstr "" #~ "Ошибйка наÑтроек. Значение ETValid не задано. Будет иÑпользовано значение " #~ "по умолчанию." #~ msgid "ETValid: %d seconds" #~ msgstr "ETValid: %d Ñекунд" #~ msgid "" #~ "Configuration error. ETRemove: \"%s\" is not a valid value. Default value " #~ "will be used." #~ msgstr "" #~ "Ошибйка наÑтроек. 
ETRemove: \"%s\" не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым значением. " #~ "Будет иÑпользовано значение по умолчанию." #~ msgid "Configuration error. ETRemove is empty. Default value will be used." #~ msgstr "" #~ "Ошибйка наÑтроек. Значение ETRemove не задано. Будет иÑпользовано " #~ "значение по умолчанию." #~ msgid "ETRemove: %d seconds" #~ msgstr "ETRemove: %d Ñекунд" #~ msgid "Invalid database path definition" #~ msgstr "Ðеверное определение пути к базе данных" #~ msgid "The InfoProvider element in ISIS configuration is empty." #~ msgstr "Элемент InfoProvider в наÑтройках ISIS пуÑÑ‚." #~ msgid "RemoveRegistrations message sent to neighbors." #~ msgstr "Сообщение RemoveRegistrations разоÑлано ÑоÑедÑм." #~ msgid "ISIS (%s) has %d more thread%s" #~ msgstr "Ð’ ISIS (%s) ещё %d поток%s" #~ msgid "ISIS (%s) destroyed." #~ msgstr "ISIS (%s) ликвидирован." #~ msgid "Query received: %s" #~ msgstr "Получен запроÑ: %s" #~ msgid "Register received: ID=%s; EPR=%s; MsgGenTime=%s" #~ msgstr "Получена региÑтрациÑ: ID=%s; EPR=%s; MsgGenTime=%s" #~ msgid "RemoveRegistrations received: ID=%s" #~ msgstr "Получен Ð·Ð°Ð¿Ñ€Ð¾Ñ RemoveRegistrations: ID=%s" #~ msgid "GetISISList received" #~ msgstr "Получен Ð·Ð°Ð¿Ñ€Ð¾Ñ GetISISList" #~ msgid "Connect received" #~ msgstr "Получен Ð·Ð°Ð¿Ñ€Ð¾Ñ Connect" #~ msgid "Communication error: input is not SOAP" #~ msgstr "Сбой передачи данных: ввод не в формате SOAP" #~ msgid "Neighbors count recalculate from %d to %d (at ISIS %s)" #~ msgstr "КоличеÑтво ÑоÑедей переÑчитано Ñ %d на %d (Ð´Ð»Ñ ISIS %s)" #~ msgid "Query failed at %s, choosing new InfoProvider." #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ðº %s не удалÑÑ, выбираем новый InfoProvider." #~ msgid "Remove ISIS (%s) from the list of InfoProviders." #~ msgstr "Удаление ISIS (%s) из ÑпиÑка InfoProviders." #~ msgid "No InfoProvider is available." #~ msgstr "Ðет доÑтупных InfoProvider." #~ msgid "Neighbors count: %d" #~ msgstr "КоличеÑтво ÑоÑедей: %d" #~ msgid "Connect status (%s): Failed" #~ msgstr "СоÑтоÑние ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ (%s): Сбой" #~ msgid "Connect status (%s): OK" #~ msgstr "СоÑтоÑние ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ (%s): УÑпех" #~ msgid "Database mass updated." #~ msgstr "База данных маÑÑово обновлена." #~ msgid "Error converting maxload parameter %s to integer" #~ msgstr "Ошибка Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð° maxload %s в целое" #~ msgid "Setting max downloads to %u" #~ msgstr "МакÑимальное чиÑло загрузок уÑтанавливаетÑÑ Ð½Ð° %u" #~ msgid "Failed writing file with inputs" #~ msgstr "Ðе удалоÑÑŒ запиÑать файл Ñ Ð²Ñ…Ð¾Ð´Ð½Ñ‹Ð¼Ð¸ ÑÑылками" #~ msgid "Starting child downloader process" #~ msgstr "ЗапуÑк дочернего процеÑÑа загрузчика" #~ msgid "Failed to run downloader process for job id %s" #~ msgstr "Сбой запуÑка процеÑÑа загрузчика Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s" #~ msgid "%s: child is running" #~ msgstr "%s: дочерний процеÑÑ Ð·Ð°Ð¿ÑƒÑ‰ÐµÐ½" #~ msgid "Download process for job %s timed out" #~ msgstr "ПроцеÑÑ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ¸ Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s превыÑил Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ" #~ msgid "Downloader exited with code: %i" #~ msgstr "Загрузчик завершил работу Ñ ÐºÐ¾Ð´Ð¾Ð¼ выхода: %i" #~ msgid "TargetRetriver%s initialized with %s service url: %s" #~ msgstr "TargetRetriver%s запущен Ñ URL Ñлужбы %s: %s" #~ msgid "" #~ "Trying to migrate to %s: Migration to a CREAM resource is not supported." #~ msgstr "Попытка миграции на %s: ÐœÐ¸Ð³Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð° реÑÑƒÑ€Ñ CREAM не поддерживаетÑÑ." 
#~ msgid "Failed dowloading %s to %s" #~ msgstr "Ðе удалоÑÑŒ загрузить %s в %s" #~ msgid "Migration for EMI ES is not implemented" #~ msgstr "ÐœÐ¸Ð³Ñ€Ð°Ñ†Ð¸Ñ Ð´Ð»Ñ EMI ES не реализована" #~ msgid "Collecting Job (%s jobs) information." #~ msgstr "СобираетÑÑ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ задачах (%s задач)" #~ msgid "%s directory exist! This job downloaded previously." #~ msgstr "Каталог %s уже ÑущеÑтвует! Эта задача была загружена ранее." #~ msgid "Cancel of EMI ES jobs is not supported" #~ msgstr "Прерывание задач EMI ES не поддерживаетÑÑ" #~ msgid "" #~ "Trying to migrate to %s: Migration to a legacy ARC resource is not " #~ "supported." #~ msgstr "" #~ "Попытка миграции на %s: ÐœÐ¸Ð³Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð° Ñтарый реÑÑƒÑ€Ñ ARC не поддерживаетÑÑ." #~ msgid "" #~ "Missing reference to factory and/or module. It is unsafe to use Globus in " #~ "non-persistent mode - TargetRetriver for ARC0 is disabled. Report to " #~ "developers." #~ msgstr "" #~ "ОтÑутÑтвует указание на фабрику и/или модуль. ИÑпользование Globus в " #~ "неопределённом режиме небезопаÑно - TargetRetriever Ð´Ð»Ñ ARC0 " #~ "заблокирован. СвÑжитеÑÑŒ Ñ Ñ€Ð°Ð·Ñ€Ð°Ð±Ð¾Ñ‚Ñ‡Ð¸ÐºÐ°Ð¼Ð¸." #~ msgid "" #~ "Trying to migrate to %s: Migration to a UNICORE resource is not supported." #~ msgstr "" #~ "Попытка миграции на %s: ÐœÐ¸Ð³Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð° реÑÑƒÑ€Ñ UNICORE не поддерживаетÑÑ." #~ msgid "Collecting ExecutionTarget (A-REX/BES) information." #~ msgstr "Сбор информации об ExecutionTarget (A-REX/BES)." #~ msgid "Generating BES target: %s" #~ msgstr "СоздаётÑÑ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ BES: %s" #~ msgid "" #~ "Multiple execution environments per queue specified for target: \"%s\". " #~ "Execution environment information will be ignored." #~ msgstr "" #~ "Ð”Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ \"%s\" указаны множеÑтвенные рабочие Ñреды очередей. " #~ "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ рабочих Ñредах игнорируетÑÑ." #~ msgid "ComputingShare is associated with the ExecutionEnvironment \"%s\"" #~ msgstr "ComputingShare аÑÑоциирована Ñ ExecutionEnvironment \"%s\"" #~ msgid "ExecutionEnvironment \"%s\" located" #~ msgstr "Обнаружена ExecutionEnvironment \"%s\"" #~ msgid "Getting BES jobs is not supported" #~ msgstr "Извлечение задач BES не поддерживаетÑÑ" #~ msgid "targets.size() = %d" #~ msgstr "targets.size() = %d" #~ msgid "Wrong middleware type: %s" #~ msgstr "ÐедопуÑтимый тип подпрограммного обеÑпечению: %s" #~ msgid "Found %u %s execution services from the index service at %s" #~ msgstr "" #~ "Обнаружено %u вычиÑлительных ÑервиÑов %s через ÑÐµÑ€Ð²Ð¸Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð° на %s" #~ msgid "" #~ "Matching against job description,following targets possible for " #~ "BenchmarkBroker: %d" #~ msgstr "" #~ "Сравнение Ñ Ð¾Ð¿Ð¸Ñанием задачи; Ñледующие Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ñ€Ð°ÑÑматриваютÑÑ Ð² " #~ "алгоритме BenchmarkBroker: %d" #~ msgid "%d. Resource: %s; Queue: %s" #~ msgstr "%d. 
РеÑурÑ: %s; Очередь: %s" #~ msgid "Resource will be ranked according to the %s benchmark scenario" #~ msgstr "Ðазначение будет упорÑдочено в ÑоответÑтвии Ñ Ñталонным теÑтом %s" #~ msgid "Best targets are: %d" #~ msgstr "Ðаилучшие цели: %d" #~ msgid "FastestQueueBroker is filtering %d targets" #~ msgstr "FastestQueueBroker перебирает %d назначений" #~ msgid "FastestQueueBroker will rank the following %d targets" #~ msgstr "FastestQueueBroker упорÑдочивает Ñледующие %d назначений" #~ msgid "" #~ "Matching against job description, following targets possible for " #~ "DataBroker: %d" #~ msgstr "" #~ "Сравнение Ñ Ð¾Ð¿Ð¸Ñанием задачи; Ñледующие Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ñ€Ð°ÑÑматриваютÑÑ Ð² " #~ "алгоритме DataBroker: %d" #~ msgid "" #~ "Matching against job description, following targets possible for " #~ "RandomBroker: %d" #~ msgstr "" #~ "Сравнение Ñ Ð¾Ð¿Ð¸Ñанием задачи; Ñледующие Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ñ€Ð°ÑÑматриваютÑÑ Ð² " #~ "алгоритме RandomBroker: %d" #~ msgid "Cannot create Python list" #~ msgstr "Ðевозможно Ñоздать ÑпиÑок Python" #~ msgid "Private key of the credential object is NULL" #~ msgstr "Закрытый ключ объекта параметров доÑтупа имеет значение NULL" #~ msgid "Unable to get job (%s), job is deleted" #~ msgstr "Ðевозможно извлечь задачу (%s), задача удалена" #~ msgid "Unable to get job (%s), it has not finished yet" #~ msgstr "Ðевозможно извлечь задачу (%s), она ещё не завершилаÑÑŒ" #~ msgid "Unable to renew job (%s), job already finished" #~ msgstr "Ðевозможно возобновить задачу (%s), она уже завершилаÑÑŒ" #~ msgid "Unable to resume job (%s), job is %s and cannot be resumed" #~ msgstr "" #~ "Ðевозможно продолжить задачу (%s), задача в ÑоÑтоÑнии %s не может быть " #~ "продолжена" #~ msgid "" #~ "Unable to resubmit job (%s), job description could not be retrieved " #~ "remotely" #~ msgstr "" #~ "Ðевозможно перезапуÑтить задачу (%s), опиÑание задачи не может быть " #~ "извлечено Ñ ÑƒÐ´Ð°Ð»Ñ‘Ð½Ð½Ð¾Ð³Ð¾ иÑточника" #~ msgid "Unable to resubmit job (%s), local input file (%s) has changed" #~ msgstr "" #~ "Ðевозможно перезапуÑтить задачу (%s), локальный входной файл (%s) " #~ "изменилÑÑ" #~ msgid "Unable to kill job (%s), job is deleted" #~ msgstr "Ðевозможно прервать задачу (%s), задача удалена" #~ msgid "Unable to kill job (%s), job has already finished" #~ msgstr "Ðевозможно прервать задачу (%s), она уже завершилаÑÑŒ" #~ msgid "Unable to clean job (%s), job has not finished yet" #~ msgstr "Ðевозможно вычиÑтить задачу (%s), она ещё не завершилаÑÑŒ" #~ msgid "Target (%s) was explicitly rejected." #~ msgstr "Цель (%s) Ñвно отклонена." #~ msgid "Possible targets after prefiltering: %d" #~ msgstr "Возможные Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¿Ð¾Ñле предварительного отбора: %d" #~ msgid "Health State: %s" #~ msgstr "СоÑтоÑние здоровьÑ: %s" #~ msgid "Target sorting not done, sorting them now" #~ msgstr "ÐÐ°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð½Ðµ упорÑдочены, ведётÑÑ Ñортировка" #~ msgid "For this middleware there are no testjobs defined." #~ msgstr "Ð”Ð»Ñ Ñтого Грид-ПО пробных задач пока что нет" #~ msgid "For this middleware only %s testjobs are defined." #~ msgstr "Ð”Ð»Ñ Ñтого Грид-ПО ÑущеÑтвуют только Ñледующие теÑтовые задачи: %s" #~ msgid "FreeSlots = %d; UsedSlots = %d; WaitingJobs = %d" #~ msgstr "Свободных меÑÑ‚ = %d; занÑтых меÑÑ‚ = %d; задач в очереди = %d" #~ msgid "Generating computing target: %s" #~ msgstr "СоздаётÑÑ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ Ð´Ð»Ñ Ð²Ñ‹Ñ‡Ð¸ÑлениÑ: %s" #~ msgid "" #~ "Multiple execution environments per queue specified for target. Execution " #~ "environment information will be ignored." 
#~ msgstr "" #~ "Ð”Ð»Ñ Ñ†ÐµÐ»Ð¸ указаны множеÑтвенные рабочие Ñреды очередей. Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ " #~ "рабочих Ñредах игнорируетÑÑ." #~ msgid "Found %ld targets" #~ msgstr "Обнаружено %ld назначений" #~ msgid "Resource: %s" #~ msgstr "РеÑурÑ: %s" #~ msgid "Found %ld jobs" #~ msgstr "Обнаружено %ld задач" #~ msgid " URL: %s:%s" #~ msgstr " URL: %s:%s" #~ msgid "TargetRetriever plugin \"%s\" not found." #~ msgstr "Подключаемый модуль TargetRetriever \"%s\" не обнаружен" #~ msgid "TargetRetriever %s could not be created." #~ msgstr "TargetRetriever %s не может быть Ñоздан" #~ msgid "Loaded TargetRetriever %s" #~ msgstr "Подгружен TargetRetriever %s" #~ msgid "Overwriting already defined alias \"%s\"" #~ msgstr "ПереопределÑетÑÑ ÑƒÐ¶Ðµ заданный пÑевдоним \"%s\"" #~ msgid "Could not resolve alias \"%s\" it is not defined." #~ msgstr "Ðе удалоÑÑŒ разобрать Ñокращённое название \"%s\" Ñ‚.к. оно не задано" #~ msgid "" #~ "The defaultservices attribute value contains a wrongly formated element " #~ "(%s) in configuration file (%s)" #~ msgstr "" #~ "Ðтрибут defaultservices Ñодержит неверно Ñформулированный Ñлемент (%s) в " #~ "файле наÑтроек (%s)" #~ msgid "" #~ "The defaultservices attribute value contains an unknown servicetype %s at " #~ "%s in configuration file (%s)" #~ msgstr "" #~ "Значение атрибута defaultservices Ñодержит неизвеÑтный тип ÑервиÑа %s в " #~ "%s в файле наÑтроек (%s)" #~ msgid "Adding selected service %s:%s" #~ msgstr "ДобавлÑетÑÑ Ð²Ñ‹Ð±Ñ€Ð°Ð½Ð½Ñ‹Ð¹ ÑÐµÑ€Ð²Ð¸Ñ %s:%s" #~ msgid "" #~ "The rejectservices attribute value contains a wrongly formated element " #~ "(%s) in configuration file (%s)" #~ msgstr "" #~ "Ðтрибут rejectservices Ñодержит неверно Ñформулированный Ñлемент (%s) в " #~ "файле наÑтроек (%s)" #~ msgid "" #~ "The rejectservices attribute value contains an unknown servicetype %s at " #~ "%s in configuration file (%s)" #~ msgstr "" #~ "Значение атрибута rejectservices Ñодержит неизвеÑтный тип ÑервиÑа %s в %s " #~ "в файле наÑтроек (%s)" #~ msgid "Adding rejected service %s:%s" #~ msgstr "БлокируетÑÑ ÑÐµÑ€Ð²Ð¸Ñ %s:%s" #~ msgid "rejected" #~ msgstr "отклонён" #~ msgid "Cannot resolve alias \"%s\". Loop detected: %s" #~ msgstr "" #~ "Ðевозможно разобратьÑокращённое названив \"%s\". Обнаружена цикличеÑÐºÐ°Ñ " #~ "завиÑимоÑть: %s" #, fuzzy #~ msgid "Cannot resolve alias %s, it is not defined" #~ msgstr "Ðе удалоÑÑŒ разобрать Ñокращённое название \"%s\" Ñ‚.к. оно не задано" #~ msgid "Alias name (%s) contains a unknown servicetype %s at %s" #~ msgstr "ПÑевдоним (%s) Ñодержит неизвеÑтный тип ÑервиÑа %s в %s" #, fuzzy #~ msgid "Adding service %s:%s from resolved alias %s" #~ msgstr "Ошибка при добавлении Ñлужбы. %s" #~ msgid "Alias (%s) contains a wrongly formatted element (%s)" #~ msgstr "ПÑевдоним (%s) Ñодержит неверно оформленый Ñлемент (%s)" #, fuzzy #~ msgid "DTR %s: Re-resolving destination replicas" #~ msgstr "Локальный &каталог назначениÑ:" #~ msgid "The testjob ID should be 1, 2 or 3.\n" #~ msgstr "Ðомер теÑтовой задачи может быть 1, 2 или 3.\n" #~ msgid "Unable to print job description: No target found." #~ msgstr "Ðевозможно вывеÑти опиÑание задачи: Ðе найдено ни одного назначениÑ" #~ msgid "" #~ "Cannot find any proxy. arcresub currently cannot run without a proxy.\n" #~ " If you have the proxy file in a non-default location,\n" #~ " please make sure the path is specified in the client configuration " #~ "file.\n" #~ " If you don't have a proxy yet, please run 'arcproxy'!" #~ msgstr "" #~ "Ðе удалоÑÑŒ обнаружить доверенноÑть. 
Ð’ Ñтой верÑии arcresub не работает " #~ "без доверенноÑти.\n" #~ " ЕÑли Ваша доверенноÑть хранитÑÑ Ð² неÑтандартном меÑте, пожалуйÑта,\n" #~ " убедитеÑÑŒ, что в наÑтройках клиента указан правильный путь.\n" #~ " ЕÑли же Ð’Ñ‹ пока не Ñоздали доверенноÑть, запуÑтите 'arcproxy'!" #~ msgid "explicitly select or reject a specific resource" #~ msgstr "Ñвным образом выбрать или отÑеÑть указанный реÑурÑ" #~ msgid "explicitly select or reject a specific resource for new jobs" #~ msgstr "Ñвным образом выбрать или отÑеÑть указанный реÑÑƒÑ€Ñ Ð´Ð»Ñ Ð½Ð¾Ð²Ñ‹Ñ… задач" #~ msgid "explicitly select or reject an index server" #~ msgstr "Ñвным образом выбрать или отÑеÑть каталог реÑурÑов" #~ msgid "Unable to find JobController for job %s (plugin type: %s)" #~ msgstr "" #~ "Ðевозможно обнаружить модуль JobController Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s (тип " #~ "подключаемого модулÑ: %s)" #~ msgid "No jobs selected for cleaning" #~ msgstr "Ðе выбраны задачи Ð´Ð»Ñ Ð²Ñ‹Ñ‡Ð¸Ñ‰ÐµÐ½Ð¸Ñ" #~ msgid "No jobs selected for migration" #~ msgstr "Ðе выбраны задачи Ð´Ð»Ñ Ð¿ÐµÑ€ÐµÐ·Ð°Ñылки" #~ msgid "No queuing jobs to migrate" #~ msgstr "Ðет задач в очереди Ð´Ð»Ñ Ð¿ÐµÑ€ÐµÐ·Ð°Ñылки" #~ msgid "No jobs selected for resubmission" #~ msgstr "Ðе выбраны задачи Ð´Ð»Ñ Ð¿ÐµÑ€ÐµÐ·Ð°Ð¿ÑƒÑка" #~ msgid "service_url request_file" #~ msgstr "service_url request_file" #~ msgid "url of the policy decision service" #~ msgstr "URL Ñлужбы принÑÑ‚Ð¸Ñ Ñ€ÐµÑˆÐµÐ½Ð¸Ð¹" #~ msgid "path to request file" #~ msgstr "путь к файлу Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñом" #~ msgid "use SAML 2.0 profile of XACML v2.0 to contact the service" #~ msgstr "Ð´Ð»Ñ ÑвÑзи Ñо Ñлужбой иÑпользуетÑÑ Ð¿Ñ€Ð¾Ñ„Ð¸Ð»ÑŒ SAML 2.0 XACML v2.0" #~ msgid "path to the certificate file" #~ msgstr "путь к файлу Ñертификата" #~ msgid "path to the private key file" #~ msgstr "путь к файлу Ñекретного ключа" #~ msgid "" #~ "Cannot find the path of the certificate/key file, and proxy file, please " #~ "setup environment X509_USER_CERT/X509_USER_KEY, or X509_USER_PROXY,or " #~ "setup certificatepath/keypath, or proxypath in a configuration file" #~ msgstr "" #~ "Ðе удалоÑÑŒ найти путь к открытому/закрытому ключу и доверенноÑти. " #~ "ПожалуйÑта, задайте переменную Ñреды X509_USER_CERT/X509_USER_KEY, или " #~ "X509_USER_PROXY, или значение certificatepath/keypath, или proxypath в " #~ "файле наÑтроек" #~ msgid "" #~ "CA certificate directory: %s is given by X509_CERT_DIR, but it can't been " #~ "accessed." #~ msgstr "" #~ "Каталог Ñертификатов агентÑтв CA %s задан X509_CERT_DIR, но не может быть " #~ "прочитан." #~ msgid "" #~ "The start time that you set plus validityPeriod: %s is before current " #~ "time: %s.\n" #~ "Please set the time constraints once again." #~ msgstr "" #~ "Указанное Ð²Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð° Ñ Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸ÐµÐ¼ Ñрока годноÑти %s предшеÑтвует " #~ "текущему времени: %s.\n" #~ "ПожалуйÑта, задайте Ñроки Ñнова." #~ msgid "" #~ "The start time that you set plus validityPeriod: %s is after current " #~ "time: %s.\n" #~ "The validityPeriod will be shorten to %s." #~ msgstr "" #~ "Указанное Ð²Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð° Ñ Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸ÐµÐ¼ Ñрока годноÑти %s позже текущего " #~ "времени: %s.\n" #~ "Срок годноÑти будет Ñокращён до %s." #~ msgid "" #~ "The start time that you set: %s is before current time: %s.\n" #~ "The current time will be used as start time." #~ msgstr "" #~ "Указанное Ð²Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð° %s предшеÑтвует текущему времени: %s.\n" #~ "Текущее Ð²Ñ€ÐµÐ¼Ñ Ð±ÑƒÐ´ÐµÑ‚ иÑпользовано в качеÑтве начального." 
#~ msgid "" #~ "The end time that you set: %s is after the start time plus " #~ "validityPeriod: %s.\n" #~ " The validityPeriod will not be changed.\n" #~ msgstr "" #~ "Указанное Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ %s позже времени начала Ñ Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸ÐµÐ¼ Ñрока " #~ "годноÑти: %s.\n" #~ "Срок годноÑти не будет изменён.\n" #~ msgid "" #~ "The end time that you set: %s is before the start time plus " #~ "validityPeriod: %s.\n" #~ "The validityPeriod will be shorten to: %s." #~ msgstr "" #~ "Указанное Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ %s предшеÑтвует времени начала Ñ Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸ÐµÐ¼ " #~ "Ñрока годноÑти: %s.\n" #~ "Срок годноÑти будет Ñокращён до: %s." #~ msgid "" #~ "The end time that you set: %s is before start time: %s.\n" #~ "Please set the time constraints once again.\n" #~ msgstr "" #~ "Указанное Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ %s предшеÑтвует времени начала: %s.\n" #~ "ПожалуйÑта, задайте Ñроки Ñнова.\n" #~ msgid " Service_ID's number is not equivalent with the EPR's number!" #~ msgstr "Ðомер Service_ID отличен от номера в EPR!!" #~ msgid "[ISIS testing ...]" #~ msgstr "[теÑтирование ISIS ...]" #~ msgid "This tiny tool can be used for testing the ISIS's abilities." #~ msgstr "" #~ "Эта ÑÐºÑ€Ð¾Ð¼Ð½Ð°Ñ ÑƒÑ‚Ð¸Ð»Ð¸Ñ‚Ð° может быть иÑпользована Ð´Ð»Ñ Ñ‚ÐµÑÑ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ " #~ "возможноÑтей ISIS" #~ msgid "The method are the folows: Query, Register, RemoveRegistration" #~ msgstr "Следующие методы доÑтупны: Query, Register, RemoveRegistration" #~ msgid "define the URL of the Bootstrap ISIS" #~ msgstr "задать URL начального ISIS" #~ msgid "isis" #~ msgstr "ISIS" #~ msgid "define the URL of the ISIS to connect directly" #~ msgstr "задать URL Ñервера ISIS Ð´Ð»Ñ Ð¿Ñ€Ñмого доÑтупа" #~ msgid "define which method are use (Query, Register, RemoveRegistration)" #~ msgstr "задать иÑпользуемый метод (Query, Register, RemoveRegistration)" #~ msgid "method" #~ msgstr "метод" #~ msgid "get neighbors list from the BootstrapISIS" #~ msgstr "получить ÑпиÑок ÑоÑедей Ñ Ð½Ð°Ñ‡Ð°Ð»ÑŒÐ½Ð¾Ð³Ð¾ ISIS" #~ msgid " ISIS tester start!" #~ msgstr " ЗапуÑк теÑтера ISIS!" #~ msgid " Not enough or too much parameters! %s" #~ msgstr "ÐедоÑтаток или избыток параметров! %s" #~ msgid "ByteIOBackend datadir:" #~ msgstr "Ð”Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ… ByteIOBackend:" #~ msgid "ByteIOBackend transferdir:" #~ msgstr "Каталог передачи ByteIOBackend:" #~ msgid "ByteIOService transfer dir:" #~ msgstr "Ð”Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡ ByteIOService:" #~ msgid "Subject:" #~ msgstr "Тема:" #~ msgid "checking" #~ msgstr "проверÑетÑÑ" #~ msgid "HopiBackend datadir:" #~ msgstr "Ð”Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ… HopiBackend:" #~ msgid "HopiBackend transferdir:" #~ msgstr "Каталог передачи HopiBackend:" #~ msgid "ApacheBackend datadir:" #~ msgstr "Ð”Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ… ApacheBackend:" #~ msgid "ApacheBackend transferdir:" #~ msgstr "Каталог передачи ApacheBackend:" #~ msgid "Cannot import backend class %(c)s (reason: %(r)s)" #~ msgstr "" #~ "Ðевозможно импортировать клаÑÑ Ð²Ð½ÑƒÑ‚Ñ€ÐµÐ½Ð½ÐµÐ³Ð¾ интерфейÑа %(c)s (причина: " #~ "%(r)s)" #~ msgid "Cannot import store class" #~ msgstr "Ðевозможно импортировать клаÑÑ Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ" #~ msgid "Cannot set CheckPeriod, MinCheckInterval" #~ msgstr "Ðевозможно выÑтавить CheckPeriod, MinCheckInterval" #~ msgid "Got Librarian URLs from the config:" #~ msgstr "Ð’ наÑтройках обнаружен Ð°Ð´Ñ€ÐµÑ Ð‘Ð¸Ð±Ð»Ð¸Ð¾Ñ‚ÐµÐºÐ°Ñ€Ñ:" #~ msgid "" #~ "No Librarian URLs and no ISIS URLs found in the configuration: no self-" #~ "healing!" 
#~ msgstr "" #~ "Ð’ наÑтройках не найдены адреÑа ни БиблиотекарÑ, ни ISIS: " #~ "ÑамовоÑÑтановление невозможно!" #~ msgid "Got Bartender URLs from the config:" #~ msgstr "Ð’ наÑтройках обнаружен Ð°Ð´Ñ€ÐµÑ Ð‘Ð°Ñ€Ð¼ÐµÐ½Ð°:" #~ msgid "" #~ "No Bartender URLs and no ISIS URLs found in the configuration: no self-" #~ "healing!" #~ msgstr "" #~ "Ð’ наÑтройках не найдены адреÑа ни Бармена, ни ISIS: ÑамовоÑÑтановление " #~ "невозможно!" #~ msgid "Getting Librarians from ISISes" #~ msgstr "Получение ÑпиÑка Библиотекарей из ISIS-ов" #~ msgid "Trying to get Librarian from" #~ msgstr "Попытка получить Ð°Ð´Ñ€ÐµÑ Ð‘Ð¸Ð±Ð»Ð¸Ð¾Ñ‚ÐµÐºÐ°Ñ€Ñ Ð¸Ð·" #~ msgid "Got Librarian from ISIS:" #~ msgstr "Получен Ð°Ð´Ñ€ÐµÑ Ð‘Ð¾Ð±Ð»Ð¸Ð¾Ñ‚ÐµÐºÐ°Ñ€Ñ Ð¸Ð· ISIS:" #~ msgid "Error in isisLibrarianThread: %s" #~ msgstr "Ошибка в isisLibrarianThread: %s" #~ msgid "Getting Bartenders from ISISes" #~ msgstr "Получение ÑпиÑка Барменов из ISIS-ов" #~ msgid "Trying to get Bartender from" #~ msgstr "Попытка получить Ð°Ð´Ñ€ÐµÑ Ð‘Ð°Ñ€Ð¼ÐµÐ½Ð° из" #~ msgid "Got Bartender from ISIS:" #~ msgstr "Получен Ð°Ð´Ñ€ÐµÑ Ð‘Ð°Ñ€Ð¼ÐµÐ½Ð° из ISIS:" #~ msgid "Error in isisBartenderThread: %s" #~ msgstr "Ошибка в isisBartenderThread: %s" #~ msgid "Shepherd" #~ msgstr "Чабан" #~ msgid "" #~ "\n" #~ "CHECKSUM OK" #~ msgstr "" #~ "\n" #~ "CHECKSUM в порÑдке" #~ msgid "" #~ "\n" #~ "CHECKSUM MISMATCH" #~ msgstr "" #~ "\n" #~ "CHECKSUM не Ñовпадает" #~ msgid "checksum refreshed" #~ msgstr "ÐšÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма обновлена" #~ msgid "\n" #~ msgstr "\n" #~ msgid "" #~ "\n" #~ "\n" #~ "File" #~ msgstr "" #~ "\n" #~ "\n" #~ "Файл" #~ msgid "" #~ "\n" #~ "\n" #~ "I have an invalid replica of file" #~ msgstr "" #~ "\n" #~ "\n" #~ "Обнаружена Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ ÐºÐ¾Ð¿Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð°" #, fuzzy #~ msgid "ERROR checking checksum of %(rID)s, reason: %(r)s" #~ msgstr "неверный заголовок: ошибка контрольной Ñуммы" #~ msgid "changeState" #~ msgstr "changeState" #~ msgid "" #~ "\n" #~ "\n" #~ msgstr "" #~ "\n" #~ "\n" #~ msgid "Getting AHash URL from the config" #~ msgstr "Ð’ наÑтройках обнаружен Ð°Ð´Ñ€ÐµÑ Ð-Ð¥Ñш" #~ msgid "Got AHash URLs:" #~ msgstr "Получены адреÑа Ð-Ð¥Ñш:" #~ msgid "AHash URL found in the configuration." #~ msgstr "Ð’ наÑтройках обнаружен Ð°Ð´Ñ€ÐµÑ Ð-Ð¥Ñш" #, fuzzy #~ msgid "Setting running state to True" #~ msgstr "Создание и отправка запроÑа о ÑоÑтоÑнии" #~ msgid "No AHash from the config" #~ msgstr "Ð’ наÑтройках нет Ð-Ð¥Ñш" #, fuzzy #~ msgid "AHash URL and ISIS URL not found in the configuration." #~ msgstr "Ð’ наÑтройках не найдены адреÑа ни БиблиотекарÑ, ни ISIS." 
#, fuzzy #~ msgid "Trying to get A-Hash from ISISes" #~ msgstr "Попытка извлечь Ñодержимое %s из Ñлемента XML, размер %d" #~ msgid "Trying to get A-Hash from" #~ msgstr "Попытка получить Ð°Ð´Ñ€ÐµÑ Ð-Ð¥Ñш из" #~ msgid "Got A-Hash from ISIS:" #~ msgstr "Получен Ð°Ð´Ñ€ÐµÑ Ð-Ð¥Ñш из ISIS:" #~ msgid "Error in initThread: %s" #~ msgstr "Ошибка в initThread: %s" #, fuzzy #~ msgid "Error in Librarian's checking thread: %s" #~ msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð½Ð¾Ð²Ð¾Ð¹ запиÑи в Librarian: %s" #~ msgid "Error processing report message" #~ msgstr "Ошибка обработки отчёта" #~ msgid "Error traversing: %s" #~ msgstr "Ошибка при проходе: %s" #~ msgid "Error in traverseLN method: %s" #~ msgstr "Ошибка метода traverseLN: %s" #~ msgid "CentralAHash constructor called" #~ msgstr "Вызван конÑтруктор CentralAHash" #~ msgid "Error importing" #~ msgstr "Сбой импортированиÑ" #~ msgid "Error importing class" #~ msgstr "Ошибка Ð¸Ð¼Ð¿Ð¾Ñ€Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ ÐºÐ»Ð°ÑÑа" #~ msgid "ReplicatedAHash constructor called" #~ msgstr "Вызван конÑтруктор ReplicatedAHash" #~ msgid "sending message of length" #~ msgstr "отправка ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ Ð´Ð»Ð¸Ð½Ð¾Ð¹" #~ msgid "sendt message, success=%s" #~ msgstr "Ñообщение отправлено, success=%s" #~ msgid "processing message..." #~ msgstr "обработка ÑообщениÑ..." #~ msgid "processing message... Finished" #~ msgstr "обработка ÑообщениÑ... Закончена" #~ msgid "Initialized replication environment" #~ msgstr "Инициализирована Ñреда репликации" #~ msgid "Couldn't start replication manager." #~ msgstr "Ðе удалоÑÑŒ запуÑтить менеджер репликации." #~ msgid "master locking" #~ msgstr "блокирование головного узла" #~ msgid "unlocking" #~ msgstr "разблокируетÑÑ" #~ msgid "unlocked" #~ msgstr "разблокирован" #~ msgid "couldn't unlock" #~ msgstr "не удалоÑÑŒ разблокировать" #~ msgid "checkingThread slept %d s" #~ msgstr "checkingThread ожидал %d Ñ" #, fuzzy #~ msgid "Resolved %d deadlocks" #~ msgstr "РаÑпознан пÑевдоним «%s» -> %s\n" #, fuzzy #~ msgid "wrote ahash list %s" #~ msgstr "конец ÑпиÑка поиÑка\n" #, fuzzy #~ msgid "but dbenv wasn't ready." #~ msgstr "ОжидалоÑÑŒ завершение процеÑÑа %s, но он не был запущен" #, fuzzy #~ msgid "entering start" #~ msgstr "ÐÐ°Ñ‡Ð°Ð»ÑŒÐ½Ð°Ñ Ñтрелка" #~ msgid "Couldn't start replication framework" #~ msgstr "Ðе удалоÑÑŒ запуÑтить инфраÑтруктуру репликации" #, fuzzy #~ msgid "entered election thread" #~ msgstr "%<__thread%> перед %" #~ msgid "%s: my role is" #~ msgstr "%s: Ð¼Ð¾Ñ Ñ€Ð¾Ð»ÑŒ" #~ msgid "%s: my role is now" #~ msgstr "%s: Ð¼Ð¾Ñ Ñ€Ð¾Ð»ÑŒ теперь" #~ msgid "Couldn't run election" #~ msgstr "Ðевозможно провеÑти выборы" #~ msgid "entering startElection" #~ msgstr "вход в startElection" #~ msgid "new role" #~ msgstr "Ð½Ð¾Ð²Ð°Ñ Ñ€Ð¾Ð»ÑŒ" #~ msgid "Couldn't begin role" #~ msgstr "Ðевозможно вÑтупить в роль" #~ msgid "entering send" #~ msgstr "переход в send" #~ msgid "failed to send to" #~ msgstr "Ñбой отправки на" #~ msgid "entering repSend" #~ msgstr "переход в repSend" #~ msgid "entering sendNewSiteMsg" #~ msgstr "переход в sendNewSiteMsg" #~ msgid "entering sendHeartbeatMsg" #~ msgstr "переход в sendHeartbeatMsg" #~ msgid "entering sendNewMasterMsg" #~ msgstr "переход в sendNewMasterMsg" #~ msgid "entering processMessage from " #~ msgstr "переход в processMessage из " #~ msgid "received message from myself!" #~ msgstr "получено ÑобÑтвенное Ñообщение!" 
#~ msgid "received master id" #~ msgstr "получен идентификатор головного узла" #~ msgid "received HEARTBEAT_MESSAGE" #~ msgstr "получено Ñообщение HEARTBEAT_MESSAGE" #~ msgid "received ELECTION_MESSAGE" #~ msgstr "получено Ñообщение ELECTION_MESSAGE" #~ msgid "received NEWSITE_MESSAGE" #~ msgstr "получено Ñообщение NEWSITE_MESSAGE" #~ msgid "processing message from %d" #~ msgstr "обработка ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ Ð¾Ñ‚ %d" #~ msgid "Got dbnotfound" #~ msgstr "Получено dbnotfound" #~ msgid "couldn't process message" #~ msgstr "не удалоÑÑŒ обработать Ñообщение" #~ msgid "received DB_REP_NEWSITE from %s" #~ msgstr "получено DB_REP_NEWSITE от %s" #~ msgid "received DB_REP_HOLDELECTION" #~ msgstr "получено DB_REP_HOLDELECTION" #~ msgid "REP_NOTPERM returned for LSN %s" #~ msgstr "REP_NOTPERM получено Ð´Ð»Ñ LSN %s" #~ msgid "REP_IGNORE received" #~ msgstr "получен Ñигнал REP_IGNORE" #~ msgid "JOIN_FAILURE received" #~ msgstr "получен Ñигнал JOIN_FAILURE" #~ msgid "I am now a master" #~ msgstr "Ñ Ñ‚ÐµÐ¿ÐµÑ€ÑŒ главный" #~ msgid "received DB_EVENT_REP_MASTER" #~ msgstr "получено DB_EVENT_REP_MASTER" #~ msgid "I am now a client" #~ msgstr "Я теперь клиент" #~ msgid "Getting permission failed" #~ msgstr "Ðе удалоÑÑŒ получить разрешение" #~ msgid "Write failed" #~ msgstr "ЗапиÑÑŒ не удалаÑÑŒ" #~ msgid "New master elected" #~ msgstr "Выбран новый головной узел" #~ msgid "I won the election: I am the MASTER" #~ msgstr "Я победил на выборах: Ñ Ñ‚ÐµÐ¿ÐµÑ€ÑŒ MASTER" #~ msgid "Oops! Internal DB panic!" #~ msgstr "Ой! ВнутреннÑÑ Ð¿Ð°Ð½Ð¸ÐºÐ° БД!" #~ msgid "accessing gateway: %s" #~ msgstr "Ñоединение Ñ ÑˆÐ»ÑŽÐ·Ð¾Ð¼: %s" #, fuzzy #~ msgid "This bartender does not support gateway" #~ msgstr "Сервер не поддерживает TLS" #~ msgid "Librarian URL or ISIS URL not found in the configuration." #~ msgstr "Ð’ наÑтройках не найдены адреÑа ни БиблиотекарÑ, ни ISIS." 
#, fuzzy #~ msgid "Error connecting to ISIS %(iu)s, reason: %(r)s" #~ msgstr "Ошибка Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡ÐµÐ½Ð¸Ñ Ðº беÑпроводной Ñети" #, fuzzy #~ msgid "Error in isisThread: %s" #~ msgstr "Ошибка в libkabc" #~ msgid "adding" #~ msgstr "добавлÑетÑÑ" #~ msgid "modifyMetadata response" #~ msgstr "возврат modifyMetadata" #~ msgid "modifyMetadata failed, removing the new librarian entry" #~ msgstr "" #~ "ошибка Ð²Ñ‹Ð¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ modifyMetadata, удаление новой запиÑи библиотекарÑ" #~ msgid "Error creating new entry in Librarian: %s" #~ msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð½Ð¾Ð²Ð¾Ð¹ запиÑи в Librarian: %s" #~ msgid "//// response from the external store:" #~ msgstr "//// ответ внешнего запоминающего уÑтройÑтва:" #~ msgid "location chosen:" #~ msgstr "выбранное раÑположение:" #, fuzzy #~ msgid "ERROR from the chosen Shepherd" #~ msgstr "Ошибка %s при выборке из %s@%s\n" #~ msgid "addReplica" #~ msgstr "addReplica" #~ msgid "Registered Shepherds in Librarian" #~ msgstr "ЗарегиÑтрированные у Ð‘Ð¸Ð±Ð»Ð¸Ð¾Ñ‚ÐµÐºÐ°Ñ€Ñ Ð§Ð°Ð±Ð°Ð½Ñ‹" #~ msgid "Alive Shepherds:" #~ msgstr "Живые Чабаны:" #~ msgid "LN" #~ msgstr "LN" #~ msgid "metadata" #~ msgstr "метаданные" #~ msgid "Could not read entry" #~ msgstr "Ðе удалоÑÑŒ прочеÑть запиÑÑŒ" #~ msgid "\\/\\/" #~ msgstr "\\/\\/" #~ msgid "removing" #~ msgstr "удалÑетÑÑ" #~ msgid "Proxy store:" #~ msgstr "Хранилище доверенноÑтей:" #~ msgid "Delegation status: " #~ msgstr "Ð¡Ñ‚Ð°Ñ‚ÑƒÑ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ:" #~ msgid "creating proxy file : " #~ msgstr "ÑоздаётÑÑ Ñ„Ð°Ð¹Ð» доверенноÑти:" #~ msgid "Delegation failed: " #~ msgstr "Сбой делегированиÑ:" #~ msgid "ID: " #~ msgstr "ID: " #~ msgid "ProxyStore: %s" #~ msgstr "ProxyStore: %s" #~ msgid "removeCredentials: %s" #~ msgstr "removeCredentials: %s" #~ msgid "proxy store is not accessable." #~ msgstr "хранилище доверенноÑтей недоÑтупно." #~ msgid "Certificate directory is not accessable! Check configuration file." #~ msgstr "" #~ "Каталог Ñ Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ñ‹Ð¼Ð¸ ключами Ñертификационных агентÑтв недоÑтупен! " #~ "Проверьте файл наÑтроек." #, fuzzy #~ msgid "Proxy store is not accessable." #~ msgstr "хранилище доверенноÑтей недоÑтупно." #~ msgid "Failed retrieving job information for job: %s" #~ msgstr "Ðе удалоÑÑŒ извлечь информацию о задаче: %s" #~ msgid "Unable to select run time environment" #~ msgstr "Ðевозможно выбрать Ñреду выполнениÑ" #~ msgid "Submit: Failed to modify job description to be sent to target." #~ msgstr "" #~ "ЗаÑылка: Ðе удалоÑÑŒ адаптировать опиÑание задачи Ð´Ð»Ñ Ð·Ð°Ñылки по назначению" #~ msgid "[ADLParser] RemoteSessionAccess is not supported yet." #~ msgstr "[ADLParser] RemoteSessionAccess пока что не поддерживаетÑÑ." #~ msgid "Can't sign a non-limited, non-independent proxy with a limited proxy" #~ msgstr "" #~ "Ðевозможно подпиÑать неограниченную завиÑимую доверенноÑть ограниченной " #~ "доверенноÑтью" #~ msgid " Used Slots: %d" #~ msgstr "ИÑпользованные Ñдра: %d" #~ msgid "Job list file not specified." 
#~ msgstr "Ðе указан файл ÑпиÑка задач" #~ msgid "cFlavour = %s; service = %s" #~ msgstr "cFlavour = %s; service = %s" #~ msgid "" #~ "Unable to get job (%s), job information not found at execution service" #~ msgstr "" #~ "Ðевозможно извлечь задачу (%s), на ÑервиÑе иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½Ðµ обнаружена " #~ "Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ задаче" #~ msgid "" #~ "Unable to kill job (%s), job information not found at execution service" #~ msgstr "" #~ "Ðевозможно прервать задачу (%s), на ÑервиÑе иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½Ðµ обнаружена " #~ "Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ задаче" #~ msgid "Failed killing job (%s)" #~ msgstr "Сбой Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ (%s)" #~ msgid "" #~ "Unable to renew job (%s), job information not found at execution service" #~ msgstr "" #~ "Ðевозможно возобновить задачу (%s), на ÑервиÑе иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½Ðµ обнаружена " #~ "Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ задаче" #~ msgid "Failed renewing job (%s)" #~ msgstr "Сбой Ð²Ð¾Ð·Ð¾Ð±Ð½Ð¾Ð²Ð»ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ (%s)" #~ msgid "Unable to resume job (%s), job information not found" #~ msgstr "" #~ "Ðевозможно продолжить задачу (%s), не обнаружена Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ задаче" #~ msgid "Generating EMIES target: %s" #~ msgstr "Создание цели EMIES: %s" #~ msgid "" #~ "The middleware flavour of the job (%s) does not match that of the job " #~ "controller (%s)" #~ msgstr "" #~ "Тип подпрограммного обеÑÐ¿ÐµÑ‡ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ (%s) не ÑоответÑтвует типу " #~ "контроллера (%s)" #~ msgid "Job has not finished yet: %s" #~ msgstr "Задача ещё не завершилаÑÑŒ: %s" #~ msgid "Failed downloading job %s" #~ msgstr "Ðе удалоÑÑŒ получить результаты задачи %s" #~ msgid "Failed cleaning job %s" #~ msgstr "Ðе удалоÑÑŒ удалить задачу %s" #~ msgid "Job has already finished: %s" #~ msgstr "Задача уже завершилаÑÑŒ: %s" #~ msgid "Failed cancelling job %s" #~ msgstr "Ðе удалоÑÑŒ прервать задачу %s" #~ msgid "" #~ "Job information not found, job %s will only be deleted from local joblist" #~ msgstr "" #~ "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ задаче не обнаружена, задача %s будет удалена только из " #~ "локального ÑпиÑка" #~ msgid "Unknown output %s" #~ msgstr "ÐеизвеÑтный вывод %s" #~ msgid "Cannot create output of %s for job (%s): Invalid destination %s" #~ msgstr "" #~ "Ðевозможно Ñоздать вывод %s Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ (%s): ÐедопуÑтимое назначение %s" #~ msgid "%s from job %s" #~ msgstr "%s из задачи %s" #~ msgid "Cannot migrate job %s, it is not queuing." #~ msgstr "Ðевозможно мигрировать задачу %s, она не ожидает в очереди." #~ msgid "Job migration failed, for job %s, no more possible targets" #~ msgstr "ÐœÐ¸Ð³Ñ€Ð°Ñ†Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s не удалаÑÑŒ, отÑутÑтвуют возможные назначениÑ" #~ msgid "Failed to lock job list file %s. Job information will be out of sync" #~ msgstr "" #~ "Ðе удалоÑÑŒ заблокировать файл ÑпиÑка задач %s. 
Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ задачах будет " #~ "раÑÑинхронизована" #~ msgid "Failed renewing job %s" #~ msgstr "Ðе удалоÑÑŒ обновить доверенноÑть задачи %s" #~ msgid "Failed retrieving job description for job (%s)" #~ msgstr "Ðе удалоÑÑŒ получить опиÑание задачи (%s)" #, fuzzy #~ msgid "Scheduler loop exited" #~ msgstr "%s: программа %s завершилаÑÑŒ Ñ ÐºÐ¾Ð´Ð¾Ð¼ %d\n" #~ msgid "No job controller plugins loaded" #~ msgstr "Ðе подгружен ни один модуль ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð°Ð¼Ð¸" #~ msgid "Credentials renewed" #~ msgstr "Параметры доÑтупа обновлены" #~ msgid "Failed to renew credentials for some or all jobs" #~ msgstr "Ðе удалоÑÑŒ обновить параметры доÑтупа Ð´Ð»Ñ Ð½ÐµÐºÐ¾Ñ‚Ð¾Ñ€Ñ‹Ñ… или вÑех задач" #~ msgid "add dryrun option if available" #~ msgstr "добавить холоÑтую прогонку, еÑли возможно" #~ msgid "select broker method (Random (default), FastestQueue, or custom)" #~ msgstr "" #~ "выбрать алгоритм планировщика (Random (по умолчанию), FastestQueue, или " #~ "Ñпециальный)" #~ msgid "Job description languages supported by ARC client tools:" #~ msgstr "" #~ "Следующие Ñзыки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡ поддерживаютÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ñкими ÑредÑтвами ARC:" #~ msgid "explicitly select or reject a resource holding queued jobs" #~ msgstr "Ñвным образом выбрать или отÑеÑть реÑурÑ, держащий задачи в очереди" #~ msgid "explicitly select or reject a resource to migrate to" #~ msgstr "Ñвным образом выбрать или отÑеÑть назначение миграции" #~ msgid "Brokers available to arcmigrate:" #~ msgstr "Следующие планировщики доÑтупны Ð´Ð»Ñ arcmigrate:" #~ msgid "Job migration aborted because no resource returned any information" #~ msgstr "" #~ "Обрыв заÑылки задачи, Ñ‚.к. ни один из реÑурÑов не предоÑтавил информацию" #~ msgid "All jobs were resumed" #~ msgstr "Ð’Ñе задачи были возобновлены" #~ msgid "Brokers available to arcresub:" #~ msgstr "Следующие планировщики доÑтупны Ð´Ð»Ñ arcresub:" #~ msgid "Disregarding %s" #~ msgstr "ИгнорируетÑÑ %s" #~ msgid "Job resubmission failed, unable to parse obtained job description" #~ msgstr "" #~ "Ðе удалоÑÑŒ перезаÑлать задачу, невозможно разобрать полученное опиÑание " #~ "задачи" #~ msgid "Job resubmitted with new jobid: %s" #~ msgstr "Задача запущена Ñ Ð½Ð¾Ð²Ñ‹Ð¼ Ñрлыком: %s" #~ msgid "Job resubmission failed, no more possible targets" #~ msgstr "Ðе удалоÑÑŒ перезаÑлать задачу, возможные Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¾Ñ‚ÑутÑтвуют" #~ msgid "Job could not be killed or cleaned" #~ msgstr "Задача не может быть прервана или Ñтёрта" #~ msgid "" #~ "Cannot find any proxy. arcsub currently cannot run without a proxy.\n" #~ " If you have the proxy file in a non-default location,\n" #~ " please make sure the path is specified in the client configuration " #~ "file.\n" #~ " If you don't have a proxy yet, please run 'arcproxy'!" #~ msgstr "" #~ "Ðе удалоÑÑŒ обнаружить доверенноÑть. Ð’ Ñтой верÑии arcsub не работает без " #~ "доверенноÑти.\n" #~ " ЕÑли Ваша доверенноÑть хранитÑÑ Ð² неÑтандартном меÑте, пожалуйÑта,\n" #~ " убедитеÑÑŒ, что в наÑтройках клиента указан правильный путь.\n" #~ " ЕÑли же Ð’Ñ‹ пока не Ñоздали доверенноÑть, запуÑтите 'arcproxy'!" #, fuzzy #~ msgid "The request_url is %s" #~ msgstr "Ошибка в ÑÑылке '%1'." #~ msgid "Unable to calculate checksum of local input file %s" #~ msgstr "Ðе удалоÑÑŒ вычиÑлить контрольную Ñумму локального входного файла %s" #~ msgid "[ADLParser] %s element with false value is not supported yet." #~ msgstr "[ADLParser] Элемент %s Ñ Ð»Ð¾Ð¶Ð½Ñ‹Ð¼ значением пока не поддерживаетÑÑ." 
#~ msgid "[ADLParser] %s element with true value is not supported yet." #~ msgstr "[ADLParser] Элемент %s Ñ Ð¸Ñтиным значением пока не поддерживаетÑÑ." #~ msgid "" #~ "[ADLParser] Option element inside RuntimeEnvironment is not supported yet." #~ msgstr "" #~ "[ADLParser] Элемент Option внутри RuntimeEnvironment пока что не " #~ "поддерживаетÑÑ." #~ msgid "[ADLParser] ParallelEnvironment is not supported yet." #~ msgstr "[ADLParser] ParallelEnvironment пока что не поддерживаетÑÑ." #~ msgid " Keep data: true" #~ msgstr " ОÑтавлÑть данные: верно" #~ msgid "" #~ "[ADLParser] For useNumberOfSlots of SlotsPerHost only false value is " #~ "supported yet." #~ msgstr "" #~ "[ADLParser] Ð”Ð»Ñ useNumberOfSlots атрибута SlotsPerHost пока что " #~ "поддерживаетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ ложное значение." #~ msgid "[ADLParser] ExclusiveExecution is not supported yet." #~ msgstr "[ADLParser] ExclusiveExecution пока что не поддерживаетÑÑ." #, fuzzy #~ msgid "Invalid configuration - no allowed DNs specified" #~ msgstr "Ðе указан файл наÑтроек" #~ msgid "bind failed" #~ msgstr "Ñбой привÑзки" #~ msgid "%s: Failed reading list of output files" #~ msgstr "%s: Ðе удалоÑÑŒ прочеÑть ÑпиÑок выходных файлов" #~ msgid "ARC: acl element wrongly formated - missing Content element" #~ msgstr "" #~ "Ðевеврный формат Ñлемента ARC: acl element - отÑутÑтвует Ñлемент Content" #~ msgid "ARC: unsupported ACL type specified: %s" #~ msgstr "ARC: указан неподдерживаемый тип ACL: %s" #~ msgid "" #~ "[ADLParser] Missing FailIfExitCodeNotEqualTo in %s. Ignoring exit code is " #~ "not supported yet." #~ msgstr "" #~ "[ADLParser] Ð’ %s отÑутÑтвует FailIfExitCodeNotEqualTo. Игнорирование кода " #~ "выхода пока не поддерживаетÑÑ." #~ msgid "" #~ "[ADLParser] FailIfExitCodeNotEqualTo in %s contain non-zero code. This " #~ "feature is not supported yet." #~ msgstr "" #~ "[ADLParser] FailIfExitCodeNotEqualTo в %s Ñодержит ненулевой код. Ð¢Ð°ÐºÐ°Ñ " #~ "возможноÑть пока что не поддерживаетÑÑ." #~ msgid "[ADLParser] Multiple PreExecutable elements are not supported yet." #~ msgstr "" #~ "[ADLParser] МножеÑтвенные Ñлементы PreExecutable пока что не " #~ "поддерживаютÑÑ." #~ msgid "" #~ "[ADLParser] Only SGAS ServiceType for RemoteLogging is supported yet." #~ msgstr "" #~ "[ADLParser] Пока что поддерживаетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ SGAS ServiceType Ð´Ð»Ñ " #~ "RemoteLogging." #~ msgid "[ADLParser] For ClientDataPush only false value is supported yet." #~ msgstr "" #~ "[ADLParser] Ð”Ð»Ñ ClientDataPush пока что поддерживаетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ ложное " #~ "значение." #~ msgid "[ADLParser] DelegationID in Source is not supported yet." #~ msgstr "[ADLParser] DelegationID в Source пока что не поддерживаетÑÑ." #~ msgid "[ADLParser] Option in Source is not supported yet." #~ msgstr "[ADLParser] Option в Source пока что не поддерживаетÑÑ." #~ msgid "[ADLParser] DelegationID in Target is not supported yet." #~ msgstr "[ADLParser] DelegationID в Target пока что не поддерживаетÑÑ." #~ msgid "[ADLParser] Option in Target is not supported yet." #~ msgstr "[ADLParser] Option в Target пока что не поддерживаетÑÑ." #~ msgid "" #~ "The JobDescription::operator bool() method is DEPRECATED, use validity " #~ "checks when parsing sting or outputing contents of JobDescription object." #~ msgstr "" #~ "ИÑпользование метода JobDescription::operator bool() ÐЕ РЕКОМЕÐДУЕТСЯ, " #~ "проверÑйте дейÑтвительноÑть при разборке Ñтрок или выводе Ñодержимого " #~ "объекта JobDescription." 
#~ msgid "" #~ "The JobDescription::Print method is DEPRECATED, use the JobDescription::" #~ "SaveToStream method instead." #~ msgstr "" #~ "ИÑпользование метода JobDescription::Print ÐЕ РЕКОМЕÐДУЕТСЯ, иÑпользуйте " #~ "метод JobDescription::SaveToStream взамен." #~ msgid " User tag: %s" #~ msgstr " Метка пользователÑ: %s" #~ msgid " Prologue arguments: %s" #~ msgstr " Ðргументы пролога: %s" #~ msgid " Epilogue arguments: %s" #~ msgstr " Ðргументы Ñпилога: %s" #~ msgid "" #~ "This method is DEPRECATED, please use the JobDescription::Parse(const " #~ "std::string&, std::list&, const std::string&, const std::" #~ "string&) method instead." #~ msgstr "" #~ "ИÑпользование Ñтого метода ÐЕ РЕКОМЕÐДУЕТСЯ, пожалуйÑта, иÑпользуйте " #~ "метод JobDescription::Parse(const std::string&, std::" #~ "list&, const std::string&, const std::string&) взамен." #~ msgid "" #~ "This method is DEPRECATED, please use the JobDescription::UnParse(std::" #~ "string&, std::string, const std::string&) method instead." #~ msgstr "" #~ "ИÑпользование Ñтого метода ÐЕ РЕКОМЕÐДУЕТСЯ, пожалуйÑта, иÑпользуйте " #~ "метод JobDescription::UnParse(std::string&, std::string, const std::" #~ "string&) взамен." #~ msgid "" #~ "The Job::Print method is DEPRECATED, use the Job::SaveToStream method " #~ "instead." #~ msgstr "" #~ "ИÑпользование метода Job::Print ÐЕ РЕКОМЕÐДУЕТСЯ, иÑпользуйте метод Job::" #~ "SaveToStream взамен." #~ msgid "" #~ "The TargetGenerator::GetTargets method is DEPRECATED, use the " #~ "GetExecutionTargets or GetJobs method instead." #~ msgstr "" #~ "ИÑпользование метода TargetGenerator::GetTargets ÐЕ РЕКОМЕÐДУЕТСЯ, " #~ "иÑпользуйте метод GetExecutionTargets or GetJobs взамен." #~ msgid "Running resource (target) discovery" #~ msgstr "ВыполнÑетÑÑ Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ðµ реÑурÑов (назначений)" #~ msgid "" #~ "The TargetGenerator::ModifyFoundTargets method is DEPRECATED, use the " #~ "FoundTargets method instead." #~ msgstr "" #~ "ИÑпользование метода TargetGenerator::ModifyFoundTargets ÐЕ " #~ "РЕКОМЕÐДУЕТСЯ, иÑпользуйте метод FoundTargets взамен." #~ msgid "" #~ "The TargetGenerator::FoundJobs method is DEPRECATED, use the GetFoundJobs " #~ "method instead." #~ msgstr "" #~ "ИÑпользование метода TargetGenerator::FoundJobs ÐЕ РЕКОМЕÐДУЕТСЯ, " #~ "иÑпользуйте метод GetFoundJobs взамен." #~ msgid "" #~ "The TargetGenerator::AddJob(const XMLNode&) method is DEPRECATED, use the " #~ "AddJob(const Job&) method instead." #~ msgstr "" #~ "ИÑпользование метода TargetGenerator::AddJob(const XMLNode&) ÐЕ " #~ "РЕКОМЕÐДУЕТСЯ, иÑпользуйте метод AddJob(const Job&) взамен." #~ msgid "" #~ "The TargetGenerator::PrintTargetInfo method is DEPRECATED, use the " #~ "TargetGenerator::SaveTargetInfoToStream method instead." #~ msgstr "" #~ "ИÑпользование метода TargetGenerator::PrintTargetInfo ÐЕ РЕКОМЕÐДУЕТСЯ, " #~ "иÑпользуйте метод TargetGenerator::SaveTargetInfoToStream взамен." #~ msgid "" #~ "The JobController::Cat(const std::list&, const std::string&) " #~ "method is DEPRECATED, use the JobController::Cat(std::ostream&, const " #~ "std::list&, const std::string&) method instead." #~ msgstr "" #~ "ИÑпользование метода JobController::Cat(const std::list&, " #~ "const std::string&) ÐЕ РЕКОМЕÐДУЕТСЯ, иÑпользуйте метод JobController::" #~ "Cat(std::ostream&, const std::list&, const std::string&) " #~ "взамен." #~ msgid "" #~ "Specifying the \"gmlog\" value for the whichfile parameter in the Job::" #~ "Cat method is DEPRECATED, use the \"joblog\" value instead." 
#~ msgstr "" #~ "ИÑпользование Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ \"gmlog\" Ð´Ð»Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð° whichfile в методе Job::" #~ "Cat ÐЕ РЕКОМЕÐДУЕТСЯ, иÑпользуйте значение \"joblog\" взамен." #~ msgid "" #~ "The JobController::PrintJobStatus method is DEPRECATED, use the Job::" #~ "SaveJobStatusToStream method instead." #~ msgstr "" #~ "ИÑпользование метода JobController::PrintJobStatus ÐЕ РЕКОМЕÐДУЕТСЯ, " #~ "иÑпользуйте метод Job::SaveJobStatusToStream взамен." #~ msgid "Failed to lock job list file %s. Job list will be out of sync" #~ msgstr "" #~ "Сбой блокировки файла ÑпиÑка задач %s. Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ задачах будет " #~ "раÑÑинхронизована" #~ msgid "" #~ "The ExecutionTarget::Print method is DEPRECATED, use the ExecutionTarget::" #~ "SaveToStream method instead." #~ msgstr "" #~ "ИÑпользование метода ExecutionTarget::Print ÐЕ РЕКОМЕÐДУЕТСЯ, иÑпользуйте " #~ "метод ExecutionTarget::SaveToStream взамен." #, fuzzy #~ msgid "CreateActivity: has delegation: %s" #~ msgstr "CreateActivity: Сбой при принÑтии делегированиÑ" #, fuzzy #~ msgid "Error parsing VOMS AC" #~ msgstr "Обнаружена ошибка при разборе Ñертификата атрибута" #, fuzzy #~ msgid "Error opening lock file %s: %s" #~ msgstr "Ошибка Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° вывода" #, fuzzy #~ msgid "Found empty lock file %s" #~ msgstr "Ожидание блокировки файла" #, fuzzy #~ msgid "DTR %s: Failed linking cache file to %s due to existing write lock" #~ msgstr "Копирование файла '%s' из '%s'..." #~ msgid "Cannot determine hostname from uname()" #~ msgstr "Ðевозможно извлечь Ð¸Ð¼Ñ ÑƒÐ·Ð»Ð° иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ uname()" #~ msgid "Error reading meta file %s" #~ msgstr "Ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð¼ÐµÑ‚Ð°-файла %s" #~ msgid "" #~ "File exists in remote cache at %s but is locked. Will download from source" #~ msgstr "" #~ "Файл приÑутÑтвует в удалённом кÑше на %s, но заблокирован. Будет " #~ "проведена загрузка из иÑточника" #~ msgid "Creating temporary link from %s to remote cache file %s" #~ msgstr "СоздаётÑÑ Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ð°Ñ ÑÑылка Ñ %s на удалённо кÑшированный файл %s" #~ msgid "" #~ "Failed to create soft link to remote cache: %s. Will download %s from " #~ "source" #~ msgstr "" #~ "Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð³Ð¸Ð±ÐºÐ¾Ð¹ ÑÑылки на удалённый кÑш: %s. Будет произведена " #~ "загрузка %s из иÑточника" #~ msgid "" #~ "Could not read target of link %s. Manual intervention may be required to " #~ "remove lock in remote cache" #~ msgstr "" #~ "Ðевозможно прочеÑть цель ÑÑылки %s. Возможно, необходимо ручное " #~ "вмешательÑтво Ð´Ð»Ñ ÑнÑÑ‚Ð¸Ñ Ð±Ð»Ð¾ÐºÐ¸Ñ€Ð¾Ð²ÐºÐ¸ в удалённом кÑше" #~ msgid "" #~ "Failed to unlock remote cache file %s. Manual intervention may be required" #~ msgstr "" #~ "Сбой разблокировки удалённого кÑшированного файла %s. Возможно, " #~ "необходимо ручное вмешательÑтво" #~ msgid "Error removing file %s: %s. Manual intervention may be required" #~ msgstr "" #~ "Ошибка при удалении файла %s: %s. Возможно, необходимо ручное " #~ "вмешательÑтво" #~ msgid "Error: Cache file %s does not exist" #~ msgstr "Ошибка: КÑшированный файл %s не ÑущеÑтвует" #~ msgid "Could not read target of link %s" #~ msgstr "Ðевозможно прочеÑть цель ÑÑылки %s" #~ msgid "Couldn't match link target %s to any remote cache" #~ msgstr "Цель ÑÑылки %s не найдена ни в одном удалённом кÑше" #~ msgid "Error removing symlink %s: %s. Manual intervention may be required" #~ msgstr "" #~ "Ошибка при удалении Ñимвольной ÑÑылки %s: %s. Возможно, необходимо ручное " #~ "вмешательÑтво" #~ msgid "'../' is not allowed in filename" #~ msgstr "'../' не допуÑкаетÑÑ Ð² имени файла" #~ msgid "Your issuer CA's DN: %s." 
#~ msgstr "Выделенное Ð¸Ð¼Ñ Ð°Ð³ÐµÐ½Ñ‚Ñтва, выдавшего Ваш Ñертификат: %s." #~ msgid "Source is bad URL or can't be used due to some reason" #~ msgstr "" #~ "URL иÑточника недопуÑтим, или не может быть иÑпользован по какой-либо " #~ "причине" #~ msgid "Destination is bad URL or can't be used due to some reason" #~ msgstr "" #~ "URL Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð½ÐµÐ´Ð¾Ð¿ÑƒÑтим, или не может быть иÑпользован по какой-либо " #~ "причине" #~ msgid "Error deleting location or URL" #~ msgstr "Ошибка ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ñ€Ð°ÑÐ¿Ð¾Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ Ð¸Ð»Ð¸ URL" #~ msgid "DataPoint is already reading" #~ msgstr "DataPoint уже читает" #~ msgid "DataPoint is already writing" #~ msgstr "DataPoint уже пишет" #~ msgid "File stating failed" #~ msgstr "Ðе удалоÑÑŒ получить информацию о ÑоÑтоÑнии файла" #~ msgid "Failed to finish destination" #~ msgstr "Ðе удалоÑÑŒ завершить назначение" #~ msgid "" #~ "Cannot find file at %s for getting the certificate. Please make sure this " #~ "file exists." #~ msgstr "" #~ "Ðе удалоÑÑŒ найти файл по адреÑу %s, Ñодержащий Ñертификат. ПожалуйÑта, " #~ "убедитеÑÑŒ, что файл ÑущеÑтвует." #~ msgid "Timeleft for AC: %s" #~ msgstr "ОÑтавшееÑÑ Ð²Ñ€ÐµÐ¼Ñ Ð´Ð»Ñ AC: %s" #~ msgid "AC has been expired for: %s" #~ msgstr "Срок дейÑÑ‚Ð²Ð¸Ñ Ñертификат атрибута Ð´Ð»Ñ %s закончилÑÑ" #, fuzzy #~ msgid "Can get X509V3_EXT_METHOD for %s" #~ msgstr "Ðевозможно извлечь X509V3_EXT_METHOD Ð´Ð»Ñ %s" #, fuzzy #~ msgid "Service_doc: %s" #~ msgstr "Файл &DOC:" #, fuzzy #~ msgid "SOAP Fault: %s" #~ msgstr "Получена ошибка SOAP" #~ msgid "Proxy successfully verified." #~ msgstr "ДоверенноÑть подтверждена." #~ msgid "Proxy not valid. Job submission aborted. Please run 'arcproxy'!" #~ msgstr "" #~ "ДоверенноÑть недейÑтвительна. ЗаÑылка задачи оборвана. ПожалуйÑта, " #~ "запуÑтите 'arcproxy'!" #~ msgid "" #~ "Cannot find CA certificates directory. Please specify the location to the " #~ "directory in the client configuration file." #~ msgstr "" #~ "Ðе удалоÑÑŒ найти каталог Ñ Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ñ‹Ð¼Ð¸ ключами Ñертификационных агентÑтв. " #~ "ПожалуйÑта, введите раÑположение Ñтого каталога в файл наÑтроек клиента." #~ msgid "Local user does not match user of DTR %s" #~ msgstr "Локальный пользователь не ÑоответÑтвует пользователю DTR %s" #~ msgid "" #~ "No services specified. Please specify a cluster or index (-c or -g " #~ "options, see arcsync -h) or set the \"defaultservices\" attribute in the " #~ "client configuration." #~ msgstr "" #~ "Ðе задано ни одного назначениÑ. ПожалуйÑта, задайте значение аттрибута " #~ "\"defaultservices\" в файле наÑтроек клиента, либо укажите Ñвным образом " #~ "реÑÑƒÑ€Ñ Ð¸Ð»Ð¸ каталог реÑурÑов (опции -c или -g, Ñм. arcsync -h)" #~ msgid "Failed to read PEM from file %s" #~ msgstr "Ðе удалоÑÑŒ прочеÑть PEM из файла %s" #~ msgid "" #~ "Failed to read private key from file %s - probably no delegation was done" #~ msgstr "" #~ "Ðе удалоÑÑŒ прочитать файл личного ключа из файла %s - вероÑтно, не было " #~ "делегированиÑ" #~ msgid "Failed in SSL (sk_X509_new_null)" #~ msgstr "Сбой в SSL (sk_X509_new_null)" #~ msgid "Failed in SSL (sk_X509_insert)" #~ msgstr "Сбой в SSL (sk_X509_insert)" #~ msgid "Error: no VOMS extension found" #~ msgstr "Ошибка: не найдено раÑширений VOMS" #~ msgid "Shutting down grid-manager thread" #~ msgstr "ПрерываетÑÑ Ð¿Ð¾Ñ‚Ð¾Ðº Грид-менеджера" #~ msgid "Requirement satisfied. %s %s %s." #~ msgstr "Требование удовлетворено. %s %s %s." #~ msgid "Requirement NOT satisfied. %s %s %s." #~ msgstr "Требование ÐЕ удовлетворено. %s %s %s." 
#~ msgid "End of list reached requirement not met." #~ msgstr "ДоÑтигнут конец ÑпиÑка, Ñ‚Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ Ð½Ðµ удовлетворены" #~ msgid "Can't stat file: %s" #~ msgstr "Ðевозможно получить ÑÑ‚Ð°Ñ‚ÑƒÑ Ñ„Ð°Ð¹Ð»Ð°: %s" #~ msgid "File is not accessible: %s - %s" #~ msgstr "Файл недоÑтупен: %s - %s" #, fuzzy #~ msgid "delete_ftp: globus_ftp_client_delete timeout" #~ msgstr "check_ftp: Ñбой в globus_ftp_client_size" #~ msgid "Transfer FAILED: %s - %s" #~ msgstr "Передача ÐЕ УДÐЛÐСЬ: %s - %s" #, fuzzy #~ msgid "" #~ "Cannot find the path of the key file, please setup environment " #~ "X509_USER_KEY, or keypath in a configuration file" #~ msgstr "" #~ "Ðе удалоÑÑŒ найти закрытый ключ пользователÑ. ПожалуйÑта, задайте " #~ "переменную Ñреды X509_USER_KEY, или значение keypath в файле конфигурации" #, fuzzy #~ msgid "" #~ "Cannot find file at %s for getting the key. Please make sure this file " #~ "exists." #~ msgstr "" #~ "Ðе удалоÑÑŒ найти файл по адреÑу %s, Ñодержащий доверенноÑть. ПожалуйÑта, " #~ "убедитеÑÑŒ, что файл ÑущеÑтвует." #, fuzzy #~ msgid "[ARCJSDLParser] Failed to create parser context" #~ msgstr "Ðе удалоÑÑŒ зарезервировать памÑть Ð´Ð»Ñ ÐºÐ¾Ð½Ñ‚ÐµÐºÑта анализатора" #, fuzzy #~ msgid "[ARCJSDLParser] Validating error" #~ msgstr "%s: ошибка запиÑи файла '%s': %s\n" #~ msgid "Requirements not satisfied." #~ msgstr "Ð¢Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ Ð½Ðµ удовлетворены." #, fuzzy #~ msgid "Mismatching url in file %s: %s Expected %s" #~ msgstr "пропущено Ð¸Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð° в URL" #, fuzzy #~ msgid "Bad separator in file %s: %s" #~ msgstr "Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ Ñтрока в файле-каркаÑе" #, fuzzy #~ msgid "Bad value of expiry time in %s: %s" #~ msgstr "Единица времени Ð¾Ð¿Ñ€ÐµÐ´ÐµÐ»ÐµÐ½Ð¸Ñ Ð¿ÐµÑ€Ð¸Ð¾Ð´Ð° уÑтареваниÑ." #, fuzzy #~ msgid "Illegal testjob-id given" #~ msgstr "Задан недопуÑтимый номер теÑтовой задачи" #~ msgid "Failed to terminate LCMAPS - has to keep library loaded" #~ msgstr "" #~ "Ðе удалоÑÑŒ прервать LCMAPS - придётÑÑ Ð¾Ñтавить библиотеку подгруженой" #~ msgid "VOMS config: vo: %s" #~ msgstr "ÐаÑтройки VOMS: ВО: %s" #~ msgid "VOMS config: group: %s" #~ msgstr "ÐаÑтройки VOMS: группа: %s" #~ msgid "VOMS config: role: %s" #~ msgstr "ÐаÑтройки VOMS: роль: %s" #~ msgid "VOMS config: capabilities: %s" #~ msgstr "ÐаÑтройки VOMS: возможноÑти: %s" #, fuzzy #~ msgid "VOMS matched" #~ msgstr "ПодходÑщие подгруппы:" #~ msgid "Failed to terminate LCAS - has to keep library loaded" #~ msgstr "Ðе удалоÑÑŒ прервать LCAS - придётÑÑ Ð¾Ñтавить библиотеку подгруженой" #~ msgid "Disconnect: Failed quitting: %s" #~ msgstr "Отключение: Ðе удалоÑÑŒ выйти: %s" #~ msgid "Failed to close connection 1" #~ msgstr "Ðе удалоÑÑŒ закрыть Ñоединение 1" #~ msgid "Failed to close connection 2" #~ msgstr "Ðе удалоÑÑŒ закрыть Ñоединение 2" #~ msgid "Failed to close connection 3" #~ msgstr "Ðе удалоÑÑŒ закрыть Ñоединение 3" #~ msgid "Reading configuration file: %s" #~ msgstr "Чтение файла наÑтроек: %s" #, fuzzy #~ msgid "subject: %s" #~ msgstr "Тема:" #~ msgid "Out of memory" #~ msgstr "Мало памÑти" #~ msgid "out of memory" #~ msgstr "мало памÑти" #, fuzzy #~ msgid "Error reading valid and existing meta file %s: %s" #~ msgstr "" #~ "\n" #~ "%s: ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð²Ñ…Ð¾Ð´Ð½Ð¾Ð³Ð¾ файла '%s': %s\n" #, fuzzy #~ msgid "Error listing dir %s: %s" #~ msgstr "Ошибка перечиÑÐ»ÐµÐ½Ð¸Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð²: %s\n" #, fuzzy #~ msgid "Error reading srm info file %s:%s" #~ msgstr "%s: ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð²Ñпомогательного файла '%s': %s\n" #, fuzzy #~ msgid "Error creating srm info file %s" #~ msgstr "%s: ошибка Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ð²Ñпомогательного файла 
'%s': %s\n" #, fuzzy #~ msgid "DTR %s: Cache processing successful" #~ msgstr "Задача уÑпешно возобновлена" #, fuzzy #~ msgid "job_id url destination" #~ msgstr "Ðеверный URL цели." #, fuzzy #~ msgid "link the cache file" #~ msgstr "Обновление кÑш-файла" #, fuzzy #~ msgid "copy the cache file" #~ msgstr "Обновление кÑш-файла" #~ msgid "file is executable" #~ msgstr "файл ÑвлÑетÑÑ Ð¸ÑполнÑемым файлом" #, fuzzy #~ msgid "gid of destination owner" #~ msgstr "Указать владельца Ñхемы" #, fuzzy #~ msgid "One of -l and -c must be specified" #~ msgstr "должно быть одним из: C, S, E, P, или пуÑтым" #~ msgid "No configuration specified" #~ msgstr "Файл наÑтроек не указан" #, fuzzy #~ msgid "Error linking/copying cache file" #~ msgstr "Ошибка ÐºÐ¾Ð¿Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ð¾Ð³Ð¾ почтового файла: %s" #, fuzzy #~ msgid "Adding %s service %s " #~ msgstr "Ошибка при добавлении Ñлужбы. %s" #, fuzzy #~ msgid "" #~ "Can not access CA certificate directory: %s. The certificates will not be " #~ "verified" #~ msgstr "Ðевозможно открыть файл Ñертификата: %s (%s)" #, fuzzy #~ msgid "" #~ "Trying to migrate to %s: Migration to a ARC GM-powered resource is not " #~ "supported." #~ msgstr "-mhard-float не поддерживаетÑÑ" #~ msgid "Using job list file %s" #~ msgstr "ИÑпользуетÑÑ ÑпиÑок задач из файла %s" #, fuzzy #~ msgid "Job not found in the job list: %s" #~ msgstr "Задача %s не обнаружена в ÑпиÑке задач." #~ msgid "Failed to use channel stdout" #~ msgstr "Сбой иÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð¸Ñ ÐºÐ°Ð½Ð°Ð»Ð° stdout" #~ msgid "" #~ "Cannot find any proxy. Please specify the path to the proxy file in the " #~ "client configuration file." #~ msgstr "" #~ "Ðе удалоÑÑŒ найти доверенноÑть пользователÑ. ПожалуйÑта, введите " #~ "раÑположение доверенноÑти в файл конфигурации клиента." #, fuzzy #~ msgid "Error allocating memory for info file %s:%s" #~ msgstr "%s: ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð²Ñпомогательного файла '%s': %s\n" #, fuzzy #~ msgid "Error opening srm info file for writing %s:%s" #~ msgstr "Ошибка: Ðевозможно открыть файл %s Ð´Ð»Ñ Ð·Ð°Ð¿Ð¸Ñи.\n" #, fuzzy #~ msgid "Error allocating memory for srm info file %s:%s" #~ msgstr "%s: ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð²Ñпомогательного файла '%s': %s\n" #, fuzzy #~ msgid "Error opening srm info file %s:%s" #~ msgstr "%s: ошибка Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ð²Ñпомогательного файла '%s': %s\n" #~ msgid "" #~ "Argument to -g has the format Flavour:URL e.g.\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/mds-vo-name=sweden,O=grid\n" #~ "CREAM:ldap://cream.grid.upjs.sk:2170/o=grid\n" #~ "\n" #~ "Argument to -c has the format Flavour:URL e.g.\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/nordugrid-cluster-name=grid.tsl.uu.se,Mds-" #~ "Vo-name=local,o=grid" #~ msgstr "" #~ "Ðргумент опции -g задаётÑÑ Ð¿Ð¾ форме Flavour:URL, например:\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/mds-vo-name=sweden,O=grid\n" #~ "CREAM:ldap://cream.grid.upjs.sk:2170/o=grid\n" #~ "\n" #~ "Ðргумент опции -c задаётÑÑ Ð¿Ð¾ форме Flavour:URL, например:\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/nordugrid-cluster-name=grid.tsl.uu.se,Mds-" #~ "Vo-name=local,o=grid" #~ msgid "" #~ "Argument to -c has the format Flavour:URL e.g.\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/nordugrid-cluster-name=grid.tsl.uu.se,Mds-" #~ "Vo-name=local,o=grid" #~ msgstr "" #~ "Ðргумент опции -c задаётÑÑ Ð¿Ð¾ форме Flavour:URL, например:\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/nordugrid-cluster-name=grid.tsl.uu.se,Mds-" #~ "Vo-name=local,o=grid" #, fuzzy #~ msgid "Getting job descriptions from local job file" #~ msgstr "Удалить передачу из ÑпиÑка." 
#, fuzzy #~ msgid "Disregarding job descriptions from local job file" #~ msgstr "Удалить передачу из ÑпиÑка." #, fuzzy #~ msgid "Valid job description found for: %s" #~ msgstr "CreateActivity: ОпиÑание задачи не найдено" #, fuzzy #~ msgid "Invalid job description found for: %s" #~ msgstr "CreateActivity: ОпиÑание задачи не найдено" #, fuzzy #~ msgid "Job description for %s retrieved locally" #~ msgstr "опиÑание заÑылаемой задачи: %s" #, fuzzy #~ msgid "Job %s can not be resubmitted" #~ msgstr "Задача не может быть перезапущена" #~ msgid "Job description for %s could not be retrieved locally" #~ msgstr "ОпиÑание задачи %s не может быть воÑÑтановлено локально" #~ msgid "file where the jobs will be stored" #~ msgstr "Файл Ð´Ð»Ñ Ð·Ð°Ð¿Ð¸Ñи Ñрлыков запущенных задач" #, fuzzy #~ msgid "Incompatible RSL attributes" #~ msgstr "Правка параметров ÑлоÑ" #, fuzzy #~ msgid "job.Resources.CandidateTarget.size() = %d" #~ msgstr "Размер ÑпиÑка недавно иÑпользовавшихÑÑ Ñ€ÐµÑурÑов" #, fuzzy #~ msgid "Error creating tmp file %s for remote lock with mkstemp(): %s" #~ msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ð¾Ð³Ð¾ файла Ñкрипта" #, fuzzy #~ msgid "Error writing to tmp lock file for remote lock %s: %s" #~ msgstr "Ожидание блокировки файла" #, fuzzy #~ msgid "Failed to change owner of destination dir to %i: %s" #~ msgstr "" #~ "Ðе удалоÑÑŒ Ñменить текущий каталог на админиÑтративный каталог %sinfo" #~ msgid " EndPointURL: %s" #~ msgstr "URL конечной точки: %s" #~ msgid " QueueName: %s" #~ msgstr "Ð˜Ð¼Ñ Ð¾Ñ‡ÐµÑ€ÐµÐ´Ð¸: %s" #, fuzzy #~ msgid " QueueName (ignored): %s" #~ msgstr "ИгнорируетÑÑ (уÑтаревшаÑ)" #~ msgid " Target.Mandatory: true" #~ msgstr " Target.Mandatory: true" #~ msgid " DownloadToCache: true" #~ msgstr " DownloadToCache: true" #~ msgid " Directory element:" #~ msgstr " Элемент Directory:" #, fuzzy #~ msgid "URL of ExecutionTarget is not properly defined" #~ msgstr "" #~ "Сравнение, назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, ÑоÑтоÑние Ð·Ð´Ð¾Ñ€Ð¾Ð²ÑŒÑ Ð½Ðµ " #~ "определено" #, fuzzy #~ msgid "URL of ExecutionTarget is not properly defined: %s." 
#~ msgstr "" #~ "Сравнение, назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, ÑоÑтоÑние Ð·Ð´Ð¾Ñ€Ð¾Ð²ÑŒÑ Ð½Ðµ " #~ "определено" #~ msgid "Filetransfer created" #~ msgstr "Передача файла начата" #~ msgid "Cannot accept destination as URL" #~ msgstr "Ðазначение должно быть URL" #~ msgid "Stage in" #~ msgstr "Подгрузка файлов" #~ msgid "Stage out" #~ msgstr "Выгрузка файлов" #~ msgid "Cannot collect resource information" #~ msgstr "Ðе удалоÑÑŒ Ñобрать информацию о реÑурÑе" #~ msgid "No response" #~ msgstr "Ðет ответа" #~ msgid "Cannot find job id" #~ msgstr "Ðе удалоÑÑŒ найти идентификатор задачи" #~ msgid "Cannot find scheduler endpoint" #~ msgstr "Ðе удалоÑÑŒ найти конечную точку планировщика" #~ msgid "Status: %s %d" #~ msgstr "СоÑтоÑние: %s %d" #~ msgid "Process job: %s" #~ msgstr "Обработка задачи: %s" #~ msgid "No scheduler configured" #~ msgstr "Ðи одного планировщика не наÑтроено" #~ msgid "Do Request: %s" #~ msgstr "ИÑполнение запроÑа: %s" #~ msgid "No free CPU slot" #~ msgstr "ОтÑутÑтвуют доÑтупные Ñвободные процеÑÑоры" #~ msgid "Per: %d" #~ msgstr "Период: %d" #~ msgid "Report status" #~ msgstr "Отчёт о ÑоÑтоÑнии" #~ msgid "%s reported %s" #~ msgstr "%s Ñообщает %s" #~ msgid "%s reported" #~ msgstr "%s Ñообщает" #~ msgid "%s job reported finished" #~ msgstr "Задача %s закончена" #~ msgid "Get activity status changes" #~ msgstr "Получение информации об изменении ÑоÑтоÑниÑ" #~ msgid "%s new status: %s" #~ msgstr "Ðовое ÑоÑтоÑние %s: %s" #~ msgid "Killing %s" #~ msgstr "ПрерываетÑÑ %s" #~ msgid "pre cleanup %s %d" #~ msgstr "Ð¿Ñ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð¾Ñ‡Ð¸Ñтка %s %d" #~ msgid "cleanup %s" #~ msgstr "очиÑтка %s" #~ msgid "cleanup 2 %s" #~ msgstr "очиÑтка 2 %s" #~ msgid "PaulService shutdown" #~ msgstr "Выключение PaulService" #~ msgid "Terminate job %s" #~ msgstr "Terminate job %s" #~ msgid "** %s" #~ msgstr "** %s" #~ msgid "Cannot allocate output raw buffer" #~ msgstr "Ðе удалоÑÑŒ зарезервировать буфер вывода" #~ msgid "Permission denied from %s host" #~ msgstr "Сервер %s host отказал в доÑтупе" #~ msgid "Start process" #~ msgstr "Ðачать процеÑÑ" #~ msgid "Invalid JSDL! Missing application section" #~ msgstr "ÐедопуÑтимый формат JSDL! ОтÑутÑтвует раздел \"application\"." 
#~ msgid "%s set exception" #~ msgstr "%s приÑвоена ошибка" #~ msgid "Empty executable" #~ msgstr "Ðе задан иÑполнÑемый файл" #~ msgid "Windows cmd path: %s" #~ msgstr "Путь поиÑка команд Windows: %s" #~ msgid "Cmd: %s" #~ msgstr "Команда: %s" #~ msgid "StdOut: %s" #~ msgstr "Стандартный выход: %s" #~ msgid "StdErr: %s" #~ msgstr "Ð¡Ñ‚Ð°Ð½Ð´Ð°Ñ€Ñ‚Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ°: %s" #~ msgid "return from run" #~ msgstr "возврат поÑле иÑполнениÑ" #~ msgid "Error during the application run" #~ msgstr "Ошибка при иÑполнении приложениÑ" #~ msgid "Exception: %s" #~ msgstr "Ошибка: %s" #~ msgid "SpawnError" #~ msgstr "SpawnError" #~ msgid "Status request failed" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии удалÑÑ" #~ msgid "Status request succeed" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии удалÑÑ" #~ msgid "The response to a status request was not a SOAP message" #~ msgstr "Ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии не ÑвлÑетÑÑ Ñообщением SOAP" #~ msgid "Service status request failed" #~ msgstr "Ошибка запроÑа о ÑоÑтоÑнии Ñлужбы" #~ msgid "Service status request succeed" #~ msgstr "УÑпешный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии Ñлужбы" #~ msgid "Job termination request failed" #~ msgstr "ошибка запроÑа об обрыве иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #~ msgid "Job termination request succeed" #~ msgstr "уÑпешный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± обрыве иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #~ msgid "file_name: " #~ msgstr "Ðазвание файла:" #~ msgid "Jsdl: " #~ msgstr "JSDL: " #~ msgid "The submited JSDL file's name: " #~ msgstr "Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ð½Ð½Ð¾Ð³Ð¾ файла JSDL: " #~ msgid "Jod Id: " #~ msgstr "Идентификатор задачи:" #~ msgid "STATUS: " #~ msgstr "СОСТОЯÐИЕ:" #~ msgid "Info from the ISIS" #~ msgstr "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¸Ð· ISIS" #~ msgid "job(s) submit" #~ msgstr "запуÑк задач(и)" #~ msgid "Wrong Job submitting! URL: " #~ msgstr "Ðеверный запуÑк задачи! URL: " #~ msgid " Achitecture: " #~ msgstr " Ðрхитектура: " #~ msgid "Result(s) download" #~ msgstr "Загрузка результатов:" #~ msgid "Download Place: " #~ msgstr "РаÑположение загруженных файлов:" #~ msgid "Download cycle: start" #~ msgstr "Цикл загрузки: начало" #~ msgid "Current Arhitecture: " #~ msgstr "Ð¢ÐµÐºÑƒÑ‰Ð°Ñ Ð°Ñ€Ñ…Ð¸Ñ‚ÐµÐºÑ‚ÑƒÑ€Ð°:" #~ msgid "Empty Job ID. Go to the next Job ID." #~ msgstr "ПуÑтой Ñрлык задачи. Переход к Ñледующему Ñрлыку." 
#~ msgid "Download url: " #~ msgstr "URL Ð´Ð»Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ¸:" #~ msgid "Download path: " #~ msgstr "Путь Ð´Ð»Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ¸:" #~ msgid "Download cycle: end" #~ msgstr "Цикл загрузки: конец" #~ msgid "Finished the compile: " #~ msgstr "КомпилÑÑ†Ð¸Ñ Ð¾ÐºÐ¾Ð½Ñ‡ÐµÐ½Ð°:" #~ msgid " The SOAP message send and return" #~ msgstr " Отправленное и полученное Ñообщение SOAP" #~ msgid "Can not create output SOAP payload for delegation service" #~ msgstr "Ðе удалоÑÑŒ Ñоздать выходную нагрузку SOAP Ð´Ð»Ñ Ñлужбы делегированиÑ" #~ msgid "Can not store proxy certificate" #~ msgstr "Ðе удалоÑÑŒ Ñохранить доверенноÑть" #~ msgid "" #~ "Delegated credentials:\n" #~ " %s" #~ msgstr "" #~ "Делегированные параметры доÑтупа:\n" #~ " %s" #~ msgid "Can not find the corresponding credential from credential cache" #~ msgstr "Ðе удалоÑÑŒ найти ÑоответÑтвующие параметры доÑтупа в кÑше" #~ msgid "Signing proxy on delegation service failed" #~ msgstr "Ðе удалоÑÑŒ заверить доверенноÑть на Ñлужбе делегации" #~ msgid "Cannot create SOAP fault" #~ msgstr "Ðевозможно Ñформулировать ошибку SOAP" #~ msgid "GetActivityStatuses: job %s not found" #~ msgstr "GetActivityStatuses: задача %s не обнаружена" #~ msgid "ChangeActivityStatuses: job %s not found" #~ msgstr "ChangeActivityStatuses: задача %s не обнаружена" #~ msgid "GetActivityDocuments: job %s not found" #~ msgstr "GetActivityDocuments: задача %s не обнаружена" #~ msgid "GetActivityStatuses: job %s" #~ msgstr "GetActivityStatuses: задача %s" #~ msgid "doSched" #~ msgstr "Ð’ doSched..." #~ msgid "jobq checkpoint done" #~ msgstr "ÐšÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñ‚Ð¾Ñ‡ÐºÐ° jobq пройдена" #~ msgid "" #~ "Count of jobs: %i Count of resources: %i Scheduler period: %i Endpoint: " #~ "%s DBPath: %s" #~ msgstr "" #~ "КоличеÑтво задач: %i КоличеÑтво реÑурÑов: %i Период планировщика: %i " #~ "ÐšÐ¾Ð½ÐµÑ‡Ð½Ð°Ñ Ñ‚Ð¾Ñ‡ÐºÐ°: %s DBPath: %s" #~ msgid "NEW job: %s" #~ msgstr "ÐОВÐЯ задача: %s" #~ msgid "A-REX ID: %s" #~ msgstr "Идентификатор A-REX: %s" #~ msgid "Sched job ID: %s NOT SUBMITTED" #~ msgstr "Sched задача: %s NOT SUBMITTED" #~ msgid "%s set killed" #~ msgstr "%s оборвано" #~ msgid "%s remove from queue" #~ msgstr "%s удалено из очереди" #~ msgid "Sched job ID: %s (A-REX job ID is empty)" #~ msgstr "Sched задача: %s (пуÑтой Ñрлык задачи A-REX)" #~ msgid "Job RESCHEDULE: %s" #~ msgstr "задача перепланирована: %s" #~ msgid "JobID: %s state: %s" #~ msgstr "JobID: %s ÑоÑтоÑние: %s" #~ msgid "doReschedule" #~ msgstr "Ð’ doReschedule..." 
#~ msgid "Rescheduled job: %s" #~ msgstr "Rescheduled job: %s" #~ msgid "Error during database open: %s" #~ msgstr "Ошибка при открытии базы данных: %s" #~ msgid "Assigned new informational document" #~ msgstr "Добавлен новый информационный документ" #~ msgid "Failed to create informational document" #~ msgstr "Сбой при Ñоздании информационного документа" #~ msgid "%d <> %d" #~ msgstr "%d <> %d" #~ msgid "Cannot get resource ID" #~ msgstr "Ðевозможно получить идентификатор реÑурÑа" #~ msgid "invalid job id" #~ msgstr "неверный Ñрлык задачи" #~ msgid "Invalid status report" #~ msgstr "ÐедопуÑтимые данные о ÑоÑтоÑнии" #~ msgid "%s reports job status of %s but it is running on %s" #~ msgstr "%s отчитываетÑÑ Ð¾ ÑоÑтоÑнии задачи %s, но она запущена на %s" #~ msgid "%s try to status change: %s->%s" #~ msgstr "%s пытаетÑÑ Ð¸Ð·Ð¼ÐµÐ½Ð¸Ñ‚ÑŒ ÑоÑтоÑние: %s->%s" #~ msgid "refresh: Cannot abort transaction: %s" #~ msgstr "Ð’ refresh: Ðевозможно прервать передачу: %s" #~ msgid "refresh: Error during transaction: %s" #~ msgstr "обновление: Ошибка при транзакции: %s" #~ msgid "operator[]: Cannot abort transaction: %s" #~ msgstr "operator[]: Ðе удалоÑÑŒ прервать передачу: %s" #~ msgid "remove: Cannot abort transaction: %s" #~ msgstr "удаление: Ðевозможно оборвать транзакцию: %s" #~ msgid "Job type: single" #~ msgstr "Тип задачи: одиночнаÑ" #~ msgid "Job type: collection" #~ msgstr "Тип задачи: набор" #~ msgid "Job type: parallel" #~ msgstr "Тип задачи: параллельнаÑ" #~ msgid "Job type: workflownode" #~ msgstr "Тип задачи: узел поточного заданиÑ" #, fuzzy #~ msgid "Failed setting signal handler for SIGHUP" #~ msgstr "Ðе удалоÑÑŒ задать владельца файла: %s" #, fuzzy #~ msgid "Failed setting signal handler for SIGCHLD" #~ msgstr "Ошибка: не удалоÑÑŒ уÑтановить обработчик SIGCHLD" #, fuzzy #~ msgid "Failed setting signal handler for SIGTERM" #~ msgstr "Ошибка: не удалоÑÑŒ уÑтановить обработчик SIGTERM" #, fuzzy #~ msgid "Failed setting signal handler for SIGINT" #~ msgstr "Ðе удалоÑÑŒ задать владельца файла: %s" #, fuzzy #~ msgid "Failed to create thread for handling signals" #~ msgstr "Ðе удалоÑÑŒ Ñоздать контекÑÑ‚ GSI: %s" #, fuzzy #~ msgid "Failure creating slot for child process." #~ msgstr "%s: Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¾Ð±Ð»Ð°Ñти памÑти Ð´Ð»Ñ Ð´Ð¾Ñ‡ÐµÑ€Ð½ÐµÐ³Ð¾ процеÑÑа" #, fuzzy #~ msgid "Failure forking child process." #~ msgstr "%s: Сбой при запуÑке дочернего процеÑÑа" #, fuzzy #~ msgid "Timeout waiting for child to finish" #~ msgstr "%s: Сбой Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ Ð´Ð¾Ñ‡ÐµÑ€Ð½ÐµÐ³Ð¾ процеÑÑа" #, fuzzy #~ msgid "Failure opening pipes." 
#~ msgstr "Поток %d, Ñбой Ð¿ÐµÑ€ÐµÐ½Ð°Ð¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð¿Ð¾Ñ‚Ð¾ÐºÐ¾Ð²" #~ msgid "TargetRetriverCREAM initialized with %s service url: %s" #~ msgstr "TargetRetriverCREAM запущен Ñ Ð°Ð´Ñ€ÐµÑом Ñлужбы %s: %s" #~ msgid "TargetRetriverARC0 initialized with %s service url: %s" #~ msgstr "TargetRetriverARC0 запущен Ñ URL Ñлужбы %s:: %s" #~ msgid "TargetRetriverUNICORE initialized with %s service url: %s" #~ msgstr "TargetRetriverUNICORE запущен Ñ URL Ñлужбы %sl: %s" #~ msgid "TargetRetriverARC1 initialized with %s service url: %s" #~ msgstr "TargetRetriverARC1 запущен Ñ URL Ñлужбы %s:: %s" #, fuzzy #~ msgid "Failed locating delegation credentials in chain configuration" #~ msgstr "Ðе удалоÑÑŒ обнаружить доверенноÑти в конфигурации клиента" #, fuzzy #~ msgid "Found malformed job state string: %s" #~ msgstr "Ðе удалоÑÑŒ получить информацию о задаче: %s" #, fuzzy #~ msgid "Failed to set PEPd URL: '%s'" #~ msgstr "не удалоÑÑŒ уÑтановить Ñкан-код %x коду %d\n" #, fuzzy #~ msgid "Failed to create XACML request\n" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ %s к %s не выполнен, получен ответ: %s" #, fuzzy #~ msgid "Failed to authorize XACML request: %s\n" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ %s к %s не выполнен, получен ответ: %s" #, fuzzy #~ msgid "Response is null" #~ msgstr "Ключ имеет значение NULL" #, fuzzy #~ msgid "%s is not authorized" #~ msgstr "%s не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼" #, fuzzy #~ msgid "Failed to create soft link: %s" #~ msgstr "невозможно Ñоздать жеÑткую ÑÑылку %s на %s" #, fuzzy #~ msgid " XRSL_elements: [%s], %s" #~ msgstr "Дополнительные Ñлементы" #, fuzzy #~ msgid " JDL_elements: [%s], %s" #~ msgstr "Дополнительные Ñлементы" #~ msgid "Try to parse as XRSL" #~ msgstr "Попытка ÑинтакÑичеÑкого разбора как XRSL" #~ msgid "Try to parse as JDL" #~ msgstr "Попытка ÑинтакÑичеÑкого разбора как JDL" #~ msgid "Try to parse as ARCJSDL" #~ msgstr "Попытка ÑинтакÑичеÑкого разбора как ARC JSDL" #~ msgid "Generate JDL output" #~ msgstr "Создание JDL на выходе" #~ msgid "Generating %s output was unsuccessful" #~ msgstr "Создание %s на выходе не удалоÑÑŒ" #~ msgid "Generate XRSL output" #~ msgstr "Создание XRSL на выходе" #~ msgid "Generate ARCJSDL output" #~ msgstr "Созадние ARC JSDL на выходе" #~ msgid "Unknown output format: %s" #~ msgstr "ÐеизвеÑтный формат вывода: %s" #~ msgid " ExecutionCE: %s" #~ msgstr "ИÑполнÑющий вычиÑлительный Ñлемент: %s" #, fuzzy #~ msgid "Cannot parse the specified %s service (%s)" #~ msgstr "Ðе удалоÑÑŒ найти клаÑÑ ÑервиÑа" #, fuzzy #~ msgid "The specified %s service (%s) is not a valid URL" #~ msgstr "Ð—Ð°Ð´Ð°Ð½Ð½Ð°Ñ Ð¿Ð°Ð¿ÐºÐ° некорректна" #~ msgid "" #~ "cnd:\n" #~ "%s is a %s" #~ msgstr "" #~ "cnd:\n" #~ "%s is a %s" #, fuzzy #~ msgid "globus_io_cancel failed: %s" #~ msgstr "%s: Ðе удалоÑÑŒ выполнить процедуру прерываниÑ" #, fuzzy #~ msgid "Connect to %s failed: %s" #~ msgstr "Ðе удалоÑÑŒ уÑтановить Ñоединение Ñ %s" #, fuzzy #~ msgid "clear_input: %s" #~ msgstr "ОчиÑтить ввод" #~ msgid "Connection closed" #~ msgstr "Подключение закрыто" #, fuzzy #~ msgid "Globus error (read): %s" #~ msgstr "%s: ошибка Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð½Ð° %s\n" #, fuzzy #~ msgid "*** Server response: %s" #~ msgstr "&Ответ Ñервера:" #, fuzzy #~ msgid "Globus error (write): %s" #~ msgstr "" #~ "Ошибка:\n" #~ "\n" #~ "Ðе удалоÑÑŒ запиÑать %s\n" #, fuzzy #~ msgid "*** Client request: %s" #~ msgstr "Ð—Ð°Ð¿Ñ€Ð¾Ñ ÑƒÐ´Ð°Ð»ÑÑ!!!" 
#~ msgid "Authenticating: %s" #~ msgstr "Проверка подлинноÑти: %s" #, fuzzy #~ msgid "Connection to server failed: %s" #~ msgstr "Ðе удалоÑÑŒ прервать ÑвÑзь Ñ Ñервером" #~ msgid "Failed to read SSL token during authentication" #~ msgstr "Ðе удалоÑÑŒ прочеÑть токен SSL в процеÑÑе проверки подлинноÑти" #, fuzzy #~ msgid "Failed wrapping GSI token: %s" #~ msgstr "Ðе удалоÑÑŒ Ñоздать контекÑÑ‚ GSI: %s" #, fuzzy #~ msgid "Failed unwrapping GSI token: %s" #~ msgstr "Ðе удалоÑÑŒ Ñоздать контекÑÑ‚ GSI: %s" #, fuzzy #~ msgid "Urecognized SSL token received" #~ msgstr "получен неверный ответ на ÑоглаÑование по SSL: %c\n" #, fuzzy #~ msgid "Timeout while sending SOAP request" #~ msgstr "Создание и отправка запроÑа SOAP" #~ msgid "Error sending data to server" #~ msgstr "Ошибка передачи данных на Ñервер." #, fuzzy #~ msgid "read_response_header: line: %s" #~ msgstr "Слишком длинный заголовок" #, fuzzy #~ msgid "Timeout while reading response header" #~ msgstr "" #~ "Ошибка при чтении заголовка файла:\n" #~ " %1" #, fuzzy #~ msgid "Error while reading response header" #~ msgstr "" #~ "Ошибка при чтении заголовка файла:\n" #~ " %1" #, fuzzy #~ msgid "read_response_header: header finished" #~ msgstr "невозможно Ñчитать заголовок архива" #, fuzzy #~ msgid "skip_response_entity" #~ msgstr "Ð”Ñ€ÑƒÐ³Ð°Ñ Ð²Ð½ÐµÑˆÐ½ÑÑ ÑущноÑть" #, fuzzy #~ msgid "skip_response_entity: no entity" #~ msgstr "ÑущноÑть не имеет атрибута %s" #~ msgid "Not connected" #~ msgstr "Ðет подключениÑ" #, fuzzy #~ msgid "Timeout sending header" #~ msgstr "Шрифт колонтитулов:" #~ msgid "Early response from server" #~ msgstr "Преждевременный ответ Ñервера" #~ msgid "No response from server received" #~ msgstr "Ответ Ñервера не получен" #~ msgid "Failed to send body" #~ msgstr "Ðе удалоÑÑŒ отправить тело" #, fuzzy #~ msgid "Failure while receiving entity" #~ msgstr "ошибка при запиÑи данных Ð´Ð»Ñ ÐºÐ°Ñ‚ÐµÐ³Ð¾Ñ€Ð¸Ð¸`%s'" #, fuzzy #~ msgid "Timeout while sending header" #~ msgstr "ошибка при отправке %(message)s ( %(error)s )" #, fuzzy #~ msgid "GET: connection to be closed" #~ msgstr "Ðе удалоÑÑŒ принудительно прервать ÑвÑзь Ñ" #, fuzzy #~ msgid "GET callback returned error" #~ msgstr "Ошибка печати: команда «%s» возвратила %d\n" #, fuzzy #~ msgid "Failed while reading response content" #~ msgstr "ошибка при чтении данных ленты.\n" #, fuzzy #~ msgid "Timeout while reading response content" #~ msgstr "Создание и отправка запроÑа" #, fuzzy #~ msgid "Error while reading response content" #~ msgstr "Ошибка при чтении %d-ой из %d точек: %s\n" #, fuzzy #~ msgid "GET: calling callback: size: %u" #~ msgstr "Ðевозможно получить размер диÑка" #~ msgid "SOAP request failed (%s)" #~ msgstr "Ошибка запроÑа SOAP (%s)" #, fuzzy #~ msgid "SOAP request failed (srmMkdir)" #~ msgstr "Ошибка запроÑа SOAP (copy)" #~ msgid "SOAP request failed (get)" #~ msgstr "Ошибка запроÑа SOAP (get)" #~ msgid "SOAP request failed (getRequestStatus)" #~ msgstr "Ошибка запроÑа SOAP (getRequestStatus)" #~ msgid "SOAP request failed (put)" #~ msgstr "Ошибка запроÑа SOAP (put)" #~ msgid "SOAP request failed (copy)" #~ msgstr "Ошибка запроÑа SOAP (copy)" #~ msgid "SOAP request failed (setFileStatus)" #~ msgstr "Ошибка запроÑа SOAP (setFileStatus)" #~ msgid "SOAP request failed (getFileMetaData)" #~ msgstr "Ошибка запроÑа SOAP (getFileMetaData)" #, fuzzy #~ msgid "Response(%i): %s" #~ msgstr "Ответ" #~ msgid "Submission to %s failed, trying next target" #~ msgstr "Сбой заÑылки задачи на %s, проверка Ñледующего назначениÑ" #~ msgid "" #~ "path to local cache (use to put file into 
cache). The X509_USER_PROXY and " #~ "X509_CERT_DIR environment variables must be set correctly." #~ msgstr "" #~ "путь к локальному кÑшу (иÑпользуйте Ð´Ð»Ñ ÑÐ¾Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° в кÑше). " #~ "УбедитеÑÑŒ, что переменные Ñреды X509_USER_PROXY и X509_CERT_DIR заданы " #~ "правильно." #, fuzzy #~ msgid "" #~ "The config: \n" #~ "%s \n" #~ msgstr "ÐšÐ¾Ð½Ñ„Ð¸Ð³ÑƒÑ€Ð°Ñ†Ð¸Ñ %1" #~ msgid "%s > %s => false: \\%s contains non numbers in the version part." #~ msgstr "" #~ "%s > %s => неверно: \\%s Ñодержит нецифровые Ñимволы в номере верÑии." #, fuzzy #~ msgid "Can not locate CA certificate directory." #~ msgstr "не удалоÑÑŒ прочитать файл корневых Ñертификатов \"%s\": %s\n" #~ msgid "Client chain configuration: %s" #~ msgstr "ÐšÐ¾Ð½Ñ„Ð¸Ð³ÑƒÑ€Ð°Ñ†Ð¸Ñ Ñ†ÐµÐ¿Ð¾Ñ‡ÐºÐ¸Ð¿Ñ€Ð¸Ð¶Ð°Ñ‚Ñ‹Ðµ клиента: %s" #~ msgid "Cannot import arc module" #~ msgstr "Ðе удалоÑÑŒ импортировать модуль ARC" #, fuzzy #~ msgid "Cannot find arc XMLNode class" #~ msgstr "КлаÑÑ ARC XMLNode не найден" #, fuzzy #~ msgid "Cannot stat local executable input file %s" #~ msgstr "Ðевозможно прочеÑть локальный ÑпиÑок задач" #~ msgid "The parsing of the job description was unsuccessful" #~ msgstr "СинтакÑичеÑÐºÐ°Ñ Ñ€Ð°Ð·Ð±Ð¾Ñ€ÐºÐ° опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ не удалаÑÑŒ" #, fuzzy #~ msgid "XRSL parsing problem" #~ msgstr "&Выводить информацию о проблемах" #, fuzzy #~ msgid "Cannot find arc UserConfig class" #~ msgstr "нет файла Ð´Ð»Ñ ÐºÐ»Ð°ÑÑа %s" #, fuzzy #~ msgid "Encrypted saml assertion: %s" #~ msgstr "Разблокирование зашифрованных данных" #, fuzzy #~ msgid "Failed to create/find directory %s : %s" #~ msgstr "Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð° %s/.gnome." #~ msgid "Failed to create/find directory %s, (%d)" #~ msgstr "Ошибка ÑозданиÑ/Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð° %s, (%d)" #, fuzzy #~ msgid "start_reading_srm: looking for metadata: %s" #~ msgstr "ПоиÑк пакетов Gentoo: " #, fuzzy #~ msgid "start_reading_srm: obtained checksum: %s" #~ msgstr "ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %x, а должна быть %x" #~ msgid "explicity select or reject an index server" #~ msgstr "Ñвным образом выбрать или отÑеÑть указанный каталог реÑурÑов" #~ msgid "[job ...]\n" #~ msgstr "[задача ...]\n" #, fuzzy #~ msgid "Cannot find vomses at %s, %s, %s, %s and %s" #~ msgstr "Ðе удаётÑÑ Ð½Ð°Ð¹Ñ‚Ð¸ уÑтройÑтво диÑка %1 Ñ Ð¿Ð»Ð¾Ñ‚Ð½Ð¾Ñтью %2." #~ msgid "IdP name" #~ msgstr "Ð˜Ð¼Ñ IdP" #~ msgid "Configured username is invalid %s" #~ msgstr "ÐаÑтроенное Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½ÐµÐ´Ð¾Ð¿ÑƒÑтимо %s" #~ msgid "%s: State FINISHING: starting child: %s" #~ msgstr "%s: ÑоÑтоÑние FINISHING: запуÑк дочернего процеÑÑа: %s" #~ msgid "%s: State: PREPARING: credentials probably expired (exit code 3)" #~ msgstr "" #~ "%s: ÑоÑтоÑние PREPARING: вероÑтно, иÑтёк Ñрок дейÑÑ‚Ð²Ð¸Ñ Ð¼Ð°Ð½Ð´Ð°Ñ‚Ð° (код " #~ "выхода 3)" #~ msgid "" #~ "%s: State: PREPARING: some error detected (exit code %i). Recover from " #~ "such type of errors is not supported yet." #~ msgstr "" #~ "%s: ÑоÑтоÑние PREPARING:обнаружена ошибка (код выхода %i). ВоÑÑтановление " #~ "поÑле такой ошибки пока не поддерживаетÑÑ." 
#~ msgid "url of myproxy server" #~ msgstr "URL Ñервера MyProxy" #~ msgid "Returned msg from myproxy server: %s" #~ msgstr "Сообщение Ñервера MyProxy: %s" #~ msgid "Myproxy server return failure msg" #~ msgstr "Сервер MyProxy Ñообщил об ошибке" #~ msgid "ARC_PLUGIN_PATH=%s" #~ msgstr "ARC_PLUGIN_PATH=%s" #~ msgid "Can not read key file: %s" #~ msgstr "Ðе удалоÑÑŒ прочитать файл личного ключа: %s" #, fuzzy #~ msgid "StartReading: obtained size: %lli" #~ msgstr "ДиÑковый кÑш, вÑего" #, fuzzy #~ msgid "Retrying with gsi protocol...\n" #~ msgstr "Проблема Ñ Ð¿Ð¾Ð´Ñ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸ÐµÐ¼ мандата" #, fuzzy #~ msgid "start_reading_ftp: failure" #~ msgstr "ftpfs: чтение каталога FTP %s... %s%s" #~ msgid "failed to send to %d of %s" #~ msgstr "не удалоÑÑŒ отправить на %d %s" #~ msgid "%s: Plugin in state %s : %s" #~ msgstr "%s: Подключаемый модуль в ÑоÑтоÑнии %s : %s" #~ msgid "Will not use caching" #~ msgstr "КÑширование иÑпользоватьÑÑ Ð½Ðµ будет" #~ msgid "Cannot clean up any cache files" #~ msgstr "Ðе удалоÑÑŒ Ñтереть кÑшированые файлы" #, fuzzy #~ msgid "store job descriptions in local sandbox." #~ msgstr "Ðевозможно открыть файл Ñ Ð¾Ð¿Ð¸Ñанием задачи: %s" #, fuzzy #~ msgid "Failed to load service configuration form file %s" #~ msgstr "Ðе удалоÑÑŒ загрузить конфигурацию ÑервиÑа" #, fuzzy #~ msgid "Contacting VOMS server (named %s): %s on port: %i" #~ msgstr "" #~ "УÑтанавливаетÑÑ ÑвÑзь Ñ Ñервером VOMS (по имени %s): %s по порту: %s" #, fuzzy #~ msgid "Getting %s jobs" #~ msgstr "" #~ " -a, -all применить ко вÑем задачам пользователÑ" #, fuzzy #~ msgid "Killing %s jobs" #~ msgstr "" #~ " -a, -all применить ко вÑем задачам пользователÑ" #, fuzzy #~ msgid "Cleaning %s jobs" #~ msgstr "" #~ " -a, -all применить ко вÑем задачам пользователÑ" #, fuzzy #~ msgid "Cannot migrate to a %s cluster." #~ msgstr "ДиÑплей DMX на который проиÑходит перемещение" #~ msgid "No valid jobdescription found for: %s" #~ msgstr "Ðе обнаружено допуÑтимых опиÑаний задачи: %s" #, fuzzy #~ msgid "Creating delegation failed" #~ msgstr "%s: Ошибка ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÐºÐ°Ð½Ð°Ð»Ð°" #~ msgid "Job registration failed" #~ msgstr "Ошибка региÑтрации задачи" #~ msgid "Job starting failed" #~ msgstr "Ошибка запуÑка задачи" #~ msgid "Could not retrieve job information" #~ msgstr "Ðе удалоÑÑŒ получить информацию о задаче" #, fuzzy #~ msgid "The node %s has no %s element." #~ msgstr "Документ `%s' не имеет узла верхнего ÑƒÑ€Ð¾Ð²Ð½Ñ <%s>\n" #, fuzzy #~ msgid "The response was not a SOAP message" #~ msgstr "" #~ "Содержимое пиÑьма не было принÑто.\n" #~ "%1" #~ msgid "Fetching job state" #~ msgstr "ИзвлекаетÑÑ ÑоÑтоÑние задачи" #, fuzzy #~ msgid "The status of the job (%s) could not be retrieved." #~ msgstr "Файл не может быть Ñоздан" #, fuzzy #~ msgid "The response to a service status request is Fault message: " #~ msgstr "Ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии Ñлужбы не ÑвлÑетÑÑ Ñообщением SOAP" #, fuzzy #~ msgid "There was an empty response to an index service query" #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии Ñлужбы" #, fuzzy #~ msgid "The response of a index service query was not a SOAP message" #~ msgstr "Ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии не ÑвлÑетÑÑ Ñообщением SOAP" #, fuzzy #~ msgid "Request failed, service returned: %s" #~ msgstr "Ошибка публикации Ñлужбы" #, fuzzy #~ msgid "Migration failed, service returned: %s" #~ msgstr "Ошибка публикации Ñлужбы" #, fuzzy #~ msgid "Job resuming failed" #~ msgstr "Ðе удалоÑÑŒ поÑлать задачу" #, fuzzy #~ msgid "Job resumed at state: %s" #~ msgstr "ÃÂõòõрýþõ ÷ýðчõýøõ job-state!" 
#, fuzzy #~ msgid "Failed migrating job" #~ msgstr "Ðе удалоÑÑŒ поÑлать задачу" #~ msgid "Timer kicking" #~ msgstr "ЗапуÑкаетÑÑ Ñ‚Ð°Ð¹Ð¼ÐµÑ€" #~ msgid "Multiple " #~ msgstr "МножеÑтвенные" #, fuzzy #~ msgid "Multiple timeout attributes in configuration file (%s)" #~ msgstr "Ðе удалоÑÑŒ загрузить конфигурацию ÑервиÑа" #, fuzzy #~ msgid "Multiple brokername attributes in configuration file (%s)" #~ msgstr "Ðе удалоÑÑŒ загрузить конфигурацию ÑервиÑа" #, fuzzy #~ msgid "Multiple bartender attributes in configuration file (%s)" #~ msgstr "Чтение файла конфигурации: %s" #, fuzzy #~ msgid "Multiple keysize attributes in configuration file (%s)" #~ msgstr "Чтение файла конфигурации: %s" #~ msgid "lasso_assertion_query_new() failed" #~ msgstr "Ñбой в lasso_assertion_query_new()" #~ msgid "lasso_assertion_query_init_request failed" #~ msgstr "Ñбой в lasso_assertion_query_init_request" #~ msgid "lasso_assertion_query_build_request_msg failed" #~ msgstr "Ñбой в lasso_assertion_query_build_request_msg" #~ msgid "assertionRequestBody shouldn't be NULL" #~ msgstr "assertionRequestBody не может быть NULL" #~ msgid "lasso_assertion_query_process_response_msg failed" #~ msgstr "Ñбой lasso_assertion_query_process_response_msg" #~ msgid "Configuration: LRMS: %s" #~ msgstr "КонфигурациÑ: СУПО: %s" #~ msgid "Configuration: Queue: %s" #~ msgstr "КонфигурациÑ: Очередь: %s" #~ msgid "process: CreateActivity" #~ msgstr "процеÑÑ: CreateActivity" #, fuzzy #~ msgid "Couldn't parse value \"%s\" of benchmark %s. Parse error: \"%s\"." #~ msgstr "Ошибка: невозможно обработать %1 как значение параметра.\n" #, fuzzy #~ msgid "Couldn't parse benchmark string: \"%s\"." #~ msgstr "Ðевозможно открыть файл проекта" #, fuzzy #~ msgid "lhs > rhs TRUE" #~ msgstr "Перечёркнутый" #, fuzzy #~ msgid "lhs < rhs TRUE" #~ msgstr "Перечёркнутый" #, fuzzy #~ msgid "Failed resolving aliases" #~ msgstr "Ñоздание алиаÑов ÑловарÑ" #, fuzzy #~ msgid "Matchmaking, ExecutionTarget URL: %s " #~ msgstr "Ðеправильный URL: %1" #, fuzzy #~ msgid "Resolving alias: %s" #~ msgstr "&Изменить пÑевдоним..." #, fuzzy #~ msgid "Alias \"%s\" requested but not defined" #~ msgstr "метка %q+D определена, но не иÑпользуетÑÑ" #, fuzzy #~ msgid "Done resolving alias: %s" #~ msgstr "СинтакÑичеÑкий анализ Ñокращённого имени" #, fuzzy #~ msgid "Key is not a file: %s" #~ msgstr "%s: файл %s не ÑвлÑетÑÑ Ð°Ñ€Ñ…Ð¸Ð²Ð¾Ð¼\n" #, fuzzy #~ msgid "The specified configuration file (%s) is not a regular file" #~ msgstr "Файл \"%s\" не ÑвлÑетÑÑ Ð¾Ð±Ñ‹Ñ‡Ð½Ñ‹Ð¼ файлом или каталогом." #, fuzzy #~ msgid "XML user configuration (%s) loaded" #~ msgstr "Пакет конфигурации уÑпешно загружен." #, fuzzy #~ msgid "INI user configuration (%s) loaded" #~ msgstr "Пакет конфигурации уÑпешно загружен." 
#~ msgid "SSL_library_init failed" #~ msgstr "Сбой в SSL_library_init" #, fuzzy #~ msgid "timeout in seconds (default " #~ msgstr "Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð² Ñекундах (по умолчанию 20)" #~ msgid "select broker method (Random (default), QueueBalance, or custom)" #~ msgstr "" #~ "выбрать алгоритм планировщика (Random (по умолчанию), QueueBalance, или " #~ "Ñпециальный)" #~ msgid "ERROR" #~ msgstr "ОШИБКÐ" #~ msgid "DMCs are loaded" #~ msgstr "Подгружены компоненты цепи Ñообщений" #~ msgid " And now I am there" #~ msgstr "Вот мы и здеÑÑŒ" #~ msgid "wrong option in cacheregistration" #~ msgstr "Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð² cacheregistration" #, fuzzy #~ msgid "Failed to allocate SSL locks" #~ msgstr "Ðевозможно выделить памÑть Ð´Ð»Ñ Ð¸Ð·Ð¾Ð±Ñ€Ð°Ð¶ÐµÐ½Ð¸Ñ:" #, fuzzy #~ msgid "Current transfer FAILED" #~ msgstr "Сбой переноÑа файла." #, fuzzy #~ msgid "Creating and sending a service an index service query" #~ msgstr "Создание и отправка запроÑа о ÑоÑтоÑнии Ñлужбы" #, fuzzy #~ msgid "Creating client chain for UNICORE BES service" #~ msgstr "СоздаётÑÑ Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ð°" #, fuzzy #~ msgid "Request xml structure is: %s" #~ msgstr "Ð˜Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð° \"%s\" формата XML на указывает на каталог" #, fuzzy #~ msgid "UnAuthorized from count.pdp!!!" #~ msgstr "Защитить компьютер от неÑанкционированного иÑпользованиÑ" #~ msgid "Plugins element has no Name defined" #~ msgstr "Ð’ Ñлементе Plugins не задано имÑ" #, fuzzy #~ msgid "DataManager has no name attribute defined" #~ msgstr "Ðе задан ни один параметр Ñ Ð¸Ð¼ÐµÐ½ÐµÐ¼ «%s»" #, fuzzy #~ msgid "DataManager %s(%s) could not be created" #~ msgstr "Файл не может быть Ñоздан" #, fuzzy #~ msgid "Loaded DataManager %s(%s)" #~ msgstr "Страница загружена." #, fuzzy #~ msgid "ArcClientComponent has no name attribute defined" #~ msgstr "Ðе задан ни один параметр Ñ Ð¸Ð¼ÐµÐ½ÐµÐ¼ «%s»" #, fuzzy #~ msgid "ArcClientComponent %s(%s) could not be created" #~ msgstr "Файл не может быть Ñоздан" #, fuzzy #~ msgid "Loaded ArcClientComponent %s(%s)" #~ msgstr "Страница загружена." #, fuzzy #~ msgid "Adding job info to sandbox" #~ msgstr "" #~ "INFO: Þöøôðю þúþýчðýøѠòыÿþûýõýøѠ÷ðôðчø…\n" #, fuzzy #~ msgid "Request failed: Error1" #~ msgstr "Ошибка запроÑа DDE poke" #, fuzzy #~ msgid "Request failed: Error2" #~ msgstr "Ошибка запроÑа DDE poke" #, fuzzy #~ msgid "Request failed: Error3" #~ msgstr "Ошибка запроÑа DDE poke" #, fuzzy #~ msgid "Request failed: Error4" #~ msgstr "Ошибка запроÑа DDE poke" #, fuzzy #~ msgid "Request failed: Error5" #~ msgstr "Ошибка запроÑа DDE poke" #, fuzzy #~ msgid "Requirements in sub-requirements satisfied." 
#~ msgstr "(ÐедоÑтупно: завиÑимоÑти не удовлетворены)" #, fuzzy #~ msgid "Extracting local file list from job description failed" #~ msgstr "Ðевозможно открыть файл Ñ Ð¾Ð¿Ð¸Ñанием задачи: %s" #~ msgid "Failed uploading file" #~ msgstr "Ошибка загрузки файла" #~ msgid "Can not access ARC job list file: %s (%s)" #~ msgstr "Ðевозможно открыть файл задач ARC: %s (%s)" #, fuzzy #~ msgid "Cannot access ARC user config file: %s (%s)" #~ msgstr "Ðевозможно открыть файл пользовательÑкой конфигурации ARC: %s (%s)" #~ msgid "ARC user config file is not a regular file: %s" #~ msgstr "" #~ "Файл пользовательÑкой конфигурации ARC не ÑвлÑетÑÑ Ñтандартным файлом: %s" #, fuzzy #~ msgid "Could not load system client configuration" #~ msgstr "Ðе удалоÑÑŒ обнаружить ÑиÑтемную конфигурацию клиента" #~ msgid "Path is %s" #~ msgstr "Путь: %s" #, fuzzy #~ msgid "File type is neither file or directory" #~ msgstr "" #~ "%1:\n" #~ "ÐеизвеÑтный тип файла: ни каталог ни файл." #, fuzzy #~ msgid "Cannot migrate from %s clusters." #~ msgstr "недопуÑÑ‚Ð¸Ð¼Ð°Ñ Ð¸Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ %qT из %qT" #, fuzzy #~ msgid "Transfer FAILED" #~ msgstr "Сбой переноÑа файла." #, fuzzy #~ msgid "path for cache data (if different from -y)" #~ msgstr "" #~ " -Y, -cachedata путь путь к опиÑанию кÑша (еÑли отличен от -y)" #, fuzzy #~ msgid "Received AuthURL " #~ msgstr "Получен Ñигнал" #, fuzzy #~ msgid "Received status " #~ msgstr "Ðовое ÑоÑтоÑние %s: %s" #, fuzzy #~ msgid "use the Confusa SLCS service" #~ msgstr "ИÑпользовать Ñлужбу PC-To-Phone" #, fuzzy #~ msgid "Confusa Auth module" #~ msgstr "Модуль поддержки Ñодержимого" #, fuzzy #~ msgid "__del__" #~ msgstr " Удалить " #~ msgid "delete run" #~ msgstr "обрываетÑÑ Ð¸Ñполнение" #, fuzzy #~ msgid "passphrase to myproxy server" #~ msgstr "&Путь к Ñборнику переводов" #, fuzzy #~ msgid " Implementation Version: %s" #~ msgstr "collect2 верÑÐ¸Ñ %s" #, fuzzy #~ msgid " JobName: %s" #~ msgstr "Ðеверное Ð¸Ð¼Ñ Ð·Ð°Ð´Ð°Ð½Ð¸Ñ" #, fuzzy #~ msgid ", value: %s" #~ msgstr "Ошибка: %s" #, fuzzy #~ msgid " Author: %s" #~ msgstr "Ðвтор:" #, fuzzy #~ msgid " Input: %s" #~ msgstr "Вводить" #, fuzzy #~ msgid " Output: %s" #~ msgstr "ВЫВОД" #, fuzzy #~ msgid " Notification element: " #~ msgstr "Ðеожиданный Ñлемент" #, fuzzy #~ msgid " Address: %s" #~ msgstr "ÐÐ´Ñ€ÐµÑ - 1 1/8 x 3 1/2 дюйма" #, fuzzy #~ msgid " Total CPU Time: %s" #~ msgstr "%t - иÑпользование процеÑÑора (ÑиÑтема + пользователи)" #, fuzzy #~ msgid " Individual CPU Time: %s" #~ msgstr "ДлительноÑть по умолчанию (CPU)" #, fuzzy #~ msgid " Total Wall Time: %s" #~ msgstr "ДлительноÑть по умолчанию (по чаÑам)" #, fuzzy #~ msgid " Individual Wall Time: %s" #~ msgstr "ДлительноÑть по умолчанию (по чаÑам)" #, fuzzy #~ msgid " Benchmark: %s" #~ msgstr "Ðеприемлемый Ñталонный теÑÑ‚" #, fuzzy #~ msgid " value: %d" #~ msgstr "Ошибка: %s" #, fuzzy #~ msgid " time: %s" #~ msgstr "Ð’Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ" #, fuzzy #~ msgid " OSName: %s" #~ msgstr "название клаÑÑа: %s" #, fuzzy #~ msgid " OSVersion: %s" #~ msgstr "%s, верÑÐ¸Ñ %s" #, fuzzy #~ msgid " DiskSpace: %d" #~ msgstr "ÐедоÑтаточно меÑта на диÑке" #, fuzzy #~ msgid " Alias: %s" #~ msgstr "МеÑто" #~ msgid " Latitude: %s" #~ msgstr "Широта: %s" #~ msgid " Longitude: %s" #~ msgstr "Долгота: %s" #, fuzzy #~ msgid " Slots: %d" #~ msgstr "Разъёмы карт..." 
#, fuzzy #~ msgid " RunTimeEnvironment.Version: %s" #~ msgstr "collect2 верÑÐ¸Ñ %s" #, fuzzy #~ msgid " Homogeneous: true" #~ msgstr "Перечёркнутый" #, fuzzy #~ msgid " InBound: true" #~ msgstr "Перечёркнутый" #, fuzzy #~ msgid " OutBound: true" #~ msgstr "ИÑходÑщие данные:\n" #, fuzzy #~ msgid " Source.Threads: %d" #~ msgstr "X_vid" #, fuzzy #~ msgid " Target.Threads: %d" #~ msgstr "X_vid" #, fuzzy #~ msgid " Target.NeededReplicas: %d" #~ msgstr "Выбор узла назначениÑ" #, fuzzy #~ msgid "Try to parse as POSIX JSDL" #~ msgstr "опиÑание заÑылаемой задачи: %s" #, fuzzy #~ msgid "[PosixJSDLParser] Failed to create parser context" #~ msgstr "Ðе удалоÑÑŒ Ñоздать контекÑÑ‚ GSI: %s" #, fuzzy #~ msgid "Invalid notify attribute: %c" #~ msgstr "ÐедопуÑтимый интервал времени" #, fuzzy #~ msgid "My hash is: %s" #~ msgstr "Контур закрыт." #, fuzzy #~ msgid "RegistrationCollector function is running." #~ msgstr "Код возврата" #, fuzzy #~ msgid "The ServiceID (%s) is found in the database." #~ msgstr "Файл не может быть Ñоздан" #, fuzzy #~ msgid "RemoveRegistrations: MGenTime=%s" #~ msgstr "СмыÑл-конец" #, fuzzy #~ msgid "Connect" #~ msgstr "Ðет подключениÑ" #, fuzzy #~ msgid "[PeerID] calculated hash: %s" #~ msgstr "ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %x, а должна быть %x" #, fuzzy #~ msgid "[Cert] calculated value: %s" #~ msgstr "ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %x, а должна быть %x" #, fuzzy #~ msgid "[Key] calculated value: %s" #~ msgstr "ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %x, а должна быть %x" #, fuzzy #~ msgid "[Proxy] calculated value: %s" #~ msgstr "ДоверенноÑть дейÑтвительна до: %s" #, fuzzy #~ msgid "[CaDir] calculated value: %s" #~ msgstr "ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %x, а должна быть %x" #, fuzzy #~ msgid "find ServiceID: %s , hash: %d" #~ msgstr "Ð˜Ð¼Ñ ÑервиÑа SP: %s" #, fuzzy #~ msgid "Connect request failed, try again." #~ msgstr "Ошибка при выполнении запроÑа" #, fuzzy #~ msgid "File size is %ul" #~ msgstr "Файл '%s' имеет размер в ноль байт - иÑпользуетÑÑ %s." 
#~ msgid "years" #~ msgstr "года(лет)" #~ msgid "months" #~ msgstr "меÑÑца(ев)" #~ msgid "days" #~ msgstr "днÑ(дней)" #~ msgid "hours" #~ msgstr "чаÑа(ов)" #~ msgid "ENV: " #~ msgstr "ENV: " #~ msgid "Broken RSL in NAME" #~ msgstr "Ðеверный код RSL в NAME" #~ msgid "Broken RSL in clientsoftware" #~ msgstr "Ðеверный код RSL в clientsoftware" #~ msgid "Broken RSL" #~ msgstr "Ðеверный код RSL" #, fuzzy #~ msgid "Failed reading RSL" #~ msgstr "Чтение %s раздела %s завершилоÑÑŒ неудачей: %s" #, fuzzy #~ msgid "Failed parsing RSL" #~ msgstr "Ðе удалоÑÑŒ проанализировать XML" #~ msgid "Broken RSL in jobid" #~ msgstr "Ðеверный код RSL в jobid" #, fuzzy #~ msgid "slashes are not allowed in jobid" #~ msgstr "%s: пробелы в имени закладки не разрешаютÑÑ\n" #~ msgid "Broken RSL in action" #~ msgstr "Ðеверный код RSL в action" #~ msgid "Broken RSL in queue" #~ msgstr "Ðеверный код RSL в queue" #~ msgid "Broken RSL in replicacollection" #~ msgstr "Ðеверный код RSL в replicacollection" #~ msgid "Broken RSL in lifetime" #~ msgstr "Ðеверный код RSL в lifetime" #~ msgid "Broken RSL in starttime" #~ msgstr "Ðеверный код RSL в starttime" #~ msgid "Broken RSL in jobname" #~ msgstr "Ðеверный код RSL в jobname" #~ msgid "Broken RSL in jobreport" #~ msgstr "Ðеверный код RSL в jobreport" #~ msgid "Broken RSL in rerun" #~ msgstr "Ðеверный код RSL в rerun" #, fuzzy #~ msgid "Bad integer in rerun" #~ msgstr "переполнение при вычиÑлении целочиÑленного выражениÑ" #~ msgid "Broken RSL in disk" #~ msgstr "Ðеверный код RSL в disk" #, fuzzy #~ msgid "disk value is bad" #~ msgstr "Предупреждение: ошибка в подпиÑи." #~ msgid "Broken RSL in notify" #~ msgstr "Ðеверный код RSL в notify" #~ msgid "Broken RSL in inputdata" #~ msgstr "Ðеверный код RSL в inputdata" #~ msgid "Broken RSL in outputdata" #~ msgstr "Ðеверный код RSL в outputdata" #~ msgid "Broken RSL in gmlog" #~ msgstr "Ðеверный код RSL в gmlog" #~ msgid "Broken RSL in stdout" #~ msgstr "Ðеверный код RSL в stdout" #~ msgid "Broken RSL in stderr" #~ msgstr "Ðеверный код RSL в stderr" #~ msgid "Broken RSL in ftpthreads" #~ msgstr "Ðеверный код RSL в ftpthreads" #~ msgid "Broken RSL in cache" #~ msgstr "Ðеверный код RSL в cache" #~ msgid "Broken RSL in hostname" #~ msgstr "Ðеверный код RSL в hostname" #~ msgid "Broken RSL in dryrun" #~ msgstr "Ðеверный код RSL в dryrun" #~ msgid "Broken RSL in credentialserver" #~ msgstr "Ðеверный код RSL в credentialserver" #~ msgid "Broken RSL in acl" #~ msgstr "Ðеверный код RSL в acl" #, fuzzy #~ msgid "Failed evaluating RSL" #~ msgstr "Проверка правил фильтра: " #, fuzzy #~ msgid "UNKNOWN RSL STRUCTURE" #~ msgstr "Ð´ÐµÐºÑ€ÐµÐ¼ÐµÐ½Ñ‚Ð°Ñ†Ð¸Ñ ÑƒÐºÐ°Ð·Ð°Ñ‚ÐµÐ»Ñ Ð½Ð° неизвеÑтную Ñтруктуру" #, fuzzy #~ msgid "UNKNOWN RLS ELEMENT" #~ msgstr "ÐеизвеÑтный атрибут \"%s\"=\"%s\" в Ñ‚Ñге <%s>" #, fuzzy #~ msgid "Could not write the private key!" #~ msgstr "локаль '%s' не может быть уÑтановлена." #, fuzzy #~ msgid "Host not found: %s" #~ msgstr "Сервер не найден" #, fuzzy #~ msgid "Migration request failed" #~ msgstr "ошибка запроÑа об обрыве иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #, fuzzy #~ msgid "Migration request succeed" #~ msgstr "уÑпешный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± обрыве иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" #, fuzzy #~ msgid "There was no response to a migration request" #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± отправке задачи" #, fuzzy #~ msgid "A job resuming request failed" #~ msgstr "Ошибка запроÑа DDE poke" #, fuzzy #~ msgid "A job resuming request succeed" #~ msgstr "ÐеизвеÑтный тип заданиÑ." 
#, fuzzy #~ msgid "There was no response to a job resuming request" #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± удалении результатов работы задачи" #, fuzzy #~ msgid "RegistrationCollector create: %s" #~ msgstr "Код возврата" #, fuzzy #~ msgid "Job description successfully stored in sandbox" #~ msgstr "опиÑание заÑылаемой задачи: %s" #, fuzzy #~ msgid "Maximal UR Set size is: %d" #~ msgstr "Файл '%s' имеет размер в ноль байт - иÑпользуетÑÑ %s." #, fuzzy #~ msgid "Deleting %s" #~ msgstr "Идентификатор A-REX: %s" #, fuzzy #~ msgid "Reporting interval is: %d s" #~ msgstr "&Изменить пÑевдоним..." #~ msgid "show information about clusters and queues" #~ msgstr "вывеÑти информацию о вычиÑлительных реÑурÑах и очередÑÑ…" #, fuzzy #~ msgid " Rank: %s" #~ msgstr "Положение в очереди" #, fuzzy #~ msgid "Error during the XML generation!" #~ msgstr "Ошибка при нахождении различий" #~ msgid " element: " #~ msgstr " Ñлемент:" #, fuzzy #~ msgid "Can not access user's home directory: %s (%s)" #~ msgstr "" #~ "%s: каталог %s не удалён (ÑвлÑетÑÑ Ð´Ð¾Ð¼Ð°ÑˆÐ½Ð¸Ð¼ каталогом Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %s)\n" #, fuzzy #~ msgid "User's home directory is not a directory: %s" #~ msgstr "" #~ "%s: каталог %s не удалён (ÑвлÑетÑÑ Ð´Ð¾Ð¼Ð°ÑˆÐ½Ð¸Ð¼ каталогом Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %s)\n" #, fuzzy #~ msgid "Can not create ARC user config directory: %s (%s)" #~ msgstr "Ðевозможно Ñоздать пользовательÑкий каталог конфигурации Dia" #, fuzzy #~ msgid "ARC user config directory is not a directory: %s" #~ msgstr "Ðевозможно Ñоздать пользовательÑкий каталог конфигурации Dia" #~ msgid "Created empty ARC user config file: %s" #~ msgstr "Создан пуÑтой файл Ð´Ð»Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»ÑŒÑкой конфигурации ARC: %s" #, fuzzy #~ msgid "CertificatePath defined, but not KeyPath" #~ msgstr "метка %q+D определена, но не иÑпользуетÑÑ" #, fuzzy #~ msgid "Delegation handler with service role starts to process" #~ msgstr "Поддержка ETRN не наÑтроена.\n" #~ msgid "Shepherd chosen:" #~ msgstr "Выбран Чабан:" #~ msgid "Couldn't acquire transaction lock" #~ msgstr "Сбой Ð¿Ñ€ÐµÐ´Ð¾Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ Ñ‚Ñ€Ð°Ð½Ð·Ð°ÐºÑ†Ð¸Ð¸" #~ msgid "Source Path size: %d" #~ msgstr "Длина пути в иÑточнике: %d" #~ msgid "Registration for Service: %s" #~ msgstr "РегиÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ ÑервиÑа: %s" #~ msgid "Outdated data: %s" #~ msgstr "УÑтаревшие данные: %s" #~ msgid "SOAP operation not supported: %s" #~ msgstr "ÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ñ SOAP не поддерживаетÑÑ: %s" #~ msgid "Route" #~ msgstr "Маршрут" #~ msgid "Routing to %s" #~ msgstr "ÐœÐ°Ñ€ÑˆÑ€ÑƒÑ‚Ð¸Ð·Ð°Ñ†Ð¸Ñ Ðº %s" #~ msgid "error on seek" #~ msgstr "ошибка поиÑка" #~ msgid "\tCache data dir : %s" #~ msgstr "\tКаталог Ñ ÐºÑшем данных: %s" #, fuzzy #~ msgid "Can not parse PKCS12 file" #~ msgstr "Файл \"%file:1\" не может быть открыт" #, fuzzy #~ msgid "No per-job directory specified" #~ msgstr "Ðе задано опиÑание задачи" #, fuzzy #~ msgid "Number of possible targets : %d" #~ msgstr "Сортировка назначений" #, fuzzy #~ msgid " ReferenceTime.value: %s" #~ msgstr "некорректное значение %%C" #~ msgid "WSRF request failed" #~ msgstr "Ошибка запроÑа WSRF" #, fuzzy #~ msgid "path to CA directory" #~ msgstr "Путь к каталогу Ð´Ð»Ñ Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ñ‹Ñ… файлов" #~ msgid "" #~ "\n" #~ "\n" #~ "!!!!check_again" #~ msgstr "" #~ "\n" #~ "\n" #~ "!!!!check_again" #~ msgid "" #~ "\n" #~ "\n" #~ "!!!!do_delete" #~ msgstr "" #~ "\n" #~ "\n" #~ "!!!!do_delete" #, fuzzy #~ msgid "Can't acquire delegation context" #~ msgstr "%s: Ðевозможно получить контекÑÑ‚ Ð´Ð»Ñ %s" #~ msgid "" #~ "Argumentd to -i has the format Flavour:URL e.g.\n" #~ 
"ARC0:ldap://grid.tsl.uu.se:2135/mds-vo-name=sweden,O=grid\n" #~ "CREAM:ldap://cream.grid.upjs.sk:2170/o=grid\n" #~ "\n" #~ "Argument to -c has the format Flavour:URL e.g.\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/nordugrid-cluster-name=grid.tsl.uu.se,Mds-" #~ "Vo-name=local,o=grid" #~ msgstr "" #~ "Ðргумент опции -i задаётÑÑ Ð¿Ð¾ форме Flavour:URL, например:\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/mds-vo-name=sweden,O=grid\n" #~ "CREAM:ldap://cream.grid.upjs.sk:2170/o=grid\n" #~ "\n" #~ "Ðргумент опции -c задаётÑÑ Ð¿Ð¾ форме Flavour:URL, например:\n" #~ "ARC0:ldap://grid.tsl.uu.se:2135/nordugrid-cluster-name=grid.tsl.uu.se,Mds-" #~ "Vo-name=local,o=grid" #~ msgid "Creating an A-REX client." #~ msgstr "СоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ A-REX" #~ msgid "Client side MCCs are loaded." #~ msgstr "Подгружены клиентÑкие компоненты цепи Ñообщений" #~ msgid "Failed to find delegation credentials in client configuration." #~ msgstr "Ðе удалоÑÑŒ обнаружить доверенноÑти в конфигурации клиента" #~ msgid "There were no response to a submission request." #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± отправке задачи" #~ msgid "A response to a submission request was not a SOAP message." #~ msgstr "Ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ запуÑке задачи не ÑвлÑетÑÑ Ñообщением SOAP" #~ msgid "Creating and sending a status request." #~ msgstr "Создание и отправка запроÑа о ÑоÑтоÑнии" #~ msgid "There were no response to a status request." #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии" #~ msgid "Creating and sending a service status request." #~ msgstr "Создание и отправка запроÑа о ÑоÑтоÑнии Ñлужбы" #~ msgid "There were no response to a service status request." #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ ÑоÑтоÑнии Ñлужбы" #~ msgid "Creating and sending request to terminate a job." #~ msgstr "Создание и отправка запроÑа о прерывании задачи" #~ msgid "There was no response to a job termination request." #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾ прерывании задачи" #~ msgid "There was no response to a job cleaning request." #~ msgstr "Ðе поÑтупил ответ на Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± очиÑтке задачи" nordugrid-arc-6.14.0/po/PaxHeaders.30264/de.gmo0000644000000000000000000000013214152153477017060 xustar000000000000000030 mtime=1638455103.789628398 30 atime=1638455103.788628384 30 ctime=1638455103.898630036 nordugrid-arc-6.14.0/po/de.gmo0000644000175000002070000011365414152153477017057 0ustar00mockbuildmock00000000000000Þ•‚, <` a h v „ ‘ ” œ ¦ )¸ â 9ð *!@!Z!c!L|!9É!""&"I@"Š"2›"&Î",õ"%"#%H#4n#5£#%Ù#?ÿ#H?$>ˆ$5Ç$6ý$4%GT%œ%³%"Î%+ñ%)&:G&/‚&P²&''8'U' p'‘'¢'¶' É'-Õ')("-(P(,o("œ(#¿(%ã($ )'.)(V)!) ¡)Â)Ù)ö) ***@*\* v*‚*0’**Ã*î*+ +*+<+!\+~+!•+·+HÇ+ ,),)H,*r,",/À,"ð,8-=L-#Š-$®-%Ó--ù-'.!=._.s.2.-Ã.+ñ../+L/:x/>³/:ò/"-0(P0"y0!œ04¾05ó0&)1.P1)1'©1/Ñ132/52$e2'Š2)²2Ü2-ø21&3-X3†3)£3Í3æ34þ36346j4"¡4*Ä4 ï4#5 45/@5p52‹5;¾5<ú5 76E6.V6…6 6 š6»6Û6ê677%7(57^7x77¯7Ã7Û7ó7"838"L8+o8›8²8 É8×8,ô8#!9&E9l9]„9(â9= :€I: Ê:Ië:'5;!];‘;"<4<E<Z<c<&t< ›<–©<@=%T=#z=0ž= Ï=ð= >>)>(C>1l>)ž>È>Ø>ð> ?? )? 7?%C? 
i?Š?6›?6Ò?* @ 4@ @@ M@Z@m@~@"•@)¸@(â@ A$A"M‡MšM$£M ÈMéMN!#N/EN)uN;ŸN ÛN-üN**O<UO ’O5O=ÓO8PJP6\P8“P+ÌPøP6þP)5Q:_Q-šQ3ÈQ&üQ5#R(YR‚R¡R°RÀRÂR.ÉR*øR)#SMS`SeSVSÖSåSíS T&T>T$YT~T(‘T/ºT.êT.U7HU€U žU%¨U0ÎUÿUV&V)*VTVqVVÈX ÏX ÛX éXöXùXYY+*Y VY;dY Y¶YÐYÙY`÷Y8XZ%‘Z·ZSÓZ'[79['q[-™[*Ç[&ò[;\<U\3’\EÆ\N ]D[]; ]<Ü]%^O?^^«^#È^9ì^0&_3W_(‹_o´_$` A`b`~`/›`Ë`"â`a a:'a6ba*™a%Äa9êa4$b)Yb)ƒb*­b9Øb/c'Bc%jcc¤c¾c Òcóc" d-dMdad7yd2±dädþde/e(Ae&je‘e%¨eÎeJ×e"f92fAlf9®f$èfC g+Qg8}gF¶g)ýg*'h+Rh-~h¬h$ÆhëhiC&i6ji4¡i7Öi4j=CjKj=Íj( k84k+mk$™k@¾kBÿk,BlBol7²l7êl8"m?[m9›m1Õm2n9:n!tn9–nFÐn3o"Ko9no¨oÈo9æoA pAbp*¤p*Ïpúpq 3qD>qƒq:¢q?Ýq?r ]rkr:}r¸rÀr"Ïr(òrs.sIs\sts2‡sºsÖs!óst*tCtbt#t(¥t$Ît,ót u7uNu^u'xu+ u,Ìuùu^v-rvD v—åv%}wK£w,ïw&x¯Cx&óxy4yRyZy.myœy¨±yZz(oz$˜z7½z#õz"{<{P{!h{)Š{7´{2ì{|.|D|_|n| }| ˆ|*”|$¿|ä|Võ|ML}<š} ×} ã} ï}"û}~/~&J~/q~*¡~Ì~æ~+þ~*A[w – ¡ ­ º Ç)Ó-ý.+€Z€4t€*©€/Ô€%?*>jU©ZÿWZ‚)²‚(Ü‚%ƒ+ƒþFƒME„1“„3Å„\ù„XV…0¯…@à…9!†[†w†Q”†æ†ú†‡-.‡%\‡*‚‡'­‡%Õ‡#û‡ˆ0>ˆoˆŽˆ«ˆ'ÀˆèˆBúˆ(=‰)f‰‰¢‰²‰Ɖä‰ ô‰ þ‰Š<Š/LŠ/|Š=¬Š*êŠ0‹?F‹6†‹½‹Í‹á‹8û‹'4Œ(\Œ …Œ‘Œ ¡ŒÂŒ-àŒN] rR|!Ï"ñŽ".Ž6QŽ/ˆŽ<¸ŽõŽ40I=z¸=ÏE @S”+ª2Ö8 ‘B‘IH‘1’‘BÄ‘5’E=’)ƒ’X­’@“G“]“ t“‚“„“D‹“$Г0õ“&”=”(B”Pk”¼”֔ߔ#þ”"•<•&R• y•7…•7½•6õ•/,–F\–%£– É–(Ó–0ü– -—5:—p—5t—ª—í×_w9•Å'Wi0¤kL*Q$]näéKÖcEJ<¨qÝ:ÏMøyD„‚Vª–8˜qPöO>šØ“I¢ÌŸÛYJY‚ mºS.ëT{É'[7<‡ÿ‰u.=༑ ·/ƒjSîD *5ËsæÚ®ù]ÎÍ9=­xkÄG;e¯%¹)\[!xzÓ§Ô^+ MÒvT1 w&yN´È ½Þ-¦z²ýÜ,#}@ Cc|j3¡7IÂâr€,¬û^è_Xž(`lQUU”+RÆß% BðhC>ÐtñÀvaK5ÇmbA¥ÙœZ0 Šp28`bfH†åi›o‹W¶hÑtÁ4çn$d3³ã™Ro~©ÊZêôPH÷Ž¿ E|r—}:-2» ¾óïXü£F /G€’õdF)p¸NŒ"µf&Aò Ã#s6±«a4?(u?Bá°l!Õˆ\þ~@gL6ì"g…{eV;Oú1 %s attributes: base dn: %s filter: %s%s%s (%s)%s failed%s request failed%s request to %s failed with response: %s%s version %s%s: File request %s in SRM queue. Sleeping for %i secondsARC Auth. request: %sARC delegation policy: %sAborted!Adding location: %s - %sArcAuthZ: failed to initiate all PDPs - this instance will be non-functionalAre you sure you want to synchronize your local job list?Authorized from remote pdp serviceAuthorized from xacml.pdpBoth of CACertificatePath and CACertificatesDir elements missing or emptyBroker %s loadedCan not create function: FunctionId does not existCan not dynamically produce AlgFacrotyCan not dynamically produce AttributeFactoryCan not dynamically produce EvaluatorCan not dynamically produce FnFactoryCan not find element with proper namespaceCan not find element with proper namespaceCan not open job description file: %sCan not parse classname for AttributeFactory from configurationCan not parse classname for CombiningAlgorithmFactory from configurationCan not parse classname for FunctionFactory from configurationCan not parse classname for Policy from configurationCan not parse classname for Request from configurationCan't create delegation contextCan't create information handle - is the ARC ldap DMC plugin available?Can't read from sourceCan't write to destinationCancelling synchronization requestCannot convert module name to Python stringCannot create argument of the constructorCannot find under response soap message:Cannot find content under response soap messageCannot find file at %s for getting the proxy. 
Please make sure this file exists.Cannot import moduleCheck: looking for metadata: %sCheck: obtained checksum: %sCheck: obtained size: %lliChecking URL returned by SRM: %sCleaning job: %sClosed successfullyClosing connectionCommand: %sConnect: Authentication timed out after %d msConnect: Connecting timed out after %d msConnect: Failed authentication: %sConnect: Failed to connect: %sConnect: Failed to init auth info handle: %sConnect: Failed to init handle: %sCould not create temporary file: %sCould not determine version of serverCould not open LDAP connection to %sCould not set LDAP network timeout (%s)Could not set LDAP protocol version (%s)Could not set LDAP timelimit (%s)Couldn't parse benchmark XML: %sCreating a http clientCreating a pdpservice clientCreating a soap clientCreating and sending requestCreating directory %sCurrent transfer FAILED: %sCurrent transfer completeDCAU failedDCAU failed: %sDN %s is cached and is valid until %s for URL %sDN %s is cached but has expired for URL %sData transfer abortedData transfer aborted: %sDefault: %sDelegation ID: %sDelegation authorization failedDelegation role not supported: %sDelegation service: %sDelegation type not supported: %sDestination: %sDirectory size is larger than %i files, will have to call multiple timesDirectory: %sDisconnect: Closing timed out after %d msDisconnect: Failed closing - ignoring: %sDisconnect: Quitting timed out after %d msDuplicate replica found in LFC: %sError opening lock file %s in initial check: %sError: no LDAP query started to %sEvaluator does not support loadable Combining AlgorithmsEvaluator does not support specified Combining Algorithm - %sEvaluator for ArcPDP was not loadedEvaluator for GACLPDP was not loadedEvaluator for XACMLPDP was not loadedFATAL, ERROR, WARNING, INFO, VERBOSE or DEBUGFailed authenticatingFailed connecting to server %s:%dFailed reading dataFailed reading list of filesFailed sending CWD command for credentials renewalFailed sending CWD command for job cancellingFailed sending CWD command for job cleaningFailed sending DELE command for job cancellingFailed sending RMD command for job cleaningFailed to authenticate SAML Token inside the incoming SOAPFailed to authenticate Username Token inside the incoming SOAPFailed to authenticate X509 Token inside the incoming SOAPFailed to bind to ldap server (%s)Failed to connect for credential renewalFailed to connect for job cleaningFailed to connect to server %s:%dFailed to convert security information to ARC policyFailed to convert security information to ARC requestFailed to create ldap bind thread (%s)Failed to disconnect after credentials renewalFailed to disconnect after job cancellingFailed to disconnect after job cleaningFailed to generate SAML Token for outgoing SOAPFailed to generate Username Token for outgoing SOAPFailed to generate X509 Token for outgoing SOAPFailed to initialize OpenSSL libraryFailed to initialize main Python threadFailed to initiate delegation credentialsFailed to open data channelFailed to parse SAML Token from incoming SOAPFailed to parse Username Token from incoming SOAPFailed to parse X509 Token from incoming SOAPFailed to read object %s: %sFailed to remove cache per-job dir %s: %sFailed to store ftp fileFailed to transfer dataFailed to verify X509 Token inside the incoming SOAPFailed to verify the signature under Failed to verify the signature under Failed uploading local input filesFailed while finishing reading from sourceFailed while reading from sourceFailed while writing to destinationFailure: %sFile 
delete failed, attempting directory deleteFile is not accessible: %sFile type is not available, attempting file deleteFiles associated with request token %s aborted successfullyFiles associated with request token %s released successfullyFunction : %sGlobus error: %sGrid identity is mapped to local identity '%s'HER: %sIdentity: %sInitialized %u-th Python serviceInitiating delegation procedureInvalid EffectInvalid JobDescription:Invalid URL: %sInvalid class nameInvalid url: %sJob %s does not report a resumable stateJob cancelling successfulJob cleaning successfulJob description to be sent: %sJob description: %sJob resuming successfulJob submission summary:Job submitted with jobid: %sLDAP connection already open to %sLDAP query timed out: %sLDAPQuery: Getting results from %sLDAPQuery: Initializing connection to %s:%dLDAPQuery: Querying %sLdap bind timeout (%s)Library : %sLoading %u-th Python serviceLocations are missing in destination LFC URLMLSD is not supported - trying NLSTMain Python thread was not initializedMemory allocation errorMissing CertificatePath element or ProxyPath element, or is missingMissing or empty CertificatePath elementMissing or empty CertificatePath or CACertificatesDir elementMissing or empty CertificatePath or CACertificatesDir element; will only check the signature, will not do message authenticationMissing or empty KeyPath elementMissing or empty KeyPath element, or is missingMissing or empty PasswordSource elementMissing or empty Username elementMissing reference to factory and/or module. It is unsafe to use Globus in non-persistent mode - (Grid)FTP code is disabled. Report to developers.Missing security object in messageNLST/MLSD failedNLST/MLSD failed: %sName: %sNo SOAP responseNo authorization response was returnedNo jobs givenNo policy file or DNs specified for simplelist.pdp, please set location attribute or at least one DN element for simplelist PDP node in configuration.No response from %sNo target available inside the policyNo target available inside the ruleNumber of OpenSSL locks changed - reinitializingOperation completed successfullyOutgoing Message is not SOAPPASV failedPASV failed: %sPDP: %s can not be loadedPassword encoding type not supported: %sPath %s is invalid, creating required directoriesPolicy Decision Service invocation failedPolicy is emptyProcessing a %s requestProxy generation succeededProxy path: %sProxy type: %sReason : %sRemoving %sRenewal of credentials was successfulRenewing credentials for job: %sRequest is emptyRequest is reported as ABORTED, but all files are doneRequest is reported as ABORTED, since it was cancelledRequest is reported as ABORTED. 
Reason: %sRequest: %sResponse: %sResponse: %sReusing connectionSASL InteractionSOAP invocation failedSRM did not return any informationSRM did not return any useful informationSRM returned no useful Transfer URLs: %sSSL error: %d - %s:%s:%sSendCommand: Failed: %sSendCommand: Timed out after %d msServer SRM version: %sServer implementation: %sService is waiting for requestsSome transfers failedSource: %sStart testStartReadingStartWritingSubject: %sSubmit: Failed sending CWD commandSubmit: Failed sending CWD new commandSubmit: Failed sending job descriptionSubmit: Failed to connectSubmit: Failed uploading local input filesSucceeded to authenticate SAMLTokenSucceeded to authenticate UsernameTokenSucceeded to authenticate X509TokenSucceeded to verify the signature under Succeeded to verify the signature under Target %s removed by FastestQueueBroker, doesn't report number of free slotsTarget %s removed by FastestQueueBroker, doesn't report number of total slotsTarget %s removed by FastestQueueBroker, doesn't report number of waiting jobsThe Response is not going to this endThe Service advertises no Health State.The Service doesn't advertise its Type.The StatusCode is SuccessThe arccat command performs the cat command on the stdout, stderr or grid manager's error log of the job.The arccp command copies files to, from and between grid storage elements.The arcget command is used for retrieving the results from a job.The arckill command is used to kill running jobs.The arcls command is used for listing files in grid storage elements and file index catalogues.The delegated credential got from delegation service is stored into path: %sThe request has passed the policy evaluationThere are %d requests, which satisfy at least one policyThere is %d subjects, which satisfy at least one policyThere was no HTTP responseThere was no SOAP responseThis seems like a temporary error, please try again laterTransfer FAILED: %sTransfer completeType is file, calling srmRmUnauthorized from remote pdp serviceUnsupported destination url: %sUnsupported protocol in url %sUnsupported source url: %sUpdateCredentials: request = %sUpdateCredentials: response = %sUser interface errorUsername Token handler is not configuredUsing insecure data transferUsing secure data transferUsing space token %sVOMS attribute parsing failedWaiting for responseWarning: Using SRM protocol v1 which does not support space tokensWrong number of parameters specifiedX509 Token handler is not configuredXACML request: %sXML response: %sYour identity: %sYour proxy is valid until: %s[filename ...][job ...]all jobsbrokercheck_ftp: failed to get file's modification timecheck_ftp: failed to get file's sizecheck_ftp: globus_ftp_client_get failedcheck_ftp: globus_ftp_client_modification_time failedcheck_ftp: globus_ftp_client_register_readcheck_ftp: globus_ftp_client_size failedcheck_ftp: timeout waiting for modification_timecheck_ftp: timeout waiting for sizeclass name: %sclientxrsl foundclientxrsl not foundconfiguration file (default ~/.arc/client.conf)could not find end of clientxrslcould not find start of clientxrsldirectorydirnamedisplay all available metadatado not ask for verificationdo not try to force passive transferdownload directory (the job directory will be created in this directory)echo: Unauthorizedfilenameforce migration, ignore kill failureftp_complete_callback: error: %sftp_complete_callback: successftp_read_callback: successftp_read_thread: Globus error: %sftp_read_thread: for_read failed - aborting: %sftp_read_thread: get and register 
buffersftp_read_thread: too many registration failures - abort: %sftp_read_thread: waiting for eofftp_write_thread: for_write failed - abortingftp_write_thread: get and register buffersglobus_ftp_client_operationattr_set_authorization: error: %shourhoursinit_handle: globus_ftp_client_handleattr_init failedinit_handle: globus_ftp_client_handleattr_set_gridftp2 failedinit_handle: globus_ftp_client_operationattr_init failedinput is not SOAPjobdescription file describing the job to be submittedjobdescription string describing the job to be submittedkeep the files on the server (do not clean)levellist_files_ftp: failed to get file's modification timelist_files_ftp: failed to get file's sizelist_files_ftp: globus_ftp_client_modification_time failedlist_files_ftp: globus_ftp_client_size failedlist_files_ftp: looking for modification time of %slist_files_ftp: looking for size of %slist_files_ftp: timeout waiting for modification_timelist_files_ftp: timeout waiting for sizelong format (more information)minuteminutesmodule name: %snnumbernumber of retries before failing file transferonly select jobs whose status is statusstroperate recursively up to specified leveloutput is not SOAPpathprint version informationremove the job from the local list of jobs even if the job is not found in the infosyssecondsecondssecondssetting file %s to size %llushow URLs of file locationsshow progress indicatorshow the stderr of the jobshow the stdout of the job (default)source destinationstart_reading_ftp: globus_ftp_client_getstart_reading_ftp: globus_ftp_client_get failedstart_reading_ftp: globus_thread_create failedstart_writing_ftp: globus_thread_create failedstart_writing_ftp: mkdir failed - still trying to writestart_writing_ftp: put failedstatusstrstop_reading_ftp: aborting connectionstop_reading_ftp: waiting for transfer to finishstringtimeout in seconds (default 20)urluse secure transfer (insecure by default)yProject-Id-Version: Arc Report-Msgid-Bugs-To: support@nordugrid.org POT-Creation-Date: 2021-12-02 15:25+0100 PO-Revision-Date: 2010-02-25 19:18+0100 Last-Translator: Steffen Möller Language-Team: German Language: MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit X-Generator: KBabel 1.11.4 Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2); X-Poedit-Language: Russian X-Poedit-KeywordsList: msg:2;IString:1;istring:1 X-Poedit-Basepath: /home/oxana/CVSROOT/ARC1 X-Poedit-SearchPath-0: src %s Attribute base dn: %s Filter: %s%s%s (%s)%s fehlgeschlagenAnfrage %s schlug fehl%s Anfrage an %s schlug fehl mit Antwort %s%s version %s%s: Datei Anfrage %s in SRM queue. Schlage für %i SekundenARC Auth. 
Anfrage: %sARC delegation policy: %sAbbruch!Füge location hinzu: %s - %sArcAuthZ: Fehler bei Initiierung wenigstens einer PDP - diese Instanz wird nicht funktional seinSoll die lokale job list wirklich synchronisiert werden?Authorisiert durch remote pdp serviceAuthorisiert durch xaml.pdpSowohl CACertificatePath als auch CACertificatesDir Elemente sind fehlend oder leerBroker %s geladenKann Funktion nicht anlegen: FunctionId existiert nichtKann AlgFactory nicht dynamisch anlegenKann AttributeFactory nicht dynamisch anlegenKann Evaluator nicht dynamisch produzierenKann FnFactory nicht dynamisch anlegenKann element mit passendem namespace nicht findenKann element mit passendem namespace nicht findenKonnte Datei mit Job Beschreibung nicht öffnen: %sKonnte classname für AttributeFactory nicht von Konfiguration parsenKonnte classname für CombiningAlgorithmFactory nicht von Konfiguration parsenKonnte classname für FunctionFactory nicht von Konfiguration parsenKonnte classname für Policy nicht von Konfiguration parsenKonnte classname für Request nicht von Konfiguration parsenKann delegation context nicht anlegenKann information handle nicht anlegen - ist das ARC LDAP DMC plugin verfügbar?Kann nicht von Quelle lesenKann nicht zu Ziel schreibenAbbruch der SynchronisationsanfrageKann Modul name nicht zu Python Zeichenkette konvertierenKann Argument für den Konstruktor nicht anlegenKann in SOAP-Antwort nicht findenKann Inhalt in SOAP-Antwort nicht findenKann Datei nicht bei %s finden, um den Proxy zu erhalten. Bitte stellen Sie sicher, dass diese Datei existiert.Kann Modul nicht importierenCheck: looking für Metadata: %sCheck: erhielt checksum: %sCheck: erhielt Größe: %lliÜberprüfen der URL zurückgegeben von SRM: %sAufräumen von Job: %sVerbindung erfolgreich geschlossenSchließe VerbindungKommando: %sConnect: Zeitüberschreitung der Authentikation nach %d msConnect: Zeitüberschreitung der Verbindung nach %d msConnect: Authentikation fehlgeschlagen: %sConnect: Verbindung zu %s schlug fehlConnect: Konnte auth info handle nicht initialisieren: %sConnect: Konnte init handle nicht initialisieren: %sKonnte temporäre Datei nicht anlegen: %sKonnte Version des Server nicht bestimmenKonnte LDAP Verbindung nicht öffnen zu %sKonnte LDAP netowrk Zeitüberschreitung nicht setzen (%s)Konnte LDAP Protokoll Version nicht setzen (%s)Konnte LDAP Zeitlimit nicht setzen (%s)Konnte benchmark XML nicht parsen: %sLege HTTP Client anLege pdpservice client anLege SOAP Clietn anErstellen und senden von AnfrageLege Verzeichnis %s anAktueller Transfer SCHLUG FEHL: %sAktueller Transfer vollständigDCAU fehlgeschlagenDCAU fehlgeschlagen: %sDN %s wird gecacht und ist gültig bis %s für Datei %sDN %s wird gecacht aber ist abgelaufen für URL %sDatentransfer abgebrochenDatentransfer abgebrochen: %sVoreinstellung: %sDelegation ID: %sDelegation Authorisierung fehlgeschlagenDelegation role nicht unterstützt: %sDelegation service: %sDelegation Typ nicht unterstützt: %sZiel: %sVerzeichnis enthält mehr als %i Dateien, werde Aufruf mehrfach ausführenVerzeichnis: %sDisconnect: Zeitüberschreitung vom Schließen nach %d msDisconnect: Fehler beim Schließen der Verbindung - ignoriert: %sDisconnect: Zeitüberschreitung beim Verlassen nach %d msDoppelte replica gefunden in LFC: %sFehler bei Öffnen von Lock-Datei %s in initialer Überprüfung: %sFehler: keine LDAP Anfrage gestartet bei %sEvaluator unterstützt ladare Combining Algorithms nichtEvaluator unterstützt die angegebenen Combining Algorithms nicht - %sEvaluator für ArcPDP wurde nicht geladenEvaluator 
für GACLPDP wurde nicht geladenEvaluator für XACMLPDP wurde nicht geladenFATAL, ERROR, WARNING, INFO, VERBOSE or DEBUGFehler bei AuthentisierenFehler bei Verbinden zu server %s:%dFehler bei Lesen von DatenFehler bei Lesen von DateilisteFehler beim Senden von CWD Kommando für Erneuerung von credentialsFehler beim Senden von CWD für den Abbruch eines JobsKonnte CWD Kommando nicht senden um Job aufzuräumenFehler beim Senden von DELE für den Abbruch eines JobsKonnte RMD Kommando nicht senden um Job aufzuräumenKonnte SAML Token aus eingehender SOAP nicht authentifizierenFehler bei der Authentifikation des Username Token in der einngehenden SOAPFehler bei Authentifizieren von X509 Token in eigehendem SOAPFehler bei Verbinden zu ldap server (%s)Fehler beim Verbindungen für Erneuerung von credentialsKonnte nicht verbinden, um Job aufzuräumenFehler bei Verbinden zu server %s:%dFehler bei Konvertieren von security information für ARC policyFehler bei Konvertierung von security information für ARC AnfrageFehler bei Anlegen von ldap bind thread (%s)Fehler bein Trennen der Verbindung nach Erneuerung der credentialsFehler beim Trennen der Verbindung nach Abbruch von JobKonnte Verbindung nicht trennen nach Aufräumen von JobKonnte SAML Token für ausgehendes SOAP nicht generierenFehler bei Erstellen von Nutzernamen Token für ausgehende SOAPFehler bei Generieren von X509 Token für ausgehende SOAPFehler bei Initialisierung von OpenSSL BibliothekFehler bei Initialisierung des main Python ThreadsFehler bei der Initialisierung der delegation credentialsFehler bei Öffnen von DatenkanalKonnte SAML Token nicht aus eingehender SOAP herausparsenKonnte Username Token nicht von eingehender SOAP Nachricht herauslesenFehler bei Parsen von X509 Token in eigehendem SOAPFehler bei Lesen von Objekt %s: %sFehler bei Entfernen von cache per-job Verzeichnis %s: %sFehler bei Ablage von FTP DateiFehler bei Transfer von DatenFehler bei Verifizieren von X509 Token in eigehendem SOAPFehler bei der Überprüfung der Signatur unter Fehler bei der Überprüfung der Signatur unter Konnte lokale Inputdateien nicht hochladenFehler bei Abschluß des Lesens von QuelleFehler bei Lesen von QuelleFehler bei Schreiben zu ZielFehler: %sLöschen von Datei schlug fehl, versuche als Verzeichnis zu löschenDatei ist nicht zugreifbar: %sDateitype ist nicht verfügbar, versuche Datei zu löschenDateien assoziiert mit Anfrage Token %s erfolgreich abgebrochenDateien assoziiert mit Anfrage Token %s erfolgreich freigegebenFunktion : %sGlobus Fehler: %sGrid Identität wird zugewiesen zu lokaler Identität '%s'HER: %sIdentität: %sInitialisierte %u-th Python servceInitialisierung der Delegations-ProzedurUngültiger EffektUngültige JobDescription:Ungültige URL: %sUngültiger KlassennameUngültige url: %sJob %s berichtet nicht von einem resumable ZustandJob erfolgreich abgebrochenJob erfolgreich aufgeräumt.Zu sendende Job-Beschreibung : %sJob Beschreibung: %sJob erfolgreich resumed.Job Hochladen Zusammenfassung:Job hochgeladen mit Job ID: %sLDAP Verbindung bereits offen zu %sZeitüberschreibung bei LDAP Anfrage: %sLDAPQuery: Erhalte Ergebnisse von %sLDAPQuery: Initialisiere Verbindung zu %s:%dLDAPQuery: Frage an %sLdap bind timeout (%s)Bibliothek : %sLade %u-th Python ServiceLocations fehlen in destination LFC URLMLSD ist nicht unterstützt - versuche NLSTMain Python Thread wurde nicht initialisiertSpeicherallokationsfehlerFehlendes CertificatePath Element oder ProxyPath Element, oder fehltFehlendes oder leeres CertificatePath ElementFehlendes oder leeres CertificatePath 
oder CACertificatesDir ElementFehlendes oder leeres CertificatePath oder CACertificatesDir Element; werde nur die Signature überprüfen, die Nachricht jedoch nicht authentifizierenFehlendes oder leeres KeyPath ElementFehlendes oder leeres KeyPath Element, oder fehltFehlendes oder leeres PasswordSource ElementFehlendes oder leeres Username ElementFehlende Referenz zu factory und/doer Module. Es ist unsicher, Globus im nicht-persistenten Modus zu nutzen - (Grid)FTP code wurde disabled. Bitte die Entwickler informieren.Fehlendes security Objekt in NachrichtNLST/UMLSD fehlgeschlagenNLST/UMLSD fehlgeschlagen: %sName %sKeine SOAP AntwortEs wurde keine authorization response erwidertKeine Jobs angegebenKeine Policy Datei oder DNs angegeben für simplelist.pdp, bitte setzen Sie ein location Attribut oder zumindest ein DN Element für den PDP Knoten in der KonfigurationKeine Antwort von %sKein Ziel innerhalb der Policy vorhandenKein Ziel verfügbar in dieser RegelAnzahl von OpenSSL locks verändert - reinitialisierungOperation erfolgreich abgeschlossenAusgehende Nachricht ist kein SOAPPASV fehlgeschlagenPASV fehlgeschlagen: %sPDP: %s kann nicht geladen werdenPasswort Kodierung nicht unterstützt: %sPfad %s ist ungültig, lege benötigte Verzeichnisse anAusführen des Policy Decision Service schlug fehlPolicy is leerVerarbeite %s AnfrageProxy erfolgreich angelegtProxy Pfad: %sProxy Typ: %sGrund : %sEntferne %sErneuerung der Credentials war erfolgreichErneuern der credentials für Job %sAnfrage ist leerAnfrage wurde berichtet als ABORTED (abgebrochen), aber alle Dateien wurden bearbeitetAnfrage wurde berichtet als ABORTED (abgebrochen), denn sie wurde abgebrochenAnfrage wurde berichtet als ABORTED (abgebrochen). Grund: %sAnfrage: %sAntwort: %sAntwort: %sWiederholte Nutzung von VerbindungSASL InteraktionSOAP Aufruf fehlgeschlagenSRM lieferte keine Information zurückSRM lieferte keinerlei gebrauchbare InformationSRM gab keine nützliche Transfer URLs: %sSSL Fehler: %d - %s:%s:%sSendCommand: Fehler: %sSendCommand: Zeitüberschreitung nach %d msServer SRM version: %sServer Implementation: %sService wartet auf AnfragenEinige Transfers schlugen fehlQuelle: %sStarte TestStartReadingStartWritingSubjekt: %sSubmit: Konnte CWD Kommmando nicht sendenSubmit: Konnte CWD new Kommmando nicht sendenSubmit: Fehler bei Senden von Job BeschreibungSubmit: VerbindungsfehlerSubmit; Hochladen der lokalen Inputfiles schlug fehlErfolreiche Anthentifikation von SAMLTOkenErfolgreiche Authentifikation des UsernameTokenX509Token erfolgreich authentifiziertErfolgreiche Überprüfung der Signatur unter Erfolgreiche Verifikation der Signatur unter Ziel %s entfernt durch FastestQueueBroker, die Anzahl freier slots wird nicht genanntZiel %s entfernt durch FastestQueueBroker, die Anzahl vorhandener slots wird nicht genanntZiel %s entfernt durch FastestQueueBroker, die Anzahl wartender Jobs wird nicht genanntDie Antwort geht nicht bis zu diesem EndeDer Service gibt keinen Health State an.Der Service gibt seinen Typ nicht an.Der StatusCode ist SuccessЭта команда предназначена Ð´Ð»Ñ Ð²Ñ‹Ð²Ð¾Ð´Ð° на Ñкран Ñообщений Ñтандартного выхода, Ñтандартной ошибки или ошибок ÑиÑтемы при иÑполнении задачиMit arccp werden Dateien zu, von und zwischen grid storage Elementen kopiert.Mit arcget erhält man die Ergebnisse eines Jobs.Mit arckill lassen sich laufenden Prozesse beenden.Mit arcls werden Verzeichniss auf grid storage Elementen und Datei Index Katalogen angegebenDas delegierte credential wie erhalten von delegation service is abgelegt unter Pfad: %sDie 
Anfrage hat die Policy Evaluierung bestandenEs gibt %d Anfragen, die wenigstens einer Policy Anfrage genügtEs gibt %d Subjekte, die wenigstens eine Policy erfüllenKeine HTTP Antwort erhaltenKeine SOAP response erhaltenDies scheint ein vorübergehender Fehler zu sein, bitte später nochmal probierenTransfer FEHLER: %sTransfer vollständigTyp ist Datei, rufe srmRm aufNicht authorisiert von entferntem PDP serviceNicht unterstützte URL für Ziel: %sNicht-unterstzütztes Protrokoll in URL %sNicht unterstützte URL für Quelle: %sUpdateCredentials: Ð·Ð°Ð¿Ñ€Ð¾Ñ = %sUpdateCredentials: отзыв = %sBenutzungsschnittstellenfehlerNutzernamen Token handler ist nicht konfiguriertNutze unsicheren DatentransferNutze sicheren DatentransferNutze space token %sKonnte VOMS Attribut nicht herauslesenWarte vor AntwortWarnung: Nutze SRM Protokol v1 das keine space tokens unterstütztFalsche Anzahl an Parametern übertragenX509 Token handler ist nicht konfiguriertXACML Anfrage: %sXML Antwort: %sIhre Identität: %sIhr Proxy ist gültig bis: %s[dateiname ...][Job ...]alle JobsBrokercheck_ftp: konnte Modification time von Datei nicht erhaltencheck_ftp: konnten Dateigröße nicht bestimmencheck_ftp: globus_ftp_client_get fehlgeschlagencheck_ftp: globus_ftp_client_modification_time fehlgeschlagencheck_ftp: globus_ftp_client_register_readcheck_ftp: globus_ftp_client_size fehlgeschlagencheck_ftp: Zeitüberschreitung bei Warten auf modification_timecheck_ftp: Zeitüberschreitung bei Warten für GrößeKlassenname: %sclientxrsl gefundenclientxrsl nicht gefundenKonfigurationsdatei (Vorteinstellung ~/.arc/client.conf)konnte Ende von clientxrsl nicht findenkonnte Start von clientxrsl nicht findenVerzeichnisVerzeichnisnamezeige alle verfügbare Metadatenfrage nicht nach Verifikationversuche nicht, passiven Transfer zu erzwigenDownload-Verzeichnis (das Job-Verzeichnis wird in diesem Verzeichnis abgelegt)echo: UnauthorisiertDateinameerzwinge Migration, ignoriere ein Fehlschlagen des Abbruchs bereits laufender Jobsftp_complete_callback: Fehler: %sftp_complete_callback: erfolgreichftp_read_callback: Erfolgftp_read_thread: Globus Fehler: %sftp_read_thread: for_read fehlgeschlagen - Abbruch: %sftp_read_thread: beziehe und registriere Pufferftp_read_thread: zu viele Registrierungsfehler - Abbruch: %sftp_read_thread: warte auf EOFftp_write_thread: for_write fehlgeschlagen - Abbruchftp_write_thread: Beziehe und Registriere Pufferglobus_ftp_client_operationattr_set_authorisation: Fehler: %sStundenStundeStundeninit_handle: globus_ftp_client_handleattr_init fehlgeschlageninit_handle: globus_ftp_client_handleattr_set_gridftp2 fehlgeschlageninit_handle: globus_ftp_client_operationattr_init fehlgeschlagenEingabe ist kein SOAPDatei mit Job-Beschreibung wird hochgeladenZeichenkette mit Job-Beschreibung wird hochgeladenbehalte die Dateien auf dem Server (dort nicht löschen)Tiefelist_files_ftp: Fehler bei Bezug von Zeitpunkt der letzten Dateiänderunglist_files_ftp: Fehler bei Bezug von Dateigrößelist_files_ftp: globus_ftp_client_modification_time fehlgeschlagenlist_files_ftp: globus_ftp_client_size fehlgeschlagenlist_files_ftp: Bezug von Zeitpunkt der letzten Dateiänderung von %slist_files_ftp: Suche nach Größe von %slist_files_ftp: Zeitüberschreitung bei Warten auf Zeitpunkt der letzten Dateiänderung list_files_ftp: Zeitüberschreitung bei Warten auf Dateigröße ausführliche AusgabeMinutenMinuteMinutenModulname: %snNummerAnzahl von Wiederholungen bis zu einem Abbruch der DateiübertragungSelektiere Jobs mit Status statusstrarbeite rekursiv bis zu einer festgelegten 
TiefeAusgabe ist nicht SOAPPfadAngabe des aktuellen Versionsbezeichnersentferne Job aus lokaler Liste selbst wenn der Job dem Infosys nicht bekannt istSekundenSekundeSekundenSekundenSetze Datei %s zu Größe %lluzeige URLs von Datei-Lokalisationenzeige Fortschrittsanzeigezeige stderr des JobsZeige stdout des Jobs (Voreinstellung)Quelle Zielstart_reading_ftp: globus_ftp_client_get fehlgeschlagenstart_reading_ftp: globus_ftp_client_get fehlgeschlagenstart_reading_ftp: globus_thread_create fehlgeschlagenstart_writitng_ftp: globus_thread_create failedstart_writing_ftp: mkdir fehlgeschlagen - versuche weiter zu schreibenstart_writing_ftp: put fehlgeschlagenstatusstrstop_reading_ftp: Abbruch der Verbindungstop_reading_ftp: warte auf Beenden von TransferZeichenketteZeitüberschreitung nach Sekunden (Voreinstellung 20)URLNutze sicheren Transfer (unsicher ist Voreinstellung)jnordugrid-arc-6.14.0/po/PaxHeaders.30264/en@quot.header0000644000000000000000000000013214152153401020534 xustar000000000000000030 mtime=1638455041.319689759 30 atime=1638455041.319689759 30 ctime=1638455103.884629826 nordugrid-arc-6.14.0/po/en@quot.header0000644000175000002070000000226314152153401020524 0ustar00mockbuildmock00000000000000# All this catalog "translates" are quotation characters. # The msgids must be ASCII and therefore cannot contain real quotation # characters, only substitutes like grave accent (0x60), apostrophe (0x27) # and double quote (0x22). These substitutes look strange; see # http://www.cl.cam.ac.uk/~mgk25/ucs/quotes.html # # This catalog translates grave accent (0x60) and apostrophe (0x27) to # left single quotation mark (U+2018) and right single quotation mark (U+2019). # It also translates pairs of apostrophe (0x27) to # left single quotation mark (U+2018) and right single quotation mark (U+2019) # and pairs of quotation mark (0x22) to # left double quotation mark (U+201C) and right double quotation mark (U+201D). # # When output to an UTF-8 terminal, the quotation characters appear perfectly. # When output to an ISO-8859-1 terminal, the single quotation marks are # transliterated to apostrophes (by iconv in glibc 2.2 or newer) or to # grave/acute accent (by libiconv), and the double quotation marks are # transliterated to 0x22. # When output to an ASCII terminal, the single quotation marks are # transliterated to apostrophes, and the double quotation marks are # transliterated to 0x22. # nordugrid-arc-6.14.0/po/PaxHeaders.30264/insert-header.sin0000644000000000000000000000013214152153401021214 xustar000000000000000030 mtime=1638455041.327689879 30 atime=1638455041.327689879 30 ctime=1638455103.885629841 nordugrid-arc-6.14.0/po/insert-header.sin0000644000175000002070000000124014152153401021176 0ustar00mockbuildmock00000000000000# Sed script that inserts the file called HEADER before the header entry. # # At each occurrence of a line starting with "msgid ", we execute the following # commands. At the first occurrence, insert the file. At the following # occurrences, do nothing. The distinction between the first and the following # occurrences is achieved by looking at the hold space. /^msgid /{ x # Test if the hold space is empty. s/m/m/ ta # Yes it was empty. First occurrence. Read the file. r HEADER # Output the file's contents by reading the next line. But don't lose the # current line while doing this. g N bb :a # The hold space was nonempty. Following occurrences. Do nothing. 
x :b } nordugrid-arc-6.14.0/po/PaxHeaders.30264/quot.sed0000644000000000000000000000013214152153401017434 xustar000000000000000030 mtime=1638455041.335689999 30 atime=1638455041.335689999 30 ctime=1638455103.882629796 nordugrid-arc-6.14.0/po/quot.sed0000644000175000002070000000023114152153401017415 0ustar00mockbuildmock00000000000000s/"\([^"]*\)"/“\1â€/g s/`\([^`']*\)'/‘\1’/g s/ '\([^`']*\)' / ‘\1’ /g s/ '\([^`']*\)'$/ ‘\1’/g s/^'\([^`']*\)' /‘\1’ /g s/“â€/""/g nordugrid-arc-6.14.0/po/PaxHeaders.30264/en@boldquot.header0000644000000000000000000000013214152153401021375 xustar000000000000000030 mtime=1638455041.311689639 30 atime=1638455041.311689639 30 ctime=1638455103.885629841 nordugrid-arc-6.14.0/po/en@boldquot.header0000644000175000002070000000247114152153401021366 0ustar00mockbuildmock00000000000000# All this catalog "translates" are quotation characters. # The msgids must be ASCII and therefore cannot contain real quotation # characters, only substitutes like grave accent (0x60), apostrophe (0x27) # and double quote (0x22). These substitutes look strange; see # http://www.cl.cam.ac.uk/~mgk25/ucs/quotes.html # # This catalog translates grave accent (0x60) and apostrophe (0x27) to # left single quotation mark (U+2018) and right single quotation mark (U+2019). # It also translates pairs of apostrophe (0x27) to # left single quotation mark (U+2018) and right single quotation mark (U+2019) # and pairs of quotation mark (0x22) to # left double quotation mark (U+201C) and right double quotation mark (U+201D). # # When output to an UTF-8 terminal, the quotation characters appear perfectly. # When output to an ISO-8859-1 terminal, the single quotation marks are # transliterated to apostrophes (by iconv in glibc 2.2 or newer) or to # grave/acute accent (by libiconv), and the double quotation marks are # transliterated to 0x22. # When output to an ASCII terminal, the single quotation marks are # transliterated to apostrophes, and the double quotation marks are # transliterated to 0x22. # # This catalog furthermore displays the text between the quotation marks in # bold face, assuming the VT100/XTerm escape sequences. 
# nordugrid-arc-6.14.0/po/PaxHeaders.30264/ru.gmo0000644000000000000000000000013114152153477017115 xustar000000000000000029 mtime=1638455103.65062631 30 atime=1638455103.647626265 30 ctime=1638455103.896630006 nordugrid-arc-6.14.0/po/ru.gmo0000644000175000002070000224460014152153477017113 0ustar00mockbuildmock00000000000000Þ•B,‚µ<[[[,[C[\[t[‹[¢[¹[Ð[ç[þ[I\HJ\-“\1Á\<ó\0] I]W]u]“]¤]Â]à]ñ]ø]^)^A^Y^_^h^q^ˆ^¢^·^Í^è^__2_-R_ €_ Ž_ œ_©_ »_Å_Ë_Ý_ñ_ ``$'`L`&h`!`±`É`Ú` ô`aa-aCaTa'ca‹aa°a$Äaéa)øa"b!?babzb b1—bÉb"ßbcc"c 7cEcecyc•c ªcµcÒcîc d d"+dNdfd~d%“d¹d#Ód!÷de4eHece e Še –e ¡e­eÁeÐeçe úef,f!@fbfxf”f´fÄfªÙf„h¡h ²h ¿hÉhÝhüh7i*Qi|ii…i¡i©i>ºiùi j#j!8j Zj dj…j›j3¯j4ãjk+ké•;(–#d–ˆ–\¨–——"5—3X—Œ—@¤—Aå—'˜G˜f˜}˜"“˜"¶˜Ù˜ó˜ ™™8™"W™z™-‘™¿™*Ö™:š1<šnšš šc§š` ›`l›Í›ë›$œ,œDœ\œzœ”œ¯œÉœäœ"&D1_V‘(èž&(žOžiž>ƒžž+ÓžÿžCŸ4VŸI‹ŸAÕŸ6  N \ m ‹ £ ¼ Õ Sð .D¡ s¡¡š¡©¡ ½¡Ë¡Ú¡#ê¡¢#)¢#M¢0q¢/¢¢Ò¢î¢ £(£@£]£<u£;²£Xî£:G¤‚¤*“¤4¾¤ó¤¥$*¥)O¥:y¥7´¥Xì¥E¦9Z¦9”¦Φ4é¦1§5P§)†§S°§'¨),¨#V¨z¨–¨E´¨(ú¨#©;;©$w©Rœ©Tï©7Dª;|ª)¸ª/âª1«1D«'v«(ž«?Ç« ¬&(¬O¬-n¬/œ¬2̬!ÿ¬!­(=­f­*„­)¯­Ù­Fö­?=®,}®%ª®.Юÿ®2¯2M¯%€¯¦¯aį5&°Q\°&®°,Õ°%±%(±"N±#q±4•±5ʱ²²¹³!Ö³%ø³Q´p´Š´à©´е¨µ6ǵ?þµ;>¶Dz¶ ¿¶Eà¶$&·%K·#q·•·°·Ï·%ë·¸?*¸Hj¸>³¸5ò¸6(¹_¹v¹"޹±¹ȹ/å¹7º3Mº!º£º#ú7纻#7»2[»Ž»(¦»)Ï»/ù».)¼0X¼)‰¼8³¼:ì¼'½(:½Dc½>¨½ç½G¾GO¾—¾µ¾+ξ)ú¾+$¿-P¿6~¿7µ¿9í¿'À;ÀTÀ sÀ(”À@½À;þÀK:Á†Á/¤ÁÔÁ2óÁ5&Â1\Â(ŽÂ,·ÂäÂ#Ã&Ã,=Ã)jÃ'”üÃAÔÃ+Ä*BÄ0mÄ*žÄÉÄâÄøÄÅ-ÅEÅaÅrÅÅ2£ÅÖÅñÅÆ"Æ9Æ^QưÆ#ÏÆóÆ/Ç4AÇ.vÇ*¥Ç&ÐÇ%÷Ç+È&IÈ*pÈ@›È&ÜÈ%É!)É)KÉuÉN“É1âÉÊ/Ê&LÊsÊ'‘Ê:¹Ê,ôÊ,!ËLNË$›Ë$ÀË:åË Ì%=Ì$cÌˆÌ ¦ÌÇÌ/ÐÍÎP Î6qÎ&¨ÎÏÎ’éÎv|ÏzóÏqnÐ;àÐAÑh^Ñ#ÇÑ-ëÑÒ9ÒNÒjÒƒÒ/˜Ò<ÈÒ>ÓDÓUcÓ²¹Ó'lÔ‡”ÔKÕWhÕeÀÕ&Ö*;Ö fÖ2‡Ö$ºÖßÖ6þÖ'5×]×!{××,¹×æ×SØ!UØ!wØ$™Ø ¾ØUߨ 5ÙCÙ,YÙ †Ù”Ù ³ÙEÔÙ Ú;ÚUÚoÚŠÚ>¨Ú)çÚÛ*ÛAÛ`Û"zÛÛ"½Û=àÛ'ÜFÜfÜvÜ’Ü °Ü¾Ü/ÞÜ,Ý;Ý%XÝ$~Ý£Ý½Ý ØÝ äÝÞ"Þ"=Þ)`ÞŠÞ›Þ9¸ÞòÞß &ß2ßFDßS‹ßGßßi'à\‘àîàVáF[áO¢á òá ÿá. âOâ-jâ˜â#·â Ûâ!üâ ã!?ãaã{ãŠãšãªã&»ã&âã$ ä.ä Jäkä}ä‘ä¨ä6Ãäúä(å;å0Nåå—å¬åÂå<Úå-æ EæRæ bæoæ~æ”æ ¤æ±æÁæÑæáæñæçç ç0ç @ç MçZç lçyç Šç—ç ¨çµçÅçÕçäçõç èè"è2èBè Xè%dè%Šè'°è3ØèJ éWéjé6€é'·é9ßéê7êQê'eê"êA°ê:òê -ë/Në~ë-™ë1Çë-ùë)'ì"Qìtì,“ì"Àì$ãìí1í Qí$]í‚í˜í ¸íÙí!÷í9îSîcî&xî#Ÿî#Ãîçî]ï`ï:~ï2¹ï$ìï#ð@5ð/vð6¦ð%ÝðCñ.Gñ6vñ4­ñ?âñ "ò1Còuò!’ò.´ò+ãò-ó$=ó&bó1‰ó+»ó4çó'ô(Dô!môôªôÆô æô#õ +õ(7õ&`õ!‡õ©õÀõÝõôõ,ö8;ö+tö- ö.Îö-ýö-+÷.Y÷)ˆ÷²÷<Ï÷6 øCø^øxø8“ø.Ìø/ûø6+ùbùxùù«ù!Äùæù'ú),úCVúšú¶úKÐú û(û8ûNû0eû*–ûÁûÊûÛûùû ü(ü.Fü,uü¢ü@¿üIý3Jý+~ý$ªý1Ïýþ&þ5Eþ-{þ,©þÖþõþÿ 1ÿRÿpÿ!Žÿ°ÿÏÿìÿ2Ld~2.Â1ñ.#R*c&Ž%µ)Û@DFF‹'Òú"$GhcÌ âî 0"!Su‡§%Ç$í+8>8w"°!Óõ+ 08!iO‹AÛ7 U&b=‰'Ç%ï!-OmŠ?¥.å $ 6 S !p ;’ Î Wç H? Qˆ  Ú 'è ) ,: *g )’ F¼ * *. #Y } /› +Ë $÷  < W k } ‘ £ 9· "ñ %:ER˜!Ÿ!Á*ã2.A#p$”4¹Dî$3 X y"š!½ß\ÿ \h x`‚%ãC 3MJ:ÌMUcuVÙW0@ˆZÉ/$%T'z&¢É@ç*()SD} ÂicdÍe27˜&Ð÷# 0 ? LZ%s%™2¿ò‘!š_¼Ol-¯Ï*ï $;$`/…4µcêNgz‘§3Á%õ5QBl¯fÍ4-T‚%¡5Ç5ý!3 !U /w § & 7é ,!!"N!q!!8¯! è! ""-;"0i"<š"×"ö"#".#%Q#w#1“#.Å#ô#$!1$\S$!°$(Ò$(û$"$%0G%x%"‰% ¬%8Í%=&#D&$h&%&'³&3Û&2')B')l':–';Ñ'+ (*9(2d(3—(&Ë(ò(ú()!2)"T)-w)-¥)*Ó)þ)!*?>*~*"š*-½*Jë*-6+&d+.‹+9º+&ô+1,,M,6z,0±,,â,E-&U-&|-8£-=Ü-8."S.#v.š.°.Ê.è.%/"-/!P/#r/$–/!»/:Ý/0640Bk0®0(É0ò01.1/J1 z14›1%Ð1(ö1-2M2$j2(2¸2Ì2 é2) 3-43b32x3-«3+Ù3.4+44`4!~49 4(Ú45##5G5%c5/‰5'¹5$á5,6!36U6Fr6W¹687J7h7>„7"Ã7æ7!8.&82U8ˆ8+¥8!Ñ85ó8)9$I9,n9$›9HÀ9 :(:F:0f::—:>Ò::;(L;&u;"œ;'¿;-ç;<"3<V<%s<™<"®<0Ñ<0=<3=0p=7¡=&Ù==>>>Q> p>-‘>)¿>(é>$?"7?Z?#y?!? 
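The quot.sed, en@quot.header, en@boldquot.header and insert-header.sin members above are the stock GNU gettext helper fragments used to derive the en@quot / en@boldquot catalogs, and de.gmo / ru.gmo are the compiled binary forms of the corresponding po/*.po catalogs. The lines below are only a hedged sketch using standard gettext tools, not a rule taken from this package's po/ build files; de.roundtrip.po and curly.po are hypothetical output names, and the usual po/de.po and po/ru.po sources are assumed to sit alongside the compiled catalogs.
# compile a text catalog into the binary .gmo form dumped above
msgfmt --check --statistics -o ru.gmo ru.po
# dump a compiled catalog back into readable msgid/msgstr entries
msgunfmt de.gmo -o de.roundtrip.po
# quot.sed rewrites "..." and `...' quoting substitutes into typographic quotation marks
sed -f quot.sed < de.roundtrip.po > curly.po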
¿?à?#ÿ?I#@Om@,½@4ê@5A7UAA £A)ÄA îA,B-ãƒd"„@‡„È„ׄ ð„þ„…(…C…$^…ƒ…2œ…Ï… á…6ï…%&†,L†!y†›†'º†0â†8‡L‡1i‡9›‡Õ‡í‡þ‡]ˆ.sˆ¢ˆ¶ˆ¾ˆЈ$åˆ ‰#'‰K‰g‰…‰£‰¹‰2ʉ ý‰ Š+ŠEŠ ZŠ{Š!‚Š!¤Š$ÆŠ!ëŠ ‹‹ .‹Ð;‹B Œ>OŒLŽŒ4ÛŒ0;A7}<µ!òIŽ5^Ž#”Ž0¸Ž'éŽ.F!d†Ÿ·Ç6á7bP³7Б%‘D‘"Y‘|‘ œ‘ ¨‘É‘é‘û‘&’<’T’#h’;Œ’È’Þ’ý’ “ “0-“ ^“)j“”“(£“̓ۓû“”$.”%S”<y”=¶”ô” •$•%@•f•~•—‘•W)–h–Yê–7D—2|—>¯—î—0 ˜:˜R˜$j˜˜(¬˜Õ˜ï˜% ™/™ ?™ M™nX™LÇ™(š0=šnš)Žš(¸šáš#›%›)B›/l›œ›!¼›Þ›ö›( œ*6œ%aœ:‡œœÓœ)ñœ;Xr5Š&Àç'÷Hž*hž!“žµžÔžèžKúž7FŸD~ŸßÞŸùŸ7  M (n 6— IΠ;¡1T¡8†¡¿¡Ç¡Eæ¡3,¢`¢z¢’¢!ª¢Ì¢ì¢' £1£89£Ar£7´£@ì£+-¤*Y¤'„¤&¬¤+Ó¤*ÿ¤%*¥2P¥1ƒ¥µ¥-Ì¥ú¥¦:¦+Z¦E†¦.̦Hû¦D§W§ r§~§!“§µ§Ó§ñ§¨-$¨R¨r¨"’¨µ¨"Ψ+ñ¨©4©E©<Z©2—© Ê©שAî©'0ª+Xª3„ªm¸ª2&« Y«)g«'‘«*¹«+ä«&¬)7¬*a¬Œ¬Ÿ¬³¬!Ϭ ñ¬>ý¬=<­z­#”­ ¸­Æ­Ö­å­ü­%®/?®=o® ­® ·®Ä®$⮯¯*¯@¯Z¯-w¯¥¯À¯Û¯ù¯°)°F°#]°°,™°)ưð° ±±2±D±&_±!†±#¨±#̱%ð±&²=²AR²@”²Õ²ô²³0³O³f³³#˜³¼³%ͳó³´´ &´3´C´U´Ij´B´´l÷´Ndµf³µi¶B„¶<Ƕ=·DB·S‡·hÛ·UD¸=š¸:ظP¹Pd¹Eµ¹8û¹E4ºRzºAͺL»x\»rÕ»kH¼r´¼v'½pž½n¾{~¾rú¾rm¿[à¿[<Àh˜ÀqÁxsÁxìÁeÂv‰˜¶ÂËÂáÂúÂÃ'Ã?ÃYÃpÃ…ÃB—ÃÚÃòÃ%Ä47Ä,lÄ0™Ä`ÊÄ+Å<Å+NÅ]zÅØÅøÅÆ7ÆSÆ!dÆ"†Æ%©Æ$ÏÆ5ôÆ%*Ç3PÇ?„Ç'ÄÇ(ìÇÈ-È LÈmÈȪÈÉÈ$ãÈ(É=1É€oÉ ðÉIÊ'[Ê!ƒÊ ¥ÊxÆÊ‘?Ë¡ÑËŽsÌ*Í-Í:KÍ&†Í­Í"ÌÍ ïÍÎ;%ÎaÎ4Î9¶ÎðÎ#Ï4Ï,HÏuÏ…Ï”Ï0²Ï1ãÏÐ9)Ð.cÐ%’иÐÉÐ ÞÐ5ÿÐ!5ÑWÑlÑÑ:ˆÑOÃÑGÒ[ÒpÒMÒÍÒ;éÒ%ÓE=Ó5ƒÓ1¹ÓëÓ+Ô!4ÔVÔ'sÔ›Ô)¬Ô&ÖÔýÔÕ."Õ&QÕxÕ<•Õ"ÒÕõÕ#Ö 4Ö9UÖ!Ö&±Ö)ØÖ:×=×$U×Dz׿×/Þר%%ØKØTØ9tØ®ØÊØ&ÝØ"Ù?'Ù$gÙLŒÙ%ÙÙÿÙÚ Ú-.Ú*\Ú‡Ú¤ÚÄÚàÚüÚ8Û<IÛ&†Û!­Û8ÏÛÜ"Ü+;ÜgÜ*{ܦÜ8ÄÜ&ýÜ#$ÝHÝ-eÝ&“Ý8ºÝ&óÝ'Þ"BÞ<eÞ;¢Þ–ÞÞuß5ŽßÄßÓß'åß& à?4àtàà©à/Áàñàá$á=á+[á/‡á$·á—Üá.tâ£â)¾âèâúâ-ãAã#\ã€ãã©ã»ã%Ìã#òãä4äNä(_äˆä¢ä;·ä#óä#å*;åfå&‚å2©åÜå6õå,æ';æAcæR¥æ&øæ ç @çaç€ç'–ç¾ç5Øç3èBèXèŒiè öèé é>éfOé0¶é0çé ê "ê 0ê<êKêbêyê ’ê³ê1Ãêõê"ë'1ëYë,jë—ë´ë#Ìë-ðë/ìONìPžì?ïì-/í]íqíŠí ¡í Âí,ãíî8'î=`î1žî<ÐîL ï Zï0eï–ï¨ï)Çï/ñï !ð)/ðYð(hð.‘ðÀðÝðôð ñ0)ñ Zñ dñpñ €ññ¬ñÆñâñûñ ò*òHòaò$sò#˜ò-¼òêòýò!ó??ó(ó(¨ó1Ñó+ô./ô ^ô,lô™ô"­ô+Ðô+üô%(õNõ`õDxõ½õ.Üõ ö%&öLö aö kö=xö,¶ö'ãö. ÷:÷N÷b÷z÷C”÷(Ø÷øø-øKøjøŠø"¡ø&Äø1ëø1ùOù)cùùù°ùÀù-Óùúú(+úTú,púú¼ú#Öú0úú'+û?Sû3“ûÇû+ßû! üh-ü–üw²ü*ý Iý=Wý1•ý6Çý9þý=8þ4vþ4«þàþûþ ÿ"ÿ1ÿEÿXÿjÿyÿÿ6¯ÿæÿ8>P o8y$²×%ö$A[w•§¹(Õþ,F b"ƒ¦¹Ðçü-@OY©"Ä,ç=>R‘ ° ÍÛ"ô$<Up ¬/º@ê+W@˜+¨ Ô<á0.Kz)šÄ7ä7(Mv‰š ¹Úó '  < ;H &„ « 4à %ø 1 P %b  ˆ © -à 4ñ *& Q 5p K¦ /ò 0" S "h ‹ © $¸ >Ý ; MX *¦ NÑ K ]l:Ê646k*¢Íà-ò%  FRgfBÎ.!@"b&…¬Ëâ1,3!`7‚º6Ø4,DBq&´+Û-<5%r!˜ºÏåú % 2(?fheÏF5|"’µ:È<,@%m“ªB½4GPczŠ›ªº ÌÙíþ$3Xp$*¦Ñà#÷,$>cz”"ª)Í(÷O QpQÂ--[uŽªnÉ8Ni —'¢Ê(ê,&#S'w.Ÿ.Î6ý:46o:¦5á$ -< )j  ” µ Ç ß ø !)!"C!f!}!—!#¦!Ê!Eä!F*"#q"%•"»"Ð"ð" #<#C\# #!¹#Û#Bû#>$$R$)w$¡$$·$Ü$*ï$%%@%U%g%#w%"›%¾% Ö%0ä%,&B&]Y&·&*Ö&!'P#'&t''›'Ã'%à'-(4((J(6s(@ª(%ë(2)D)W)t)A)%Ï)õ) *)"*!L*n*&‹* ²*#½*+á* +/+ N+Z+ k+v+ Ž+,›+ È+,Õ+, ,5,Q,o,‹,«,Å,…Ý,/c-+“-#¿-ã- ü-I.;g.4£.2Ø. /4,/2a/#”/ ¸/'Ù/0030S0c0s0Š0!£0.Å0ô01%1@1 U1a1u1"‡1&ª1&Ñ1ø1*2Q=2'2&·2Þ2î2432;39n34¨33Ý3#4'54#]4.4!°4/Ò41545-O55}5³5)Ï5ù5696 U6%v63œ6,Ð6Qý6'O79w79±7*ë768M8E a8§DÅDÞDòDuEý{EPyFQÊFiG4†G/»G.ëGH4H+LHxH0’HLÃHMIN^I­I¼I9ÔI%J4JGRJHšJZãJB>KKRœKMïK:=L:xL!³L5ÕL9 M8EM;~M%ºM'àM'N0N8JN^ƒN_âNSBOi–O?PJ@PA‹PXÍP1&Q_XQO¸QqR=zR9¸RpòRScSr·S>*TDiT®T"ÌT[ïT,KU4xU/­U#ÝU9V:;VLvV*ÃVAîV20W7cW6›W7ÒW1 XWaJzaPÅa7b0Nb2b*²b4Ýb2c,Ecrc1Œc(¾c\çc1DdCvdºdMÏd<eZeue5e9Æe f%!g Gg'hg'g!¸gZÚg(5h+^h(Šh9³hQíh'?i!gi%‰i(¯iØi íi%j4j"Lj(oj$˜j'½j&åj& k-3k8akšk$´k#Ùk'ýk/%l0UlA†lIÈlm!m8m UmvmŽmžm²mÒmämôm,n5n6Lnƒn–n ©n4¶nën o.oNojo#ˆoF¬o@óo'4p*\pE‡p)Íp÷pq1q>:qPyqTÊqGr4grœr7 rDØrs2s-Gsus•ss[¹sGt5]t“tE¥tët4 u0?u8pu1©uÛu+ûu'v8Ev7~v"¶v-Ùv1w$9w&^w4…w+ºwæwUx%[xx2 xQÓx2%y-Xy#†yªyÇy&ây z*zTCz1˜zÊzEZ{K {Dì{11|)c|<|IÊ|}Q%}Lw}?Ä}-~b2~•~®~-Â~Að~O2E‚;È€" €$C€7h€? 
€Gà€E(6nG¥6í:$‚$_‚!„‚¦‚=Å‚7ƒ;ƒ Wƒ$dƒ ‰ƒªƒ'¾ƒæƒûƒ=„#W„.{„'ª„6Ò„: …%D…'j…D’… ×…"ø…)†NE† ”†%µ†)Û†-‡(3‡\‡ x‡†‡"Ÿ‡‡ׇ쇈ˆ,"ˆOˆoˆ!‹ˆ­ˆ'¾ˆæˆüˆ‰%.‰T‰)t‰Sž‰%ò‰%Š>Š3]Š+‘нŠ(ØŠ~‹"€‹H£‹ì‹(Œ/.Œ$^Œ#ƒŒ §Œ!ÈŒ,êŒ<T[0z«Ëæ-õ@#Ž"dŽw‡Ž+ÿŽ+2@"s)–#À$ä  (/I*y+¤Ð!á$‘((‘Q‘"l‘!‘ ±‘¾‘Õ‘ô‘’ #’1’L’b’s’’¨’Á’$Þ’ ““#“<“R“o“~“““®“ÓÞ“ ó“”+”F”d”‚”<š”×” õ”•=,•&j•‘•§•?¼•ü•&–.=–,l–9™–-Ó–—4—M—j——:˜—%Ó—$ù—˜[:˜,–˜)Ø8í˜4&™[™q™4Œ™(Á™#Ꙛ.%šJTšWŸš!÷š&›)@›Ij›B´›$÷›+œ:Hœ8ƒœZ¼œâ'ú+"ž;NžDŠž&ÏžQöždHŸ­Ÿ9ÍŸ' @/ (p @™ Ú  õ ¡¡7¡G¡-^¡4Œ¡DÁ¡0¢/7¢3g¢H›¢'ä¢' £24£g£-~£A¬£Cî£2¤ C¤Q¤#d¤ˆ¤$¤¤.ɤø¤ ¥ ¥>(¥,g¥5”¥RÊ¥&¦/D¦Dt¦:¹¦9ô¦;.§Bj§(­§)Ö§!¨""¨3E¨1y¨@«¨Jì¨;7©>s©²©Ë©Bè©G+ªPsªRĪR«$j««(ª«Ó«)ò«¬/¬H¬f¬¬ —¬¥¬$º¬߬%ý¬!#­E­`­+­«­+½­é­9ÿ­9®'V®,~®«®"Å®è®"¯&¯ B¯(c¯ Œ¯­¯ͯè¯2°$7°\°o°Œ°«°'Űí°! ±)/±!Y±#{±Ÿ±"±±1Ô±²( ²*I²$t²™² ¹²Ú²!ì²³Ô³:ô³/´*A´l´Š´'¥´*Í´+ø´G$µ3lµ5 µPÖµ/'¶0W¶;ˆ¶8Ķ?ý¶>=·4|·7±·3é·8¸3V¸/Џ-º¸5è¸B¹;a¹4¹ŠÒ¹(]º1†º*¸º/ãº/»,C»7p»¨» ·»Á»×»ï»"þ» !¼B¼X¼n¼†¼—¼·¼ɼÒ¼Û¼ø¼*ÿ¼*½@½V½ e½o½r½G‹½Ó½ò½1 ¾$=¾'b¾5о*À¾(ë¾)¿>¿0]¿*Ž¿#¹¿Ý¿ì¿ý¿À#À/>À nÃ/xèÃ9ÅÃ:ÿà :Ä"[Ä~ĖĬÄ4ËÄ;Å3<ÅpÅŒÅ#£Å ÇÅèÅ øÅ7Æ+;Æ*gÆ&’Æ ¹ÆÅÆ߯ ãÆíÆõÆ$Ç9Ç<Ç<XÇ•Ç)¯Ç$ÙÇKþÇVJÈ$¡ÈªÆÈqÊHzÊ ÃÊÍÊÕÊèÊüÊKË*aË'ŒË!´ËÖËÛË'ûË#Ì=Ì4UÌŠÌÌ ©Ì³ÌÆÌäÌíÌöÌ1ÿÌ$1Í#VÍ z͆ÍÍ«ÍÇÍ ÚÍûÍ1Î"LÎ"oÎ#’Î0¶ÎçÎÏG!Ï:iÏ=¤Ï!âÏ4Ð9ÐFRÐ+™Ð*ÅÐ4ðÐ/%Ñ)UÑ;Ñ-»Ñ éÑ Ò*Ò"IÒ1lÒ@žÒßÒ5ùÒ-/Ó*]Ó=ˆÓ.ÆÓ!õÓ/ÔGÔ]ÔAvÔ.¸ÔçÔýÔÕ*Õ?ÕVÕlÕÕ˜Õ®Õ<ÄÕ?Ö;AÖ–}ÖY×Rn×ÁØ!ר ùØÙ†Ù1ŽÚ2ÀÚóÚÛCÛ1ZÛ5ŒÛ=ÂÛ8ÜB9ÜD|Ü!ÁÜ-ãÜÝ1ÝCÝGÓÝÞ Þ)Þ @ÞNÞ%`Þ†Þ¤ÞÂÞ!ÉÞ6ëÞ8"ß+[߇ßßß0¸ßéß6à)<à-fà:”à-Ïà*ýà3(á&\á0ƒá0´á1åá)â5Aâ(wâ â¼âÎâàâòâã &ã4ã!Lãnã‚ã+”ã ÀãÍãããòãä$ä=äMäOäTäcä0}ä/®ä3Þä2å>Eå-„åD²å÷å.þå-æÝ=æ5ç*Qç®|ç#+èOèbè)vè è ¦è´èHÇèé+é$=ébé0gé*˜éLÃéfêwê\ŽêXëêkDëý°ë®ì1Æìøì í%*í Pí^í oí |í6Ší'Áí)éí&î2:î,mîšî,µîâî5üî+2ï^ï nï {ï ‰ï —ï2¤ï×ïíïð#ð2;ðnðð¡ð¸ðÌð2áðñ'ñ<ñ NñZñ1rñA¤ñæñ+ûñ%'òBMòò™ò<¢òUßò 5óVBó-™óJÇó,ôI?ô÷‰ô€õ ööE-ösööœöE¤ödêöiO÷Œ¹÷Føcø1øM±ø8ÿø#8ù\ù&tù"›ù!¾ù4àùú$0úUú:^ú<™ú7Öúûû(û==û {û"‰û#¬ûÐû(âû/ ü.;ü(jü0“ü,Äü*ñü.ý&Kýrý7‹ýÃýÚý'øý) þ'Jþ0rþ£þ ªþ´þ,Îþ%ûþ!ÿ0?ÿpÿ6wÿ6®ÿ#åÿ( ;2Õn;DI€ÊBê)-W\{‘ ¯ ½ÈÏ× Û#é; ¼IvV}_Ô)4 ^AKÁ ƒ– ² É ß )û "% #H +l ˜ ° Ç ß á 0 21 bd Ç Ë 4ã +)D.n*1È,ú-'3U‰}‹} d‡hìrU'È ðþ:Vt’°#·'Û %$JPY#b&†&­7ÔC GP+˜'Ä3ìK l ¯ ÐÚ+à$ 1 I;Wy“: \HG¥5í9# ]"~1¡/Ó*@C(„­Ä6ßQ51‡L¹-(4 ]Vi+ÀAì-.\+q&AÄ$(+Tp†£/¿=ï-MA9?É( M21€6²4é- 2L 1 1± ã (û $$!&I!&p!—! 
«!EÌ!X"&k",’"?¿")ÿ"+)#>U#”#¤#¹#"Ë%Lî%;&&T&&{&7¢&-Ú&K'ZT'¯'²',¸'å'í'p(x(4Ž(Ã(Uâ(8)OO)+Ÿ))Ë)Yõ)ZO*,ª* ×*Aø*%:+J`+S«+ÿ+,G,1b,”,˜,Ÿ,<¦, ã,.í,D-Ia-d«-e.{v.fò.RY/G¬/oô/Rd0:·0@ò0g31U›1Mñ1U?2L•2hâ2bK3ª®3`Y40º42ë4Z55y5r¯5A"6:d6¼Ÿ6;\7P˜7Ré7L<8Q‰85Û8P9Db9]§9€:D†:WË:r#;Q–;”è;S}<–Ñ<nh=p×=<H>‰…>t?r„?Q÷?SI@d@ZAW]APµATBh[BdÄBF)CTpCfÅCV,D>ƒD5ÂD<øDl5Eo¢EÛF_îF„NG,ÓG#Hu$H`šHEûHYAIA›I@ÝIqJXJ|éJ²fKLLCfLãªLeŽM2ôM'Ng©NGOPYOJªO_õO]UPZ³PSQibQUÌQX"RK{R1ÇRHùRgBSPªSJûSRFT~™T<U(UUP~U[ÏU +V(LVIuV<¿V\üV!YW!{WW_¼W\X?yXI¹X!Y%Y‡DY{ÌYLHZV•ZRìZ>?[9~[(¸[Zá[]<\Lš\-ç\0]EF]LŒ]*Ù],^"1^OT^R¤^T÷^VL_X£_ü_8`5N`8„`½`tÓ`ZHaV£a[úasVbSÊb+cnJcY¹cd*d’AdžÔdŒse'f#(f LfYfTjf¿fÄfiÉf_3gt“gnhawh@Ùhii%iê*i<kYRk:¬kDçk,l+Cl3olF£ljêlXUm&®mÕmïm5 n.Cn5rn.¨np×n>Hož‡o€&pV§ptþp[sq8Ïqr;(r6dr0›rCÌr=sPNsLŸs9ìsY&t5€tS¶tc uKnuMºu}v*†v6±v5èvWwMvwLÄw6x:Hx9ƒx@½xþx> yCLy"y7³y?ëyC+zoz-q{?Ÿ{3ß{w|L‹|ZØ|3}Gµ}Jý}~H~uÇ~_=2>Ð?€kO€g»€h#‹Œ>‚8W‚†‚)ƒ)Aƒ9kƒP¥ƒ*öƒg!„h‰„aò„ST…7¨…!à…4†,7†#d†+ˆ†!´†-Ö†3‡`8‡B™‡}܇,ZˆZ‡ˆZâˆT=‰4’‰ljå‰ºŠ·¾Š·v‹T.ŒNƒŒMÒŒ1 1R9„5¾6ô5+Ž6aŽ7˜Ž/ÐŽ96:nqàJq9¼Xö!O‘!q‘t“‘%’E.’t’‹’w“h”“ý“|‹”%•,.•R[•&®•GÕ•/–,M–Ãz–\>—@›—5Ü—˜8.˜%g˜3˜%Á˜9ç˜<!™U^™B´™U÷™QMš;Ÿš>Ûš?›&Z›>›:À›“û›ŸœÍ/•ý“žE©žbïž,RŸ8ŸQ¸ŸM  „X ™Ý Çw¡(?¢Wh¢WÀ¢&£N?£KŽ£OÚ£o*¤kš¤L¥NS¥>¢¥Aá¥@#¦†d¦[ë¦?G§u‡§Sý§ŸQ¨Žñ¨v€©}÷©]uªsÓªsG«s»«K/¬`{¬…ܬ5b­L˜­=å­™#®•½®RS¯;¦¯/â¯K°1^°M°LÞ°5+±‚a±ä±Of²H¶²Xÿ²5X³X޳jç³;R´3Ž´´~õB¶DÒ¶J·Nb·C±·@õ·A6¸lx¸må¸%S¹—y¹/¼5A¼Lw¼¦Ä¼Ok½L»½&¾4/¿Ld¿d±¿‰Àh Ào ÁFyÁ—ÀÁcXÂ\¼ÂMÃEgÃI­ÃE÷ÃS=ÄH‘ÄjÚÄsEÅi¹Å`#Æa„Æ6æÆ8ÇEVÇ8œÇBÕÇsÈlŒÈùÈJ{É;ÆÉTÊjWÊ<ÂÊOÿÊjOË:ºËTõËXJÌY£ÌTýÌURÍ_¨Í¶Î²¿Î+rÏržÏ“Ðs¥ÐNÑ«hѪÒ9¿Ò3ùÒU-ÓWƒÓYÛÓ\5Ôe’Ô‡øÔ‡€Õ0Ö;9ÖHuÖJ¾Öt ׃~×zØ¥}Ø<#Ùa`ÙDÂÙiÚ„qÚhöÚN_ÛR®Û>ÜE@Ü8†ÜY¿ÜTÝZnÝEÉÝzÞVŠÞLáÞR.ßbß>äß.#à.RàTàEÖàZá4wá0¬áÝáSýá,Qâ$~â%£â?Éâ$ ã©.ã@ØãIäTcä`¸ä[åUuåPËåXæKuæ]ÁæLçlç€îçDoèC´è>øèL7éD„é°Ééczê8Þê9ë<Që:ŽëUÉëxìW˜ìTðì‚EíDÈíH îlVî>Ãî=ï<@ï?}ï8½ïëöï^âñ^Aò¼ òn]ó^Ìó8+ôdôèiõþRöåQ÷g7øfŸøïù>öù5ú:·ú0òúX#û?|û;¼ûjøû cüœýJ¡ý¾ìýé«þR•ÿýèÿ“æ·z¶2Aéa+RiàrJK½u gKçl38 sÙDMÉ’T\N±T NU –¤ ; 2S h† !ï E PW ~¨ -' *U *€ +« 8× ­ B¾ /#1<U1’8Ä0ýB.dqTÖK+w58Æÿ5]F]¤HDK82É3ü03D(x<¡>Þ5S.rr¡$=9$w8œÕ®eÚ¤Ž9ÊH•˜©,B`o€ÐJQtœY_kYËM%LsMÀ5!Df"~#¡?ÅK MQ ZŸ -ú #(!'L!<t!6±!kè!0T"T…"!Ú"\ü"9Y#.“#2Â#&õ#U$Xr$Ë$ß$ö$ % %=%T%h%%–%­%Ä%Ú%ñ%&&6&J&^&w&‹&£&·&Ï&ã&ú&'''?'V'j''˜')¯'Ù'Cì'=0(?n(g®()*¦)-Ñ)cÿ)8c*aœ*8þ*,7+d+>„+?Ã+n,lr,+ß,: -:F-R-PÔ-{%.h¡.N /:Y/—”/[,0Cˆ0Ì0oë0[1Mt14Â1:÷1;228n2<§2tä2#Y3!}3XŸ3Eø3S>4A’4…Ô4/Z5ŽŠ5z6C”6BØ6g7Zƒ7bÞ7DA8°†8L79c„9nè9ƒW:MÛ:›);BÅ;@<SI<V<Uô<EJ=G=aØ=Q:>hŒ>\õ>KR?cž?N@YQ@E«@Bñ@}4A²AQÃA=B>SB$’B*·B$âB&C‚.C^±C^DgoDb×Df:Eb¡EdFWiF1ÁF^óF`RG(³G4ÜG<H€NH\ÏH`,IiI*÷I*"J<MJTŠJVßJG6KZ~KJÙKi$L=ŽL2ÌL˜ÿL+˜M/ÄM(ôM#NhANcªNO"O;8O"tO3—O4ËOkPGlP0´PeåPtKQdÀQM%R4sRZ¨R!S2%SZXST³SKT/TTN„TBÓTFU@]UZžUTùUNNV8V1ÖV9W.BW2qW.¤W?ÓWXb$Xb‡XeêX]PY®YiÂYP,Zg}Z<åZd"[x‡[j\Hk\´\OÔ\F$].k]Óš]Fn^µ^#Ñ^õ^g_pw_!è_H `BS`>–`CÕ`_a]yaa×a;9bHub+¾bOêbT:cFcŽÖcqedG×de4=e…reKøeIDf/Žf=¾f=üf6:g%qg{—g]hqh+Šh$¶h?Ûh-i\Ii@¦iªçi‡’j·kÒkhåkiNlr¸lR+mR~m•ÑmJgna²n;o8PoR‰o†ÜoScp?·p6÷p+.q)Zq'„q¬qÊqˆêq?srK³r*ÿrc*sŽs7•s*Ís6øsX/tMˆt(Öt&ÿt`&u|‡u-v)2v)\v+†v*²v(Ývˆw ww¯w”ÀwGUx|xYy˜ty[ z{izPåz®6{¥å{ ‹|a,}ÑŽ}Q`~+²~6Þ~4(J`s*ÔHÿsH€ö¼€¥³ Y‚¡ú‚aœƒ;þƒ.:„;i„¥„Ąބ=÷„T5…XŠ…ˆã….l†Ä›†K`‡–¬‡šCˆ-Þˆt ‰5‰D·‰Qü‰GNŠD–Š_ÛŠ‘;‹‡Í‹ºUŒ,=RkƒXŸHøcAŽ#¥Ž“ÉŽF]ͤOrmÂL0‘M}‘PË‘„’D¡’Iæ’‚0“4³“Hè“m1”LŸ”Dì”@1•4r•k§•J–^–=z–Z¸–Y—mm—8Û—&˜/;˜3k˜AŸ˜)á˜\ ™Uh™C¾™@šACš …šM&›St›SÈ›7œyTœΜIìœ?6{vxò?kž@«žAìž5.ŸSdŸp¸ŸZ) U„ cÚ e>¡9¤¡?Þ¡^¢`}¢AÞ¢ £37£Ik£Iµ£$ÿ£Z$¤U¤GÕ¤>¥<\¥z™¥¦*0¦1[¦™¦I'§Bq§O´§|¨7¨h¹¨>"©Ba©B¤©>穇&ª=®ª;ìªM(«Xv«MÏ«D¬Sb¬4¶¬8ë¬4$­7Y­>‘­;Э' ®94®:n®F©®ˆð®&y¯] ¯zþ¯y°*–°Á°Jß°C*±Vn±Hűz²A‰²G˲]³1q³J£³Nî³&=´3d´L˜´Få´?,µDlµr±µ[$¶b€¶\ã¶b@·B£·5æ·Š¸F§¸;î¸I*¹=t¹K²¹\þ¹=[ºN™ºcèºAL»9Ž»‡È»P¼dá¼4F½E{½ƒÁ½UE¾4›¾Vоc'¿w‹¿3Àt7À]¬À| Á:‡ÁjÂÁo-ÂQ¦ïÂG–Ã<ÞÃ7ÄzSÄÎăNÅÒÅNRÆX¡ÆOúÆ>JÇL‰Ç6ÖÇ4 
È0BÈFsȺÈ7ØÈUÉWfÉx¾Ée7ÊxÊIËy`Ë)ÚË0ÌI5ÌKÌBËÌlÍF{ÍMÂÍJÎO[ÎM«Î9ùÎ;3Ï@oÏr°Ïw#Ð@›ÐwÜÐgTÑi¼Ñ&&Ò7MÒ[…Ò>áÒT Ó>uÓ:´ÓKïÓF;ÔS‚Ô9ÖÔFÕJWÕ1¢Õ4ÔÕU Ö1_ÖK‘Ö2ÝÖ1×QB×I”×Þ×RlØl¿Ø-,Ù3ZÙAŽÙ[ÐÙU,Úa‚Ú6äÚDÛW`ÛX¸Û-ÜG?ÜC‡Ü^ËÜ^*Ý¡‰Ý#+Þ[OÞ=«Þ}éÞ…gßBíß>0àBoàB²àZõà+PáP|áeÍáN3âU‚âCØâ?ã)\ã$†ã?«ã8ëãY$äA~ä:ÀäXûä@Tål•åYæe\æsÂæB6ç6yç9°ç·êçb¢è3é{9é–µé.Lêf{ê…âêfhëCÏëUìdiìÂÎìD‘íFÖí1îNOîJžîSéî>=ï>|ïU»ïVðOhð^¸ðPñ>hñW§ñ(ÿñ*(òDSòH˜òLáòO.ók~ó>êóX)ô3‚ô@¶ôi÷ôXaõ¦ºõ²aöA÷6V÷@÷<Î÷H øATøO–øWæøO>ùQŽùpàù@QúH’ú{Ûú[WûN³ûbü^eürÄüX7ý7ý<Èý5þ,;þLhþZµþŠÿ>›ÿQÚÿT,\FÞG%Em?³Qó=Elƒ>ði/K™8å.ZMK¨OôCDDˆAÍ06@6wS®Q/T/„;´fðjW>Âf Xh YÁ = >Y W˜ ^ð cO M³ O [Q ?­ Cí ;1 lm <Ú ;?Sj“Gþ˜FAß,!2NQJÓŠX©#,&ZS5®BäU'G}*ÅDðH5a~gà+HEtDºNÿ=NjŒS÷CKKdÛk@A¬Iî=81vD¨Eíd3j˜2-6ŠdWïUG2=Ð),8Ae=§\å›BWÞ16/hL˜[å_A F¡ Hè V1!¨ˆ!81"-j"|˜"Z#^p#HÏ#9$<R$$T %Au%X·%0&JA&?Œ&5Ì&L'KO'R›'Uî'lD(s±(L%)tr)aç)II*z“*I+\X+Sµ+Œ ,@–,,×,>-ZC->ž-FÝ-3$.,X./….2µ.2è.(/9D/J~/JÉ/c0 x0"™01¼0î0I1ÂÉ1ÆŒ2”S3”è3˜}4X5ho57Ø5?6>P6I6vÙ6NP7IŸ7Ié7D38cx8UÜ8N29X9+Ú9=:AD:U†:QÜ:S.;|‚;<ÿ;H<<N…<BÔ<I=1a=-“=IÁ=G >,S>(€>B©>;ì>7(?Ž`?ï?1@12@2d@8—@xÐ@ƒIA­ÍA[{B'×B‚ÿB(‚CO«CVûC]RDg°D4ENME@œE$ÝE!Fj$FÍF]Gl{GNèG)7HDaHB¦HqéHs[I}ÏI~MJ]ÌJO*K2zKE­K%óKKLweLxÝL´VM  Ni¬NCOCZOTžO!óO£P-¹P\çP~DQYÃQLRYjRWÄR5S@RS“S›!T’½TPU@pU±U,ÇU.ôU.#V-RVE€V-ÆV\ôV!QW+sW†ŸW^&XH…X?ÎXAY;PY€ŒY{ Z.‰Zz¸Z„3[(¸[á[$ù[¾\™Ý\/w]§]¯]È]?â]+"^?N^-Ž^M¼^/ _?:_'z_‡¢_"*`TM`H¢`!ë`; aIa:Pa/‹a;»aF÷a>b\b|b¥šbP@dƒ‘d ep¶ef'f}Žf{ gsˆgMügJhxèhSaijµiV j@wj8¸j>ñjB0k/sk£kÃk;Ük‰l‰¢lŠ,m?·mP÷m1HnRzn1Ín7ÿn:7o#ro*–oHÁo$ pR/p4‚p/·p"çp5 ql@q*­q1Øq" r-r CriQr1»rRír%@sVfs½sHÕs/t2NtItKËtuuwu+v1vFJvF‘v3Øv. wÞ;w©x½Äx®‚yr1zz¤zÝ{ ý{P|6o|;¦|sâ|8V}‚}<~4O~B„~Ç~&à~Ç’ç3z€D®€$ó€>HW$ <Å)‚5,‚Db‚8§‚8à‚#ƒ=ƒZYƒV´ƒQ „›]„)ù„=#…Da…@¦…Nç…,6†*c†\ކCë†/‡RO‡o¢‡Zˆ>mˆ6¬ˆ!㈉‚"‰l¥‰‚Š5•Š2ËŠBþŠqA‹?³‹Vó‹sJŒ¾Œ™Nƒè€lŽíŽE‘K{Ý/Y4‰'¾zæIa‘3«‘Vß‘6’cG’€«’e,“y’“P ”S]”Z±”F •bS•W¶•@–iO–^¹–'—c@—=¤—;â—A˜H`˜k©˜H™k^™?Ê™3 š*>š!iši‹š3õš3)›)]›"‡›aª›: œ:Gœ=‚œSÀœ:GO(—ÀÓsêQ^ž°ž;ÁžzýžQxŸ:ÊŸR ÌX =%¡c¡6|¡3³¡Fç¡<.¢Bk¢U®¢K£4P£g…£Fí£S4¤,ˆ¤`µ¤v¥O¥.Ý¥# ¦-0¦+^¦&Ц0±¦Zâ¦m=§w«§"#¨F¨)]¨0‡¨¸¨Ö¨(ó¨%©0B©Gs©%»©-á©<ª3LªF€ªHǪ7«=H«/†«S¶«G ¬:R¬¬$ ¬Ŭ3׬Q ­*]­<ˆ­<Å­A®=D®,‚®u¯®r%¯*˜¯LïY°Gj°I²°Aü°U>±K”±2à±T²$h²)²3·²ë² ³"%³%H³§n³—´º®´¯iµµ¶¸Ï¶„ˆ· ¸‡¸‡¹”¹Ð2º¹»€½»}>¼r¼¼‘/½ˆÁ½`J¾Œ«¾”8¿…Í¿oSÀÆÃÀ…ŠÁ¹ÂÀÊÂċÿPÄÅÉ’Å…\ÆÝâÆ{ÀÇå<È·"É¿ÚÉÆšÊÇaËM)Ì<wÌ´Ì;ÏÌ- Í59Í4oÍ?¤Í,äÍ>ÎFPÎ4—Î-ÌÎDúΈ?Ï,ÈÏ9õÏJ/Ð}zÐ\øÐWUÑ´­ÑMbÒD°ÒuõÒpkÓSÜÓO0ÔN€Ô3ÏÔ'ÕC+ÕZoÕ@ÊÕ? ÖqKÖE½Ö`×qd×<Ö×WØDkØ;°Ø;ìØ?(Ù7hÙ9 Ù,ÚÙEÚEMÚ^“ÚôòÚ=çÛx%ÜDžÜ>ãÜQ"ÝõtÝjމߞàL¯á7üáw4âK¬â?øâP8ã=‰ã.ÇãwöãNnäy½äk7åC£åVçå2>æUqæ!Çæ$éæ<çvKçRÂçè_*葊è^é0{é4¬éEáék'ê)“ê4½êòê ëp땆ë¶ì!Óìõì~íK”íˆàíTiî’¾îYQïi«ï&ðx<ð5µð;ëðK'ñsñ>Œñ;Ëñ/ò97òLqò0¾ò)ïò—ó7±óRéóV<ôP“ôpäôMUõF£õVêõzAö5¼öTòö–G÷NÞ÷g-ø&•øG¼øùOùhhù_Ñù)1ú>[ú/šúaÊúK,û{xûIôû>üQPü ¢üXÃüZýMwýIÅýcþRsþRÆþzÿ~”ÿWUk_Á6!8Xe‘#÷QMm»»TwGÌATVT«“4”MÉDZ\†·í>?,`lAÍ/ X? 
N˜ ªç (’ .» 1ê j ‡ -£ $Ñ Mö \D k¡ W +e Q‘-ãL ^D|Ä-A+o)›6Å"ü0;P9ŒKÆR&exŒE3KŽ^emUÓ*)VTa«% ˆ3¼9Òp …}1O5O…WÕ--[[.·oæoV'Æ1î% F<f2£Ö³èlœv €”°Êà!÷/7gEw#½BáT$ y O– 0æ 2!4J!>!k¾!*"•¸"®N#ý#}$˜$4±$0æ$2%\J%'§%JÏ%V&Bq&b´&r'Š'`™'ú'U(Ql(i¾(()rF)C¹)aý)g_*UÇ*%+:C+#~+u¢+,+-,/Y, ‰,8–,3Ï,5-49-!n--¯-4Ï-!.l&.N“.\â.%?/Ke/<±/¯î/Vž0Nõ0hD1O­1xý1v2`’2&ó253>P3L3cÜ3@40^4}4@ 5ON50ž5GÏ526J6Y6kp6dÜ6`A7g¢7; 8;F8M‚8MÐ8‡9Q¦9Iø9IB:IŒ:\Ö:W3;;‹;pÇ;8<­¸<±f=6>kO>»>,×>!?#&?JJ?!•?0·?oè?JX@^£@AAJDAVA[æAfBBt©BTC$sCj˜C>DµBDMøDãFE1*F=\F¡šFpWMxW-ÆW-ôW1"X5TXŠXP XŠñX.|Yƒ«Y*/Z~ZZÙZgöZ$^[6ƒ[\º[6\^N\N­\aü\Y^]¸]KØ]$^@^a^^OÀ^Q_Mb_]°_`-`nC`M²`-ak.a@šanÛa+JbCvbPºbF c]Rcg°cSd2ldŸdÍeuíe\cf&ÀfCçf:+g7fgBžgqághSh{¼hH8iyipûi‹ljPøjIk3ckJ—kOâk92lll9‰lIÃlP m^m2omì¢mbnaònQTo=¦o=äo8"p-[p6‰pLÀpJ qEXqržq*rPiƒ2¨ƒCÛƒO„ºo„%*…7P…'ˆ…(°…Ù…/è…4†5M†ƒ†Z–†Gñ†K9‡l…‡nò‡taˆoÖˆvF‰q½‰s/ŠI£ŠUíŠrC‹m¶‹ $ŒHEŒ*ŽŒ¹Œ-ÙŒ^##‚'¦!Î;ð8,ŽeŽ{óŽ5o7¥!Ý1ÿX1/Škºz&‘4¡‘IÖ‘U ’Èv’"?“Lb“N¯“)þ“/(”<X”Z•”Tð”7E•2}•°•EÌ•E–5X– Ž–eœ–P—)S—™}—C˜?[˜9›˜žÕ˜It™J¾™: šLDš^‘š7ðšr(›››)œ_ºœq-Œ;º;öŠ2žR½ž/ŸB@ŸWƒŸ8ÛŸ2 }G Å ?Ú S¡Jn¡{¹¡5¢5P¢†¢œ¢¸¢WÒ¢*£WD£Iœ£'æ£B¤EQ¤H—¤Aà¤."¥<Q¥玥]v¦RÔ¦7'§2_§j’§ý§U‹¨Aá¨A#©,e©A’©AÔ©Dª,[ª<ˆª2ŪOøªIH«+’«$¾«,ã«F¬9W¬k‘¬/ý¬-­4I­&~­¥­6¸­$ï­<®@Q®G’®#Ú®Xþ®©W¯^°S`°´°8ϰS±d\±tÁ±S6²RвJݲF(³To³HijR ´H`´X©´Kµ]Nµ[¬µ2¶N;¶Dж7϶1·09·8j·G£·Wë·C¸:ѸH ¹HU¹Kž¹–ê¹,ºj®ºLÐ:fÐ'¡Ð1ÉгûÐæ¯Ñ¦–Ó±=ÔáïÔRÑÕP$ÖLuÖ.ÂÖ'ñÖN×3h×fœ×™Ø‘Ø™/ÙÉÙ.âÙÚk“Ú-ÿÚ€-Ûk®ÛÜ^¨Ü݇%Ý…­Ýa3Þo•Þ0ßW6ß`Žß]ïßMMà)›àiÅà:/ájá]~áµÜá¶’âÂIãÿ ä¥ å…²åx8æ™±æsKç ¿ç…Ëè°Qéxê^{ê£ÚêŠ~ëÔ ìÞìYoí^Éíq(î…šîJ ïNkï\ºïXðcpðiÔð©>ñkèñmTòfÂòu)órŸónôqô¼óô<°õ=íõž+öÊöMZ÷M¨÷žö÷a•øa÷øYYù‡³ùÏ;ú< ûSHûœû<-ü›jümý`tý^Õý4þ¿Äþm„ÿkòÿw^wÖ¦N9õ_/pÕ'Ödþ©c£ r±M$gr:ÚOOe@µö@ 8P ˆ‰ f …y ÿ  z¤  8 <Q mŽ àü HÝ?&KfO²'œ*[ÇM#UqvÇ®>hí:VB‘EÔGŠbí<mwªz"LLêH7T€dÕ{:?¶[öFRN™_è€H°ÉwzòO C\E Oæ,6(c=Œ0Ê$û J>‰U¨þG;f]¢A ,B ]o 0Í -þ 7,!wd!sÜ!dP"Yµ"™#O©#)ù#'#$ K$uV$ƒÌ$•P%ræ%ZY&´&•¸&N'9Î'/(M8(6†(½((Å(±î(Ÿ )„@*-Å*|ó*5p+z¦+m!,‹,^-=z-W¸-4.nE.n´.H#/Zl/XÇ/D 0ce0wÉ0SA1;•1™Ñ1Vk2CÂ2b3£i3h 4fv4TÝ4B25>u5M´5F69I6 ƒ6$7(´7±Ý8³9£C:lç:\T;|±;–.<)Å<žï<™Ž=}(>a¦>è?.ñ?1 @`R@i³@„AŸ¢AmBBW°BLCCUC‘™Cz+D£¦DˆJE‹ÓE‡_FwçFi_GLÉG:H7QH€‰H_ I%jII9ªIRäI!7JTYJ5®JBäJw'KFŸKEæKB,LSoLWÃLLMNhM…·MN=NOŒNJÜN{'OD£ODèO†-P_´POQ1dQ#–QDºQlÿQElR5²R5èR'S*FSPqS?ÂSCTaFT%¨TFÎT6U:LU3‡UJ»U=VVDVÁ›V_]WY½W;XgSX^»X;YPVYƧYEnZ¢´ZW[2u[Z¨[6\=:\%x\#ž\MÂ\`]q]F]UÔ]D*^5o^)¥^VÏ^…&_h¬_ö`c a:pak«aWbcobWÓbS+cEcLÅc„d{—dpe)„e<®e7ëe@#f;df\ fAýf?g=_g^g"ügah0h<²h6ïhA&i1hiAši/ÜiO jY\j$¶j?ÛjLk;hk<¤k(ák> lKIl7•l7ÍlNmaTm%¶m2Üm,n4vgËv¬3wyàw€Zx%Ûx+yk-yg™ygz*izI”z£ÞzÉ‚{4L|'|F©|Yð|eJ}%°}qÖ}£H~¥ì~Ê’–]€:ô›/‚®Ë‚³zƒW.„†„ý…O†¾V†š‡µ°‡YfˆˆÀˆQI‰9›‰wÕ‰qMŠ¿ŠÛŠZøŠaS‹jµ‹N ŒKoŒ?»Œ¬ûŒK¨côTXŽ.­Ž=ÜŽ–± B$cˆ<¦=ãU!‘ew‘Ý‘"ý‘ ’¢7’sÚ’iN“›¸“cT”l¸”“%•й•uD–{º–Ž6—fÅ—*,˜JW˜U¢˜lø˜me™­Ó™­šq/›¡›[#œ@œmÀœ¦.‘Õ›gžyŸZ}ŸIØŸO" Jr ^½ $¡<A¡?~¡a¾¡? 
¢ `¢)¢N«¢Aú¢Z<£`—£Cø£F<¤\ƒ¤2à¤O¥%c¥C‰¥.Í¥Kü¥PH¦K™¦@å¦1&§@X§2™§>̧E ¨7Q¨I‰¨9Ó¨D ©eR©G¸©!ªE"ª-hª,–ªbê^&«]…«bã«^F¬]¥¬.­F2­Py­'Ê­Jò­L=®<Š®,Ç®%ô®¯:1¯l¯m¯¬í°&š±aÁ±E#²2i²Iœ²Iæ²H0³my³Oç³W7´{´\ µ]hµ_Ƶ\&¶cƒ¶bç¶XJ·[£·Wÿ·\W¸W´¸S ¹J`¹]«¹i ºlsºVàºÅ7»Jý»EH¼X޼=ç¼=%½Sc½…·½=¾L¾"_¾(‚¾«¾6¾¾'õ¾¿:¿P¿o¿&¿&¨¿Ï¿ã¿Rì¿?ÀCVÀ3šÀ2ÎÀÁÁ&Á))ÁzSÁ1ÎÁ%Â^&ÂK…Â,ÑÂ:þÂ*9Ã-dÃ;’Ã,ÎÃGûÃYCÄIÄ!çÄR ÅW\Å$´Å2ÙÅõ Æ ËF Ë+SËyËrùË_lÌaÌÌ.Í"HÍkÍ4ŠÍ@¿Í@ÎAÎ]Î1tÎ.¦Î"ÕÎøÎRÏ0ZÏ/‹ÏL»ÏÐÐ7ÐFÐUÐ?dÐO¤ÐôÐ6÷Ð}.Ñ-¬ÑXÚÑ;3Ò—oÒ†ÓjŽÓ‘ùÓ‹ÖxœÖ×&×?×-_×:×~È×GGØ_Ø4ïØ $Ù:/ÙQjÙ-¼Ù-êÙKÚdÚ mÚŽÚ/ªÚ1ÚÚ ÛÛ ,Û}9Ûe·Û*ÜOHÜ ˜Ü4¥Ü6ÚÜÝ'$Ý!LÝEnÝ%´Ý%ÚÝ*ÞJ+ÞvÞ–ÞJ´Þ^ÿÞ]^ß(¼ßaåßGàscà?×à5áDMá=’áLÐá_âK}â7Éâ ã!"ã)Dã^nãlÍã:äEWä;äMÙäm'åL•å7âåHæcæ{æj–æNçPçhçç™ç°çÉçáçøçè)èCGèv‹èaé!dé†êïIë.9í&hííªí­íIpðJºð?ñEñ¢Kñ7îñ:&òBaò=¤òDâòF'ó!nó-ó2¾ó$ñóôôs õ õŠõ'õ Åõ<Óõ:ö/Kö7{ö³ö!ÅöSçöW;÷G“÷Û÷)ê÷Søqhø4ÚøaùPqù/Âù<òù2/ú?búK¢ú,îú`ûa|ûmÞû\LüL©üIöü=@ý~ý’ý¦ýºýW×ý"/þRþiqþÛþòþO ÿ[ÿoÿ$ÿ²ÿ ÅÿJæÿ!1 S U \ :k ^¦ W [] ^¹ ’ d« … – ?¡ á xñ Sj m¾ ÿ, A, )n )˜ R  $ &2 £Y Aý ,? ?l ¬ mµ =# {a ™Ý /w ‰§ Ž1 ÆÀ ᇠ%i „ L Za h¼ 7% ;] 3™ 3Í M RO f¢ h  wr ^ê 0I bz 3Ý ª y¼ 6 M a v ‹ g / :5 .p ?Ÿ aß 7A y $˜ ½ Ý aû $] (‚ /« Û ,ö e# u‰ ÿ 9 HQ „š  6 aR ƒ´ )8 Œb Qï —A WÙ 1 ’Ï Áb! $" 93" ™m" ;# *C# n# ›‚# Ø$ Ã÷$ ê»% :¦& =á& |' œ' Œ:( FÇ( 8) PG) S˜) Eì) ^2* ?‘* WÑ* )+ ‚>+ ZÁ+ v, “, %¤, Ê, €ß, `- ;n- 5ª- à- (ò- 4. 3P. 5„. Jº. @/ @F/ 3‡/ ?»/ û/ Q0 f0 "}0 9 0 ;Ú0 ;1 CR1 –1 ©1 ¼1 FÙ1 ; 2 \2 J}2 È2 €Õ2 oV3 cÆ3 =*4 ]h4 XÆ4 n6 xŽ6 L7 §T7 Jü7 G8 9N8 Eˆ8 IÎ8 9 79 R9 c9 t9 x9 <†9 UÃ9 =: ÏW; ž'< äÆ< Œ«= E8> ~> Ä? á? læ? TSF (¨F ÑF -êF 7G CPG :”G QÏG +!H +MH 'yH ¡H -£H ]ÑH P/I Î Æ Ô 3Sg ïw—E–î |û‘7 L9¢ F IC 9Ãu BPïàÍp #ˆW Þ©  ²WßsA¼ œ&f‹# à ?ó]<•| ôD ˜? $ \¯äï O ƒ¸  ÿÌ ù A – h ¶Ï É MtÎå®È +–Y ýÿ?¾ p”e2 õÏ‘ fo H ÿ ™§GÛë  ó ª tBJ±Áv 7¯: ý皦‰'Ù £ iˆ ÿ~b= ;0¢óÒ ´ 9Ž" Q . ‹ êK š p  ¥÷’€G | Ó ¹ µú µ62 눫 % - $ªÂy uk$åœR£EÎ Zã  Šó »äA ÅtÀ ”I# ôL\ q.†äOÐt8  È @ ]õ4 Ï  Ê< aÜ ‡ ÷ àréD !¸ ¬Ç /j { R µ *Å  ½ Ä J.„¦  å’È Å ÿ*vÚ3Mf >"å£ •«b › iÛÇK ‹á › ¨†® ´’@T Ö+Ì}0ÕH5Ç‘‘Y ïî –$g Ñ W í ’ aþ `Vº'?,É ò 0 ›Ì¡. €e™È£F± ü2ÆÏ ¦|LáeHŠ‚GW}Ÿ H b Å×Óó6• bí¾Ùk ± ³ ºs+£¹Å¾õ ²€_ w ufáè( *¤Q7 ( 7- é¶Ý ï 0x CŸ»§  + ' ö ® Ž ¦ àm © N dȹ  .4 ªo3#ÙÓµ n=¶ ÁG6“ Ïæ :Ý U°©1 +” ^ \ ×3û Û ž »?Å cy‹„d  Í N g´f¼‚á# ¯û` ¹}  ^ì÷ ˜ Ù · ‰º`-gú ÅTê ö_BÓZ² Ø +] ²iÔóœ/1Ž v ž8^zO5•; ˆr ;† ·­-è cm Ûñ lj¸   Y° e ó  Ë yyœ ¡ ¢ Í ¹3 ðñ €à¤þ x Fé 6 Öx8æ<u&^ " ·&bJ ;$Ù' áERÉ!+ + ãÃv˜ ˜{±õ ‡¿ý+Jø ¤ÇÊ;Š Ô1XÏT r—Ï ›W û  N DP° ¿8 } é[ ×Á  \¨-ÄSo>·s OÖ Vù œ1} ÎÖ? B xa œî¹ «¸ Dö òÁ  ²^ ô ]ÆŸ É¿V - (qKúÀq ·›Ô §± &¸ ± ‘ ˜Ä U Zrð¿0: ùèH¥²šÕ  &) Ä—/Ì~Ld mY£åþ ¦ :t ËÙ†¾!íKô L· . â de Û˜Ý ‰! Qò.9!YOŠZ ² Ž } ß h²"ªE eIsùŒ_ˆª )u à7 ¶u  û"@3 ® 5ßD …¸n ×kÛ 9 9æÎS x ˆ •Æ× ~ &ù˜Yc Ü\ã 3  ý Ì ÅúÂX ¨íOÍn: Í çâ áø‡ìV14 C ߈ ,=i8úœ ¬Ýª il È Aß6  8 ¯ r „ ÝÝ|î… úÉ ßcÁ ð¡þ * #' Ø á Ùƒ ˆH7 â „ç ¿æÜ{Âìa ÔýÕ… NU ¸æ @•"0» w†Üþ—§ Ùtì À  ÓÞý Ö ŽI¯Êê“ Û âe ( n‘ ’ Ó#÷ â@ 4S N c uƒ 4 Òìw;­ ÷ lb§¢ ã Žˆõm•)°[¬ è i ;¾@§` & G'ßÈ $Mý ~mFr pQSË~ U ‚ (ÈV W À/s ÖÌB=¶™., À*ë ] €t†‡ù<ê) õ °O/O ½Øk€ n ®0£PÞ îùq¢ yuòÊÐVï!  
Ÿž , Tg•›ü ð|ç)k±è g%ïR ‘zs­ ø {¸ { ™¤¸–° Q 7˜¥÷Ð © y&J‘u“õüÓ‡ Þ=±m b 2 ÏÜöR Kð“ÈœÝ óI V o V" hf a3å7ÿ @rI| ʬ$ œ ¢À ÓÎ ê†ÌÄ y4, —ÞÕ &ç 1ðÍ † × )í 6c C' ’ K.Ä:L {‰ !õNάœ/ Ë ‡› d}¤> ,P½ñ ¼ ¬K×zÖ øÚB' E*• ú7 Uáh  <¤ d?C €< hSôw$1 ¡L3n õÔf Æ ÙÄ…êè kÌ„£ÀNó.E ¸¼Ø5 Áf»m/û: + ÂK6 ‹Ü:< »   n R©™² ú« 'ÜŒ®QX" R²b jf Ç Z x )Yâø c€£ îÝÛ 9 ãœJG ð ý®$¥ø'¹ÿ;r¬Ì ; ŸV [‘ æôÙ :Ÿ9 µ l ÞàèŽ]ÐÛä  ÷ [  m ¨s S ’ê ¸qà ¥ Ò h˜ô| ¥Œý ñÒ  kàÊ ~ íAÜ» ‘¿Úæ î ‰ Dí n Ê * â]lÆ ÊL ^ à >kØ î (eÚ~‘cZ Ä “zª ¸Ršr ¢ Dçö Ò 2Rê E ñóÚ Š:C Õ  :” Œä® Öv ÏLxµ 0e¨ GÊ § »6º G "8©nöfU¦ jñ‘ ÷Õ‚> 2jùF È”ØBk ¿ÏU 8Š ) æ ·Vö ùŒÄM  Yáä 9 ®òËÇñH[Ñ û45é¸ÇÜ<à í:o‹›Ü š RDeë [ |° ¿©3 >Pê =Ð QÉZ Ë ´W  @_¯ é ßþ? ¦  öQ Ƚ[_žØ ¢hòŽH  — Q7 ǃ¡ @ :ÂÁP  i=m,Í´ { i Î x 1aì vb ¬ æÑ ‚] % À ¡W*p Ô&_òýDÿ Ånµ“ Sz·tË î-´Á ¬ Õ× Ù Æ; ,Q ;>ß âä‡ ì1cO ¨ F  U$ ä¦Ð‡Ø ÂWÊ Ø !?Žu º2ª•ê ƒL 6 B ž ã _ JA  N¢M,¯ü4Xð /ÃÒjt*,Cw’…P€1 yµÚ ta 8¾“«i »žñ ¹J’ žƒò àÉ–÷ûCuÛ ! ‚ @AÝ ì Ç  k ì É ÎƒvÇ z ýå ¼Ã6 ™ ‰s ½( §N¬ Ö ü ºy   b‹â‰ M ¦Œ |4H #x¾«I Ø«U®Æ 4H¨±Þn§ê×wrIM Õ%06 4 ` ë& ´?\Õ 2 ¼Ä»wÈ(ô¤ ¥!£øÆpî~ ¶ žûµ ü¯ ÷n "©˜ ŽY ¯M é^-칡á^ ’ÑÖïjŸJ©z u µ| é Œ Ï¿ ™ r Ð Ã F–? – > K ¯ ’aí¯eƒ»â> ZÜë “C ßqz j 2º Î gË gt ¾ h‚k Oô, Õš Ù ZÚ!\ ¶ ä ·øz¢ú ”• 㣪¢ ï?¥ ©¶-¡T è: X$¤ Õ=Š Þ ™)¢g2THÅtðÞ j âÉ ”Œ Ÿ™ …òZ¹š8Ø7 m +› o ™ >Ú Ì%º Ö ¤îãÞ Ad”–^h · û ¶ öv4 | Ð]Ée è­ÚV% ž 9,G ã (Pßå'……¨Á§8÷v¦bÊB»ŠZ m / 5 Ì ý¦¯wþsà„ ±è Ü Ä_± ²€ Xjà  ´¸˜ ¿²o_x› ç ?! û^ÅZ 2O‚• €%‚ t ‘“ “  gÝ ­<³"kV Šñ « aÑS, 0‘ K i qüÇ{Ý͵™> ½ OWf’ƒ y [«6dœ%äTå BV1Ì . k ~õ„çÄ ƒú ®¤l­£lo%Úß   £9†y —w yö @×³ß ÜÓ p Kq… þò"à ›a*$"\3qµ È5¨ D ŸÆx‡UýÓ ù º† ¸} \Uò ˆf °] p¯ÇŒ J ñê7ášÂ2 HJ ¼ <<p CÔ Š±5 ¦P¼  bzM< S¶… _ 0R)®K ãïŠôÀ²c ¼  ’!F> Ds­í­ #ʳP ï…Ôh _XÄè'Ô° J ’  ‡Zm’ z— E.ê; ` ´ Ÿ aê 3Ù7 1  ò ¬rI ì· ó ´ncŒC ‚{ Eç~ƒ D[ - nð Á ¥ `ÌT¶”žp¤ h #Œ ×U«³~vú­šj#‹ L ,š ½ Î] {€¹LK™ ¬ Š oƒŒõÚ»Û­æ® AR ª Óx Ô-…ø ?‚ô  Žþ "}‹ Šª½]ph•`Í  ü};"élÔ ³  ¡ ºST ”øý—/“¹ü  ÙûCðG˳ºlÊ>À Æ€ XX K ¿ l~u Wvé SÆ èí ¬TÒqYß (· uE0®f °ˆ .G¾  Ð ¤ ñá ¢– ª$=ædÜ Ç ² ÿ É 1 Ò `AŸ 5 ë‚ „`sšS¨ ÑÑ5EC¾ˆ°iÏKt -Þ¿ ÈAÉuêòw Úî8* 4z i^÷ÃÔ9 W|³× ŽJU/>I¾ Ÿ â 9Á ÍÙ — T« " œj& ‰Õ ^ .j ¬¨ ï À­y ý ½ç`º Ì uÛû  w ,ÿæ}R Šlºç 2K¥ú Þà Ò« ‘ eMì~/¾MIÎûþ˜ Å v –l [ô,f­Ââ – á Y ž‹%~_ —8'rÒµ S Ø! ¼­  œ †  é‰d  • ž& Ôåq Y pÖ‹° JF³hŽ “„ pêÀö ç0 ‰[o= ”’ òš9®¡ ¼o |gML¯ÇᆠÁ€ ƒ ¡… &j :é |¼Æ\ìo´ Ëùù   6üúoÖ * F †ÑTâ Òü* ѽ—q ­ § ÿ%ý†Œ‰ Œ*sFd o ¢q FZ‡Õ ª¾ªø‚ù š“Å ^ ïs H Qv  d%6 ‰ G öbbÍ[+Ñ \@ Ø@Ƨ640  :§‚» )éUåšþp ›jÛü X å  5].ÐÓ¨ 3 J }bð r… a7  ¤æ; ¥Ë #. í/ e % ™Ï C/ (·p«ä 3 Ú 5‹ è´ E Ø( <- _ mä% Pø<ÐUÝ›ZµÐ`R½ÁÑ Â ‹û ¾l zÁylrúëø yP `+$O Yø] Ú¦ N qéž[ ×ÏM¾ì¤ ã F =­þ® i É Ë n>4òP æº  ï ÚÀ%X PÙ ø¡ oŸ5 d ãé Q B ù™ íóI gm 7³ pM 9{rkÖ a ÓHã“ N 6b èß/ † W>m˜ =f\@ «ÑI å ›±Ëádã4 AA  g ÿÕƒ ÄmN¡Q„ñ å ?¨ B”€ x   Y&; %_%SY ‡ ´+ q °Tz)!¢ô¯ » ³ ë ~ y ³x•þ Ð÷c˜1ý2ºÀ¥ ¶ e ´AG#ð³6 ö'ú½™ F ¶‡²îÇ@¶½ I\ ÉÏ_N¥ËxÁóæŸ8g þÑüjó£Á¢Cöƒ Þwõ÷9µIYè =·Ø .§ B Ë 1œ è~< Tgà ×µ\ i ]õ§  ¬  ˜þB“R´w Œ*( ¼{ š õ d‚   ùç D šì?¹) Q5 Ûþq {޼¹ ,á< CÄËE w ÿAÜ $ Î>vŸ« ` Ô !ø ˜ÃÊ vúŠ Þ sió X B /½‰À ÉT õ ÞJ¦g QL ek VÝѾ I«å{2d  0 œ›Ð°W £ N f8¡Ts›E `¿ ozÛ„ Î ì XóGX¡ë Å ]Ò# ¶a§ ãÔõ÷̳ - (ü Ýʈí P t Ÿ a A j“M¤©Fü…Ú ‹ˆ V- ë´íÍQÿã Î2Ò)ð ¦ÐW–Í¥ ò 3 ä ô ¨ŠÈ ƒ# — NÿÆ1 -B( @©¯ï öéñŒ¥ô¦0A à© –ßk ‡Â^ + ¬z ¿l Ó ëÜU ÍÍûÝ' »O ÍÅ0 Ïn& }† &à ñGB # 'h9— Ž ‡Ñ” 7D‹ |ô 2 H*$)½ c—[) „[süî äö± v· žØÎÝÄ = — W ; h £ } É_РƱð‹E€ 7”5ÄL "    × : ë– ÖÑ*VåÕ‰c ñ? 
aÒ w… /}D âc<53 t '!á S hG ÷ H°™3” ‚ M }[É¥^EÌ­„ àÅ Ö= DñðäO¶ ¤8 {=¹+Oë(Õù 4-ªR ÞäP5X ç³ 1¼lÈ„ – ª” ë„ Ò ž ` ) LžÀÛ í Z; Ã{©NFŽ8  ¿ ¼ ¨\ë+ ^@ ÓÓ ×â °¿Xæ ¨² = ç© Ê¸\‡  ˆ¹ ©  •çºx$ li#c Òÿ ï·( %s Cache : %s Cache (read-only): %s Cache cleaning disabled Cache cleaning enabled Cache link dir : %s Control dir : %s Session root dir : %s default LRMS : %s default queue : %s default ttl : %u Run 'arcclean -s Undefined' to remove cleaned jobs from job list Run 'arcclean -s Undefined' to remove killed jobs from job list To recover missing jobs, run arcsync Use arcclean to remove non-existing jobs Use arcclean to remove retrieved jobs from job list Is executable: true Name: %s Sources.DelegationID: %s Sources.Options: %s = %s Sources: %s Targets.DelegationID: %s Targets.Options: %s = %s Targets: %s %s certificate dn: %s expiration time: %s issuer dn: %s serial number: %d %s: %s: %i %s: %s Delivery service: %s Delivery service: LOCAL Delivery slots: %u Emergency slots: %u Post-processor slots: %u Pre-processor slots: %u Prepared slots: %u Shares configuration: %s Status of endpoint (%s) is %s This endpoint (%s) is STARTED or SUCCESSFUL attributes: base dn: %s filter: %s unspecified: %i %s -> %s (%s) --- DRY RUN --- Access control: %s Annotation: %s Argument: %s Benchmark information: Computing Service Log Directory: %s Computing endpoint URL: %s Computing endpoint interface name: %s Computing endpoint requirements: Credential service: %s Delegation IDs: DelegationID element: %s End Time: %s Entry valid for: %s Entry valid from: %s Environment.name: %s Environment: %s Exit Code: %d Exit code for successful execution: %d Health state: %s ID on service: %s Inputfile element: Installed application environments: Job Error: %s Job does not require exclusive execution Job management URL: %s (%s) Job requires exclusive execution Job status URL: %s (%s) Mapping queue: %s Name: %s No exit code for successful execution specified. 
Node access: inbound Node access: inbound and outbound Node access: outbound Notify: Old activity ID: %s Old job IDs: Operating system requirements: Other Messages: %s Other attributes: [%s], %s Outputfile element: Owner: %s PostExecutable.Argument: %s PreExecutable.Argument: %s Processing start time: %s Proxy valid until: %s Queue: %s RemoteLogging (optional): %s (%s) RemoteLogging: %s (%s) Requested CPU Time: %s Requested Slots: %d Results must be retrieved before: %s Results were deleted: %s Run time environment requirements: Service information URL: %s (%s) Session directory URL: %s Specific state: %s Stagein directory URL: %s Stageout directory URL: %s State: %s Stderr: %s Stdin: %s Stdout: %s Submitted from: %s Submitted: %s Submitting client: %s Used CPU Time: %s Used CPU Time: %s (%s per slot) Used Memory: %d Used Wall Time: %s Used Wall Time: %s (%s per slot) Waiting Position: %d [ JobDescription tester ] [ Parsing the original text ] [ emies:adl ] [ nordugrid:xrsl ] $X509_VOMS_FILE, and $X509_VOMSES are not set; User has not specified the location for vomses information; There is also not vomses location information in user's configuration file; Can not find vomses in default locations: ~/.arc/vomses, ~/.voms/vomses, $ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/vomses, /etc/vomses, /etc/grid-security/vomses, and the location at the corresponding sub-directory%5u s: %10.1f kB %8.1f kB/s%d Batch Systems%d Endpoints%d Shares%d mapping policies%d of %d jobs were resubmitted%d of %d jobs were submitted%i retries left, will wait until %s before next attempt%li seconds since lock file %s was created%s%s %s%s %s could not be created.%s (%s)%s > %s => false%s > %s => false: %s contains non numbers in the version part.%s > %s => true%s class is not an object%s directory created%s directory exist! Skipping job.%s failed%s is an unsupported digest type%s is not a directory%s is not an object%s is not authorized to do action %s in resource %s%s is not authorized to do action %s in resource %s %s made persistent%s parsing error%s plugin "%s" not found.%s request failed%s request to %s failed with response: %s%s request to %s failed. Unexpected response: %s.%s version %s%s->%s%s. Cannot copy fileset%s. SQLite database error: %s%s:%s: %i%s: %s%s: %s: New job belongs to %i/%i%s: %s:%i%s: ACIX Location: %s%s: Adding new output file %s: %s%s: All %s %s successfully%s: Bring online request %s in SRM queue. 
Sleeping for %i seconds%s: Cache cleaning takes too long - %u.%06u seconds%s: Can't convert checksum %s to int for %s%s: Can't convert filesize %s to int for %s%s: Can't read list of input files%s: Can't rerun on request%s: Can't rerun on request - not a suitable state%s: Canceling job because of user request%s: Cancelling active DTRs%s: Cancelling other DTRs%s: Cannot upload two different files %s and %s to same LFN: %s%s: Checking user uploadable file: %s%s: Checksum %llu verified for %s%s: Critical error for uploadable file %s%s: DTR %s to copy file %s failed%s: DTR %s to copy to %s failed but is not mandatory%s: Delete request due to internal problems%s: Destination file %s was possibly left unfinished from previous A-REX run, will overwrite%s: Duplicate file in list of input files: %s%s: Error accessing file %s%s: Error reading file %s%s: Error reading user generated output file list in %s%s: Failed creating grami file%s: Failed obtaining local job information.%s: Failed obtaining lrms id%s: Failed parsing job request.%s: Failed reading .local and changing state, job and A-REX may be left in an inconsistent state%s: Failed reading job description: %s%s: Failed reading local information%s: Failed running cancellation process%s: Failed running submission process%s: Failed setting executable permissions%s: Failed storing failure reason: %s%s: Failed to cancel running job%s: Failed to clean up session dir%s: Failed to list output directory %s: %s%s: Failed to load evaluator for user policy %s: Failed to open file %s for reading%s: Failed to parse user policy%s: Failed to read dynamic output files in %s%s: Failed to read list of input files%s: Failed to read list of input files, can't clean up session dir%s: Failed to read list of output files%s: Failed to read list of output files, can't clean up session dir%s: Failed to read reprocessed list of input files%s: Failed to read reprocessed list of output files%s: Failed to receive job in DTR generator%s: Failed to switch user ID to %d/%d to read file %s%s: Failed to turn job into failed during cancel processing.%s: Failed to write back dynamic output files in %s%s: Failed to write list of input files%s: Failed to write list of output files%s: Failed to write list of output status files%s: Failed writing changed input file.%s: Failed writing list of output files: %s%s: Failed writing local information%s: Failed writing local information: %s%s: Failure creating data storage for child process%s: Failure creating slot for child process%s: Failure starting child process%s: Failure waiting for child process to finish%s: File %s has wrong checksum: %llu. Expected %lli%s: File request %s in SRM queue. Sleeping for %i seconds%s: Going through files in list %s%s: Invalid DTR%s: Invalid file: %s is too big.%s: Invalid size/checksum information (%s) for %s%s: Job cancel request from DTR generator to scheduler%s: Job cancellation takes too long, but diagnostic collection seems to be done. Pretending cancellation succeeded.%s: Job cancellation takes too long. Failing.%s: Job failed in unknown state. Won't rerun.%s: Job failure detected%s: Job finished%s: Job has completed already. 
No action taken to cancel%s: Job is ancient - delete rest of information%s: Job is not allowed to be rerun anymore%s: Job is requested to clean - deleting%s: Job is too old - deleting%s: Job monitoring counter is broken%s: Job monitoring is lost due to removal from queue%s: Job monitoring is unintentionally lost%s: Job monitoring stop requested with %u active references%s: Job monitoring stop requested with %u active references and %s queue associated%s: Job monitoring stop success%s: Job submission to LRMS failed%s: Job submission to LRMS takes too long, but ID is already obtained. Pretending submission is done.%s: Job submission to LRMS takes too long. Failing.%s: Job's helper exited%s: LRMS scripts limit of %u is reached - suspending submit/cancel%s: Location %s not accessible remotely, skipping%s: Plugin at state %s : %s%s: Plugin execution failed%s: Processing job description failed%s: PushSorted failed to find job where expected%s: Re-requesting attention from DTR generator%s: Reading output files from user generated list in %s%s: Reading status of new job failed%s: Received DTR %s to copy file %s in state %s%s: Received DTR belongs to inactive job%s: Received DTR with two remote endpoints!%s: Received data staging request to %s files%s: Received job in DTR generator%s: Received job in a bad state: %s%s: Removing %s from dynamic output file %s%s: Reprocessing job description failed%s: Requesting attention from DTR generator%s: Returning canceled job from DTR generator%s: Session directory processing takes too long - %u.%06u seconds%s: Some %s failed%s: State: %s from %s%s: State: %s: data staging finished%s: State: %s: still in data staging%s: State: ACCEPTED%s: State: ACCEPTED: dryrun%s: State: ACCEPTED: has process time %s%s: State: ACCEPTED: moving to PREPARING%s: State: ACCEPTED: parsing job description%s: State: CANCELING%s: State: FINISHING%s: State: INLRMS%s: State: INLRMS - checking for not pending%s: State: INLRMS - checking for pending(%u) and mark%s: State: INLRMS - no mark found%s: State: INLRMS: exit message is %i %s%s: State: PREPARING%s: State: SUBMIT%s: Trying remove job from data staging which does not exist%s: Trying to remove job from data staging which is still active%s: Two identical output destinations: %s%s: Unknown user policy '%s'%s: Uploadable files timed out%s: User has NOT uploaded file %s%s: User has uploaded file %s%s: checksum %s%s: delete file %s: failed to obtain file path: %s%s: delete file %s: failed to open file/dir: %s%s: job assigned for slow polling%s: job being processed%s: job for attention%s: job found while scanning%s: job will wait for external process%s: new job is accepted%s: old job is accepted%s: put file %s: %s%s: put file %s: failed to create file: %s%s: put file %s: there is no payload%s: put file %s: unrecognized payload%s: put log %s: there is no payload%s: put log %s: unrecognized payload%s: replica type %s%s: restarted FINISHING job%s: restarted INLRMS job%s: restarted PREPARING job%s: size %llu%s: state CANCELING: child exited with code %i%s: state CANCELING: job diagnostics collected%s: state CANCELING: starting child: %s%s: state CANCELING: timeout waiting for cancellation%s: state SUBMIT: child exited with code %i%s: state SUBMIT: starting child: %s%s: there is no such job: %s%s: unexpected failed job add request: %s%s: unexpected job add request: %s'(' expected')' expected'action' attribute not allowed in user-side job description'control' configuration option is no longer supported, please use 'controldir' instead'stdout' 
attribute must be specified when 'join' attribute is specified(Re)Trying next destination(Re)Trying next source(empty)(null)--same and --not-same cannot be specified together.: %d: %s: Accounting records reporter tool is not specified: Failure creating accounting database connection: Failure creating slot for accounting reporter child process: Failure starting accounting reporter child process: Metrics tool returned error code %i: %s: writing accounting record took %llu ms< %s<< %s> %sA computing resource using the GridFTP interface was requested, but %sthe corresponding plugin could not be loaded. Is the plugin installed? %sIf not, please install the package 'nordugrid-arc-plugins-globus'. %sDepending on your type of installation the package name might differ.A-REX REST: Failed to resume jobA-REX REST: State change not allowed: from %s to %sAC extension information for VO AC is invalid: ACIX returned %sARC Auth. request: %sARC delegation policy: %sARC6 submission endpoint selectionAbort request caused by error in transfer functionAbort request caused by transfer errorAborted!Accept failedAccept failed: %sAccepted connection from %u.%u.%u.%u:%uAccepted connection from [%s]:%uAccepted connection on %u.%u.%u.%u:%uAccepted connection on [%s]:%uAccepting submission of new job or modification request: %sAccess list location: %sAccounting database cannot be created. Faile to create parent directory %s.Accounting database cannot be created: %s is not a directoryAccounting database connection has been establishedAccounting database file (%s) is not a regular fileAccounting database initialized succesfullyAcquired auth token for %s: %sActivation failedAdd location: metadata: %sAdd location: url: %sAdding FQAN value: %sAdding FQAN/primary value: %sAdding VOMS group value: %sAdding VOMS primary group value: %sAdding VOMS primary role value: %sAdding VOMS role value: %sAdding Virtual Organization value: %sAdding action-id value: %sAdding cert chain value: %sAdding endpoint '%s' with interface name %sAdding endpoint (%s) to ServiceEndpointRetrieverAdding endpoint (%s) to TargetInformationRetrieverAdding endpoint (%s) to both ServiceEndpointRetriever and TargetInformationRetrieverAdding location: %s - %sAdding profile-id value: %sAdding request token %sAdding resource-id value: %sAdding resource-owner value: %sAdding space token %sAdding subject-id value: %sAdding subject-issuer value: %sAdding to bulk requestAdding virtual-organization value: %sAddress: %sAll %u process slots usedAll DTRs finished for job %sAll queries failedAll requirements satisfied.All results obtained are invalidAllocated %u buffers %llu bytes each.Allow specified entity to retrieve credential without passphrase. 
This option is specific for the PUT command when contacting Myproxy server.Already have directory: %sAlready reading from sourceAlready writing to destinationAn error occurred during the generation of job description to be sent to %sAnother process (%s) owns the lock on file %sArc policy can not been carried by SAML2.0 profile of XACMLArcAuthZ: failed to initiate all PDPs - this instance will be non-functionalArchiving DTR %s, state %sArchiving DTR %s, state ERRORAre you sure you want to clean jobs missing information?Are you sure you want to synchronize your local job list?Assembling BLAH parser log entry: %sAssigned to authorization group %sAssigned to userlist %sAssuming - file not foundAssuming transfer is already aborted or failed.At least two values are needed for the 'inputfiles' attributeAt least two values are needed for the 'outputfiles' attributeAttempt to assign relative path to URL - making it absoluteAttempting to contact %s on port %iAttribute '%s' multiply definedAttribute 'join' cannot be specified when both 'stdout' and 'stderr' attributes is specifiedAttribute Value (1): %sAttribute Value (2): %sAttribute Value inside Subject: %sAttribute name (%s) contains invalid character (%s)Attribute name expectedAttributes 'gridtime' and 'cputime' cannot be specified togetherAttributes 'gridtime' and 'walltime' cannot be specified togetherAuthenticate in commands failedAuthentication Request URL: %sAuthentication failureAuthorized by arc.pdpAuthorized from remote pdp serviceAuthorized from simplelist.pdp: %sAuthorized from xacml.pdpBN_new || RSA_new failedBN_set_word failedBad URL in acix_endpointBad URL in deliveryservice: %sBad authentication information: %sBad checksum format %sBad credential value %s in cache access rulesBad directory name: %sBad format detected in file %s, in line %sBad format in XML response from delivery service at %s: %sBad format in XML response from service at %s: %sBad format in XML response: %sBad label: "%s"Bad logicBad logic for %s - bringOnline returned ok but SRM request is not finished successfully or on goingBad logic for %s - getTURLs returned ok but SRM request is not finished successfully or on goingBad logic for %s - putTURLs returned ok but SRM request is not finished successfully or on goingBad mount directory specifiedBad name for executable: %sBad name for runtime environment: %sBad name for stderr: %sBad name for stdout: %sBad number in definedshare %sBad number in maxdeliveryBad number in maxemergencyBad number in maxpreparedBad number in maxprocessorBad number in maxtransfertriesBad number in priority element: %sBad number in remotesizelimitBad number in speedcontrolBad or old format detected in file %s, in line %sBad path for %s: Rucio supports read/write at /objectstores and read-only at /replicasBad subcommand in configuration line: %sBad value for loglevelBadly formatted pid %s in lock file %sBatch System Information:Batch system information:Bearer token is available. 
It is preferred for job submission.Behaviour tuningBlock %s not found in configuration file %sBlockName is emptyBoosting priority from %i to %i due to incoming higher priority DTRBoth URLs must have the same protocol, host and portBoth of CACertificatePath and CACertificatesDir elements missing or emptyBring online request %s finished successfully, file is now ONLINEBring online request %s is still in queue, should waitBroken stringBroker %s loadedBroker plugin "%s" not found.Brokering and filteringBrokers available to %s:Buffer creation failed !Buffer registration failedBusy plugins found while unloading Module Manager. Waiting for them to be released.CA certificate and CA private key do not matchCA name: %sCA-certificates installed:CONTENT %u: %sCPU clock speed: %iCPU model: %sCPU vendor: %sCPU version: %sCREAM request generation failed: %sCache %s: Free space %f GBCache access allowed to %s by DN %sCache access allowed to %s by VO %sCache access allowed to %s by VO %s and group %sCache access allowed to %s by VO %s and role %sCache area free size: %i GBCache area total size: %i GBCache cleaning script failedCache creation date: %sCache file %s does not existCache file %s not foundCache file %s was deleted during link/copy, must start againCache file %s was locked during link/copy, must start againCache file %s was modified in the last second, sleeping 1 second to avoid race conditionCache file %s was modified while linking, must start againCache file is %sCache meta file %s is empty, will recreateCache meta file %s possibly corrupted, will recreateCache not found for file %sCached copy is still validCached file is locked - should retryCached file is outdated, will re-downloadCalculated checksum %s matches checksum reported by serverCalculated transfer checksum %s matches source checksumCalculated/supplied transfer checksum %s matches checksum reported by SRM destination %sCallback got failureCalling PrepareReading when request was already prepared!Calling PrepareWriting when request was already prepared!Calling acix with query %sCalling http://localhost:60000/Echo using ClientSOAPCalling http://localhost:60000/Echo using httplibCalling https://localhost:60000/Echo using ClientSOAPCalling plugin %s to query endpoint on %sCan not access CA certificate directory: %s. 
The certificates will not be verified.Can not access VOMS file/directory: %s.Can not access VOMSES file/directory: %s.Can not access certificate file: %sCan not access key file: %sCan not access proxy file: %sCan not add X509 extended KeyUsage extension to new proxy certificateCan not add X509 extension to proxy certCan not allocate memoryCan not allocate memory for extension for proxy certificateCan not compute digest of public keyCan not convert DER encoded PROXY_CERT_INFO_EXTENSION extension to internal formatCan not convert PROXY_CERT_INFO_EXTENSION struct from internal to DER encoded formatCan not convert keyUsage struct from DER encoded formatCan not convert keyUsage struct from internal to DER formatCan not convert private key to DER formatCan not convert signed EEC cert into DER formatCan not convert signed proxy cert into DER formatCan not convert signed proxy cert into PEM formatCan not convert string into ASN1_OBJECTCan not copy extended KeyUsage extensionCan not copy the subject name from issuer for proxy certificateCan not create ASN1_OCTET_STRINGCan not create BIO for parsing requestCan not create BIO for requestCan not create BIO for signed EEC certificateCan not create BIO for signed proxy certificateCan not create PROXY_CERT_INFO_EXTENSION extensionCan not create PolicyStore objectCan not create XACML ActionCan not create XACML ActionAttribute: %sCan not create XACML ResourceCan not create XACML ResourceAttribute: %sCan not create XACML SubjectAttribute: %sCan not create XACML requestCan not create a new X509_NAME_ENTRY for the proxy certificate requestCan not create delegation crendential to delegation service: %sCan not create extension for PROXY_CERT_INFOCan not create extension for keyUsageCan not create extension for proxy certificateCan not create function %sCan not create function: FunctionId does not existCan not create name entry CN for proxy certificateCan not create the SSL Context objectCan not create the SSL objectCan not determine the install location. Using %s. Please set ARC_LOCATION if this is not correct.Can not duplicate serial number for proxy certificateCan not duplicate the subject name for the self-signing proxy certificate requestCan not dynamically produce AlgFacrotyCan not dynamically produce AttributeFactoryCan not dynamically produce EvaluatorCan not dynamically produce FnFactoryCan not dynamically produce PolicyCan not dynamically produce RequestCan not find element with proper namespaceCan not find element with proper namespaceCan not find ArcPDPContextCan not find CA certificates directory in default locations: ~/.arc/certificates, ~/.globus/certificates, %s/etc/certificates, %s/etc/grid-security/certificates, %s/share/certificates, /etc/grid-security/certificates. The certificate will not be verified. 
If the CA certificates directory does exist, please manually specify the locations via env X509_CERT_DIR, or the cacertificatesdirectory item in client.conf Can not find XACMLPDPContextCan not find certificate file: %sCan not find certificate with name %sCan not find issuer certificate for the certificate with subject %s and hash: %luCan not find key file: %sCan not find key with name: %sCan not find voms service configuration file (vomses) in default locations: ~/.arc/vomses, ~/.voms/vomses, $ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/vomses, /etc/vomses, /etc/grid-security/vomsesCan not generate X509 requestCan not generate policy objectCan not get SAMLAssertion SecAttr from message contextCan not get extended KeyUsage extension from issuer certificateCan not get policy from PROXY_CERT_INFO_EXTENSION extensionCan not get policy language from PROXY_CERT_INFO_EXTENSION extensionCan not get the certificate typeCan not get the delegation credential: %s from delegation service: %sCan not get the issuer's private keyCan not load ARC evaluator object: %sCan not load ARC request object: %sCan not load policy objectCan not load policy object: %sCan not load request objectCan not open job description file: %sCan not open key file %sCan not parse classname for AttributeFactory from configurationCan not parse classname for CombiningAlgorithmFactory from configurationCan not parse classname for FunctionFactory from configurationCan not parse classname for Policy from configurationCan not parse classname for Request from configurationCan not parse date: %sCan not parse month: %sCan not parse time zone offset: %sCan not parse time: %sCan not read PEM private keyCan not read PEM private key: failed to decryptCan not read PEM private key: failed to obtain passwordCan not read PEM private key: probably bad passwordCan not read certificate file: %sCan not read certificate stringCan not read certificate/key stringCan not read information from the local job status fileCan not read key stringCan not set CN in proxy certificateCan not set issuer's subject for proxy certificateCan not set private keyCan not set pubkey for proxy certificateCan not set readable file for request BIOCan not set serial number for proxy certificateCan not set the lifetime for proxy certificateCan not set version number for proxy certificateCan not set writable file for request BIOCan not set writable file for signed EEC certificate BIOCan not set writable file for signed proxy certificate BIOCan not sign a EECCan't allocate memory for CA policy pathCan't convert DER encoded PROXYCERTINFO extension to internal formatCan't convert X509 request from internal to DER encoded formatCan't create delegation contextCan't create information handle - is the ARC LDAP DMC plugin available?Can't create information handle - is the ARC ldap DMC plugin available?Can't delete directory %s: %sCan't delete file %s: %sCan't extract object's name from source urlCan't find LCAS functions in a library %sCan't find LCMAPS functions in a library %sCan't get policy from PROXYCERTINFO extensionCan't get policy language from PROXYCERTINFO extensionCan't get the first byte of input BIO to get its formatCan't get the first byte of input to determine its formatCan't handle URL %sCan't handle location %sCan't load LCAS library %s: %sCan't load LCMAPS library %s: %sCan't load plugin %s for access point %sCan't obtain configuration. Only public information is provided.Can't obtain configuration. 
Public information is disabled.Can't obtain configuration. Public information is disallowed for this user.Can't open configuration fileCan't parse access rights in configuration lineCan't parse configuration lineCan't parse create arguments in configuration lineCan't parse host and/or port in response to EPSV/PASVCan't parse mkdir arguments in configuration lineCan't parse or:and in configuration lineCan't parse user:group in configuration lineCan't read configuration fileCan't read configuration file at %sCan't read from sourceCan't read list of destinations from file %sCan't read list of locations from file %sCan't read list of sources from file %sCan't read policy namesCan't read transfer states from %s. Perhaps A-REX is not running?Can't recognize group in configuration lineCan't recognize type of configuration fileCan't recognize type of configuration file at %sCan't recognize user in configuration lineCan't rename file %s: %sCan't reset the inputCan't resolve host %sCan't set OpenSSL verify flagsCan't stat file: %s: %sCan't stat stdio channel %sCan't use URL %sCan't write to destinationCancellation completeCancelling DTR %s with source: %s, destination: %sCancelling active transferCancelling job %sCancelling job: %sCancelling synchronization requestCandyPond: UnauthorizedCannot adapt job description to the submission target when information discovery is turned offCannot change owner of %s: %s Cannot change permission of %s: %s Cannot compare empty checksumCannot convert ARC module name to Python stringCannot convert ExecutionTarget (%s) to python objectCannot convert JobDescription to python objectCannot convert UserConfig to Python objectCannot convert config to Python objectCannot convert inmsg to Python objectCannot convert module name to Python stringCannot convert outmsg to Python objectCannot convert string %s to int in line %sCannot copy example configuration (%s), it is not a regular fileCannot create ExecutionTarget argumentCannot create JobDescription argumentCannot create UserConfig argumentCannot create argument of the constructorCannot create config argumentCannot create directories for log file %s. Messages will be logged to this logCannot create directory %s for per-job hard linksCannot create http payloadCannot create inmsg argumentCannot create instance of Python classCannot create outmsg argumentCannot create output of %s for any jobsCannot create output of %s for job (%s): Invalid source %sCannot create resolver from /etc/resolv.confCannot determine hostname from gethostname()Cannot determine hostname from gethostname() to generate ceID automatically.Cannot determine replica type for %sCannot determine the %s location: %sCannot find under response soap message:Cannot find ARC Config classCannot find ARC ExecutionTarget classCannot find ARC JobDescription classCannot find ARC Message classCannot find ARC UserConfig classCannot find any proxy. This application currently cannot run without a proxy. If you have the proxy file in a non-default location, please make sure the path is specified in the client configuration file. If you don't have a proxy yet, please run 'arcproxy'!Cannot find content under response soap messageCannot find custom broker classCannot find file at %s for getting the proxy. 
Please make sure this file exists.Cannot find information abouto job submission endpointCannot find local input file '%s' (%s)Cannot find service classCannot find the CA certificates directory path, please set environment variable X509_CERT_DIR, or cacertificatesdirectory in a configuration file.Cannot find the path of the proxy file, please setup environment X509_USER_PROXY, or proxypath in a configuration fileCannot find the user certificate path, please setup environment X509_USER_CERT, or certificatepath in a configuration fileCannot find the user private key path, please setup environment X509_USER_KEY, or keypath in a configuration fileCannot get VOMS server %s information from the vomses filesCannot get VOMS server address information from vomses line: "%s"Cannot get any AC or attributes info from VOMS server: %s; Returned message from VOMS server: %s Cannot get dictionary of ARC moduleCannot get dictionary of custom broker moduleCannot get dictionary of moduleCannot handle URL %sCannot handle local user %sCannot import ARC moduleCannot import moduleCannot initialize ARCHERY domain name for queryCannot link to a remote destination. Will not use mapped URLCannot link to source which can be modified, will copy insteadCannot open BLAH log file '%s'Cannot open cache log file %s: %s. Cache cleaning messages will be logged to this logCannot output XRSL representation: The Resources.SlotRequirement.NumberOfSlots attribute must be specified when the Resources.SlotRequirement.SlotsPerHost attribute is specified.Cannot parse integer value '%s' for -%cCannot parse password source %s it must be of source_type or source_type:data format. Supported source types are int,stdin,stream,file.Cannot parse password source expression %s it must be of type=source formatCannot parse password source type %s. Supported source types are int,stdin,stream,file.Cannot parse password type %s. Currently supported values are 'key','myproxy','myproxynew' and 'all'.Cannot parse schema!Cannot parse service endpoint TXT records.Cannot process proxy file at %s.Cannot query service endpoint TXT records from DNSCannot read specified jobid file: %sCannot remove proxy file at %sCannot remove proxy file at %s, because it's not thereCannot rename to or from root directoryCannot rename to the same URLCannot stat local input file '%s'Cannot switch to group (%s)Cannot switch to primary group for user (%s)Cannot switch to user (%s)Cannot to update AAR. 
Cannot find registered AAR for job %s in accounting database.Cannot use supplied --size optionCannot write job IDs to file (%s)Cannot write jobid (%s) to file (%s)Cannot write jobids to file (%s)Cant retrieve job files for job (%s) - unable to determine URL of stage out directoryCapabilities:Catting %s for job %sCause of failure unclear - choosing randomlyCert Type: %dCertificate %s already expiredCertificate %s will expire in %sCertificate and key ('%s' and '%s') not found in any of the paths: %sCertificate does not have a slotCertificate format is DERCertificate format is PEMCertificate format is PKCSCertificate format is unknownCertificate has unknown extension with numeric ID %u and SN %sCertificate information collection failedCertificate information:Certificate issuer: %sCertificate request is invalidCertificate to use is: %sCertificate verification error: %sCertificate verification failedCertificate verification succeededCertificate with serial number %s and subject "%s" is revokedCertificate with subject %s has expiredCertificate/Proxy path is emptyCertificate: %sCertiticate chain number %dChain(s) configuration failedChallenge: %sCheck: looking for metadata: %sCheck: obtained access latency: high (NEARLINE)Check: obtained access latency: low (ONLINE)Check: obtained checksum: %sCheck: obtained modification date: %sCheck: obtained modification time %sCheck: obtained size %lluCheck: obtained size: %lliChecking %sChecking URL returned by SRM: %sChecking cache againChecking cache permissions: DN: %sChecking cache permissions: VO: %sChecking cache permissions: VOMS attr: %sChecking file %sChecking for existence of %sChecking for suspended endpoints which should be started.Checking replica %sChecking source file is presentChecksum %sChecksum mismatchChecksum mismatch between calcuated checksum %s and source checksum %sChecksum mismatch between calculated checksum %s and checksum reported by server %sChecksum mismatch between calculated checksum %s and source checksum %sChecksum mismatch between calculated/supplied checksum (%s) and checksum reported by SRM destination (%s)Checksum mismatch between checksum given as meta option (%s:%s) and calculated checksum (%s)Checksum not computedChecksum type of SRM (%s) and calculated/supplied checksum (%s) differ, cannot compareChecksum type of source and calculated checksum differ, cannot compareChecksum type returned by server is different to requested type, cannot compareChild exitedChild monitoring child %d exitedChild monitoring drops abandoned child %d (%d)Child monitoring error: %iChild monitoring internal communication errorChild monitoring kick detectedChild monitoring lost child %d (%d)Child monitoring signal detectedChild monitoring stderr is closedChild monitoring stdin is closedChild monitoring stdout is closedChild was already startedClass name: %sCleaning failedCleaning job %sCleaning job: %sCleaning up after failure: deleting %sClient chain does not have entry pointClient connection has no entry pointClient side MCCs are loadedClient version: nordugrid-arc-%sClosed connectionClosed successfullyClosing channel (list)Closing channel (retrieve)Closing channel (retrieve) due to local read error: %sClosing channel (store)Closing channel (store) due to error: %sClosing connectionClosing connection to SQLite accounting databaseClosing may have failedClosing read channelClosing write channelCollected error is: %sCollecting EMI-ES GLUE2 computing info endpoint information.Collecting Job (A-REX REST jobs) information.Command ABORCommand ALLO 
%iCommand CDUPCommand CWD %sCommand DCAU: %i '%s'Command DELE %sCommand EPRTCommand EPSV %sCommand ERET %sCommand LIST %sCommand MDTM %sCommand MKD %sCommand MLSD %sCommand MLST %sCommand MODE %cCommand NLST %sCommand NOOPCommand OPTSCommand OPTS RETRCommand PASVCommand PBZS: %sCommand PORTCommand PROT: %sCommand QUITCommand REST %sCommand RETR %sCommand RMD %sCommand SBUF: %iCommand SIZE %sCommand SPASCommand STOR %sCommand TYPE %cCommand USER %sCommand is being sentCommand: %sComponent %s(%s) could not be createdComponent has no ID attribute definedComponent has no name attribute definedComponent's %s(%s) next has no ID attribute definedComputing endpoint %s (type %s) added to the list for submission brokeringComputing service:Computing service: %sComputingShare (%s) does not match selected queue (%s)ComputingShare (%s) explicitly rejectedComputingShareName of ExecutionTarget (%s) is not definedConfig class is not an objectConfiguration (%s) loadedConfiguration errorConfiguration example file created (%s)Configuration file can not be readConfiguration file is broken - block name does not end with ]: %sConfiguration file is broken - block name is too short: %sConfiguration file not specifiedConfiguration file not specified in ConfigBlockConfiguration file to loadConfiguration root element is not Configuration section [userlist] is missing name.Connect: Authentication timed out after %d msConnect: Connecting timed out after %d msConnect: Failed authentication: %sConnect: Failed to connect: %sConnect: Failed to init auth info handle: %sConnect: Failed to init handle: %sConnecting to Delivery service at %sConnection from %s: %sContacting VOMS server (named %s): %s on port: %sContent: %sControl connection (probably) closedConversion failed: %sConversion mode is set to CREAMConversion mode is set to DIRECTConversion mode is set to EMIConversion mode is set to SUBJECTConverting to CREAM action - namespace: %s, operation: %sCopy failed: %sCopying with dlcloseCould not acquire lock on meta file %sCould not connect to service %s: %sCould not convert incoming payload!Could not convert payload!Could not convert the slcs attribute value (%s) to an URL instance in configuration file (%s)Could not create PayloadSOAP!Could not create link to lock file %s as it already existsCould not create lock file %s as it already existsCould not create temporary file "%s"Could not create temporary file: %sCould not determine configuration type or configuration is emptyCould not determine hostname from gethostname()Could not determine session directory from filename %sCould not determine version of serverCould not find any useable delivery service, forcing local transferCould not find loadable module by name %s (%s)Could not find loadable module by names %s and %s (%s)Could not find loadable module descriptor by name %sCould not find loadable module descriptor by name %s or kind %sCould not get checksum of %s: %sCould not handle checksum %s: skip checksum checkCould not handle endpoint %sCould not load configuration (%s)Could not locate module %s in following paths:Could not make new transfer request: %s: %sCould not obtain information about source: %sCould not open LDAP connection to %sCould not open file %s for reading: %sCould not read data staging configuration from %sCould not resolve original source of %s: %sCould not resolve original source of %s: out of timeCould not set LDAP network timeout (%s)Could not set LDAP protocol version (%s)Could not set LDAP timelimit (%s)Could not stat file %s: %sCould not 
validate message!Couldn't handle certificate: %sCouldn't parse benchmark XML: %sCouldn't verify availability of CRLCountry: %sCreated RSA key, proceeding with requestCreating a client to Argus PDP serviceCreating a delegation soap clientCreating a http clientCreating a pdpservice clientCreating a soap clientCreating an EMI ES clientCreating and sending job clean request to %sCreating and sending job information query request to %sCreating and sending job list request to %sCreating and sending job notify request to %sCreating and sending job restart request to %sCreating and sending job resume request to %sCreating and sending job submit request to %sCreating and sending job suspend request to %sCreating and sending notify request to %sCreating and sending requestCreating and sending service information query request to %sCreating and sending service information request to %sCreating buffer: %lli x %iCreating client interfaceCreating client side chainCreating delegation credential to ARC delegation serviceCreating delegation to CREAM delegation failedCreating delegation to CREAM delegation serviceCreating delegation to CREAM delegation service failedCreating directory %sCreating directory: %sCreating service side chainCredential expires at %sCredential handling exception: %sCredential is not initializedCredentials stored in temporary file %sCritical VOMS attribute processing failedCurrent jobs in system (PREPARING to FINISHING) per-DN (%i entries)Current transfer FAILED: %sCurrent transfer completeDB_OLD_VERSION: The database cannot be opened without being first upgraded.DCAU failedDCAU failed: %sDH parameters appliedDN %s doesn't match %sDN %s is cached and is valid until %s for URL %sDN %s is cached but has expired for URL %sDN is %sDTR %s cancelledDTR %s could not be cancelledDTR %s failed: %sDTR %s finished successfullyDTR %s finished with state %sDTR %s requested cancel but no active transferDTR %s still in progress (%lluB transferred)DTR %s was already cancelledDTR Generator processed: %d jobs to cancel, %d DTRs, %d new jobsDTR Generator waiting to process: %d jobs to cancel, %d DTRs, %d new jobsDTR is ready for transfer, moving to delivery queueDTRGenerator got request to cancel null jobDTRGenerator is asked about null jobDTRGenerator is asked to check files for null jobDTRGenerator is not running!DTRGenerator is queried about null jobDTRGenerator is requested to clean links for null jobDTRGenerator is requested to process null jobDTRGenerator is requested to remove null jobDTRGenerator was sent null jobDTRs still running for job %sDaemonization fork failed: %sData channel (retrieve) %i %i %iData channel (store) %i %i %iData channel connected (list)Data channel connected (retrieve)Data channel connected (store)Data channel: %d.%d.%d.%d:%dData channel: [%s]:%dData delivery loop exitedData transfer abortedData transfer aborted: %sData was already cachedDataDelivery log tail: %sDataDelivery: %sDataMove::Transfer: no checksum calculation for %sDataMove::Transfer: using supplied checksum %sDataMove::Transfer: using supplied checksum %s:%sDataMove::Transfer: will calculate %s checksumDataMover: cycleDataMover: destination out of tries - exitDataMover: no retries requested - exitDataMover: source out of tries - exitDataMover::Transfer : starting new threadDataMover::Transfer: trying to destroy/overwrite destination: %sDataPointGFAL::write_file got position %d and offset %d, has to seekDataPointXrootd::write_file got position %d and offset %d, has to seekDataStagingDelivery exited with code 
%iDeactivating modulesDefault CPU time: %sDefault INTERNAL client constructorDefault Storage Service: %sDefault broker (%s) is not available. When using %s a broker should be specified explicitly (-b option).Default wall-time: %sDefault: %sDelegateCredentialsInit failedDelegateProxy failedDelegated credential from delegation service: %sDelegated credential identity: %sDelegation ID: %sDelegation authorization failedDelegation authorization passedDelegation getProxyReq request failedDelegation handler is not configuredDelegation handler with delegatee role endsDelegation handler with delegatee role starts to processDelegation handler with delegator role starts to processDelegation putProxy request failedDelegation role not supported: %sDelegation service: %sDelegation to ARC delegation service failedDelegation to gridsite delegation service failedDelegation type not supported: %sDelegationStore: PeriodicCheckConsumers failed to remove old delegation %s - %sDelegationStore: PeriodicCheckConsumers failed to resume iteratorDelegationStore: TouchConsumer failed to create file %sDelete errorDeleted but still have locations at %sDelivery received new DTR %s with source: %s, destination: %sDelivery service at %s can copy from %sDelivery service at %s can copy to %sDestination URL missingDestination URL not supported: %sDestination URL not valid: %sDestination file is in cacheDestination is invalid URLDestination is not index service, skipping replica registrationDestination is not ready, will wait %u secondsDestination: %sDestroying handleDestructor with dlclose (%s)Dir %s allowed at service %sDirectory %s removed successfullyDirectory %s to store accounting database has been created.Directory listing failedDirectory of trusted CAs is not specified/found; Using current path as the CA directoryDirectory size is larger than %i files, will have to call multiple timesDirectory size is too large to list in one call, will have to call multiple timesDirectory: %sDisconnect: Abort timed out after %d msDisconnect: Closing timed out after %d msDisconnect: Data close timed out after %d msDisconnect: Failed aborting - ignoring: %sDisconnect: Failed closing - ignoring: %sDisconnect: Failed destroying handle: %s. 
Can't handle such situation.Disconnect: Failed quitting - ignoring: %sDisconnect: Quitting timed out after %d msDisconnect: globus handle is stuck.Disconnect: handle destroyed.Disconnect: waiting for globus handle to settleDo sorting using user created python brokerDoesn't support advance reservationsDoesn't support bulk SubmissionDoesn't support preemptionDoing CREAM requestDoing EMI requestDownloading job: %sDowntime ends: %sDowntime starts: %sDumping job description aborted: Unable to load broker %sDuplicate replica found in LFC: %sEACCES Error opening lock file %s: %sECDH parameters appliedEEXIST: DB_CREATE and DB_EXCL were specified and the database exists.EINVALEMI request generation failed: %sEMIES:CancelActivity: job %s - %sEMIES:CreateActivity finished successfullyEMIES:CreateActivity: max jobs total limit reachedEMIES:CreateActivity: no job description foundEMIES:CreateActivity: request = %sEMIES:CreateActivity: response = %sEMIES:CreateActivity: too many activity descriptionsEMIES:GetActivityInfo: job %s - failed to retrieve GLUE2 informationEMIES:GetActivityStatus: job %s - %sEMIES:NotifyService: job %s - %sEMIES:PauseActivity: job %s - %sEMIES:RestartActivity: job %s - %sEMIES:ResumeActivity: job %s - %sEMIES:WipeActivity: job %s - %sENOENT: The file or directory does not exist, Or a nonexistent re_source file was specified.EPSV failedEPSV failed: %sERROR: %sERROR: Dumping job description aborted because no suitable resources were found for the test-jobERROR: Failed to retrieve informationERROR: Failed to retrieve information from the following endpoints:ERROR: Failed to write job information to file (%s)ERROR: Job submission aborted because no resource returned any informationERROR: One or multiple job descriptions was not submitted.ERROR: Test aborted because no suitable resources were found for the test-jobERROR: Unable to load broker %sERROR: VOMS configuration file %s contains too long line(s). Max supported length is %i characters.ERROR: VOMS configuration file %s contains too many lines. Max supported number is %i.ERROR: VOMS configuration line contains too many tokens. Expecting 5 or 6. Line was: %sERROR: failed to read file %s while scanning VOMS configuration.ERROR: file tree is too deep while scanning VOMS configuration. 
Max allowed nesting is %i.ES:CreateActivity: Failed to create new job: %sEchoService (python) 'Process' calledEchoService (python) constructor calledEchoService (python) destructor calledEchoService (python) got: %s EchoService (python) has prefix %(prefix)s and suffix %(suffix)sEchoService (python) request_namespace: %sEchoService (python) thread test startingEchoService (python) thread test, iteration %(iteration)s %(status)sElement "%s" in the profile ignored: the "inidefaultvalue" attribute cannot be specified when the "inisections" and "initag" attributes have not been specified.Element "%s" in the profile ignored: the value of the "inisections" attribute cannot be the empty string.Element "%s" in the profile ignored: the value of the "initag" attribute cannot be the empty string.Element "%s" in the profile ignored: the value of the "initype" attribute cannot be the empty string.Element validation according to GLUE2 schema failed: %sEmpty filename returned from FileCacheEmpty input payload!Empty job description source stringEmpty payload!Empty stringEncrypted: %sEnd of comment not foundEnd of double quoted string not foundEnd of single quoted string not foundEnd of user delimiter (%s) quoted string not foundEndpoint Information:Entry in EGIIS is missing one or more of the attributes 'Mds-Service-type', 'Mds-Service-hn', 'Mds-Service-port' and/or 'Mds-Service-Ldap-suffix'Error accessing cache file %s: %sError adding communication interface in %s. Maybe another instance of A-REX is already running.Error adding communication interface in %s. Maybe permissions are not suitable.Error creating cacheError creating cache. Stale locks may remain.Error creating directory %s: %sError creating lock file %s: %sError creating required directories for %sError creating required dirs: %sError creating temporary file %s: %sError detected while parsing this ACError due to expiration of provided credentialsError during file validation. Can't stat file %s: %sError during file validation: Local file size %llu does not match source file size %llu for file %sError evaluating profileError from BDB: %sError from BDB: %s: %sError from SQLite: %sError from SQLite: %s: %sError getting info from statvfs for the path %s: %sError getting list of files (in list)Error in cache processing, will retry without cachingError in caching procedureError in lock file %s, even though linking did not return an errorError initialising X509 storeError initiating delegation database in %s. Maybe permissions are not suitable. 
Returned error is: %s.Error linking cache file to %s.Error linking tmp file %s to lock file %s: %sError listing lock file %s: %sError loading generated configurationError looking up attributes of cache meta file %s: %sError looking up space tokens matching description %sError number in store context: %iError opening accounting databaseError opening lock file %s in initial check: %sError opening meta file %sError opening meta file for writing %sError parsing the internally set executables attribute.Error pinging delivery service at %s: %s: %sError reading info from file %s:%sError reading lock file %s: %sError reading meta file %s: %sError registering replica, moving to end of data stagingError removing cache file %s: %sError switching uidError to flush output payloadError when extracting public key from requestError when loading the extension config file: %sError when loading the extension config file: %s on line: %dError while reading dir %s: %sError while reading fileError with cache configurationError with cache configuration: %sError with formatting in lock file %sError with hearbeatfile: %sError with post-transfer destination handling: %sError with source file, moving to next replicaError writing raw certificateError writing srm info file %sError writing to lock file %s: %sError: Service returned a limit higher or equal to current limit (current: %d; returned: %d)Error: can't open policy file: %sError: failed to set handler for SIGCHLDError: failed to set handler for SIGTERMError: no LDAP query started to %sError: policy location: %s is not a regular fileErrorDescriptionEstimated average waiting time: %sEstimated worst waiting time: %sEvaluator does not support loadable Combining AlgorithmsEvaluator does not support specified Combining Algorithm - %sEvaluator for ArcPDP was not loadedEvaluator for GACLPDP was not loadedEvaluator for XACMLPDP was not loadedExample configuration (%s) not created.Excepton while trying to start external process: %sExcessive data received while checking file accessExcluding replica %s matching pattern !%sExecution Target on Computing Service: %sExecution environment does not support inbound connectionsExecution environment does not support outbound connectionsExecution environment is a physical machineExecution environment is a virtual machineExecution environment supports inbound connectionsExecution environment supports outbound connectionsExecutionTarget class is not an objectExitingExiting Generator threadExiting jobs processing threadExpecting Command among argumentsExpecting Command and URL providedExpecting Command module name among argumentsExpecting Command module path among argumentsExpecting Module, Command and URL providedExpecting URL among argumentsExternal request for attention %sExtracted nickname %s from credentials to use for RUCIO_ACCOUNTExtractor[%s] (%s): %s = %sExtractor[%s] (%s): %s contains %sFATAL, ERROR, WARNING, INFO, VERBOSE or DEBUGFTP Job Control: Can't parse host and/or port in response to EPSV/PASV: %sFTP Job Control: Data channel: %d.%d.%d.%d:%dFTP Job Control: Data channel: [%s]:%dFTP Job Control: Data connect write failed: %sFTP Job Control: Data connect write timed out after %d msFTP Job Control: Data write failed: %sFTP Job Control: Data write timed out after %d msFTP Job Control: Failed sending DCAU commandFTP Job Control: Failed sending EPSV and PASV commandsFTP Job Control: Failed sending STOR command: %sFTP Job Control: Failed sending TYPE commandFTP Job Control: Failed to apply local address to data connection: 
%sFTP Job Control: Local port failed: %sFTP Job Control: Local type failed: %sFTP Job Control: Server EPSV response parsing failed: %sFTP Job Control: Server EPSV response port parsing failed: %sFTP Job Control: Server PASV response parsing failed: %sFailed to assign hostname extensionFailed allocating memory for handleFailed authenticatingFailed authenticating: %sFailed checking database (%s)Failed checking source replicaFailed checking source replica %s: %sFailed checking source replica: %sFailed cleaning up destination %sFailed configuration initializationFailed configuration initialization.Failed connecting to server %s:%dFailed destroying handle: %s. Can't handle such situation.Failed downloading %s to %sFailed downloading %s to %s, destination already existsFailed downloading %s to %s, unable to remove existing destinationFailed in globus_cond_initFailed in globus_ftp_control_handle_initFailed in globus_mutex_initFailed linking cache file to %sFailed locating credentialsFailed looking up attributes of cached file: %sFailed preparing job descriptionFailed preparing job description to target resourcesFailed processing A-REX configurationFailed processing authorization group %sFailed processing user mapping command: %s %sFailed reading configurationFailed reading control directory: %sFailed reading control directory: %s: %sFailed reading dataFailed reading list of filesFailed reading local informationFailed retrieving information for job: %sFailed retrieving job description for job: %sFailed running mailerFailed sending CWD command for credentials renewalFailed sending CWD command for job cancellingFailed sending CWD command for job cleaningFailed sending DELE command for job cancellingFailed sending RMD command for job cleaningFailed setting file owner: %sFailed submitting job descriptionFailed to abort data connection - ignoring and recoveringFailed to abort transfer of ftp file: %sFailed to accept SSL connectionFailed to accept connection requestFailed to accept delegationFailed to accept new file/destinationFailed to access proxy of given job id %s at %sFailed to acquire A-REX's configurationFailed to acquire delegation contextFailed to acquire lock on cache meta file %sFailed to acquire lock on file %sFailed to acquire source: %sFailed to activate Jobs Processing object, exiting Grid Manager threadFailed to add '%s' URL (interface type %s) into the accounting database Endpoints tableFailed to add '%s' into the accounting database %s tableFailed to add Independent OIDFailed to add RFC proxy OIDFailed to add VOMS AC extension. 
Your proxy may be incomplete.Failed to add VOMS AC sequence OIDFailed to add anyLanguage OIDFailed to add certificate and keyFailed to add certificate to token or databaseFailed to add extension into credential extensionsFailed to add inheritAll OIDFailed to add issuer's extension into proxyFailed to add key usage extensionFailed to add proxy certificate information extensionFailed to add voms AC extensionFailed to allocate certificate trustFailed to allocate item for certificate dataFailed to allocate memory for bufferFailed to allocate memory for certificate subject while matching policy.Failed to allocate p12 contextFailed to apply DH parametersFailed to apply ECDH parametersFailed to apply local address to data connectionFailed to authenticate SAML Token inside the incoming SOAPFailed to authenticate Username Token inside the incoming SOAPFailed to authenticate X509 Token inside the incoming SOAPFailed to authenticate to PKCS11 slot %sFailed to authenticate to key databaseFailed to authenticate to token %sFailed to bind socket for %s:%s(%s): %sFailed to bind socket for TCP port %s(%s): %sFailed to bind socket(%s): %sFailed to bind to ldap server (%s)Failed to call PORT_NewArenaFailed to cancel transfer request: %sFailed to cancel: %sFailed to cancel: No SOAP responseFailed to cast PayloadSOAP from incoming payloadFailed to cast PayloadSOAP from outgoing payloadFailed to change mapping stack processing policy in: %s = %sFailed to change owner of symbolic link %s to %iFailed to change owner of temp proxy at %s to %i:%i: %sFailed to change permissions on %s: %sFailed to change permissions or set owner of hard link %s: %sFailed to check %sFailed to clean up file %s: %sFailed to close, deleting clientFailed to communicate to delegation endpoint.Failed to complete writing to destinationFailed to connect for credential renewalFailed to connect for job cancellingFailed to connect for job cleaningFailed to connect to %s(%s):%iFailed to connect to %s(%s):%i - %sFailed to connect to server %s:%dFailed to contact PDP server: %sFailed to convert ASCII to DERFailed to convert EVP_PKEY to PKCS8Failed to convert GSI credential to GSS credential (major: %d, minor: %d)Failed to convert GSI credential to GSS credential (major: %d, minor: %d):%s:%sFailed to convert PrivateKeyInfo to EVP_PKEYFailed to convert security information to ARC policyFailed to convert security information to ARC requestFailed to convert security information to XACML requestFailed to copy %s: %sFailed to copy file %s to %s: %sFailed to copy input file: %s to path: %sFailed to create DTR dump threadFailed to create OTokens security attributesFailed to create OpenSSL object %s %s - %u %sFailed to create SOAP containersFailed to create X509 certificate with NSSFailed to create any cache directories for %sFailed to create cache directory for file %s: %sFailed to create cache meta file %sFailed to create certificate requestFailed to create control directory %sFailed to create directoryFailed to create directory %sFailed to create directory %s! Skipping job.Failed to create directory %s: %sFailed to create export contextFailed to create file %s: %sFailed to create file in %sFailed to create hard link from %s to %s: %sFailed to create input SOAP containerFailed to create key or certificate safeFailed to create ldap bind thread (%s)Failed to create link: %s. 
Will not use mapped URLFailed to create path lengthFailed to create policy languageFailed to create session directory %sFailed to create socket for connecting to %s(%s):%d - %sFailed to create socket for listening at %s:%s(%s): %sFailed to create socket for listening at TCP port %s(%s): %sFailed to create socket(%s): %sFailed to create subject nameFailed to create symbolic link from %s to %s: %sFailed to create temp proxy at %s: %sFailed to create threadFailed to create xrootd copy job: %sFailed to create/open file %s: %sFailed to decode trust stringFailed to delegate credentials to server - %sFailed to delegate credentials to server - no delegation interface foundFailed to delete %sFailed to delete %s but will still try to copyFailed to delete certificateFailed to delete delivery object or deletion timed outFailed to delete destination, retry may failFailed to delete logical fileFailed to delete meta-informationFailed to delete physical fileFailed to delete private keyFailed to delete private key and certificateFailed to delete replica %s: %sFailed to delete stale cache file %s: %sFailed to disconnect after credentials renewalFailed to disconnect after job cancellingFailed to disconnect after job cleaningFailed to duplicate X509 structureFailed to duplicate extensionFailed to enable IPv6Failed to enable IPv6: %sFailed to encode PKCS12Failed to encode certificateFailed to encode the certificate request with DER formatFailed to establish SSL connectionFailed to establish connection: %sFailed to export X509 certificate from NSS DBFailed to export private keyFailed to extract VOMS nickname from proxyFailed to extract credential informationFailed to fetch data from %s accounting database tableFailed to fetch data from accounting database Endpoints tableFailed to finalize reading from sourceFailed to finalize writing to destinationFailed to find CA certificatesFailed to find certificate and/or private key or files have improper permissions or ownership.Failed to find certificates by nickname: %sFailed to find extensionFailed to find issuer certificate for proxy certificateFailed to find metadata info on %s for determining file or directory deleteFailed to generate EC keyFailed to generate SAML Token for outgoing SOAPFailed to generate Username Token for outgoing SOAPFailed to generate X509 Token for outgoing SOAPFailed to generate X509 request with NSSFailed to generate public/private key pairFailed to get DN information from .local file for job %sFailed to get TCP socket options for connection to %s(%s):%d - timeout won't work - %sFailed to get certificate from certificate fileFailed to get credentialFailed to get ftp fileFailed to get initiate GFAL2 parameter handle: %sFailed to get initiate new GFAL2 context: %sFailed to get load average: %sFailed to get private keyFailed to get public keyFailed to get public key from RSA objectFailed to get public key from X509 objectFailed to identify grid-manager config fileFailed to import X509 certificate into NSS DBFailed to import certificate from file: %sFailed to import private keyFailed to import private key from file: %sFailed to initialize LCASFailed to initialize LCMAPSFailed to initialize OpenSSL libraryFailed to initialize PKCS12 file: %sFailed to initialize X509 structureFailed to initialize accounting databaseFailed to initialize extensions member for CredentialFailed to initialize main Python threadFailed to initialize the credential configurationFailed to initiate cacheFailed to initiate client connectionFailed to initiate delegation 
credentialsFailed to insert AAR into the database for job %sFailed to limit socket to IPv6 at %s:%s - may cause errors for IPv4 at same portFailed to limit socket to IPv6 at TCP port %s - may cause errors for IPv4 at same portFailed to limit socket to IPv6: %sFailed to listen at %s:%s(%s): %sFailed to listen at TCP port %s(%s): %sFailed to listen on socket(%s): %sFailed to load client configurationFailed to load extension section: %sFailed to load grid-manager config fileFailed to load grid-manager config file from %sFailed to load grid-manager configfileFailed to load plugin for URL %sFailed to load policy evaluator for policy of job %sFailed to load private keyFailed to load service configurationFailed to load service configuration from any default config fileFailed to load service configuration from file %sFailed to load service side MCCsFailed to lock arccredential library in memoryFailed to lock arccrypto library in memoryFailed to lock delegated credentials: %sFailed to make symbolic link %s to %s : %sFailed to move %s to %s: %sFailed to move file %s to %sFailed to new arenaFailed to notify serviceFailed to obtain OpenSSL identifier for %sFailed to obtain bytes transferred: %sFailed to obtain delegation locks for cleaning orphaned locksFailed to obtain information about fileFailed to obtain listing from FTP: %sFailed to obtain local address for %s:%s - %sFailed to obtain local address for port %s - %sFailed to obtain local address: %sFailed to obtain lock on cache file %sFailed to obtain own address: %sFailed to obtain resource description: %sFailed to obtain stat from FTP: %sFailed to obtain state of jobFailed to obtain valid stagein URL for input filesFailed to open %s for reading: %sFailed to open %s, trying to create parent directoriesFailed to open data channelFailed to open directory %s: %sFailed to open file %sFailed to open file with DH parameters for readingFailed to open heartbeat file %sFailed to open input certificate file %sFailed to open log file %sFailed to open log file: %sFailed to open output file '%s'Failed to open p12 fileFailed to open stdio channel %dFailed to open stdio channel %sFailed to output the certificate request as ASCII formatFailed to output the certificate request as DER formatFailed to parse ACIX response: %sFailed to parse HTTP headerFailed to parse Rucio response: %sFailed to parse SAML Token from incoming SOAPFailed to parse Username Token from incoming SOAPFailed to parse VOMS command: %sFailed to parse X509 Token from incoming SOAPFailed to parse certificate request from CSR file %sFailed to parse command line optionsFailed to parse configuration file %sFailed to parse remote address %sFailed to parse requested VOMS lifetime: %sFailed to parse requested VOMS server port number: %sFailed to parse user policy for job %sFailed to postregister destination %sFailed to pre-clean destination: %sFailed to preallocate space for %sFailed to prepare destinationFailed to prepare destination: %sFailed to prepare job descriptionFailed to prepare job description to target resources.Failed to prepare job description.Failed to prepare sourceFailed to prepare source: %sFailed to preregister destination: %sFailed to process A-REX configuration in %sFailed to process VOMS configuration or no suitable configuration lines found.Failed to process configuration in %sFailed to process job: %sFailed to process job: %s - %s %sFailed to process jobs - failed to parse responseFailed to process jobs - wrong response: %uFailed to process security attributes in TLS MCC for 
incoming messageFailed to query AAR database ID for job %sFailed to query ACIX: %sFailed to query state: %sFailed to read attribute %x from private key.Failed to read cache meta file %sFailed to read certificate file: %sFailed to read data from input fileFailed to read database schema file at %sFailed to read file %sFailed to read file with DH parametersFailed to read input certificate fileFailed to read job's ACL for job %s from %sFailed to read job's local description for job %s from %sFailed to read object %s: %sFailed to read private key file: %sFailed to read proxy file: %sFailed to read request from a fileFailed to read request from a stringFailed to recognize own address type (IPv4 or IPv6) - %uFailed to register any bufferFailed to register destination replica: %sFailed to register new file/destination: %sFailed to register plugin for state %sFailed to release GSS credential (major: %d, minor: %d):%s:%sFailed to release completed requestFailed to release lock on cache file %sFailed to release lock on file %sFailed to remove .meta file %s: %sFailed to remove all physical instancesFailed to remove cache per-job dir %s: %sFailed to remove existing hard link at %s: %sFailed to remove existing symbolic link at %s: %sFailed to remove file %s: %sFailed to remove instanceFailed to remove lock on %s. Some manual intervention may be requiredFailed to remove stale lock file %s: %sFailed to remove temporary proxy %s: %sFailed to rename URLFailed to renew proxyFailed to resolve %sFailed to resolve %s (%s)Failed to resolve destination: %sFailed to resolve source: %sFailed to retrieve application data from OpenSSLFailed to retrieve link to TLS stream. Additional policy matching is skipped.Failed to retrieve private key for issuerFailed to run Grid Manager threadFailed to run command: %sFailed to run configuration parser at %s.Failed to run external pluginFailed to run external plugin: %sFailed to send cancel request: %sFailed to send content of bufferFailed to set GFAL2 monitor callback: %sFailed to set GFAL2 transfer timeout, will use default: %sFailed to set INTERNAL endpointFailed to set LFC replicas: %sFailed to set credentials for GridFTP transferFailed to set executable bit on file %sFailed to set executable bit on file %s: %sFailed to set overwrite option in GFAL2: %sFailed to set permissions on: %sFailed to set signature algorithm IDFailed to set the pubkey for X509 object by using pubkey from X509_REQFailed to set up credential delegation with %sFailed to shut down SSL: %sFailed to sign encoded certificate dataFailed to sign proxyFailed to sign the certificate requestFailed to sign the proxy certificateFailed to stage file(s)Failed to start archival threadFailed to start cache clean scriptFailed to start certificate extensionFailed to start data staging threadsFailed to start listening on any address for %s:%sFailed to start listening on any address for %s:%s(IPv%s)Failed to start new DTR for %sFailed to start new thread for monitoring job requestsFailed to start new thread: cache won't be cleanedFailed to start querying the endpoint on %sFailed to start querying the endpoint on %s (unable to create sub-thread)Failed to start reading from source: %sFailed to start thread for communicationFailed to start thread for listeningFailed to start timer thread - timeout won't workFailed to start transfer request: %sFailed to start writing to cacheFailed to start writing to destination: %sFailed to stat session dir %sFailed to stat source %sFailed to store application dataFailed to store ftp 
fileFailed to submit all jobs.Failed to submit all jobs: %sFailed to submit all jobs: %s %sFailed to submit all jobs: %u %sFailed to submit jobFailed to submit job description: %sFailed to submit job description: EMIESFault(%s , %s)Failed to submit job description: UnexpectedError(%s)Failed to switch user id to %d/%dFailed to terminate LCASFailed to terminate LCMAPSFailed to transfer dataFailed to unlock file %s: %s. Manual intervention may be requiredFailed to unlock file with lock %s: %sFailed to unregister pre-registered destination %s. You may need to unregister it manuallyFailed to unregister pre-registered destination %s: %s. You may need to unregister it manuallyFailed to unregister preregistered lfn, You may need to unregister it manuallyFailed to unregister preregistered lfn. You may need to unregister it manuallyFailed to unregister preregistered lfn. You may need to unregister it manually: %sFailed to update AAR in the database for job %sFailed to verify X509 Token inside the incoming SOAPFailed to verify the requestFailed to verify the signature under Failed to verify the signature under Failed to verify the signed certificateFailed to wait for job to allow stage inFailed to write 'local' informationFailed to write RTEs information for the job %sFailed to write authtoken attributes for job %sFailed to write body to output streamFailed to write data transfers information for the job %sFailed to write event records for job %sFailed to write header to output streamFailed to write job information to database (%s)Failed to write object %s: %sFailed to write request into a fileFailed to write request into stringFailed to write signed EEC certificate into a fileFailed to write signed proxy certificate into a fileFailed to write to local job list %sFailed updating timestamp on cache lock file %s for file %s: %sFailed uploading file %s to %s: %sFailed uploading local input filesFailed uploading local input files to %sFailed while finishing reading from sourceFailed while finishing writing to destinationFailed while reading from sourceFailed while transferring dataFailed while waiting for connection requestFailed while waiting for connection to %s(%s):%i - %sFailed while writing to destinationFailed writing ACLFailed writing job descriptionFailed writing local descriptionFailed writing statusFailure in parsing response from server - some information may be inaccurateFailure: %sFeature is not implementedFetch: response body: %sFetch: response code: %u %sFetching resource description from %sFile %s is NEARLINE, will make request to bring onlineFile %s is already cached at %s under a different URL: %s - this file will not be cachedFile %s is already cached at %s under a different URL: %s - will not add DN to cached listFile %s is cached (%s) - checking permissionsFile %s removed successfullyFile '%s' in the 'executables' attribute is not present in the 'inputfiles' attributeFile already exists: %sFile could not be moved to Done stateFile could not be moved to Running state: %sFile delete failed, attempting directory deleteFile delete failed, attempting directory delete for %sFile download failed: %sFile is cacheable, will check cacheFile is currently being cached, will wait %isFile is not accessible %s: %sFile is not accessible: %sFile is not cacheable, skipping cache processingFile is not cacheable, was requested not to be cached or no cache available, skipping cache checkFile is ready! 
TURL is %sFile is smaller than %llu bytes, will use local deliveryFile type is not available, attempting file deleteFileNode: operator= (%s <- %s) %lu <- %luFilePlugin: more unload than loadFilename not returned in Rucio response: %sFiles associated with request token %s aborted successfullyFiles associated with request token %s put done successfullyFiles associated with request token %s released successfullyFileset copy to single object is not supported yetFileset registration is not supported yetFinding existing destination replicasFinishWriting: looking for metadata: %sFinishWriting: obtained checksum: %sFinished successfullyFirst stage of registration to index service failedFirst value of 'inputfiles' attribute (filename) cannot be emptyFirst value of 'outputfiles' attribute (filename) cannot be emptyFor registration source must be ordinary URL and destination must be indexing serviceFor the 1st test job you also have to specify a runtime value with -r (--runtime) option.Force-checking source of cache file %sForcing re-download of file %sFork failed: %sFound %s %s (it was loaded already)Found %s in cacheFound DTR %s for file %s left in transferring state from previous runFound VOMS AC attribute: %sFound a registry, will query it recursively: %sFound existing token for %s in Rucio token cache with expiry time %sFound none or multiple URLs (%s) in ACIX URL: %sFound service endpoint %s (type %s)Found started or successful endpoint (%s)Found suspended endpoint (%s)Found the following jobs:Found the following new jobs:Found unexpected empty lock file %s. Must go back to acquire()Found unfinished DTR transfers. It is possible the previous A-REX process did not shut down normallyFree slots grouped according to time limits (limit: free slots):Free slots: %iFull string not used: %sFunction : %sGACL Auth. request: %sGET: id %s path %sGenerate new X509 request!Generated EMIES target: %sGenerating %s job description outputGenerating EMIES targetsGenerating ceID prefix from hostname automaticallyGenerator startedGeneric errorGet delegated credential from delegation service: %sGet from cache: Cached file is lockedGet from cache: Error in cache configurationGet from cache: File not in cacheGet from cache: Invalid URL %sGet from cache: Looking in cache for %sGet from cache: could not access cached file: %sGet request %s is still in queue, should wait %i secondsGet: there is no job %s - %sGetting currect timestamp for BLAH parser log: %sGetting delegation credential from ARC delegation serviceGlobus connection errorGlobus error: %sGlobus handle is stuckGlobus location variable substitution is not supported anymore. 
Please specify path directly.Grid identity is mapped to local identity '%s'HEAD: id %s path %sHER: %sHTTP Error: %d %sHTTP failure %u - %sHTTP with SAML2SSO invocation failedHTTP:PUT %s: put file %s: %sHandle is not in proper state %u/%uHave %i requests to processHave connections: %i, max: %iHead: there is no job %s - %sHealth state info: %sHealth state: %sHealthState of ExecutionTarget (%s) is not OK (%s)Help Options:Helper process start failed: %sHelper program is missingHomogeneous resourceID contains forbidden charactersID: %sINI config file %s does not existINTERNALClient is not initializedId= %s,Type= %s,Issuer= %s,Value= %sIdP return some error message: %sIdentity is %sIdentity name: %sIdentity: %sIf the proxy or certificate/key does exist, you can manually specify the locations via environment variables '%s'/'%s' or '%s', or the '%s'/'%s' or '%s' attributes in the client configuration file (e.g. '%s')If you specify a policy you also need to specify a policy languageIgnoring endpoint (%s), it is already registered in retriever.Ignoring job (%s), already tried and were unable to load JobControllerPluginIgnoring job (%s), the job management URL is unknownIgnoring job (%s), the job status URL is unknownIgnoring job (%s), the management interface name is unknownIgnoring job (%s), the status interface name is unknownIgnoring job (%s), unable to load JobControllerPlugin for %sIgnoring job, the job ID is emptyIllegal URL - closing ] for IPv6 address is followed by illegal token: %sIllegal URL - no closing ] for IPv6 address found: %sIllegal URL - no hostname given: %sIllegal URL - path must be absolute or empty: %sIllegal URL - path must be absolute: %sIllegal jobID specified (%s)Illegal time format: %sImmediate completion expectedImmediate completion expected: %sImmediate completion: %sImplementation name: %sImplementor: %sImproper debug level '%s'In the available CRL the lastUpdate field is not validIn the available CRL, the nextUpdate field is not validIn the configuration profile the 'initype' attribute on the "%s" element has a invalid value "%s".Incoming Message is not SOAPIncompatible options --nolist and --forcelist requestedInconsistent metadataIndependent proxy - no rights grantedInformation endpointInformation item '%s' is not knownInformational document is emptyInit failedInitialized %u-th Python serviceInitiating delegation procedureInput is not SOAPInput is without trailer Input request from a file: Request.xmlInput request from codeInput: metadata: %sInstalled application environments:Interface (%s) specified, submitting only to that interfaceInterface extensions:Interface on endpoint (%s) %s.Interface versions:Interface: %sInterfaceNameInternal transfer method is not supported for %sInvalid DTRInvalid DTR for source %s, destination %sInvalid EffectInvalid HTTP object can't produce resultInvalid ID: %sInvalid ISO duration format: %sInvalid JobDescription:Invalid JobDescription: %sInvalid URL '%s' for input file '%s'Invalid URL '%s' for output file '%s'Invalid URL option syntax in option '%s' for input file '%s'Invalid URL option syntax in option '%s' for output file '%s'Invalid URL option: %sInvalid URL: %sInvalid URL: '%s' in input file '%s'Invalid URL: '%s' in output file '%s'Invalid action value %sInvalid class nameInvalid class name. 
The broker argument for the PythonBroker should be Filename.Class.args (args is optional), for example SampleBroker.MyBrokerInvalid comparison operator '%s' used at 'delegationid' attribute, only "=" is allowed.Invalid comparison operator '%s' used at 'queue' attribute in 'GRIDMANAGER' dialect, only "=" is allowedInvalid comparison operator '%s' used at 'queue' attribute, only "!=" or "=" are allowed.Invalid configuration - no allowed IP address specifiedInvalid configuration - no transfer dirs specifiedInvalid credentials, please check proxy and/or CA certificatesInvalid destination URL %sInvalid download destination path specified (%s)Invalid job descriptionInvalid lock on file %sInvalid log level. Using default %s.Invalid nodeaccess value: %sInvalid old log level. Using default %s.Invalid period string: %sInvalid port number in %sInvalid stage out path specified (%s)Invalid url: %sIssuer CA: %sIssuer: %sIt is impossible to mix ARC6 target selection options with legacy options. All legacy options will be ignored!It is not possible to resubmit jobs without new target information discoveryJWSE::ExtractPublicKey: external jwk keyJWSE::ExtractPublicKey: fetching jwl key from %sJWSE::ExtractPublicKey: jwk keyJWSE::ExtractPublicKey: key parsing errorJWSE::ExtractPublicKey: no supported keyJWSE::ExtractPublicKey: x5c keyJWSE::Input: JWE: not supported yetJWSE::Input: JWS content: %sJWSE::Input: JWS: signature algorithm: %sJWSE::Input: JWS: signature verification failedJWSE::Input: JWS: token too oldJWSE::Input: JWS: token too youngJWSE::Input: header: %sJWSE::Input: token: %sJob %s does not report a resumable stateJob %s failed to renew delegation %s - %s.Job %s failed to renew delegation %s.Job %s has no delegation associated. Can't renew such job.Job %s not foundJob %s: Some downloads failedJob %s: all files downloaded successfullyJob %s: files still downloadingJob ID argument is required.Job cancelling successfulJob cleaning successfulJob database connection established successfully (%s)Job database created successfully (%s)Job deleted: %sJob description file could not be read.Job description language is not specified, unable to output description.Job description languages supported by %s:Job description to be sent to %s:Job description to be sent: %sJob description: %sJob descriptions:Job did not finished successfully. 
Message will not be written to BLAH log.Job download directory from user configuration file: %sJob download directory will be created in present working directory.Job download directory: %sJob failed on service sideJob has not started yet: %sJob information not found in the information system: %sJob list file (%s) doesn't existJob list file (%s) is not a regular fileJob list file cannot be created: %s is not a directoryJob list file cannot be created: The parent directory (%s) doesn't exist.Job migration aborted, no resource returned any informationJob migration aborted, unable to load broker (%s)Job migration failed for job (%s), no applicable targetsJob nr.Job plugin was not initialisedJob resubmission aborted because no resource returned any informationJob resubmission failed: Unable to load broker (%s)Job resubmission summary:Job resuming successfulJob submission summary:Job submission user can't be rootJob submission user: %s (%i:%i)Job submitted with jobid: %sJob timestamp successfully parsed as %sJob: %sJob: %s : Cancel request put and communicated to serviceJob: %s : Cancel request put but failed to communicate to serviceJob: %s : Clean request put and communicated to serviceJob: %s : Clean request put but failed to communicate to serviceJob: %s : ERROR : Failed to put cancel markJob: %s : ERROR : Failed to put clean markJob: %s : ERROR : No local information.Job: %s : ERROR : Unrecognizable stateJobControllerPlugin %s could not be createdJobControllerPlugin plugin "%s" not found.JobDescription class is not an objectJobDescriptionParserPlugin %s could not be createdJobDescriptionParserPlugin plugin "%s" not found.Jobs left to query: %dJobs missing information will not be cleaned!Jobs processed: %d, deleted: %dJobs processed: %d, renewed: %dJobs processed: %d, resumed: %dJobs processed: %d, successfully killed: %dJobs processed: %d, successfully killed: %d, successfully cleaned: %dJobs processed: %d, successfully retrieved: %dJobs processed: %d, successfully retrieved: %d, successfully cleaned: %dJunk at end of RSLJunk in sessiondir commandKill failedKilled with signal: Killing connection due to timeoutLCMAPS did not return any GIDLCMAPS did not return any UIDLCMAPS has getCredentialDataLCMAPS has lcmaps_runLCMAPS returned UID which has no username: %uLCMAPS returned invalid GID: %uLCMAPS returned invalid UID: %uLDAP connection already open to %sLDAP query timed out: %sLDAPQuery: Getting results from %sLDAPQuery: Initializing connection to %s:%dLDAPQuery: Querying %sLIST/MLST failedLIST/MLST failed: %sLanguage (%s) not recognized by any job description parsers.Last stage of registration to index service failedLatitude: %fLdap bind timeout (%s)Left operand for RSL concatenation does not evaluate to a literalLegacy options set for defining targetsLegacyMap: no configurations blocks definedLegacyPDP: ARC Legacy Sec Attribute not recognized.LegacyPDP: there is no %s Sec Attribute defined. 
Probably ARC Legacy Sec Handler is not configured or failed.LegacySecHandler: configuration file not specifiedLibrary : %sLine %d.%d of the attributes returned: %sLinking MCC %s(%s) to MCC (%s) under %sLinking MCC %s(%s) to Plexer (%s) under %sLinking MCC %s(%s) to Service (%s) under %sLinking Plexer %s to MCC (%s) under %sLinking Plexer %s to Plexer (%s) under %sLinking Plexer %s to Service (%s) under %sLinking local fileLinking mapped fileLinking/copying cached fileLinking/copying cached file to %sList failedList functionality is not supported for RESTful VOMS interfaceList functionality is not supported for legacy VOMS interfaceList will stat the URL %sListFiles: looking for metadata: %sListen failedListen finishedListen startedListening on %s:%s(%s)Listening on TCP port %s(%s)Listing jobs succeeded, %d jobs foundListing localjobs succeeded, %d localjobs foundLoadable module %s contains no requested plugin %s of kind %sLoaded %sLoaded %s %sLoaded JobControllerPlugin %sLoaded JobDescriptionParserPlugin %sLoaded MCC %s(%s)Loaded Plexer %sLoaded Service %s(%s)Loaded SubmitterPlugin %sLoading %u-th Python serviceLoading OToken failed - ignoring its presenceLoading Python broker (%i)Loading configuration (%s)Local group %s does not existLocal running jobs: %iLocal suspended jobs: %iLocal user %s does not existLocal waiting jobs: %iLocation URI for file %s is invalidLocation already existsLocations are missing in destination LFC URLLock %s is owned by a different host (%s)Lock file %s doesn't existLongitude: %fLooking for current jobsLooking up URL %sLooking up source replicasMCC %s(%s) - next %s(%s) has no targetMIME is not suitable for SOAP: %sMLSD is not supported - trying NLSTMLST is not supported - trying LISTMain Python thread is not initializedMain Python thread was not initializedMain memory size: %iMalformed ARCHERY record found (endpoint type is not defined): %sMalformed ARCHERY record found (endpoint url is not defined): %sMalformed VOMS AC attribute %sMapfile at %s can't be opened.Mapped to local group id: %iMapped to local group name: %sMapped to local id: %iMapped to running user: %sMapped user's home: %sMapped user:group (%s:%s) not foundMapping %s to %sMapping policy option has empty valueMapping policy:Mapping queue: %sMatch issuer: %sMatch vo: %sMatched nothingMatched: %s %s %sMatched: %s %s %s %sMatchmaking, %s (%d) is %s than %s (%d) published by the ExecutionTarget.Matchmaking, Benchmark %s is not published by the ExecutionTarget.Matchmaking, CacheTotal problem, ExecutionTarget: %d MB (CacheTotal); JobDescription: %d MB (CacheDiskSpace)Matchmaking, Computing endpoint requirement not satisfied. 
ExecutionTarget: %sMatchmaking, ConnectivityIn problem, ExecutionTarget: %s (ConnectivityIn) JobDescription: %s (InBound)Matchmaking, ConnectivityOut problem, ExecutionTarget: %s (ConnectivityOut) JobDescription: %s (OutBound)Matchmaking, ExecutionTarget: %s, OperatingSystem is not definedMatchmaking, ExecutionTarget: %s, CacheTotal is not definedMatchmaking, ExecutionTarget: %s, HealthState is not definedMatchmaking, ExecutionTarget: %s, ImplementationName is not definedMatchmaking, ExecutionTarget: %s, MaxDiskSpace and WorkingAreaFree are not definedMatchmaking, ExecutionTarget: %s, MaxTotalCPUTime or MaxCPUTime not defined, assuming no CPU time limitMatchmaking, ExecutionTarget: %s, MinCPUTime not defined, assuming no CPU time limitMatchmaking, ExecutionTarget: %s, NetworkInfo is not definedMatchmaking, ExecutionTarget: %s, Platform is not definedMatchmaking, ExecutionTarget: %s, RunTimeEnvironment requirements not satisfiedMatchmaking, ExecutionTarget: %s, TotalSlots and MaxSlotsPerJob are not definedMatchmaking, ExecutionTarget: %s, WorkingAreaLifeTime is not definedMatchmaking, ExecutionTarget: %s matches job descriptionMatchmaking, ExecutionTarget: %s, ApplicationEnvironments not definedMatchmaking, ExecutionTarget: %s, MaxMainMemory and MainMemorySize are not definedMatchmaking, ExecutionTarget: %s, MaxVirtualMemory is not definedMatchmaking, ExecutionTarget: %s, OperatingSystem requirements not satisfiedMatchmaking, MainMemorySize problem, ExecutionTarget: %d (MainMemorySize), JobDescription: %d (IndividualPhysicalMemory)Matchmaking, MaxCPUTime problem, ExecutionTarget: %d (MaxCPUTime), JobDescription: %d (TotalCPUTime/NumberOfSlots)Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); JobDescription: %d MB (DiskSpace)Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); JobDescription: %d MB (SessionDiskSpace)Matchmaking, MaxMainMemory problem, ExecutionTarget: %d (MaxMainMemory), JobDescription: %d (IndividualPhysicalMemory)Matchmaking, MaxSlotsPerJob problem, ExecutionTarget: %d (MaxSlotsPerJob) JobDescription: %d (NumberOfProcesses)Matchmaking, MaxTotalCPUTime problem, ExecutionTarget: %d (MaxTotalCPUTime), JobDescription: %d (TotalCPUTime)Matchmaking, MaxVirtualMemory problem, ExecutionTarget: %d (MaxVirtualMemory), JobDescription: %d (IndividualVirtualMemory)Matchmaking, MinCPUTime problem, ExecutionTarget: %d (MinCPUTime), JobDescription: %d (TotalCPUTime/NumberOfSlots)Matchmaking, NetworkInfo demand not fulfilled, ExecutionTarget do not support %s, specified in the JobDescription.Matchmaking, Platform problem, ExecutionTarget: %s (Platform) JobDescription: %s (Platform)Matchmaking, The %s scaled %s (%d) is %s than the %s (%d) published by the ExecutionTarget.Matchmaking, TotalSlots problem, ExecutionTarget: %d (TotalSlots) JobDescription: %d (NumberOfProcesses)Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB (WorkingAreaFree); JobDescription: %d MB (DiskSpace)Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB (WorkingAreaFree); JobDescription: %d MB (SessionDiskSpace)Matchmaking, WorkingAreaLifeTime problem, ExecutionTarget: %s (WorkingAreaLifeTime) JobDescription: %s (SessionLifeTime)Max CPU time: %sMax disk space: %iMax memory: %iMax pre-LRMS waiting jobs: %iMax running jobs: %iMax slots per job: %iMax stage in streams: %iMax stage out streams: %iMax total jobs: %iMax total wall-time: %sMax user running jobs: %iMax virtual memory: %iMax waiting jobs: %iMax wall-time: %sMaximum number of threads running - 
putting new request into queueMemory allocation errorMessage class is not an objectMessage sent to VOMS server %s is: %sMeta info of source and location do not match for %sMetadata of replica and index service differMetadata of source and destination are differentMetadata of source does not match existing destination. Use the --force option to override this.Min CPU time: %sMin wall-time: %sMissing CA subject in Globus signing policyMissing CertificatePath element or ProxyPath element, or is missingMissing Host in Connect elementMissing Port in Connect elementMissing Port in Listen elementMissing VO in configurationMissing argumentMissing audience in configurationMissing authentication informationMissing authgroup name in allowaccessMissing authgroup name in denyaccessMissing cancel-%s-job - job cancellation may not workMissing capabilities in configurationMissing condition subjects in Globus signing policyMissing data in DER encoded PROXY_CERT_INFO_EXTENSION extensionMissing directory in controldir commandMissing file name in [arex/jura] logfileMissing final reply: %sMissing group in configurationMissing information in reply: %sMissing issuer in configurationMissing name of LCAS libraryMissing name of LCMAPS libraryMissing number in maxjobsMissing option for command logreopenMissing or empty CertificatePath elementMissing or empty CertificatePath or CACertificatesDir elementMissing or empty CertificatePath or CACertificatesDir element; will only check the signature, will not do message authenticationMissing or empty KeyPath elementMissing or empty KeyPath element, or is missingMissing or empty PasswordSource elementMissing or empty Username elementMissing path of credentials fileMissing reference to factory and/or module. Currently safe unloading of LDAP DMC is not supported. Report to developers.Missing reference to factory and/or module. It is unsafe to use Globus in non-persistent mode - (Grid)FTP code is disabled. Report to developers.Missing reference to factory and/or module. It is unsafe to use Globus in non-persistent mode - SubmitterPlugin for GRIDFTPJOB is disabled. Report to developers.Missing reference to factory and/or module. It is unsafe to use Xrootd in non-persistent mode - Xrootd code is disabled. Report to developers.Missing response from delegation endpoint.Missing role in configurationMissing scan-%s-job - may miss when job finished executingMissing schema! 
Skipping validation...Missing scope in configurationMissing security object in messageMissing subject in configurationMissing subject nameMissing submit-%s-job - job submission to LRMS may not workModule %s contains no plugin %sModule %s contains no requested plugin %s of kind %sModule %s does not contain plugin(s) of specified kind(s)Module %s failed to reload (%s)Module %s is not an ARC plugin (%s)Module Manager InitModule Manager Init by ModuleManager::setCfgModule name: %sMount point %sMoving to end of data stagingMulti-request operator only allowed at top levelMultiple %s attributes in configuration file (%s)MyProxy failure: %sMyproxy server did not return proxy with VOMS AC includedNEW: put new job: max jobs total limit reachedNEW: put new job: there is no payloadNLST/MLSD failedNLST/MLSD failed: %sNSS database to be accessed: %s NSS initialization failed on certificate database: %sNULL BIO passed to InquireRequestNULL callback for %sName of grami fileName: %sNegative rights are not supported in Globus signing policyNeither source nor destination are index services, will skip resolving replicasNeither source nor destination were staged, skipping releasing requestsNetwork information:New connectionNew endpoint is created (%s) from the one with the unspecified interface (%s)New job accepted with id %sNew limit for vector queries returned by EMI ES service: %dNew proxy expires at %sNew proxy expiry time is not later than old proxy, not renewing proxyNo A-REX config file found in candypond configurationNo Attribute exists, which can deal with type: %sNo Connect element specifiedNo FQAN found. Using None as userFQAN valueNo HTTP response from VOMS serverNo LRMS set in configurationNo RSL content in job description foundNo SOAP responseNo SOAP response from Delivery service %sNo SOAP response from delivery serviceNo active DTR %sNo active job id %sNo arguments are assigned for external processNo authorization response was returnedNo cache directory specifiedNo cachedirs found/configured for calculation of free space.No caches defined in configurationNo callback for %s definedNo checksum information from serverNo checksum information possibleNo checksum information returned in Rucio response for %sNo checksum verification possibleNo configuration file could be loaded.No control directory set in configurationNo control or session directories defined in configurationNo credentials suppliedNo delegated credentials were passedNo delegation policies in this context and message - passing throughNo delegation token in requestNo delivery endpoints available, will try laterNo destination definedNo draining cache directory specifiedNo errorNo files to retrieve for job %sNo filesize information returned in Rucio response for %sNo group %i for mapped userNo job ID suppliedNo job description file name provided.No job description input specifiedNo job description parser was able to interpret job descriptionNo job description parsers availableNo job description parsers suitable for handling '%s' language are availableNo jobdescription resulted at %d testNo jobsNo jobs found, try laterNo jobs givenNo jobs to resubmit with the specified statusNo left operand for concatenation operatorNo listening ports initiatedNo local account name specifiedNo local user mapping foundNo locations defined for %sNo locations for %sNo locations for destination different from source foundNo locations for destination different from source found: %sNo locations for destination found: %sNo locations for source found: %sNo 
locations found - probably no more physical instancesNo locations found for %sNo locations left for %sNo match found in cache access rules for %sNo more %s replicasNo more interfaces to try for endpoint %s.No more replicas, will use %sNo need to stage source or destination, skipping stagingNo new informational document assignedNo next MCC or Service at path "%s"No next element in the chainNo non-draining session directories availableNo non-draining session dirs availableNo overwrite requested or allowed, skipping pre-cleaningNo pfns returned in Rucio response: %sNo physical files found for destinationNo physical files found for sourceNo pid file is found at '%s'. Probably A-REX is not running.No plugin is configured or authorised for requested path %sNo policy file or DNs specified for simplelist.pdp, please set location attribute or at least one DN element for simplelist PDP node in configuration.No port succeeded for %sNo private key with nickname %s exist in NSS databaseNo proxy foundNo proxy providedNo queue name given in queue block nameNo read-only cache directory specifiedNo remote delivery services are useable, forcing local deliveryNo replicas found for %sNo request token specified!No request tokens foundNo requested security information was collectedNo response from %sNo response from AA service %sNo response returned: %sNo results returned from statNo right operand for concatenation operatorNo security processing/check requested for '%s'No server config part of config fileNo services specified. Please configure default services in the client configuration, or specify a cluster or index (-c or -g options, see arcsync -h).No session directories found in configuration.No session directory foundNo session directory set in configurationNo source definedNo space token specifiedNo space tokens found matching description %sNo stagein URL is providedNo stream response from VOMS serverNo such DTR %sNo such file or directoryNo such group: %sNo such user: %sNo target available inside the policyNo target available inside the ruleNo test-job with ID %d found.No test-job, with ID "%d"No usable cachesNo user certificate by nickname %s foundNo user-certificate foundNo username suppliedNo valid caches found in configuration, caching is disabledNo valid credentials found, exitingNo valid handles left for listeningNo valid job identifier returned by EMI ESNo valid location availableNo valid response from VOMS server: %sNo value provided for Subject Attribute %s skippedNon-homogeneous resourceNone of the requested transfer protocols are supportedNot authorizedNot authorized according to request: %sNot authorized by arc.pdp - failed to get response from EvaluatorNot authorized by arc.pdp - some of the RequestItem elements do not satisfy PolicyNot authorized from simplelist.pdp: %sNot enough parameters in copyurlNot enough parameters in linkurlNot enough space to store fileNot found %s in cacheNot getting checksum of zip constituentNot listening to anythingNot using delivery service %s due to previous failureNot using delivery service at %s because it is fullNot valid destinationNot valid sourceNothing to do: you have to either specify a test job id with -J (--job) or query information about the certificates with -E (--certificate) Notify failedNow copying (from -> to)Number %d is with nickname: %s%sNumber %d is: %sNumber of ComputingService elements obtained from full document and XPath query do not match: %d != %dNumber of OpenSSL locks changed - reinitializingNumbers of sources and destinations do 
not matchOPTION...OS family: %sOS name: %sOS version: %sOTokens: Attr: %s = %sOTokens: Attr: messageOTokens: Attr: token: %sOTokens: Attr: token: bearer: %sOTokens: HandleOTokens: Handle: attributes created: subject = %sOTokens: Handle: messageObject is not suitable for listingObject not initialized (internal error)Obtained XML: %sObtained host and address are not acceptableObtaining information failedObtaining status failedOnly POST is supported in CandyPondOnly POST is supported in DataDeliveryServiceOnly Raw Buffer payload is supported for outputOnly globus rights are supported in Globus signing policy - %s is not supportedOnly signing rights are supported in Globus signing policy - %s is not supportedOnly standard input is currently supported for password source.Only user '.' for helper program is supportedOpenSSL error -- %sOpenSSL error string: %sOperating System errorOperation cancelled successfullyOperation completed successfullyOperation not supported for this kind of URLOperation on path "%s"OptimizedInformationContainer created temporary file: %sOptimizedInformationContainer failed to create temporary fileOptimizedInformationContainer failed to parse XMLOptimizedInformationContainer failed to rename temprary fileOptimizedInformationContainer failed to store XML document to temporary fileOption: %sOptions 'p' and 'n' can't be used simultaneouslyOptions Group %s:Options for plugin are missingOriginal job description is listed below:Orphan delegation lock detected (%s) - cleaningOther actionsOut of memory when generate random serialOut of retriesOut of tries while allocating new job IDOut of tries while allocating new job ID in %sOutgoing Message is not SOAPOutput EEC certificateOutput format modifiersOutput the proxy certificateOverwrite requested - will pre-clean destinationOwner: %sPASV failedPASV failed: %sPDP: %s (%s)PDP: %s (%s) can not be loadedPDP: %s can not be loadedPDP: missing name attributePDPD location is missingPDPD location: %sPEM_read_bio_X509_REQ failedPEM_write_bio_X509_REQ failedPEPD location is missingPEPD location: %sPKCS12 add password integrity failedPKCS12 output password not providedPOST request on special path is not supportedParsed domains: %uParser Context creation failed!Parser failed with error code %i.Parsing .local file to obtain job-specific identifiers and infoParsing VOMS AC to get FQANs informationPassword encoding type not supported: %sPath %s is invalid, creating required directoriesPath to .local job status file is required.Path to user's proxy file should be specified.Peer name: %sPer-job POST/SOAP requests are not supportedPerforming /* queryPerforming /ComputingService queryPerforming /Services/ComputingService queryPerforming matchmaking against target (%s).Performs neither sorting nor matchingPermanent failurePermanent service errorPermission checking failed, will try downloading without using cachePermission checking failed: %sPermission checking on original URL failed: %sPermission checking passedPermission checking passed for url %sPicking up left jobsPlace: %sPlatform: %sPlease choose the NSS database you would like to use (1-%d): Please choose the one you would use (1-%d): Plexer (%s) - next %s(%s) has no targetPlexer's (%s) next has no ID attribute definedPlugin %s error: %sPlugin %s error: %uPlugin %s failed to runPlugin %s failed to startPlugin %s for access point %s acquire failed (should never happen).Plugin %s for access point %s is broken.Plugin %s printed: %sPlugin %s printed: %uPlugin %s returned no mappingPlugin %s 
returned no usernamePlugin %s returned too much: %sPlugin %s returned: %uPlugin %s timeout after %u secondsPlugin (user mapping) command is emptyPlugin (user mapping) timeout is not a number: %sPlugin (user mapping) timeout is wrong number: %sPlugin response: %sPolicy Decision Service invocation failedPolicy is emptyPolicy is not gaclPolicy line: %sPolicy subject: %sPolicyId: %s Alg inside this policy is:-- %sPostal code: %sPre-LRMS waiting jobs: %iPre-clean failed, will still try to copyPre-registering destinationPre-registering destination in index servicePreparing to stage destinationPreparing to stage sourceProblem accessing cache file %s: %sProblem creating dtr (source %s, destination %s)Problem loading plugin %s, skipping it.Problem with index service, will proceed to end of data stagingProblem with index service, will release cache lockProcessing a %s requestProcessing thread timed out. Restarting DTRProcessing type not supported: %sProcessingStartTime (%s) specified in job description is inside the targets downtime period [ %s - %s ].Protocol plugins available:Protocol(s) not supported - please check that the relevant gfal2 plugins are installed (gfal2-plugin-* packages)Proxy certificate information:Proxy expiredProxy expired. Job submission aborted. Please run 'arcproxy'!Proxy generation failed: Certificate has expired.Proxy generation failed: Certificate is not valid yet.Proxy generation failed: Failed to create temporary file.Proxy generation failed: Failed to retrieve VOMS information.Proxy generation failed: No valid certificate found.Proxy generation failed: No valid private key found.Proxy generation succeededProxy has expiredProxy key length: %iProxy path: %sProxy signature: %sProxy stored at %sProxy subject: %sProxy type: %sProxy with ARC PolicyProxy with all rights inheritedProxy with empty policy - fail on unrecognized policyProxy with specific policy: %sProxy with unknown policy - fail on unrecognized policyProxy-subject: %sProxy/credentials stored at %sProxy: %sPut request %s is still in queue, should wait %i secondsPython Wrapper constructor succeededPython Wrapper destructor (%d)Python broker constructor called (%d)Python broker destructor called (%d)Python interpreter lockedPython interpreter releasedPython wrapper process calledPythonBroker initQuality level: %sQuery returned no elements.Query returned unexpected element: %s:%sQuerying ACIX server at %sQuerying WSRF GLUE2 computing REST endpoint.Querying batch with %d jobsQuerying source replicas in bulkQuerying status of staging requestQueue information:REST: process %s at %sREST:CLEAN job %s - %sREST:GET job %s - %sREST:KILL job %s - %sREST:PUT job %s: file %s: there is no payloadREST:RESTART job %s - %sRESTful and old VOMS communication protocols can't be requested simultaneously.RSA_generate_key_ex failedRSL substitution is not a sequenceRSL substitution sequence is not of length 2RSL substitution variable name does not evaluate to a literalRSL substitution variable value does not evaluate to a literalRandom sortingRaw command: %sRe-creating an EMI ES clientRead %i bytesRead access check failedRead access not allowed for %s: %sRead commands in authenticate failedRead request from a fileRead request from a stringReading %u bytes from byte %lluReal transfer from %s to %sReason : %sReceived DTR %s back from scheduler in state %sReceived DTR %s during Generator shutdown - may not be processedReceived invalid DTRReceived message out-of-band (not critical, ERROR level is just for debugging purposes)Received no 
DTRReceived retry for DTR %s still in transferReconnectingRecord about new job successfully added to the database (%s)Redirecting to %sRedirecting to new URL: %sRefusing connection: Connection limit exceededRegistering destination replicaRegistering directory: %s with plugin: %sRegistering dummy directory: %sRegistration of Globus FTP buffer failed - cancel checkRelation operator expectedReleasing destinationReleasing request(s) made during stagingReleasing requestsReleasing sourceRemapped to local group id: %iRemapped to local group name: %sRemapped to local id: %iRemapped to local user: %sRemapped user's home: %sRemove: deleting: %sRemoving %sRemoving endpoint %s: It has an unrequested interface (%s).Removing logical file from metadata %sRemoving metadata in %sRemoving pre-registered destination in index serviceRename: globus_ftp_client_move failedRename: timeout waiting for operation to completeRenaming %s to %sRenewal of credentials was successfulRenewing credentials for job: %sRenewing proxy for job %sReplacing DTR %s in state %s with new requestReplacing existing token for %s in Rucio token cacheReplacing old SRM info with new for URL %sReplacing queue '%s' with '%s'Replica %s doesn't match preferred pattern or URL mapReplica %s has high latency, but no more sources exist so will use this oneReplica %s has high latency, trying next sourceReplica %s has long latency, trying next replicaReplica %s is mappedReplica %s matches host pattern %sReplica %s matches pattern %sRequest failedRequest failed: No response from IdPRequest failed: No response from IdP when doing authenticationRequest failed: No response from IdP when doing redirectingRequest failed: No response from SP Service when sending SAML assertion to SPRequest failed: No response from SPServiceRequest failed: response from IdP is not as expected when doing authenticationRequest failed: response from IdP is not as expected when doing redirectingRequest failed: response from SP Service is not as expected when sending SAML assertion to SPRequest failed: response from SPService is not as expectedRequest is emptyRequest is not supported - %sRequest is reported as ABORTED, but all files are doneRequest is reported as ABORTED, since it was cancelledRequest is reported as ABORTED. Reason: %sRequest succeed!!!Request timed outRequest to open file with storing in progressRequest to push to unknown owner - %uRequest: %sRequested slots: %iRequested to skip resource discovery. 
Will try direct submission to %s and %s submission endpoint typesRequesting ComputingService elements of resource description at %sRequesting recursion and --nolist has no senseRequesting to stop job processingRequirement "%s %s" NOT satisfied.Requirement "%s %s" satisfied by "%s".Requirement "%s %s" satisfied.Reservation policy: %sResolving destination replicasResolving of index service for destination failedResolving of index service for source failedResolving source replicas in bulkResource description contains unexpected element: %s:%sResource description is emptyResource description provides URL for interface %s: %sResource description provides no URLs for interfacesResource description query validation passedResource description validation according to GLUE2 schema failed: Resource description validation passedResource information provider failed to runResource information provider failed to startResource information provider failed with exit status: %i %sResource information provider log: %sResource information provider: %sResource manager: %sResource query failedResponse is not SOAPResponse is not XMLResponse sending errorResponse: %sResponse: %sRestarting after segmentation violation.Resubmission of job (%s) succeeded, but cleaning the job failed - it will still appear in the job listResubmission of job (%s) succeeded, but killing the job failed - it will still appear in the job listResult value (0=Permit, 1=Deny, 2=Indeterminate, 3=Not_Applicable): %dResults stored at: %sResuming job: %s at state: %s (%s)Retrieving file %sRetrieving job description of EMI ES jobs is not supportedRetrieving job description of INTERNAL jobs is not supportedReturned message from VOMS server %s is: %s Returned message from VOMS server: %sReturning to generatorReusing connectionRight operand for RSL concatenation does not evaluate to a literalRucio returned %sRucio token for %s has expired or is about to expireRule: %sRule: audience: %sRule: capabilities: %sRule: group: %sRule: issuer: %sRule: role: %sRule: scope: %sRule: subject: %sRule: vo: %sRunning command: %sRunning jobs: %iRunning mailer command (%s)Running user has no nameSAML Token handler is not configuredSAML2SSO process failedSASL InteractionSOAP Request to AA service %s failedSOAP fault from delivery service at %s: %sSOAP fault: %sSOAP invocation failedSOAP operation is not supported: %sSOAP request: %sSOAP response: %sSOAP with SAML2SSO invocation failedSQL statement used: %sSQLite database error: %sSRM Client status: %sSRM did not return any informationSRM did not return any useful informationSRM returned no useful Transfer URLs: %sSSHFS mount point of cache directory (%s) is broken - waiting for reconnect ...SSHFS mount point of runtime directory (%s) is broken - waiting for reconnect ...SSHFS mount point of session directory (%s) is broken - waiting for reconnect ...SSL error: %d - %s:%s:%sSSL error: %s, libs: %s, func: %s, reason: %sSSL locks not initializedScheduler configuration:Scheduler received NULL DTRScheduler received invalid DTRScheduler received new DTR %s with source: %s, destination: %s, assigned to transfer share %s with priority %dScheduler starting upScheduler stopped, exitingScheduling policy: %sSchema validation errorScheme: %sSecHandler configuration is not definedSecHandler has no configurationSecHandler has no name attribute definedSecHandler: %s(%s)Security Handler %s(%s) could not be createdSecurity Handlers processing failedSecurity Handlers processing failed: %sSecurity check failed for incoming TLS 
messageSecurity check failed for outgoing TLS messageSecurity check failed in SOAP MCC for incoming messageSecurity check failed in SOAP MCC for incoming message: %sSecurity check failed in SOAP MCC for outgoing messageSecurity check failed in SOAP MCC for outgoing message: %sSecurity check failed in TLS MCC for incoming messageSecurity processing/check failed: %sSecurity processing/check for '%s' failed: %sSecurity processing/check for '%s' passedSecurity processing/check passedSelect failed: %sSelf-signed certificateSend response failed: %sSendCommand: Command: %sSendCommand: Failed: %sSendCommand: Response: %sSendCommand: Timed out after %d msServer SRM version: %sServer implementation: %sServer stoppedService %s(%s) could not be createdService Loop: Endpoint %sService endpoint %s (type %s) added to the list for direct submissionService endpoint %s (type %s) added to the list for resource discoveryService has no ID attribute definedService has no Name attribute definedService information:Service is waiting for requestsService side MCCs are loadedServing state: %sSession dir %s is owned by %i, but current mapped user is %iSession dir '%s' contains user specific substitutions - skipping itSession directory to useSession root directory is missingSessiondir %s: Free space %f GBSetting connections limit to %i, connections over limit will be %sSetting pbsz to %luSetting status (%s) for endpoint: %sSetting status (STARTED) for endpoint: %sSetting subject name!Setting userRequestDescription to %sShare Information:Should wait for destination to be preparedShould wait for source to be preparedShow %s help optionsShow help optionsShutdown daemonShutting down data delivery serviceShutting down data staging threadsShutting down schedulerSimpleMap: %sSimpleMap: acquired new unmap time of %u secondsSimpleMap: wrong number in unmaptime commandSkipping %s replica %sSkipping ComputingEndpoint '%s', because it has '%s' interface instead of the requested '%s'.Skipping invalid URL option %sSkipping policyAuthority VOMS AC attributeSkipping replica on local host %sSkipping retrieved job (%s) because it was submitted via another interface (%s).Skipping service: no SchemaPath found!Skipping service: no ServicePath found!Socket conversion failed: %sSockets do not match on exit %i != %iSome addresses failed. 
Listening on %u of %u.Some transfers failedSorting according to free slots in queueSorting according to input data availability at targetSorting according to specified benchmark (default "specint2000")Sorting replicas according to URL mapSorting replicas according to preferred pattern %sSource URL missingSource URL not supported: %sSource URL not valid: %sSource and/or destination is index service, will resolve replicasSource check requested but failed: %sSource is invalid URLSource is mapped to %sSource is not ready, will wait %u secondsSource is the same as destinationSource modification date: %sSource or destination requires stagingSource: %sSpecified module not found in cacheSpecified overlay file (%s) does not exist.Staging jobs: %iStaging request timed out, will release requestStaging: %sStart foregroundStart testStart waiting 10 sec...StartReadingStartReading: File was not prepared properlyStartWritingStartWriting: File was not prepared properlyStarted remote Delivery at %sStarting DTR threadsStarting controlled processStarting data staging threadsStarting helper process: %sStarting jobs processing threadStarting jobs' monitoringStarting new DTR for %sStarting querying of suspended endpoint (%s) - no other endpoints for this service is being queried or has been queried successfully.Starting sub-thread to query the endpoint on %sStarting thread to query the endpoint on %sStat: obtained modification time %sStat: obtained size %lluState name for plugin is missingStatus for service endpoint "%s" is set to inactive in ARCHERY. Skipping.Status of %d jobs was queried, %d jobs returned informationStopReading finished waiting for transfer_condition.StopReading starts waiting for transfer_condition.StopReading: aborting connectionStopWriting finished waiting for transfer_condition.StopWriting starts waiting for transfer_condition.StopWriting: Calculated checksum %sStopWriting: aborting connectionStopWriting: looking for checksum of %sStopped job processingStopping helper process %sStopping jobs processing threadStopping serverStoring file %sStoring port %i for %sStoring temp proxy at %sString successfully parsed as %s.Subject Attribute %s has no known NID, skippedSubject does not start with '/'Subject name: %sSubject of request is nullSubject to match: %sSubject: %sSubmission endpointSubmission failedSubmit: Failed sending CWD commandSubmit: Failed sending CWD new commandSubmit: Failed sending job descriptionSubmit: Failed to connectSubmit: Failed uploading local input filesSubmit: service has no suitable information interface - need org.nordugrid.ldapngSubmitterPlugin %s could not be createdSubmitterPlugin plugin "%s" not found.Submitting job Submitting test-job %d:Succeeded to add Independent OID, tag %d is returnedSucceeded to add RFC proxy OID, tag %d is returnedSucceeded to add VOMS AC sequence OID, tag %d is returnedSucceeded to add anyLanguage OID, tag %d is returnedSucceeded to add inheritAll OID, tag %d is returnedSucceeded to authenticate SAMLTokenSucceeded to authenticate UsernameTokenSucceeded to authenticate X509TokenSucceeded to change password on MyProxy serverSucceeded to change trusts to: %sSucceeded to convert PrivateKeyInfo to EVP_PKEYSucceeded to destroy credential on MyProxy serverSucceeded to export PKCS12Succeeded to generate public/private key pairSucceeded to get a proxy in %s from MyProxy server %sSucceeded to get credentialSucceeded to get info from MyProxy serverSucceeded to import certificateSucceeded to import private keySucceeded to initialize NSSSucceeded to 
load PrivateKeyInfoSucceeded to output certificate to %sSucceeded to output the certificate request into %sSucceeded to put a proxy onto MyProxy serverSucceeded to send DelegationService: %s and DelegationID: %s info to peer serviceSucceeded to sign the proxy certificateSucceeded to verify the signature under Succeeded to verify the signature under Succeeded to verify the signed certificateSupplied username %s does not match mapped username %sSupported Profiles:Supported constraints are: validityStart=time (e.g. 2008-05-29T10:20:30Z; if not specified, start from now) validityEnd=time validityPeriod=time (e.g. 43200 or 12h or 12H; if both validityPeriod and validityEnd not specified, the default is 12 hours for local proxy, and 168 hours for delegated proxy on myproxy server) vomsACvalidityPeriod=time (e.g. 43200 or 12h or 12H; if not specified, the default is the minimum value of 12 hours and validityPeriod) myproxyvalidityPeriod=time (lifetime of proxies delegated by myproxy server, e.g. 43200 or 12h or 12H; if not specified, the default is the minimum value of 12 hours and validityPeriod (which is lifetime of the delegated proxy on myproxy server)) proxyPolicy=policy content proxyPolicyFile=policy file keybits=number - length of the key to generate. Default is 2048 bits. Special value 'inherit' is to use key length of signing certificate. signingAlgorithm=name - signing algorithm to use for signing public key of proxy. Possible values are sha1, sha2 (alias for sha256), sha224, sha256, sha384, sha512 and inherit (use algorithm of signing certificate). Default is inherit. With old systems, only sha1 is acceptable. Supported information item names are: subject - subject name of proxy certificate. identity - identity subject name of proxy certificate. issuer - issuer subject name of proxy certificate. ca - subject name of CA which issued initial certificate. path - file system path to file containing proxy. type - type of proxy certificate. validityStart - timestamp when proxy validity starts. validityEnd - timestamp when proxy validity ends. validityPeriod - duration of proxy validity in seconds. validityLeft - duration of proxy validity left in seconds. vomsVO - VO name represented by VOMS attribute vomsSubject - subject of certificate for which VOMS attribute is issued vomsIssuer - subject of service which issued VOMS certificate vomsACvalidityStart - timestamp when VOMS attribute validity starts. vomsACvalidityEnd - timestamp when VOMS attribute validity ends. vomsACvalidityPeriod - duration of VOMS attribute validity in seconds. vomsACvalidityLeft - duration of VOMS attribute validity left in seconds. proxyPolicy keybits - size of proxy certificate key in bits. signingAlgorithm - algorithm used to sign proxy certificate. Items are printed in requested order and are separated by newline. If item has multiple values they are printed in same line separated by |. Supported password destinations are: key - for reading private key myproxy - for accessing credentials at MyProxy service myproxynew - for creating credentials at MyProxy service all - for any purspose. Supported password sources are: quoted string ("password") - explicitly specified password int - interactively request password from console stdin - read password from standard input delimited by newline file:filename - read password from file named filename stream:# - read password from input stream number #. Currently only 0 (standard input) is supported. 
Supports advance reservationsSupports bulk submissionSupports preemptionSuspended jobs: %iSuspending querying of endpoint (%s) since the service at the endpoint is already being queried, or has been queried.Synchronizing the local list of active jobs with the information in the information system can result in some inconsistencies. Very recently submitted jobs might not yet be present, whereas jobs very recently scheduled for deletion can still be present.Syntax error in 'notify' attribute value ('%s'), it contains unknown state flagsSyntax error in 'notify' attribute value ('%s'), it must contain an email addressSyntax error in 'notify' attribute value ('%s'), it must only contain email addresses after state flag(s)System configuration file (%s or %s) does not exist.System configuration file (%s) contains errors.System configuration file (%s) does not exist.TCP client process calledTCP executor is removedTLS provides no identity, going for OTokensTURL %s cannot be handledTarget %s does not match requested interface(s).Target %s removed by FastestQueueBroker, doesn't report number of free slotsTarget %s removed by FastestQueueBroker, doesn't report number of total slotsTarget %s removed by FastestQueueBroker, doesn't report number of waiting jobsTechnology: %sTemporary service errorTest aborted because no resource returned any informationTest failed, no more possible targetsTest submitted with jobid: %sTest was defined with ID %d, but some error occurred during parsing it.The "FreeSlotsWithDuration" attribute is wrongly formatted. Ignoring it.The "FreeSlotsWithDuration" attribute published by "%s" is wrongly formatted. Ignoring it.The 'sort' and 'rsort' flags cannot be specified at the same time.The BIO for output is NULLThe CA certificates directory is required for contacting VOMS and MyProxy servers.The CA issuer (%s) of the credentials (%s) is not trusted by the target (%s).The ComputingEndpoint doesn't advertise its Quality Level.The ComputingEndpoint doesn't advertise its Serving State.The ComputingEndpoint has no URL.The ComputingService doesn't advertise its Interface.The ComputingService doesn't advertise its Quality Level.The MyProxy period that you set: %s can't be recognized.The NSS database can not be detected in the Firefox profileThe Response is not going to this endThe Service advertises no Health State.The Service doesn't advertise its Type.The StatusCode is SuccessThe VOMS AC period that you set: %s can't be recognized.The VOMS server with the information: %s can not be reached, please make sure it is availableThe VOMS server with the information: %s can not be reached, please make sure it is available.The [vo] section labeled '%s' has no file associated and can't be used for matchingThe arccat command performs the cat command on the stdout, stderr or grid manager's error log of the job.The arcclean command removes a job from the computing resource.The arccp command copies files to, from and between grid storage elements.The arcget command is used for retrieving the results from a job.The arcinfo command is used for obtaining the status of computing resources on the Grid.The arckill command is used to kill running jobs.The arcls command is used for listing files in grid storage elements and file index catalogues.The arcmkdir command creates directories on grid storage elements and catalogs.The arcproxy command creates a proxy from a key/certificate pair which can then be used to access grid resources.The arcrename command renames files on grid storage elements.The arcrm 
command deletes files on grid storage elements.The arcstat command is used for obtaining the status of jobs that have been submitted to Grid enabled resources.The arcsub command is used for submitting jobs to Grid enabled computing resources.The arcsync command synchronizes your local job list with the information at the given resources or index servers.The arctest command is used for testing clusters as resources.The attribute information from VOMS server: %s is list as following:The available CRL has expiredThe available CRL is not yet validThe brokerarguments attribute can only be used in conjunction with the brokername attributeThe certificate with subject %s is not validThe cluster XRSL attribute is currently unsupported.The credential to be signed contains no requestThe credential to be signed is NULLThe credential's private key has already been initializedThe default configuration file (%s) is not a regular file.The delegated credential got from delegation service is stored into path: %sThe delegated credential got from path: %sThe downtime of the target (%s) is not published. Keeping target.The end time that you set: %s can't be recognized.The end time that you set: %s is before start time: %s.The endpoint (%s) is not supported by this plugin (%s)The endpoint of delegation service should be configuredThe file %s is currently locked with a valid lockThe first supported interface of the plugin %s is an empty string, skipping the plugin.The following %d were not resubmittedThe following jobs were not submitted:The interface of this endpoint (%s) is unspecified, will try all possible pluginsThe job description also can be a file or a string in ADL or XRSL format.The keybits constraint is wrong: %s.The name of the private key to delete is emptyThe old GSI proxies are not supported anymore. Please do not use -O/--old option.The payload of incoming message is emptyThe payload of outgoing message is emptyThe period that you set: %s can't be recognized.The plugin %s does not support any interfaces, skipping it.The policy file setup for simplelist.pdp does not exist, please check location attribute for simplelist PDP node in service configurationThe policy language: %s is not supportedThe private key for signing is not initializedThe process owning the lock on %s is no longer running, will remove lockThe request has passed the policy evaluationThe signing algorithm %s is not allowed,it should be SHA1 or SHA2 to sign certificate requestsThe specified Globus attribute (%s) is not supported. %s ignored.The start time that you set: %s can't be recognized.The start, end and period can't be set simultaneouslyThe subject does not match the issuer name + proxy CN entryThe validity duration of VOMS AC is shortened from %s to %s, due to the validity constraint on voms server side. 
The value of the acl XRSL attribute isn't valid XML.The value of the ftpthreads attribute must be a number from 1 to 10The value of the keysize attribute in the configuration file (%s) was only partially parsedThe value of the timeout attribute in the configuration file (%s) was only partially parsedThere are %d NSS base directories where the certificate, key, and module databases liveThere are %d RequestItemsThere are %d commands to the same VOMS server %sThere are %d requests, which satisfy at least one policyThere are %d servers with the same name: %s in your vomses file, but none of them can be reached, or can return a valid message.There are %d servers with the same name: %s in your vomses file, but none of them can be reached, or can return valid message. But proxy without VOMS AC extension will still be generated.There are %d user certificates existing in the NSS databaseThere are no endpoints in registry that match requested info endpoint typeThere are no endpoints in registry that match requested submission endpoint typeThere is %d subjects, which satisfy at least one policyThere is no Delegated X509 token in the responseThere is no Format delegated token in the responseThere is no Format request in the responseThere is no Id or X509 request value in the responseThere is no Id or X509 token value in the responseThere is no SOAP connection chain configuredThere is no SOAP responseThere is no UpdateCredentialsResponse in responseThere is no X509 request in the responseThere is no certificate named %s found, the certificate could be removed when generating CSRThere is no digest in issuer's private key objectThere is no local LRMS ID. Message will not be written to BLAH log.There is no responseThere was a problem during post-transfer destination handling after error: %sThere was a problem during post-transfer source handling: %sThere was no HTTP responseThere was no SOAP responseThere was no SOAP response return from PDP server: %sThird party transfer is not supported for these endpointsThird party transfer was requested but the corresponding plugin could not be loaded. Is the GFAL plugin installed? If not, please install the packages 'nordugrid-arc-plugins-gfal' and 'gfal2-all'. 
Depending on your type of installation the package names might differ.This INFO message should also be seenThis INFO message should be seenThis VERBOSE message should not be seenThis VERBOSE message should now be seenThis instance was already deletedThis job was very recently submitted and might not yet have reached the information systemThis message goes to initial destinationThis message goes to per-thread destinationThis process already owns the lock on %sThis seems like a temporary error, please try again laterThis tiny tool can be used for testing the JobDescription's conversion abilities.This user is denied to submit new jobs.Thread exited with Glib error: %sThread exited with Glib exception: %sThread exited with generic exception: %sTime left for AC: %sTime left for AC: AC has expiredTime left for AC: AC is not valid yetTime left for proxy: %sTime left for proxy: Proxy expiredTime left for proxy: Proxy not valid yetTime spent waiting for disc: %.3f msTime spent waiting for network: %.3f msTimed out while waiting for cache lockTimeout connecting to %s(%s):%i - %i sTimeout has expired, will remove lock file %sTimeout waiting for Globus callback - leaking connectionTimeout waiting for mkdirTo recover missing jobs, run arcsyncToo many arguments in configurationToo many connections - dropping new oneToo many connections - waiting for old to closeToo many failures to obtain checksum - giving upToo many files in one request - please try again with fewer filesTool for writing the grami file representation of a job description file.Total jobs: %iTotal logical CPUs: %iTotal number of jobs found: Total number of new jobs found: Total physical CPUs: %iTotal slots: %iTransfer FAILED: %sTransfer cancelled successfullyTransfer completeTransfer failedTransfer failed: %sTransfer finished: %llu bytes transferred %sTransfer from %s to %sTransfer killed after %i seconds without communicationTransfer succeededTransfer timed outTrusted CAs:Try to get attribute from VOMS server with order: %sTrying all available interfacesTrying next replicaTrying to check X509 cert with check_cert_typeTrying to connect %s(%s):%dTrying to listen on %s:%s(%s)Trying to listen on TCP port %s(%s)Trying to migrate to %s: Migration to a %s interface is not supported.Trying to retrieve job description of %s from computing resourceTrying to start suspended endpoint (%s)Trying to submit directly to endpoint (%s)Trying to submit endpoint (%s) using interface (%s) with plugin (%s).Two input files have identical name '%s'.Type is dir, calling srmRmDirType is file, calling srmRmType: %sTypes of execution services that %s is able to submit jobs to:Types of local information services that %s is able to collect information from:Types of local information services that %s is able to collect job information from:Types of registry services that %s is able to collect information from:Types of services that %s is able to manage jobs at:URLURL %s disagrees with stored SRM info, testing new infoURL is mapped to local access - checking permissions on original URLURL is mapped to: %sURL is not valid: %sURL option %s does not have format name=valueURL protocol is not urllist: %sURL: %sUnAuthorized from xacml.pdpUnable to adapt job description to any resource, no resource information could be obtained.Unable to add event: cannot find AAR for job %s in accounting database.Unable to associate secondary DB with primary DB (%s)Unable to copy %sUnable to copy example configuration from existing configuration (%s)Unable to create %s directory.Unable to 
create DB for secondary endpoint keys (%s)Unable to create DB for secondary name keys (%s)Unable to create DB for secondary service info keys (%s)Unable to create SOAP client used by EMIESClient.Unable to create data base (%s)Unable to create data base environment (%s)Unable to create directory %sUnable to create directory for storing results (%s) - %sUnable to create index for jobs table in data base (%s)Unable to create job database (%s)Unable to create jobs table in data base (%s)Unable to create jobs_new table in data base (%s)Unable to create temporary directoryUnable to detect format of job record.Unable to detect if issuer certificate is installed.Unable to determine certificate informationUnable to determine error (%d)Unable to download job (%s), no JobControllerPlugin plugin was set to handle the job.Unable to drop jobs in data base (%s)Unable to find file size of %sUnable to handle job (%s), no interface specified.Unable to handle job (%s), no plugin associated with the specified interface (%s)Unable to initialise connection to destination: %sUnable to initialise connection to source: %sUnable to initialize handler for %sUnable to list content of %sUnable to list files at %sUnable to load ARC configuration file.Unable to load BrokerPlugin (%s)Unable to load broker %sUnable to load plugin (%s) for interface (%s) when trying to submit job description.Unable to load submission plugin for %s interfaceUnable to locate the "%s" plugin. Please refer to installation instructions and check if package providing support for "%s" plugin is installedUnable to match target, marking it as not matching. Broker not valid.Unable to migrate job (%s), job description could not be retrieved remotelyUnable to migrate job (%s), unable to parse obtained job descriptionUnable to open job list file (%s), unknown formatUnable to parse job description input: %sUnable to parse the %s.%s value from execution service (%s).Unable to parse the specified verbosity (%s) to one of the allowed levelsUnable to parse.Unable to prepare job description according to needs of the target resource (%s).Unable to prepare job description according to needs of the target resource.Unable to query job information (%s), invalid URL provided (%s)Unable to read job information from file (%s)Unable to register job submission. Can't get JobDescription object from Broker, Broker is invalid.Unable to remove file %sUnable to rename %sUnable to rename jobs table in data base (%s)Unable to resubmit job (%s), no targets applicable for submissionUnable to resubmit job (%s), target information retrieval failed for target: %sUnable to resubmit job (%s), unable to parse obtained job descriptionUnable to retrieve list of job files to download for job %sUnable to select middlewareUnable to select operating system.Unable to select runtime environmentUnable to set duplicate flags for secondary key DB (%s)Unable to sort ExecutionTarget objects - Invalid Broker object.Unable to sort added jobs. The BrokerPlugin plugin has not been loaded.Unable to submit job. Failed to assign delegation to job description.Unable to submit job. Job description is not valid XMLUnable to submit job. Job description is not valid in the %s format: %sUnable to submit jobs. 
Failed to delegate credentials.Unable to transfer from jobs to jobs_new in data base (%s)Unable to truncate job database (%s)Unable to write 'output' file: %sUnable to write grami file: %sUnable to write key/value pair to job database (%s): Key "%s"Unable to write records into job database (%s): Id "%s"Unable to write to p12 fileUnauthorizedUnauthorized from remote pdp serviceUndefined control sequence: %%%sUnexpected RSL typeUnexpected argument for 'all' rule - %sUnexpected argumentsUnexpected arguments suppliedUnexpected delegation location from delegation endpoint - %s.Unexpected immediate completion: %sUnexpected name returned in Rucio response: %sUnexpected path %s returned from serverUnexpected response code from delegation endpoint - %uUnexpected response code from delegation endpoint: %u, %s.Uniq is adding service coming from %sUniq is ignoring service coming from %sUniq is replacing service coming from %s with service coming from %sUnknown ACL policy %s for job %sUnknown LDAP scope %s - using baseUnknown XRSL attribute: %s - Ignoring it.Unknown attribute %s in common section of configuration file (%s), ignoring itUnknown authorization command %sUnknown channel %s for stdio protocolUnknown conversion mode %s, using defaultUnknown credential type %s for URL pattern %sUnknown element in Globus signing policyUnknown entry in EGIIS (%s)Unknown errorUnknown key or hash typeUnknown key or hash type of issuerUnknown log level %sUnknown open mode %iUnknown open mode %sUnknown optionUnknown option %sUnknown rights in Globus signing policy - %sUnknown section %s, ignoring itUnknown transfer option: %sUnknown user name mapping rule %sUnregistering %sUnregistering from index service failedUnsupported URL givenUnsupported URL given: %sUnsupported command: %sUnsupported configuration command: %sUnsupported destination url: %sUnsupported information endpoint type: %sUnsupported job list type '%s', using 'BDB'. Supported types are: BDB, SQLITE, XML.Unsupported mapping policy action: %sUnsupported mapping policy option: %sUnsupported protocol in url %sUnsupported proxy policy language is requested - %sUnsupported proxy version is requested - %sUnsupported source url: %sUnsupported submission endpoint type: %sUnsupported submission interface %s. Seems arc-blahp-logger need to be updated accordingly. Please submit the bug to bugzilla.Unsupported value for allownew: %sUntrusted self-signed certificate in chain with subject %s and hash: %luUpdateCredentials failedUpdateCredentials: EPR contains no JobIDUpdateCredentials: failed to update credentialsUpdateCredentials: missing ReferenceUpdateCredentials: no job found: %sUpdateCredentials: request = %sUpdateCredentials: response = %sUpdateCredentials: wrong number of ReferenceUpdateCredentials: wrong number of elements inside ReferenceUsage:Usage: copy source destinationUse --help option for detailed usage informationUse -? to get usage descriptionUsed configuration file %sUsed slots: %iUser configuration file (%s) contains errors.User configuration file (%s) does not exist or cannot be loaded.User for helper program is missingUser has empty virtual directory tree. 
Either user has no authorised plugins or there are no plugins configured at all.User has no proper configuration associatedUser interface errorUser name direct mapping is missing user name: %s.User name mapping command is emptyUser name mapping has empty authgroup: %sUser name mapping has empty commandUser name mapping has empty name: %sUser name should be specified.User pool at %s can't be opened.User pool at %s failed to perform user mapping.User pool mapping is missing user subject.User subject match is missing user subject.User subject: %sUserConfig class is not an objectUserConfiguration saved to file (%s)Username Token handler is not configuredUsing A-REX config file %sUsing CA certificate directory: %sUsing DH parameters from file: %sUsing OTokenUsing Rucio account %sUsing buffered transfer methodUsing cache %sUsing cached local account '%s'Using cert %sUsing certificate file: %sUsing cipher list: %sUsing cipher: %sUsing configuration at %sUsing control directory %sUsing curve with NID: %uUsing insecure data transferUsing internal transfer method of %sUsing key %sUsing key file: %sUsing local account '%s'Using next %s replicaUsing protocol options: 0x%xUsing proxy %sUsing proxy file: %sUsing secure data transferUsing session dir %sUsing session directory %sUsing space token %sUsing space token description %sVO %s doesn't match %sVOMS AC attribute is a tagVOMS AC attribute is the FQANVOMS attr %s doesn't match %sVOMS attr %s matches %sVOMS attribute is ignored due to processing/validation errorVOMS attribute parsing failedVOMS attribute validation failedVOMS command is emptyVOMS line contains wrong number of tokens (%u expected): "%s"VOMS proxy processing returns: %i - %sVOMS trust chains: %sVOMS: AC has expiredVOMS: AC is not complete - missing Serial or Issuer informationVOMS: AC is not yet validVOMS: AC signature verification failedVOMS: CA directory or CA file must be providedVOMS: Can not allocate memory for parsing ACVOMS: Can not allocate memory for storing the order of ACVOMS: Can not find AC_ATTR with IETFATTR typeVOMS: Can not parse ACVOMS: Cannot find certificate of AC issuer for VO %sVOMS: DN of holder in AC: %sVOMS: DN of holder: %sVOMS: DN of issuer: %sVOMS: FQDN of this host %s does not match any target in ACVOMS: The lsc file %s can not be openVOMS: The lsc file %s does not existVOMS: authorityKey is wrongVOMS: both idcenoRevAvail and authorityKeyIdentifier certificate extensions must be presentVOMS: can not verify the signature of the ACVOMS: cannot validate AC issuer for VO %sVOMS: case of multiple IETFATTR attributes not supportedVOMS: case of multiple policyAuthority not supportedVOMS: create FQAN: %sVOMS: create attribute: %sVOMS: directory for trusted service certificates: %sVOMS: failed to parse attributes from ACVOMS: failed to verify AC signatureVOMS: missing AC partsVOMS: problems while parsing information in ACVOMS: the DN in certificate: %s does not match that in trusted DN list: %sVOMS: the Issuer identity in certificate: %s does not match that in trusted DN list: %sVOMS: the attribute name is emptyVOMS: the attribute qualifier is emptyVOMS: the attribute value for %s is emptyVOMS: the format of IETFATTRVAL is not supported - expecting OCTET STRINGVOMS: the format of policyAuthority is unsupported - expecting URIVOMS: the grantor attribute is emptyVOMS: the holder information in AC is wrongVOMS: the holder issuer name is not the same as that in ACVOMS: the holder issuerUID is not the same as that in ACVOMS: the holder name in AC is not related to the 
distinguished name in holder certificateVOMS: the holder serial number %lx is not the same as the serial number in AC %lx, the holder certificate that is used to create a voms proxy could be a proxy certificate with a different serial number as the original EEC certVOMS: the holder serial number is: %lxVOMS: the issuer information in AC is wrongVOMS: the issuer name %s is not the same as that in AC - %sVOMS: the only supported critical extension of the AC is idceTargetsVOMS: the serial number in AC is: %lxVOMS: the serial number of AC INFO is too long - expecting no more than 20 octetsVOMS: there is no constraints of trusted voms DNs, the certificates stack in AC will not be checked.VOMS: trust chain to check: %s VOMS: unable to determine hostname of AC from VO name: %sVOMS: unable to extract VO name from ACVOMS: unable to match certificate chain against VOMS trusted DNsVOMS: unable to verify certificate chainVOMS: unsupported time format in AC - expecting GENERALIZED TIMEValid JobDescription foundValid for: %sValid for: Proxy expiredValid for: Proxy not validValid until: %sValue of %s.%s is "%s"Value of 'count' attribute must be an integerValue of 'countpernode' attribute must be an integerValue of 'exclusiveexecution' attribute must either be 'yes' or 'no'Value of attribute '%s' expected not to be emptyValue of attribute '%s' expected to be a stringValue of attribute '%s' expected to be single valueValue of attribute '%s' has wrong sequence length: Expected %d, found %dValue of attribute '%s' is not a stringValue of attribute '%s' is not sequenceVariable name (%s) contains invalid character (%s)Variable name expectedVersion in Listen element can't be recognizedWARNING: The end time that you set: %s is before current time: %sWARNING: The start time that you set: %s is before current time: %sWaiting 1 minuteWaiting ends.Waiting for bufferWaiting for globus handle to settleWaiting for lock on file %sWaiting for lock on job list file %sWaiting for main job processing thread to exitWaiting for responseWaiting jobs: %iWaking upWarning: Failed listing files but some information is obtainedWarning: Failed removing jobs from file (%s)Warning: Failed to write job information to file (%s)Warning: Failed to write local list of jobs into file (%s), jobs list is destroyedWarning: Job not found in job list: %sWarning: Some jobs were not removed from serverWarning: Unable to create job list file (%s), jobs list is destroyedWarning: Unable to open job list file (%s), unknown formatWarning: Unable to read local list of jobs from file (%s)Warning: Unable to truncate local list of jobs in file (%s)Warning: Using SRM protocol v1 which does not support space tokensWarning: mount point %s creation failed.Was expecting %s at the beginning of "%s"Watchdog (re)starting applicationWatchdog detected application exitWatchdog detected application exit due to signal %uWatchdog detected application exited with code %uWatchdog detected application timeout or error - killing processWatchdog exiting because application was purposely killed or exited itselfWatchdog failed to kill application - giving up and exitingWatchdog failed to wait till application exited - sending KILLWatchdog fork failed: %sWatchdog starting monitoringWe only support CAs in Globus signing policy - %s is not supportedWe only support X509 CAs in Globus signing policy - %s is not supportedWe only support globus conditions in Globus signing policy - %s is not supportedWe only support subjects conditions in Globus signing policy - %s is not 
supportedWhen specifying 'countpernode' attribute, 'count' attribute must also be specifiedWill %s in destination index serviceWill calculate %s checksumWill clean up pre-registered destinationWill download to cache file %sWill not map to 'root' account by defaultWill process cacheWill release cache locksWill remove %s on service %s.Will retry without cachingWill use bulk requestWill wait 10sWill wait around %isWiping and re-creating whole storageWorking area free size: %i GBWorking area is not shared among jobsWorking area is shared among jobsWorking area life time: %sWorking area total size: %i GBWriting the info to the BLAH parser log: %sWrong buffer sizeWrong defaultbuffer number in configurationWrong directory in %sWrong format of the "FreeSlotsWithDuration" = "%s" ("%s")Wrong language requested: %sWrong maxbuffer number in configurationWrong maxconnections number in configurationWrong maximal buffer sizeWrong number in defaultttl commandWrong number in maxjobdescWrong number in maxjobdesc commandWrong number in maxjobs: %sWrong number in maxrerun commandWrong number in urdelivery_frequency: %sWrong number in wakeupperiod: %sWrong number of arguments givenWrong number of arguments!Wrong number of connectionsWrong number of objects (%i) for stat from ftp: %sWrong number of parameters specifiedWrong option in %sWrong option in delegationdbWrong option in fixdirectoriesWrong option in logreopenWrong ownership of certificate file: %sWrong ownership of key file: %sWrong ownership of proxy file: %sWrong permissions of certificate file: %sWrong permissions of key file: %sWrong permissions of proxy file: %sWrong port numberWrong port number in configurationWrong service record field "%s" found in the "%s"Wrote request into a fileWrote signed EEC certificate into a fileWrote signed proxy certificate into a fileX509 Token handler is not configuredXACML authorisation request: %sXACML authorisation response: %sXACML request: %sXML config file %s does not existXML response: %sYou are about to remove jobs from the job list for which no information could be found. 
NOTE: Recently submitted jobs might not have appeared in the information system, and this action will also remove such jobs.You may try to increase verbosity to get more information.Your identity: %sYour issuer's certificate is not installedYour proxy is valid until: %sZero bytes written to file[ADLParser] %s element must be boolean.[ADLParser] AccessControl isn't valid XML.[ADLParser] Benchmark is not supported yet.[ADLParser] Code in FailIfExitCodeNotEqualTo in %s is not valid number.[ADLParser] CreationFlag value %s is not supported.[ADLParser] CredentialService must contain valid URL.[ADLParser] Missing Name element or value in ParallelEnvironment/Option element.[ADLParser] Missing or empty Name in InputFile.[ADLParser] Missing or empty Name in OutputFile.[ADLParser] Missing or wrong value in DiskSpaceRequirement.[ADLParser] Missing or wrong value in IndividualCPUTime.[ADLParser] Missing or wrong value in IndividualPhysicalMemory.[ADLParser] Missing or wrong value in IndividualVirtualMemory.[ADLParser] Missing or wrong value in NumberOfSlots.[ADLParser] Missing or wrong value in ProcessesPerSlot.[ADLParser] Missing or wrong value in SlotsPerHost.[ADLParser] Missing or wrong value in ThreadsPerProcess.[ADLParser] Missing or wrong value in TotalCPUTime.[ADLParser] Missing or wrong value in WallTime.[ADLParser] NetworkInfo is not supported yet.[ADLParser] NodeAccess value %s is not supported yet.[ADLParser] Only email Prorocol for Notification is supported yet.[ADLParser] Optional for %s elements are not supported yet.[ADLParser] Root element is not ActivityDescription [ADLParser] The NumberOfSlots element should be specified, when the value of useNumberOfSlots attribute of SlotsPerHost element is "true".[ADLParser] Unsupported EMI ES state %s.[ADLParser] Unsupported URL %s for RemoteLogging.[ADLParser] Unsupported internal state %s.[ADLParser] Wrong URI specified in Source - %s.[ADLParser] Wrong URI specified in Target - %s.[ADLParser] Wrong time %s in ExpirationTime.[ADLParser] priority is too large - using max value 100[filename ...][job ...][job description ...][job description input][resource ...]a file containing a list of jobIDsabort_callback: Globus error: %sabort_callback: startaction(%s) != requestactive_data is disabledadd_word failureadvertisedvo parameter is emptyall for attentionall jobsarc.confbad directory for plugin: %sbrokerbuffer: error : %s, read: %s, write: %sbuffer: read EOF : %sbuffer: write EOF: %scache file: %scancelledceceID prefix is set to %scheck readability of object, does not show any information about objectcheck_abort: have Globus errorcheck_abort: sending 426check_ftp: failed to get file's modification timecheck_ftp: failed to get file's sizecheck_ftp: globus_ftp_client_get failedcheck_ftp: globus_ftp_client_modification_time failedcheck_ftp: globus_ftp_client_register_readcheck_ftp: globus_ftp_client_size failedcheck_ftp: obtained modification date: %scheck_ftp: obtained size: %llicheck_ftp: timeout waiting for modification_timecheck_ftp: timeout waiting for partial getcheck_ftp: timeout waiting for sizeclass name: %sclientxrsl foundclientxrsl not foundclose failed: %sclosing file %s failed: %scommand to MyProxy server. The command can be PUT, GET, INFO, NEWPASS or DESTROY. 
PUT -- put delegated credentials to the MyProxy server; GET -- get delegated credentials from the MyProxy server; INFO -- get and present information about credentials stored at the MyProxy server; NEWPASS -- change password protecting credentials stored at the MyProxy server; DESTROY -- wipe off credentials stored at the MyProxy server; Local credentials (certificate and key) are not necessary except in case of PUT. MyProxy functionality can be used together with VOMS functionality. --voms and --vomses can be used for the GET command if VOMS attributes are required to be included in the proxy. computingconfiguration file (default ~/.arc/client.conf)configuration file not foundconvert from specified input database format [bdb|sqlite]convert into specified output database format [bdb|sqlite]could not find end of clientxrslcould not find start of clientxrsld2i_X509_REQ_bio faileddata chunk: %llu %lludata_connect_retrieve_callbackdata_connect_retrieve_callback: allocate_data_bufferdata_connect_retrieve_callback: allocate_data_buffer faileddata_connect_retrieve_callback: check for buffer %udata_connect_store_callbackdata_retrieve_callbackdata_retrieve_callback: lost bufferdata_store_callback: lost bufferdatabase formatdebugleveldefine the requested format (nordugrid:xrsl, emies:adl)delete_ftp: globus_ftp_client_delete faileddelete_ftp: globus_ftp_client_rmdir faileddelete_ftp: timeout waiting for deletedestinationdestination.next_locationdirdirectorydirnamedisplay all available metadatadisplay more information on each jobdndo not ask for verificationdo not collect information, only convert jobs storage formatdo not print list of jobsdo not print number of jobs in each statedo not resubmit to the same resourcedo not submit - dump job description in the language accepted by the targetdo not transfer, but register source into destination. destination must be a meta-url.do not try to force passive transferdo not prompt for a credential passphrase when retrieving a credential from the MyProxy server. The precondition is that the credential was PUT onto the MyProxy server without a passphrase, using the -R (--retrievable_by_cert) option. 
This option is specific for the GET command when contacting Myproxy server.downloaddownload directory (the job directory will be created in this directory)downloadsdroppedecho: Unauthorizedempty input payloadempty next chain elementend of string encountered while processing type of subject name element #%derror converting number from bin to BIGNUMerror converting serial to ASN.1 formatescape character at end of stringexitfailed to identify plugins pathfailed to process client identificationfailed to read data chunkfailed to read data tagfailed while processing configuration command: %s %sfilefile %s is not accessiblefile namefile name too longfile node creation failed: %sfilenamefilepathfinishedforce download (overwrite existing job directory)force migration, ignore kill failureforcedefaultvoms parameter is emptyfork failedformatfrom the following endpoints:fsync of file %s failed: %sftp_check_callbackftp_complete_callback: error: %sftp_complete_callback: successftp_get_complete_callback: Failed to get ftp fileftp_get_complete_callback: successftp_put_complete_callback: successftp_read_callback: Globus error: %sftp_read_callback: delayed data chunk: %llu %lluftp_read_callback: failure: %sftp_read_callback: successftp_read_callback: success - offset=%u, length=%u, eof=%u, allow oof=%uftp_read_callback: too many unexpected out of order chunksftp_read_callback: unexpected data out of order: %llu != %lluftp_read_thread: Globus error: %sftp_read_thread: data callback failed - aborting: %sftp_read_thread: exitingftp_read_thread: failed to register Globus buffer - will try later: %sftp_read_thread: failed to register buffersftp_read_thread: failed to release buffersftp_read_thread: failed to release buffers - leakingftp_read_thread: for_read failed - aborting: %sftp_read_thread: get and register buffersftp_read_thread: too many registration failures - abort: %sftp_read_thread: waiting for buffers releasedftp_read_thread: waiting for eofftp_write_callback: failure: %sftp_write_callback: success %sftp_write_thread: Globus error: %sftp_write_thread: data callback failed - abortingftp_write_thread: data out of order in stream mode: %llu != %lluftp_write_thread: exitingftp_write_thread: failed to release buffers - leakingftp_write_thread: for_write failed - abortingftp_write_thread: get and register buffersftp_write_thread: too many out of order chunks in stream modeftp_write_thread: waiting for buffers releasedftp_write_thread: waiting for eofftp_write_thread: waiting for transfer completegfal_close failed: %sgfal_closedir failed: %sgfal_listxattr failed, no replica information can be obtained: %sgfal_mkdir failed (%s), trying to write anywaygfal_mkdir failed: %sgfal_open failed: %sgfal_opendir failed: %sgfal_read failed: %sgfal_rename failed: %sgfal_rmdir failed: %sgfal_stat failed: %sgfal_unlink failed: %sgfal_write failed: %sglobalid is set to %sglobus_ftp_client_operationattr_set_authorization: error: %sgm-delegations-converter changes format of delegation database.gm-jobs displays information on current jobs in the system.gm-kick wakes up the A-REX corresponding to the given control file. If no file is given it uses the control directory found in the configuration file.gmetric_bin_path empty in arc.conf (should never happen the default value should be used)group<:role>. 
Specify ordering of attributes Example: --order /knowarc.eu/coredev:Developer,/knowarc.eu/testers:Tester or: --order /knowarc.eu/coredev:Developer --order /knowarc.eu/testers:Tester Note that it does not make sense to specify the order if you have two or more different VOMS servers specifiedheadnode is set to %shostname[:port] of MyProxy serverhourhoursidif the destination is an indexing service and not the same as the source and the destination is already registered, then the copy is normally not done. However, if this option is specified the source is assumed to be a replica of the destination created in an uncontrolled way and the copy is done like in case of replication. Using this option also skips validation of completed transfers.improper attribute for allowactvedata command: %simproper attribute for allowencryption command: %sincoming message is not SOAPindexinform about changes in particular job (can be used multiple times)init_handle: globus_ftp_client_handle_init failedinit_handle: globus_ftp_client_handleattr_init failedinit_handle: globus_ftp_client_handleattr_set_gridftp2 failedinit_handle: globus_ftp_client_operationattr_init failedinit_handle: globus_ftp_client_operationattr_set_allow_ipv6 failedinit_handle: globus_ftp_client_operationattr_set_delayed_pasv failedinmsg.Attributes().getAll() = %s inmsg.Auth().Export(arc.SecAttr.ARCAuth) = %sinput does not define operationinput is not SOAPinputcheck checks that input files specified in the job description are available and accessible using the credentials in the given proxy file.instead of the status only the IDs of the selected jobs will be printedintinterfaceinterface is set to %sinterfacenameinvalid jobID: %sjob %s (will be) cleaned successfullyjob %s cancelled successfullyjob %s restarted successfullyjob idjob_description_file [proxy_file]jobdescription file describing the job to be submittedjobdescription string describing the job to be submittedkeep the files on the server (do not clean)levellist record: %slist the available pluginslist the available plugins (protocols supported)list_files_ftp: checksum %slist_files_ftp: failed to get file's modification timelist_files_ftp: failed to get file's sizelist_files_ftp: globus_ftp_client_cksm failedlist_files_ftp: globus_ftp_client_modification_time failedlist_files_ftp: globus_ftp_client_size failedlist_files_ftp: looking for checksum of %slist_files_ftp: looking for modification time of %slist_files_ftp: looking for size of %slist_files_ftp: no checksum information possiblelist_files_ftp: no checksum information returnedlist_files_ftp: no checksum information supportedlist_files_ftp: timeout waiting for cksumlist_files_ftp: timeout waiting for modification_timelist_files_ftp: timeout waiting for sizeload serial from %s failurelocal_pasv failedlocal_port failedlocal_spas failedlocalid is set to %slong format (more information)lrms is emptymail parameter is emptymake parent directories as neededmake_abort: leavingmake_abort: startmake_abort: wait for abort flag to be resetmalloc errormeta file %s is emptyminuteminutesmkdir failed: %smkdir_ftp: making %smkdir_ftp: timeout waiting for mkdirmodule name: %snnamenew_payload %snext chain element callednext element of the chain returned empty payloadnext element of the chain returned error statusnext element of the chain returned error status: %snext element of the chain returned invalid payloadnext element of the chain returned invalid/unsupported payloadnext element of the chain returned no payloadnext element of the chain returned 
unknown payload - passing throughnumbernumber of retries before failing file transferold_url new_urlonly get information about execution targets that support this job submission interface. Allowed values are org.nordugrid.gridftpjob or org.nordugrid.gridftp, org.ogf.glue.emies.activitycreation and org.nordugrid.internalonly select jobs that were submitted to this resourceonly select jobs whose status is statusstronly use this interface for submitting. Allowed values are: org.nordugrid.gridftpjob or org.nordugrid.gridftp, org.ogf.glue.emies.activitycreation and org.nordugrid.internalopen: changing owner for %s, %i, %iopen: owner: %i %ioperate recursivelyoperate recursively up to specified levelorderoutpayload %soutput is not SOAPoutput requested elements (jobs list, delegation ids and tokens) to fileowner subject is set to %sp12 file is emptypassword destination=password sourcepathpath to local cache (use to put file into cache)path to the VOMS server configuration filepath to the certificate file, it can be either PEM, DER, or PKCS12 formattedpath to the private key file, if the certificate is in PKCS12 format, then no need to give private keypath to the proxy filepath to the top directory of VOMS *.lsc files, only needed for the VOMS client functionalitypath to the trusted certificate directory, only needed for the VOMS client functionalityperform third party transfer, where the destination pulls from the source (only available with GFAL plugin)physical location to write to when destination is an indexing service. Must be specified for indexing services which do not automatically generate physical locations. Can be specified multiple times - locations will be tried in order until one succeeds.pkey and rsa_key exist!plugin for transport protocol %s is not installedplugin: checkdir: %splugin: checkdir: access: %splugin: checkdir: access: allowed: %splugin: closeplugin: open: %splugin: readplugin: writeprint a list of services configured in the client.confprint all information about this proxy.print delegation token of specified ID(s)print list of available delegation IDsprint main delegation token of specified Job ID(s)print selected information about this proxy.print state of the serviceprint summary of jobs in each transfer shareprint version informationprints info about installed user- and CA-certificatespriority is too large - using max value 100process: DELETEprocess: GETprocess: HEADprocess: POSTprocess: PUTprocess: action %s is not supported for subpath %sprocess: endpoint: %sprocess: factory endpointprocess: id: %sprocess: method %s is not supportedprocess: method %s is not supported for subpath %sprocess: method is not definedprocess: method: %sprocess: operation: %sprocess: request=%sprocess: response=%sprocess: schema %s is not supported for subpath %sprocess: subop: %sprocess: subpath: %sproxy constraintsput on holdqueue name is set to %sread information from specified control directoryread_thread: data read error from external process - aborting: %sread_thread: exitingread_thread: for_read failed - aborting: %sread_thread: get and register buffersread_thread: non-data tag '%c' from external process - leaving: %sregisterregistryregistry service URL with optional specification of protocolremove logical file name registration even if not all physical instances were removedremove proxyremove the job from the local list of jobs even if the job is not found in the infosysrequest to cancel job(s) with specified ID(s)request to cancel jobs belonging to user(s) with specified subject 
name(s)request to clean job(s) with specified ID(s)request to clean jobs belonging to user(s) with specified subject name(s)require information query using the specified information endpoint type. Special value 'NONE' will disable all resource information queries and the following brokering. Allowed values are: ldap.nordugrid, ldap.glue2, emies, arcrest and internal.require the specified endpoint type for job submission. Allowed values are: arcrest, emies, gridftp or gridftpjob and internal.response: %sresubmit to the same resourcereverse sorting of jobs according to jobid, submissiontime or jobnamesave serial to %s failuresecondsecondssecondsselect broker method (list available brokers with --listplugins flag)select one or more computing elements: name can be an alias for a single CE, a group of CEs or a URLselect one or more registries: name can be an alias for a single registry, a group of registries or a URLselecting a computing element for the new jobs with a URL or an alias, or selecting a group of computing elements with the name of the groupsetting file %s to size %llushow URLs of file locationsshow jobs where status information is unavailableshow only description of requested object, do not list content of directoriesshow only jobs of user(s) with specified subject name(s)show only jobs with specified ID(s)show progress indicatorshow status information in JSON formatshow the CE's error log of the jobshow the original job descriptionshow the specified file from job's session directoryshow the stderr of the jobshow the stdout of the job (default)shutdownskip jobs that are on a computing element with a given URLskip the service with the given URL during service discoverysort jobs according to jobid, submissiontime or jobnamesourcesource destinationsource.next_locationspecify computing element hostname or a complete endpoint URLstart_readingstart_reading: helper start failedstart_reading: thread create failedstart_reading_ftpstart_reading_ftp: globus_ftp_client_getstart_reading_ftp: globus_ftp_client_get failedstart_reading_ftp: globus_thread_create failedstart_writing_ftp: data chunk: %llu %llustart_writing_ftp: delayed data chunk: %llu %llustart_writing_ftp: failed to read data chunkstart_writing_ftp: failed to read data tagstart_writing_ftp: globus_thread_create failedstart_writing_ftp: helper start failedstart_writing_ftp: mkdirstart_writing_ftp: mkdir failed - still trying to writestart_writing_ftp: putstart_writing_ftp: put failedstart_writing_ftp: thread create failedstart_writing_ftp: waiting for data chunkstart_writing_ftp: waiting for data tagstart_writing_ftp: waiting for some buffers sentstatusstatusstrstop_reading: exiting: %sstop_reading: waiting for transfer to finishstop_reading_ftp: aborting connectionstop_reading_ftp: exiting: %sstop_reading_ftp: waiting for transfer to finishstringsubmit directly - no resource discovery or matchmakingsubmit jobs as dry run (no submission to batch system)submit test job given by the numbertest job runtime specified by the numberthe IDs of the submitted jobs will be appended to this filethe computing element specified by URL at the command line should be queried using this information interface. 
Allowed values are: org.nordugrid.ldapng, org.nordugrid.ldapglue2 and org.ogf.glue.emies.resourceinfothe file storing information about active jobs (default %s)this option is not functional (old GSI proxies are not supported anymore)timeout in seconds (default 20)treat requested object as directory and always try to list contenttruncate the joblist before synchronizingtypeunable to load number from: %sundefined plugin nameundefined virtual plugin pathunnamed groupunregisteruploaduploadsurlurl [url ...]urllist %s contains invalid URL: %suse GSI communication protocol for contacting VOMS servicesuse HTTP communication protocol for contacting VOMS services that provide RESTful access Note for RESTful access, 'list' command and multiple VOMS server are not supported use NSS credential database in default Mozilla profiles, including Firefox, Seamonkey and Thunderbird. use old communication protocol for contacting VOMS services instead of RESTful access use passive transfer (off by default if secure is on, on by default if secure is not requested)use secure transfer (insecure by default)use specified configuration fileuse the jobname instead of the short ID as the job directory nameusername to MyProxy server (if missing subject of user certificate is used)vomsvoms<:command>. Specify VOMS server (More than one VOMS server can be specified like this: --voms VOa:command1 --voms VOb:command2). :command is optional, and is used to ask for specific attributes(e.g: roles) command options are: all --- put all of this DN's attributes into AC; list ---list all of the DN's attribute, will not create AC extension; /Role=yourRole --- specify the role, if this DN has such a role, the role will be put into AC; /voname/groupname/Role=yourRole --- specify the VO, group and role; if this DN has such a role, the role will be put into AC. If this option is not specified values from configuration files are used. To avoid anything to be used specify -S with empty value. wait failed - killing childwaiting for data chunkwrite_thread: exitingwrite_thread: for_write eofwrite_thread: for_write failed - abortingwrite_thread: get and pass bufferswrite_thread: out failed - abortingwrong SSL lock requested: %i of %i: %i - %sxrootd close failed: %sxrootd open failed: %sxrootd write failed: %sy~DataPoint: destroy ftp_handle~DataPoint: destroy ftp_handle failed - retrying~DataPoint: failed to destroy ftp_handle - leakingProject-Id-Version: Arc Report-Msgid-Bugs-To: support@nordugrid.org POT-Creation-Date: 2021-12-02 15:25+0100 PO-Revision-Date: 2021-11-26 12:09+0100 Last-Translator: Oxana Smirnova Language-Team: Russian Language: ru MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit X-Generator: Poedit 2.3 Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 
1 : 2); X-Poedit-KeywordsList: msg:2;IString:1;istring:1;FindNTrans:1,2 X-Poedit-Basepath: /home/oxana/GITROOT/arc6 X-Poedit-SearchPath-0: src %s КÑш : %s Папка кÑша (только чтение): %s ОчиÑтка кÑша отключена ОчиÑтка кÑша включена Каталог Ñ ÐºÑшем ÑÑылок: %s Контрольный каталог: %s Корневой каталог ÑеÑÑии: %s СУПО по умолчанию : %s очередь по умолчанию : %s Ð’Ñ€ÐµÐ¼Ñ Ð¶Ð¸Ð·Ð½Ð¸ по умолчанию : %u ЗапуÑтите 'arcclean -s Undefined' Ð´Ð»Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð²Ñ‹Ñ‡Ð¸Ñ‰ÐµÐ½Ð½Ñ‹Ñ… задач из ÑпиÑка ЗапуÑтите 'arcclean -s Undefined' Ð´Ð»Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¾Ð±Ð¾Ñ€Ð²Ð°Ð½Ð½Ñ‹Ñ… задач из ÑпиÑка Ð”Ð»Ñ Ð²Ð¾ÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½ÐµÐ´Ð¾Ñтающих задач, запуÑтите arcsync ИÑпользуйте arcclean Ð´Ð»Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð½ÐµÑущеÑтвующих задач ИÑпользуйте arcclean Ð´Ð»Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð½Ñ‹Ñ… задач из ÑпиÑка ИÑполнÑемый: верно Name: %s Sources.DelegationID: %s Sources.Options: %s = %s ИÑточники: %s Targets.DelegationID: %s Targets.Options: %s = %s ÐазначениÑ: %s %s DN Ñертификата: %s дейÑтвителен до: %s DN Ñмитента: %s Серийный номер: %d %s: %s: %i %s: %s Служба доÑтавки: %s Служба доÑтавки: LOCAL МеÑÑ‚ Ð´Ð»Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ·ÐºÐ¸: %u МеÑÑ‚ Ð´Ð»Ñ Ñрочной обработки: %u МеÑÑ‚ Ð´Ð»Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ð¹ обработки: %u МеÑÑ‚ Ð´Ð»Ñ Ð¿Ñ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ð¹ обработки: %u Подготовленных меÑÑ‚: %u ÐšÐ¾Ð½Ñ„Ð¸Ð³ÑƒÑ€Ð°Ñ†Ð¸Ñ ÐºÐ²Ð¾Ñ‚: %s СоÑтоÑние точки входа (%s): %s СоÑтоÑние точки входа (%s) - STARTED или SUCCESSFUL атрибуты: базовое ОИ (DN): %s фильтр: %s непределённых: %i %s -> %s (%s) --- ХОЛОСТÐЯ ПРОГОÐКР--- Контроль доÑтупа: %s ÐннотациÑ: %s Argument: %s Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾Ð± Ñталонных теÑтах: Каталог, Ñодержащий журнальную запиÑÑŒ вычиÑлительного ÑервиÑа: %s URL точки входа Ð´Ð»Ñ Ð²Ñ‹Ñ‡Ð¸Ñлений: %s Ðазвание интерфейÑа точки входа Ð´Ð»Ñ Ð²Ñ‹Ñ‡Ð¸Ñлений: %s Ð¢Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ Ðº вычиÑлительному реÑурÑу: Служба параметров доÑтупа: %s Идентификаторы делегированиÑ: Элемент DelegationID: %s Ð’Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ: %s ЗапиÑÑŒ дейÑтвительна на: %s ЗапиÑÑŒ дейÑтвительна Ñ: %s Environment.name: %s Environment: %s Код выхода: %d Код выхода уÑпешного иÑполнениÑ: %d СоÑтоÑние здоровьÑ: %s ID ÑервиÑа: %s Элемент Inputfile: УÑтановленные рабочие Ñреды: Ошибка задачи: %s Задача не требует ÑкÑклюзивного иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ URL ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡ÐµÐ¹: %s (%s) Задача требует ÑкÑклюзивного иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ URL ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: %s (%s) Очередь приÑвоениÑ: %s ИмÑ: %s Код выхода Ð´Ð»Ñ ÑƒÑпешного иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½Ðµ указан. 
ДоÑтуп к узлу: входÑщий ДоÑтуп к узлу: входÑщий и иÑходÑщий ДоÑтуп к узлу: иÑходÑщий Уведомить: Старый Ñрлык заданиÑ: %s Старый Ñрлык задачи: Ð¢Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ Ðº операционной ÑиÑтеме: Другие ÑообщениÑ: %s Другие атрибуты: [%s], %s Элемент Outputfile: Владелец: %s PostExecutable.Argument: %s PreExecutable.Argument: %s Ð’Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð° обработки: %s ДоверенноÑть дейÑтвительна до: %s Очередь: %s Удалённое журналирование (по выбору): %s (%s) Удалённое журналирование: %s (%s) Запрошенное процеÑÑорное времÑ: %s Запрошено ваканÑий: %i Результаты должны быть воÑтребованы до: %s Результаты были удалены: %s Ð¢Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ Ñреды выполнениÑ: URL информации о ÑервиÑе: %s (%s) URL каталога Грид-ÑеÑии: %s СпецифичеÑкое ÑоÑтоÑние: %s URL каталога Ð´Ð»Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ¸: %s URL каталога Ð´Ð»Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ·ÐºÐ¸: %s СоÑтоÑние: %s Ð¡Ñ‚Ð°Ð½Ð´Ð°Ñ€Ñ‚Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ°: %s Стандартный вход: %s Стандартный выход: %s ЗаÑылающий клиент: %s ЗаÑлана: %s ВерÑÐ¸Ñ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ð°: %s ИÑпользованное процеÑÑорное времÑ: %s ИÑпользованное процеÑÑорное времÑ: %s (%s на Ñлот) ИÑпользование ОЗУ: %d ИÑпользованное времÑ: %s ИÑпользованное времÑ: %s (%s на Ñлот) Положение в очереди: %d [ теÑтировщик JobDescription ] [ ОбрабатываетÑÑ Ð¸Ñходный текÑÑ‚ ] [ emies:adl ] [ nordugrid:xrsl ] $X509_VOMS_FILE и $X509_VOMSES не наÑтроены; Пользователь не указал раÑположение файла vomses; РаÑположение файла vomses не найдено в файле наÑтроек пользователÑ; Файл vomses не обнаружен в ~/.arc/vomses, ~/.voms/vomses, $ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/vomses, /etc/vomses, /etc/grid-security/vomses, а также в ÑоответÑтвующих подкаталогах%5u Ñ: %10.1f кБ %8.1f кБ/Ñ%d ÑиÑтемы ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð¿Ð°ÐºÐµÑ‚Ð½Ð¾Ð¹ обработкой%d точки входа%d СовмеÑтные реÑурÑÑ‹%d правила приÑвоениÑ%d из %d задач были перезапущены%d из %d задач были заÑланыОÑталоÑÑŒ %i попыток, Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° в %s%li Ñекунд(Ñ‹) Ñ Ð¼Ð¾Ð¼ÐµÐ½Ñ‚Ð° ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° блокировки %s%s%s %s%s %s не может быть Ñоздан.%s (%s)%s > %s => неверно%s > %s => неверно: %s Ñодержит нецифровые Ñимволы в номере верÑии.%s > %s => верноКлаÑÑ %s не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼Ñоздан каталог %sКаталог %s уже ÑушеÑтвует! Задача пропуÑкаетÑÑ.%s не удалоÑÑŒ%s не ÑвлÑетÑÑ Ð¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ñ‹Ð¼ типом профилÑ%s не ÑвлÑетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼%s не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼%s не допущен к иÑполнению дейÑÑ‚Ð²Ð¸Ñ %s на реÑурÑе %s%s не допущен к иÑполнению дейÑÑ‚Ð²Ð¸Ñ %s на реÑурÑе %s Модуль %s Ñброшен на диÑк%s ошибка разборкиÐе найден подключаемый модуль %s "%s".Ð—Ð°Ð¿Ñ€Ð¾Ñ %s не Ð²Ñ‹Ð¿Ð¾Ð»Ð½ÐµÐ½Ð—Ð°Ð¿Ñ€Ð¾Ñ %s к %s не выполнен, получен ответ: %sÐ—Ð°Ð¿Ñ€Ð¾Ñ %s к %s не выполнен, неожиданный ответ: %s.%s, верÑÐ¸Ñ %s%s->%s%s. Ðевозможно Ñкопировать набор файлов%s. ошибка базы данных SQLite: %s%s:%s: %i%s: %s%s: %s: ÐÐ¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° принадлежит %i/%i%s: %s:%i%s: МеÑтонахождение в ACIX: %s%s: Добавление нового файла выхода %s: %s%s: Ð’Ñе процеÑÑÑ‹ %s уÑпешно завершилиÑÑŒ (%s)%s: Ð—Ð°Ð¿Ñ€Ð¾Ñ Ñ„Ð°Ð¹Ð»Ð° Ñ Ð»ÐµÐ½Ñ‚Ñ‹ %s в очереди SRM. 
Ожидание %i Ñекунд%s: ОчиÑтка кÑша продолжаетÑÑ Ñлишком долго - %u.%06u Ñекунд%s: Ðевозможно преобразовать контрольную Ñумму файла %s в целое Ð´Ð»Ñ %s%s: Ðевозможно преобразовать размер файла %s в целое Ð´Ð»Ñ %s%s: Ðевозможно прочеÑть ÑпиÑок входных файлов%s: ПерезапуÑк по требованию невозможен%s: ПерезапуÑк по запроÑу невозможен - неподходÑщее ÑоÑтоÑние%s: Прерывание задачи по запроÑу пользователÑ%s: Прерывание активных запроÑов%s: Прерывание оÑтальных запроÑов DTR%s: Ðевозможно запиÑать два разных файла %s и %s Ñ Ð¾Ð´Ð½Ð¸Ð¼ LFN: %s%s: Проверка отгружаемого файла пользователÑ: %s%s: ÐŸÑ€Ð¾Ð²ÐµÑ€Ð¾Ñ‡Ð½Ð°Ñ Ñумма %llu подтверждена Ð´Ð»Ñ %s%s: КритичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° Ð´Ð»Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ¶Ð°ÐµÐ¼Ð¾Ð³Ð¾ файла %s%s: Сбой запроÑа DTR %s на копирование файла %s%s: копирование DTR %s в %s не удалоÑÑŒ, но не было обÑзательным%s: Удаление запроÑа в ÑвÑзи Ñ Ð²Ð½ÑƒÑ‚Ñ€ÐµÐ½Ð½Ð¸Ð¼Ð¸ неполадками%s: Файл Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ %s вероÑтно оÑталÑÑ Ð½ÐµÐ´Ð¾Ð¿Ð¸Ñанным поÑле предыдущего запуÑка A-REX, перезапиÑÑŒ%s: ПовторÑющееÑÑ Ð¸Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð° в ÑпиÑке входных файлов: %s%s: Ошибка доÑтупа к файлу %s%s: Ошибка при чтении файла %s%s: Чтение выходных файлов в ÑпиÑке Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %s%s: Ðе удалоÑÑŒ Ñоздать файл grami%s: Ðе удалоÑÑŒ извлечь информацию о локальном ÑоÑтоÑнии задачи.%s: Ðе удалоÑÑŒ получить номер из СУПО%s: Сбой разборки запроÑа задачи.%s: Ошибка при чтении .local и изменении ÑоÑтоÑниÑ, задачи и A-REX могут оказатьÑÑ Ð² противоречивом ÑоÑтоÑнии%s: Сбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð¾Ð¿Ð¸ÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: %s%s: Ðе удалоÑÑŒ прочеÑть локальную информацию%s: Ðе удалоÑÑŒ выполнить процедуру прерываниÑ%s: Ðе удалоÑÑŒ выполнить процедуру запуÑка%s: Ðе удалоÑÑŒ уÑтановить права на иÑполнение%s: Сбой запиÑи причины ÑбоÑ: %s%s: Ðе удалоÑÑŒ оборвать иÑполнÑющуюÑÑ Ð·Ð°Ð´Ð°Ñ‡Ñƒ%s: Ðе удалоÑÑŒ очиÑтить каталог ÑеÑÑии%s: Сбой вывода Ñодержимого каталога Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ %s: %s%s: Ðе удалоÑÑŒ подгрузить анализатор Ð´Ð»Ñ Ð¿Ñ€Ð°Ð²Ð¸Ð» допуÑка пользователей %s: Ðе удалоÑÑŒ открыть файл %s на чтение%s: Сбой при разборе правил допуÑка пользователÑ%s: Ðе удалоÑÑŒ прочеÑть динамичеÑкий ÑпиÑок выходных файлов в %s%s: Ðе удалоÑÑŒ прочеÑть ÑпиÑок входных файлов%s: Ðе удалоÑÑŒ прочеÑть ÑпиÑок входных файлов, невозможно очиÑтить каталог ÑеÑÑии%s: Ðе удалоÑÑŒ прочеÑть ÑпиÑок выходных файлов%s: Ðе удалоÑÑŒ прочеÑть ÑпиÑок выходных файлов, невозможно очиÑтить каталог ÑеÑÑии%s: Ðе удалоÑÑŒ прочеÑть переработанный ÑпиÑок входных файлов%s: Ðе удалоÑÑŒ прочеÑть переработанный ÑпиÑок выходных файлов%s: Сбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в DTRGenerator%s: Ðе удалоÑÑŒ изменить идентификатор Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½Ð° %d/%d Ð´Ð»Ñ Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° %s%s: Сбой приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ðµ ÑоÑтоÑÐ½Ð¸Ñ ÑÐ±Ð¾Ñ Ð¿Ñ€Ð¸ обрыве иÑполнениÑ.%s: Ðе удалоÑÑŒ запиÑать динамичеÑкие выходные файлы обратно в %s%s: Ðе удалоÑÑŒ запиÑать ÑпиÑок входных файлов%s: Ðе удалоÑÑŒ запиÑать ÑпиÑок выходных файлов%s: Ðе удалоÑÑŒ вывеÑти ÑпиÑок ÑоÑтоÑний выходных файлов%s: Ðе удалоÑÑŒ запиÑать изменившийÑÑ Ð²Ñ…Ð¾Ð´Ð½Ð¾Ð¹ файл.%s: Ðе удалоÑÑŒ запиÑать ÑпиÑок выходных файлов: %s%s: Ðе удалоÑÑŒ запиÑать локальную информацию%s: Ðе удалоÑÑŒ запиÑать локальную информацию: %s%s: Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ…Ñ€Ð°Ð½Ð¸Ð»Ð¸Ñ‰Ð° данных Ð´Ð»Ñ Ð´Ð¾Ñ‡ÐµÑ€Ð½ÐµÐ³Ð¾ процеÑÑа%s: Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¾Ð±Ð»Ð°Ñти памÑти Ð´Ð»Ñ Ð´Ð¾Ñ‡ÐµÑ€Ð½ÐµÐ³Ð¾ процеÑÑа%s: Сбой при запуÑке дочернего процеÑÑа%s: Сбой Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ Ð´Ð¾Ñ‡ÐµÑ€Ð½ÐµÐ³Ð¾ процеÑÑа%s: У файла %s Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма: %llu. ОжидалаÑÑŒ %lli%s: Ð—Ð°Ð¿Ñ€Ð¾Ñ Ñ„Ð°Ð¹Ð»Ð° %s в очереди SRM. 
Ожидание %i Ñекунд%s: ОбрабатываютÑÑ Ñ„Ð°Ð¹Ð»Ñ‹ в ÑпиÑке %s%s: ÐедейÑтвительный Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR%s: Ðеверный файл: %s Ñлишком велик.%s: ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ размере/контрольной Ñумме (%s) Ð´Ð»Ñ %s%s: Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¾ прерывании задачи от генератора DTR к планировщику%s: Прерывание задачи проиÑходит Ñлишком медленно, но диагноÑтика уже доÑтупна. Будем Ñчитать, что прерывание произошло.%s: Прерывание задачи проиÑходит Ñлишком долго. Сбой.%s: Сбой иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в неизвеÑтном ÑоÑтоÑнии. ПерезапуÑка не будет.%s: Обнаружен Ñбой задачи%s: Задача завершена%s:Задача уже завершилаÑÑŒ. ДейÑÑ‚Ð²Ð¸Ñ Ð¿Ð¾ прерыванию не применÑÑŽÑ‚ÑÑ%s: Задача уÑтарела - удалÑетÑÑ Ð¾ÑтавшаÑÑÑ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ%s: Задачу Ð½ÐµÐ»ÑŒÐ·Ñ Ð±Ð¾Ð»ÑŒÑˆÐµ перезапуÑкать%s: ПоÑтупил Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° удаление задачи - удалÑетÑÑ%s: Задача Ñлишком ÑÑ‚Ð°Ñ€Ð°Ñ - удалÑетÑÑ%s: Счётчик ÑÐ»ÐµÐ¶ÐµÐ½Ð¸Ñ Ð·Ð° задачей Ñбит%s: Слежение за задачей прервано в ÑвÑзи Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸ÐµÐ¼ из очереди%s: Слежение за задачей непреднамеренно прервано%s: Запрошено прекращение ÑÐ»ÐµÐ¶ÐµÐ½Ð¸Ñ Ð·Ð° задачей Ñ %u активными ÑÑылками%s: Запрошено прекращение ÑÐ»ÐµÐ¶ÐµÐ½Ð¸Ñ Ð·Ð° задачей Ñ %u активными ÑÑылками и аÑÑоциированной очередью %s%s: Слежение за задачей уÑпешно прекращено%s: Ðе удалоÑÑŒ направить задачу в СУПО%s: ЗаÑылка задачи в СУПО проиÑходит Ñлишком медленно, но идентификатор уже доÑтупен. Будем Ñчитать, что заÑылка произведена.%s: ЗаÑылка задачи в СУПО проиÑходит Ñлишком долго. Сбой.%s: ÐÑÑиÑтент задачи прерван%s: доÑтигнут предел Ñкрипта СУПО %u - приоÑтанавливаетÑÑ Ð·Ð°Ð¿ÑƒÑк/ÑнÑтие%s: К раÑположению %s нет удалённого доÑтупа, пропуÑкаетÑÑ%s: Подключаемый модуль в ÑоÑтоÑнии %s : %s%s: Сбой при иÑполнении подключаемого модулÑ%s: Ðе удалоÑÑŒ обработать опиÑание задачи%s: PushSorted не Ñмог обнаружить задачу в ожидаемом меÑте%s: Повторный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± обÑлуживании к генератору DTR%s: Чтение выходных файлов в ÑпиÑке Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %s%s: Ðе удалоÑÑŒ прочеÑть ÑоÑтоÑние новой задачи%s: Получен Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR %s на копирование файла %s в ÑоÑтоÑнии %s%s: Полученный DTR принадлежит неактивной задаче%s: Получен Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR Ñ Ð´Ð²ÑƒÐ¼Ñ ÑƒÐ´Ð°Ð»Ñ‘Ð½Ð½Ñ‹Ð¼Ð¸ адреÑами!%s: Получен Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° размещение файлов (%s)%s: Получена задача в DTRGenerator%s: Задача получена в плохом ÑоÑтоÑнии: %s%s: УдалÑетÑÑ %s из динамичеÑкого ÑпиÑка выходных файлов %s%s: Сбой повторной обработки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸%s: Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ð± обÑлуживании к генератору DTR%s: Возврат прерванной задачи из генератора DTR%s: Обработка каталога ÑеÑÑий продолжаетÑÑ Ñлишком долго - %u.%06u Ñекунд%s: Ðекоторые процеÑÑÑ‹ %s дали Ñбой%s: СоÑтоÑние: %s поÑле %s%s: СоÑтоÑние: %s: размещение данных завершено%s: СоÑтоÑние: %s: вÑÑ‘ ещё в процеÑÑе переноÑа данных%s: СоÑтоÑние: ACCEPTED%s: СоÑтоÑние: ACCEPTED: dryrun%s: СоÑтоÑние: ACCEPTED: Ð²Ñ€ÐµÐ¼Ñ Ð½Ð° иÑполнение %s%s: ÑоÑтоÑние ACCEPTED: переход в PREPARING%s: СоÑтоÑние: ACCEPTED: обрабатываетÑÑ Ð¾Ð¿Ð¸Ñание задачи%s: СоÑтоÑние: CANCELING%s: СоÑтоÑние: FINISHING%s: СоÑтоÑние: INLRMS%s: СоÑтоÑние: INLRMS - проверка отÑутÑÑ‚Ð²Ð¸Ñ Ð¿Ñ€Ð¸Ð¾Ñтановки%s: СоÑтоÑние: INLRMS - проверка приоÑтановки(%u) и метка%s: СоÑтоÑние: INLRMS - метки не найдены%s: ÑоÑтоÑние INLRMS: Ñообщение на выходе %i %s%s: СоÑтоÑние: PREPARING%s: СоÑтоÑние: SUBMIT%s: Попытка удалить задание из неÑущеÑтвующего процеÑÑа Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…%s: Попытка удалить задание из активного процеÑÑа Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…%s: Два одинаковых Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð²Ñ‹Ð´Ð°Ñ‡Ð¸: %s%s: ÐеизвеÑтное правило допуÑка 
Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ '%s'%s: ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ¶Ð°ÐµÐ¼Ñ‹Ñ… файлов%s: Пользователь ÐЕ отгрузил файл %s%s: Пользователь отгрузил файл %s%s: ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %s%s: удаление файла %s: Ñбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¿ÑƒÑ‚Ð¸ к файлу: %s%s: удаление файла %s: Ñбой Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð°/каталога: %s%s: задача назначена Ð´Ð»Ñ Ð¼ÐµÐ´Ð»ÐµÐ½Ð½Ð¾Ð³Ð¾ опроÑа%s: задача обрабатываетÑÑ%s: задача Ð´Ð»Ñ Ð¾Ð±ÑлуживаниÑ%s: задача обнаружена при Ñканировании%s: задача будет ожидать внешнего процеÑÑа%s: Ð½Ð¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° принÑта%s: ÑÑ‚Ð°Ñ€Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° принÑта%s: запиÑÑŒ файла %s: %s%s: запиÑÑŒ файла %s: Ñбой при Ñоздании файла: %s%s: запиÑÑŒ файла %s: отÑутÑтвуют полезные файлы%s: запиÑÑŒ файла %s: неопознанные полезные файлы%s: запиÑÑŒ журнала %s: отÑутÑтвуют полезные файлы%s: запиÑÑŒ журнала %s: неопознанные полезные файлы%s: тип копии %s%s: перезапущена задача из FINISHING%s: перезапущена задача из INLRMS%s: перезапущена задача из PREPARING%s: размер %llu%s: ÑоÑтоÑние CANCELING: дочерний процеÑÑ Ð·Ð°Ð²ÐµÑ€ÑˆÐ¸Ð»ÑÑ Ñ ÐºÐ¾Ð´Ð¾Ð¼ выхода %i%s: ÑоÑтоÑние CANCELING: ÑобираетÑÑ Ð´Ð¸Ð°Ð³Ð½Ð¾Ñтика задачи%s: ÑоÑтоÑние CANCELING: запуÑк дочернего процеÑÑа: %s%s: ÑоÑтоÑние CANCELING: Ñрок Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ Ð¸Ñтёк%s: ÑоÑтоÑние: SUBMIT: дочерний процеÑÑ Ð·Ð°Ð²ÐµÑ€ÑˆÐ¸Ð»ÑÑ Ñ ÐºÐ¾Ð´Ð¾Ð¼ выхода: %i%s: ÑоÑтоÑние SUBMIT: запуÑк дочернего процеÑÑа: %s%s: задача отÑутÑтвует: %s%s: непредуÑмотренный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð½ÐµÑƒÑпешной задачи: %s%s: непредуÑмотренный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: %sожидаетÑÑ ')'ожидаетÑÑ ')'ИÑпользование атрибута 'action' в пользовательÑком опиÑании задачи не допуÑкаетÑÑÐžÐ¿Ñ†Ð¸Ñ Ð½Ð°Ñтроек 'control' теперь называетÑÑ 'controldir'; пожалуйÑта, иÑпользуйте новое названиеÐеобходимо задать значение атрибута 'stdout', еÑли задано значение атрибута 'join'Следующее назначениеСледующий иÑточник(пуÑто)(нулевой)--same и --not-same не могут быть заданы одновременно.: %d: %s: Ðе указано ÑредÑтво ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ñ‡Ñ‘Ñ‚Ð¾Ð² об учётных запиÑÑÑ…: Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ðº базе данных учёта задач: Сбой подготовки дочернего процеÑÑа ÑредÑтва ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ñ‡Ñ‘Ñ‚Ð¾Ð²: Сбой запуÑка дочернего процеÑÑа ÑредÑтва ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ñ‡Ñ‘Ñ‚Ð¾Ð²: СредÑтво Ð¸Ð·Ð¼ÐµÑ€ÐµÐ½Ð¸Ñ Ñ…Ð°Ñ€Ð°ÐºÑ‚ÐµÑ€Ð¸Ñтик выдало ошибку %i: %s: запиÑÑŒ учётной запиÑи занÑла %llu mÑ< %s<< %s> %sЗапрошен вычиÑлительный реÑурÑ, иÑпользующий Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ GridFTP, но необходимый %sподключаемый модуль не был подгружен. УÑтанавливали ли Ð’Ñ‹ Ñтот модуль? %sЕÑли нет, пожалуйÑта, уÑтановите пакет 'nordugrid-arc-plugins-globus'. %sÐазвание пакета может завиÑеть от типа вашего диÑтрибутива.A-REX REST: Сбой Ð²Ð¾Ð·Ð¾Ð±Ð½Ð¾Ð²Ð»ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸A-REX REST: ÐедопуÑтимое изменение ÑоÑтоÑниÑ: Ñ %s на %sÐ˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ раÑширении AC Ð´Ð»Ñ VO Сертификат атрибута недейÑтвителен: ACIX ответил %sÐ—Ð°Ð¿Ñ€Ð¾Ñ Ð°Ð²Ñ‚Ð¾Ñ€Ð¸Ð·Ð°Ñ†Ð¸Ð¸ ARC: %sПолитика Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ ARC: %sВыбор точки входа Ð´Ð»Ñ Ð·Ð°Ñылки задач ARC6Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° прерывание по причине ошибки в функции Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð¸Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° прерывание по причине ошибки передачиИÑполнение прервано!Сбой принÑтиÑСбой принÑтиÑ: %sПринÑто Ñоединение Ñ %u.%u.%u.%u:%uПринÑто Ñоединение Ñ [%s]:%uПринÑто Ñоединение к %u.%u.%u.%u:%uПринÑто Ñоединение к [%s]:%uÐ—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° заÑылку новой задачи или изменение Ñтарой принÑÑ‚: %sМеÑтонахождение ÑпиÑка доÑтупа: %sБаза данных учёта задач не может быть Ñоздана. 
Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñ€Ð¾Ð´Ð¸Ñ‚ÐµÐ»ÑŒÑкого каталога %s.База данных учёта задач не может быть Ñоздана: %s не ÑвлÑетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼Ð£Ñтановлено Ñоединение Ñ ÑƒÑ‡Ñ‘Ñ‚Ð½Ð¾Ð¹ базой данныхФайл базы данных учёта задач (%s) не ÑвлÑетÑÑ Ñтандартным файломУÑпешно инициализирована база данных учёта задачПолучен маркер доÑтупа Ð´Ð»Ñ %s: %sОшибка активацииДобавление раÑположениÑ: metadata: %sДобавление раÑположениÑ: url: %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ FQAN: %sДобавлÑетÑÑ FQAN/первичное значение: %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ группы VOMS: %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ первичной группы VOMS: %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ первичной роли VOMS: %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ роли VOMS: %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ виртуальной организации: %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ action-id: %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ цепочки Ñертификатов: %sДобавлÑетÑÑ Ñ‚Ð¾Ñ‡ÐºÐ° доÑтупа '%s' Ñ Ð½Ð°Ð·Ð²Ð°Ð½Ð¸ÐµÐ¼ интерфейÑа %sДобавление точки входа (%s) в ServiceEndpointRetrieverДобавление точки входа (%s) в TargetInformationRetrieverТочка входа (%s) добавлÑетÑÑ ÐºÐ°Ðº к ServiceEndpointRetriever, так и к TargetInformationRetrieverДобавлÑетÑÑ Ð°Ð´Ñ€ÐµÑ: %s - %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ profile-id: %sДобавлÑетÑÑ Ð¼Ð°Ñ€ÐºÑ‘Ñ€ запроÑа %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ идентификатора реÑурÑа: %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ владельца реÑурÑа: %sДобавлÑетÑÑ Ð¼Ð°Ñ€ÐºÑ‘Ñ€ проÑтранÑтва памÑти %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ subject-id: %sДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ subject-issuer: %sДобавление к маÑÑовому запроÑуДобавлÑетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ virtual-organization: %sÐдреÑ: %sКвота на процеÑÑÑ‹ (%u) иÑпользованаВÑе запроÑÑ‹ DTR Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s завершеныСбой вÑех запроÑовВÑе Ñ‚Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ ÑƒÐ´Ð¾Ð²Ð»ÐµÑ‚Ð²Ð¾Ñ€ÐµÐ½Ñ‹.Ð’Ñе полученные результаты неверныВыделено %u буферов по %llu байт каждый.Разрешить указанному клиенту получать учётные данные без паролÑ. 
Эта Ð¾Ð¿Ñ†Ð¸Ñ Ð¸ÑпользуетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ командой PUT в отношении Ñервера Myproxy.Каталог %s уже ÑущеÑÑ‚Ð²ÑƒÐµÑ‚Ð§Ñ‚ÐµÐ½Ð¸Ñ Ð¸Ð· иÑточника уже в процеÑÑеЗапиÑÑŒ в цель уже в процеÑÑеВозникла ошибка при ÑоÑтавлении опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ Ð´Ð»Ñ Ð·Ð°Ñылки на %sДругой процеÑÑ (%s) обладает блоком файла %sПолитика ARC не может быть задана в профиле SAML2.0 XACMLArcAuthZ: не удалоÑÑŒ инициализировать вÑе PDP - Ñтот процеÑÑ Ð±ÑƒÐ´ÐµÑ‚ нерабочимÐрхивирование запроÑа DTR %s, ÑоÑтоÑние %sÐрхивирование запроÑа DTR %s, ÑоÑтоÑние ERRORÐ’Ñ‹ уверены, что хотите вычиÑтить задачи Ñ Ð¾Ñ‚ÑутÑтвующей информацией?Ð’Ñ‹ уверены, что хотите Ñинхронизировать ÑпиÑок локальных задач?Формирование запиÑи журнала программы разбора BLAH: %sПрипиÑан к группе допуÑка %sПрипиÑан к ÑпиÑку пользователей %sПредполагаетÑÑ, что файл не найденПредполагаем, что переÑылка уже отменена, либо оборвалаÑÑŒ.Ð”Ð»Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° 'inputfiles' необходимы как минимум два значениÑÐ”Ð»Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° 'outputfiles' необходимы как минимум два значениÑПопытка интерпретации отноÑительного путь как URL - заменÑетÑÑ Ð½Ð° абÑолютныйПопытка ÑоединитьÑÑ Ñ %s по порту %iÐтрибут '%s' задан неÑколько разÐтрибут 'join' не может иÑпользоватьÑÑ, еÑли заданы оба атрибута 'stdout' и 'stderr'Значение атрибута (1): %sЗначение атрибута (2): %sЗначение атрибута в Ñубъекте: %sÐ˜Ð¼Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° (%s) Ñодержит неверный Ñимвол (%s)ОжидаетÑÑ Ð¸Ð¼Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð°Ðтрибуты 'gridtime' и 'cputime' не могут быть заданы одновременноÐтрибуты 'gridtime' и 'walltime' не могут быть заданы одновременноСбой проверки подлинноÑти при иÑполнении инÑтрукцийÐÐ´Ñ€ÐµÑ URL запроÑа Ð¿Ð¾Ð´Ñ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸Ñ Ð¿Ð¾Ð´Ð»Ð¸Ð½Ð½Ð¾Ñти: %sСбой при проверке подлинноÑтиДопущен через arc.pdpДопущен удалённой Ñлужбой PDPДопущен через simplelist.pdp: %sДопущен через xacml.pdpСбой метода BN_new или RSA_newСбой метода BN_set_wordÐедопуÑтимый URL в acix_endpointÐедопуÑтимый URL в deliveryservice: %sÐÐµÐ¿Ñ€Ð¸ÐµÐ¼Ð»ÐµÐ¼Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ подлинноÑти: %sÐеверный формат контрольной Ñуммы %sÐедопуÑтимое значение параметра доÑтупа %s в правилах доÑтупа к кÑшуÐеверное Ð¸Ð¼Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð°: %sОбнаружен недопуÑтимый формат в файле %s, Ñтроке %sÐеверный формат отклика XML Ñлужбы доÑтавки на %s: %sÐедопуÑтимый формат отзыва XML от ÑервиÑа в %s: %sÐеверный формат отклика XML: %sÐŸÐ»Ð¾Ñ…Ð°Ñ Ð¼ÐµÑ‚ÐºÐ°: "%s"ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð»Ð¾Ð³Ð¸ÐºÐ°ÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð»Ð¾Ð³Ð¸ÐºÐ° в %s - bringOnline завершилÑÑ ÑƒÑпешно, но Ð·Ð°Ð¿Ñ€Ð¾Ñ SRM не завершилÑÑ ÑƒÑпехом, либо ещё в процеÑÑеÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð»Ð¾Ð³Ð¸ÐºÐ° в %s - getTURLs завершилÑÑ ÑƒÑпешно, но Ð·Ð°Ð¿Ñ€Ð¾Ñ SRM не завершилÑÑ ÑƒÑпехом, либо ещё в процеÑÑеÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð»Ð¾Ð³Ð¸ÐºÐ° в %s - putTURLs завершилÑÑ ÑƒÑпешно, но Ð·Ð°Ð¿Ñ€Ð¾Ñ SRM не завершилÑÑ ÑƒÑпехом, либо ещё в процеÑÑеУказан неподходÑщий каталог Ð´Ð»Ñ Ð¼Ð¾Ð½Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸ÑÐедопуÑтимое Ð¸Ð¼Ñ Ð´Ð»Ñ Ð¸ÑполнÑемого файла: %sÐедопуÑтимое название Ñреды выполнениÑ: %sÐедопуÑтимое Ð¸Ð¼Ñ Ð´Ð»Ñ stderr: %sÐедопуÑтимое Ð¸Ð¼Ñ Ð´Ð»Ñ stdout: %sÐедопуÑтимое значение definedshare %sÐедопуÑтимое значение maxdeliveryÐедопуÑтимое значение maxemergencyÐедопуÑтимое значение maxpreparedÐедопуÑтимое значение maxprocessorнедопуÑтимое чиÑло в maxtransfertriesÐедопуÑтимый приоритет: %sÐедопуÑтимое значение remotesizelimitÐедопуÑтимое значение speedcontrolОбнаружен неверный или уÑтаревший формат в файле %s, Ñтроке %sÐеверный путь к %s: Rucio поддерживает запиÑÑŒ/чтение в /objectstores и лишь чтение в /replicasÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¸Ð½ÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ Ð² Ñтроке наÑтроек: %sÐедопуÑтимое значение Ð´Ð»Ñ loglevelÐеверно 
Ñформированный pid %s в файле блокировки %sÐ˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ СУПО:Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ СУПО:ПриÑутÑтвует маркер доÑтупа. Предпочтителен Ð´Ð»Ñ Ð·Ð°Ñылки задач.ÐаÑтройки поведениÑБлок %s не обнаружен в файле наÑтроек %sÐе указан BlockNameУвеличение приоритета %i до %i в ÑвÑзи Ñ Ð²Ñ…Ð¾Ð´Ñщим DTR более выÑокого приоритетаОба URL должны Ñодержать одинаковый протокол, Ð°Ð´Ñ€ÐµÑ Ñервера и портОба Ñлемента CACertificatePath and CACertificatesDir отÑутÑтвуют или пуÑÑ‚Ñ‹Ð—Ð°Ð¿Ñ€Ð¾Ñ %s на размещение на диÑке уÑпешно выполнен, файл теперь в ÑоÑтоÑнии ONLINEÐ—Ð°Ð¿Ñ€Ð¾Ñ %s на размещение на диÑке вÑÑ‘ ещё в очереди, Ñледует подождатьÐедопуÑÑ‚Ð¸Ð¼Ð°Ñ ÑтрокаПодгружен планировщик %sПодключаемый модуль брокера "%s" не обнаружен.Планировка и выборкаСледующие планировщики доÑтупны Ð´Ð»Ñ %s:Ðевозможно Ñоздать буфер!Сбой региÑтрации буфераВ процеÑÑе Ð¾Ñ‚ÐºÐ»ÑŽÑ‡ÐµÐ½Ð¸Ñ Ð¼ÐµÐ½ÐµÐ´Ð¶ÐµÑ€Ð° модулей обнаружены занÑтые подключаемые модули. ОжидаетÑÑ Ð¸Ñ… завершение.Сертификат и закрытый ключ агентÑтва не ÑÐ¾Ð²Ð¿Ð°Ð´Ð°ÑŽÑ‚Ð˜Ð¼Ñ Ñертификационного агентÑтва: %sУÑтановленные Ñертификаты CA:СОДЕРЖИМОЕ %u: %sÐ¢Ð°ÐºÑ‚Ð¾Ð²Ð°Ñ Ñ‡Ð°Ñтота процеÑÑора: %iМодель процеÑÑора: %sПроизводитель процеÑÑора: %sВерÑÐ¸Ñ Ð¿Ñ€Ð¾Ñ†ÐµÑÑора: %sÐе удалоÑÑŒ Ñоздать Ð·Ð°Ð¿Ñ€Ð¾Ñ CREAM: %sКÑш %s: Свободное проÑтранÑтво %f GBДоÑтуп к кÑшу разрешён Ð´Ð»Ñ %s пользователю Ñ DN %sДоÑтуп к кÑшу разрешён Ð´Ð»Ñ %s Ð´Ð»Ñ Ð’Ðž %sДоÑтуп к кÑшу разрешён Ð´Ð»Ñ %s Ð´Ð»Ñ Ð’Ðž %s и группы %sДоÑтуп к кÑшу разрешён Ð´Ð»Ñ %s Ð´Ð»Ñ Ð’Ðž %s и роли %sСвободное проÑтранÑтво кÑша: %i GBОбщий объём проÑтранÑтва кÑша: %i GBСбой в работе Ñкрипта очиÑтки кÑшаДата ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÐºÑша: %sКÑшированный файл %s не ÑущеÑтвуетÐе обнаружен кÑшированый файл %sКÑшированный файл %s был удалён во Ð²Ñ€ÐµÐ¼Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑÑылки или копии, Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°ÐšÑшированный файл %s был заблокирован во Ð²Ñ€ÐµÐ¼Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑÑылки или копии, Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°ÐšÑшированный файл %s был изменён в поÑледний момент, приоÑтановка процеÑÑа на 1 Ñекунду Ð´Ð»Ñ Ð¿Ñ€ÐµÐ´Ð¾Ñ‚Ð²Ñ€Ð°Ñ‰ÐµÐ½Ð¸Ñ Ð³Ð¾Ð½ÐºÐ¸ÐšÑшированный файл %s был изменён во Ð²Ñ€ÐµÐ¼Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑÑылки или копии, Ð½Ð¾Ð²Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ°Ð¤Ð°Ð¹Ð» кÑша: %sМета-файл кÑша %s пуÑÑ‚, будет воÑÑозданМета-файл кÑша %s, возможно, повреждён, будет воÑÑозданÐе обнаружен кÑш файла %sÐšÐ¾Ð¿Ð¸Ñ Ð² кÑше ещё дейÑтвительнаФайл в кÑше заблокирован - попытаемÑÑ Ð·Ð°Ð½Ð¾Ð²Ð¾Ð¤Ð°Ð¹Ð» в кÑше уÑтарел, будет загружен зановоВычиÑÐ»ÐµÐ½Ð½Ð°Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€Ð¾Ñ‡Ð½Ð°Ñ Ñумма %s Ñовпадает Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€Ð¾Ñ‡Ð½Ð¾Ð¹ Ñуммой ÑервераВычиÑÐ»ÐµÐ½Ð½Ð°Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма передачи %s Ñовпадает Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð¾Ð¹ Ñуммой иÑточникаВычиÑленнаÑ/ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ð°Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %s Ñовпадает Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð¾Ð¹ Ñуммой, заÑвленной точкой Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ SRM %sСбой обратного вызоваВызов PrepareReading когда Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð±Ñ‹Ð» уже подготовлен!Вызов PrepareWriting когда Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð±Ñ‹Ð» уже подготовлен!Вызов ACIX Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñом %sВызываетÑÑ http://localhost:60000/Echo иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ ClientSOAPВызываетÑÑ http://localhost:60000/Echo иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ httplibВызываетÑÑ https://localhost:60000/Echo иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ ClientSOAPВызываетÑÑ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ñ‹Ð¹ модуль %s Ð´Ð»Ñ Ð¾Ð¿Ñ€Ð¾Ñа точки входа на %sÐе удалоÑÑŒ открыть каталог Ñертификатов CA: %s. 
Сертификаты .Ðевозможно открыть каталог или файл VOMS: %s.Ðевозможно открыть каталог или файл VOMSES: %s.Ðет доÑтупа к файлу Ñертификата: %sÐет доÑтупа к файлу личного ключа: %sÐет доÑтупа к файлу доверенноÑти: %sÐевозможно добавить раÑширенное X509 раÑширение KeyUsage к новой доверенноÑтиÐевозможно добавить раÑширение X509 к доверенноÑтиÐе удалоÑÑŒ зарезервировать памÑтьÐевозможно зарезервировать памÑть Ð´Ð»Ñ Ñ€Ð°ÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾ÑтиÐе удалоÑÑŒ вычиÑлить профиль открытого ключаÐевозможно преобразовать раÑширение PROXY_CERT_INFO_EXTENSION в кодировке DER во внутренний форматÐевозможно преобразовать Ñтруктуру PROXY_CERT_INFO_EXTENSION из внутреннего формата в DERÐевозможно преобразовать Ñтруктуру keyUsage из формата кодировки DERÐевозможно преобразовать Ñтруктуру keyUsage из внутреннего формата в DERÐевозможно преобразовать закрытый ключ в формат DERÐевозможно преобразовать подпиÑанный Ñертификат EEC в формат DERÐевозможно преобразовать подпиÑанную доверенноÑть в формат DERÐевозможно преобразовать подпиÑанную доверенноÑть в формат PEMÐевозможно преобразовать Ñтроку в ASN1_OBJECTÐевозможно Ñкопировать раÑширенное раÑширение KeyUsageÐевозможно Ñкопировать Ð¸Ð¼Ñ Ñубъекта выдающего агентÑтва в доверенноÑтьÐевозможно Ñоздать ASN1_OCTET_STRINGÐевозможно Ñоздать BIO Ð´Ð»Ñ Ñ€Ð°Ð·Ð±Ð¾Ñ€Ð° запроÑаÐевозможно Ñоздать BIO Ð´Ð»Ñ Ð·Ð°Ð¿Ñ€Ð¾ÑаÐевозможно Ñоздать неформатированный ввод/вывод BIO Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñанного Ñертификата EECÐевозможно Ñоздать неформатированный ввод/вывод BIO Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñанной доверенноÑтиÐевозможно Ñоздать раÑширение PROXY_CERT_INFO_EXTENSIONÐе удалоÑÑŒ Ñоздать объект PolicyStoreÐе удалоÑÑŒ Ñоздать XACML ActionÐе удалоÑÑŒ Ñоздать атрибут XACML ActionAttribute: %sÐе удалоÑÑŒ Ñоздать XACML ResourceÐе удалоÑÑŒ Ñоздать атрибут XACML ResourceAttribute: %sÐе удалоÑÑŒ Ñоздать атрибут XACML SubjectAttribute: %sÐе удалоÑÑŒ Ñоздать Ð·Ð°Ð¿Ñ€Ð¾Ñ XACMLÐевозможно Ñоздать новую переменную X509_NAME_ENTRY Ð´Ð»Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа доверенноÑтиÐевозможно Ñоздать делегируемый документ Ð´Ð»Ñ Ñлужбы делегированию: %sÐевозможно Ñоздать раÑширение Ð´Ð»Ñ PROXY_CERT_INFOÐевозможно Ñоздать раÑширение Ð´Ð»Ñ keyUsageÐевозможно Ñоздать раÑширение Ð´Ð»Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾ÑтиÐевозможно Ñоздать функцию %sÐевозможно Ñоздать функцию: FunctionId не ÑущеÑтвуетÐевозможно Ñоздать компонент Ð½Ð°Ð·Ð²Ð°Ð½Ð¸Ñ CN Ð´Ð»Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾ÑтиÐе удалоÑÑŒ Ñоздать объект SSL ContextÐе удалоÑÑŒ Ñоздать объект SSLÐевозможно определить меÑто уÑтановки. ИÑпользуетÑÑ %s. ЕÑли Ñто не ÑоответÑтвует дейÑтвительноÑти, задайте, пожалуйÑта, переменную ARC_LOCATION.Ðевозможно Ñкопировать Ñерийный номер Ð´Ð»Ñ Ñертификата доверенноÑтиÐевозможно дублировать Ð¸Ð¼Ñ Ñубъекта Ð´Ð»Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа ÑамозаверÑющей доверенноÑтиÐе удалоÑÑŒ динамичеÑки Ñоздать AlgFacrotyÐе удалоÑÑŒ динамичеÑки Ñоздать AttributeFactoryÐе удалоÑÑŒ динамичеÑки Ñоздать анализаторÐе удалоÑÑŒ динамичеÑки Ñоздать FnFactoryÐе удалоÑÑŒ динамичеÑки Ñоздать PolicyÐе удалоÑÑŒ динамичеÑки Ñоздать RequestÐевозможно найти Ñлемент Ñ Ð½ÑƒÐ¶Ð½Ñ‹Ð¼ проÑтранÑтвом имёнÐевозможно найти Ñлемент Ñ Ð½ÑƒÐ¶Ð½Ñ‹Ð¼ проÑтранÑтвом имёнÐе обнаружен ArcPDPContextКаталог Ñертификатов СРне обнаружен ни в одном из Ñтандартных меÑÑ‚: ~/.arc/certificates, ~/.globus/certificates, %s/etc/certificates, %s/etc/grid-security/certificates, %s/share/certificates, /etc/grid-security/certificates. Сертификат не будет подтверждён. 
ЕÑли каталог Ñертификатов СРÑущеÑтвует, пожалуйÑта, укажите вручную его раÑÐ¿Ð¾Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ переменной X509_CERT_DIR, или задайте cacertificatesdirectory в файле наÑтроек клиента client.conf Ðевозможно найти XACMLPDPContextÐе найден файл Ñертификата: %sÐе удалоÑÑŒ найти открытый ключ по имени: %sÐевозможно найти Ñертификат агентÑтва, выдавшего Ñертификат Ñ Ñубъектом %s и отпечатком: %luÐе удалоÑÑŒ обнаружить файл личного ключа: %sÐе удалоÑÑŒ найти закрытый ключ по имени: %sÐšÐ¾Ð½Ñ„Ð¸Ð³ÑƒÑ€Ð°Ñ†Ð¸Ñ Ñерверов VOMS не обнаружена ни в одном из Ñтандартных раÑположений: ~/.arc/vomses, ~/.voms/vomses, $ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/vomses, /etc/vomses, /etc/grid-security/vomsesÐе удалоÑÑŒ Ñоздать Ð·Ð°Ð¿Ñ€Ð¾Ñ X509Ðевозможно Ñоздать объект правил доÑтупаÐевозможно извлечь SAMLAssertion SecAttr из контекÑта ÑообщениÑÐевозможно получить раÑширенное раÑширение KeyUsage из Ñертификата агентÑтваÐевозможно извлечь политику из раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ PROXY_CERT_INFO_EXTENSIONÐевозможно извлечь Ñзык политик из раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ PROXY_CERT_INFO_EXTENSIONÐе удалоÑÑŒ определить тип ÑертификатаÐе удалоÑÑŒ получить делегированные параметры доÑтупа: %s от Ñлужбы делегированиÑ:%sÐевозможно извлечь закрытый ключ выдающего агентÑтваÐевозможно подгрузить объект интерпретатора ARC : %sÐевозможно подгрузить объект запроÑа ARC: %sÐевозможно подгрузить объект политикÐевозможно подгрузить объект политик: %sÐевозможно подгрузить объект запроÑаÐевозможно открыть файл Ñ Ð¾Ð¿Ð¸Ñанием задачи: %sÐе удалоÑÑŒ открыть файл личного ключа %sÐе удалоÑÑŒ определить Ð¸Ð¼Ñ ÐºÐ»Ð°ÑÑа Ð´Ð»Ñ AttributeFactory из наÑтроекÐе удалоÑÑŒ определить Ð¸Ð¼Ñ ÐºÐ»Ð°ÑÑа Ð´Ð»Ñ CombiningAlgorithmFactory из наÑтроекÐе удалоÑÑŒ определить Ð¸Ð¼Ñ ÐºÐ»Ð°ÑÑа Ð´Ð»Ñ FunctionFactory из наÑтроекÐе удалоÑÑŒ определить Ð¸Ð¼Ñ ÐºÐ»Ð°ÑÑа Ð´Ð»Ñ Policy из наÑтроекÐе удалоÑÑŒ определить Ð¸Ð¼Ñ ÐºÐ»Ð°ÑÑа Ð´Ð»Ñ Request из наÑтроекÐевозможно определить дату: %sÐевозможно определить меÑÑц: %sÐевозможно определить чаÑовой поÑÑ: %sÐевозможно определить времÑ: %sÐе удалоÑÑŒ прочеÑть закрытый ключ PEMСбой при чтении файла личного ключа PEM: не удалоÑÑŒ раÑшифроватьСбой при чтении файла личного ключа PEM: не был введён парольÐевозможно прочеÑть закрытый ключ PEM: возможно, введён неверный парольÐе удалоÑÑŒ прочитать файл Ñертификата: %sÐе удалоÑÑŒ прочитать ÑертификатÐевозможно прочеÑть Ñтроку Ñертификата/ключаÐевозможно прочеÑть информацию из файла ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸Ðе удалоÑÑŒ прочитать личный ключÐевозможно задать Ñлемент CN в доверенноÑтиÐевозможно задать Ð¸Ð¼Ñ Ð²Ñ‹Ð´Ð°ÑŽÑ‰ÐµÐ³Ð¾ агентÑтва в доверенноÑтиÐе удалоÑÑŒ задать закрытый ключÐевозможно задать открытый ключ доверенноÑтиÐевозможно открыть на чтение файл Ð´Ð»Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа BIOÐевозможно задать Ñерийный номер в доверенноÑтиÐевозможно задать Ñрок годноÑти доверенноÑтиÐевозможно задать номер верÑии в доверенноÑтиÐевозможно Ñоздать запиÑываемый файл Ð´Ð»Ñ BIO запроÑаÐевозможно открыть на запиÑÑŒ файл Ð´Ð»Ñ Ð½ÐµÑ„Ð¾Ñ€Ð¼Ð°Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð¾Ð³Ð¾ ввода/вывода подпиÑанного Ñертификата EECÐевозможно открыть на запиÑÑŒ файл Ð´Ð»Ñ Ð½ÐµÑ„Ð¾Ñ€Ð¼Ð°Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð¾Ð³Ð¾ ввода/вывода подпиÑанной доверенноÑтиÐевозможно подпиÑать EECÐевозможно выделить памÑть Ð´Ð»Ñ Ð¿ÑƒÑ‚Ð¸ к файлу политик агентÑтваÐевозможно преобразовать раÑширение PROXYCERTINFO в кодировке DER во внутренний форматÐевозможно преобразовать Ð·Ð°Ð¿Ñ€Ð¾Ñ X509 из внутреннего формата в DERÐе удалоÑÑŒ Ñоздать контекÑÑ‚ делегированиÑÐе удалоÑÑŒ Ñоздать ÑÑылку Ð´Ð»Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ - проверьте, доÑтупен ли подгружаемый 
[Russian (ru) gettext message catalog for nordugrid-arc, stored here with broken character encoding: concatenated translations of ARC client and server messages covering arcproxy credential and VOMS handling, A-REX job management, DTR data staging and cache handling, delegation, and the LDAP/SRM/GridFTP/EMI-ES/LFC/Rucio data and job submission modules.]
ни один из реÑурÑов не предоÑтавил информациюПеренаправление задачи оборвано, невозможно подгрузить планировщик (%s)Ðе удалоÑÑŒ перенаправить задачу (%s), возможные Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¾Ñ‚ÑутÑтвуютЗадача номерМодуль обработки задач не был запущенПерезаÑылка задачи оборвана, Ñ‚.к. ни один из реÑурÑов не предоÑтавил информациюПерезаÑылка задачи оборвана: Ðевозможно подгрузить планировщик (%s)Сводка перезапуÑка задач:Задача уÑпешно возобновленаСводка заÑылки задач:Пользователь, заÑылающий задачи, не может быть ÑуперпользователемПользователь, отправивший задачу: %s (%i:%i)Задача запущена Ñ Ñрлыком: %sÐ’Ñ€ÐµÐ¼ÐµÐ½Ð½Ð°Ñ Ð¼ÐµÑ‚ÐºÐ° задачи уÑпешно разобрана как %sЗадача: %sЗадача: %s : Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° отмену отправлен и Ñообщён ÑлужбеЗадача: %s : Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° отмену отправлен, но ÑвÑзь Ñо Ñлужбой отÑутÑтвуетЗадача: %s : Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° очиÑтку отправлен и Ñообщён ÑлужбеJob: %s : Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° очиÑтку отправлен, но ÑвÑзь Ñо Ñлужбой отÑутÑтвуетЗадача: %s : ERROR : Сбой запиÑи метки прерываниÑЗадача: %s : ERROR : Сбой запиÑи отметки об очиÑткеЗадача: %s : ERROR : ОтÑутÑтвует Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ.Задача: %s : ERROR : Ðеопознанное ÑоÑтоÑниеПодключаемый модуль JobControllerPlugin %s не может быть ÑозданПодключаемый модуль JobControllerPlugin "%s" не обнаружен.КлаÑÑ JobDescription не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼ÐŸÐ¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ñ‹Ð¹ модуль JobDescriptionParserPlugin %s не может быть ÑозданПодключаемый модуль JobDescriptionParserPlugin "%s" не обнаружен.Ðеопрошенных задач: %dЗадачи Ñ Ð¾Ñ‚ÑутÑтвующей информацией не будут вычищены!Обработано задач: %d, уничтожено: %dОбработано задач: %d, обновлено: %dОбработано задач: %d, возобновлено: %dОбработано задач: %d, уÑпешно оборвано: %dОбработано задач: %d, уÑпешно оборвано: %d, уÑпешно очищено: %dОбработано задач: %d, уÑпешно получено: %dОбработано задач: %d, уÑпешно получено: %d, уÑпешно очищено: %dÐеразборчивые фрагменты в конце RSLБеÑÑмыÑлица в команде sessiondirСбой Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ÐŸÑ€ÐµÑ€Ð²Ð°Ð½ Ñигналом: Прерывание ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ð² ÑвÑзи Ñ Ð¸Ñтёкшим лимитом времениLCMAPS не возвратил никакого GIDLCMAPS не возвратил никакого UIDLCMAPS Ñодержит getCredentialDataLCMAPS Ñодержит lcmaps_runLCMAPS возвратил UID не ÑоответÑтвующий учётной запиÑи: %uLCMAPS возвратил недопуÑтимый GID: %uLCMAPS возвратил недопуÑтимый UID: %uСоединение LDAP Ñ %s уже уÑтановленоИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ð²ÐµÑ‚Ð° на Ð·Ð°Ð¿Ñ€Ð¾Ñ LDAP: %sLDAPQuery: Получение результатов Ñ %sLDAPQuery: уÑтанавливаетÑÑ Ñоединение Ñ %s:%dLdapQuery: ЗапрашиваетÑÑ %sСбой LIST/MLSTСбой LIST/MLST: %sЯзык (%s) не опознан ни одним из модулей разборки опиÑаний задач.Сбой поÑледнего шага региÑтрации в каталогеШирота: %fИÑтекло Ð²Ñ€ÐµÐ¼Ñ ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñ LDAP (%s)Левый операнд Ð´Ð»Ñ ÑÑ†ÐµÐ¿Ð»ÐµÐ½Ð¸Ñ RSL не приводитÑÑ Ðº буквенной конÑтантеУÑтаревшие варианты Ð¾Ð¿Ñ€ÐµÐ´ÐµÐ»ÐµÐ½Ð¸Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ð¹LegacyMap: не заданы группы наÑтроекLegacyPDP: атрибут безопаÑноÑти ARC Legacy не опознан.LegacyPDP: атрибут безопаÑноÑти %s не задан. 
Возможно, обработчик безопаÑноÑти ARC Legacy не наÑтроен, или претерпел Ñбой.LegacySecHandler: не указан файл наÑтроекБиблиотека: %sСтрока %d.%d атрибутов выдала: %sПодцепление MCC %s(%s) к MCC (%s) в %sПодцепление MCC %s(%s) к коммутатору (%s) в %sПодцепление MCC %s(%s) к Ñлужбе (%s) в %sПодцепление коммутатора %s к MCC (%s) в %sПодцепление коммутатора %s к коммутатору (%s) в %sПодцепление коммутатора %s к Ñлужбе (%s) в %sПодцеплÑетÑÑ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ñ‹Ð¹ файлСоздаётÑÑ ÑимволичеÑÐºÐ°Ñ ÑÑылка на ÑоответÑтвующий файлПодцепление/копирование файла из кÑшаСоздание ÑÑылки/копирование файла из кÑша в %sСбой перечиÑÐ»ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡ÐŸÐµÑ€ÐµÑ‡Ð¸Ñление не поддерживаетÑÑ Ð´Ð»Ñ REST-интерфейÑа VOMSПеречиÑление не поддерживаетÑÑ Ð´Ð»Ñ Ñ‚Ñ€Ð°Ð´Ð¸Ñ†Ð¸Ð¾Ð½Ð½Ð¾Ð³Ð¾ интерфейÑа VOMSПеречиÑление запроÑит информацию stat об URL %sListFiles: поиÑк метаданных: %sСбой проÑлушиваниÑПроÑлушивание завершеноПроÑлушивание началоÑьПроÑлушиваетÑÑ %s:%s(%s)ПроÑлушиваетÑÑ Ð¿Ð¾Ñ€Ñ‚ TCP %s(%s)Задачи уÑпешно перечиÑлены, обнаружено %d задач(и)Локальные задачи уÑпешно перечиÑлены, обнаружено %d задач(и)Подгружаемый модуль %s не Ñодержит запрашиваемого Ð¼Ð¾Ð´ÑƒÐ»Ñ %s типа %sПодгружен модуль %sЗагружен %s %sПодгружен JobControllerPlugin %sПодгружен JobDescriptionParserPlugin %sПодгружен MCC %s(%s)Подгружен Plexer %sПодгружена Ñлужба %s(%s)Подгружен SubmitterPlugin %sЗагружаетÑÑ %u-Ñ Ñлужба PythonСбой загрузки OToken - токен игнорируетÑÑПодгрузка Python broker (%i)Чтение файла наÑтроек (%s)Ð›Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ Ð³Ñ€ÑƒÐ¿Ð¿Ð° %s не ÑущеÑтвуетВнутренние задачи в Ñчёте: %iВнутренние приоÑтановленные задачи: %iЛокальный пользователь %s не ÑущеÑтвуетВнутренние задачи в очереди: %iÐедопуÑтимый URI в Location Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° %sТакой файл уже ÑущеÑтвуетВ URL Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ LFC отÑутÑтвуют меÑтоположениÑБлок %s принадлежит другому процеÑÑу (%s)Файл блокировки %s не ÑущеÑтвуетДолгота: %fПоиÑк текущих задачПоиÑк URL %sПоиÑк копий файла-иÑточникаMCC %s(%s) - Ñледующий %s(%s) не Ñодержит назначениÑMIME не подходит Ð´Ð»Ñ SOAP: %sMLSD не поддерживаетÑÑ - пробуем NLSTMLST не поддерживаетÑÑ - пробуем LISTГоловной процеÑÑ Python не был запущенГоловной поток Python не был запущенОбъём оÑновной памÑти: %iОбнаружена Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ ARCHERY (не задан тип конечной точки): %sОбнаружена Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ ARCHERY (не задан URL конечной точки): %sÐеверный атрибут VOMS AC %sÐевозможно открыть пул пользователей в %s.ПривÑзка к локальной группе Ñ Ð¸Ð´ÐµÐ½Ñ‚Ð¸Ñ„Ð¸ÐºÐ°Ñ‚Ð¾Ñ€Ð¾Ð¼: %iПривÑзка к локальной группе Ñ Ð¸Ð¼ÐµÐ½ÐµÐ¼: %sПривÑзка к локальному идентификатору: %iПривÑзка к текущему пользователю: %sДомашний каталог привÑзанного пользователÑ: %sСоответÑтвующие user:group (%s:%s) не обнаружены%s ÑтавитÑÑ Ð² ÑоответÑтвие %sЗначение параметра политики приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð¿ÑƒÑтоПравило приÑвоениÑ:ÐазначаетÑÑ Ð¾Ñ‡ÐµÑ€ÐµÐ´ÑŒ: %sСоответÑтвующий издатель: %sСовпадение ВО: %sСовпадений нетСоответÑтвие: %s %s %sСоответÑтвие: %s %s %s %sСравнение; %s (%d) не ÑоответÑтвует (%s) значению %s (%d), публикуемому назначением Ð´Ð»Ñ Ð¸ÑполнениÑ.Сравнение; значение Ñталонного теÑта %s не публикуетÑÑ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸ÐµÐ¼ Ð´Ð»Ñ Ð¸ÑполнениÑ.Сравнение; неÑовпадение CacheTotal: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB (CacheTotal), в опиÑании задачи: %d MB (CacheDiskSpace)Сравнение; не удовлетворено требование к вычиÑлительному реÑурÑу. 
Ðазначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %sСравнение; неÑовпадение ConnectivityIn: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %s (ConnectivityIn), в опиÑании задачи: %s (InBound)Сравнение; неÑовпадение ConnectivityOut: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %s (ConnectivityOut), в опиÑании задачи: %s (OutBound)Сравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение OperatingSystem не определеноСравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение CacheTotal не определеноСравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, ÑоÑтоÑние Ð·Ð´Ð¾Ñ€Ð¾Ð²ÑŒÑ Ð½Ðµ определеноСравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение ImplementationName не определеноСравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ MaxDiskSpace и WorkingAreaFree не определеноСравнение; ExecutionTarget: %s, не задано MaxTotalCPUTime или MaxCPUTime, предполагаетÑÑ Ð¾Ñ‚ÑутÑтвие ограничений на процеÑÑорное времÑСравнение; ExecutionTarget: %s, не задано MinCPUTime, предполагаетÑÑ Ð¾Ñ‚ÑутÑтвие ограничений на процеÑÑорное времÑСравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение NetworkInfo не определеноСравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение Platform не определеноСравнение; не удовлетворены Ñ‚Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ RunTimeEnvironment к ExecutionTarget: %sСравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ TotalSlots и MaxSlotsPerJob не определеныСравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение WorkingAreaLifeTime не определеноСравнение; ExecutionTarget: %s ÑоответÑтвует опиÑанию задачиСравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение ApplicationEnvironments не определеноСравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ MaxMainMemory и MainMemorySize не определеныСравнение; назначение Ð´Ð»Ñ Ð¸ÑполнениÑ: %s, значение MaxVirtualMemory не определеноСравнение; не удовлетворены Ñ‚Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñ OperatingSystem к ExecutionTarget: %sСравнение; неÑовпадение MainMemorySize: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d (MainMemorySize), в опиÑании задачи: %d (IndividualPhysicalMemory)Сравнение; проблема Ñ MaxCPUTime, ExecutionTarget: %d (MaxCPUTime), JobDescription: %d (TotalCPUTime/NumberOfSlots)Сравнение; неÑовпадение MaxDiskSpace: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB (MaxDiskSpace), в опиÑании задачи: %d MB (DiskSpace)Сравнение; неÑовпадение MaxDiskSpace: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB (MaxDiskSpace), в опиÑании задачи: %d MB (SessionDiskSpace)Сравнение; неÑовпадение MaxMainMemory: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d (MaxMainMemory), в опиÑании задачи: %d (IndividualPhysicalMemory)Сравнение; неÑовпадение MaxSlotsPerJob: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d (MaxSlotsPerJob), в опиÑании задачи: %d (NumberOfProcesses)Сравнение; проблема Ñ MaxTotalCPUTime, ExecutionTarget: %d (MaxTotalCPUTime), JobDescription: %d (TotalCPUTime)Сравнение; неÑовпадение MaxVirtualMemory: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d (MaxVirtualMemory), в опиÑании задачи: %d (IndividualVirtualMemory)Сравнение; проблема Ñ MinCPUTime, ExecutionTarget: %d (MinCPUTime), JobDescription: %d (TotalCPUTime/NumberOfSlots)Сравнение; не удовлетворено требование NetworkInfo, назначение Ð´Ð»Ñ Ð¸ÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½Ðµ поддерживает %s, указанное в опиÑании задачи.Сравнение; неÑовпадение платформ: ExecutionTarget: %s (Platform) JobDescription: %s (Platform)Сравнение; приведённое к значению %s значение %s (%d) не ÑоответÑтвует (%s) значению %s (%d) публикуемому назначением Ð´Ð»Ñ Ð¸ÑполнениÑ.Сравнение; неÑовпадение TotalSlots: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d (TotalSlots), в опиÑании задачи: %d 
(NumberOfProcesses)Сравнение; неÑовпадение WorkingAreaFree: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB (WorkingAreaFree), в опиÑании задачи: %d MB (DiskSpace)Сравнение; неÑовпадение WorkingAreaFree: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %d MB (WorkingAreaFree), в опиÑании задачи: %d MB (SessionDiskSpace)Сравнение; неÑовпадение WorkingAreaLifeTime: у Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð´Ð»Ñ Ð¸ÑполнениÑ: %s (WorkingAreaLifeTime), в опиÑании задачи: %s (SessionLifeTime)ДлительноÑть, Ð½Ð°Ð¸Ð±Ð¾Ð»ÑŒÑˆÐ°Ñ (процеÑÑорнаÑ): %sПредел диÑкового проÑтранÑтва: %iМакÑ. памÑть: %iПредел задач в очереди до СУПО: %iЗадачи в Ñчёте (предел): %iПредел Ñегментов на задачу: %iПредел потоков размещениÑ: %iПотоки отгрузки (верхний предел): %iÐ’Ñего заданий (предел): %iПредел общего времени (по чаÑам): %sЗадачи Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð² Ñчёте (предел): %iПредел виртуальной памÑти: %iПредел задач в очереди: %iДлительноÑть, Ð½Ð°Ð¸Ð±Ð¾Ð»ÑŒÑˆÐ°Ñ (по чаÑам): %sЗапущено макÑимальное количеÑтво потоков - новый Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ð¾Ð¼ÐµÑ‰Ñ‘Ð½ в очередьОшибка Ð²Ñ‹Ð´ÐµÐ»ÐµÐ½Ð¸Ñ Ð¿Ð°Ð¼ÑтиКлаÑÑ Message не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼Ð¡Ð¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ðµ, отправленное на Ñервер VOMS %s: %sМета-Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¸Ñточника и Ð°Ð´Ñ€ÐµÑ Ð½Ðµ ÑоответÑтвуют друг другу Ð´Ð»Ñ %sМетаданные копии отличаютÑÑ Ð¾Ñ‚ тех, что в каталогеМетаданные иÑточника и Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð½Ðµ ÑовпадаютМетаданные иÑточника и цели не Ñовпадают. ИÑпользуйте опцию --force Ð´Ð»Ñ Ð¿Ñ€Ð¸Ð½ÑƒÐ´Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ð³Ð¾ копированиÑ.ДлительноÑть, Ð½Ð°Ð¸Ð¼ÐµÐ½ÑŒÑˆÐ°Ñ (процеÑÑорнаÑ): %sДлительноÑть, Ð½Ð°Ð¸Ð¼ÐµÐ½ÑŒÑˆÐ°Ñ (по чаÑам): %sСубъект центра Ñертификации отÑутÑтвует в политике подпиÑи GlobusОтÑутÑтвует Ñлемент CertificatePath или ProxyPath element, или Ð’ Ñлементе Connect отÑутÑтвует название узла (Host)Ð’ Ñлементе Connect отÑутÑтвует номер порта (Port)Ð’ Ñлементе Listen отÑутÑтвует номер порта (Port)Ð’ наÑтройках отÑутÑтвует ВООтÑутÑтвует аргументВ наÑтройках отÑутÑтвуют получателиОтÑутÑтвует Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ подлинноÑтиОтÑутÑтвует название authgroup в allowaccessОтÑутÑтвует название authgroup в denyaccessÐе найден Ñкрипт cancel-%s-job - прерывание задачи может не работатьВ наÑтройках отÑутÑтвуют возможноÑтиУÑÐ»Ð¾Ð²Ð¸Ñ Ñубъекта отÑутÑтвуют в политике подпиÑи GlobusÐедоÑтаточно данных в раÑширении PROXY_CERT_INFO_EXTENSION в кодировке DERÐ’ команде controldir пропущен каталогОтÑутÑтвует Ð¸Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð° в журнальном файле [arex/jura]ОтÑутÑтвует заключительный отклик: %sÐ’ наÑтройках отÑутÑтвует группаÐÐµÐ¿Ð¾Ð»Ð½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð² отклике: %sÐ’ наÑтройках отÑутÑтвует издательОтÑутÑтвует Ð¸Ð¼Ñ Ð±Ð¸Ð±Ð»Ð¸Ð¾Ñ‚ÐµÐºÐ¸ LCASОтÑутÑтвует Ð¸Ð¼Ñ Ð±Ð¸Ð±Ð»Ð¸Ð¾Ñ‚ÐµÐºÐ¸ LCMAPSÐедоÑтающее чиÑло в maxjobsОтÑутÑÑ‚Ð²ÑƒÑŽÑ‰Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð´Ð»Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ñ‹ logreopenЭлемент CertificatePath отÑутÑтвует или пуÑтЭлемент CertificatePath или CACertificatesDir отÑутÑтвует или пуÑтЭлемент CertificatePath или CACertificatesDir отÑутÑтвует или пуÑÑ‚; будет выполнена лишь проверка подпиÑи, а не удоÑтоверение подлинноÑти ÑообщениÑЭлемент KeyPath отÑутÑтвует или пуÑтЭлемент KeyPath отÑутÑтвует или пуÑÑ‚, либо отÑутÑтвует Элемент PasswordSource отÑутÑтвует или пуÑтЭлемент Username отÑутÑтвует или пуÑтОтÑутÑтвует путь к файлу параметров доÑтупаОтÑутÑтвует ÑÑылка на фабрику и/или модуль. Ð’ наÑтоÑщее Ð²Ñ€ÐµÐ¼Ñ Ð±ÐµÐ·Ð¾Ð¿Ð°ÑÐ½Ð°Ñ Ð²Ñ‹Ð³Ñ€ÑƒÐ·ÐºÐ° LDAP DMC не поддерживаетÑÑ. ПожалуйтеÑÑŒ разработчикам.ОтÑутÑтвует указание на фабрику и/или модуль. ИÑпользование Globus в неопределённом режиме небезопаÑно - вызов (Grid)FTP заблокирован. 
СвÑжитеÑÑŒ Ñ Ñ€Ð°Ð·Ñ€Ð°Ð±Ð¾Ñ‚Ñ‡Ð¸ÐºÐ°Ð¼Ð¸.ОтÑутÑтвует указание на фабрику и/или модуль. ИÑпользование Globus во временном режиме небезопаÑно - SubmitterPlugin Ð´Ð»Ñ GRIDFTPJOB отключён. Сообщите разработчикам.ОтÑутÑтвует указание на фабрику и/или модуль. ИÑпользование Xrootd в неопределённом режиме небезопаÑно - Xrootd заблокирован. СвÑжитеÑÑŒ Ñ Ñ€Ð°Ð·Ñ€Ð°Ð±Ð¾Ñ‚Ñ‡Ð¸ÐºÐ°Ð¼Ð¸.ОтÑутÑтвует отклик Ñлужбы делегированиÑ.Ð’ наÑтройках отÑутÑтвует рольÐе найден Ñкрипт scan-%s-job - окончание задачи может быть незамеченымСхема отÑутÑтвует! Сверка пропуÑкаетÑÑ...Ð’ наÑтройках отÑутÑтвует контекÑтВ Ñообщении отÑутÑтвует объект авторизацииВ наÑтройках отÑутÑтвует ÑубъектОтÑутÑтвует Ð¸Ð¼Ñ ÑубъектаÐе найден Ñкрипт submit-%s-job - заÑылка задачи в СУПО может не работатьМодуль %s не Ñодержит подключаемый модуль %sМодуль %s не Ñодержит запрашиваемого подключаемого Ð¼Ð¾Ð´ÑƒÐ»Ñ %s типа %sМодуль %s не Ñодержит подключаемых модулей указанных типовÐе удалоÑÑŒ перезагрузить модуль %s (%s)Модуль %s не ÑвлÑетÑÑ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ñ‹Ð¼ модулем ARC (%s)ЗапуÑк ÑƒÐ¿Ñ€Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ð¼Ð¾Ð´ÑƒÐ»ÑÐ¼Ð¸Ð˜Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð¼ÐµÐ½ÐµÐ´Ð¶ÐµÑ€Ð° модулей в ModuleManager::setCfgÐазвание модулÑ: %sТочка Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡ÐµÐ½Ð¸Ñ %sЗаканчиваетÑÑ Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ðµ данныхОператор множеÑтвенноÑти RSL допуÑкаетÑÑ Ð»Ð¸ÑˆÑŒ в начале документаМножеÑтвенные атрибуты %s в файле наÑтроек (%s)Сбой MyProxy: %sСервер Myproxy не приÑлал Ñертификат Ñ Ñ€Ð°Ñширением VOMS ACNEW: запиÑÑŒ новой задачи: доÑтигнут макÑимальный предел общего количеÑтва задачNEW: запиÑÑŒ новой задачи: отÑутÑтвуют полезные файлыИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ NLST/MLSD не прошлаИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ NLST/MLSD не прошла: %sБудет иÑпользоватьÑÑ Ð±Ð°Ð·Ð° данных NSS %s Ð˜Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ NSS оборвалаÑÑŒ на базе данных Ñертификатов: %sNULL BIO передан в InquireRequestÐулевой обратный вызов Ð´Ð»Ñ %sÐ˜Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð° gramiИмÑ: %sОтрицательные права не поддерживаютÑÑ Ð¿Ð¾Ð»Ð¸Ñ‚Ð¸ÐºÐ¾Ð¹ подпиÑи GlobusÐи иÑточник, ни назначение не ÑвлÑÑŽÑ‚ÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼, поиÑк копий не будет произведёнÐи иÑточник, ни назначение не были размещены Ñ Ð»ÐµÐ½Ñ‚Ð¾Ñ‡Ð½Ð¾Ð³Ð¾ накопителÑ, пропуÑкаетÑÑ Ð¾Ñ‚Ð¼ÐµÐ½Ð° запроÑÐ¾Ð²Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ Ñети:Ðовое ÑоединениеСоздана Ð½Ð¾Ð²Ð°Ñ Ñ‚Ð¾Ñ‡ÐºÐ° доÑтупа (%s) из точки Ñ Ð½ÐµÐ¸Ð·Ð²ÐµÑтным интерфейÑом (%s)ÐÐ¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° принÑта Ñ Ð¸Ð´ÐµÐ½Ñ‚Ð¸Ñ„Ð¸ÐºÐ°Ñ‚Ð¾Ñ€Ð¾Ð¼ %sÐ¡ÐµÑ€Ð²Ð¸Ñ EMI ES уÑтановил новые Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¿Ñ€ÐµÐ´ÐµÐ»Ð¾Ð² Ð´Ð»Ñ Ð¿Ð°Ñ€Ð°Ð»Ð»ÐµÐ»ÑŒÐ½Ñ‹Ñ… запроÑов: %dСрок дейÑÑ‚Ð²Ð¸Ñ Ð½Ð¾Ð²Ð¾Ð¹ доверенноÑти иÑтекает в %sСрок дейÑÑ‚Ð²Ð¸Ñ Ð½Ð¾Ð²Ð¾Ð¹ доверенноÑти не дольше Ñтарой, доверенноÑть не обновлÑетÑÑФайл наÑтроек A-REX в наÑтройках candypond не обнаруженÐе ÑущеÑтвует атрибутов, ÑпоÑобных трактовать Ñтот тип: %sÐе задан Ñлемент ConnectFQAN не обнаружен. 
Ð’ качеÑтве Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ userFQAN будет иÑпользоватьÑÑ NoneСервер VOMS не отзываетÑÑ Ð¿Ð¾ HTTPÐе найдена СУПО в файле наÑтроекВ опиÑании задачи не найдено Ñтруктуры RSLÐет ответа SOAPÐет ответа SOAP от Ñлужбы доÑтавки %sÐет ответа SOAP от Ñлужбы доÑтавкиÐет активных запроÑов DTR %sÐет активной задачи Ñ Ñрлыком %sВнешнему процеÑÑу не приÑвоены аргументыÐе получен ответ о допуÑкеÐе указан каталог кÑшаКаталоги кÑша не найдены или не наÑтроены при вычиÑлении Ñвободного проÑтранÑтва.КÑш не опиÑан в файле наÑтроекÐе определена Ñ„ÑƒÐ½ÐºÑ†Ð¸Ñ Ð¾Ð±Ñ€Ð°Ñ‚Ð½Ð¾Ð³Ð¾ вызова Ð´Ð»Ñ %sСервер не выдал информацию о контрольной ÑÑƒÐ¼Ð¼ÐµÐ˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ контрольных Ñуммах недоÑтупнаВ отзыве Rucio Ð´Ð»Ñ %s отÑутÑтвует Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ контрольной ÑуммеÐевозможно подтвердить контрольную ÑуммуФайл наÑтроек не может быть подгружен.Ðе найден контрольный каталог в файле наÑтроекВ наÑтройках не заданы контрольные директории или каталоги ÑеÑÑийÐе указаны параметры доÑтупаДелегированные параметры доÑтупа не переданыВ данном контекÑте и Ñообщении отÑутÑтвуют политики Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ - пропуÑкаетÑÑÐ’ запроÑе отÑутÑтвует токен делегированиÑÐет доÑтупных назначений Ð´Ð»Ñ Ð¾Ñ‚Ð³Ñ€ÑƒÐ·ÐºÐ¸, попытаемÑÑ Ð¿Ð¾Ð·Ð¶ÐµÐазначение не заданоÐе указан каталог кÑша Ð´Ð»Ñ Ð¾Ð¿Ð¾Ñ€Ð¾Ð¶Ð½ÐµÐ½Ð¸ÑÐет ошибокОтÑутÑтвуют загружаемые файлы Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %sÐ’ отзыве Rucio Ð´Ð»Ñ %s отÑутÑтвует Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ размере файлаГруппа %i Ð´Ð»Ñ Ð¿Ñ€Ð¸Ð²Ñзанного Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð¾Ñ‚ÑутÑтвуетÐе указан Ñрлык задачиÐе указан файл Ñ Ð¾Ð¿Ð¸Ñанием задачи.Ðе задано опиÑание задачиÐи один разборщик не Ñмог обработать опиÑание задачиОтÑутÑтвуют разборщики опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ð½Ð¸ÑÐет разборщиков опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸, подходÑщих Ð´Ð»Ñ Ð¾Ð±Ñ€Ð°Ð±Ð¾Ñ‚ÐºÐ¸ Ñзыка '%s'Ð”Ð»Ñ Ñ‚ÐµÑта %d отÑутÑтвует опиÑание задачиЗадач нетÐе найдено ни одной задачи, попробуйте позжеЗадачи не указаныÐет задач Ð´Ð»Ñ Ð¿ÐµÑ€ÐµÐ·Ð°Ð¿ÑƒÑка в указанном ÑоÑтоÑнииОтÑутÑтвует левый операнд оператора подцеплениÑÐе инициализированы проÑлушивающие портыÐе указано Ð¸Ð¼Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð¹ учётной запиÑиПользователь не припиÑан ни к одному локальному имениÐе найдено ни одного меÑÑ‚Ð¾Ð½Ð°Ñ…Ð¾Ð¶Ð´ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° %sÐе найдено ни одного меÑÑ‚Ð¾Ð½Ð°Ñ…Ð¾Ð¶Ð´ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° %sÐе найдено раÑположений Ð´Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ, отличающихÑÑ Ð¾Ñ‚ иÑточникаÐе найдено раÑположений Ð´Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ, отличающихÑÑ Ð¾Ñ‚ иÑточника: %sÐе найдено физичеÑких адреÑов Ð´Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ: %sÐе найдено раÑположений Ð´Ð»Ñ Ñ„Ð°Ð¹Ð»Ð° иÑточника: %sÐе найдено раÑположений - возможно, копий больше нетÐе найдено раÑположений Ð´Ð»Ñ %sÐе оÑталоÑÑŒ раÑположений Ð´Ð»Ñ %sÐе найдено ÑоответÑÑ‚Ð²Ð¸Ñ Ð´Ð»Ñ %s в правилах доÑтупа к кÑшуБольше копий нет (%s)Опробованы вÑе интерфейÑÑ‹ Ð´Ð»Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ входа %s.Больше копий нет, будет иÑпользован файл %sÐе требуетÑÑ Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ðµ Ñ Ð»ÐµÐ½Ñ‚Ð¾Ñ‡Ð½Ð¾Ð³Ð¾ Ð½Ð°ÐºÐ¾Ð¿Ð¸Ñ‚ÐµÐ»Ñ Ð½Ð¸ иÑточника, ни назначениÑ; размещение пропуÑкаетÑÑÐе приÑвоено новых информационных документовÐе найдено больше MCC или Ñлужб в пути "%s"ОтÑутÑтвует Ñледующий Ñлемент цепиÐет каталогов ÑеÑÑий не в ÑоÑтоÑнии разгрузкиÐет каталогов ÑеÑÑий не в ÑоÑтоÑнии разгрузкиПерезапиÑÑŒ не запрошена или не разрешена, Ð¿Ñ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð¾Ñ‡Ð¸Ñтка пропуÑкаетÑÑÐ’ отзыве Rucio отÑутÑтвуeÑ‚ pnfs: %sÐе найдено реальных файлов Ð´Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸ÑÐе найдено реальных файлов иÑточникаÐе обнаружен файл pid в '%s'. 
Возможно, A-REX не запущен.ОтÑутÑтвуют наÑтроенные или допущенные раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð¿Ð¾ заданному адреÑу %sÐ”Ð»Ñ simplelist.pdp не задан файл политик или DN; пожалуйÑта, задайте в наÑтройках атрибут location или Ñ…Ð¾Ñ‚Ñ Ð±Ñ‹ один Ñлемент DN Ð´Ð»Ñ ÑƒÐ·Ð»Ð° PDP simplelist.Ðе найдено подходÑщего порта Ð´Ð»Ñ %sЗакрытый ключ Ñ Ð¸Ð¼ÐµÐ½ÐµÐ¼ %s отÑутÑтвует в базе данных NSSÐе удалоÑÑŒ обнаружить доверенноÑтьОтÑутÑтвует доверенноÑтьÐе указано название очереди в названии блока queueÐе указан доÑтупный по чтению каталог кÑшаÐи одна из удалённых Ñлужб доÑтавки не подходит, вынужденно иÑпользуетÑÑ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ Ð´Ð¾ÑтавкаÐе найдено копий Ð´Ð»Ñ %sÐе указан маркёр запроÑа!Ðе найдены маркёры запроÑаÐе удалоÑÑŒ Ñобрать запрошенную информацию о безопаÑноÑтиÐет ответа от %sÐет ответа от Ñервера AA %sÐе получен отклик: %sВызов stat не возвратил никаких результатовОтÑутÑтвует правый операнд оператора подцеплениÑОбработка/проверка параметров доÑтупа не запрошена Ð´Ð»Ñ '%s'Ð’ файле наÑтроек отÑутÑтвуют наÑтройки ÑервераÐе задано ни одного ÑервиÑа. ПожалуйÑта, наÑтройте ÑервиÑÑ‹ по умолчанию в файле наÑтроек клиента, либо укажите реÑÑƒÑ€Ñ Ð¸Ð»Ð¸ каталог реÑурÑов (опции -c или -g, Ñм. arcsync -h).Ðе найдены каталоги ÑеÑÑий в файле наÑтроек.Ðе найден каталог ÑеÑÑииÐе найден каталог ÑеÑÑии в файле наÑтроекИÑточник не заданÐе указан маркёр проÑтранÑтва памÑтиÐе найдены маркёры проÑтранÑтва памÑти, ÑоответÑтвующие опиÑанию %sÐе указан URL Ð´Ð»Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ¸Ð¡ÐµÑ€Ð²ÐµÑ€ VOMS не отзываетÑÑÐет такого запроÑа DTR %sÐет такого файла или каталогаÐет такой группы: %sÐет такого пользователÑ: %sПолитика не Ñодержит назначенийПравило не Ñодержит назначенийТеÑÑ‚Ð¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° под номером %d не найдена.ТеÑÑ‚Ð¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° под номером "%d" не ÑущеÑтвуетÐет подходÑщих кÑшейÐе удалоÑÑŒ обнаружить Ñертификат Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ñ ÐºÑ€Ð°Ñ‚ÐºÐ¸Ð¼ именем %sСертификат Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½Ðµ обнаруженÐе указано Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»ÑÐ’ наÑтройках не обнаружено ни одного приемлемого кÑша, кÑширование отключеноÐе найдены дейÑтвительные параметры доÑтупа, выходÐе оÑталоÑÑŒ допуÑтимых деÑкрипторов Ð´Ð»Ñ Ð¿Ñ€Ð¾ÑлушиваниÑEMI ES не возвратил дейÑтвительных Ñрлыков задачÐет допуÑтимых адреÑовÐе получено приемлемого отзыва от Ñервера VOMS: %sÐе задана значение атрибута Ñубъекта %s, пропуÑкаетÑÑÐеоднородный реÑурÑÐе поддерживаетÑÑ Ð½Ð¸ один из запрошенных протоколов транÑпортного уровнÑÐет допуÑкаÐет допуÑка ÑоглаÑно запроÑу: %sarc.pdp запретил доÑтуп - не удалоÑÑŒ получить отклик обработчикаÐет допуÑка от arc.pdp - некоторые Ñлементы RequestItem не удовлетворÑÑŽÑ‚ политикеÐе допущен через simplelist.pdp: %sÐедоÑтаточное количеÑтво параметров в copyurlÐедоÑтаточное количеÑтво параметров в linkurlÐедоÑтаточно Ñвободного меÑта Ð´Ð»Ñ Ð·Ð°Ð¿Ð¸Ñи файлаМодуль %s не найден в кÑшеÐе получена ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма Ð´Ð»Ñ zip-ÑоÑтавлÑющейÐичего не проÑлушиваетÑÑСлужба доÑтавки %s не иÑпользуетÑÑ Ð² ÑвÑзи Ñ Ð¿Ñ€ÐµÐ´Ñ‹Ð´ÑƒÑ‰Ð¸Ð¼ ÑбоемСлужба доÑтавки на %s не иÑпользуетÑÑ Ð² ÑвÑзи Ñ Ð¿ÐµÑ€ÐµÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸ÐµÐ¼Ð¦ÐµÐ»ÑŒ недейÑтвительнаÐедейÑтвительный иÑточникЗадание не указано: Ð’Ñ‹ должны либо указать номер теÑтового заданиÑ, иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ Ð¾Ð¿Ñ†Ð¸ÑŽ -J (--job), либо запроÑить информацию о Ñертификатах, иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ Ð¾Ð¿Ñ†Ð¸ÑŽ -E (--certificate) Сбой уведомлениÑПроизводитÑÑ ÐºÐ¾Ð¿Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ðµ (из -> в)Ðомер %d Ñ ÐºÑ€Ð°Ñ‚ÐºÐ¸Ð¼ именем: %s%sÐомер %d: %sКоличеÑтво Ñлементов ComputingService полученных из полного документа и из запроÑа XPath не Ñовпадают: %d != %dИзменилоÑÑŒ чиÑло блокировок OpenSSL - 
Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¸Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸ÑЧиÑло иÑточников и чиÑло назначений не ÑоответÑтвуют друг другуПÐРÐМЕТР...СемейÑтво ОС: %sÐазвание ОС: %sВерÑÐ¸Ñ ÐžÐ¡: %sOTokens: Attr: %s = %sOTokens: Attr: ÑообщениеOTokens: Attr: токен: %sOTokens: Attr: токен: ноÑитель: %sOTokens: HandleOTokens: Handle: Ñозданы атрибуты: Ñубъект = %sOTokens: Handle: ÑообщениеОбъект не подходит Ð´Ð»Ñ Ð¿ÐµÑ€ÐµÑ‡Ð¸ÑлениÑОбъект не инициализирован (внутреннÑÑ Ð¾ÑˆÐ¸Ð±ÐºÐ°)Полученный XML: %sПолученные Ð°Ð´Ñ€ÐµÑ Ð¸ номер порта неприемлемыСбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸Ð¡Ð±Ð¾Ð¹ Ð¾Ð¿Ñ€ÐµÐ´ÐµÐ»ÐµÐ½Ð¸Ñ ÑоÑтоÑниÑCandyPond поддерживает только POSTDataDeliveryService поддерживает только POSTÐ”Ð»Ñ Ð²Ñ‹Ð²Ð¾Ð´Ð° поддерживаетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ неформатированный буферМы поддерживаем только права globus в политике подпиÑи Globus - %s не поддерживаетÑÑМы поддерживаем только права подпиÑи в политике подпиÑи Globus - %s не поддерживаетÑÑÐа наÑтоÑщий момент единÑтвенным поддерживаемым иÑточником Ð¿Ð°Ñ€Ð¾Ð»Ñ ÑвлÑетÑÑ Ñтандартный вход.Ð”Ð»Ñ Ð²Ñпомогательной программы поддерживаетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ пользователь '.'Ошибка OpenSSL -- %sОшибка OpenSSL: %sОшибка операционной ÑиÑÑ‚ÐµÐ¼Ñ‹ÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ñ ÑƒÑпешно Ð¿Ñ€ÐµÑ€Ð²Ð°Ð½Ð°ÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ñ Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð° уÑпешноЭта Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ñ Ð½Ðµ поддерживаетÑÑ Ð´Ð»Ñ Ð´Ð°Ð½Ð½Ð¾Ð³Ð¾ типа URLДейÑтвие над путём "%s"OptimizedInformationContainer Ñоздал временный файл: %sOptimizedInformationContainer не Ñмог Ñоздать временный файлOptimizedInformationContainer не Ñмог разобрать XMLOptimizedInformationContainer не Ñмог переименовать временный файлOptimizedInformationContainer не Ñмог запиÑать документ XML во временный файлОпциÑ: %sОпции 'p' и 'n' не могут быть иÑпользованы одновременноГруппа опций %s:Этот модуль не имеет наÑтраиваемых параметровИзначальное опиÑание задачи приведено ниже:Обнаружен неиÑпользуемый блок Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ (%s) - очиÑткаДругие дейÑтвиÑÐедоÑтаточно памÑти Ð´Ð»Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñлучайного Ñерийного номераДоÑтигнут предел количеÑтва попытокЗакончилиÑÑŒ попытки приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð½Ð¾Ð²Ð¾Ð³Ð¾ Ñрлыка задачиЗакончилиÑÑŒ попытки приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð½Ð¾Ð²Ð¾Ð³Ð¾ Ñрлыка задачи в %sИÑходÑщее Ñообщение не ÑвлÑетÑÑ Ñообщением SOAPВывод Ñертификата EECВарианты Ñ„Ð¾Ñ€Ð¼Ð°Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð²Ñ‹Ð´Ð°Ñ‡Ð¸Ð’Ñ‹Ð²Ð¾Ð´ доверенноÑтиЗапрошена перезапиÑÑŒ - назначение будет предварительно очищеноВладелец: %sИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ PASV не прошлаИнÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ PASV не прошла: %sPDP: %s (%s)PDP: %s (%s) не может быть подгруженPDP: %s не может быть подгруженPDP: отÑутÑтвует атрибут имениотÑутÑтвует раÑположение PDPDраÑположение PDPD: %sСбой PEM_read_bio_X509_REQСбой PEM_write_bio_X509_REQотÑутÑтвует раÑположение PEPDраÑположение PEPD: %sÐе удалоÑÑŒ задать ÑпоÑоб проверки целоÑтноÑти PKCS12 и паролÑÐе задан пароль Ð´Ð»Ñ Ð½Ð¾Ð²Ð¾Ð³Ð¾ Ñертификата PKCS12Ð—Ð°Ð¿Ñ€Ð¾Ñ POST на ÑпецифичеÑкий путь не поддерживаетÑÑРазобрано доменов: %uÐе удалоÑÑŒ Ñоздать контекÑÑ‚ анализатора!Сбой разборщика Ñ ÐºÐ¾Ð´Ð¾Ð¼ ошибки %i.РазбираетÑÑ Ñ„Ð°Ð¹Ð» .local Ñ Ñ†ÐµÐ»ÑŒÑŽ Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ ÑпецифичеÑких Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ идентификаторов и информацииРазборка VOMS AC Ñ Ñ†ÐµÐ»ÑŒÑŽ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ о FQANТип ÑˆÐ¸Ñ„Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¿Ð°Ñ€Ð¾Ð»Ñ Ð½Ðµ поддерживаетÑÑ: %sПуть %s недейÑтвителен, ÑоздаютÑÑ Ð½ÐµÐ´Ð¾Ñтающие директорииТребуетÑÑ Ð¿ÑƒÑ‚ÑŒ к файлу ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡ .local.Должен быть указан путь к Ñертификату доверенноÑти пользователÑ.Ð˜Ð¼Ñ ÐºÐ¾Ð½Ñ‚Ð°ÐºÑ‚Ð°: %sЗапроÑÑ‹ POST/SOAP предварÑющие задачу не поддерживаютÑÑВыполнÑетÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñ /*ВыполнÑетÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñ /ComputingServiceВыполнÑетÑÑ Ð·Ð°Ð¿Ñ€Ð¾Ñ 
/Services/ComputingServiceПроизводитÑÑ Ñравнение Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸ÐµÐ¼ (%s).Ðе производитÑÑ Ð½Ð¸ Ñортировки, ни поиÑка ÑоответÑтвиÑУÑтойчивый ÑбойХроничеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° ÑлужбыСбой проверки прав доÑтупа, попытка загрузки без иÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð¸Ñ ÐºÑшаПроверка прав доÑтупа не удалаÑÑŒ: %sСбой проверки прав доÑтупа к иÑходному URL: %sПроверка допуÑка пройденаПроверка прав доÑтупа пройдена Ð´Ð»Ñ URL %sОбработка оÑтавшихÑÑ Ð·Ð°Ð´Ð°Ñ‡ÐœÐµÑто: %sПлатформа: %sПожалуйÑта, выберите базу данных NSS Ð´Ð»Ñ Ð¸ÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð¸Ñ (1-%d): ПожалуйÑта, выберите то, что будет иÑпользоватьÑÑ (1-%d): Коммутатор (%s) - Ñледующий %s(%s) не Ñодержит назначениÑÐ”Ð»Ñ Ñледующего поÑле %s компонента Plexer не задан атрибут IDОшибка подключаемого Ð¼Ð¾Ð´ÑƒÐ»Ñ %s: %sОшибка подключаемого Ð¼Ð¾Ð´ÑƒÐ»Ñ %s: %uПодключаемый модуль %s не Ñмог запуÑтитьÑÑПодключаемый модуль %s не Ñмог запуÑтитьÑÑРаÑширение %s Ð´Ð»Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ доÑтупа %s недоÑтупно (никогда не должно ÑлучатьÑÑ).РаÑширение %s Ð´Ð»Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ доÑтупа %s неиÑправно.Подключаемый модуль %s вывел на печать: %sПодключаемый модуль %s вывел на печать: %uПодключаемый модуль %s не выдал привÑзкиПодключаемый модуль %s не выдал имени пользователÑПодключаемый модуль %s ответил Ñлишком длинно: %sПодключаемый модуль %s ответил: %uÐ’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ð¾Ð³Ð¾ Ð¼Ð¾Ð´ÑƒÐ»Ñ %s иÑтекло поÑле %u ÑекундПуÑÑ‚Ð°Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð° в подключаемом модуле (приÑвоение имени пользователÑ)Ðецифровое значение времени Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð² подключаемом модуле (приÑвоение имени пользователÑ): %sÐеприемлемое значение времени Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð² подключаемом модуле (приÑвоение имени пользователÑ): %sОтвет подключаемого модулÑ: %sÐе удалоÑÑŒ запуÑтить Ñлужбу принÑÑ‚Ð¸Ñ Ñ€ÐµÑˆÐµÐ½Ð¸Ð¹ по политикамПуÑтые правилаПолитика не в формате GACLСтрока политики: %sСубъект политики: %sPolicyId: %s Внутренний алгоритм политики:-- %sПочтовый индекÑ: %sЗадачи в очереди до СУПО: %iСбой предварительной очиÑтки, вÑÑ‘ же попытаемÑÑ ÑÐºÐ¾Ð¿Ð¸Ñ€Ð¾Ð²Ð°Ñ‚ÑŒÐŸÑ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ñ€ÐµÐ³Ð¸ÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸ÑÐŸÑ€ÐµÐ´Ð²Ð°Ñ€Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ñ€ÐµÐ³Ð¸ÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð² каталогеПодготовка к размещению назначениÑПодготовка к размещению файла-иÑточникаПроблема при доÑтупе к кÑшированному файлу %s: %sПроблема при Ñоздании DTR (иÑточник %s, назначение %s)Проблемы при подключении Ð¼Ð¾Ð´ÑƒÐ»Ñ %s, модуль пропуÑкаетÑÑ.Проблема Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼, переходим к завершению Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…ÐŸÑ€Ð¾Ð±Ð»ÐµÐ¼Ð° Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼, кÑш будет разблокированОбработка запроÑа %sВышло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ð¾Ñ‚Ð¾ÐºÐ° обработки. DTR перезапуÑкаетÑÑÐеподдерживаемый тип обработки: %sÐ’Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð° Ñчёта (%s), указанное в опиÑании задачи, приходитÑÑ Ð½Ð° период недоÑтупноÑти цели [ %s - %s ].ДоÑтупны модули Ð´Ð»Ñ Ñледующих протоколов:Протокол не поддерживаетÑÑ - пожалуйÑта, убедитеÑÑŒ что уÑтановлены необходимые подключаемые модули gfal2 (пакеты gfal2-plugin-*)Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ доверенноÑти:Срок дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти вышелСрок дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти вышел. ЗаÑылка задачи оборвана. 
ПожалуйÑта, запуÑтите 'arcproxy'!Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Срок дейÑÑ‚Ð²Ð¸Ñ Ñертификата иÑтёк.Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Срок дейÑÑ‚Ð²Ð¸Ñ Ñертификата ещё не началÑÑ.Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ð¾Ð³Ð¾ файла.Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Сбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ VOMS.Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Ðе обнаружено дейÑтвительных Ñертификатов.Сбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти: Ðе обнаружено дейÑтвительных закрытых ключей.ДоверенноÑть уÑпешно ÑозданаСрок дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти иÑтёкДлина ключа доверенноÑти: %iРаÑположение доверенноÑти: %sПодпиÑÑŒ доверенноÑти: %sДоверенноÑть запиÑана в %sÐ˜Ð¼Ñ Ñубъекта доверенноÑти: %sТип доверенноÑти: %sДоверенноÑть Ñ Ð¿Ð¾Ð»Ð¸Ñ‚Ð¸ÐºÐ¾Ð¹ ARCДоверенноÑть Ñо вÑеми унаÑледованными правамиДоверенноÑть Ñ Ð½ÐµÐ·Ð°Ð¿Ð¾Ð»Ð½ÐµÐ½Ð½Ð¾Ð¹ политикой - отказ по неизвеÑтной политикеДоверенноÑть Ñ Ð¾Ð³Ñ€Ð°Ð½Ð¸Ñ‡ÐµÐ½Ð½Ð¾Ð¹ политикой: %sДоверенноÑть Ñ Ð½ÐµÐ¸Ð·Ð²ÐµÑтной политикой - отказ по неизвеÑтной Ð¿Ð¾Ð»Ð¸Ñ‚Ð¸ÐºÐµÐ˜Ð¼Ñ Ñубъекта доверенноÑти: %sДоверенноÑть/параметры доÑтупа Ñохранены в %sДоверенноÑть: %sÐ—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° размещение %s вÑÑ‘ ещё в очереди, Ñледует подождать %i ÑекундКонÑтруктор надÑтройки Python отработал уÑпешноДеÑтруктор оболочки Python (%d)Вызван Python-конÑтруктор планировщика (%d)Вызван Python-деÑтруктор планировщика (%d)Интерпретатор Python заблокированИнтерпретатор Python разблокированВызван процеÑÑ Python wrapperÐ˜Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ PythonBrokerУровень качеÑтва: %sРезультат запроÑа не Ñодержит Ñлементов.Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð²Ð¾Ð·Ð²Ñ€Ð°Ñ‚Ð¸Ð» неожиданный Ñлемент: %s:%sОпрашиваетÑÑ Ñервер ACIX на %sОпрашиваетÑÑ Ñ‚Ð¾Ñ‡ÐºÐ° доÑтупа WSRF GLUE2 к информации о вычиÑлительном реÑурÑе REST.ОпрашиваетÑÑ ÑпиÑок из %d задач(и)МаÑÑовый Ð¾Ð¿Ñ€Ð¾Ñ ÐºÐ¾Ð¿Ð¸Ð¹ иÑÑ‚Ð¾Ñ‡Ð½Ð¸ÐºÐ°ÐžÐ¿Ñ€Ð¾Ñ ÑоÑтоÑÐ½Ð¸Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа на Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸ÐµÐ¡Ð²ÐµÐ´ÐµÐ½Ð¸Ñ Ð¾Ð± очереди:REST: обработка %s в %sREST:CLEAN задачи %s - %sREST:GET задачи %s - %sREST:KILL задачи %s - %sREST:PUT задачи %s: файл %s: отÑутÑтвует нагрузкаREST:RESTART задачи %s - %sПротоколы REST и уÑтаревший VOMS не могут быть запрошены одновременно.Сбой метода RSA_generate_key_exЗамена в RSL не ÑвлÑетÑÑ Ð¿Ð¾ÑледовательноÑтьюЗамена в RSL не ÑвлÑетÑÑ Ð¿Ð¾ÑледовательноÑтью из двух ÑÐ»ÐµÐ¼ÐµÐ½Ñ‚Ð¾Ð²Ð˜Ð¼Ñ Ð¿ÐµÑ€ÐµÐ¼ÐµÐ½Ð½Ð¾Ð¹ Ð´Ð»Ñ Ð·Ð°Ð¼ÐµÐ½Ñ‹ RSL не приводитÑÑ Ðº буквенной конÑтантеЗначение переменной Ð´Ð»Ñ Ð·Ð°Ð¼ÐµÐ½Ñ‹ RSL не приводитÑÑ Ðº буквенной конÑÑ‚Ð°Ð½Ñ‚ÐµÐ¡Ð»ÑƒÑ‡Ð°Ð¹Ð½Ð°Ñ ÑортировкаÐÐµÐ¾Ð±Ñ€Ð°Ð±Ð¾Ñ‚Ð°Ð½Ð½Ð°Ñ Ð¸Ð½ÑтрукциÑ: %sВоÑÑоздаётÑÑ ÐºÐ»Ð¸ÐµÐ½Ñ‚ EMI ESПрочитано %i байтÐе удалоÑÑŒ подтвердить наличие доÑтупа на чтениеЗакрыт доÑтуп на чтение Ð´Ð»Ñ %s: %sСбой команд Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð² проверке подлинноÑтиПрочеÑть Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¸Ð· файлаЧтение запроÑа из ÑтрокиЧтение %u байтов из байта %lluФактичеÑÐºÐ°Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð° из %s в %sПричина: %sПланировщик вернул Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR %s в ÑоÑтоÑнии %sÐ—Ð°Ð¿Ñ€Ð¾Ñ DTR %s получен в процеÑÑе Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ð³ÐµÐ½ÐµÑ€Ð°Ñ‚Ð¾Ñ€Ð° - не может быть обработанПринÑÑ‚ неверный Ð·Ð°Ð¿Ñ€Ð¾Ñ DTRПолучено Ñообщение вне полоÑÑ‹ (некритично, уровень ERROR лишь Ð´Ð»Ñ Ð¾Ñ‚Ð»Ð°Ð´ÐºÐ¸)Ðе получено запроÑов DTRПолучена Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° запроÑа DTR %s, вÑÑ‘ ещё в ÑоÑтоÑнии передачиПереÑоединениеЗапиÑÑŒ о новой задаче уÑпешно добавлена в базу данных (%s)Перенаправление к %sПеренаправление к новому URL: %sОтказано в Ñоединении: Превышен предел ÑоединенийРегиÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ ÐºÐ¾Ð¿Ð¸Ð¸ назначениÑРегиÑтрируетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³: %s Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ñ‹Ð¼ 
модулем: %sРегиÑтрируетÑÑ Ð²Ñпомогательный каталог: %sСбой региÑтрации буфера Globus FTP - проверка прерываетÑÑОжидаетÑÑ Ð¸Ñпользование релÑционного Ð¾Ð¿ÐµÑ€Ð°Ñ‚Ð¾Ñ€Ð°Ð¡Ð±Ñ€Ð¾Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸ÑОтзыв запроÑов, Ñделанных при Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ð¸Ð¡Ð±Ñ€Ð¾Ñ Ð·Ð°Ð¿Ñ€Ð¾ÑÐ¾Ð²Ð¡Ð±Ñ€Ð¾Ñ Ð¸ÑточникаПерепривÑзка к локальной группе Ñ Ð¸Ð´ÐµÐ½Ñ‚Ð¸Ñ„Ð¸ÐºÐ°Ñ‚Ð¾Ñ€Ð¾Ð¼: %iПерепривÑзка к локальной группе Ñ Ð¸Ð¼ÐµÐ½ÐµÐ¼: %sПерепривÑзка к локальному идентификатору: %iПерепривÑзка к локальному пользователю: %sДомашний каталог перепривÑзанного пользователÑ: %sRemove: удалÑетÑÑ: %sУдалÑетÑÑ %sУдалÑетÑÑ Ñ‚Ð¾Ñ‡ÐºÐ° входа %s: она Ñодержит ненужный Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ (%s).УдалÑетÑÑ Ð»Ð¾Ð³Ð¸Ñ‡ÐµÑкий файл из метаданных %sУдалÑÑŽÑ‚ÑÑ Ð¼ÐµÑ‚Ð°Ð´Ð°Ð½Ð½Ñ‹Ðµ в %sОтмена предварительной региÑтрации Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð² каталогеПереименование: Ñбой в globus_ftp_client_moveПереименование: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ð¸%s переименовываетÑÑ Ð² %sПараметры доÑтупа уÑпешно обновленыОбновление параметров доÑтупа Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸: %sОбновлÑетÑÑ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñть Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %sÐ—Ð°Ð¿Ñ€Ð¾Ñ DTR %s в ÑоÑтоÑнии %s заменÑетÑÑ Ð½Ð¾Ð²Ñ‹Ð¼ запроÑомЗаменÑетÑÑ ÑущеÑтвующий маркер Ð´Ð»Ñ %s в кÑше маркеров RucioЗамена Ñтарой информации в SRM на новую Ð´Ð»Ñ URL %sОчередь '%s' заменÑетÑÑ Ð½Ð° '%s'ÐšÐ¾Ð¿Ð¸Ñ %s не ÑоответÑтвует предпочитаемому шаблону или раÑположению URLÐšÐ¾Ð¿Ð¸Ñ %s доÑтупна Ñ Ð±Ð¾Ð»ÑŒÑˆÐ¾Ð¹ задержкой, но вÑÑ‘ равно будет иÑпользоватьÑÑ Ð² ÑвÑзи Ñ Ð¾Ñ‚ÑутÑтвием других иÑÑ‚Ð¾Ñ‡Ð½Ð¸ÐºÐ¾Ð²ÐšÐ¾Ð¿Ð¸Ñ %s доÑтупна Ñ Ð±Ð¾Ð»ÑŒÑˆÐ¾Ð¹ задержкой, пробуетÑÑ Ð´Ñ€ÑƒÐ³Ð¾Ð¹ иÑточникУ копии %s Ð´Ð¾Ð»Ð³Ð°Ñ Ð·Ð°Ð´ÐµÑ€Ð¶ÐºÐ°, пробуем Ñледующую ÐºÐ¾Ð¿Ð¸ÑŽÐšÐ¾Ð¿Ð¸Ñ %s Ð»Ð¾ÐºÐ°Ð»Ð¸Ð·Ð¾Ð²Ð°Ð½Ð°ÐšÐ¾Ð¿Ð¸Ñ %s ÑоответÑтвует шаблону узла %sÐšÐ¾Ð¿Ð¸Ñ %s ÑоответÑтвует шаблону %sОшибка при выполнении запроÑаСбой запроÑа: нет ответа от Ñлужбы IdPСбой запроÑа: нет ответа от Ñлужбы IdP при проверке подлинноÑтиСбой запроÑа: нет ответа Ð¾Ñ Ñлужбы IdP при перенаправленииСбой запроÑа: нет ответа от Ñлужбы SP при отÑылке ÑƒÑ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸Ñ SAML на SPСбой запроÑа: нет ответа от Ñлужбы SPServiceСбой запроÑа: неверный ответ от Ñлужбы IdP при проверке подлинноÑтиСбой запроÑа: неверный ответ от Ñлужбы IdP при перенаправленииСбой запроÑа: неприемлемый ответ от Ñлужбы SP при отÑылке ÑƒÑ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸Ñ SAML на SPСбой запроÑа: неверный ответ от Ñлужбы SPServiceПуÑтой запроÑÐ—Ð°Ð¿Ñ€Ð¾Ñ Ð½Ðµ поддерживаетÑÑ - %sÐ—Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ñ€ÐµÑ€Ð²Ð°Ð½ (ABORTED), но вÑе файлы Ð³Ð¾Ñ‚Ð¾Ð²Ñ‹Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ñ€ÐµÑ€Ð²Ð°Ð½ (ABORTED), так как он был Ð¾Ñ‚Ð¼ÐµÐ½Ñ‘Ð½Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ñ€ÐµÑ€Ð²Ð°Ð½ (ABORTED). Причина: %sÐ—Ð°Ð¿Ñ€Ð¾Ñ ÑƒÐ´Ð°Ð»ÑÑ!!!ИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð·Ð°Ð¿Ñ€Ð¾ÑÐ°Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° в процеÑÑе запиÑиПопытка передачи неизвеÑтному владельцу - %uЗапроÑ: %sЗапрошено Ñегментов Ñдер: %iПоÑтупил Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ñ€Ð¾Ð¿ÑƒÑтить поиÑк реÑурÑов. 
Будет произведена попытка непоÑредÑтвенной заÑылки на точки входа заÑылки типа %s и %sЗапрашиваютÑÑ Ñлементы опиÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа ComputingService Ñ %sÐ—Ð°Ð¿Ñ€Ð¾Ñ Ñ€ÐµÐºÑƒÑ€Ñивного проÑмотра и --nolist не имеет ÑмыÑлаЗапрашиваетÑÑ Ð¿Ñ€ÐµÐºÑ€Ð°Ñ‰ÐµÐ½Ð¸Ðµ обработки задачиТребование "%s %s" ÐЕ удовлетворено.Требование "%s %s" удовлетворено "%s".Требование "%s %s" удовлетворено.Политика бронированиÑ: %sОбнаружение копий назначениÑСбой Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð° Ð´Ð»Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸ÑСбой Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð° Ð´Ð»Ñ Ð¸ÑточникаМаÑÑовое обнаружение копий иÑточникаСхема опиÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа Ñодержит недейÑтвительный Ñлемент: %s:%sОпиÑание реÑурÑа пуÑтоОпиÑание реÑурÑа Ñодержит URL интерфейÑа %s: %sОпиÑание реÑурÑа не Ñодержит URL интерфейÑовПроверка ÑоответÑÑ‚Ð²Ð¸Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа опиÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа прошлаПроверка ÑоответÑÑ‚Ð²Ð¸Ñ Ð¾Ð¿Ð¸ÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа Ñхеме GLUE2 не прошла: Прошла проверка опиÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑаСбой работы Ñборщика информации о реÑурÑеСбой запуÑка Ñборщика информации о реÑурÑеСбой Ñборщика информации о реÑурÑе Ñ Ð²Ñ‹Ñ…Ð¾Ð´Ð½Ñ‹Ð¼ ÑтатуÑом: %i %sЖурнал Ñборщика информации о реÑурÑе: %sСборщик информации о реÑурÑе: %sСиÑтема управлениÑ: %sСбой опроÑа реÑурÑаОтвет не в формате SOAPОтклик не в формате XMLОшибка отÑылки откликаОтзыв: %sОтвет: %sПерезапуÑк поÑле Ð½Ð°Ñ€ÑƒÑˆÐµÐ½Ð¸Ñ Ñегментации.УÑпешно завершена перезаÑылка задачи (%s), но очиÑтить задачу не удалоÑÑŒ - она будет приÑутÑтвовать в ÑпиÑке задачУÑпешно завершена перезаÑылка задачи (%s), но прервать задачу не удалоÑÑŒ - она будет приÑутÑтвовать в ÑпиÑке задачРезультат (0=ДопуÑк, 1=Отказ, 2=Ðеопределённый, 3=Ðеприменим): %dРезультаты Ñохранены в: %sВозобновление задачи %s в ÑоÑтоÑнии %s (%s)Получение файла %sПолучение опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡ EMI ES не поддерживаетÑÑПолучение опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡ INTERNAL не поддерживаетÑÑСообщение, полученное Ñ Ñервера VOMS %s: %s Сообщение Ñ Ñервера VOMS: %sВозврат в генераторПовторное иÑпользование ÑоединениÑПравый операнд Ð´Ð»Ñ ÑÑ†ÐµÐ¿Ð»ÐµÐ½Ð¸Ñ RSL не приводитÑÑ Ðº буквенной конÑтантеRucio возвратил %sСрок дейÑÑ‚Ð²Ð¸Ñ Ð¼Ð°Ñ€ÐºÐµÑ€Ð° Rucio Ð´Ð»Ñ %s иÑтёк, или вÑкоре иÑтечётПравило: %sПравило: получатели: %sПравило: возможноÑти: %sПравило: группа: %sПравило: издатель: %sПравило: роль: %sПравило: контекÑÑ‚: %sПравило: Ñубъект: %sПравило: ВО: %sВыполнÑетÑÑ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð° %sЗадачи в Ñчёте: %iВыполнение команды раÑÑылки (%s)Текущий пользователь не имеет имениОбработчик токена SAML не наÑтроенСбой процеÑÑа SAML2SSOОбмен данными SASLОшибка запроÑа SOAP к Ñерверу AA %sОшибка SOAP Ñлужбы доÑтавки на %s: %sОшибка SOAP: %sÐе удалаÑÑŒ Ð°ÐºÑ‚Ð¸Ð²Ð¸Ð·Ð°Ñ†Ð¸Ñ SOAPÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ñ SOAP не поддерживаетÑÑ: %sЗапроÑа SOAP: %sОтвет SOAP: %sÐÐºÑ‚Ð¸Ð²Ð¸Ð·Ð°Ñ†Ð¸Ñ SOAP Ñ SAML2SSO не выполненаИÑÐ¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ð½Ð½Ð°Ñ Ð¸Ð½ÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ SQL: %sОшибка базы даных SQLite: %sСоÑтоÑние клиента SRM: %sSRM не возвратил никакой информацииSRM не возвратил никакой полезной информацииSRM не выдал пригодных Ð´Ð»Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð¸ URL: %sТочка Ð¼Ð¾Ð½Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ SSHFS каталога кÑша (%s) недоÑтупна - ожидаетÑÑ Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð¾Ðµ Ñоединение ...Точка Ð¼Ð¾Ð½Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ SSHFS каталога иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ (%s) недоÑтупна - ожидаетÑÑ Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð¾Ðµ Ñоединение ...Точка Ð¼Ð¾Ð½Ñ‚Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ SSHFS каталога ÑеÑÑии (%s) недоÑтупна - ожидаетÑÑ Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð¾Ðµ Ñоединение ...Ошибка SSL: %d - %s:%s:%sОшибка SSL: %s, libs: %s, func: %s, причина: %sБлокировка SSL не Ð¸Ð½Ð¸Ñ†Ð¸Ð°Ð»Ð¸Ð·Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð°ÐšÐ¾Ð½Ñ„Ð¸Ð³ÑƒÑ€Ð°Ñ†Ð¸Ñ Ð¿Ð»Ð°Ð½Ð¸Ñ€Ð¾Ð²Ñ‰Ð¸ÐºÐ°:Планировщик получил пуÑтой Ð·Ð°Ð¿Ñ€Ð¾Ñ 
DTRПланировщик получил недопуÑтимый Ð·Ð°Ð¿Ñ€Ð¾Ñ DTRПланировщик получил новый Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR %s Ñ Ð¸Ñточником: %s, назначением: %s, припиÑан к доле %s Ñ Ð¿Ñ€Ð¸Ð¾Ñ€Ð¸Ñ‚ÐµÑ‚Ð¾Ð¼ %dЗапуÑк планировщикаПланировщик оÑтановлен, выходПравила планировки: %sОшибка проверки ÑхемыСхема: %sÐаÑтройки SecHandler не заданыÐаÑтройки SecHandler отÑутÑтвуютÐе задан атрибут name Ð´Ð»Ñ SecHandlerSecHandler: %s(%s)Обработчик безопаÑноÑти %s(%s) не может быть ÑозданСбой в процеÑÑе обработки прав доÑтупаСбой в процеÑÑе обработки прав доÑтупа: %sÐе прошла проверка безопаÑноÑти Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ñщего ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ TLSÐе прошла проверка безопаÑноÑти Ð´Ð»Ñ Ð¸ÑходÑщего ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ TLSÐе прошла проверка безопаÑноÑти в SOAP MCC Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ñщего ÑообщениÑСбой проверки безопаÑноÑти в SOAP MCC Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ñщего ÑообщениÑ: %sÐе прошла проверка безопаÑноÑти в SOAP MCC Ð´Ð»Ñ Ð¸ÑходÑщего ÑообщениÑСбой проверки безопаÑноÑти в SOAP MCC Ð´Ð»Ñ Ð¸ÑходÑщего ÑообщениÑ: %sÐе прошла проверка безопаÑноÑти в TLS MCC Ð´Ð»Ñ Ð²Ñ…Ð¾Ð´Ñщего ÑообщениÑСбой обработки/проверки безопаÑноÑти: %sСбой обработки/проверки безопаÑноÑти Ð´Ð»Ñ '%s': %sОбработка/проверка параметров доÑтупа '%s' завершилаÑÑŒ уÑпехомОбработка/проверка параметров доÑтупа завершилаÑÑŒ уÑпехомВыбор не удалÑÑ: %sСамоÑтоÑтельно подпиÑанный ÑертификатСбой отÑылки отклика: %sSendCommand: Команда: %sОтправка команды: Сбой: %sSendCommand: Отзыв: %sОтправка команды: Ð’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¸Ñтекло поÑле %d мÑВерÑÐ¸Ñ Ñервера SRM: %sÐ ÐµÐ°Ð»Ð¸Ð·Ð°Ñ†Ð¸Ñ Ñервера: %sСервер оÑтановленСлужба %s(%s) не может быть ÑозданаЦикл по ÑервиÑам: точка входа %sТочка входа ÑервиÑа %s (тип %s) добавлена в ÑпиÑок Ð´Ð»Ñ Ð½ÐµÐ¿Ð¾ÑредÑтвенной заÑылкиТочка входа ÑервиÑа %s (тип %s) добавлена в ÑпиÑок Ð´Ð»Ñ Ð¿Ð¾Ð¸Ñка реÑурÑÐ¾Ð²Ð”Ð»Ñ Ñлужбы не задан атрибут IDÐ”Ð»Ñ Ñлужбы не задан атрибут NameÐ¡Ð²ÐµÐ´ÐµÐ½Ð¸Ñ Ð¾ Ñлужбе:Ð¡ÐµÑ€Ð²Ð¸Ñ Ð² ожидании запроÑовПодгружены ÑервиÑные компоненты цепи ÑообщенийСоÑтоÑние обÑлуживаниÑ: %sКаталог ÑеÑÑии %s принадлежит %i, но текущий пользователь - %iКаталог ÑеÑÑии '%s' Ñодержит пользовательÑкие замены - пропуÑкаетÑÑИÑпользуемый каталог ÑеÑÑииОтÑутÑтвует ÐºÐ¾Ñ€Ð½ÐµÐ²Ð°Ñ Ð´Ð¸Ñ€ÐµÐºÑ‚Ð¾Ñ€Ð¸Ñ ÑеÑÑииРабочий каталог %s: Свободное проÑтранÑтво %f ГБПредельное количеÑтво Ñоединений выÑтавлÑетÑÑ Ð½Ð° %i, ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñверх предела будут переведены в ÑоÑтоÑние %sПоÑылаетÑÑ pbsz на %luПриÑваиваетÑÑ ÑоÑтоÑние (%s) точки входа: %sЗадаётÑÑ ÑоÑтоÑние (STARTED) Ð´Ð»Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ входа: %sЗадаётÑÑ Ð¸Ð¼Ñ Ñубъекта!УÑтановка userRequestDescription в %sÐ˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ ÑовмеÑтном реÑурÑе:Следует подождать, когда назначение будет готовоСледует подождать, когда иÑточник будет готовПоказать %s параметров ÑправкиПоказать параметры ÑправкиОÑтанов демонаЗакрываетÑÑ Ñлужба Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…Ð—Ð°ÐºÑ€Ñ‹Ð²Ð°ÑŽÑ‚ÑÑ Ð¿Ð¾Ñ‚Ð¾ÐºÐ¸ Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…ÐŸÐ»Ð°Ð½Ð¸Ñ€Ð¾Ð²Ñ‰Ð¸Ðº оÑтанавливаетÑÑSimpleMap: %sSimpleMap: получено новое Ð²Ñ€ÐµÐ¼Ñ Ñ€Ð°ÑÑоглаÑÐ¾Ð²Ð°Ð½Ð¸Ñ Ð½Ð° %u ÑекундSimpleMap: недопуÑтимое значение в команде unmaptimeПропуÑкаетÑÑ %s ÐºÐ¾Ð¿Ð¸Ñ %sПропуÑкаетÑÑ ComputingEndpoint '%s', потому что объÑвлен Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ '%s' вмеÑто запрошенного '%s'.ПропуÑкаетÑÑ Ð½ÐµÐ´Ð¾Ð¿ÑƒÑÑ‚Ð¸Ð¼Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ URL %sПропуÑкаетÑÑ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚ VOMS AC policyAuthorityПропуÑкаетÑÑ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ ÐºÐ¾Ð¿Ð¸Ñ %sПропуÑкаетÑÑ ÑÐºÐ°Ñ‡Ð°Ð½Ð½Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° (%s), так как она была запущена через другой Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ (%s).Ð¡ÐµÑ€Ð²Ð¸Ñ Ð¿Ñ€Ð¾Ð¿ÑƒÑкаетÑÑ: отÑутÑтвует SchemaPath!Ð¡ÐµÑ€Ð²Ð¸Ñ Ð¿Ñ€Ð¾Ð¿ÑƒÑкаетÑÑ: отÑутÑтвует ServicePath!Ошибка Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ Ñокета: 
%sÐеÑовпадение Ñокетов при завершении %i != %iÐекоторые адреÑа недоÑтупны. ПроÑлушиваетÑÑ %u из %u.Ðекоторые загрузки не удалиÑьСортировка в ÑоответÑтвии Ñ Ð½Ð°Ð»Ð¸Ñ‡Ð¸ÐµÐ¼ Ñвободных меÑÑ‚ в очередиСортировка в ÑоответÑтвии Ñ Ð´Ð¾ÑтупноÑтью входных данных в пункте назначениÑСортировка в ÑоответÑтвии Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¼ Ñталонным теÑтом (по умолчанию - "specint2000")Копии ÑортируютÑÑ Ð² ÑоответÑтвии Ñ Ñ€Ð°Ñположением URLКопии ÑортируютÑÑ Ð² ÑоответÑтвии Ñ Ð¿Ñ€ÐµÐ´Ð¿Ð¾Ñ‡Ð¸Ñ‚Ð°ÐµÐ¼Ñ‹Ð¼ шаблоном %sОтÑутÑтвует URL иÑточникаÐеподдерживаемый URL иÑточника: %sÐедейÑтвительный URL иÑточника: %sИÑточник и/или назначение ÑвлÑетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð¼, будет произведён поиÑк копийПроверка иÑточника запрошена, но не прошла: %sÐедопуÑтимый URL иÑточникаИÑточник поÑтавлен в ÑоответÑтвие %sИÑточник неготов, ÑÐ»ÐµÐ´ÑƒÑŽÑ‰Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° через %u ÑекИÑточник идентичен назначениюДата Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ Ð¸Ñточника: %sИÑточник или назначение требуют Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ñ Ð»ÐµÐ½Ñ‚Ð¾Ñ‡Ð½Ð¾Ð³Ð¾ накопителÑИÑточник: %sУказанные модули не найдены в кÑшеУказанный файл Ñ Ñ‚Ñ€Ð°Ñ„Ð°Ñ€ÐµÑ‚Ð¾Ð¼ (%s) не ÑущеÑтвует.Задачи, выполнÑющие размещение данных: %iИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа на размещение, Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð±ÑƒÐ´ÐµÑ‚ отозванРазмещаетÑÑ: %sЗапуÑк Ñ Ð²Ñ‹Ñоким приоритетомÐачать теÑтЖдём 10 Ñекунд...Ðачало чтениÑStartReading: Файл не был подготовлен должным образомÐачало запиÑиStartWriting: Файл не был подготовлен должным образомЗапущена ÑƒÐ´Ð°Ð»Ñ‘Ð½Ð½Ð°Ñ Ñлужба доÑтавки на %sЗапуÑкаютÑÑ Ð¿Ð¾Ñ‚Ð¾ÐºÐ¸ DTRЗапуÑкаетÑÑ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»Ð¸Ñ€ÑƒÐµÐ¼Ñ‹Ð¹ процеÑÑЗапуÑкаютÑÑ Ð¿Ð¾Ñ‚Ð¾ÐºÐ¸ Ñ€Ð°Ð·Ð¼ÐµÑ‰ÐµÐ½Ð¸Ñ Ð´Ð°Ð½Ð½Ñ‹Ñ…Ð—Ð°Ð¿ÑƒÑкаетÑÑ Ð²Ñпомогательный процеÑÑ: %sЗапуÑкаетÑÑ Ð¿Ð¾Ñ‚Ð¾Ðº обработки задачиЗапуÑк мониторинга задачЗапуÑкаетÑÑ Ð½Ð¾Ð²Ñ‹Ð¹ Ð·Ð°Ð¿Ñ€Ð¾Ñ DTR Ð´Ð»Ñ %sÐачинаетÑÑ Ð¾Ð¿Ñ€Ð¾Ñ Ð¾Ñ‚Ð»Ð¾Ð¶ÐµÐ½Ð½Ð¾Ð¹ точки входа (%s) - другие точки входа Ñтого ÑервиÑа не опрашиваютÑÑ, либо были уже уÑпешно опрошены.ЗапуÑкаетÑÑ Ð¿Ð¾Ð´Ð¿Ð¾Ñ‚Ð¾Ðº Ð´Ð»Ñ Ð¾Ð¿Ñ€Ð¾Ñа точки доÑтупа по %sЗапуÑкаетÑÑ Ð¿Ð¾Ñ‚Ð¾Ðº Ð´Ð»Ñ Ð¾Ð¿Ñ€Ð¾Ñа точки доÑтупа %sStat: получено Ð²Ñ€ÐµÐ¼Ñ Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ %sПроверка: получен размер %lluОтÑутÑтвует наименование ÑоÑтоÑÐ½Ð¸Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ð¾Ð³Ð¾ модулÑСоÑтоÑние точки доÑтупа Ñлужбы "%s" задано как неактивное в ARCHERY. 
ПропуÑкаетÑÑ.Опрошено ÑоÑтоÑние %d задач, %d задач отозвалиÑÑŒStopReading закончил ожидание transfer_condition.StopReading начинает ожидание transfer_condition.StopReading: прерывание ÑвÑзиStopWriting закончил ожидание transfer_condition.StopWriting начинает ожидание transfer_condition.StopWriting: ВычиÑлена ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма %sStopWriting: прерывание ÑвÑзиStopWriting: поиÑк контрольной Ñуммы %sОбработка задачи завершенаОÑтанавливаетÑÑ Ð²Ñпомогательный процеÑÑ %sОÑтанавливаетÑÑ Ð¿Ð¾Ñ‚Ð¾Ðº обработки задачиОÑтанавливаетÑÑ ÑерверЗапиÑываетÑÑ Ñ„Ð°Ð¹Ð» %sСохранÑетÑÑ Ð¿Ð¾Ñ€Ñ‚ %i Ð´Ð»Ñ %sСохранение временной доверенноÑти в %sСтрока уÑпешно разобрана как %s.Ðтрибут Ñубъекта %s не Ñодержит извеÑтного NID, пропуÑкаетÑÑСубъект не начинаетÑÑ Ñ '/'Ð˜Ð¼Ñ Ñубъекта: %sОтÑутÑтвует Ñубъект запроÑаСубъект Ð´Ð»Ñ Ñверки: %sСубъект: %sТочка входа Ð´Ð»Ñ Ð·Ð°Ñылки задачСбой заÑылки задачиЗаÑылка: Сбой отправки команды CWDЗаÑылка: Сбой отправки команды CWD newЗаÑылка: Сбой отправки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸Ð—Ð°Ñылка: Сбой ÑвÑзиЗаÑылка: Сбой выгрузки локальных входных файловЗаÑылка: ÑÐµÑ€Ð²Ð¸Ñ Ð½Ðµ предоÑтавлÑет подходÑщего информационного интерфейÑа - нужен org.nordugrid.ldapngПодключаемый модуль SubmitterPlugin %s не может быть ÑозданПодключаемый модуль SubmitterPlugin "%s" не обнаружен.ЗапуÑк задачи ЗапуÑкаетÑÑ Ñ‚ÐµÑÑ‚Ð¾Ð²Ð°Ñ Ð·Ð°Ð´Ð°Ñ‡Ð° %d:УÑпешно добавлен Independent OID, возвращена метка %dУÑпешно добавлен OID доверенноÑти RFC, возвращена метка %dУÑпешно добавлен OID поÑледовательноÑти VOMS AC, возвращена метка %dУÑпешно добавлен OID anyLanguage, возвращена метка %dУÑпешно добавлен OID inheritAll, возвращена метка %dУÑÐ¿ÐµÑˆÐ½Ð°Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ° подлинноÑти токена SAMLУÑÐ¿ÐµÑˆÐ½Ð°Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ° подлинноÑти UsernameTokenУÑпешное подтверждение подлинноÑти токена X509УдалоÑÑŒ поменÑть пароль на Ñервере MyProxyУÑÐ¿ÐµÑˆÐ½Ð°Ñ Ñмена доверительных отношений на: %sУÑпешное преобразование PrivateKeyInfo в EVP_PKEYУдалоÑÑŒ уничтожить доверенноÑть на Ñервере MyProxyУдалоÑÑŒ извлечь Ñертификат в формате PKCS12УÑпешное Ñоздание пары открытого/закрытого ключейУдалоÑÑŒ получить доверенноÑть в %s Ñ Ñервера MyProxy %sПараметры доÑтупа полученыУдалоÑÑŒ получить информацию Ñ Ñервера MyProxyУÑпешное импортирование ÑертификатаЗакрытый ключ уÑпешно полученNSS уÑпешно инициализированУÑпешно подгружен PrivateKeyInfoУÑпешный вывод Ñертификата в %sУÑпешный вывод запроÑа Ñертификата в %sУдалоÑÑŒ делегировать доверенноÑть Ñерверу MyProxyÐ˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ DelegationService: %s и DelegationID: %s уÑпешно отправлена партнёрÑкому ÑервиÑуДоверенноÑть уÑпешно подпиÑанаПодпиÑÑŒ уÑпешно подтвержденаПодпиÑÑŒ уÑпешно подтвержденаПодпиÑанный Ñертификат уÑпешно проверенУказанное Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %s не Ñовпадает Ñ ÑопоÑтавленным именем Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %sПоддерживаемые профили:Поддерживаемые ограничениÑ: validityStart=Ð²Ñ€ÐµÐ¼Ñ (например, 2008-05-29T10:20:30Z; еÑли не указано, то начинаетÑÑ Ð½ÐµÐ¼ÐµÐ´Ð»ÐµÐ½Ð½Ð¾) validityEnd=Ð²Ñ€ÐµÐ¼Ñ validityPeriod=Ð²Ñ€ÐµÐ¼Ñ (например, 43200, или 12h, или 12H; еÑли не указаны ни validityPeriod, ни validityEnd, то Ñрок дейÑÑ‚Ð²Ð¸Ñ Ð¿Ð¾ умолчанию ÑоÑтавлÑет 12 чаÑов Ð´Ð»Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð¹ доверенноÑти, и 168 чаÑов Ð´Ð»Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð¾Ð¹ доверенноÑти на Ñервере MyProxy) vomsACvalidityPeriod=Ð²Ñ€ÐµÐ¼Ñ (например, 43200, или 12h, или 12H; еÑли не указано, то иÑпользуетÑÑ Ð½Ð°Ð¸Ð¼ÐµÐ½ÑŒÑˆÐµÐµ между 12 чаÑами и значением validityPeriod) myproxyvalidityPeriod=Ð²Ñ€ÐµÐ¼Ñ (Ñрок годноÑти доверенноÑти, делегированной через Ñервер MyProxy например, 43200, или 12h, или 12H; еÑли 
не указано, то иÑпользуетÑÑ Ð½Ð°Ð¸Ð¼ÐµÐ½ÑŒÑˆÐµÐµ между 12 чаÑами и значением validityPeriod - Ñроком годноÑти доверенноÑти, делегированной через Ñервер MyProxy) proxyPolicy=Ñодержимое политики proxyPolicyFile=файл политики keybits=чиÑло - длина генерируемого ключа. По умолчанию - 2048 бит. Специальное значение 'inherit' означает иÑпользование длины ключа подпиÑывающего Ñертификата. signingAlgorithm=название - алгоритм, иÑпользуемый Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸ÑÐ°Ð½Ð¸Ñ Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¾Ð³Ð¾ ключа или доверенноÑти. По умолчанию - sha1. Возможные значениÑ: sha1, sha2 (Ñокращение от sha256), sha224, sha256, sha384, sha512 и inherit (иÑпользовать алгоритм подпиÑывающего Ñертификата). По умолчанию иÑпользуетÑÑ inherit. Старые ÑиÑтемы поддерживают лишь sha1. Поддерживаемые Ð¿Ð¾Ð»Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸: subject - Ð¸Ð¼Ñ Ñубъекта доверенноÑти. identity - идентифицируемое Ð¸Ð¼Ñ Ñубъекта доверенноÑти. issuer - Ð¸Ð¼Ñ Ñубъекта, выдавшего доверенноÑть. ca - Ð¸Ð¼Ñ Ñубъекта агентÑтва, выдавшего иÑходный Ñертификат path - локальный путь к файлу, Ñодержащему доверенноÑть. type - тип доверенноÑти. validityStart - Ð²Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð° дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти. validityEnd - Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ Ð´ÐµÐ¹ÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти. validityPeriod - продолжительноÑть годноÑти доверенноÑти в Ñекундах. validityLeft - оÑтавшаÑÑÑ Ð¿Ñ€Ð¾Ð´Ð¾Ð»Ð¶Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ñть годноÑти доверенноÑти в Ñекундах. vomsVO - Ð¸Ð¼Ñ Ð²Ð¸Ñ€Ñ‚ÑƒÐ°Ð»ÑŒÐ½Ð¾Ð¹ организации, указанное в атрибуте VOMS. vomsSubject - Ñубъект Ñертификата, которому был приÑвоен атрибут VOMS. vomsIssuer - Ñубъект Ñлужбы, выдавшей Ñертификат VOMS. vomsACvalidityStart - Ð²Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð° дейÑÑ‚Ð²Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° VOMS. vomsACvalidityEnd - Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ Ð´ÐµÐ¹ÑÑ‚Ð²Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð° VOMS. vomsACvalidityPeriod - продолжительноÑть годноÑти атрибута VOMS в Ñекундах. vomsACvalidityLeft - оÑтавшаÑÑÑ Ð¿Ñ€Ð¾Ð´Ð¾Ð»Ð¶Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ñть годноÑти атрибута VOMS в Ñекундах. proxyPolicy - Ñодержимое политики keybits - длина ключа доверенноÑти в битах. signingAlgorithm - алгоритм, иÑпользуемый при подпиÑи Ñертификата. Ð—Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð²Ñ‹Ð²Ð¾Ð´ÑÑ‚ÑÑ Ð² порÑдке запроÑа, каждое Ñ Ð½Ð¾Ð²Ð¾Ð¹ Ñтроки. ЕÑли полю ÑоответÑтвуют неÑколько значений, они выводÑÑ‚ÑÑ Ð² Ñтроку и разделÑÑŽÑ‚ÑÑ |. Поддерживаемые Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¿Ð°Ñ€Ð¾Ð»ÐµÐ¹: key - Ð´Ð»Ñ Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ñ‹Ñ… ключей myproxy - Ð´Ð»Ñ Ð´Ð¾Ñтупа к Ñертификатам на Ñервере MyProxy myproxynew - Ð´Ð»Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ñертификатов на Ñервере MyProxy all - Ñ Ð»ÑŽÐ±Ð¾Ð¹ целью. Поддерживаемые иÑточники паролей: quoted string ("password") - Ñвно указанный пароль int - интерактивный Ð·Ð°Ð¿Ñ€Ð¾Ñ Ð½Ð° ввод Ð¿Ð°Ñ€Ð¾Ð»Ñ Ñ Ñ‚ÐµÑ€Ð¼Ð¸Ð½Ð°Ð»Ð° stdin - чтение Ð¿Ð°Ñ€Ð¾Ð»Ñ Ñо Ñтандартного ввода по переводу Ñтроки file:filename - чтение Ð¿Ð°Ñ€Ð¾Ð»Ñ Ð¸Ð· файла filename stream:# - чтение Ð¿Ð°Ñ€Ð¾Ð»Ñ Ð¸Ð· входного потока номер #. Ðа текущий момент поддерживаетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ 0 (Ñтандартный ввод). Поддержка предварительного бронированиÑПоддерживает групповую заÑылкуПоддержка упреждениÑПриоÑтановленные задачи: %iПриоÑтанавливаетÑÑ Ð¾Ð¿Ñ€Ð¾Ñ Ñ‚Ð¾Ñ‡ÐºÐ¸ входа (%s), Ñ‚.к. 
ÑÐµÑ€Ð²Ð¸Ñ Ð¿Ð¾ Ñтому адреÑу уже опрашиваетÑÑ Ð¸Ð»Ð¸ опрошен.Ð¡Ð¸Ð½Ñ…Ñ€Ð¾Ð½Ð¸Ð·Ð°Ñ†Ð¸Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð³Ð¾ ÑпиÑка активных задач Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÐµÐ¹ в ÑиÑтеме Грид может привеÑти к некоторым неÑоответÑтвиÑм: только что запущенные задачи могут быть ещё не зарегиÑтрированы в ÑиÑтеме, тогда как только что удалённые задачи могут вÑÑ‘ ещё приÑутÑтвовать.СинтакÑичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° в значении атрибута 'notify' ('%s'), он Ñодержит неизвеÑтные метки ÑтатуÑаСинтакÑичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° в значении атрибута 'notify' ('%s'), он должен Ñодержать Ð°Ð´Ñ€ÐµÑ Ñлектронной почтыСинтакÑичеÑÐºÐ°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° в значении атрибута 'notify' ('%s'), он должен Ñодержать лишь адреÑа Ñлектронной почты поÑле меток ÑтатуÑаФайл ÑиÑтемных наÑтроек (%s or %s) не ÑущеÑтвует.Файл ÑиÑтемных наÑтроек (%s) Ñодержит ошибки.Файл ÑиÑтемных наÑтроек (%s) не ÑущеÑтвует.Вызван процеÑÑ TCP клиентаИÑполнитель TCP удалёнTLS не передал идентификацию, переход к OTokensTURL %s не может быть обработанÐазначение %s не ÑоответÑтвует запрошенному интерфейÑу.Ðазначение %s отброшено алгоритмом FastestQueueBroker, Ñ‚.к. не Ñообщает чиÑло Ñвободных ÑчеекÐазначение %s отброшено алгоритмом FastestQueueBroker, Ñ‚.к. не Ñообщает общее чиÑло ÑчеекÐазначение %s отброшено алгоритмом FastestQueueBroker, Ñ‚.к. не Ñообщает чиÑло ожидающих задачТехнологиÑ: %sПреходÑÑ‰Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° ÑлужбыОбрыв заÑылки теÑта, Ñ‚.к. ни один из реÑурÑов не предоÑтавил информациюÐе удалоÑÑŒ заÑлать теÑÑ‚, возможные Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¾Ñ‚ÑутÑтвуютТеÑÑ‚ запущен Ñ Ñрлыком: %sТеÑÑ‚ был Ñоздан Ñ Ð¸Ð´ÐµÐ½Ñ‚Ð¸Ñ„Ð¸ÐºÐ°Ñ‚Ð¾Ñ€Ð¾Ð¼ %d, но при обработке возникла ошибка.Ðтрибут "FreeSlotsWithDuration" неверно Ñформатирован; игнорируетÑÑ.Ðеверно отформатирован атрибут "FreeSlotsWithDuration", публикуемый "%s", - игнорируетÑÑ.Опции 'sort' и 'rsort' не могут быть указаны одновременно.BIO Ð´Ð»Ñ Ð²Ñ‹Ñ…Ð¾Ð´Ð°: NULLКаталог Ñертификатов агентÑтв CA необходим Ð´Ð»Ñ ÑвÑзи Ñ Ñерверами VOMS и MyProxy.ÐгентÑтво (%s), выдавшее Ñертификат (%s), не отноÑитÑÑ Ðº доверÑемым целью (%s).Служба ComputingEndpoint не Ñообщает о Ñвоём уровне качеÑтва.Служба ComputingEndpoint не Ñообщает о Ñвоём ÑоÑтоÑнии обÑлуживаниÑ.У ComputingEndpoint отÑутÑтвует URL.Служба ComputingService не Ñообщает о Ñвоём интерфейÑе.Служба ComputingService не Ñообщает о Ñвоём уровне качеÑтва.Ðевозможно раÑпознать заданный Вами период MyProxy: %s.База данных NSS в профиле Firefox не обнаруженаОтклик доÑюда не дошёлСлужба не предоÑтавлÑет информации о ÑоÑтоÑнии здоровьÑ.Служба не Ñообщает о Ñвоём типе.StatusCode: SuccessÐевозможно раÑпознать заданный Вами период VOMS AC: %s.Ðевозможно ÑвÑзатьÑÑ Ñ Ñервером VOMS Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÐµÐ¹: %s пожалуйÑта, проверьте, доÑтупен ли Ñтот ÑерверÐевозможно ÑвÑзатьÑÑ Ñ Ñервером VOMS Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÐµÐ¹: %s ПожалуйÑта, проверьте, доÑтупен ли Ñтот Ñервер.Разделу [vo] Ñ Ð½Ð°Ð·Ð²Ð°Ð½Ð¸ÐµÐ¼ '%s' не поÑтавлен в ÑоответÑтвие файл, и он не может быть иÑпользован Ð´Ð»Ñ Ð°Ð²Ñ‚Ð¾Ñ€Ð¸Ð·Ð°Ñ†Ð¸Ð¸ÐšÐ¾Ð¼Ð°Ð½Ð´Ð° arccat предназначена Ð´Ð»Ñ Ð²Ñ‹Ð²Ð¾Ð´Ð° на Ñкран Ñообщений Ñтандартного выхода, Ñтандартной ошибки или ошибок ÑиÑтемы при иÑполнении задачи.Команда arcclean иÑпользуетÑÑ Ð´Ð»Ñ ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ñ€ÐµÐ·ÑƒÐ»ÑŒÑ‚Ð°Ñ‚Ð¾Ð² работы задач Ñ ÑƒÐ´Ð°Ð»Ñ‘Ð½Ð½Ð¾Ð³Ð¾ компьютера.Команда arccp копирует файлы на, Ñ Ð¸ между запоминающими уÑтройÑтвами Грид.Команда arcget иÑпользуетÑÑ Ð´Ð»Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ñ€ÐµÐ·ÑƒÐ»ÑŒÑ‚Ð°Ñ‚Ð¾Ð² работы задач.Команда arcinfo иÑпользуетÑÑ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ ÑоÑтоÑÐ½Ð¸Ñ Ð²Ñ‹Ñ‡Ð¸Ñлительных реÑурÑов на Гриде.Команда arckill иÑпользуетÑÑ Ð´Ð»Ñ Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸Ñ Ð¸ÑполнÑющихÑÑ 
Ð·Ð°Ð´Ð°Ñ‡.Команда arcls иÑпользуетÑÑ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñмотра информации о файлах, хранÑщихÑÑ Ð½Ð° накопительных уÑтройÑтвах Грид, а также в занеÑённых в каталоги данных.Команда arcmkdir Ñоздаёт директории на грид-хранилищах и в каталогах данных.Команда arcproxy Ñоздаёт доверенноÑть из пары закрытый/открытый ключ Ð´Ð»Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð´Ð¾Ñтупа к гриду.Команда arcrename переименовывает файлы на запоминающих уÑтройÑтвах.Команда arcrm удалÑет файлы Ñ Ð·Ð°Ð¿Ð¾Ð¼Ð¸Ð½Ð°ÑŽÑ‰Ð¸Ñ… уÑтройÑтв.Команда arcstat иÑпользуетÑÑ Ð´Ð»Ñ Ð²Ñ‹Ð²Ð¾Ð´Ð° информации о ÑоÑтоÑнии задач, отправленных на Грид .Команда arcsub иÑпользуетÑÑ Ð´Ð»Ñ Ð·Ð°Ð¿ÑƒÑка задач на вычиÑлительные реÑурÑÑ‹ Грид.Команда arcsync Ñинхронизирует Ваш локальный ÑпиÑок задач Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÐµÐ¹ на заданных клаÑтерах или каталогах реÑурÑов.Команда arctest иÑпользуетÑÑ Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ клаÑтеров как вычиÑлительных реÑурÑов.Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾Ð± атрибутах Ñ Ñервера VOMS: %s Ñодержит:ДоÑтупный ÑпиÑок отзыва Ñертификатов (CRL) проÑроченДоÑтупный ÑпиÑок отзыва Ñертификатов (CRL) пока недейÑтвителенÐтрибут brokerarguments может быть иÑпользован только в ÑвÑзи Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð¼ brokernameСертификат Ñ Ñубъектом %s недейÑтвителенÐтрибут XRSL cluster пока что не поддерживаетÑÑ.Параметры доÑтупа Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñи не Ñодержат запроÑаПараметры доÑтупа Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñи имеют значение NULLЗакрытый ключ параметров доÑтупа уже инициализированФайл наÑтроек по умолчанию (%s) не ÑвлÑетÑÑ Ð¾Ð±Ñ‹Ñ‡Ð½Ñ‹Ð¼ файлом.Делегированные параметры доÑтупа полученные от Ñлужбы Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð·Ð°Ð¿Ð¸Ñаны в каталоге: %sДелегированные параметры доÑтупа извлечены из каталога: %sПериод недоÑтупноÑти цели (%s) не объÑвлен. Цель ÑохранÑетÑÑ.Ðевозможно раÑпознать заданное Вами Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ: %s.Заданное Вами Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ: %s предшеÑтвует времени начала: %s.Точка входа (%s) не поддерживаетÑÑ Ñтим подключаемым модулем (%s)ÐšÐ¾Ð½ÐµÑ‡Ð½Ð°Ñ Ñ‚Ð¾Ñ‡ÐºÐ° ÑервиÑа Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð´Ð¾Ð»Ð¶Ð½Ð° быть наÑтроенаФайл %s в наÑтоÑщий момент заблокирован дейÑтвительным блокомПервый поддерживаемый Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ð¾Ð³Ð¾ Ð¼Ð¾Ð´ÑƒÐ»Ñ %s оказалÑÑ Ð¿ÑƒÑтой Ñтрокой, модуль пропуÑкаетÑÑ.Следующие %d не были перезапущеныСледующие задачи не были заÑланы:Ð˜Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ñтой точки доÑтупа (%s) не задан, пробуютÑÑ Ð²Ñе возможные подключаемые модулиОпиÑание задачи может быть также задано файлом или Ñтрокой в формате ADL или XRSL.ÐедопуÑтимое значение Ð¾Ð³Ñ€Ð°Ð½Ð¸Ñ‡ÐµÐ½Ð¸Ñ keybits: %s.Ð˜Ð¼Ñ Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¾Ð³Ð¾ ключа Ð´Ð»Ñ ÑƒÐ½Ð¸Ñ‡Ñ‚Ð¾Ð¶ÐµÐ½Ð¸Ñ Ð¿ÑƒÑтоСтарые доверенноÑти GSI более не поддерживаютÑÑ. ПожалуйÑта, не иÑпользуйте опцию -O/--old.Во входÑщем Ñообщении отÑутÑтвует Ð¿Ð¾Ð»ÐµÐ·Ð½Ð°Ñ Ð½Ð°Ð³Ñ€ÑƒÐ·ÐºÐ°Ð’ иÑходÑщем Ñообщении отÑутÑтвует Ð¿Ð¾Ð»ÐµÐ·Ð½Ð°Ñ Ð½Ð°Ð³Ñ€ÑƒÐ·ÐºÐ°Ðевозможно раÑпознать заданный Вами интервал: %s.Подключаемый модуль %s не поддерживает никаких интерфейÑов, пропуÑкаетÑÑ.Ð”Ð»Ñ simplelist.pdp не задан файл наÑтройки политик; пожалуйÑта, проверьте атрибут location в наÑтройках Ñлужбы узла PDP simplelistЯзык политик %s не поддерживаетÑÑЗакрытый ключ Ð´Ð»Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñи не инициализированПроцеÑÑ, которому принадлежит блок в %s, больше не ÑущеÑтвует, блок будет ÑƒÐ´Ð°Ð»Ñ‘Ð½Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð¿Ñ€Ð¾ÑˆÑ‘Ð» Ñверку Ñ Ð¿Ð¾Ð»Ð¸Ñ‚Ð¸ÐºÐ¾Ð¹ÐедопуÑтимый алгоритм подпиÑи %s: запроÑÑ‹ Ñертификата должны подпиÑыватьÑÑ SHA1 или SHA2Указанный атрибут Globus (%s) не поддерживаетÑÑ. 
%s игнорируетÑÑ.Ðевозможно раÑпознать заданное Вами Ð²Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð°: %s.Опции start, end и period не могут быть заданы одновременноСубъект не ÑоответÑтвует имени выдавшего агентÑтва и атрибуту доверенноÑти CNСрок дейÑÑ‚Ð²Ð¸Ñ Ñертификата атрибута VOMS (AC) Ñокращён Ñ %s до %s, в ÑвÑзи Ñ Ð¾Ð³Ñ€Ð°Ð½Ð¸Ñ‡ÐµÐ½Ð¸ÐµÐ¼ Ñо Ñтороны Ñервера VOMS. Значение атрибута XRSL acl не ÑвлÑетÑÑ Ð´ÐµÐ¹Ñтвительным кодом XML.Значение атрибута 'ftpthreads' должно быть целым чиÑлом от 1 до 10Значение атрибута keysize (%s) в файле наÑтроек разобрано неполноÑтьюЗначение атрибута timeout (%s) в файле наÑтроек разобрано неполноÑтьюОбнаружено %d оÑновных директорий NSS, Ñодержащих базы данных Ñертификатов, ключей и модулейОбнаружено %d Ñлементов запроÑа%d инÑтрукций направлено на один и тот же Ñервер VOMS, %sОбнаружены %d запроÑа, удовлетворÑющих Ñ…Ð¾Ñ‚Ñ Ð±Ñ‹ одной политикеВ Вашем файле vomses указаны %d Ñерверов Ñ Ð¾Ð´Ð¸Ð½Ð°ÐºÐ¾Ð²Ñ‹Ð¼ именем %s, но ни один из них не доÑтупен или не отзываетÑÑ Ð¿Ñ€Ð°Ð²Ð¸Ð»ÑŒÐ½Ð¾.Ð’ Вашем файле vomses указаны %d Ñерверов Ñ Ð¾Ð´Ð¸Ð½Ð°ÐºÐ¾Ð²Ñ‹Ð¼ именем %s, но не вÑе доÑтупны или правильно отзываютÑÑ. ДоверенноÑть без раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ VOMS AC будет вÑÑ‘ равно Ñоздана.Ð’ базе данных NSS обнаружено %d Ñертификата пользователÑÐ’ учётном ÑпиÑке нет точек входа, ÑоответÑтвующих запрошенному типу точки входа информацииВ учётном ÑпиÑке нет точек входа, ÑоответÑтвующих запрошенному типу точки входа заÑылкиОбнаружены %d Ñубъекта, удовлетворÑющих Ñ…Ð¾Ñ‚Ñ Ð±Ñ‹ одной политикеОтвет не Ñодержит делегированный токен X509Ответ не Ñодержит делегированный токен в нужном форматеВ ответе отÑутÑтвует Ð·Ð°Ð¿Ñ€Ð¾Ñ FormatОтвет не Ñодержит Id или значение запроÑа X509Ответ не Ñодержит Id или значение маркёра X509Ðе наÑтроена цепочка ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ SOAPÐет ответа SOAPÐ’ ответе отÑутÑтвует UpdateCredentialsResponseÐ’ ответе отÑутÑтвует Ð·Ð°Ð¿Ñ€Ð¾Ñ X509Ðе найден Ñертификат Ñ Ð¸Ð¼ÐµÐ½ÐµÐ¼ %s, Ñертификат мог быть удалён при Ñоздании CSRÐ’ объекте закрытого ключа Ð¸Ð·Ð´Ð°Ñ‚ÐµÐ»Ñ Ð¾Ñ‚ÑутÑтвует профильОтÑутÑтвует идентификатор СУПО. Сообщение не будет запиÑано в журнал BLAH.Ðет ответаОбнаружена проблема при обÑлуживании Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¿Ð¾Ñле переÑылки поÑле ÑбоÑ: %sОбнаружена проблема при обÑлуживании иÑточника поÑле переÑылки: %sÐет ответа HTTPÐет ответа SOAPСервер PDP не возвратил ответ SOAP: %sÐ”Ð»Ñ Ñтих точек входа ÑтороннÑÑ Ð¿ÐµÑ€ÐµÑылка не поддерживаетÑÑЗапрошена переÑылка файла третьим лицом, но необходимый подключаемый модуль не был подгружен. УÑтанавливали ли Ð’Ñ‹ модуль GFAL? ЕÑли нет, пожалуйÑта, уÑтановите пакеты 'nordugrid-arc-plugins-gfal' и 'gfal2-all'. 
Эти Ð½Ð°Ð·Ð²Ð°Ð½Ð¸Ñ Ð¼Ð¾Ð³ÑƒÑ‚ завиÑеть от типа вашего диÑтрибутива.Это Ñообщение INFO тоже должно быть видноЭто Ñообщение INFO должно быть видноЭтого ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ VERBOSE не должно быть видноЭто Ñообщение VERBOSE теперь должно быть видноЭта ÐºÐ¾Ð¿Ð¸Ñ ÑƒÐ¶Ðµ удаленаЭта задача была запущена лишь недавно, и может быть ещё не зарегиÑтрирована в ÑиÑтемеЭто Ñообщение выводитÑÑ Ð² изначальное назначениеЭто Ñообщение направлÑетÑÑ Ð² каждый потокУ Ñтого процеÑÑа уже ÑущеÑтвует блокировка в %sПохоже на временный Ñбой - пожалуйÑта, попытайтеÑÑŒ Ñнова попозжеЭта программулечка может быть иÑпользована Ð´Ð»Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€ÐºÐ¸ ÑпоÑобноÑтей Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ JobDescription.Этому пользователю отказано в праве запуÑка новых задач.Поток завершилÑÑ Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ¾Ð¹ Glib: %sПоток завершилÑÑ Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°Ð½Ð¸ÐµÐ¼ в Glib: %sПоток завершилÑÑ Ð¾Ð±Ñ‰Ð¸Ð¼ прерыванием: %sСертификат атрибута дейÑтвителен на: %sСертификат атрибута дейÑтвителен на: Срок дейÑÑ‚Ð²Ð¸Ñ Ñертификата закончилÑÑСертификат атрибута дейÑтвителен на: Сертификат пока недейÑтвителенДоверенноÑть дейÑтвительна на: %sДоверенноÑть дейÑтвительна на: Срок дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти вышелДоверенноÑть дейÑтвительна на: ДоверенноÑть пока недейÑтвительнаВремÑ, проведённое в ожидании диÑка: %.3f мÑВремÑ, проведённое в ожидании ÑвÑзи: %.3f мÑИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð±Ð»Ð¾ÐºÐ¸Ñ€Ð¾Ð²ÐºÐ¸ кÑшаИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñ %s(%s):%i - %i ÑÐ’Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¸Ñтекло, файл блокировки %s будет удалёнИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¾Ñ‚Ð²ÐµÑ‚Ð½Ð¾Ð³Ð¾ ÑÐ¾Ð¾Ð±Ñ‰ÐµÐ½Ð¸Ñ Globus - утечка ÑоединениÑИÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ñ‹ mkdirÐ”Ð»Ñ Ð²Ð¾ÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð½ÐµÐ´Ð¾Ñтающих задач, запуÑтите arcsyncСлишком много аргументов в наÑтройкахСлишком много Ñоединений - новое отклоненоСлишком много Ñоединений - ожидание Ð·Ð°ÐºÑ€Ñ‹Ñ‚Ð¸Ñ ÑтарыхСлишком много Ñбоев попытки Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð¾Ð¹ Ñуммы - прерываниеСлишком много файлов на один Ð·Ð°Ð¿Ñ€Ð¾Ñ - пожалуйÑта, попробуйте Ñнова, Ñ Ð¼ÐµÐ½ÑŒÑˆÐ¸Ð¼ количеÑтвом файловУтилита Ð´Ð»Ñ Ð¿Ñ€ÐµÐ´ÑÑ‚Ð°Ð²Ð»ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в виде файла grami.Ð’Ñего задач: %iОбщее количеÑтво логичеÑких процеÑÑоров: %iКоличеÑтво вÑех обнаруженных задач: КоличеÑтво обнаруженных новых задач: Общее количеÑтво физичеÑких процеÑÑоров: %iОбщее количеÑтво Ñдер: %iПередача ÐЕ УДÐЛÐСЬ: %sПередача файлов уÑпешно отмененаПередача данных завершенаПередача не удалаÑьСбой передачи: %sПередача завершена: %llu байтов передано %sПередача из %s в %sПереÑылка оборвана поÑле %i Ñекунд бездейÑтвиÑПередача удалаÑьИÑтечение времени Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÑоединениÑДоверенные центры Ñертификации:Попытка получить атрибут Ñ Ñервера VOMS Ñ Ð¿Ð¾Ñ€Ñдком: %sПробуютÑÑ Ð²Ñе доÑтупные интерфейÑыПробуем Ñледующую копиюПопытка проверки Ñертификата X509 Ñ Ð¿Ð¾Ð¼Ð¾Ñ‰ÑŒÑŽ check_cert_typeПопытка ÑÐ¾ÐµÐ´Ð¸Ð½ÐµÐ½Ð¸Ñ Ñ %s(%s):%dПопытка проÑлушать %s:%s(%s)Попытка проÑлушать порт TCP %s(%s)Попытка миграции на %s: ÐœÐ¸Ð³Ñ€Ð°Ñ†Ð¸Ñ Ð½Ð° Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ %s не поддерживаетÑÑ.Попытка Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð¾Ð¿Ð¸ÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s Ñ Ð²Ñ‹Ñ‡Ð¸Ñлительного реÑурÑаПопытка активации временно иÑключённой точки входа (%s)Попытка заÑылки задачи напрÑмую к точке входа (%s)Попытка заÑылки на точку входа (%s) иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ (%s) Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ñ‹Ð¼ модулем (%s).Два входных файла Ñ Ð¸Ð´ÐµÐ½Ñ‚Ð¸Ñ‡Ð½Ñ‹Ð¼Ð¸ именами '%s'.Тип dir, вызываетÑÑ srmRmDirТип file, вызываетÑÑ srmRmТип: %sТипы Ñлужб Ð²Ñ‹Ð¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡, на которые %s может заÑылать задачи:Типы локальных 
Ñлужб информации, Ñ ÐºÐ¾Ñ‚Ð¾Ñ€Ñ‹Ñ… %s может получить информацию:Типы локальных Ñлужб информации, Ñ ÐºÐ¾Ñ‚Ð¾Ñ€Ñ‹Ñ… %s может получить информацию о задачах:Типы Ñлужб региÑтрации, в которых %s может получить информацию:Типы Ñлужб,на которых %s может управлÑть задачами:URLURL %s не ÑоответÑтвует информации, хранÑщейÑÑ Ð² SRM info; проверÑетÑÑ Ð½Ð¾Ð²Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÑURL ÑопоÑтавлен локальному файлу - проверка прав доÑтупа к иÑходному URLURL поÑтавлен в ÑоответÑтвие к: %sÐедейÑтвительный адреÑ: %sÐžÐ¿Ñ†Ð¸Ñ URL %s не задана в формате имÑ=значениеПротокол URL не ÑвлÑетÑÑ urllist: %sURL: %sÐе допущен через xacml.pdpÐе удалоÑÑŒ адаптировать опиÑание задачи ни к одному реÑурÑу, Ñ‚.к. не получено никакой информации.Сбой Ð´Ð¾Ð±Ð°Ð²Ð»ÐµÐ½Ð¸Ñ ÑобытиÑ: не обнаружена запиÑÑŒ AAR Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %s в базе данных учёта задач.Ðевозможно поÑтавить в ÑоответÑтвие вторичную базу данных первичной (%s)Ðе удалоÑÑŒ Ñкопировать %sÐе удалоÑÑŒ Ñкопировать шаблон наÑтроек из ÑущеÑтвующих наÑтроек (%s)Ðе удалоÑÑŒ Ñоздать каталог %s.Ðе удалоÑÑŒ Ñоздать базу данных Ð´Ð»Ñ Ð²Ñ‚Ð¾Ñ€Ð¸Ñ‡Ð½Ñ‹Ñ… ключей точек входа (%s)Ðе удалоÑÑŒ Ñоздать базу данных Ð´Ð»Ñ Ð²Ñ‚Ð¾Ñ€Ð¸Ñ‡Ð½Ñ‹Ñ… ключей имён (%s)Ðе удалоÑÑŒ Ñоздать базу данных Ð´Ð»Ñ Ð²Ñ‚Ð¾Ñ€Ð¸Ñ‡Ð½Ñ‹Ñ… ключей информации о Ñлужбах (%s)Ðе удалоÑÑŒ Ñоздать клиент SOAP иÑпользующийÑÑ EMIESClient.Ðе удалоÑÑŒ Ñоздать базу данных (%s)Ðе удалоÑÑŒ Ñоздать окружение Ð´Ð»Ñ Ð±Ð°Ð·Ñ‹ данных (%s)Ðе удалоÑÑŒ Ñоздать каталог %sÐе удалоÑÑŒ Ñоздать каталог Ð´Ð»Ñ ÑÐ¾Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ Ñ€ÐµÐ·ÑƒÐ»ÑŒÑ‚Ð°Ñ‚Ð¾Ð² (%s) - %sÐе удалоÑÑŒ Ñоздать Ð¸Ð½Ð´ÐµÐºÑ Ð´Ð»Ñ Ñ‚Ð°Ð±Ð»Ð¸Ñ†Ñ‹ задач в базе данных (%s)Ðе удалоÑÑŒ Ñоздать базу данных задач (%s)Ðе удалоÑÑŒ Ñоздать таблицу задач в базе данных (%s)Ðе удалоÑÑŒ Ñоздать таблицу jobs_new в базе данных (%s)Ðе удалоÑÑŒ Ñоздать временный каталогÐевозможно определить формат учётной запиÑи о задаче.Ðе удалоÑÑŒ определить, уÑтановлены ли ключи центра Ñертификации.Ðе удалоÑÑŒ получить информацию о ÑертификатеÐевозможно раÑпознать ошибку (%d)Ðевозможно загрузить задачу (%s), не был задан модуль JobControllerPlugin Ð´Ð»Ñ Ñ€Ð°Ð±Ð¾Ñ‚Ñ‹ Ñ Ð·Ð°Ð´Ð°Ñ‡ÐµÐ¹.Ðе удалоÑÑŒ ÑброÑить таблицу jobs в базе данных (%s)Ðе удалоÑÑŒ определить размер файла %sÐевозможно обработать задачу (%s), не указан интерфейÑ.Ðевозможно обработать задачу (%s), Ð´Ð»Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ð¾Ð³Ð¾ интерфейÑа (%s) нету подключаемых модулейÐевозможно инициализировать Ñоединение Ñ Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸ÐµÐ¼: %sÐевозможно инициализировать Ñоединение Ñ Ð¸Ñточником: %sÐевозможно инициализировать обработчик Ð´Ð»Ñ %sÐе удалоÑÑŒ проÑмотреть Ñодержимое %sÐевозможно перечиÑлить файлы на %sÐе удалоÑÑŒ загрузить файл конфигурации ARC.Ðевозможно загрузить модуль BrokerPlugin (%s)Ðевозможно подгрузить брокер %sÐевозможно подгрузить модуль (%s) Ð´Ð»Ñ Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñа (%s) при попытке заÑылки опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸.Ðевозможно погрузить подключаемый модуль Ð´Ð»Ñ Ð·Ð°Ð¿ÑƒÑка задач через Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ %sÐе удалоÑÑŒ обнаружить подключаемый модуль "%s". ПожалуйÑта, проконÑультируйтеÑÑŒ Ñ Ð¸Ð½Ñтрукцией по уÑтановке и проверьте, уÑтановлен ли пакет, Ñодержащий модуль "%s".РеÑÑƒÑ€Ñ Ð½Ðµ ÑоответÑтвует заданию, помечаетÑÑ ÐºÐ°Ðº неÑоответÑтвующий. Планировщик недейÑтвителен.Ðевозможно мигрировать задачу (%s), опиÑание задачи не может быть извлечено Ñ ÑƒÐ´Ð°Ð»Ñ‘Ð½Ð½Ð¾Ð³Ð¾ иÑточникаÐе удалоÑÑŒ перенаправить задачу (%s), Ñ‚.к. 
невозможно разобрать полученное опиÑание задачиÐевозможно открыть файл ÑпиÑка задач (%s), формат неизвеÑтенÐевозможно разобрать введённое опиÑание задачи: %sÐевозможно разобрать %s.Получено значение %s от Ñлужбы иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ (%s).Ðевозможно ÑопоÑтавить запрашиваемый уровень отладки (%s) ни Ñ Ð¾Ð´Ð½Ð¸Ð¼ из допуÑтимыхÐе удалоÑÑŒ обработать.Ðевозможно адаптировать опиÑание задачи в ÑоответÑтвии Ñ Ñ‚Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñми Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ (%s).Ðевозможно адаптировать опиÑание задачи в ÑоответÑтвии Ñ Ñ‚Ñ€ÐµÐ±Ð¾Ð²Ð°Ð½Ð¸Ñми назначениÑ.Ðевозможно опроÑить информацию о задаче (%s), задан недопуÑтимый URL (%s)Ðевозможно прочитать информацию о задаче из файла (%s)Ðевозможно зарегиÑтрировать заÑылку задачи. Ðевозможно получить объект JobDescription из планировщика, планировщик недейÑтвителен.Ðе удалоÑÑŒ удалить файл %sÐе удалоÑÑŒ переименовать %sÐе удалоÑÑŒ переименовать таблицу jobs в базе данных (%s)Ðевозможно перезапуÑтить задачу (%s), нет подходÑщих целейÐевозможно перезапуÑтить задачу (%s), Ñбой Ð¸Ð·Ð²Ð»ÐµÑ‡ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ о цели %sÐе удалоÑÑŒ перезаÑлать задачу (%s), Ñ‚.к. невозможно разобрать полученное опиÑание задачиÐевозможно получить ÑпиÑок загружаемых файлов Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %sÐевозможно выбрать подпрограммное обеÑпечениеÐевозможно выбрать операционную ÑиÑтему.Ðевозможно выбрать Ñреду выполнениÑÐевозможно уÑтановить повторÑющиеÑÑ Ð¼ÐµÑ‚ÐºÐ¸ Ð´Ð»Ñ Ð²Ñ‚Ð¾Ñ€Ð¸Ñ‡Ð½Ð¾Ð¹ базы данных ключей (%s)Ðевозможно упорÑдочить объекты ExecutionTarget - недопуÑтимый объект Broker.Ðевозможно упорÑдочить добавленные задачи. Подключаемый модуль BrokerPlugin не был подгружен.ЗаÑылка задачи не удалаÑÑŒ. Сбой приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¾Ð¿Ð¸Ñанию задачи.Ðевозможно заÑлать задачу. ОпиÑание задачи не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым файлом XMLÐевозможно заÑлать задачу. ОпиÑание задачи в формате %s недейÑтвительно: %sЗаÑылка задач не удалаÑÑŒ. 
Сбой Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ð¾Ð² доÑтупа.Ðе удалоÑÑŒ перенеÑти из таблицы jobs в jobs_new в базе данных (%s)Ðе удалоÑÑŒ укоротить базу данных задач (%s)Ðе удалоÑÑŒ запиÑать файл 'output': %sÐе удалоÑÑŒ запиÑать файл grami: %sÐевозможно запиÑать пару ключ/значение в базу данных задач (%s): Ключ "%s"Ðе удалоÑÑŒ внеÑти запиÑи в базу данных задач (%s): Id "%s"Сбой запиÑи в файл p12ДоÑтуп закрытÐе допущен удалённой Ñлужбой PDPÐÐµÐ²ÐµÑ€Ð½Ð°Ñ ÑƒÐ¿Ñ€Ð°Ð²Ð»ÑÑŽÑ‰Ð°Ñ Ð¿Ð¾ÑледовательноÑть: %%%sÐеожиданный тип RSLÐепредуÑмотренный аргумент Ð´Ð»Ñ Ð¿Ñ€Ð°Ð²Ð¸Ð»Ð° 'all' - %sÐепредуÑмотренные аргументыЗаданы непредуÑмотренные аргументыÐеверное раÑположение Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¾Ñ‚ Ñлужбы Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ - %s.Ðеожиданное немедленное завершение: %sОтзыв Rucio Ñодержит недопуÑтимое имÑ: %sСервер возвратил неожиданный путь %sÐеверный код отклика Ñлужбы Ð´ÐµÐ»ÐµÐ³Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ - %uÐеверный код отклика Ñлужбы делегированиÑ: %u, %s.Uniq добавлÑет ÑервиÑ, обнаруженный через %sUniq игнорирует ÑервиÑ, обнаруженный через %sUniq заменÑет ÑервиÑ, обнаруженный через %s, на ÑервиÑ, обнаруженный через %sÐеизвеÑтное правило доÑтупа %s Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ %sÐеизвеÑтный контекÑÑ‚ LDAP %s - иÑпользуетÑÑ baseÐеизвеÑтный атрибут XRSL: %s - игнорируетÑÑ.ИгнорируетÑÑ Ð½ÐµÐ¸Ð·Ð²ÐµÑтный атрибут %s в разделе common файла наÑтроек (%s)ÐеизвеÑÑ‚Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ° команды допуÑка %sÐеизвеÑтный канал %s Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñ‚Ð¾ÐºÐ¾Ð»Ð° stdioÐеизвеÑтный ÑпоÑоб Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ %s, иÑпользуетÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ по умолчаниюÐеизвеÑтный тип параметра доÑтупа %s Ð´Ð»Ñ ÑˆÐ°Ð±Ð»Ð¾Ð½Ð° URL %sÐеизвеÑтный Ñлемент в политике подпиÑи GlobusÐеизвеÑÑ‚Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ в EGIIS (%s)ÐеизвеÑÑ‚Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ°ÐеизвеÑтный ключ или тип хешированиÑÐеизвеÑтный ключ или тип Ñ…ÐµÑˆÐ¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ð¸Ð·Ð´Ð°Ñ‚ÐµÐ»Ñ ÑертификатаÐеизвеÑтный уровень Ð¶ÑƒÑ€Ð½Ð°Ð»Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ %sÐеизвеÑтный режим Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ %iÐеизвеÑтный режим Ð¾Ñ‚ÐºÑ€Ñ‹Ñ‚Ð¸Ñ %sÐеизвеÑтный параметрÐеизвеÑтный параметр %sÐеизвеÑтные права в политике подпиÑи Globus - %sИгнорируетÑÑ Ð½ÐµÐ¸Ð·Ð²ÐµÑтный раздел %sÐеизвеÑÑ‚Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð¸ файлов: %sÐеизвеÑтное правило приÑÐ²Ð¾ÐµÐ½Ð¸Ñ Ð¸Ð¼ÐµÐ½Ð¸ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ %sУдалÑетÑÑ Ð·Ð°Ð¿Ð¸ÑÑŒ о %sСбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ñ€ÐµÐ³Ð¸Ñтрации из каталогаЗаданный URL не поддерживаетÑÑЗаданный URL не поддерживаетÑÑ: %sÐÐµÐ¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ð°Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð°: %sÐÐµÐ¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ð°Ñ Ð¸Ð½ÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ Ð½Ð°Ñтроек: %sÐеподдерживаемый URL назначениÑ: %sÐеподдерживаемый тип точки входа информации: %sТип ÑпиÑка задач '%s' не поддерживаетÑÑ, будет иÑпользоватьÑÑ 'BDB'. ПоддерживаютÑÑ Ñледующие типы: BDB, SQLITE, XML.Ðеподдерживаемое дейÑтвие политики ÑоответÑтвиÑ: %sÐÐµÐ¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð¿Ð¾Ð»Ð¸Ñ‚Ð¸ÐºÐ¸ ÑоответÑтвиÑ: %sÐеподдерживаемый протокол в URL %sЗапрошен неподдерживаемый Ñзык политик доверенноÑти - %sЗапрошена Ð½ÐµÐ¿Ð¾Ð´Ð´ÐµÑ€Ð¶Ð¸Ð²Ð°ÐµÐ¼Ð°Ñ Ð²ÐµÑ€ÑÐ¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти - %sÐеподдерживаемый URL иÑточника: %sÐеподдерживаемый тип точки входа заÑылки: %sÐ˜Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð·Ð°Ñылки %s не поддерживаетÑÑ. Похоже, arc-blahp-logger пора обновить. 
ПожалуйÑта, опишите проблему в bugzill-е.Ðеподдерживаемое значение Ð´Ð»Ñ allownew: %sЦепочка Ñодержит недоверÑемый ÑамоподпиÑанный Ñертификат Ñ Ñубъектом %s и отпечатком: %luСбой в UpdateCredentialsUpdateCredentials: EPR не Ñодержит JobIDUpdateCredentials: невозможно обновить параметры доÑтупаUpdateCredentials: отÑутÑтвует ÑÑылкаUpdateCredentials: задача не обнаружена: %sUpdateCredentials: Ð·Ð°Ð¿Ñ€Ð¾Ñ = %sUpdateCredentials: отзыв = %sUpdateCredentials: недопуÑтимое количеÑтво ÑÑылокUpdateCredentials: недопуÑтимое чиÑло Ñлементов внутри ReferenceИÑпользование:ИÑпользование: copy иÑточник назначениеИÑпользуйте опцию --help Ð´Ð»Ñ Ð¿Ð¾Ð´Ñ€Ð¾Ð±Ð½Ð¾Ð³Ð¾ опиÑаниÑÐ”Ð»Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ñправки иÑпользуйте "-?"ИÑпользуетÑÑ Ñ„Ð°Ð¹Ð» наÑтроек %sИÑпользованные Ñдра: %dФайл наÑтроек Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ (%s) Ñодержит ошибки.Файл наÑтроек Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ (%s) не ÑущеÑтвует или не может быть подгружен.ОтÑутÑтвует пользователь Ð´Ð»Ñ Ð²Ñпомогательной программыДерево виртуального каталога Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð¿ÑƒÑто. Либо у Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½ÐµÑ‚ допущенных раÑширений, либо раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ð²Ð¾Ð¾Ð±Ñ‰Ðµ не наÑтроены.Пользователь не аÑÑоциирован Ñ Ð¿Ð¾Ð´Ñ…Ð¾Ð´Ñщей наÑтройкойОшибка интерфейÑа пользователÑОтÑутÑтвует Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð² прÑмом приÑвоении имени: %s.ПуÑÑ‚Ð°Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð° в приÑвоении имени пользователÑПуÑÑ‚Ð°Ñ Ð³Ñ€ÑƒÐ¿Ð¿Ð° authgroup в приÑвоении имени пользователÑ: %sПуÑÑ‚Ð°Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð° в приÑвоении имени пользователÑПуÑтое Ð¸Ð¼Ñ Ð² приÑвоении имени пользователÑ: %sДолжно быть указано Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ.Ðевозможно открыть пул пользователей в %s.Пул пользователей в %s не Ñмог уÑтановить ÑоответÑтвие Ð´Ð»Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ.ОтÑутÑтвует Ñубъект Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð² приÑвоении пула пользователей.ОтÑутÑтвует Ñубъект Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð´Ð»Ñ ÑоответÑÑ‚Ð²Ð¸Ñ Ñубъекта.Субъект Ñертификата: %sКлаÑÑ UserConfig не ÑвлÑетÑÑ Ð¾Ð±ÑŠÐµÐºÑ‚Ð¾Ð¼UserConfiguration Ñохранены в файле (%s)Обработчик токена Username не наÑтроенИÑпользуетÑÑ Ñ„Ð°Ð¹Ð» наÑтроек A-REX %sИÑпользуетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³ доверенных Ñертификатов CA: %sИÑпользуютÑÑ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ñ‹ DH из файла %sИÑпользуетÑÑ OTokenИÑпользуетÑÑ ÑƒÑ‡Ñ‘Ñ‚Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ Rucio %sИÑпользуетÑÑ Ð±ÑƒÑ„ÐµÑ€Ð¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ñ‹Ð¹ метод передачи данныхИÑпользуетÑÑ ÐºÑш %sИÑпользуетÑÑ ÐºÑÑˆÐ¸Ñ€Ð¾Ð²Ð°Ð½Ð½Ð°Ñ Ð¼ÐµÑÑ‚Ð½Ð°Ñ ÑƒÑ‡Ñ‘Ñ‚Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ '%s'ИÑпользуетÑÑ Ñертификат %sИÑпользуетÑÑ Ñ„Ð°Ð¹Ð» Ñертификата: %sИÑпользуемый ÑпиÑок шифров: %sИÑпользуетÑÑ Ð°Ð»Ð³Ð¾Ñ€Ð¸Ñ‚Ð¼ ÑˆÐ¸Ñ„Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ %sИÑпользуютÑÑ Ð½Ð°Ñтройки в %sИÑпользуетÑÑ ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ñ‹Ð¹ каталог %sИÑпользуетÑÑ ÐºÑ€Ð¸Ð²Ð°Ñ Ñ NID %uИÑпользуетÑÑ Ð½ÐµÐ·Ð°Ñ‰Ð¸Ñ‰Ñ‘Ð½Ð½Ð°Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð° данныхИÑпользуетÑÑ Ð²Ð½ÑƒÑ‚Ñ€ÐµÐ½Ð½Ð¸Ð¹ метод передачи данных %sИÑпользуетÑÑ ÐºÐ»ÑŽÑ‡ %sИÑпользуетÑÑ Ñ„Ð°Ð¹Ð» личного ключа: %sИÑпользуетÑÑ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð°Ñ ÑƒÑ‡Ñ‘Ñ‚Ð½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ '%s'ИÑпользуетÑÑ ÑÐ»ÐµÐ´ÑƒÑŽÑ‰Ð°Ñ ÐºÐ¾Ð¿Ð¸Ñ (%s)ИÑпользуемые опции протокола: 0x%xИÑпользуетÑÑ Ð¿Ñ€Ð¾ÐºÑи %sИÑпользуетÑÑ Ñ„Ð°Ð¹Ð» доверенноÑти: %sИÑпользуетÑÑ Ð·Ð°Ñ‰Ð¸Ñ‰Ñ‘Ð½Ð½Ð°Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð° данныхИÑпользуетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³ ÑеÑÑии %sИÑпользуетÑÑ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³ ÑеÑÑии %sИÑпользуетÑÑ Ð¼Ð°Ñ€ÐºÑ‘Ñ€ проÑтранÑтва памÑти %sИÑпользуетÑÑ Ð¾Ð¿Ð¸Ñание маркёра проÑтранÑтва памÑти %sВО %s не Ñовпадает Ñ %sÐтрибут VOMS AC ÑвлÑетÑÑ Ñ‚ÐµÐ³Ð¾Ð¼Ðтрибут VOMS AC ÑвлÑетÑÑ FQANÐтрибут VOMS %s не Ñовпадает Ñ %sÐтрибут VOMS %s Ñовпадает Ñ %sÐтрибут VOMS игнорируетÑÑ Ð¸Ð·-за ошибки обработки или проверкиСбой обработки атрибутов VOMSСбой проверки атрибутов VOMSПуÑÑ‚Ð°Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð° VOMSСтрока VOMS 
Ñодержит неверное количеÑтво токенов (ожидаетÑÑ %u): "%s"Обработка доверенноÑти VOMS выдаёт: %i - %sЦепочка Ñертификатов VOMS: %sVOMS: Ñрок годноÑти AC вышелVOMS: Сертификат атрибута (AC) неполон - отÑутÑтвует Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾Ð± атрибутах Serial и/или IssuerVOMS: Ñертификат атрибута ещё не дейÑтвителенVOMS: Ñбой Ð¿Ð¾Ð´Ñ‚Ð²ÐµÑ€Ð¶Ð´ÐµÐ½Ð¸Ñ Ð¿Ð¾Ð´Ð¿Ð¸Ñи Ñертификата атрибутаVOMS: Ðеобходимо задать каталог или файл Ñертификационного агентÑтваVOMS: Ðе удалоÑÑŒ зарезервировать памÑть Ð´Ð»Ñ Ñ€Ð°Ð·Ð±Ð¾Ñ€Ð° ACVOMS: Ðе удалоÑÑŒ зарезервировать памÑть Ð´Ð»Ñ Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ Ð¿Ð¾ÑледовательноÑти ACVOMS: Ðевозможно найти AC_ATTR типа IETFATTRVOMS: Ðе удалоÑÑŒ обработать ACVOMS: невозможно найти Ñертификат лица, выдавшего Ñертификат атрибута Ð´Ð»Ñ Ð²Ð¸Ñ€Ñ‚ÑƒÐ°Ð»ÑŒÐ½Ð¾Ð¹ организации %sVOMS: DN владельца в Ñертификате атрибута (AC): %sVOMS: DN владельца: %sVOMS: DN Ñмитента: %sVOMS: FQDN узла %s не ÑоответÑтвует ни одному из назначений в Ñертификате атрибута (AC)VOMS: Файл lsc %s не может быть открытVOMS: Файл lsc %s не ÑущеÑтвуетVOMS: неверный authorityKeyVOMS: должны приÑутÑтвовать оба раÑÑˆÐ¸Ñ€ÐµÐ½Ð¸Ñ Ñертификата, idcenoRevAvail и authorityKeyIdentifierVOMS: не удалоÑÑŒ подтвердить подпиÑÑŒ Ñертификата атрибутаVOMS: невозможно удоÑтоверить лицо, выдавшее Ñертификат атрибута Ð´Ð»Ñ Ð²Ð¸Ñ€Ñ‚ÑƒÐ°Ð»ÑŒÐ½Ð¾Ð¹ организации %sVOMS: иÑпользование множеÑтвенных атрибутов IETFATTR не поддерживаетÑÑVOMS: иÑпользование множеÑтвенных атрибутов policyAuthority не поддерживаетÑÑVOMS: ÑоÑтавление FQAN: %sVOMS: Ñозадние атрибута: %sVOMS: директориÑ, ÑÐ¾Ð´ÐµÑ€Ð¶Ð°Ñ‰Ð°Ñ Ñертификаты доверÑемых Ñлужб: %sVOMS: Ñбой при разборе атрибутов в Ñертификате атрибута (AC)VOMS: не удалоÑÑŒ подтвердить подпиÑÑŒ Ñертификата атрибутаVOMS: отÑутÑтвуют чаÑти ACVOMS: проблемы при разборке информации в ACVOMS: Отличительное Ð¸Ð¼Ñ (DN) в Ñертификате %s не ÑоответÑтвует таковому в доверÑемом ÑпиÑке: %sVOMS: Отличительный признак агентÑтва, выдавшего Ñертификат %s, не ÑоответÑтвует таковому в доверÑемом ÑпиÑке: %sVOMS: отÑутÑтвует Ð¸Ð¼Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð°VOMS: атрибут qualifier пуÑÑ‚VOMS: отÑутÑвует значение атрибута Ð´Ð»Ñ %sVOMS: недопуÑтимый формат IETFATTRVAL - ожидаетÑÑ OCTET STRINGVOMS: недопуÑтимый формат атрибута policyAuthority - ожидаетÑÑ URIVOMS: атрибут grantor пуÑÑ‚VOMS: Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ владельце в Ñертификате атрибута (AC)VOMS: Ð¸Ð¼Ñ Ð°Ð³ÐµÐ½Ñ‚Ñтва, выдавшего Ñертификат, не Ñовпадает Ñ Ñ‚Ð°ÐºÐ¾Ð²Ñ‹Ð¼ в Ñертификате атрибута (AC)VOMS: атрибут issuerUID в Ñертификате владельца не Ñовпадает Ñ Ñ‚Ð°ÐºÐ¾Ð²Ñ‹Ð¼ в Ñертификате атрибута (AC)VOMS: Ð¸Ð¼Ñ Ð²Ð»Ð°Ð´ÐµÐ»ÑŒÑ†Ð° в Ñертификате атрибута (AC) не имеет Ð¾Ñ‚Ð½Ð¾ÑˆÐµÐ½Ð¸Ñ Ðº отличительному имени в Ñертификате владельцаVOMS: Ñерийный номер владельца %lx не Ñовпадает Ñ Ñ‚Ð°ÐºÐ¾Ð²Ñ‹Ð¼ в Ñертификате атрибута (AC) %lx; Ñертификат, иÑпользуемый Ð´Ð»Ñ ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти VOMS, может быть доверенноÑтью Ñ Ñерийным номером, отличным от изначального ÑертификатаVOMS: Ñерийный номер владельца: %lxVOMS: Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾Ð± агентÑтве, выдавшем Ñертификат, в Ñертификате атрибута (AC)VOMS: Ð¸Ð¼Ñ Ð°Ð³ÐµÐ½Ñ‚Ñтва, выдавшего Ñертификат - %s - не Ñовпадает Ñ Ñ‚Ð°ÐºÐ¾Ð²Ñ‹Ð¼ в Ñертификате атрибута (AC) - %sVOMS: единÑтвенным поддерживаемым критичеÑким раÑширением атрибута Ñертификата (AC) ÑвлÑетÑÑ idceTargetsVOMS: Ñерийный номер в Ñертификате атрибута (AC): %lxVOMS: Ñлишком длинный Ñерийный номер AC INFO - ожидаетÑÑ Ð½Ðµ более 20-и октетовVOMS: отÑутÑтвуют Ð¾Ð³Ñ€Ð°Ð½Ð¸Ñ‡ÐµÐ½Ð¸Ñ Ð¿Ð¾ отличительным признакам доверÑемых VOMS, цепочка Ñертификатов в 
Ñертификате атрибута (AC) не будет проверена.VOMS: подтверждаетÑÑ Ñ†ÐµÐ¿Ð¾Ñ‡ÐºÐ° Ñертификатов: %s VOMS: невозможно определить название узла Ñертификата атрибута (AC) из Ð½Ð°Ð·Ð²Ð°Ð½Ð¸Ñ Ð²Ð¸Ñ€Ñ‚ÑƒÐ°Ð»ÑŒÐ½Ð¾Ð¹ организации: %sVOMS: невозможно извлечь название виртуальной организации из Ñертификата атрибута (AC)VOMS: невозможно найти цепочку Ñертификатов, ÑоответÑтвующую доверÑемым отличительным признакам VOMSVOMS: невозможно подтвердить цепочку ÑертификатовVOMS: неверный формат времени в Ñертификате атрибута (AC) - ожидаетÑÑ GENERALIZED TIMEОбнаружено дейÑтвительное опиÑание JobDescriptionСрок дейÑÑ‚Ð²Ð¸Ñ Ð¸Ñтекает через: %sДоверенноÑть дейÑтвительна на: Срок дейÑÑ‚Ð²Ð¸Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñти вышелДоверенноÑть дейÑтвительна на: ДоверенноÑть недейÑтвительнаДейÑтвует по: %sЗначение %s.%s: "%s"Значение атрибута 'count' должно быть целочиÑленнымЗначение атрибута 'countpernode' должно быть целочиÑленнымЗначением атрибута 'exclusiveexecution' может быть либо 'yes', либо 'no'Значение атрибута '%s' не должно быть пуÑтымЗначение атрибута '%s' не ÑвлÑетÑÑ ÑтрокойЗначение атрибута '%s' неоднозначноЗначение атрибута '%s' Ñодержит поÑледовательноÑть недопуÑтимой длины: ожидаетÑÑ %d, получено %dЗначение атрибута '%s' не ÑвлÑетÑÑ ÑтрокойЗначение атрибута '%s' не ÑвлÑетÑÑ Ð¿Ð¾ÑледовательноÑÑ‚ÑŒÑŽÐ˜Ð¼Ñ Ð¿ÐµÑ€ÐµÐ¼ÐµÐ½Ð½Ð¾Ð¹ (%s) Ñодержит неверный Ñимвол (%s)ОжидаетÑÑ Ð¸Ð¼Ñ Ð¿ÐµÑ€ÐµÐ¼ÐµÐ½Ð½Ð¾Ð¹Ð’ÐµÑ€ÑÐ¸Ñ Ð² Ñлементе Listen не опознанаПРЕДУПРЕЖДЕÐИЕ: Заданное Вами Ð²Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ: %s предшеÑтвует текущему времени: %sПРЕДУПРЕЖДЕÐИЕ: Заданное Вами Ð²Ñ€ÐµÐ¼Ñ Ð½Ð°Ñ‡Ð°Ð»Ð°: %s предшеÑтвует текущему времени: %sОжидание: 1 минутаОжидание завершено.Ожидание буфераЖдём пока ÑÑылка globus уÑтаканитÑÑОжидание Ñ€Ð°Ð·Ð±Ð»Ð¾ÐºÐ¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° %sОжидание Ñ€Ð°Ð·Ð±Ð»Ð¾ÐºÐ¸Ñ€Ð¾Ð²Ð°Ð½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° ÑпиÑка задач %sОжидание Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ð¾Ñновного потока обработки задачиОжидание откликаЗадачи в очереди: %iÐктивизациÑПредупреждение: Ðе удалоÑÑŒ вывеÑти ÑпиÑок файлов, но Ð½ÐµÐºÐ¾Ñ‚Ð¾Ñ€Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð±Ñ‹Ð»Ð° полученаПредупреждение: Сбой ÑƒÐ´Ð°Ð»ÐµÐ½Ð¸Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ð¸ о задачах из файла (%s)Предупреждение: Сбой запиÑи информации о задаче в файл (%s)Предупреждение: Сбой запиÑи ÑпиÑка локальных задач в файл (%s), ÑпиÑок задач уничтоженПредупреждение: Задача не обнаружена в ÑпиÑке задач: %sПредупреждение: некоторые задачи не были удалены Ñ ÑервераПредупреждение: Ðевозможно Ñоздать файл ÑпиÑка задач (%s), ÑпиÑок задач уничтоженПредупреждение: Ðевозможно открыть файл ÑпиÑка задач (%s), формат неизвеÑтенПредупреждение: Сбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð³Ð¾ ÑпиÑка задач из файла (%s)Предупреждение: Сбой ÑÐ¾ÐºÑ€Ð°Ñ‰ÐµÐ½Ð¸Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ð¾Ð³Ð¾ ÑпиÑка задач в файле (%s)Warning: ИÑпользуетÑÑ Ð²ÐµÑ€ÑÐ¸Ñ v1 протокола SRM, ÐºÐ¾Ñ‚Ð¾Ñ€Ð°Ñ Ð½Ðµ поддерживает токены меÑтаПредупреждение: не удалоÑÑŒ Ñоздать точку Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡ÐµÐ½Ð¸Ñ %s.ОжидалоÑÑŒ %s в начале "%s"Самоконтроль (пере)запуÑкает приложениеСамоконтроль обнаружил завершение приложениÑСамоконтроль обнаружил завершение Ð¿Ñ€Ð¸Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ Ð¿Ð¾ Ñигналу %uСамоконтроль обнаружил приложение, завершившееÑÑ Ñ ÐºÐ¾Ð´Ð¾Ð¼ %uСамоконтроль обнаружил превышение времени Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ñ€Ð¸Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ Ð¸Ð»Ð¸ Ñбой - процеÑÑ Ð¿Ñ€ÐµÑ€Ñ‹Ð²Ð°ÐµÑ‚ÑÑСамоконтроль оÑтанавливаетÑÑ, потому что приложение было прервано намеренно, или завершилоÑьСамоконтроль не Ñмог оборвать приложение - отказ и завершениеСамоконтроль не дождалÑÑ Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ð¿Ñ€Ð¸Ð»Ð¾Ð¶ÐµÐ½Ð¸Ñ - поÑылаетÑÑ Ñигнал KILLÐе удалоÑÑŒ Ñоздать дочерний Ñторожевой процеÑÑ: %sСамоконтроль запуÑкает 
мониторингМы поддерживаем только CA в Globus signing policy - %s не поддерживаетÑÑМы поддерживаем только центры Ñертификации X509 в политике подпиÑи Globus - %s не поддерживаетÑÑМы поддерживаем только уÑÐ»Ð¾Ð²Ð¸Ñ globus в политике подпиÑи Globus - %s не поддерживаетÑÑМы поддерживаем только уÑÐ»Ð¾Ð²Ð¸Ñ Ñубъекта в политике подпиÑи Globus - %s не поддерживаетÑÑПри задании атрибута 'countpernode', атрибут 'count' также должен быть заданБудет выполнена Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ñ %s в каталоге назначениÑБудет вычиÑлена ÐºÐ¾Ð½Ñ‚Ñ€Ð¾Ð»ÑŒÐ½Ð°Ñ Ñумма Ð´Ð»Ñ %sПредварительное назначение будет ÑброшеноБудет произведена загрузка в файл кÑша %sПо умолчанию привÑзки к учётной запиÑи 'root' не будетБудет обработан кÑшБудет отменены блокировки в кÑшеЗадача %s будет удалена Ñ ÑеривÑа %s.Будет произведена Ð¿Ð¾Ð²Ñ‚Ð¾Ñ€Ð½Ð°Ñ Ð¿Ð¾Ð¿Ñ‹Ñ‚ÐºÐ° без кÑшированиÑБудет иÑпользован маÑÑовый запроÑОжидание 10 ÑекундОжидание порÑдка %i ÑекУничтожение и воÑÑоздание вÑего хранилищаСвободное рабочее проÑтранÑтво: %i GBРабочее проÑтранÑтво иÑпользуетÑÑ Ð¾Ð´Ð½Ð¾Ð¹ задачейРабочее проÑтранÑтво иÑпользуетÑÑ Ñ€Ð°Ð·Ð½Ñ‹Ð¼Ð¸ Ð·Ð°Ð´Ð°Ñ‡Ð°Ð¼Ð¸Ð’Ñ€ÐµÐ¼Ñ Ð¶Ð¸Ð·Ð½Ð¸ рабочего проÑтранÑтва: %sОбщий объём рабочего проÑтранÑтва: %i GBЗапиÑÑŒ информации в журнал программы разбора BLAH: %sÐедопуÑтимый размер буфераÐеприемлемое значение defaultbuffer в наÑтройкахÐеверный каталог в %sÐеверный формат "FreeSlotsWithDuration" = "%s" ("%s")Запрошен неверный Ñзык: %sÐеприемлемое значение maxbuffer в наÑтройкахÐеприемлемое значение maxconnections в наÑтройкахÐедопуÑтимый макÑимальный размер буфераÐедопуÑтимое чиÑло в команде defaultttlÐедопуÑтимое чиÑло в maxjobdescÐедопуÑтимое чиÑло в команде maxjobdescÐедопуÑтимое чиÑло в maxjobs: %sÐедопуÑтимое чиÑло в команде maxrerunÐедопуÑтимое значение в urdelivery_frequency: %sÐедопуÑтимое чиÑло в wakeupperiod: %sУказано неверное количеÑтво аргументовÐедопуÑтимое чиÑло аргументов!ÐедопуÑтимое количеÑтво подключенийÐеверное количеÑтво объектов (%i) Ð´Ð»Ñ Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ð¸ stat от ftp: %sЗадано неверное количеÑтво параметровÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð² %sÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð´Ð»Ñ Ð±Ð°Ð·Ñ‹ данных delegationdbÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð² fixdirectoriesÐÐµÐ²ÐµÑ€Ð½Ð°Ñ Ð¾Ð¿Ñ†Ð¸Ñ Ð´Ð»Ñ logreopenÐÐµÐ¿Ñ€Ð°Ð²Ð¸Ð»ÑŒÐ½Ð°Ñ Ð¿Ñ€Ð¸Ð½Ð°Ð´Ð»ÐµÐ¶Ð½Ð¾Ñть файла открытого ключа: %sÐÐµÐ¿Ñ€Ð°Ð²Ð¸Ð»ÑŒÐ½Ð°Ñ Ð¿Ñ€Ð¸Ð½Ð°Ð´Ð»ÐµÐ¶Ð½Ð¾Ñть файла личного ключа: %sÐÐµÐ¿Ñ€Ð°Ð²Ð¸Ð»ÑŒÐ½Ð°Ñ Ð¿Ñ€Ð¸Ð½Ð°Ð´Ð»ÐµÐ¶Ð½Ð¾Ñть файла доверенноÑти: %sÐеправильные права доÑтупа к файлу открытого ключа: %sÐеправильные права доÑтупа к файлу личного ключа: %sÐеправильные права доÑтупа к файлу доверенноÑти: %sÐедопуÑтимый номер портаÐеприемлемый номер порта в наÑтройкахОбнаружено недопуÑтимое поле запиÑи "%s" в "%s"Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð·Ð°Ð¿Ð¸Ñан в файлПодпиÑанный Ñертификат EEC запиÑан в файлПодпиÑÐ°Ð½Ð½Ð°Ñ Ð´Ð¾Ð²ÐµÑ€ÐµÐ½Ð½Ð¾Ñть запиÑана в файлОбработчик токена X509 не наÑÑ‚Ñ€Ð¾ÐµÐ½Ð—Ð°Ð¿Ñ€Ð¾Ñ Ð°Ð²Ñ‚Ð¾Ñ€Ð¸Ð·Ð°Ñ†Ð¸Ð¸ GACL: %sОтклик допуÑка XACML: %sÐ·Ð°Ð¿Ñ€Ð¾Ñ XACML: %sФайл наÑтроек XML %s не ÑущеÑтвуетОтзыв XML: %sИз ÑпиÑка задач будут удалены задачи, о которых не обнаружена информациÑ. 
Ð’ÐИМÐÐИЕ: задачи, запущенные недавно, могли ещё не поÑвитьÑÑ Ð² информационной ÑиÑтеме, и Ñта Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ñ ÑƒÐ´Ð°Ð»Ð¸Ñ‚ также Ñти задачи.Ð’Ñ‹ можете попытатьÑÑ ÑƒÐ²ÐµÐ»Ð¸Ñ‡Ð¸Ñ‚ÑŒ уровень детальноÑти Ð´Ð»Ñ Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ð´Ð¾Ð¿Ð¾Ð»Ð½Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ð¹ информации.Ваши личные данные: %sÐе уÑтановлен Ñертификат Вашего центра ÑертификацииВаша доверенноÑть дейÑтвительна до: %sÐ’ файл запиÑано ноль байтов[ADLParser] Ñлемент %s должен быть логичеÑким.[ADLParser] AccessControl не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым XML.[ADLParser] Benchmark пока что не поддерживаетÑÑ.[ADLParser] Код в FailIfExitCodeNotEqualTo в %s не ÑвлÑетÑÑ Ð´Ð¾Ð¿ÑƒÑтимым чиÑлом.[ADLParser] Значение CreationFlag %s не поддерживаетÑÑ.[ADLParser] CredentialService должен Ñодержать допуÑтимый URL.[ADLParser] ОтÑутÑтвует Ñлемент Name или значение Ñлемента ParallelEnvironment/Option.[ADLParser] Значение Name в InputFile отÑутÑтвует или неверно.[ADLParser] Значение Name в OutputFile отÑутÑтвует или неверно.[ADLParser] Значение DiskSpaceRequirement отÑутÑтвует или неверно.[ADLParser] Значение IndividualCPUTime отÑутÑтвует или неверно.[ADLParser] Значение IndividualPhysicalMemory отÑутÑтвует или неверно.[ADLParser] Значение IndividualVirtualMemory отÑутÑтвует или неверно.[ADLParser] Значение NumberOfSlots отÑутÑтвует или неверно.[ADLParser] Значение ProcessesPerSlot отÑутÑтвует или неверно.[ADLParser] Значение SlotsPerHost отÑутÑтвует или неверно.[ADLParser] Значение ThreadsPerProcess отÑутÑтвует или неверно.[ADLParser] Значение TotalCPUTime отÑутÑтвует или неверно.[ADLParser] Значение WallTime отÑутÑтвует или неверно.[ADLParser] NetworkInfo пока что не поддерживаетÑÑ.[ADLParser] Значение NodeAccess %s пока что не поддерживаетÑÑ.[ADLParser] Пока что поддерживаетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ email Prorocol Ð´Ð»Ñ Notification.[ADLParser] Ðтрибут optional Ð´Ð»Ñ Ñлементов %s пока не поддерживаетÑÑ.[ADLParser] Корневой Ñлемент не ÑвлÑетÑÑ ActivityDescription [ADLParser] Значение Ñлемента NumberOfSlots должно быть указано, еÑли значение атрибута useNumberOfSlots Ñлемента SlotsPerHost - "true".[ADLParser] Ðеподдерживаемое ÑоÑтоÑние EMI ES %s.[ADLParser] Ðеподдерживаемый URL %s в RemoteLogging.[ADLParser] Ðеподдерживаемое внутреннее ÑоÑтоÑние %s.[ADLParser] Указан неверный URI в Source - %s.[ADLParser] Указан неверный URI в Target - %s.[ADLParser] ExpirationTime Ñодержит недопуÑтимое Ð²Ñ€ÐµÐ¼Ñ %s.[ADLParser] Ñлишком выÑокий приоритет - иÑпользуетÑÑ Ð¼Ð°ÐºÑимальное значение 100[файл ...][задача ...][опиÑание задачи...][ввод опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸][реÑÑƒÑ€Ñ ...]файл, Ñодержащий Ñрлыки задачabort_callback: ошибка Globus: %sabort_callback: запуÑкaction(%s) != requestactive_data отключеноСбой add_wordПараметр authorizedvo пуÑтвÑе Ð´Ð»Ñ Ð¾Ð±ÑлуживаниÑвÑе задачиarc.confневерный каталог Ð´Ð»Ñ Ð¿Ð¾Ð´ÐºÐ»ÑŽÑ‡Ð°ÐµÐ¼Ð¾Ð³Ð¾ модулÑ: %sпланировщикбуфер: ошибка: %s, чтение: %s, запиÑÑŒ: %sбуфер: чтение конца файла : %sбуфер: запиÑÑŒ конца файла: %sкÑш-файл: %sотмененоceÐŸÑ€ÐµÑ„Ð¸ÐºÑ ceID задан как %sпроверить читаемоÑть объекта, не показывать информацию об объектеcheck_abort: получена ошибка Globuscheck_abort: поÑылаетÑÑ 426check_ftp: Ñбой при определении времени Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð°check_ftp: не удалоÑÑŒ определить размер файлаcheck_ftp: Ñбой в globus_ftp_client_getcheck_ftp: Ñбой в globus_ftp_client_modification_timecheck_ftp: globus_ftp_client_register_readcheck_ftp: Ñбой в globus_ftp_client_sizecheck_ftp: получена дата изменениÑ: %scheck_ftp: получен размер: %llicheck_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ modification_timecheck_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ 
Ñ‡Ð°Ñтичной загрузкиcheck_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ñ‹ sizeназвание клаÑÑа: %sнайден оригинал опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в формате XRSLоригинал опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в формате XRSL не найденСбой при закрытии: %sÑбой при закрытии файла %s: %sинÑÑ‚Ñ€ÑƒÐºÑ†Ð¸Ñ Ñерверу MyProxy. Возможны Ñледующие инÑтрукции: PUT, GET, INFO, NEWPASS или DESTROY. PUT -- Ñохранить делегированный Ñертификат на Ñервере MyProxy; GET -- получить делегированный Ñертификат Ñ Ñервера MyProxy, INFO -- вывеÑти информацию о Ñертификатах, хранÑщихÑÑ Ð½Ð° Ñервере MyProxy; NEWPASS -- изменить пароль, защищающий Ñертификаты, хранÑщиеÑÑ Ð½Ð° Ñервере MyProxy; DESTROY -- удалить Ñертификаты, хранÑщиеÑÑ Ð½Ð° Ñервере MyProxy; Личные Ñертификаты и ключи не требуютÑÑ, за иÑключением инÑтрукции PUT. ИнÑтрукции MyProxy и VOMS могут иÑпользоватьÑÑ Ð¾Ð´Ð½Ð¾Ð²Ñ€ÐµÐ¼ÐµÐ½Ð½Ð¾. Опции --voms and --vomses могут быть иÑпользованы Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ð¾Ð¹ Get, еÑли в доверенноÑть необходимо включить атрибуты VOMS. computingфайл наÑтроек (по умолчанию ~/.arc/client.conf)файл наÑтроек не найденпреобразовать из указанного иÑходного формата базы данных [bdb|sqlite]преобразовать в указанный выходной формат базы данных [bdb|sqlite]невозможно найти конец опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в формате XRSLневозможно найти начало опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ в формате XRSLСбой d2i_X509_REQ_bioкуÑок данных: %llu %lludata_connect_retrieve_callbackdata_connect_retrieve_callback: allocate_data_bufferdata_connect_retrieve_callback: Ñбой в allocate_data_bufferdata_connect_retrieve_callback: проверка буфера %udata_connect_store_callbackdata_retrieve_callbackdata_retrieve_callback: буфер потерÑнdata_store_callback: буфер потерÑнформат базы данныхуровеньукажите запрашиваемый формат (nordugrid:xrsl, emies:adl)delete_ftp: Ñбой в globus_ftp_client_deletedelete_ftp: Ñбой в globus_ftp_client_rmdirdelete_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ñ‹ deleteназначениеdestination.next_locationкаталогкаталогкаталогпоказать вÑе доÑтупные метаданныевывеÑти больше информации о каждом заданииDNне запрашивать подтверждениÑне Ñобирать информацию, а лишь конвертировать формат Ñ…Ñ€Ð°Ð½ÐµÐ½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð½Ðµ выводить ÑпиÑок задачне выводить количеÑтво задач в каждом ÑоÑтоÑниине перезаÑылать на тот же реÑурÑне выполнÑть заÑылку: раÑпечатка опиÑÐ°Ð½Ð¸Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸ на Ñзыке, приемлемом назначениемзарегиÑтрировать файл, не Ð¿ÐµÑ€ÐµÐ´Ð°Ð²Ð°Ñ ÐµÐ³Ð¾ - назначением должен быть мета-URL.не пытатьÑÑ Ñ„Ð¾Ñ€Ñировать паÑÑивный ÑпоÑоб передачи данныхне запрашивать пароль учётных данных при получении Ñтих данных Ñ Ñервера MyProxy. Это возможно при уÑловии, еÑли данные были Ñохранены методом PUT на Ñервере MyProxy без паролÑ, иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ Ð¾Ð¿Ñ†Ð¸ÑŽ -R (--retrievable_by_cert) при выполнении операции PUT в отношении Ñервера Myproxy. 
Эта Ð¾Ð¿Ñ†Ð¸Ñ Ð¸ÑпользуетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ командой GET в отношении Ñервера Myproxy.передачакаталог загрузки (подкаталог задачи будет Ñоздан в Ñтом каталоге)загрузокигнорируетÑÑecho: ДоÑтуп закрытпуÑÑ‚Ð°Ñ Ð½Ð°Ð³Ñ€ÑƒÐ·ÐºÐ° на входеÑледующий Ñлемент в цепи пуÑтойдоÑтигнут конец Ñтроки при обработке типа Ñлемента имени Ñубъекта #%dошибка Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ Ñ‡Ð¸Ñла из bin в BIGNUMошибка Ð¿Ñ€ÐµÐ¾Ð±Ñ€Ð°Ð·Ð¾Ð²Ð°Ð½Ð¸Ñ Ñерийного номера в формат ASN.1Ñимвол выхода в конце ÑтрокивыходÑбой Ð¾Ð±Ð½Ð°Ñ€ÑƒÐ¶ÐµÐ½Ð¸Ñ Ð¿ÑƒÑ‚Ð¸ к модулÑмÐе удалоÑÑŒ обработать личные данные клиентаÑбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ ÐºÑƒÑка данныхÑбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ð¼ÐµÑ‚ÐºÐ¸ данныхÑбой при обработке команды наÑтройки: %s %sфайлфайл %s недоÑтупенÐазвание файлаÑлишком длинное Ð¸Ð¼Ñ Ñ„Ð°Ð¹Ð»Ð°Ñбой ÑÐ¾Ð·Ð´Ð°Ð½Ð¸Ñ ÑƒÐ·Ð»Ð° файла: %sфайлпуть к Ñ„Ð°Ð¹Ð»ÑƒÐ³Ð¾Ñ‚Ð¾Ð²Ð¾Ð¿Ñ€Ð¸Ð½ÑƒÐ´Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ° (перезапиÑать ÑущеÑтвующий каталог задачи)Ð¿Ñ€Ð¸Ð½ÑƒÐ´Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð¼Ð¸Ð³Ñ€Ð°Ñ†Ð¸Ñ, игнорируетÑÑ Ñбой прерываниÑПараметр forcedefaultvoms пуÑтошибка при выполнении ÑиÑтемного вызова forkформатчерез Ñледующие точки входа:Сбой операции fsync на файле %s: %sftp_check_callbackftp_complete_callback: ошибка: %sftp_complete_callback: уÑпехftp_get_complete_callback: Сбой Ð¿Ð¾Ð»ÑƒÑ‡ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð° ftpftp_get_complete_callback: уÑпехftp_put_complete_callback: уÑпехftp_read_callback: ошибка Globus: %sftp_read_callback: задержанный блок данных: %llu %lluftp_read_callback: Ñбой: %sftp_read_callback: уÑпехftp_read_callback: уÑпех - offset=%u, length=%u, eof=%u, allow oof=%uftp_read_callback: избыток неверных неупорÑдоченных блоковftp_read_callback: неверные неупорÑдоченные данные: %llu != %lluftp_read_thread: ошибка Globus: %sftp_read_thread: Ñбой обратного вызова данных - прерывание: %sftp_read_thread: выходftp_read_thread: Ñбой при региÑтрации буфера Globus - попробуем попозже: %sftp_read_thread: Ñбой региÑтрации буферовftp_read_thread: Ñбой ÑброÑа буферовftp_read_thread: Ñбой ÑброÑа буферов - утечкаftp_read_thread: Ñбой for_read - прерывание: %sftp_read_thread: получение и региÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð±ÑƒÑ„ÐµÑ€Ð¾Ð²ftp_read_thread: Ñлишком много Ñбоев региÑтрации - отмена: %sftp_read_thread: ожидание разблокировки буферовftp_read_thread: ожидание конца файлаftp_write_callback: Ñбой: %sftp_write_callback: уÑпех %sftp_write_thread: ошибка Globus: %sftp_write_thread: Ñбой обратного вызова данных - прерываниеftp_write_thread: неупорÑдоченные данные в поточном режиме: %llu != %lluftp_write_thread: выходftp_write_thread: Ñбой ÑброÑа буферов - утечкаftp_write_thread: Ñбой for_write - прерываниеftp_write_thread: получение и региÑÑ‚Ñ€Ð°Ñ†Ð¸Ñ Ð±ÑƒÑ„ÐµÑ€Ð¾Ð²ftp_write_thread: избыток неупорÑдоченных блоков в поточном режимеftp_write_thread: ожидание разблокировки буферовftp_read_thread: ожидание конца файлаftp_write_thread: ожидание Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð¸Ñ Ð¿ÐµÑ€ÐµÐ´Ð°Ñ‡Ð¸Ð¡Ð±Ð¾Ð¹ gfal_close: %sСбой gfal_closedir: %sСбой в gfal_listxattr, невозможно получить информацию о копиÑÑ…: %sСбой в gfal_mkdir (%s), вÑÑ‘ же попытаемÑÑ Ð·Ð°Ð¿Ð¸ÑатьСбой gfal_mkdir: %sСбой gfal_open: %sСбой gfal_opendir: %sСбой gfal_read: %sСбой gfal_rename: %sСбой gfal_rmdir: %sСбой gfal_stat: %sСбой gfal_unlink: %sСбой gfal_write: %sglobalid задан как %sglobus_ftp_client_operationattr_set_authorization: ошибка: %sgm-delegations-converter преобразовывает формат базы данных делегированиÑ.gm-jobs выводит информацию о текущих заданиÑÑ… в ÑиÑтеме.gm-kick принудительно запуÑкает цикл A-REX в ÑоответÑтвии Ñ ÑƒÐºÐ°Ð·Ð°Ð½Ð½Ñ‹Ð¼ управлÑющим файлом. 
ЕÑли файл не указан, иÑпользуетÑÑ ÑƒÐ¿Ñ€Ð°Ð²Ð»Ñющий каталог из файла наÑтроек.Значение gmetric_bin_path пуÑто в arc.conf (никогда не должно ÑлучатьÑÑ, должно иÑпользоватьÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ðµ по умолчанию)group<:role>. Ð£ÐºÐ°Ð·Ð°Ð½Ð½Ð°Ñ Ð¿Ð¾ÑледовательноÑть атрибутов Пример: --order /knowarc.eu/coredev:Developer,/knowarc.eu/testers:Tester или: --order /knowarc.eu/coredev:Developer --order /knowarc.eu/testers:Tester Имейте в виду, что при иÑпользовании неÑкольких Ñерверов VOMS не имеет ÑмыÑла указывать поÑледовательноÑть атрибутовГоловной узел задан как %shostname[:port] Ñервера MyProxyчаÑчаÑачаÑовIDеÑли назначением задан индекÑирующий ÑервиÑ, отличный от иÑточника, и Ñто назначение уже зарегиÑтрировано, копирование обычно не допуÑкаетÑÑ. Ð’ Ñлучае же, когда указана Ñта опциÑ, иÑточник раÑÑматриваетÑÑ ÐºÐ°Ðº Ð½ÐµÐ¾Ñ„Ð¸Ñ†Ð¸Ð°Ð»ÑŒÐ½Ð°Ñ ÐºÐ¾Ð¿Ð¸Ñ Ð·Ð°Ñ€ÐµÐ³Ð¸Ñтрированного файла, и копирование производитÑÑ ÐºÐ°Ðº в Ñлучае тиражированиÑ. При иÑпользовании Ñтой опции пропуÑкаетÑÑ Ñверка завершённых передач.недопуÑтимый атрибут команды allowactvedata: %sнедопуÑтимый атрибут команды allowencryption: %sвходÑщее Ñообщение не в формате SOAPindexинформировать об изменениÑÑ… в заданной задаче (допуÑкаетÑÑ Ð¼Ð½Ð¾Ð³Ð¾ÐºÑ€Ð°Ñ‚Ð½Ð¾Ðµ иÑпользование)init_handle: Ñбой в globus_ftp_client_handlea_initinit_handle: Ñбой в globus_ftp_client_handleattr_initinit_handle: Ñбой в globus_ftp_client_handleattr_set_gridftp2init_handle: Ñбой в globus_ftp_client_operationattr_initinit_handle: Ñбой globus_ftp_client_operationattr_set_allow_ipv6init_handle: Ñбой globus_ftp_client_operationattr_set_delayed_pasvinmsg.Attributes().getAll() = %s inmsg.Auth().Export(arc.SecAttr.ARCAuth) = %sне задана Ð¾Ð¿ÐµÑ€Ð°Ñ†Ð¸Ñ Ð½Ð° вводеввод не в формате SOAPinputcheck проверÑет, доÑтупны ли входные файлы, указанные в опиÑании задачи, иÑÐ¿Ð¾Ð»ÑŒÐ·ÑƒÑ Ð¿Ð°Ñ€Ð°Ð¼ÐµÑ‚Ñ€Ñ‹ доÑтупа в указанном файле доверенноÑти.вмеÑто ÑоÑтоÑÐ½Ð¸Ñ Ð±ÑƒÐ´ÑƒÑ‚ выведены только Ñрлыки указанных задаччиÑлоинтерфейÑÐ˜Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð·Ð°Ð´Ð°Ð½ как %sinterfacenameÐедейÑтвительный Ñрлык задачи: %sзадача %s (будет) уÑпешно очищеназадача %s уÑпешно прерваназадача %s уÑпешно перезапущенаID заданиÑjob_description_file [proxy_file]файл, Ñодержащий опиÑание запуÑкаемой задачиÑтрока, ÑÐ¾Ð´ÐµÑ€Ð¶Ð°Ñ‰Ð°Ñ Ð¾Ð¿Ð¸Ñание запуÑкаемой задачиÑохранÑть файлы на Ñервере (не удалÑть)уровеньперечиÑление запиÑи: %sперечиÑление доÑтупных подключаемых модулейпоказать ÑпиÑок доÑтупных модулей (поддерживаемые протоколы)list_files_ftp: Ð¿Ñ€Ð¾Ð²ÐµÑ€Ð¾Ñ‡Ð½Ð°Ñ Ñумма %slist_files_ftp: не удалоÑÑŒ определить Ð²Ñ€ÐµÐ¼Ñ Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ Ñ„Ð°Ð¹Ð»Ð°list_files_ftp: не удалоÑÑŒ определить размер файлаlist_files_ftp: Ñбой globus_ftp_client_cksmlist_files_ftp: Ñбой globus_ftp_client_modification_timelist_files_ftp: Ñбой в globus_ftp_client_sizelist_files_ftp: поиÑк проверочной Ñуммы %slist_files_ftp: определение времени Ð¸Ð·Ð¼ÐµÐ½ÐµÐ½Ð¸Ñ %slist_files_ftp: поиÑк размера %slist_files_ftp: Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ контрольных Ñуммах недоÑтупнаlist_files_ftp: не получена Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ контрольных Ñуммахlist_files_ftp: Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð¾ контрольных Ñуммах не поддерживаетÑÑlist_files_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿Ñ€Ð¾Ð²ÐµÑ€Ð¾Ñ‡Ð½Ð¾Ð¹ Ñуммыlist_files_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ modification_timelist_files_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ñ€Ð°Ð·Ð¼ÐµÑ€Ð°Ñбой Ñ‡Ñ‚ÐµÐ½Ð¸Ñ Ñерийного номера из %sСбой local_pasvСбой local_portСбой local_spaslocalid задан как %sраÑширенный формат (Ð´Ð¾Ð¿Ð¾Ð»Ð½Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ)пуÑтое значение lrmsПараметр mail пуÑÑ‚Ñоздавать 
родительÑкие директории по мере необходимоÑтиmake_abort: выходmake_abort: запуÑкmake_abort: ожидание ÑброÑа Ñемафора прерываниÑошибка mallocМета-файл %s пуÑтминутаминутыминутÑбой mkdir: %smkdir_ftp: ÑоздаётÑÑ %smkdir_ftp: иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ ÐºÐ¾Ð¼Ð°Ð½Ð´Ñ‹ mkdirназвание модулÑ: %snимÑnew_payload %sвызван Ñледующий Ñлемент в цепиÑледующий Ñлемент в цепи возвратил пуÑтую нагрузкуÑледующий Ñлемент цепи возвратил ÑÑ‚Ð°Ñ‚ÑƒÑ Ð¾ÑˆÐ¸Ð±ÐºÐ¸Ñледующий Ñлемент цепи возвратил ÑÑ‚Ð°Ñ‚ÑƒÑ Ð¾ÑˆÐ¸Ð±ÐºÐ¸: %sÑледующий Ñлемент в цепи возвратил пуÑтую нагрузкуÑледующий Ñлемент в цепи возвратил недопуÑтимую или неподдерживаемую нагрузкуÑледующий Ñлемент в цепочке возвратил пуÑтую нагрузкуÑледующий Ñлемент в цепи возвратил неопознанную нагрузку - пропуÑкаетÑÑчиÑлоколичеÑтво попыток передачи файлаold_url new_urlполучить информацию только о тех вычиÑлительных реÑурÑах, которые поддерживают указанный Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð´Ð»Ñ Ð·Ð°Ñылки задач. ДопуÑтимые значениÑ: org.nordugrid.gridftpjob или org.nordugrid.gridftp, org.ogf.glue.emies.activitycreation и org.nordugrid.internalвыбрать лишь задачи, заÑланные на Ñтот реÑурÑвыполнить дейÑтвие лишь над задачами в указанном ÑоÑтоÑниииÑпользовать только указанный Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñ Ð´Ð»Ñ Ð·Ð°Ñылки. ДопуÑтимые значениÑ: org.nordugrid.gridftpjob или org.nordugrid.gridftp, org.ogf.glue.emies.activitycreation и org.nordugrid.internalоткрытие: Ñмена владельца Ð´Ð»Ñ %s, %i, %iоткрытие: владелец: %i %iобработать рекурÑивнорекурÑивное иÑполнение до указанного уровнÑпорÑдокoutpayload %sвывод не в формате SOAPзапиÑать указанные Ñлементы (ÑпиÑок задач, идентификаторы и токены делегированиÑ) в Ñ„Ð°Ð¹Ð»Ð˜Ð¼Ñ Ñубъекта владельца задано как %sФайл Ñертификата p12 пуÑтназначение паролÑ=иÑточник паролÑпутьпуть к локальному кÑшу (иÑпользуетÑÑ Ð´Ð»Ñ Ð·Ð°Ð¿Ð¸Ñи файла в кÑш)путь к файлу наÑтроек Ñерверов VOMSпуть к файлу Ñертификата, который может быть в формате PEM, DER, или PKCS12путь к закрытому ключу; еÑли Ñертификат указан в формате PKCS12, закрытый ключ не нуженпуть к файлу доверенноÑтипуть к корневому каталогу Ñ Ñ„Ð°Ð¹Ð»Ð°Ð¼Ð¸ VOMS *.lsc, иÑпользуетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ клиентом VOMSпуть к каталогу Ñ Ð´Ð¾Ð²ÐµÑ€Ñемыми Ñертификатами, иÑпользуетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ клиентом VOMSвыполнить Ñтороннюю переÑылку, когда назначение закачивает файл из иÑточника (доÑтупно только Ñ Ð¼Ð¾Ð´ÑƒÐ»ÐµÐ¼ GFAL)физичеÑкий Ð°Ð´Ñ€ÐµÑ Ð´Ð»Ñ Ð·Ð°Ð¿Ð¸Ñи, еÑли в качеÑтве Ð½Ð°Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ ÑƒÐºÐ°Ð·Ð°Ð½ каталог реÑурÑов. Должен быть указан Ð´Ð»Ñ ÐºÐ°Ñ‚Ð°Ð»Ð¾Ð³Ð¾Ð², не генерирующих физичеÑкие адреÑа автоматичеÑки. 
[binary data: remainder of a compiled GNU gettext message catalog (.gmo) containing the Russian translations of the NorduGrid ARC messages]
nordugrid-arc-6.14.0/po/PaxHeaders.30264/sv.gmo0000644000000000000000000000013214152153477017120 xustar000000000000000030 mtime=1638455103.729627497 30 atime=1638455103.725627437 30 ctime=1638455103.897630021 nordugrid-arc-6.14.0/po/sv.gmo0000644000175000002070000161006014152153477017111 0ustar00mockbuildmock00000000000000
[binary data: po/sv.gmo — compiled GNU gettext message catalog (Swedish); .gmo header, offset and hash tables, followed by the embedded English msgid and Swedish msgstr string tables]
%iDeactivating modulesDefault CPU time: %sDefault INTERNAL client constructorDefault Storage Service: %sDefault broker (%s) is not available. When using %s a broker should be specified explicitly (-b option).Default wall-time: %sDefault: %sDelegateCredentialsInit failedDelegateProxy failedDelegated credential from delegation service: %sDelegated credential identity: %sDelegation ID: %sDelegation authorization failedDelegation authorization passedDelegation getProxyReq request failedDelegation handler is not configuredDelegation handler with delegatee role endsDelegation handler with delegatee role starts to processDelegation handler with delegator role starts to processDelegation putProxy request failedDelegation role not supported: %sDelegation service: %sDelegation to ARC delegation service failedDelegation to gridsite delegation service failedDelegation type not supported: %sDelegationStore: PeriodicCheckConsumers failed to remove old delegation %s - %sDelegationStore: PeriodicCheckConsumers failed to resume iteratorDelegationStore: TouchConsumer failed to create file %sDelete errorDeleted but still have locations at %sDelivery received new DTR %s with source: %s, destination: %sDelivery service at %s can copy from %sDelivery service at %s can copy to %sDestination URL missingDestination URL not supported: %sDestination URL not valid: %sDestination file is in cacheDestination is invalid URLDestination is not index service, skipping replica registrationDestination is not ready, will wait %u secondsDestination: %sDestroying handleDestructor with dlclose (%s)Dir %s allowed at service %sDirectory %s removed successfullyDirectory %s to store accounting database has been created.Directory listing failedDirectory of trusted CAs is not specified/found; Using current path as the CA directoryDirectory size is larger than %i files, will have to call multiple timesDirectory size is too large to list in one call, will have to call multiple timesDirectory: %sDisconnect: Abort timed out after %d msDisconnect: Closing timed out after %d msDisconnect: Data close timed out after %d msDisconnect: Failed aborting - ignoring: %sDisconnect: Failed closing - ignoring: %sDisconnect: Failed destroying handle: %s. 
Can't handle such situation.Disconnect: Failed quitting - ignoring: %sDisconnect: Quitting timed out after %d msDisconnect: globus handle is stuck.Disconnect: handle destroyed.Disconnect: waiting for globus handle to settleDo sorting using user created python brokerDoesn't support advance reservationsDoesn't support bulk SubmissionDoesn't support preemptionDoing CREAM requestDoing EMI requestDownloading job: %sDowntime ends: %sDowntime starts: %sDumping job description aborted: Unable to load broker %sDuplicate replica found in LFC: %sEACCES Error opening lock file %s: %sECDH parameters appliedEEXIST: DB_CREATE and DB_EXCL were specified and the database exists.EINVALEMI request generation failed: %sEMIES:CancelActivity: job %s - %sEMIES:CreateActivity finished successfullyEMIES:CreateActivity: max jobs total limit reachedEMIES:CreateActivity: no job description foundEMIES:CreateActivity: request = %sEMIES:CreateActivity: response = %sEMIES:CreateActivity: too many activity descriptionsEMIES:GetActivityInfo: job %s - failed to retrieve GLUE2 informationEMIES:GetActivityStatus: job %s - %sEMIES:NotifyService: job %s - %sEMIES:PauseActivity: job %s - %sEMIES:RestartActivity: job %s - %sEMIES:ResumeActivity: job %s - %sEMIES:WipeActivity: job %s - %sENOENT: The file or directory does not exist, Or a nonexistent re_source file was specified.EPSV failedEPSV failed: %sERROR: %sERROR: Dumping job description aborted because no suitable resources were found for the test-jobERROR: Failed to retrieve informationERROR: Failed to retrieve information from the following endpoints:ERROR: Failed to write job information to file (%s)ERROR: Job submission aborted because no resource returned any informationERROR: One or multiple job descriptions was not submitted.ERROR: Test aborted because no suitable resources were found for the test-jobERROR: Unable to load broker %sERROR: VOMS configuration file %s contains too long line(s). Max supported length is %i characters.ERROR: VOMS configuration file %s contains too many lines. Max supported number is %i.ERROR: VOMS configuration line contains too many tokens. Expecting 5 or 6. Line was: %sERROR: failed to read file %s while scanning VOMS configuration.ERROR: file tree is too deep while scanning VOMS configuration. 
Max allowed nesting is %i.ES:CreateActivity: Failed to create new job: %sEchoService (python) 'Process' calledEchoService (python) constructor calledEchoService (python) destructor calledEchoService (python) got: %s EchoService (python) has prefix %(prefix)s and suffix %(suffix)sEchoService (python) request_namespace: %sEchoService (python) thread test startingEchoService (python) thread test, iteration %(iteration)s %(status)sElement "%s" in the profile ignored: the "inidefaultvalue" attribute cannot be specified when the "inisections" and "initag" attributes have not been specified.Element "%s" in the profile ignored: the value of the "inisections" attribute cannot be the empty string.Element "%s" in the profile ignored: the value of the "initag" attribute cannot be the empty string.Element "%s" in the profile ignored: the value of the "initype" attribute cannot be the empty string.Element validation according to GLUE2 schema failed: %sEmpty filename returned from FileCacheEmpty input payload!Empty job description source stringEmpty payload!Empty stringEncrypted: %sEnd of comment not foundEnd of double quoted string not foundEnd of single quoted string not foundEnd of user delimiter (%s) quoted string not foundEndpoint Information:Entry in EGIIS is missing one or more of the attributes 'Mds-Service-type', 'Mds-Service-hn', 'Mds-Service-port' and/or 'Mds-Service-Ldap-suffix'Error accessing cache file %s: %sError adding communication interface in %s. Maybe another instance of A-REX is already running.Error adding communication interface in %s. Maybe permissions are not suitable.Error creating cacheError creating cache. Stale locks may remain.Error creating directory %s: %sError creating lock file %s: %sError creating required directories for %sError creating required dirs: %sError creating temporary file %s: %sError detected while parsing this ACError due to expiration of provided credentialsError during file validation. Can't stat file %s: %sError during file validation: Local file size %llu does not match source file size %llu for file %sError evaluating profileError from BDB: %sError from BDB: %s: %sError from SQLite: %sError from SQLite: %s: %sError getting info from statvfs for the path %s: %sError getting list of files (in list)Error in cache processing, will retry without cachingError in caching procedureError in lock file %s, even though linking did not return an errorError initialising X509 storeError initiating delegation database in %s. Maybe permissions are not suitable. 
Returned error is: %s.Error linking cache file to %s.Error linking tmp file %s to lock file %s: %sError listing lock file %s: %sError loading generated configurationError looking up attributes of cache meta file %s: %sError looking up space tokens matching description %sError number in store context: %iError opening accounting databaseError opening lock file %s in initial check: %sError opening meta file %sError opening meta file for writing %sError parsing the internally set executables attribute.Error pinging delivery service at %s: %s: %sError reading info from file %s:%sError reading lock file %s: %sError reading meta file %s: %sError registering replica, moving to end of data stagingError removing cache file %s: %sError switching uidError to flush output payloadError when extracting public key from requestError when loading the extension config file: %sError when loading the extension config file: %s on line: %dError while reading dir %s: %sError while reading fileError with cache configurationError with cache configuration: %sError with formatting in lock file %sError with heartbeatfile: %sError with post-transfer destination handling: %sError with source file, moving to next replicaError writing raw certificateError writing srm info file %sError writing to lock file %s: %sError: Service returned a limit higher than or equal to current limit (current: %d; returned: %d)Error: can't open policy file: %sError: failed to set handler for SIGCHLDError: failed to set handler for SIGTERMError: no LDAP query started to %sError: policy location: %s is not a regular fileErrorDescriptionEstimated average waiting time: %sEstimated worst waiting time: %sEvaluator does not support loadable Combining AlgorithmsEvaluator does not support specified Combining Algorithm - %sEvaluator for ArcPDP was not loadedEvaluator for GACLPDP was not loadedEvaluator for XACMLPDP was not loadedExample configuration (%s) not created.Exception while trying to start external process: %sExcessive data received while checking file accessExcluding replica %s matching pattern !%sExecution Target on Computing Service: %sExecution environment does not support inbound connectionsExecution environment does not support outbound connectionsExecution environment is a physical machineExecution environment is a virtual machineExecution environment supports inbound connectionsExecution environment supports outbound connectionsExecutionTarget class is not an objectExitingExiting Generator threadExiting jobs processing threadExpecting Command among argumentsExpecting Command and URL providedExpecting Command module name among argumentsExpecting Command module path among argumentsExpecting Module, Command and URL providedExpecting URL among argumentsExternal request for attention %sExtracted nickname %s from credentials to use for RUCIO_ACCOUNTExtractor[%s] (%s): %s = %sExtractor[%s] (%s): %s contains %sFATAL, ERROR, WARNING, INFO, VERBOSE or DEBUGFTP Job Control: Can't parse host and/or port in response to EPSV/PASV: %sFTP Job Control: Data channel: %d.%d.%d.%d:%dFTP Job Control: Data channel: [%s]:%dFTP Job Control: Data connect write failed: %sFTP Job Control: Data connect write timed out after %d msFTP Job Control: Data write failed: %sFTP Job Control: Data write timed out after %d msFTP Job Control: Failed sending DCAU commandFTP Job Control: Failed sending EPSV and PASV commandsFTP Job Control: Failed sending STOR command: %sFTP Job Control: Failed sending TYPE commandFTP Job Control: Failed to apply local address to data connection: 
%sFTP Job Control: Local port failed: %sFTP Job Control: Local type failed: %sFTP Job Control: Server EPSV response parsing failed: %sFTP Job Control: Server EPSV response port parsing failed: %sFTP Job Control: Server PASV response parsing failed: %sFailed to assign hostname extensionFailed allocating memory for handleFailed authenticatingFailed authenticating: %sFailed checking database (%s)Failed checking source replicaFailed checking source replica %s: %sFailed checking source replica: %sFailed cleaning up destination %sFailed configuration initializationFailed configuration initialization.Failed connecting to server %s:%dFailed destroying handle: %s. Can't handle such situation.Failed downloading %s to %sFailed downloading %s to %s, destination already existsFailed downloading %s to %s, unable to remove existing destinationFailed in globus_cond_initFailed in globus_ftp_control_handle_initFailed in globus_mutex_initFailed linking cache file to %sFailed locating credentialsFailed looking up attributes of cached file: %sFailed preparing job descriptionFailed preparing job description to target resourcesFailed processing A-REX configurationFailed processing authorization group %sFailed processing user mapping command: %s %sFailed reading configurationFailed reading control directory: %sFailed reading control directory: %s: %sFailed reading dataFailed reading list of filesFailed reading local informationFailed retrieving information for job: %sFailed retrieving job description for job: %sFailed running mailerFailed sending CWD command for credentials renewalFailed sending CWD command for job cancellingFailed sending CWD command for job cleaningFailed sending DELE command for job cancellingFailed sending RMD command for job cleaningFailed setting file owner: %sFailed submitting job descriptionFailed to abort data connection - ignoring and recoveringFailed to abort transfer of ftp file: %sFailed to accept SSL connectionFailed to accept connection requestFailed to accept delegationFailed to accept new file/destinationFailed to access proxy of given job id %s at %sFailed to acquire A-REX's configurationFailed to acquire delegation contextFailed to acquire lock on cache meta file %sFailed to acquire lock on file %sFailed to acquire source: %sFailed to activate Jobs Processing object, exiting Grid Manager threadFailed to add '%s' URL (interface type %s) into the accounting database Endpoints tableFailed to add '%s' into the accounting database %s tableFailed to add Independent OIDFailed to add RFC proxy OIDFailed to add VOMS AC extension. 
Your proxy may be incomplete.Failed to add VOMS AC sequence OIDFailed to add anyLanguage OIDFailed to add certificate and keyFailed to add certificate to token or databaseFailed to add extension into credential extensionsFailed to add inheritAll OIDFailed to add issuer's extension into proxyFailed to add key usage extensionFailed to add proxy certificate information extensionFailed to add voms AC extensionFailed to allocate certificate trustFailed to allocate item for certificate dataFailed to allocate memory for bufferFailed to allocate memory for certificate subject while matching policy.Failed to allocate p12 contextFailed to apply DH parametersFailed to apply ECDH parametersFailed to apply local address to data connectionFailed to authenticate SAML Token inside the incoming SOAPFailed to authenticate Username Token inside the incoming SOAPFailed to authenticate X509 Token inside the incoming SOAPFailed to authenticate to PKCS11 slot %sFailed to authenticate to key databaseFailed to authenticate to token %sFailed to bind socket for %s:%s(%s): %sFailed to bind socket for TCP port %s(%s): %sFailed to bind socket(%s): %sFailed to bind to ldap server (%s)Failed to call PORT_NewArenaFailed to cancel transfer request: %sFailed to cancel: %sFailed to cancel: No SOAP responseFailed to cast PayloadSOAP from incoming payloadFailed to cast PayloadSOAP from outgoing payloadFailed to change mapping stack processing policy in: %s = %sFailed to change owner of symbolic link %s to %iFailed to change owner of temp proxy at %s to %i:%i: %sFailed to change permissions on %s: %sFailed to change permissions or set owner of hard link %s: %sFailed to check %sFailed to clean up file %s: %sFailed to close, deleting clientFailed to communicate to delegation endpoint.Failed to complete writing to destinationFailed to connect for credential renewalFailed to connect for job cancellingFailed to connect for job cleaningFailed to connect to %s(%s):%iFailed to connect to %s(%s):%i - %sFailed to connect to server %s:%dFailed to contact PDP server: %sFailed to convert ASCII to DERFailed to convert EVP_PKEY to PKCS8Failed to convert GSI credential to GSS credential (major: %d, minor: %d)Failed to convert GSI credential to GSS credential (major: %d, minor: %d):%s:%sFailed to convert PrivateKeyInfo to EVP_PKEYFailed to convert security information to ARC policyFailed to convert security information to ARC requestFailed to convert security information to XACML requestFailed to copy %s: %sFailed to copy file %s to %s: %sFailed to copy input file: %s to path: %sFailed to create DTR dump threadFailed to create OTokens security attributesFailed to create OpenSSL object %s %s - %u %sFailed to create SOAP containersFailed to create X509 certificate with NSSFailed to create any cache directories for %sFailed to create cache directory for file %s: %sFailed to create cache meta file %sFailed to create certificate requestFailed to create control directory %sFailed to create directoryFailed to create directory %sFailed to create directory %s! Skipping job.Failed to create directory %s: %sFailed to create export contextFailed to create file %s: %sFailed to create file in %sFailed to create hard link from %s to %s: %sFailed to create input SOAP containerFailed to create key or certificate safeFailed to create ldap bind thread (%s)Failed to create link: %s. 
Will not use mapped URLFailed to create path lengthFailed to create policy languageFailed to create session directory %sFailed to create socket for connecting to %s(%s):%d - %sFailed to create socket for listening at %s:%s(%s): %sFailed to create socket for listening at TCP port %s(%s): %sFailed to create socket(%s): %sFailed to create subject nameFailed to create symbolic link from %s to %s: %sFailed to create temp proxy at %s: %sFailed to create threadFailed to create xrootd copy job: %sFailed to create/open file %s: %sFailed to decode trust stringFailed to delegate credentials to server - %sFailed to delegate credentials to server - no delegation interface foundFailed to delete %sFailed to delete %s but will still try to copyFailed to delete certificateFailed to delete delivery object or deletion timed outFailed to delete destination, retry may failFailed to delete logical fileFailed to delete meta-informationFailed to delete physical fileFailed to delete private keyFailed to delete private key and certificateFailed to delete replica %s: %sFailed to delete stale cache file %s: %sFailed to disconnect after credentials renewalFailed to disconnect after job cancellingFailed to disconnect after job cleaningFailed to duplicate X509 structureFailed to duplicate extensionFailed to enable IPv6Failed to enable IPv6: %sFailed to encode PKCS12Failed to encode certificateFailed to encode the certificate request with DER formatFailed to establish SSL connectionFailed to establish connection: %sFailed to export X509 certificate from NSS DBFailed to export private keyFailed to extract VOMS nickname from proxyFailed to extract credential informationFailed to fetch data from %s accounting database tableFailed to fetch data from accounting database Endpoints tableFailed to finalize reading from sourceFailed to finalize writing to destinationFailed to find CA certificatesFailed to find certificate and/or private key or files have improper permissions or ownership.Failed to find certificates by nickname: %sFailed to find extensionFailed to find issuer certificate for proxy certificateFailed to find metadata info on %s for determining file or directory deleteFailed to generate EC keyFailed to generate SAML Token for outgoing SOAPFailed to generate Username Token for outgoing SOAPFailed to generate X509 Token for outgoing SOAPFailed to generate X509 request with NSSFailed to generate public/private key pairFailed to get DN information from .local file for job %sFailed to get TCP socket options for connection to %s(%s):%d - timeout won't work - %sFailed to get certificate from certificate fileFailed to get credentialFailed to get ftp fileFailed to get initiate GFAL2 parameter handle: %sFailed to get initiate new GFAL2 context: %sFailed to get load average: %sFailed to get private keyFailed to get public keyFailed to get public key from RSA objectFailed to get public key from X509 objectFailed to identify grid-manager config fileFailed to import X509 certificate into NSS DBFailed to import certificate from file: %sFailed to import private keyFailed to import private key from file: %sFailed to initialize LCASFailed to initialize LCMAPSFailed to initialize OpenSSL libraryFailed to initialize PKCS12 file: %sFailed to initialize X509 structureFailed to initialize accounting databaseFailed to initialize extensions member for CredentialFailed to initialize main Python threadFailed to initialize the credential configurationFailed to initiate cacheFailed to initiate client connectionFailed to initiate delegation 
credentialsFailed to insert AAR into the database for job %sFailed to limit socket to IPv6 at %s:%s - may cause errors for IPv4 at same portFailed to limit socket to IPv6 at TCP port %s - may cause errors for IPv4 at same portFailed to limit socket to IPv6: %sFailed to listen at %s:%s(%s): %sFailed to listen at TCP port %s(%s): %sFailed to listen on socket(%s): %sFailed to load client configurationFailed to load extension section: %sFailed to load grid-manager config fileFailed to load grid-manager config file from %sFailed to load grid-manager configfileFailed to load plugin for URL %sFailed to load policy evaluator for policy of job %sFailed to load private keyFailed to load service configurationFailed to load service configuration from any default config fileFailed to load service configuration from file %sFailed to load service side MCCsFailed to lock arccredential library in memoryFailed to lock arccrypto library in memoryFailed to lock delegated credentials: %sFailed to make symbolic link %s to %s : %sFailed to move %s to %s: %sFailed to move file %s to %sFailed to new arenaFailed to notify serviceFailed to obtain OpenSSL identifier for %sFailed to obtain bytes transferred: %sFailed to obtain delegation locks for cleaning orphaned locksFailed to obtain information about fileFailed to obtain listing from FTP: %sFailed to obtain local address for %s:%s - %sFailed to obtain local address for port %s - %sFailed to obtain local address: %sFailed to obtain lock on cache file %sFailed to obtain own address: %sFailed to obtain resource description: %sFailed to obtain stat from FTP: %sFailed to obtain state of jobFailed to obtain valid stagein URL for input filesFailed to open %s for reading: %sFailed to open %s, trying to create parent directoriesFailed to open data channelFailed to open directory %s: %sFailed to open file %sFailed to open file with DH parameters for readingFailed to open heartbeat file %sFailed to open input certificate file %sFailed to open log file %sFailed to open log file: %sFailed to open output file '%s'Failed to open p12 fileFailed to open stdio channel %dFailed to open stdio channel %sFailed to output the certificate request as ASCII formatFailed to output the certificate request as DER formatFailed to parse ACIX response: %sFailed to parse HTTP headerFailed to parse Rucio response: %sFailed to parse SAML Token from incoming SOAPFailed to parse Username Token from incoming SOAPFailed to parse VOMS command: %sFailed to parse X509 Token from incoming SOAPFailed to parse certificate request from CSR file %sFailed to parse command line optionsFailed to parse configuration file %sFailed to parse remote address %sFailed to parse requested VOMS lifetime: %sFailed to parse requested VOMS server port number: %sFailed to parse user policy for job %sFailed to postregister destination %sFailed to pre-clean destination: %sFailed to preallocate space for %sFailed to prepare destinationFailed to prepare destination: %sFailed to prepare job descriptionFailed to prepare job description to target resources.Failed to prepare job description.Failed to prepare sourceFailed to prepare source: %sFailed to preregister destination: %sFailed to process A-REX configuration in %sFailed to process VOMS configuration or no suitable configuration lines found.Failed to process configuration in %sFailed to process job: %sFailed to process job: %s - %s %sFailed to process jobs - failed to parse responseFailed to process jobs - wrong response: %uFailed to process security attributes in TLS MCC for 
incoming messageFailed to query AAR database ID for job %sFailed to query ACIX: %sFailed to query state: %sFailed to read attribute %x from private key.Failed to read cache meta file %sFailed to read certificate file: %sFailed to read data from input fileFailed to read database schema file at %sFailed to read file %sFailed to read file with DH parametersFailed to read input certificate fileFailed to read job's ACL for job %s from %sFailed to read job's local description for job %s from %sFailed to read object %s: %sFailed to read private key file: %sFailed to read proxy file: %sFailed to read request from a fileFailed to read request from a stringFailed to recognize own address type (IPv4 or IPv6) - %uFailed to register any bufferFailed to register destination replica: %sFailed to register new file/destination: %sFailed to register plugin for state %sFailed to release GSS credential (major: %d, minor: %d):%s:%sFailed to release completed requestFailed to release lock on cache file %sFailed to release lock on file %sFailed to remove .meta file %s: %sFailed to remove all physical instancesFailed to remove cache per-job dir %s: %sFailed to remove existing hard link at %s: %sFailed to remove existing symbolic link at %s: %sFailed to remove file %s: %sFailed to remove instanceFailed to remove lock on %s. Some manual intervention may be requiredFailed to remove stale lock file %s: %sFailed to remove temporary proxy %s: %sFailed to rename URLFailed to renew proxyFailed to resolve %sFailed to resolve %s (%s)Failed to resolve destination: %sFailed to resolve source: %sFailed to retrieve application data from OpenSSLFailed to retrieve link to TLS stream. Additional policy matching is skipped.Failed to retrieve private key for issuerFailed to run Grid Manager threadFailed to run command: %sFailed to run configuration parser at %s.Failed to run external pluginFailed to run external plugin: %sFailed to send cancel request: %sFailed to send content of bufferFailed to set GFAL2 monitor callback: %sFailed to set GFAL2 transfer timeout, will use default: %sFailed to set INTERNAL endpointFailed to set LFC replicas: %sFailed to set credentials for GridFTP transferFailed to set executable bit on file %sFailed to set executable bit on file %s: %sFailed to set overwrite option in GFAL2: %sFailed to set permissions on: %sFailed to set signature algorithm IDFailed to set the pubkey for X509 object by using pubkey from X509_REQFailed to set up credential delegation with %sFailed to shut down SSL: %sFailed to sign encoded certificate dataFailed to sign proxyFailed to sign the certificate requestFailed to sign the proxy certificateFailed to stage file(s)Failed to start archival threadFailed to start cache clean scriptFailed to start certificate extensionFailed to start data staging threadsFailed to start listening on any address for %s:%sFailed to start listening on any address for %s:%s(IPv%s)Failed to start new DTR for %sFailed to start new thread for monitoring job requestsFailed to start new thread: cache won't be cleanedFailed to start querying the endpoint on %sFailed to start querying the endpoint on %s (unable to create sub-thread)Failed to start reading from source: %sFailed to start thread for communicationFailed to start thread for listeningFailed to start timer thread - timeout won't workFailed to start transfer request: %sFailed to start writing to cacheFailed to start writing to destination: %sFailed to stat session dir %sFailed to stat source %sFailed to store application dataFailed to store ftp 
fileFailed to submit all jobs.Failed to submit all jobs: %sFailed to submit all jobs: %s %sFailed to submit all jobs: %u %sFailed to submit jobFailed to submit job description: %sFailed to submit job description: EMIESFault(%s , %s)Failed to submit job description: UnexpectedError(%s)Failed to switch user id to %d/%dFailed to terminate LCASFailed to terminate LCMAPSFailed to transfer dataFailed to unlock file %s: %s. Manual intervention may be requiredFailed to unlock file with lock %s: %sFailed to unregister pre-registered destination %s. You may need to unregister it manuallyFailed to unregister pre-registered destination %s: %s. You may need to unregister it manuallyFailed to unregister preregistered lfn, You may need to unregister it manuallyFailed to unregister preregistered lfn. You may need to unregister it manuallyFailed to unregister preregistered lfn. You may need to unregister it manually: %sFailed to update AAR in the database for job %sFailed to verify X509 Token inside the incoming SOAPFailed to verify the requestFailed to verify the signature under Failed to verify the signature under Failed to verify the signed certificateFailed to wait for job to allow stage inFailed to write 'local' informationFailed to write RTEs information for the job %sFailed to write authtoken attributes for job %sFailed to write body to output streamFailed to write data transfers information for the job %sFailed to write event records for job %sFailed to write header to output streamFailed to write job information to database (%s)Failed to write object %s: %sFailed to write request into a fileFailed to write request into stringFailed to write signed EEC certificate into a fileFailed to write signed proxy certificate into a fileFailed to write to local job list %sFailed updating timestamp on cache lock file %s for file %s: %sFailed uploading file %s to %s: %sFailed uploading local input filesFailed uploading local input files to %sFailed while finishing reading from sourceFailed while finishing writing to destinationFailed while reading from sourceFailed while transferring dataFailed while waiting for connection requestFailed while waiting for connection to %s(%s):%i - %sFailed while writing to destinationFailed writing ACLFailed writing job descriptionFailed writing local descriptionFailed writing statusFailure in parsing response from server - some information may be inaccurateFailure: %sFeature is not implementedFetch: response body: %sFetch: response code: %u %sFetching resource description from %sFile %s is NEARLINE, will make request to bring onlineFile %s is already cached at %s under a different URL: %s - this file will not be cachedFile %s is already cached at %s under a different URL: %s - will not add DN to cached listFile %s is cached (%s) - checking permissionsFile %s removed successfullyFile '%s' in the 'executables' attribute is not present in the 'inputfiles' attributeFile already exists: %sFile could not be moved to Done stateFile could not be moved to Running state: %sFile delete failed, attempting directory deleteFile delete failed, attempting directory delete for %sFile download failed: %sFile is cacheable, will check cacheFile is currently being cached, will wait %isFile is not accessible %s: %sFile is not accessible: %sFile is not cacheable, skipping cache processingFile is not cacheable, was requested not to be cached or no cache available, skipping cache checkFile is ready! 
TURL is %sFile is smaller than %llu bytes, will use local deliveryFile type is not available, attempting file deleteFileNode: operator= (%s <- %s) %lu <- %luFilePlugin: more unload than loadFilename not returned in Rucio response: %sFiles associated with request token %s aborted successfullyFiles associated with request token %s put done successfullyFiles associated with request token %s released successfullyFileset copy to single object is not supported yetFileset registration is not supported yetFinding existing destination replicasFinishWriting: looking for metadata: %sFinishWriting: obtained checksum: %sFinished successfullyFirst stage of registration to index service failedFirst value of 'inputfiles' attribute (filename) cannot be emptyFirst value of 'outputfiles' attribute (filename) cannot be emptyFor registration source must be ordinary URL and destination must be indexing serviceFor the 1st test job you also have to specify a runtime value with -r (--runtime) option.Force-checking source of cache file %sForcing re-download of file %sFork failed: %sFound %s %s (it was loaded already)Found %s in cacheFound DTR %s for file %s left in transferring state from previous runFound VOMS AC attribute: %sFound a registry, will query it recursively: %sFound existing token for %s in Rucio token cache with expiry time %sFound none or multiple URLs (%s) in ACIX URL: %sFound service endpoint %s (type %s)Found started or successful endpoint (%s)Found suspended endpoint (%s)Found the following jobs:Found the following new jobs:Found unexpected empty lock file %s. Must go back to acquire()Found unfinished DTR transfers. It is possible the previous A-REX process did not shut down normallyFree slots grouped according to time limits (limit: free slots):Free slots: %iFull string not used: %sFunction : %sGACL Auth. request: %sGET: id %s path %sGenerate new X509 request!Generated EMIES target: %sGenerating %s job description outputGenerating EMIES targetsGenerating ceID prefix from hostname automaticallyGenerator startedGeneric errorGet delegated credential from delegation service: %sGet from cache: Cached file is lockedGet from cache: Error in cache configurationGet from cache: File not in cacheGet from cache: Invalid URL %sGet from cache: Looking in cache for %sGet from cache: could not access cached file: %sGet request %s is still in queue, should wait %i secondsGet: there is no job %s - %sGetting current timestamp for BLAH parser log: %sGetting delegation credential from ARC delegation serviceGlobus connection errorGlobus error: %sGlobus handle is stuckGlobus location variable substitution is not supported anymore. 
Please specify path directly.Grid identity is mapped to local identity '%s'HEAD: id %s path %sHER: %sHTTP Error: %d %sHTTP failure %u - %sHTTP with SAML2SSO invocation failedHTTP:PUT %s: put file %s: %sHandle is not in proper state %u/%uHave %i requests to processHave connections: %i, max: %iHead: there is no job %s - %sHealth state info: %sHealth state: %sHealthState of ExecutionTarget (%s) is not OK (%s)Help Options:Helper process start failed: %sHelper program is missingHomogeneous resourceID contains forbidden charactersID: %sINI config file %s does not existINTERNALClient is not initializedId= %s,Type= %s,Issuer= %s,Value= %sIdP return some error message: %sIdentity is %sIdentity name: %sIdentity: %sIf the proxy or certificate/key does exist, you can manually specify the locations via environment variables '%s'/'%s' or '%s', or the '%s'/'%s' or '%s' attributes in the client configuration file (e.g. '%s')If you specify a policy you also need to specify a policy languageIgnoring endpoint (%s), it is already registered in retriever.Ignoring job (%s), already tried and were unable to load JobControllerPluginIgnoring job (%s), the job management URL is unknownIgnoring job (%s), the job status URL is unknownIgnoring job (%s), the management interface name is unknownIgnoring job (%s), the status interface name is unknownIgnoring job (%s), unable to load JobControllerPlugin for %sIgnoring job, the job ID is emptyIllegal URL - closing ] for IPv6 address is followed by illegal token: %sIllegal URL - no closing ] for IPv6 address found: %sIllegal URL - no hostname given: %sIllegal URL - path must be absolute or empty: %sIllegal URL - path must be absolute: %sIllegal jobID specified (%s)Illegal time format: %sImmediate completion expectedImmediate completion expected: %sImmediate completion: %sImplementation name: %sImplementor: %sImproper debug level '%s'In the available CRL the lastUpdate field is not validIn the available CRL, the nextUpdate field is not validIn the configuration profile the 'initype' attribute on the "%s" element has a invalid value "%s".Incoming Message is not SOAPIncompatible options --nolist and --forcelist requestedInconsistent metadataIndependent proxy - no rights grantedInformation endpointInformation item '%s' is not knownInformational document is emptyInit failedInitialized %u-th Python serviceInitiating delegation procedureInput is not SOAPInput is without trailer Input request from a file: Request.xmlInput request from codeInput: metadata: %sInstalled application environments:Interface (%s) specified, submitting only to that interfaceInterface extensions:Interface on endpoint (%s) %s.Interface versions:Interface: %sInterfaceNameInternal transfer method is not supported for %sInvalid DTRInvalid DTR for source %s, destination %sInvalid EffectInvalid HTTP object can't produce resultInvalid ID: %sInvalid ISO duration format: %sInvalid JobDescription:Invalid JobDescription: %sInvalid URL '%s' for input file '%s'Invalid URL '%s' for output file '%s'Invalid URL option syntax in option '%s' for input file '%s'Invalid URL option syntax in option '%s' for output file '%s'Invalid URL option: %sInvalid URL: %sInvalid URL: '%s' in input file '%s'Invalid URL: '%s' in output file '%s'Invalid action value %sInvalid class nameInvalid class name. 
The broker argument for the PythonBroker should be Filename.Class.args (args is optional), for example SampleBroker.MyBrokerInvalid comparison operator '%s' used at 'delegationid' attribute, only "=" is allowed.Invalid comparison operator '%s' used at 'queue' attribute in 'GRIDMANAGER' dialect, only "=" is allowedInvalid comparison operator '%s' used at 'queue' attribute, only "!=" or "=" are allowed.Invalid configuration - no allowed IP address specifiedInvalid configuration - no transfer dirs specifiedInvalid credentials, please check proxy and/or CA certificatesInvalid destination URL %sInvalid download destination path specified (%s)Invalid job descriptionInvalid lock on file %sInvalid log level. Using default %s.Invalid nodeaccess value: %sInvalid old log level. Using default %s.Invalid period string: %sInvalid port number in %sInvalid stage out path specified (%s)Invalid url: %sIssuer CA: %sIssuer: %sIt is impossible to mix ARC6 target selection options with legacy options. All legacy options will be ignored!It is not possible to resubmit jobs without new target information discoveryJWSE::ExtractPublicKey: external jwk keyJWSE::ExtractPublicKey: fetching jwk key from %sJWSE::ExtractPublicKey: jwk keyJWSE::ExtractPublicKey: key parsing errorJWSE::ExtractPublicKey: no supported keyJWSE::ExtractPublicKey: x5c keyJWSE::Input: JWE: not supported yetJWSE::Input: JWS content: %sJWSE::Input: JWS: signature algorithm: %sJWSE::Input: JWS: signature verification failedJWSE::Input: JWS: token too oldJWSE::Input: JWS: token too youngJWSE::Input: header: %sJWSE::Input: token: %sJob %s does not report a resumable stateJob %s failed to renew delegation %s - %s.Job %s failed to renew delegation %s.Job %s has no delegation associated. Can't renew such job.Job %s not foundJob %s: Some downloads failedJob %s: all files downloaded successfullyJob %s: files still downloadingJob ID argument is required.Job cancelling successfulJob cleaning successfulJob database connection established successfully (%s)Job database created successfully (%s)Job deleted: %sJob description file could not be read.Job description language is not specified, unable to output description.Job description languages supported by %s:Job description to be sent to %s:Job description to be sent: %sJob description: %sJob descriptions:Job did not finish successfully. 
Message will not be written to BLAH log.Job download directory from user configuration file: %sJob download directory will be created in present working directory.Job download directory: %sJob failed on service sideJob has not started yet: %sJob information not found in the information system: %sJob list file (%s) doesn't existJob list file (%s) is not a regular fileJob list file cannot be created: %s is not a directoryJob list file cannot be created: The parent directory (%s) doesn't exist.Job migration aborted, no resource returned any informationJob migration aborted, unable to load broker (%s)Job migration failed for job (%s), no applicable targetsJob nr.Job plugin was not initialisedJob resubmission aborted because no resource returned any informationJob resubmission failed: Unable to load broker (%s)Job resubmission summary:Job resuming successfulJob submission summary:Job submission user can't be rootJob submission user: %s (%i:%i)Job submitted with jobid: %sJob timestamp successfully parsed as %sJob: %sJob: %s : Cancel request put and communicated to serviceJob: %s : Cancel request put but failed to communicate to serviceJob: %s : Clean request put and communicated to serviceJob: %s : Clean request put but failed to communicate to serviceJob: %s : ERROR : Failed to put cancel markJob: %s : ERROR : Failed to put clean markJob: %s : ERROR : No local information.Job: %s : ERROR : Unrecognizable stateJobControllerPlugin %s could not be createdJobControllerPlugin plugin "%s" not found.JobDescription class is not an objectJobDescriptionParserPlugin %s could not be createdJobDescriptionParserPlugin plugin "%s" not found.Jobs left to query: %dJobs missing information will not be cleaned!Jobs processed: %d, deleted: %dJobs processed: %d, renewed: %dJobs processed: %d, resumed: %dJobs processed: %d, successfully killed: %dJobs processed: %d, successfully killed: %d, successfully cleaned: %dJobs processed: %d, successfully retrieved: %dJobs processed: %d, successfully retrieved: %d, successfully cleaned: %dJunk at end of RSLJunk in sessiondir commandKill failedKilled with signal: Killing connection due to timeoutLCMAPS did not return any GIDLCMAPS did not return any UIDLCMAPS has getCredentialDataLCMAPS has lcmaps_runLCMAPS returned UID which has no username: %uLCMAPS returned invalid GID: %uLCMAPS returned invalid UID: %uLDAP connection already open to %sLDAP query timed out: %sLDAPQuery: Getting results from %sLDAPQuery: Initializing connection to %s:%dLDAPQuery: Querying %sLIST/MLST failedLIST/MLST failed: %sLanguage (%s) not recognized by any job description parsers.Last stage of registration to index service failedLatitude: %fLdap bind timeout (%s)Left operand for RSL concatenation does not evaluate to a literalLegacy options set for defining targetsLegacyMap: no configurations blocks definedLegacyPDP: ARC Legacy Sec Attribute not recognized.LegacyPDP: there is no %s Sec Attribute defined. 
Probably ARC Legacy Sec Handler is not configured or failed.LegacySecHandler: configuration file not specifiedLibrary : %sLine %d.%d of the attributes returned: %sLinking MCC %s(%s) to MCC (%s) under %sLinking MCC %s(%s) to Plexer (%s) under %sLinking MCC %s(%s) to Service (%s) under %sLinking Plexer %s to MCC (%s) under %sLinking Plexer %s to Plexer (%s) under %sLinking Plexer %s to Service (%s) under %sLinking local fileLinking mapped fileLinking/copying cached fileLinking/copying cached file to %sList failedList functionality is not supported for RESTful VOMS interfaceList functionality is not supported for legacy VOMS interfaceList will stat the URL %sListFiles: looking for metadata: %sListen failedListen finishedListen startedListening on %s:%s(%s)Listening on TCP port %s(%s)Listing jobs succeeded, %d jobs foundListing localjobs succeeded, %d localjobs foundLoadable module %s contains no requested plugin %s of kind %sLoaded %sLoaded %s %sLoaded JobControllerPlugin %sLoaded JobDescriptionParserPlugin %sLoaded MCC %s(%s)Loaded Plexer %sLoaded Service %s(%s)Loaded SubmitterPlugin %sLoading %u-th Python serviceLoading OToken failed - ignoring its presenceLoading Python broker (%i)Loading configuration (%s)Local group %s does not existLocal running jobs: %iLocal suspended jobs: %iLocal user %s does not existLocal waiting jobs: %iLocation URI for file %s is invalidLocation already existsLocations are missing in destination LFC URLLock %s is owned by a different host (%s)Lock file %s doesn't existLongitude: %fLooking for current jobsLooking up URL %sLooking up source replicasMCC %s(%s) - next %s(%s) has no targetMIME is not suitable for SOAP: %sMLSD is not supported - trying NLSTMLST is not supported - trying LISTMain Python thread is not initializedMain Python thread was not initializedMain memory size: %iMalformed ARCHERY record found (endpoint type is not defined): %sMalformed ARCHERY record found (endpoint url is not defined): %sMalformed VOMS AC attribute %sMapfile at %s can't be opened.Mapped to local group id: %iMapped to local group name: %sMapped to local id: %iMapped to running user: %sMapped user's home: %sMapped user:group (%s:%s) not foundMapping %s to %sMapping policy option has empty valueMapping policy:Mapping queue: %sMatch issuer: %sMatch vo: %sMatched nothingMatched: %s %s %sMatched: %s %s %s %sMatchmaking, %s (%d) is %s than %s (%d) published by the ExecutionTarget.Matchmaking, Benchmark %s is not published by the ExecutionTarget.Matchmaking, CacheTotal problem, ExecutionTarget: %d MB (CacheTotal); JobDescription: %d MB (CacheDiskSpace)Matchmaking, Computing endpoint requirement not satisfied. 
ExecutionTarget: %sMatchmaking, ConnectivityIn problem, ExecutionTarget: %s (ConnectivityIn) JobDescription: %s (InBound)Matchmaking, ConnectivityOut problem, ExecutionTarget: %s (ConnectivityOut) JobDescription: %s (OutBound)Matchmaking, ExecutionTarget: %s, OperatingSystem is not definedMatchmaking, ExecutionTarget: %s, CacheTotal is not definedMatchmaking, ExecutionTarget: %s, HealthState is not definedMatchmaking, ExecutionTarget: %s, ImplementationName is not definedMatchmaking, ExecutionTarget: %s, MaxDiskSpace and WorkingAreaFree are not definedMatchmaking, ExecutionTarget: %s, MaxTotalCPUTime or MaxCPUTime not defined, assuming no CPU time limitMatchmaking, ExecutionTarget: %s, MinCPUTime not defined, assuming no CPU time limitMatchmaking, ExecutionTarget: %s, NetworkInfo is not definedMatchmaking, ExecutionTarget: %s, Platform is not definedMatchmaking, ExecutionTarget: %s, RunTimeEnvironment requirements not satisfiedMatchmaking, ExecutionTarget: %s, TotalSlots and MaxSlotsPerJob are not definedMatchmaking, ExecutionTarget: %s, WorkingAreaLifeTime is not definedMatchmaking, ExecutionTarget: %s matches job descriptionMatchmaking, ExecutionTarget: %s, ApplicationEnvironments not definedMatchmaking, ExecutionTarget: %s, MaxMainMemory and MainMemorySize are not definedMatchmaking, ExecutionTarget: %s, MaxVirtualMemory is not definedMatchmaking, ExecutionTarget: %s, OperatingSystem requirements not satisfiedMatchmaking, MainMemorySize problem, ExecutionTarget: %d (MainMemorySize), JobDescription: %d (IndividualPhysicalMemory)Matchmaking, MaxCPUTime problem, ExecutionTarget: %d (MaxCPUTime), JobDescription: %d (TotalCPUTime/NumberOfSlots)Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); JobDescription: %d MB (DiskSpace)Matchmaking, MaxDiskSpace problem, ExecutionTarget: %d MB (MaxDiskSpace); JobDescription: %d MB (SessionDiskSpace)Matchmaking, MaxMainMemory problem, ExecutionTarget: %d (MaxMainMemory), JobDescription: %d (IndividualPhysicalMemory)Matchmaking, MaxSlotsPerJob problem, ExecutionTarget: %d (MaxSlotsPerJob) JobDescription: %d (NumberOfProcesses)Matchmaking, MaxTotalCPUTime problem, ExecutionTarget: %d (MaxTotalCPUTime), JobDescription: %d (TotalCPUTime)Matchmaking, MaxVirtualMemory problem, ExecutionTarget: %d (MaxVirtualMemory), JobDescription: %d (IndividualVirtualMemory)Matchmaking, MinCPUTime problem, ExecutionTarget: %d (MinCPUTime), JobDescription: %d (TotalCPUTime/NumberOfSlots)Matchmaking, NetworkInfo demand not fulfilled, ExecutionTarget do not support %s, specified in the JobDescription.Matchmaking, Platform problem, ExecutionTarget: %s (Platform) JobDescription: %s (Platform)Matchmaking, The %s scaled %s (%d) is %s than the %s (%d) published by the ExecutionTarget.Matchmaking, TotalSlots problem, ExecutionTarget: %d (TotalSlots) JobDescription: %d (NumberOfProcesses)Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB (WorkingAreaFree); JobDescription: %d MB (DiskSpace)Matchmaking, WorkingAreaFree problem, ExecutionTarget: %d MB (WorkingAreaFree); JobDescription: %d MB (SessionDiskSpace)Matchmaking, WorkingAreaLifeTime problem, ExecutionTarget: %s (WorkingAreaLifeTime) JobDescription: %s (SessionLifeTime)Max CPU time: %sMax disk space: %iMax memory: %iMax pre-LRMS waiting jobs: %iMax running jobs: %iMax slots per job: %iMax stage in streams: %iMax stage out streams: %iMax total jobs: %iMax total wall-time: %sMax user running jobs: %iMax virtual memory: %iMax waiting jobs: %iMax wall-time: %sMaximum number of threads running - 
putting new request into queueMemory allocation errorMessage class is not an objectMessage sent to VOMS server %s is: %sMeta info of source and location do not match for %sMetadata of replica and index service differMetadata of source and destination are differentMetadata of source does not match existing destination. Use the --force option to override this.Min CPU time: %sMin wall-time: %sMissing CA subject in Globus signing policyMissing CertificatePath element or ProxyPath element, or is missingMissing Host in Connect elementMissing Port in Connect elementMissing Port in Listen elementMissing VO in configurationMissing argumentMissing audience in configurationMissing authentication informationMissing authgroup name in allowaccessMissing authgroup name in denyaccessMissing cancel-%s-job - job cancellation may not workMissing capabilities in configurationMissing condition subjects in Globus signing policyMissing data in DER encoded PROXY_CERT_INFO_EXTENSION extensionMissing directory in controldir commandMissing file name in [arex/jura] logfileMissing final reply: %sMissing group in configurationMissing information in reply: %sMissing issuer in configurationMissing name of LCAS libraryMissing name of LCMAPS libraryMissing number in maxjobsMissing option for command logreopenMissing or empty CertificatePath elementMissing or empty CertificatePath or CACertificatesDir elementMissing or empty CertificatePath or CACertificatesDir element; will only check the signature, will not do message authenticationMissing or empty KeyPath elementMissing or empty KeyPath element, or is missingMissing or empty PasswordSource elementMissing or empty Username elementMissing path of credentials fileMissing reference to factory and/or module. Currently safe unloading of LDAP DMC is not supported. Report to developers.Missing reference to factory and/or module. It is unsafe to use Globus in non-persistent mode - (Grid)FTP code is disabled. Report to developers.Missing reference to factory and/or module. It is unsafe to use Globus in non-persistent mode - SubmitterPlugin for GRIDFTPJOB is disabled. Report to developers.Missing reference to factory and/or module. It is unsafe to use Xrootd in non-persistent mode - Xrootd code is disabled. Report to developers.Missing response from delegation endpoint.Missing role in configurationMissing scan-%s-job - may miss when job finished executingMissing schema! 
Skipping validation...Missing scope in configurationMissing security object in messageMissing subject in configurationMissing subject nameMissing submit-%s-job - job submission to LRMS may not workModule %s contains no plugin %sModule %s contains no requested plugin %s of kind %sModule %s does not contain plugin(s) of specified kind(s)Module %s failed to reload (%s)Module %s is not an ARC plugin (%s)Module Manager InitModule Manager Init by ModuleManager::setCfgModule name: %sMount point %sMoving to end of data stagingMulti-request operator only allowed at top levelMultiple %s attributes in configuration file (%s)MyProxy failure: %sMyproxy server did not return proxy with VOMS AC includedNEW: put new job: max jobs total limit reachedNEW: put new job: there is no payloadNLST/MLSD failedNLST/MLSD failed: %sNSS database to be accessed: %s NSS initialization failed on certificate database: %sNULL BIO passed to InquireRequestNULL callback for %sName of grami fileName: %sNegative rights are not supported in Globus signing policyNeither source nor destination are index services, will skip resolving replicasNeither source nor destination were staged, skipping releasing requestsNetwork information:New connectionNew endpoint is created (%s) from the one with the unspecified interface (%s)New job accepted with id %sNew limit for vector queries returned by EMI ES service: %dNew proxy expires at %sNew proxy expiry time is not later than old proxy, not renewing proxyNo A-REX config file found in candypond configurationNo Attribute exists, which can deal with type: %sNo Connect element specifiedNo FQAN found. Using None as userFQAN valueNo HTTP response from VOMS serverNo LRMS set in configurationNo RSL content in job description foundNo SOAP responseNo SOAP response from Delivery service %sNo SOAP response from delivery serviceNo active DTR %sNo active job id %sNo arguments are assigned for external processNo authorization response was returnedNo cache directory specifiedNo cachedirs found/configured for calculation of free space.No caches defined in configurationNo callback for %s definedNo checksum information from serverNo checksum information possibleNo checksum information returned in Rucio response for %sNo checksum verification possibleNo configuration file could be loaded.No control directory set in configurationNo control or session directories defined in configurationNo credentials suppliedNo delegated credentials were passedNo delegation policies in this context and message - passing throughNo delegation token in requestNo delivery endpoints available, will try laterNo destination definedNo draining cache directory specifiedNo errorNo files to retrieve for job %sNo filesize information returned in Rucio response for %sNo group %i for mapped userNo job ID suppliedNo job description file name provided.No job description input specifiedNo job description parser was able to interpret job descriptionNo job description parsers availableNo job description parsers suitable for handling '%s' language are availableNo jobdescription resulted at %d testNo jobsNo jobs found, try laterNo jobs givenNo jobs to resubmit with the specified statusNo left operand for concatenation operatorNo listening ports initiatedNo local account name specifiedNo local user mapping foundNo locations defined for %sNo locations for %sNo locations for destination different from source foundNo locations for destination different from source found: %sNo locations for destination found: %sNo locations for source found: %sNo 
locations found - probably no more physical instancesNo locations found for %sNo locations left for %sNo match found in cache access rules for %sNo more %s replicasNo more interfaces to try for endpoint %s.No more replicas, will use %sNo need to stage source or destination, skipping stagingNo new informational document assignedNo next MCC or Service at path "%s"No next element in the chainNo non-draining session directories availableNo non-draining session dirs availableNo overwrite requested or allowed, skipping pre-cleaningNo pfns returned in Rucio response: %sNo physical files found for destinationNo physical files found for sourceNo pid file is found at '%s'. Probably A-REX is not running.No plugin is configured or authorised for requested path %sNo policy file or DNs specified for simplelist.pdp, please set location attribute or at least one DN element for simplelist PDP node in configuration.No port succeeded for %sNo private key with nickname %s exist in NSS databaseNo proxy foundNo proxy providedNo queue name given in queue block nameNo read-only cache directory specifiedNo remote delivery services are useable, forcing local deliveryNo replicas found for %sNo request token specified!No request tokens foundNo requested security information was collectedNo response from %sNo response from AA service %sNo response returned: %sNo results returned from statNo right operand for concatenation operatorNo security processing/check requested for '%s'No server config part of config fileNo services specified. Please configure default services in the client configuration, or specify a cluster or index (-c or -g options, see arcsync -h).No session directories found in configuration.No session directory foundNo session directory set in configurationNo source definedNo space token specifiedNo space tokens found matching description %sNo stagein URL is providedNo stream response from VOMS serverNo such DTR %sNo such file or directoryNo such group: %sNo such user: %sNo target available inside the policyNo target available inside the ruleNo test-job with ID %d found.No test-job, with ID "%d"No usable cachesNo user certificate by nickname %s foundNo user-certificate foundNo username suppliedNo valid caches found in configuration, caching is disabledNo valid credentials found, exitingNo valid handles left for listeningNo valid job identifier returned by EMI ESNo valid location availableNo valid response from VOMS server: %sNo value provided for Subject Attribute %s skippedNon-homogeneous resourceNone of the requested transfer protocols are supportedNot authorizedNot authorized according to request: %sNot authorized by arc.pdp - failed to get response from EvaluatorNot authorized by arc.pdp - some of the RequestItem elements do not satisfy PolicyNot authorized from simplelist.pdp: %sNot enough parameters in copyurlNot enough parameters in linkurlNot enough space to store fileNot found %s in cacheNot getting checksum of zip constituentNot listening to anythingNot using delivery service %s due to previous failureNot using delivery service at %s because it is fullNot valid destinationNot valid sourceNothing to do: you have to either specify a test job id with -J (--job) or query information about the certificates with -E (--certificate) Notify failedNow copying (from -> to)Number %d is with nickname: %s%sNumber %d is: %sNumber of ComputingService elements obtained from full document and XPath query do not match: %d != %dNumber of OpenSSL locks changed - reinitializingNumbers of sources and destinations do 
not matchOPTION...OS family: %sOS name: %sOS version: %sOTokens: Attr: %s = %sOTokens: Attr: messageOTokens: Attr: token: %sOTokens: Attr: token: bearer: %sOTokens: HandleOTokens: Handle: attributes created: subject = %sOTokens: Handle: messageObject is not suitable for listingObject not initialized (internal error)Obtained XML: %sObtained host and address are not acceptableObtaining information failedObtaining status failedOnly POST is supported in CandyPondOnly POST is supported in DataDeliveryServiceOnly Raw Buffer payload is supported for outputOnly globus rights are supported in Globus signing policy - %s is not supportedOnly signing rights are supported in Globus signing policy - %s is not supportedOnly standard input is currently supported for password source.Only user '.' for helper program is supportedOpenSSL error -- %sOpenSSL error string: %sOperating System errorOperation cancelled successfullyOperation completed successfullyOperation not supported for this kind of URLOperation on path "%s"OptimizedInformationContainer created temporary file: %sOptimizedInformationContainer failed to create temporary fileOptimizedInformationContainer failed to parse XMLOptimizedInformationContainer failed to rename temprary fileOptimizedInformationContainer failed to store XML document to temporary fileOption: %sOptions 'p' and 'n' can't be used simultaneouslyOptions Group %s:Options for plugin are missingOriginal job description is listed below:Orphan delegation lock detected (%s) - cleaningOther actionsOut of memory when generate random serialOut of retriesOut of tries while allocating new job IDOut of tries while allocating new job ID in %sOutgoing Message is not SOAPOutput EEC certificateOutput format modifiersOutput the proxy certificateOverwrite requested - will pre-clean destinationOwner: %sPASV failedPASV failed: %sPDP: %s (%s)PDP: %s (%s) can not be loadedPDP: %s can not be loadedPDP: missing name attributePDPD location is missingPDPD location: %sPEM_read_bio_X509_REQ failedPEM_write_bio_X509_REQ failedPEPD location is missingPEPD location: %sPKCS12 add password integrity failedPKCS12 output password not providedPOST request on special path is not supportedParsed domains: %uParser Context creation failed!Parser failed with error code %i.Parsing .local file to obtain job-specific identifiers and infoParsing VOMS AC to get FQANs informationPassword encoding type not supported: %sPath %s is invalid, creating required directoriesPath to .local job status file is required.Path to user's proxy file should be specified.Peer name: %sPer-job POST/SOAP requests are not supportedPerforming /* queryPerforming /ComputingService queryPerforming /Services/ComputingService queryPerforming matchmaking against target (%s).Performs neither sorting nor matchingPermanent failurePermanent service errorPermission checking failed, will try downloading without using cachePermission checking failed: %sPermission checking on original URL failed: %sPermission checking passedPermission checking passed for url %sPicking up left jobsPlace: %sPlatform: %sPlease choose the NSS database you would like to use (1-%d): Please choose the one you would use (1-%d): Plexer (%s) - next %s(%s) has no targetPlexer's (%s) next has no ID attribute definedPlugin %s error: %sPlugin %s error: %uPlugin %s failed to runPlugin %s failed to startPlugin %s for access point %s acquire failed (should never happen).Plugin %s for access point %s is broken.Plugin %s printed: %sPlugin %s printed: %uPlugin %s returned no mappingPlugin %s 
returned no usernamePlugin %s returned too much: %sPlugin %s returned: %uPlugin %s timeout after %u secondsPlugin (user mapping) command is emptyPlugin (user mapping) timeout is not a number: %sPlugin (user mapping) timeout is wrong number: %sPlugin response: %sPolicy Decision Service invocation failedPolicy is emptyPolicy is not gaclPolicy line: %sPolicy subject: %sPolicyId: %s Alg inside this policy is:-- %sPostal code: %sPre-LRMS waiting jobs: %iPre-clean failed, will still try to copyPre-registering destinationPre-registering destination in index servicePreparing to stage destinationPreparing to stage sourceProblem accessing cache file %s: %sProblem creating dtr (source %s, destination %s)Problem loading plugin %s, skipping it.Problem with index service, will proceed to end of data stagingProblem with index service, will release cache lockProcessing a %s requestProcessing thread timed out. Restarting DTRProcessing type not supported: %sProcessingStartTime (%s) specified in job description is inside the targets downtime period [ %s - %s ].Protocol plugins available:Protocol(s) not supported - please check that the relevant gfal2 plugins are installed (gfal2-plugin-* packages)Proxy certificate information:Proxy expiredProxy expired. Job submission aborted. Please run 'arcproxy'!Proxy generation failed: Certificate has expired.Proxy generation failed: Certificate is not valid yet.Proxy generation failed: Failed to create temporary file.Proxy generation failed: Failed to retrieve VOMS information.Proxy generation failed: No valid certificate found.Proxy generation failed: No valid private key found.Proxy generation succeededProxy has expiredProxy key length: %iProxy path: %sProxy signature: %sProxy stored at %sProxy subject: %sProxy type: %sProxy with ARC PolicyProxy with all rights inheritedProxy with empty policy - fail on unrecognized policyProxy with specific policy: %sProxy with unknown policy - fail on unrecognized policyProxy-subject: %sProxy/credentials stored at %sProxy: %sPut request %s is still in queue, should wait %i secondsPython Wrapper constructor succeededPython Wrapper destructor (%d)Python broker constructor called (%d)Python broker destructor called (%d)Python interpreter lockedPython interpreter releasedPython wrapper process calledPythonBroker initQuality level: %sQuery returned no elements.Query returned unexpected element: %s:%sQuerying ACIX server at %sQuerying WSRF GLUE2 computing REST endpoint.Querying batch with %d jobsQuerying source replicas in bulkQuerying status of staging requestQueue information:REST: process %s at %sREST:CLEAN job %s - %sREST:GET job %s - %sREST:KILL job %s - %sREST:PUT job %s: file %s: there is no payloadREST:RESTART job %s - %sRESTful and old VOMS communication protocols can't be requested simultaneously.RSA_generate_key_ex failedRSL substitution is not a sequenceRSL substitution sequence is not of length 2RSL substitution variable name does not evaluate to a literalRSL substitution variable value does not evaluate to a literalRandom sortingRaw command: %sRe-creating an EMI ES clientRead %i bytesRead access check failedRead access not allowed for %s: %sRead commands in authenticate failedRead request from a fileRead request from a stringReading %u bytes from byte %lluReal transfer from %s to %sReason : %sReceived DTR %s back from scheduler in state %sReceived DTR %s during Generator shutdown - may not be processedReceived invalid DTRReceived message out-of-band (not critical, ERROR level is just for debugging purposes)Received no 
DTRReceived retry for DTR %s still in transferReconnectingRecord about new job successfully added to the database (%s)Redirecting to %sRedirecting to new URL: %sRefusing connection: Connection limit exceededRegistering destination replicaRegistering directory: %s with plugin: %sRegistering dummy directory: %sRegistration of Globus FTP buffer failed - cancel checkRelation operator expectedReleasing destinationReleasing request(s) made during stagingReleasing requestsReleasing sourceRemapped to local group id: %iRemapped to local group name: %sRemapped to local id: %iRemapped to local user: %sRemapped user's home: %sRemove: deleting: %sRemoving %sRemoving endpoint %s: It has an unrequested interface (%s).Removing logical file from metadata %sRemoving metadata in %sRemoving pre-registered destination in index serviceRename: globus_ftp_client_move failedRename: timeout waiting for operation to completeRenaming %s to %sRenewal of credentials was successfulRenewing credentials for job: %sRenewing proxy for job %sReplacing DTR %s in state %s with new requestReplacing existing token for %s in Rucio token cacheReplacing old SRM info with new for URL %sReplacing queue '%s' with '%s'Replica %s doesn't match preferred pattern or URL mapReplica %s has high latency, but no more sources exist so will use this oneReplica %s has high latency, trying next sourceReplica %s has long latency, trying next replicaReplica %s is mappedReplica %s matches host pattern %sReplica %s matches pattern %sRequest failedRequest failed: No response from IdPRequest failed: No response from IdP when doing authenticationRequest failed: No response from IdP when doing redirectingRequest failed: No response from SP Service when sending SAML assertion to SPRequest failed: No response from SPServiceRequest failed: response from IdP is not as expected when doing authenticationRequest failed: response from IdP is not as expected when doing redirectingRequest failed: response from SP Service is not as expected when sending SAML assertion to SPRequest failed: response from SPService is not as expectedRequest is emptyRequest is not supported - %sRequest is reported as ABORTED, but all files are doneRequest is reported as ABORTED, since it was cancelledRequest is reported as ABORTED. Reason: %sRequest succeed!!!Request timed outRequest to open file with storing in progressRequest to push to unknown owner - %uRequest: %sRequested slots: %iRequested to skip resource discovery. 
Will try direct submission to %s and %s submission endpoint typesRequesting ComputingService elements of resource description at %sRequesting recursion and --nolist has no senseRequesting to stop job processingRequirement "%s %s" NOT satisfied.Requirement "%s %s" satisfied by "%s".Requirement "%s %s" satisfied.Reservation policy: %sResolving destination replicasResolving of index service for destination failedResolving of index service for source failedResolving source replicas in bulkResource description contains unexpected element: %s:%sResource description is emptyResource description provides URL for interface %s: %sResource description provides no URLs for interfacesResource description query validation passedResource description validation according to GLUE2 schema failed: Resource description validation passedResource information provider failed to runResource information provider failed to startResource information provider failed with exit status: %i %sResource information provider log: %sResource information provider: %sResource manager: %sResource query failedResponse is not SOAPResponse is not XMLResponse sending errorResponse: %sResponse: %sRestarting after segmentation violation.Resubmission of job (%s) succeeded, but cleaning the job failed - it will still appear in the job listResubmission of job (%s) succeeded, but killing the job failed - it will still appear in the job listResult value (0=Permit, 1=Deny, 2=Indeterminate, 3=Not_Applicable): %dResults stored at: %sResuming job: %s at state: %s (%s)Retrieving file %sRetrieving job description of EMI ES jobs is not supportedRetrieving job description of INTERNAL jobs is not supportedReturned message from VOMS server %s is: %s Returned message from VOMS server: %sReturning to generatorReusing connectionRight operand for RSL concatenation does not evaluate to a literalRucio returned %sRucio token for %s has expired or is about to expireRule: %sRule: audience: %sRule: capabilities: %sRule: group: %sRule: issuer: %sRule: role: %sRule: scope: %sRule: subject: %sRule: vo: %sRunning command: %sRunning jobs: %iRunning mailer command (%s)Running user has no nameSAML Token handler is not configuredSAML2SSO process failedSASL InteractionSOAP Request to AA service %s failedSOAP fault from delivery service at %s: %sSOAP fault: %sSOAP invocation failedSOAP operation is not supported: %sSOAP request: %sSOAP response: %sSOAP with SAML2SSO invocation failedSQL statement used: %sSQLite database error: %sSRM Client status: %sSRM did not return any informationSRM did not return any useful informationSRM returned no useful Transfer URLs: %sSSHFS mount point of cache directory (%s) is broken - waiting for reconnect ...SSHFS mount point of runtime directory (%s) is broken - waiting for reconnect ...SSHFS mount point of session directory (%s) is broken - waiting for reconnect ...SSL error: %d - %s:%s:%sSSL error: %s, libs: %s, func: %s, reason: %sSSL locks not initializedScheduler configuration:Scheduler received NULL DTRScheduler received invalid DTRScheduler received new DTR %s with source: %s, destination: %s, assigned to transfer share %s with priority %dScheduler starting upScheduler stopped, exitingScheduling policy: %sSchema validation errorScheme: %sSecHandler configuration is not definedSecHandler has no configurationSecHandler has no name attribute definedSecHandler: %s(%s)Security Handler %s(%s) could not be createdSecurity Handlers processing failedSecurity Handlers processing failed: %sSecurity check failed for incoming TLS 
messageSecurity check failed for outgoing TLS messageSecurity check failed in SOAP MCC for incoming messageSecurity check failed in SOAP MCC for incoming message: %sSecurity check failed in SOAP MCC for outgoing messageSecurity check failed in SOAP MCC for outgoing message: %sSecurity check failed in TLS MCC for incoming messageSecurity processing/check failed: %sSecurity processing/check for '%s' failed: %sSecurity processing/check for '%s' passedSecurity processing/check passedSelect failed: %sSelf-signed certificateSend response failed: %sSendCommand: Command: %sSendCommand: Failed: %sSendCommand: Response: %sSendCommand: Timed out after %d msServer SRM version: %sServer implementation: %sServer stoppedService %s(%s) could not be createdService Loop: Endpoint %sService endpoint %s (type %s) added to the list for direct submissionService endpoint %s (type %s) added to the list for resource discoveryService has no ID attribute definedService has no Name attribute definedService information:Service is waiting for requestsService side MCCs are loadedServing state: %sSession dir %s is owned by %i, but current mapped user is %iSession dir '%s' contains user specific substitutions - skipping itSession directory to useSession root directory is missingSessiondir %s: Free space %f GBSetting connections limit to %i, connections over limit will be %sSetting pbsz to %luSetting status (%s) for endpoint: %sSetting status (STARTED) for endpoint: %sSetting subject name!Setting userRequestDescription to %sShare Information:Should wait for destination to be preparedShould wait for source to be preparedShow %s help optionsShow help optionsShutdown daemonShutting down data delivery serviceShutting down data staging threadsShutting down schedulerSimpleMap: %sSimpleMap: acquired new unmap time of %u secondsSimpleMap: wrong number in unmaptime commandSkipping %s replica %sSkipping ComputingEndpoint '%s', because it has '%s' interface instead of the requested '%s'.Skipping invalid URL option %sSkipping policyAuthority VOMS AC attributeSkipping replica on local host %sSkipping retrieved job (%s) because it was submitted via another interface (%s).Skipping service: no SchemaPath found!Skipping service: no ServicePath found!Socket conversion failed: %sSockets do not match on exit %i != %iSome addresses failed. 
Listening on %u of %u.Some transfers failedSorting according to free slots in queueSorting according to input data availability at targetSorting according to specified benchmark (default "specint2000")Sorting replicas according to URL mapSorting replicas according to preferred pattern %sSource URL missingSource URL not supported: %sSource URL not valid: %sSource and/or destination is index service, will resolve replicasSource check requested but failed: %sSource is invalid URLSource is mapped to %sSource is not ready, will wait %u secondsSource is the same as destinationSource modification date: %sSource or destination requires stagingSource: %sSpecified module not found in cacheSpecified overlay file (%s) does not exist.Staging jobs: %iStaging request timed out, will release requestStaging: %sStart foregroundStart testStart waiting 10 sec...StartReadingStartReading: File was not prepared properlyStartWritingStartWriting: File was not prepared properlyStarted remote Delivery at %sStarting DTR threadsStarting controlled processStarting data staging threadsStarting helper process: %sStarting jobs processing threadStarting jobs' monitoringStarting new DTR for %sStarting querying of suspended endpoint (%s) - no other endpoints for this service is being queried or has been queried successfully.Starting sub-thread to query the endpoint on %sStarting thread to query the endpoint on %sStat: obtained modification time %sStat: obtained size %lluState name for plugin is missingStatus for service endpoint "%s" is set to inactive in ARCHERY. Skipping.Status of %d jobs was queried, %d jobs returned informationStopReading finished waiting for transfer_condition.StopReading starts waiting for transfer_condition.StopReading: aborting connectionStopWriting finished waiting for transfer_condition.StopWriting starts waiting for transfer_condition.StopWriting: Calculated checksum %sStopWriting: aborting connectionStopWriting: looking for checksum of %sStopped job processingStopping helper process %sStopping jobs processing threadStopping serverStoring file %sStoring port %i for %sStoring temp proxy at %sString successfully parsed as %s.Subject Attribute %s has no known NID, skippedSubject does not start with '/'Subject name: %sSubject of request is nullSubject to match: %sSubject: %sSubmission endpointSubmission failedSubmit: Failed sending CWD commandSubmit: Failed sending CWD new commandSubmit: Failed sending job descriptionSubmit: Failed to connectSubmit: Failed uploading local input filesSubmit: service has no suitable information interface - need org.nordugrid.ldapngSubmitterPlugin %s could not be createdSubmitterPlugin plugin "%s" not found.Submitting job Submitting test-job %d:Succeeded to add Independent OID, tag %d is returnedSucceeded to add RFC proxy OID, tag %d is returnedSucceeded to add VOMS AC sequence OID, tag %d is returnedSucceeded to add anyLanguage OID, tag %d is returnedSucceeded to add inheritAll OID, tag %d is returnedSucceeded to authenticate SAMLTokenSucceeded to authenticate UsernameTokenSucceeded to authenticate X509TokenSucceeded to change password on MyProxy serverSucceeded to change trusts to: %sSucceeded to convert PrivateKeyInfo to EVP_PKEYSucceeded to destroy credential on MyProxy serverSucceeded to export PKCS12Succeeded to generate public/private key pairSucceeded to get a proxy in %s from MyProxy server %sSucceeded to get credentialSucceeded to get info from MyProxy serverSucceeded to import certificateSucceeded to import private keySucceeded to initialize NSSSucceeded to 
load PrivateKeyInfoSucceeded to output certificate to %sSucceeded to output the certificate request into %sSucceeded to put a proxy onto MyProxy serverSucceeded to send DelegationService: %s and DelegationID: %s info to peer serviceSucceeded to sign the proxy certificateSucceeded to verify the signature under Succeeded to verify the signature under Succeeded to verify the signed certificateSupplied username %s does not match mapped username %sSupported Profiles:Supported constraints are:
  validityStart=time (e.g. 2008-05-29T10:20:30Z; if not specified, start from now)
  validityEnd=time
  validityPeriod=time (e.g. 43200 or 12h or 12H; if neither validityPeriod nor validityEnd is specified, the default is 12 hours for a local proxy and 168 hours for a proxy delegated to a myproxy server)
  vomsACvalidityPeriod=time (e.g. 43200 or 12h or 12H; if not specified, the default is the minimum of 12 hours and validityPeriod)
  myproxyvalidityPeriod=time (lifetime of proxies delegated by the myproxy server, e.g. 43200 or 12h or 12H; if not specified, the default is the minimum of 12 hours and validityPeriod, which is the lifetime of the proxy delegated to the myproxy server)
  proxyPolicy=policy content
  proxyPolicyFile=policy file
  keybits=number - length of the key to generate. Default is 2048 bits. The special value 'inherit' uses the key length of the signing certificate.
  signingAlgorithm=name - signing algorithm to use for signing the public key of the proxy. Possible values are sha1, sha2 (alias for sha256), sha224, sha256, sha384, sha512 and inherit (use the algorithm of the signing certificate). Default is inherit. On old systems only sha1 is acceptable.
Supported information item names are:
  subject - subject name of proxy certificate.
  identity - identity subject name of proxy certificate.
  issuer - issuer subject name of proxy certificate.
  ca - subject name of CA which issued initial certificate.
  path - file system path to file containing proxy.
  type - type of proxy certificate.
  validityStart - timestamp when proxy validity starts.
  validityEnd - timestamp when proxy validity ends.
  validityPeriod - duration of proxy validity in seconds.
  validityLeft - duration of proxy validity left in seconds.
  vomsVO - VO name represented by VOMS attribute
  vomsSubject - subject of certificate for which VOMS attribute is issued
  vomsIssuer - subject of service which issued VOMS certificate
  vomsACvalidityStart - timestamp when VOMS attribute validity starts.
  vomsACvalidityEnd - timestamp when VOMS attribute validity ends.
  vomsACvalidityPeriod - duration of VOMS attribute validity in seconds.
  vomsACvalidityLeft - duration of VOMS attribute validity left in seconds.
  proxyPolicy
  keybits - size of proxy certificate key in bits.
  signingAlgorithm - algorithm used to sign proxy certificate.
Items are printed in the requested order and are separated by newlines. If an item has multiple values they are printed on the same line separated by |.
Supported password destinations are:
  key - for reading private key
  myproxy - for accessing credentials at MyProxy service
  myproxynew - for creating credentials at MyProxy service
  all - for any purpose.
Supported password sources are:
  quoted string ("password") - explicitly specified password
  int - interactively request password from console
  stdin - read password from standard input delimited by newline
  file:filename - read password from file named filename
  stream:# - read password from input stream number #. Currently only 0 (standard input) is supported.
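As an illustration of the constraint and information-item syntax listed above, a minimal usage sketch, assuming the arcproxy -c/--constraint and -i/--infoitem options that this help text describes (the values shown are placeholders; check arcproxy -h for the exact option letters on your installation):

  # request a proxy valid for 24 hours with a 2048-bit key
  arcproxy -c validityPeriod=24h -c keybits=2048
  # print selected items about the generated proxy
  arcproxy -i subject -i validityEnd -i keybits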
Supports advance reservationsSupports bulk submissionSupports preemptionSuspended jobs: %iSuspending querying of endpoint (%s) since the service at the endpoint is already being queried, or has been queried.Synchronizing the local list of active jobs with the information in the information system can result in some inconsistencies. Very recently submitted jobs might not yet be present, whereas jobs very recently scheduled for deletion can still be present.Syntax error in 'notify' attribute value ('%s'), it contains unknown state flagsSyntax error in 'notify' attribute value ('%s'), it must contain an email addressSyntax error in 'notify' attribute value ('%s'), it must only contain email addresses after state flag(s)System configuration file (%s or %s) does not exist.System configuration file (%s) contains errors.System configuration file (%s) does not exist.TCP client process calledTCP executor is removedTLS provides no identity, going for OTokensTURL %s cannot be handledTarget %s does not match requested interface(s).Target %s removed by FastestQueueBroker, doesn't report number of free slotsTarget %s removed by FastestQueueBroker, doesn't report number of total slotsTarget %s removed by FastestQueueBroker, doesn't report number of waiting jobsTechnology: %sTemporary service errorTest aborted because no resource returned any informationTest failed, no more possible targetsTest submitted with jobid: %sTest was defined with ID %d, but some error occurred during parsing it.The "FreeSlotsWithDuration" attribute is wrongly formatted. Ignoring it.The "FreeSlotsWithDuration" attribute published by "%s" is wrongly formatted. Ignoring it.The 'sort' and 'rsort' flags cannot be specified at the same time.The BIO for output is NULLThe CA certificates directory is required for contacting VOMS and MyProxy servers.The CA issuer (%s) of the credentials (%s) is not trusted by the target (%s).The ComputingEndpoint doesn't advertise its Quality Level.The ComputingEndpoint doesn't advertise its Serving State.The ComputingEndpoint has no URL.The ComputingService doesn't advertise its Interface.The ComputingService doesn't advertise its Quality Level.The MyProxy period that you set: %s can't be recognized.The NSS database can not be detected in the Firefox profileThe Response is not going to this endThe Service advertises no Health State.The Service doesn't advertise its Type.The StatusCode is SuccessThe VOMS AC period that you set: %s can't be recognized.The VOMS server with the information: %s can not be reached, please make sure it is availableThe VOMS server with the information: %s can not be reached, please make sure it is available.The [vo] section labeled '%s' has no file associated and can't be used for matchingThe arccat command performs the cat command on the stdout, stderr or grid manager's error log of the job.The arcclean command removes a job from the computing resource.The arccp command copies files to, from and between grid storage elements.The arcget command is used for retrieving the results from a job.The arcinfo command is used for obtaining the status of computing resources on the Grid.The arckill command is used to kill running jobs.The arcls command is used for listing files in grid storage elements and file index catalogues.The arcmkdir command creates directories on grid storage elements and catalogs.The arcproxy command creates a proxy from a key/certificate pair which can then be used to access grid resources.The arcrename command renames files on grid storage elements.The arcrm 
command deletes files on grid storage elements.The arcstat command is used for obtaining the status of jobs that have been submitted to Grid enabled resources.The arcsub command is used for submitting jobs to Grid enabled computing resources.The arcsync command synchronizes your local job list with the information at the given resources or index servers.The arctest command is used for testing clusters as resources.The attribute information from VOMS server: %s is list as following:The available CRL has expiredThe available CRL is not yet validThe brokerarguments attribute can only be used in conjunction with the brokername attributeThe certificate with subject %s is not validThe cluster XRSL attribute is currently unsupported.The credential to be signed contains no requestThe credential to be signed is NULLThe credential's private key has already been initializedThe default configuration file (%s) is not a regular file.The delegated credential got from delegation service is stored into path: %sThe delegated credential got from path: %sThe downtime of the target (%s) is not published. Keeping target.The end time that you set: %s can't be recognized.The end time that you set: %s is before start time: %s.The endpoint (%s) is not supported by this plugin (%s)The endpoint of delegation service should be configuredThe file %s is currently locked with a valid lockThe first supported interface of the plugin %s is an empty string, skipping the plugin.The following %d were not resubmittedThe following jobs were not submitted:The interface of this endpoint (%s) is unspecified, will try all possible pluginsThe job description also can be a file or a string in ADL or XRSL format.The keybits constraint is wrong: %s.The name of the private key to delete is emptyThe old GSI proxies are not supported anymore. Please do not use -O/--old option.The payload of incoming message is emptyThe payload of outgoing message is emptyThe period that you set: %s can't be recognized.The plugin %s does not support any interfaces, skipping it.The policy file setup for simplelist.pdp does not exist, please check location attribute for simplelist PDP node in service configurationThe policy language: %s is not supportedThe private key for signing is not initializedThe process owning the lock on %s is no longer running, will remove lockThe request has passed the policy evaluationThe signing algorithm %s is not allowed,it should be SHA1 or SHA2 to sign certificate requestsThe specified Globus attribute (%s) is not supported. %s ignored.The start time that you set: %s can't be recognized.The start, end and period can't be set simultaneouslyThe subject does not match the issuer name + proxy CN entryThe validity duration of VOMS AC is shortened from %s to %s, due to the validity constraint on voms server side. 
The value of the acl XRSL attribute isn't valid XML.The value of the ftpthreads attribute must be a number from 1 to 10The value of the keysize attribute in the configuration file (%s) was only partially parsedThe value of the timeout attribute in the configuration file (%s) was only partially parsedThere are %d NSS base directories where the certificate, key, and module databases liveThere are %d RequestItemsThere are %d commands to the same VOMS server %sThere are %d requests, which satisfy at least one policyThere are %d servers with the same name: %s in your vomses file, but none of them can be reached, or can return a valid message.There are %d servers with the same name: %s in your vomses file, but none of them can be reached, or can return valid message. But proxy without VOMS AC extension will still be generated.There are %d user certificates existing in the NSS databaseThere are no endpoints in registry that match requested info endpoint typeThere are no endpoints in registry that match requested submission endpoint typeThere is %d subjects, which satisfy at least one policyThere is no Delegated X509 token in the responseThere is no Format delegated token in the responseThere is no Format request in the responseThere is no Id or X509 request value in the responseThere is no Id or X509 token value in the responseThere is no SOAP connection chain configuredThere is no SOAP responseThere is no UpdateCredentialsResponse in responseThere is no X509 request in the responseThere is no certificate named %s found, the certificate could be removed when generating CSRThere is no digest in issuer's private key objectThere is no local LRMS ID. Message will not be written to BLAH log.There is no responseThere was a problem during post-transfer destination handling after error: %sThere was a problem during post-transfer source handling: %sThere was no HTTP responseThere was no SOAP responseThere was no SOAP response return from PDP server: %sThird party transfer is not supported for these endpointsThird party transfer was requested but the corresponding plugin could not be loaded. Is the GFAL plugin installed? If not, please install the packages 'nordugrid-arc-plugins-gfal' and 'gfal2-all'. 
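A sketch of the job lifecycle that the arc* command descriptions above refer to. File names, the VO name and the job ID are placeholders, the option letters are assumptions to be checked against each command's -h output, and it presumes default services are configured in the client configuration as the messages above suggest:

  arcproxy -S myvo     # create a VOMS proxy (myvo is a placeholder VO name)
  arcsub myjob.xrsl    # submit a job description to a Grid-enabled resource
  arcstat -a           # query the status of all jobs in the local job list
  arcget <jobid>       # retrieve the results of a finished job
  arcclean <jobid>     # remove the job from the computing resource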
Depending on your type of installation the package names might differ.This INFO message should also be seenThis INFO message should be seenThis VERBOSE message should not be seenThis VERBOSE message should now be seenThis instance was already deletedThis job was very recently submitted and might not yet have reached the information systemThis message goes to initial destinationThis message goes to per-thread destinationThis process already owns the lock on %sThis seems like a temporary error, please try again laterThis tiny tool can be used for testing the JobDescription's conversion abilities.This user is denied to submit new jobs.Thread exited with Glib error: %sThread exited with Glib exception: %sThread exited with generic exception: %sTime left for AC: %sTime left for AC: AC has expiredTime left for AC: AC is not valid yetTime left for proxy: %sTime left for proxy: Proxy expiredTime left for proxy: Proxy not valid yetTime spent waiting for disc: %.3f msTime spent waiting for network: %.3f msTimed out while waiting for cache lockTimeout connecting to %s(%s):%i - %i sTimeout has expired, will remove lock file %sTimeout waiting for Globus callback - leaking connectionTimeout waiting for mkdirTo recover missing jobs, run arcsyncToo many arguments in configurationToo many connections - dropping new oneToo many connections - waiting for old to closeToo many failures to obtain checksum - giving upToo many files in one request - please try again with fewer filesTool for writing the grami file representation of a job description file.Total jobs: %iTotal logical CPUs: %iTotal number of jobs found: Total number of new jobs found: Total physical CPUs: %iTotal slots: %iTransfer FAILED: %sTransfer cancelled successfullyTransfer completeTransfer failedTransfer failed: %sTransfer finished: %llu bytes transferred %sTransfer from %s to %sTransfer killed after %i seconds without communicationTransfer succeededTransfer timed outTrusted CAs:Try to get attribute from VOMS server with order: %sTrying all available interfacesTrying next replicaTrying to check X509 cert with check_cert_typeTrying to connect %s(%s):%dTrying to listen on %s:%s(%s)Trying to listen on TCP port %s(%s)Trying to migrate to %s: Migration to a %s interface is not supported.Trying to retrieve job description of %s from computing resourceTrying to start suspended endpoint (%s)Trying to submit directly to endpoint (%s)Trying to submit endpoint (%s) using interface (%s) with plugin (%s).Two input files have identical name '%s'.Type is dir, calling srmRmDirType is file, calling srmRmType: %sTypes of execution services that %s is able to submit jobs to:Types of local information services that %s is able to collect information from:Types of local information services that %s is able to collect job information from:Types of registry services that %s is able to collect information from:Types of services that %s is able to manage jobs at:URLURL %s disagrees with stored SRM info, testing new infoURL is mapped to local access - checking permissions on original URLURL is mapped to: %sURL is not valid: %sURL option %s does not have format name=valueURL protocol is not urllist: %sURL: %sUnAuthorized from xacml.pdpUnable to adapt job description to any resource, no resource information could be obtained.Unable to add event: cannot find AAR for job %s in accounting database.Unable to associate secondary DB with primary DB (%s)Unable to copy %sUnable to copy example configuration from existing configuration (%s)Unable to create %s directory.Unable to 
create DB for secondary endpoint keys (%s)Unable to create DB for secondary name keys (%s)Unable to create DB for secondary service info keys (%s)Unable to create SOAP client used by EMIESClient.Unable to create data base (%s)Unable to create data base environment (%s)Unable to create directory %sUnable to create directory for storing results (%s) - %sUnable to create index for jobs table in data base (%s)Unable to create job database (%s)Unable to create jobs table in data base (%s)Unable to create jobs_new table in data base (%s)Unable to create temporary directoryUnable to detect format of job record.Unable to detect if issuer certificate is installed.Unable to determine certificate informationUnable to determine error (%d)Unable to download job (%s), no JobControllerPlugin plugin was set to handle the job.Unable to drop jobs in data base (%s)Unable to find file size of %sUnable to handle job (%s), no interface specified.Unable to handle job (%s), no plugin associated with the specified interface (%s)Unable to initialise connection to destination: %sUnable to initialise connection to source: %sUnable to initialize handler for %sUnable to list content of %sUnable to list files at %sUnable to load ARC configuration file.Unable to load BrokerPlugin (%s)Unable to load broker %sUnable to load plugin (%s) for interface (%s) when trying to submit job description.Unable to load submission plugin for %s interfaceUnable to locate the "%s" plugin. Please refer to installation instructions and check if package providing support for "%s" plugin is installedUnable to match target, marking it as not matching. Broker not valid.Unable to migrate job (%s), job description could not be retrieved remotelyUnable to migrate job (%s), unable to parse obtained job descriptionUnable to open job list file (%s), unknown formatUnable to parse job description input: %sUnable to parse the %s.%s value from execution service (%s).Unable to parse the specified verbosity (%s) to one of the allowed levelsUnable to parse.Unable to prepare job description according to needs of the target resource (%s).Unable to prepare job description according to needs of the target resource.Unable to query job information (%s), invalid URL provided (%s)Unable to read job information from file (%s)Unable to register job submission. Can't get JobDescription object from Broker, Broker is invalid.Unable to remove file %sUnable to rename %sUnable to rename jobs table in data base (%s)Unable to resubmit job (%s), no targets applicable for submissionUnable to resubmit job (%s), target information retrieval failed for target: %sUnable to resubmit job (%s), unable to parse obtained job descriptionUnable to retrieve list of job files to download for job %sUnable to select middlewareUnable to select operating system.Unable to select runtime environmentUnable to set duplicate flags for secondary key DB (%s)Unable to sort ExecutionTarget objects - Invalid Broker object.Unable to sort added jobs. The BrokerPlugin plugin has not been loaded.Unable to submit job. Failed to assign delegation to job description.Unable to submit job. Job description is not valid XMLUnable to submit job. Job description is not valid in the %s format: %sUnable to submit jobs. 
Failed to delegate credentials.Unable to transfer from jobs to jobs_new in data base (%s)Unable to truncate job database (%s)Unable to write 'output' file: %sUnable to write grami file: %sUnable to write key/value pair to job database (%s): Key "%s"Unable to write records into job database (%s): Id "%s"Unable to write to p12 fileUnauthorizedUnauthorized from remote pdp serviceUndefined control sequence: %%%sUnexpected RSL typeUnexpected argument for 'all' rule - %sUnexpected argumentsUnexpected arguments suppliedUnexpected delegation location from delegation endpoint - %s.Unexpected immediate completion: %sUnexpected name returned in Rucio response: %sUnexpected path %s returned from serverUnexpected response code from delegation endpoint - %uUnexpected response code from delegation endpoint: %u, %s.Uniq is adding service coming from %sUniq is ignoring service coming from %sUniq is replacing service coming from %s with service coming from %sUnknown ACL policy %s for job %sUnknown LDAP scope %s - using baseUnknown XRSL attribute: %s - Ignoring it.Unknown attribute %s in common section of configuration file (%s), ignoring itUnknown authorization command %sUnknown channel %s for stdio protocolUnknown conversion mode %s, using defaultUnknown credential type %s for URL pattern %sUnknown element in Globus signing policyUnknown entry in EGIIS (%s)Unknown errorUnknown key or hash typeUnknown key or hash type of issuerUnknown log level %sUnknown open mode %iUnknown open mode %sUnknown optionUnknown option %sUnknown rights in Globus signing policy - %sUnknown section %s, ignoring itUnknown transfer option: %sUnknown user name mapping rule %sUnregistering %sUnregistering from index service failedUnsupported URL givenUnsupported URL given: %sUnsupported command: %sUnsupported configuration command: %sUnsupported destination url: %sUnsupported information endpoint type: %sUnsupported job list type '%s', using 'BDB'. Supported types are: BDB, SQLITE, XML.Unsupported mapping policy action: %sUnsupported mapping policy option: %sUnsupported protocol in url %sUnsupported proxy policy language is requested - %sUnsupported proxy version is requested - %sUnsupported source url: %sUnsupported submission endpoint type: %sUnsupported submission interface %s. Seems arc-blahp-logger need to be updated accordingly. Please submit the bug to bugzilla.Unsupported value for allownew: %sUntrusted self-signed certificate in chain with subject %s and hash: %luUpdateCredentials failedUpdateCredentials: EPR contains no JobIDUpdateCredentials: failed to update credentialsUpdateCredentials: missing ReferenceUpdateCredentials: no job found: %sUpdateCredentials: request = %sUpdateCredentials: response = %sUpdateCredentials: wrong number of ReferenceUpdateCredentials: wrong number of elements inside ReferenceUsage:Usage: copy source destinationUse --help option for detailed usage informationUse -? to get usage descriptionUsed configuration file %sUsed slots: %iUser configuration file (%s) contains errors.User configuration file (%s) does not exist or cannot be loaded.User for helper program is missingUser has empty virtual directory tree. 
Either user has no authorised plugins or there are no plugins configured at all.User has no proper configuration associatedUser interface errorUser name direct mapping is missing user name: %s.User name mapping command is emptyUser name mapping has empty authgroup: %sUser name mapping has empty commandUser name mapping has empty name: %sUser name should be specified.User pool at %s can't be opened.User pool at %s failed to perform user mapping.User pool mapping is missing user subject.User subject match is missing user subject.User subject: %sUserConfig class is not an objectUserConfiguration saved to file (%s)Username Token handler is not configuredUsing A-REX config file %sUsing CA certificate directory: %sUsing DH parameters from file: %sUsing OTokenUsing Rucio account %sUsing buffered transfer methodUsing cache %sUsing cached local account '%s'Using cert %sUsing certificate file: %sUsing cipher list: %sUsing cipher: %sUsing configuration at %sUsing control directory %sUsing curve with NID: %uUsing insecure data transferUsing internal transfer method of %sUsing key %sUsing key file: %sUsing local account '%s'Using next %s replicaUsing protocol options: 0x%xUsing proxy %sUsing proxy file: %sUsing secure data transferUsing session dir %sUsing session directory %sUsing space token %sUsing space token description %sVO %s doesn't match %sVOMS AC attribute is a tagVOMS AC attribute is the FQANVOMS attr %s doesn't match %sVOMS attr %s matches %sVOMS attribute is ignored due to processing/validation errorVOMS attribute parsing failedVOMS attribute validation failedVOMS command is emptyVOMS line contains wrong number of tokens (%u expected): "%s"VOMS proxy processing returns: %i - %sVOMS trust chains: %sVOMS: AC has expiredVOMS: AC is not complete - missing Serial or Issuer informationVOMS: AC is not yet validVOMS: AC signature verification failedVOMS: CA directory or CA file must be providedVOMS: Can not allocate memory for parsing ACVOMS: Can not allocate memory for storing the order of ACVOMS: Can not find AC_ATTR with IETFATTR typeVOMS: Can not parse ACVOMS: Cannot find certificate of AC issuer for VO %sVOMS: DN of holder in AC: %sVOMS: DN of holder: %sVOMS: DN of issuer: %sVOMS: FQDN of this host %s does not match any target in ACVOMS: The lsc file %s can not be openVOMS: The lsc file %s does not existVOMS: authorityKey is wrongVOMS: both idcenoRevAvail and authorityKeyIdentifier certificate extensions must be presentVOMS: can not verify the signature of the ACVOMS: cannot validate AC issuer for VO %sVOMS: case of multiple IETFATTR attributes not supportedVOMS: case of multiple policyAuthority not supportedVOMS: create FQAN: %sVOMS: create attribute: %sVOMS: directory for trusted service certificates: %sVOMS: failed to parse attributes from ACVOMS: failed to verify AC signatureVOMS: missing AC partsVOMS: problems while parsing information in ACVOMS: the DN in certificate: %s does not match that in trusted DN list: %sVOMS: the Issuer identity in certificate: %s does not match that in trusted DN list: %sVOMS: the attribute name is emptyVOMS: the attribute qualifier is emptyVOMS: the attribute value for %s is emptyVOMS: the format of IETFATTRVAL is not supported - expecting OCTET STRINGVOMS: the format of policyAuthority is unsupported - expecting URIVOMS: the grantor attribute is emptyVOMS: the holder information in AC is wrongVOMS: the holder issuer name is not the same as that in ACVOMS: the holder issuerUID is not the same as that in ACVOMS: the holder name in AC is not related to the 
[String tables of the compiled Swedish (sv) gettext message catalog for nordugrid-arc appeared here in extracted form: the remainder of the English msgid table and the complete Swedish msgstr table were run together without msgid/msgstr delimiters, so the pairing between original messages and their translations is not recoverable, and the Swedish text was UTF-8 mis-decoded as Latin-1 (e.g. "pÃ¥" for "på", "frÃ¥n" for "från"). Only the catalog header survives intact and is reproduced below.]

Project-Id-Version: Arc
Report-Msgid-Bugs-To: support@nordugrid.org
POT-Creation-Date: 2021-12-02 15:25+0100
PO-Revision-Date: 2021-11-26 10:49+0100
Last-Translator: Mattias Ellert
Language-Team: Swedish
Language: sv
MIME-Version: 1.0
Content-Type: text/plain; charset=utf-8
Content-Transfer-Encoding: 8bit
Plural-Forms: nplurals=2; plural=n != 1;
null-jobbDTRer kör fortfarande för jobb %sDemonisering av fork misslyckades: %sDatakanal (hämta) %i %i %iDatakanal (lägra) %i %i %iDatakanal uppkopplad (lista)Datakanal uppkopplad (hämta)Datakanal uppkopplad (lagra)Datakanal: %d.%d.%d.%d:%dDatakanal: [%s]:%dDataleveransloop avslutadesDataöverföring avbrutenDataöverföring avbruten: %sData var redan cachatDataleverans-logg-svans: %sDataleverans: %sDataMove::Transfer: ingen checksumma beräknad för %sDataMove::Transfer: använder tillhandahÃ¥llen checksumma %sDataMove::Transfer: använder tillhandahÃ¥llen checksumma %s:%sDataMove::Transfer: kommer att beräkna %s-checksummaDataMover: nästa cykelDataMover: destinationen har slut pÃ¥ försök - avslutaDataMover: begärt att inte försöka igen - avslutaDataMover: källan har slut pÃ¥ försök - avslutaDataMover::Transfer : startar ny trÃ¥dDataMover::Transfer: försöker förstöra/skriva över destination: %sDataPointGFAL::write_file fick position %d och offset %d, mÃ¥ste göra seekDataPointXrootd::write_file fick position %d och offset %d, mÃ¥ste göra seekDataStagingDelivery avslutades med kod %iDeaktiverar modulerFörvald CPU-tid: %sFörvald INTERNAL klient-konstruktorFörvald lagringstjänst: %sFörvald mäklare (%s) är inte tillgänglig. När %s används mÃ¥ste en mäklare anges explicit (alternativ -b).Förvald klocktid: %sFörval: %sDelegateCredentialsInit misslyckadesDelegateProxy misslyckadesDelegerad referens frÃ¥n delegeringstjänst: %sDelegerad referens-identitet: %sDelegerings-ID: %sDelegeringsauktorisering misslyckadesDelegeringsauktorisering lyckadesDelegering-getProxyReq-begäran misslyckadesDelegeringshanteraren har ej ställts inDelegeringshanteraren med delegeringsmottagarroll slutarDelegeringshanterare med delegeringsmottagarroll börjar behandlaDelegeringshanteraren med delegeringssändarroll börjar behandlaDelegering-putProxy-begäran misslyckadesDelegeringsroll stöds inte: %sDelegeringstjänst: %sDelegering till ARCs delegeringstjänst misslyckadesDelegering till gridsites delegeringstjänst misslyckadesDelegeringstyp stöds inte: %sDelegationStore: PeriodicCheckConsumers misslyckades med att ta bort gammal delegering %s - %sDelegationStore: PeriodicCheckConsumers misslyckades med att Ã¥teruppta iteratorDelegationStore: TouchConsumer misslyckades med att skapa fil %sBorttagningfelBorttagen men har fortfarande platser pÃ¥ %sLeverans mottog ny DTR %s med källa: %s, destination: %sLeveranstjänst pÃ¥ %s kan kopiera frÃ¥n %sLeveranstjänst pÃ¥ %s kan kopiera till %sDestinations-URL saknasDestinations-URL stöds inte: %sDestinations-URL är inte giltig: %sDestinationsfil finns i cacheDestination är inte en giltig URLDestination är inte indextjänst, hoppar över replikaregistreringDestination är inte redo, kommer att vänta %u sekunderDestination: %sFörstör handtagDestruktor med dlclose (%s)Katalog %s tillÃ¥ten pÃ¥ tjänst %sKatalog %s borttagen framgÃ¥ngsriktKatalog %s som ska lagra bokföringsdatabasen har skapats.Kataloglistning MisslyckadesKatalog med betrodda CA har inte angivits eller kan inte hittas; Använder nuvarande katalog som CA-katalogKatalogstorleken är större än %i filer, kommer att behöva anropa flera gÃ¥ngerKatalogstorleken är för stor för att lista i ett anrop, kommer att behöva anropa flera gÃ¥ngerKatalog: %sDisconnect: Avbrytande avbröts pÃ¥ grund av timeout efter %d msDisconnect: Nedstängning avbröts pÃ¥ grund av timeout efter %d msDisconnect: Data-nedstängning avbröts pÃ¥ grund av timeout efter %d msDisconnect: Misslyckades med att avsluta - ignorerar: %sDisconnect: Misslyckades med att stänga ned - 
ignorerar: %sDisconnect: Misslyckades med att förstöra handtag: %s. Kan inte hantera en sÃ¥dan situation.Disconnect: Misslyckades med att avsluta - ignorerar: %sDisconnect: Avslutande avbröts pÃ¥ grund av timeout efter %d msDisconnect: globus-handtag har fastnatDisconnect: handtag har förstörts.Disconnect: väntar pÃ¥ att globus-handtag ska lugna ned sigSortera med användarskapad python-mäklareStöder ej framtida reserveringStöder ej massinsändningStöder ej preemptionGör en CREAM-begäranGör en EMI-begäranLaddar ner jobb: %sDriftstopp slutar: %sDriftstopp börjar: %sVisning av jobbeskrivning avbruten: kan inte ladda in mäklare %sDuplicerad replika hittad i LFC: %sEACCES-fel vid öppnande av lÃ¥sfil %s: %sECDH-parametrar tillämpadeEEXIST: DB_CREATE och DB_EXCL angavs och databasen existerar.EINVALGenerering av EMI-begäran misslyckades: %sEMIES:CancelActivity: jobb %s - %sEMIES:CreateActivity avslutades framgÃ¥ngsriktEMIES:CreateActivity: maxgräns för totalt antal jobb nÃ¥ddEMIES:CreateActivity: hittade ingen jobbeskrivningEMIES:CreateActivity: begäran = %sEMIES:CreateActivity: svar = %sEMIES:CreateActivity: för mÃ¥nga aktiva beskrivningarEMIES:GetActivityInfo: jobb %s - misslyckades med att hämta GLUE2-informationEMIES:GetActivityStatus: jobb %s - %sEMIES:NotifyService: jobb %s - %sEMIES:PauseActivity: jobb %s - %sEMIES:RestartActivity: jobb %s - %sEMIES:ResumeActivity: jobb %s - %sEMIES:WipeActivity: jobb %s - %sENOENT: Filen eller katalogen existerar inte, eller en icke-existerande re_source-fil angavs.EPSV misslyckadesEPSV misslyckades: %sFel: %sFel: Visning av jobbeskrivning avbröts eftersom inga lämpliga resurser hittades för test-jobbetFel: Misslyckades med att hämta informationFel: Misslyckades med att hämta information frÃ¥n följande ändpunkter:Fel: Misslyckades med att skriva jobbinformation till fil (%s)Fel: Jobbinsändning avbröts eftersom inga resurser returnerade nÃ¥gon informationFel: En eller flera jobbeskrivningar sändes inte in.Fel: Testet avbröts eftersom inga lämpliga resurser hittades för test-jobbetFel: Kunde inte ladda in mäklare %sFel: VOMS-inställningsfil %s innehÃ¥ller för lÃ¥ng(a) rad(er). Max längd som stöds är %i tecken.Fel: VOMS-inställningsfil %s innehÃ¥ller för mÃ¥nga rader. Max antal som stöds är %i.Fel: VOMS-inställningsrad innehÃ¥ller för mÃ¥nga token. Förväntade 5 eller 6. Raden är: %sFel: misslyckades med att läsa fil %s vid skanning av VOMS-inställningar.Fel: filträd är för djupt vid skanning av VOMS-inställningar. 
Max tillÃ¥ten nestning är %i.ES:CreateActivity: Misslyckades med att skapa nytt jobb: %sEchoService (python) 'Process' anropadEchoService (python) konstruktor anropadEchoService (python) destruktor anropadEchoService (python) fick: %s EchoService (python) har prefix %(prefix)s och suffix %(suffix)sEchoService (python) request_namespace: %sEchoService (python) trÃ¥dtest startarEchoService (python) trÃ¥dtest, iteration %(iteration)s %(status)sElementet "%s" i profilen ignoreras: värdet pÃ¥ "inidefaultvalue"-attributet kan inte anges när "inisections"- and "initag"-attributen inte angivits.Elementet "%s" i profilen ignoreras: värdet pÃ¥ "inisections"-attributet kan inte vara en tom sträng.Elementet "%s" i profilen ignoreras: värdet pÃ¥ "initag"-attributet kan inte vara en tom sträng.Elementet "%s" i profilen ignoreras: värdet pÃ¥ "initype"-attributet kan inte vara en tom sträng.Elementvalidering enligt GLUE2-schema misslyckades: %sTomt filnamn returnerat frÃ¥n FileCacheTom indatanyttolastTom jobbeskrivnings-källsträngTom nyttolast!Tom strängKrypterad: %sHittade inte slutet pÃ¥ kommentarHittade inte slutet pÃ¥ sträng i dubbla citatteckenHittade inte slutet pÃ¥ sträng i enkla citatteckenHittade inte slutet pÃ¥ sträng i användardefinierade citattecken (%s)Ändpunktsinformation:Post i EGIIS saknar ett eller flera av attributen 'Mds-Service-type', 'Mds-Service-hn', 'Mds-Service-port' och/eller 'Mds-Service-Ldap-suffix'Ã…tkomstfel för cachefil %s: %sFel när kommunikationsgränssnitt lades till i %s. Kanske kör redan en annan instans av A-REX.Fel när kommunikationsgränssnitt lades till i %s. Kanske är Ã¥tkomsträttigheter inte lämpliga.Fel vid skapande av cacheFel vid skapande av cache. Gamle lÃ¥s kan finnas kvar.Fel vid skapande av katalog %s: %sFel vid skapandet av lÃ¥sfil %s: %sFel vid skapande av nödvändiga kataloger för %sFel vid skapande av nödvändiga kataloger: %sFel vid skapandet av temporär fil %s: %sFel upptäckt när denna AC tolkadesFel eftersom den tillhandahÃ¥llna referensens livstid har gÃ¥tt utFel under filvalidering. Kan inte göra stat pÃ¥ fil %s: %sFel under filvalidering. Lokal filstorlek %llu stämmer inte överens med källans filstorlek %llu för fil %sFel vid utvärdering av profilFel frÃ¥n BDB: %sFel frÃ¥n BDB: %s: %sFel frÃ¥n SQLite: %sFel frÃ¥n SQLite: %s: %sFel vid hämtning av information frÃ¥n statvfs för sökväg %s: %sFel vid erhÃ¥llande av fillista (i list)Fel vid cacheprocessering, kommer att försöka igen utan cachningFel i cachningsprocedurFel i lÃ¥sfil %s, trots att länkning inte returnerade ett felFel vid initiering av X509-lagerFel vid initiering av delegeringsdatabas i %s. Kanske Ã¥tkomsträttigheter inte är lämpliga. 
Returnerat fel är: %s.Fel vid länkning av cachefil till %s.Fel vid länkning av temporär fil %s till lÃ¥sfil %s: %sFel vid listning av lÃ¥sfil %s: %sFel vid inladdning av genererade inställningarFel vid uppslagning av attribut för cachemetafil %s: %sFel vid uppslagning av spacetoken som matchar beskrivning %sFelnummer i lager-kontext: %iFel vid öppnande av bokföringsdatabasFel vid öppnande av lÃ¥sfil %s i initial check: %sFel vid öppnande av metafil %sFel vid öppnande av metafil för skrivning: %sFel vid tolkning av det internt tilldelade executables-attributet.Fel vid pingning av leveranstjänst pÃ¥ %s: %s: %sFel vid läsning av information frÃ¥n fil %s:%sFel vid läsning av lÃ¥sfil %s: %sFel vid läsning av metafil %s: %sFel vid registrering av replika, hoppar till slutet av datastagingFel vid borttagande av cachefil %s: %sFel vid byte av UIDFel vid utmatning av utdatanyttolastFel när publik nyckel extraheras frÃ¥n begäranFel vid inladdning av tilläggsinställningsfilen: %sFel vid inladdning av tilläggsinställningsfilen: %s pÃ¥ rad: %dFel vid läsande av katalog %s: %sFel vid läsning av filFel med cacheinställningarFel med cacheinställningar: %sFormatteringsfel i lÃ¥sfil %sFel med hjärtslagsfil: %sFel under destinationens efter-överförings-hantering: %sFel med källfil, hoppar till nästa replikaFel vid skrivning av raw certifikatFel vid skrivning av SRM-infofil %sFel vid skrivning till lÃ¥sfil %s: %sFel: Tjänsten returnerade en gräns högre än eller lika med nuvarande gräns (nuvarande: %d; returnerad: %d)Fel: Kan inte öppna policyfil: %sFel: misslyckades att sätta hanterare för SIGCHLDFel: misslyckades med att sätta hanterare for SIGTERMFel: ingen LDAP-förfrÃ¥gan pÃ¥börjad till %sFel: policy-plats: %s är inte en vanlig filErrorDescriptionFörväntad medelväntetid: %sFörväntad värsta väntetid: %sUtvärderare stöder inte laddningsbara kombinerande algoritmerUtvärderare stöder inte den angivna kombinerande algoritmen - %sUtvärderare för Arc-PDP laddades inteUtvärderare för GACL-PDP laddades inteUtvärderare för XACML-PDP laddades inteExempelinställningar (%s) skapades inte.Undantag under försök att starta extern process: %sFör mycket data mottaget när filÃ¥tkomst kontrolleradesExkluderar replika %s som matchar mönster !%sExekveringstarget pÃ¥ beräkningstjänst: %sExekveringsmiljö stöder inte inkommande förbindelserExekveringsmiljö stöder inte utgÃ¥ende förbindelserExekveringsmiljö är en fysisk maskinExekveringsmiljö är en virtuell maskinExekveringsmiljö stöder inkommande förbindelserExekveringsmiljö stöder utgÃ¥ende förbindelserExecutionTarget-klass är inte ett objektAvslutarAvslutar generator-trÃ¥dAvslutar jobbprocesseringstrÃ¥dSaknar kommando bland argumentenSaknar kommando och URLFörväntade kommando modul-namn bland argumenten<Förväntade kommando modul-sökväg bland argumentenFörväntade att modul, kommando och URL tillhandahÃ¥llitsSaknar URL bland argumentenExtern begäran om uppmärksamhet %sExtraherade smeknamn %s frÃ¥n referenser att använda som Rucio-kontoExtraherare[%s] (%s): %s = %sExtraherare[%s] (%s): %s innehÃ¥ller %sFATAL, ERROR, WARNING, INFO, VERBOSE eller DEBUGFTP-jobbkontroll: Kan inte tolka värd och/eller port i EPSV/PASV-svar: %sFTP-jobbkontroll: Datakanal: %d.%d.%d.%d:%dFTP-jobbkontroll: Datakanal: [%s]:%dFTP-jobbkontroll: Dataförbindelse för skrivning misslyckades: %sFTP-jobbkontroll: Dataförbindelse för skrivning avbröts pÃ¥ grund av timeout efter %d msFTP-jobbkontroll: Dataskrivning misslyckades: %sFTP-jobbkontroll: Dataskrivning avbröts pÃ¥ grund av timeout efter %d 
msFTP-jobbkontroll: Misslyckades med att sända DCAU-kommandoFTP-jobbkontroll: Misslyckades med att sända EPSV- och PASV-kommandonFTP-jobbkontroll: Misslyckades med att sända STOR-kommando: %sFTP-jobbkontroll: Misslyckades med att sända TYPE-kommandoFTP-jobbkontroll: Misslyckades med att tillämpa lokal adress pÃ¥ dataförbindelse: %sFTP-jobbkontroll: Lokal port misslyckades: %sFTP-jobbkontroll: Lokal typ misslyckades: %sFTP-jobbkontroll: Tolkning av serverns EPSV-svar misslyckades: %sFTP-jobbkontroll: Tolkning av port i serverns EPSV-svar misslyckades: %sFTP-jobbkontroll: Tolkning av serverns PASV-svar misslyckades: %sMisslyckades med att tilldela värdnamnstilläggMisslyckades med att allokera minne för handtagMisslyckades med autentiseringMisslyckades med autentisering: %sMisslyckades med att kontrollera databas (%s)Misslyckades med att kontrollera källreplikaMisslyckades med att kontrollera källreplika %s: %sMisslyckades med att kontrollera källreplikor: %sMisslyckades med att rensa upp destination %sMisslyckades med att initiera inställningarMisslyckades med att initiera inställningar.Misslyckades med att ansluta till %s:%dMisslyckades med att förstöra handtag: %s. Kan inte hantera en sÃ¥dan situation.Misslyckades med att ladda ned %s till %sMisslyckades med att ladda ned %s till %s, destinationen existerar redanMisslyckades med att ladda ned %s till %s, kunde inte ta bort existerande destinationMisslyckades i globus_cond_initMisslyckades i globus_ftp_control_handle_initMisslyckades i globus_mutex_initMisslyckades med att länka cachefil till %sMisslyckades med att lokalisera referenserMisslyckades med att slÃ¥ upp attribut för cachad fil: %sMisslyckades med att förbereda jobbeskrivningMisslyckades med att förbereda jobbeskrivning för target-resurserMisslyckades med att processera A-REX inställningarMisslyckades med att processera auktoriseringsgrupp %sMisslyckades med att behandla användarmappningskommando: %s %sMisslyckades med att läsa inställningarMisslyckades med att läsa kontrollkatalog: %sMisslyckades med att läsa kontrollkatalog: %s: %sMisslyckades med att läsa dataMisslyckades med att läsa fillistaMisslyckades med att läsa lokal informationMisslyckades med att hämta information om jobb: %sMisslyckades med att hämta jobbeskrivning för jobb: %sMisslyckades med att köra e-postsändareMisslyckades med att sända CWD-kommando för att förnya referenserMisslyckades med att skicka CWD-kommando för att avbryta jobbMisslyckades med att sända CWD-kommando för att ta bort jobbMisslyckades med att skicka DELE-kommando för att avbryta jobbMisslyckades med att sända RMD-kommando för att ta bort jobbMisslyckades med ange filägare: %sMisslyckades med att skicka in jobbeskrivningMisslyckades med att avbryta dataförbindelsen - ignorerar och Ã¥terställerMisslyckades med att avbryta överföring av ftp-fil: %sMisslyckades med att acceptera SSL-förbindelseMisslyckades med att acceptera förbindelsebegäranMisslyckades med att acceptera delegeringMisslyckades med att acceptera ny fil/destinationMisslyckades med att komma Ã¥t proxy för givet jobb-id %s pÃ¥ %sMisslyckades med att förvärva A-REX inställningarMisslyckades med att erhÃ¥lla delegeringskontextMisslyckades med att fÃ¥ lÃ¥s pÃ¥ cachemetafil %sMisslyckades med erhÃ¥lla lÃ¥s för fil %sMisslyckades med att erhÃ¥lla källa: %sMisslyckades med att aktivera jobbprocesseringsobjekt, avslutar grid-manager-trÃ¥dMisslyckades med att lägga till '%s' URL (gränssnittstyp %s) till bokföringsdatabasens Endpoints-tabellMisslyckades med att lägga till '%s' till 
bokföringsdatabasen %s-tabellMisslyckades med att lägga till Independent-OIDMisslyckades med att lägga till RFC-proxy-OIDMisslyckades med att lägga till VOMS-AC-tillägg. Din proxy kan vara ofullständig.Misslyckades med att lägga till VOMS-AC-sekvens-OIDMisslyckades med att lägga till anyLanguage-OIDMisslyckades med att lägga till certifikat och nyckelMisslyckades med att lägga till certifikat till token eller databasMisslyckades med att lägga till tillägg till referenstilläggMisslyckades med att lägga till inheritAll-OIDMisslyckades med att lägga till utfärdarens tillägg till proxynMisslyckades med att lägga till nyckelanvändningstilläggMisslyckades med att lägga till proxycertifikatsinformationstilläggMisslyckades med att lägga till VOMS-AC-tilläggMisslyckades med att allokera certifikat-tillitMisslyckades med att allokera minne för certifikatdataMisslyckades med allokera minne för bufferMisslyckades med att allokera minne för certifikatsubjekt vid policymatchning.Misslyckades med allokera p12-kontextMisslyckades med att tillämpa DH-parametrarMisslyckades med att tillämpa ECDH-parametrarMisslyckades med att tillämpa lokal adress pÃ¥ dataförbindelseMisslyckades med att autentisera SAML-token inuti inkommande SOAPMisslyckades med att autentisera användarnamnstoken inuti inkommande SOAPMisslyckades med att autentisera X509-token inuti inkommande SOAPMisslyckades med att autentisera till PKCS11 slot %sMisslyckades med att autentisera till nyckeldatabasMisslyckades med att autentisera till token %sMisslyckades med att binda socket för %s:%s(%s): %sMisslyckades med att binda socket för TCP-port %s(%s): %sMisslyckades med att binda socket(%s): %sMisslyckades med att binda till ldap-server: %sMisslyckades med att anropa PORT_NewArenaMisslyckades med att avbryta överföringsbegäran: %sMisslyckades med att avbryta: %sMisslyckades med att avbryta: inget SOAP-svarMisslyckades med att konvertera till PayloadSOAP frÃ¥n inkommande nyttolastMisslyckades med att konvertera till PayloadSOAP frÃ¥n utgÃ¥ende nyttolastMisslyckades med att ändra mappningsstack-behandlingspolicy i: %s = %sMisslyckades med att ändra ägare av symbolisk länk %s till %iMisslyckades med att ändra ägare pÃ¥ temporär proxy pÃ¥ %s till %i:%i: %sMisslyckades med att ändra Ã¥tkomsträttigheter pÃ¥ %s: %sMisslyckades med att ändra Ã¥tkomsträttigheter eller ägare för hÃ¥rd länk %s: %sMisslyckades med att kontrollera %sMisslyckades med att ta bort fil %s: %sMisslyckades med att stänga, tar bort klientMisslyckades med att kommunicera med delegeringstjänst.Misslyckades med att slutföra skrivning till destinationMisslyckades med att koppla upp för att förnya referenserMisslyckades med att koppla upp för att avbryta jobbMisslyckades med att koppla upp för att ta bort jobbMisslyckades med att koppla upp mot %s(%s):%iMisslyckades med att koppla upp mot %s(%s):%i - %sMisslyckades med att ansluta till %s:%dMisslyckades med att kontakta PDP-server: %sMisslyckades med att konvertera ASCII till DERMisslyckades med att konvertera EVP_PKEY till PKCS8Misslyckades med att konvertera GSI-referens till GSS-referens (major: %d, minor: %d)Misslyckades med att konvertera GSI-referens till GSS-referens (major: %d, minor: %d):%s:%sMisslyckades med att konvertera PrivateKeyInfo till EVP_PKEYMisslyckades med att konvertera säkerhetsinformation till ARC-policyMisslyckades med att konvertera säkerhetsinformation till ARC-begäranMisslyckades med att konvertera säkerhetsinformation till XACML-begäranMisslyckades med att kopiera %s: %sMisslyckades med att kopiera fil %s 
till %s: %sMisslyckades med att kopiera indatafil: %s till sökväg: %sMisslyckades med att skapa DTR-dumpningstrÃ¥dMisslyckades med att skapa OTokens säkerhetsattributMisslyckades med att skapa OpenSSL-objekt %s %s - %u %sMisslyckades med att skapa SOAP-behÃ¥llareMisslyckades med att skapa X509-certifikat med NSSMisslyckades med att skapa cachekataloger för %sMisslyckades med att skapa cachekatalog för fil %s: %sMisslyckades med att skapa cachemetafil %sMisslyckades med att skapa certifikatbegäranMisslyckades med att skapa kontrollkatalog %sMisslyckades med att skapa katalogMisslyckades med att skapa katalog %sMisslyckades med att skapa katalog %s! Hoppar över jobb.Misslyckades med att skapa katalog %s: %s"Misslyckades med att skapa export-kontextMisslyckades med att skapa fil %s: %sMisslyckades med att skapa fil i %sMisslyckades med att skapa hÃ¥rd länk frÃ¥n %s till %s: %sMisslyckades med att skapa indata-SOAP-behÃ¥llareMisslyckades med att skapa nyckel- eller certifikat-safeMisslyckades med att skapa trÃ¥d för ldap bind (%s)Misslyckades med att skapa länk: %s. Kommer inte att använda mappad URLMisslyckades med att skapa certifikatkedjelängdMisslyckades med att skapa policy-sprÃ¥kMisslyckades med att skapa sessionskatalog %sMisslyckades med att skapa socket för förbindelse till %s(%s):%d - %sMisslyckades med att skapa socket för att lyssna pÃ¥ %s:%s(%s): %sMisslyckades med att skapa socket för att lyssna pÃ¥ TCP-port %s(%s): %sMisslyckades med att skapa socket(%s): %sMisslyckades med att skapa subjektnamnMisslyckades med att skapa symbolisk länk frÃ¥n %s till %s: %sMisslyckades med att skapa temporär proxy pÃ¥ %s: %sMisslyckades med att skapa trÃ¥dMisslyckades med att skapa xrootd-kopierings-jobb: %sMisslyckades med att skapa/öppna fil %s: %sMisslyckades med att avkoda tillitssträngMisslyckades med att delegera referenser till server - %sMisslyckades med att delegera referenser till server - hittade inget delegeringsgränssnittMisslyckades med att ta bort %sMisslyckades med att ta bort %s, men kommer fortfarande att försöka kopieraMisslyckades med att ta bort certifikatMisslyckades med att ta bort leverans-objekt eller borttagning avbröts pÃ¥ grund av timeoutMisslyckades med att ta bort destination, nytt försök kan misslyckasMisslyckades med att ta bort logisk filMisslyckades med att ta bort metainformationMisslyckades med att ta bort fysisk filMisslyckades med att ta bort privat nyckelMisslyckades med att ta bort privat nyckel och certifikatMisslyckades med att ta bort replika %s: %sMisslyckades med att ta bort gammal cachefil %s: %sMisslyckades med koppla ner efter att ha förnyat referenserMisslyckades med att koppla ner efter att ha avbrutit jobbMisslyckades med koppla ner efter att ha tagit bort jobbMisslyckades med att duplicera X509-strukturMisslyckades med att duplicera tilläggMisslyckades med att aktivera IPv6Misslyckades med att aktivera IPv6: %sMisslyckades med att koda PKCS12Misslyckades med att koda certifikatMisslyckades med att koda certifikatbegäran med DER-formatMisslyckades med att etablera SSL-förbindelseMisslyckades med att etablera förbindelse: %sMisslyckades med att exportera X509-certifikat frÃ¥n NSS-databasMisslyckades med att exportera privat nyckelMisslyckades med att extrahera VOMS-smeknamn frÃ¥n proxyMisslyckades med att extrahera referensinformationMisslyckades med att hämta data frÃ¥n %s bokföringsdatabastabellMisslyckades med att hämta data frÃ¥n bokföringsdatabasens Endpoints-tabellMisslyckades med att slutföra läsning frÃ¥n källaMisslyckades med att slutföra 
skrivning till destinationMisslyckades med att hitta CA-certifikatMisslyckades med att hitta certifikat och/eller privat nyckel eller filer har olämpliga Ã¥tkomsträttigheter eller ägare.Misslyckades med att hitta certifikat med smeknamn: %sMisslyckades med att hitta tilläggMisslyckades med att hitta utfärdarcertifikat för proxycertifikatMisslyckades med att hitta metadatainformation för %s för att bestämma borttagande av fil eller katalogMisslyckades med att generera EC-nyckelMisslyckades med att generera SAML-token för utgÃ¥ende SOAPMisslyckades med att skapa användarnamnstoken för utgÃ¥ende SOAPMisslyckades med att skapa X509-token för utgÃ¥ende SOAPMisslyckades med att generera X509-begäran med NSSMisslyckades med att generera publik/privat nyckelparMisslyckades med att fÃ¥ DN-information frÃ¥n .local-fil för jobb %sMisslyckades med att erhÃ¥lla TCP-socket-alternativ för förbindelse till %s(%s):%d - timeout kommer inte att fungera - %sMisslyckades med att erhÃ¥lla certifikat frÃ¥n certifikatfilMisslyckades med att erhÃ¥lla referensMisslyckades med att hämta ftp-filMisslyckades med att initiera GFAL2-parameter-handtag: %sMisslyckades med att initiera ny GFAL2-kontext: %sMisslyckades med att fÃ¥ medellast: %sMisslyckades med att erhÃ¥lla privat nyckelMisslyckades med att erhÃ¥lla publik nyckelMisslyckades med att erhÃ¥lla publik nyckel frÃ¥n RSA-objektMisslyckades med att erhÃ¥lla publik nyckel frÃ¥n X509-objektMisslyckades med att identifiera grid-managerns inställningsfilMisslyckades med att importera X509-certifikat till NSS-databasMisslyckades med att importera certifikat frÃ¥n fil: %sMisslyckades med att importera privat nyckelMisslyckades med att importera privat nyckel frÃ¥n fil: %sMisslyckades med att initiera LCASMisslyckades med att initiera LCMAPSMisslyckades med att initiera OpenSSL-biblioteketMisslyckades med att initiera PKCS12-fil: %sMisslyckades med att initiera X509-strukturMisslyckades med att initiera bokföringsdatabasMisslyckades med att initiera tilläggsmedlem för referensMisslyckades med att initiera Pythons huvudtrÃ¥dMisslyckades med att initiera referensinställningarnaMisslyckades med att initiera cacheMisslyckades med att initiera klientförbindelseMisslyckades med att initiera delegeringsreferenserMisslyckades med att sätta in AAR i databasen för jobb %sMisslyckades med att begränsa socket till IPv6 pÃ¥ %s:%s - kan orsaka fel för IPv4 pÃ¥ samma portMisslyckades med att begränsa socket till IPv6 pÃ¥ TCP-port %s - kan orsaka fel för IPv4 pÃ¥ samma portMisslyckades med att begränsa socket till IPv6: %sMisslyckades med att lyssna pÃ¥ %s:%s(%s): %sMisslyckades med att lyssna pÃ¥ TCP-port %s(%s): %sMisslyckades med att lyssna pÃ¥ socket(%s): %sMisslyckades med att ladda in klientinställningarMisslyckades med att hämta tilläggssektion: %sMisslyckades med att ladda in grid-managerns inställningsfilMisslyckades med att ladda in grid-managerns inställningsfil frÃ¥n %sMisslyckades med att ladda in grid-managerns inställningsfilMisslyckades med att ladda in plugin för URL %sMisslyckades med ladda in policyutvärderare för policy för jobb %sMisslyckades med att ladda in privat nyckelMisslyckades med att ladda in tjänsteinställningarMisslyckades med att ladda in tjänsteinställningar frÃ¥n nÃ¥gon förvald inställningsfilMisslyckades med att ladda in tjänsteinställningar frÃ¥n fil %sMisslyckades med att ladda in tjänstesidans MCCerMisslyckades med att lÃ¥sa arccredential-biblioteket i minnetMisslyckades med lÃ¥sa arccrypto-biblioteket i minnetMisslyckades med att lÃ¥sa delegerade 
referenser: %sMisslyckades med att skapa symbolisk länk %s till %s : %sMisslyckades med att flytta %s till %s: %sMisslyckades med att flytta fil %s till %sMisslyckades med ny arenaMisslyckades med att meddela tjänstenMisslyckades med att erhÃ¥lla OpenSSL-identifierare för %sMisslyckades med att erhÃ¥lla antal överförda byte: %sMisslyckades med att erhÃ¥lla delegeringslÃ¥s för att ta bort föräldralösa lÃ¥sMisslyckades med att erhÃ¥lla information om filMisslyckades med att erhÃ¥lla listning frÃ¥n FTP: %sMisslyckades med att erhÃ¥lla lokal adress för %s:%s - %sMisslyckades med att erhÃ¥lla lokal adress för port %s - %sMisslyckades med att erhÃ¥lla lokal adress: %sMisslyckades med att erhÃ¥lla lÃ¥s pÃ¥ cachefil %sMisslyckades med att erhÃ¥lla egen adress: %sMisslyckades med att erhÃ¥lla resursbeskrivning: %sMisslyckades med att erhÃ¥lla stat frÃ¥n FTP: %sMisslyckades med att erhÃ¥lla jobbets tillstÃ¥ndMisslyckades med att erhÃ¥lla giltig stage-in-URL för indatafilerMisslyckades med att öppna %s för läsning: %sMisslyckades med att öppna %s, försöker skapa föräldrakatalogerMisslyckades med att öppna datakanalMisslyckades med att öppna katalog %s: %sMisslyckades med att öppna fil %sMisslyckades med att öppna fil med DH-parametrar för läsningMisslyckades med att öppna hjärtslagsfil %sMisslyckades med att öppna indata-certifikatfil %sMisslyckades med att öppna loggfil %sMisslyckades med att öppna loggfil: %sMisslyckades med att öppna utdatafil '%s'Misslyckades med att öppna p12-filMisslyckades med att öppna stdio-kanal %dMisslyckades med att öppna stdio-kanal %sMisslyckades med att skriva ut certifikatbegäran i ASCII-formatMisslyckades med att skriva ut certifikatbegäran i DER-formatMisslyckades med att tolka ACIX-svar: %sMisslyckades med att tolka HTTP-huvudMisslyckades med att tolka Rucio-svar: %sMisslyckades med att tolka SAML-token frÃ¥n inkommande SOAPMisslyckades med att tolka användarnamnstoken frÃ¥n inkommande SOAPMisslyckades med att tolka VOMS-kommando: %sMisslyckades med att tolka X509-token frÃ¥n inkommande SOAPMisslyckades med att tolka certifikatbegäran frÃ¥n CSR-fil %sMisslyckades med att tolka kommandoradsalternativMisslyckades med att tolka inställningsfil %sMisslyckades med att tolka fjärradress %sMisslyckades med att tolka begärd VOMS-livstid: %sMisslyckades med att tolka begärt VOMS-serverportnummer: %sMisslyckades med att tolka användarpolicy för jobb %sMisslyckades med att efterregistrera destination: %sMisslyckades med att förregistrera destination: %sMisslyckades med förallokera utrymme for %sMisslyckades med att förbereda destinationMisslyckades med att förbereda destination: %sMisslyckades med att förbereda jobbeskrivningMisslyckades med att förbereda jobbeskrivning för target-resurser.Misslyckades med att förbereda jobbeskrivning.Misslyckades med att förbereda källaMisslyckades med att förbereda källa: %sMisslyckades med att förregistrera destination: %sMisslyckades med att processera AREX-inställningsfil %sMisslyckades med att behandla VOMS-inställningar eller hittade inga lämpliga inställningsrader.Misslyckades med att processera inställningar i %sMisslyckades med att behandla jobb: %sMisslyckades med att behandla jobb: %s - %s %sMisslyckades med att behandla jobb - misslyckades med att tolka svarMisslyckades med att behandla jobb - felaktigt svar: %uMisslyckades med att processera säkerhetsattribut i TLS-MCC för inkommande meddelandeMisslyckades med att frÃ¥ga efter AAR-databas-ID för jobb %sMisslyckades med frÃ¥ga ACIX: %sMisslyckades med att frÃ¥ga om tillstÃ¥nd: 
%sMisslyckades med att läsa attribut %x frÃ¥n privat nyckel.Misslyckades med att läsa cachemetafil %sMisslyckades med att läsa certifikatfil: %sMisslyckades med att läsa data frÃ¥n indatabufferMisslyckades med att läsa databasschemafil pÃ¥ %sMisslyckades med att läsa fil %sMisslyckades med att läsa fil med DH-parametrarMisslyckades med att läsa indata-certifikatfilMisslyckades med att läsa jobbets ACL för jobb %s frÃ¥n %sMisslyckades med att läsa jobbets lokala beskrivning för jobb %s frÃ¥n %sMisslyckades med att läsa objekt: %s: %sMisslyckades med att läsa privat-nyckelfil: %sMisslyckades med att läsa proxy fil: %sMisslyckades med att läsa begäran frÃ¥n en filMisslyckades med att läsa begäran frÃ¥n en strängMisslyckades med att känna igen egen adresstyp (IPv4 eller IPv6) - %uMisslyckades med att registrera buffrarMisslyckades med att registrera destinationsreplika: %sMisslyckades med att registrera ny fil/destination: %sMisslyckades med att registrera plugin för tillstÃ¥nd %sMisslyckades med att frigöra GSS-referens (major: %d, minor: %d):%s:%sMisslyckades med att frigöra slutförd begäranMisslyckades med att frigöra lÃ¥s pÃ¥ cachefil %sMisslyckades med att frigöra lÃ¥s pÃ¥ fil %sMisslyckades med att ta bort .meta-fil %s: %sMisslyckades med att ta bort alla instanserMisslyckades med att ta bort cache per-jobb-katalog %s: %sMisslyckades med att ta bort existerande hÃ¥rd länk pÃ¥ %s: %sMisslyckades med att ta bort existerande symbolisk länk pÃ¥ %s: %sMisslyckades med att ta bort fil %s: %sMisslyckades med att ta bort instansMisslyckades med att ta bort lÃ¥s pÃ¥ %s. Manuell intervention kan behövasMisslyckades med att ta bort gammal lÃ¥sfil %s: %sMisslyckades med att ta bort temporär proxy %s: %sMisslyckades med byta namn pÃ¥ URLMisslyckades med att förnya proxyMisslyckades med att slÃ¥ upp %sMisslyckades med att slÃ¥ upp %s (%s)Misslyckades med att slÃ¥ upp destination: %sMisslyckades med att slÃ¥ upp källa: %sMisslyckades med att hämta tillämpningsdata frÃ¥n OpenSSLMisslyckades med att hämta länk till TLS-ström. 
Ytterligare policymatchning hoppas över.Misslyckades med att hämta privat nyckel för utfärdareMisslyckades med att köra Grid-Manager-trÃ¥dMisslyckades med att köra kommando: %sMisslyckades med att köra inställningstolk pÃ¥ %s.Misslyckades med att köra extern pluginMisslyckades med att köra extern plugin: %sMisslyckades med att sända begäran att avbryta: %sMisslyckades med att skicka innehÃ¥ll till bufferMisslyckades med att sätta GFAL2-monitor-callback: %sMisslyckades med att sätta GFAL2-överförings-timeout, använder förval: %sMisslyckades med att sätta INTERNAL ändpunktMisslyckades med att sätta in LFC-replika: %sMisslyckades med att sätta referenser för GridFTP-överföringMisslyckades med att sätta exekverbar bit pÃ¥ fil %sMisslyckades med att sätta exekverbar bit pÃ¥ fil %s: %sMisslyckades med att sätta skriv-över-option i GFAL2: %sMisslyckades med att sätta Ã¥tkomsträttigheter pÃ¥: %sMisslyckades med att sätta signeringsalgoritmMisslyckades med att ange publik nyckel för X509-objekt genom att använda publik nyckel frÃ¥n X509_REQMisslyckades med att sätta upp referensdelegering med %sMisslyckades med att stänga av SSL: %sMisslyckades med att signera kodad certifikatdataMisslyckades med att signera proxyMisslyckades med att signera certifikatbegäranMisslyckades med att signera proxycertifikatetMisslyckades med att staga file(er)Misslyckades med att starta arkiveringtrÃ¥dMisslyckades med att starta cacherensningsskriptMisslyckades med att pÃ¥börja certifikattilläggMisslyckades med att starta datastaging-trÃ¥darMisslyckades med att börja lyssna pÃ¥ nÃ¥gon adress för %s:%sMisslyckades med att börja lyssna pÃ¥ nÃ¥gon adress för %s:%s(IPv%s)Misslyckades med att starta ny DTR för %sMisslyckades med att starta ny trÃ¥d för monitorering av jobbegärningarMisslyckades med att starta ny trÃ¥d: cache kommer ej att rensasMisslyckades med att börja frÃ¥ga ändpunkten pÃ¥ %sMisslyckades med att börja frÃ¥ga ändpunkten pÃ¥ %s (kunde inte skapa under-trÃ¥d)Misslyckades med att pÃ¥börja läsning frÃ¥n källa: %sMisslyckades med att starta trÃ¥d för kommunikationMisslyckades med att starta trÃ¥d för att lyssnaMisslyckades med att starta tidtagar-trÃ¥d: timeout kommer inte att fungeraMisslyckades med att börja överföringsbegäran: %sMisslyckades med att pÃ¥börja skrivning till cacheMisslyckades med att pÃ¥börja skrivning till destination: %sMisslyckades med att göra stat pÃ¥ sessionskatalog %sMisslyckades med att göra stat pÃ¥ källa: %sMisslyckades med att lagra tillämpningsdataMisslyckades med att spara ftp-filMisslyckades med att sända in alla jobb.Misslyckades med att sända in alla jobb: %sMisslyckades med att sända in alla jobb: %s %sMisslyckades med att sända in alla jobb: %u %sMisslyckades med att sända in jobbMisslyckades med att sända in jobbeskrivning: %sMisslyckades med att sända in jobbeskrivning: EMI-ES-fel (%s, %s)Misslyckades med att sända in jobbeskrivning: Oväntat fel (%s)Misslyckades med att byta användar-id till %d/%dMisslyckades med att avsluta LCASMisslyckades med att avsluta LCMAPSMisslyckades med att överföra dataMisslyckades med att lÃ¥sa upp fil %s: %s. Manuell intervention kan behövasMisslyckades med att lÃ¥sa upp fil med lÃ¥s %s: %sMisslyckades med att avregistrera förregistrerad destination %s. Du kan behöva avregistrera den manuelltMisslyckades med att avregistrera förregistrerad destination %s: %s. Du kan behöva avregistrera den manuelltMisslyckades med att avregistrera förregistrerad lfn, du kan behöva avregistrera den manuelltMisslyckades med att avregistrera förregistrerad lfn. 
Du kan behöva avregistrera det manuelltMisslyckades med att avregistrera förregistrerad lfn. Du kan behöva avregistrera det manuellt: %sMisslyckades med att uppdatera AAR i databasen för jobb %sMisslyckades med att verifiera X509-token inuti inkommande SOAPMisslyckades med att verifiera begäranMisslyckades med att verifiera signaturen under Misslyckades med att verifiera signaturen under Misslyckades med att verifiera det signerade certifikatetMisslyckades med att vänta pÃ¥ att jobb ska tillÃ¥ta stage-inMisslyckades med att skriva lokal informationMisslyckades med att skriva RTE-information för jobb %sMisslyckades med att skriva auktoriserings-token-attribut för jobb %sMisslyckades med att skriva body till utdataströmMisslyckades med att skriva dataöverföringsinformation för jobb %sMisslyckades med att skriva händelseposter för jobb %sMisslyckades med att skriva header till utdataströmMisslyckades med att skriva jobbinformation till databas (%s)Misslyckades med att skriva objekt: %s: %sMisslyckades med att skriva begäran till en filMisslyckades med att skriva begäran till strängMisslyckades med att skriva det signerade EEC-certifikatet till en filMisslyckades med att skriva det signerade proxycertifikatet till en filMisslyckades med att skriva till lokal jobblista %sMisslyckades med att uppdatera klockslag pÃ¥ cachelÃ¥sfil %s för fil %s: %sMisslyckades med att ladda upp fil %s till %s: %sMisslyckades med att ladda upp lokala indatafilerMisslyckades med att ladda upp lokala indatafiler till %sMisslyckades med att avsluta läsning frÃ¥n källaMisslyckades med att avsluta skrivning till destinationMisslyckades under läsning frÃ¥n källaMisslyckades under överföring av dataMisslyckades under väntan pÃ¥ förbindelsebegäranMisslyckades under väntande pÃ¥ uppkoppling till %s(%s):%i - %sMisslyckades under skrivning till destinationMisslyckades med att skriva ACLMisslyckades med att skriva jobbeskrivningMisslyckades med att skriva lokal beskrivningMisslyckades med att skriva statusMisslyckades med att tolka svar frÃ¥n server - en del information kan vara felaktigMisslyckande: %sFeature ej implementeradFetch: svars-body: %sFetch: svarskod: %u %sHämtar resursbeskrivning frÃ¥n %sFilen %s är NEARLINE, kommer att göra begäran att bringa onlineFil %s är redan cachad pÃ¥ %s under en annan URL: %s - denna fil kommer ej att cachasFil %s är redan cachad pÃ¥ %s under en annan URL: %s - kommer ej att lägga till DN till cachad listaFil %s är cachad (%s) - kontrollerar Ã¥tkomsträttigheterFil %s borttagen framgÃ¥ngsriktFilen '%s' i 'executables'-attributet finns inte i 'inputfiles'-attributetFilen finns redan: %sFil kunde inte flyttas till tillstÃ¥nd DoneFil kunde inte flyttas till tillstÃ¥nd Running: %sBorttagande av fil misslyckades, försöker med borttagande av katalogBorttagning av fil misslyckades, försöker med borttagande av katalog för %sFilnedladdning misslyckades: %sFil kan cachas, kommer att kontrollera cacheFil hÃ¥ller pÃ¥ att cachas, kommer att vänta %i sFilen kan inte kommas Ã¥t %s: %sFilen kan inte kommas Ã¥t: %sFilen kan inte cachas, hoppar över cacheprocesseringFil kan inte cachas, begärdes att inte cachas eller inget cache tillgängligt, hoppar över cachekontrollFil är klar! 
TURL är %sFil är mindre än %llu bytes, kommer att använda lokal leveransFiltyp är inte tillgänglig, försöker med borttagande av filFileNode: operator= (%s <- %s) %lu <- %luFilePlugin: mer ladda ut än ladda inFilnamn returnerades inte i Rucio-svar: %sFiler associerade med begäran-token %s avbröts framgÃ¥ngsriktFiler associerade med begäran-token %s sparades framgÃ¥ngsriktFiler associerade med begäran-token %s frigjordes framgÃ¥ngsriktKopiering av filuppsättning till ett enstaka objekt stöds ej ännuFiluppsättningsregistrering stöds inte ännuHittar existerande destinationsreplikorFinishWriting: letar efter metadata: %sFinishWriting: erhÃ¥llen checksumma: %sAvslutades framgÃ¥ngsriktFörsta steget av registrering till indextjänst misslyckadesFörsta värdet i 'inputfiles'-attributet (filnamn) kan inte vara tomtFörsta värdet i 'inputfiles'-attributet (filnamn) kan inte vara tomtFör registrering mÃ¥ste källan vara en vanlig URL och destinationen en indexeringstjänstFör det första test-jobbet mÃ¥ste du ocksÃ¥ ange en körtid med alternativet -r (--runtime).TvÃ¥ngskontrollerar källa för cachad fil %sFramtvingar Ã¥ternedladdning av fil %sFork misslyckades: %sHittade %s %s (den hade redan laddats in)Hittade %s i cacheHittade DTR %s för fil %s kvarlämnad i överförande tillstÃ¥nd frÃ¥n tidigare körningHittade VOMS-AC-attribut: %sHittade ett register, kommer att frÃ¥ga det rekursivt: %sHittade existerande token för %s i Rucios token-cache vars giltighetstid gÃ¥r ut %sHittade ingen eller mer än en URL (%s) i ACIX-URL: %sHittade tjänsteändpunkt %s (typ %s)Hittade STARTED eller SUCCESSFUL ändpunkt (%s)Hittade suspenderad ändpunkt (%s)Hittade följande jobb:Hittade följande nya jobb:Hittade oväntad tom lÃ¥sfil %s. MÃ¥ste gÃ¥ tillbaka till acquire()Hittade oavslutade DTR-överföringar. Det är möjligt att en tidigare A-REX-process inte stängde ned pÃ¥ normal sättLediga slottar grupperade enligt tidsgräns (gräns: lediga slottar):Lediga slottar: %iHela strängen användes inte: %sFunktion : %sGACL-auktoriseringsbegäran: %sGET: id %s sökväg %sGenerera ny X509-begäranGenererad EMI-ES-target: %sGenererar %s-jobbeskrivningGenererar EMI-ES-destinationerGenererar ceID-prefix from värdnamn automatisktGenerator startadGeneriskt felHämta delegerad referens frÃ¥n delegeringstjänst: %sHämta frÃ¥n cache: Cachad fil är lÃ¥stHämta frÃ¥n cache: Fel med cacheinställningarHämta frÃ¥n cache: Fil inte i cacheHämta frÃ¥n cache: Ogiltig URL: %sHämta frÃ¥n cache: Söker i cache efter %sHämta frÃ¥n cache: kunde inte komma Ã¥t cachad fil: %sBegäran att hämta %s är fortfarande i kö, ska vänta %i sekunderGet: det finns inget jobb %s - %sHämtar nuvarande klockslag för BLAH-tolk-logg: %sHämtar delegeringsreferens frÃ¥n ARC delegeringstjänstGlobusförbindelsefelGlobusfel: %sGlobus-handtag har fastnatGlobus platsvariabelsubstitution stöds inte längre. 
Ange sökväg direkt.Grididentitet mappas till lokal identitet '%s'HEAD: id %s sökväg %sHER: %sHTTP-fel: %d %sHTTP-fel %u - %sHTTP med SAML2SSO-anrop misslyckadesHTTP:PUT %s: put fil %s: %sHandtag är i felaktigt tillstÃ¥nd %u/%uHar %i begärningar att processeraHar förbindelses: %i, max: %iHead: det finns inget jobb %s - %sHälsotillstÃ¥ndsinfo: %sHälsotillstÃ¥nd: %sHealthState för ExecutionTarget (%s) är inte OK (%s)Hjälpalternativ:Hjälpprocesstart misslyckades: %sHjälpprogram saknasHomogen resursID innehÃ¥ller förbjudet teckenID: %sINI-inställningsfil %s existerar inteINTERNALClient är inte initieradId= %s,Typ= %s,Utfärdare= %s,Värde= %sIdP returnerar ett felmeddelande: %sIdentitet är: %sIdentitetsnamn: %sIdentitet: %sOm proxy eller certifikat/nyckel existerar, kan du ange deras platser manuellt via miljövariablerna '%s'/'%s' eller '%s', eller attributen '%s'/'%s' eller '%s' i klientinställningsfilen (t.ex. '%s')Om du anger en policy mÃ¥ste du ocksÃ¥ ange ett policysprÃ¥kIgnorerar ändpunkt (%s), den är redan registrerad i insamlare.Ignorerar jobb (%s), redan försökt och kunde inte ladda in JobControllerPluginIgnorerar jobb (%s), jobbhanterings-URL är okändIgnorerar jobb (%s), jobbstatus-URL är okändIgnorerar jobb (%s), hanteringsgränssnittsnamnet är okäntIgnorerar jobb (%s), statusgränssnittsnamnet är okäntIgnorerar jobb (%s), kunde inte ladda in JobControllerPlugin gör %sIgnorerar jobb, jobb-id är tomtOgiltig URL - avslutande ] för IPv6-adress följs av ogiltigt token: %sOgiltig URL - hittade ingen avslutande ] för IPv6-adress: %sOgiltig URL - inget värdnamn angivet: %sOgiltig URL - sökväg mÃ¥ste vara absolut eller tom: %sOgiltig URL - sökväg mÃ¥ste vara absolut: %sOgiltigt jobb-id angivet (%s)Ogiltigt tidsformat: %sOmedelbart slutförande förväntasOmedelbart slutförande förväntas: %sOmedelbart färdigställande: %sImplementeringsnamn: %sImplementerare: %sOgiltig debugnivÃ¥ '%s'I den tillgängliga CRLen är lastUpdate-fältet inte giltigtI den tillgängliga CRLen är nextUpdate-fältet inte giltigtIn inställningsprofilen har 'initype'-attributet pÃ¥ "%s"-elementet ett ogiltigt värde "%s".Inkommande meddelande är inte SOAPInkompatibla alternativ --nolist och --forcelist har begärtsInkonsistent metadataOberoende proxy - inga rättigheter beviljadeInformationsändpunktInformationspunkt '%s' är okändInformationsdokument är tomtInit misslyckadesInitierade %ue Python-tjänstenInitierar delegeringsprocessIndata är inte SOAPIndata är utan trailer Mata in begäran frÃ¥n en fil: Request.xmlMata in begäran frÃ¥n kodIndata: metadata: %sInstallerade programmiljöer:Gränssnitt (%s) angivet, sänder endast in till detta gränssnittGränssnittstillägg:Gränssnitt pÃ¥ ändpunkt (%s) %s.Gränssnittsversioner:Gränssnitt: %sGränssnittsnamnIntern överföringsmetod stöds inte för %sOgiltig DTROgiltig DTR för källa %s, destination %sOgiltig effektOgiltigt HTTP-objekt kan inte producera resultatOgiltig ID: %sOgiltigt ISO-tidsperiodsformat: %sOgiltig jobbeskrivning:Ogiltig jobbeskrivning: %sOgiltig URL '%s' för indatafil '%s'Ogiltig URL '%s' för utdatafil '%s'Ogiltig URL-alternativ-syntax i alternativ '%s' för indatafil '%s'Ogiltig URL-alternativ-syntax i alternativ '%s' för utdatafil '%s'Ogiltigt URL-alternativ: %sOgiltig URL: %sOgiltig URL: '%s' i indatafil '%s'Ogiltig URL: '%s' i utdatafil '%s'Ogiltigt action-värde %sOgiltigt klassnamnOgiltigt klassnamn. 
broker-argumentet för Python-mäklaren ska vara Filnamn.Klass.args (args är valfritt), till exempel SampleBroker.MyBrokerOgiltig jämförelseoperator '%s' använd i 'delegationid'-attributet, endast "=" är tillÃ¥ten.Ogiltig jämförelseoperator '%s' använd i 'queue'-attributet i 'GRIDMANAGER'-dialekt, endast "=" är tillÃ¥tetOgiltig jämförelseoperator '%s' använd i 'queue'-attributet, endast "!=" eller "=" är tillÃ¥tna.Ogiltiga inställningar - ingen tillÃ¥ten IP-adress angivenOgiltiga inställningar - inga överföringskataloger angivnaOgiltiga referenser, kontrollera proxy och/eller CA-certifikatOgiltig destinations-URL %sOgiltig nedladdningsdestinationssökväg angiven (%s)Ogiltig jobbeskrivningOgiltigt lÃ¥s pÃ¥ fil %sOgiltig logg-nivÃ¥. Använder förval %s.Ogiltigt nodeaccess-värde: %sOgiltig gammal logg-nivÃ¥. Använder förval %s.Ogiltig periodsträng: %sOgiltigt portnummer i %sOgiltig stage-out-sökväg angiven (%s)Ogiltig URL: %sUtfärdar-CA: %sUtfärdare: %sDet är omöjligt att blanda ARC6-target-valalternativ med legacy-alternativ. Alla legacy-alternativ kommer att ignoreras!Det gÃ¥r inte att Ã¥terinsända jobb utan en ny targetinformationssökningJWSE::ExtractPublicKey: extern jwk-nyckelJWSE::ExtractPublicKey: hämtar jwl-nyckel frÃ¥n %sJWSE::ExtractPublicKey: jwk-nyckelJWSE::ExtractPublicKey: nyckeltolkningsfelJWSE::ExtractPublicKey: inte en nyckel som stödsJWSE::ExtractPublicKey: x5c-nyckelJWSE::Input: JWE: stöds inte änJWSE::Input: JWS innehÃ¥ll: %sJWSE::Input: JWS: signeringsalgoritm: %sJWSE::Input: JWS: signaturverifiering misslyckadesJWSE::Input: JWS: token för gammaltJWSE::Input: JWS: token för ungtJWSE::Input: header: %sJWSE::Input: token: %sJobb %s rapporterar inte ett tillstÃ¥nd varifrÃ¥n det kan Ã¥terupptasJobb %s misslyckades med att förnya delegering %s - %s.Jobb %s misslyckades med att förnya delegering %s.Jobb %s har ingen associerad delegering. Kan inte förnya sÃ¥dana jobb.Hittade inte jobb %sJobb %s: nÃ¥gra nerladdningar misslyckadesJobb %s: alla filer nerladdade framgÃ¥ngsriktJobb %s: filer laddas fortfarande nerJobb-id-alternativ är obligatorisktAvbrytande av jobb lyckadesBorttagande av jobb lyckadesJobbdatabasförbindelse etablerad framgÃ¥ngsrikt (%s)Jobbdatabas skapades framgÃ¥ngsrikt (%s)Jobb borttaget: %sJobbeskrivningsfil kunde inte läsas.JobbeskrivningssprÃ¥k är inte angivet, kan inte skriva ut beskrivning.JobbeskrivningssprÃ¥k som stöds av %s:Jobbeskrivning som skall sändas till: %sJobbeskrivning som skall sändas: %sJobbeskrivning: %sJobbeskrivningar:Jobb avslutades inte framgÃ¥ngsrikt. 
Meddelande kommer inte att skrivas till BLAH-logg.Jobbnedladdningskatalog frÃ¥n användarinställningsfil: %sJobbnedladdningskatalog kommer att skapas i nuvarande arbetskatalog.Jobbnedladdningskatalog: %sJobb misslyckades pÃ¥ tjänstesidanJobb har inte startat än: %sHittade inte jobbinformation i informationssystemet: %sJobblistfil (%s) existerar inteJobblistfil (%s) är inte en vanlig filJobblistfil kan inte skapas: %s är inte en katalogJobblistfil kan inte skapas: Föräldrakatalogen (%s) existerar inte.Jobbmigrering avbröts, ingen resurs returnerade nÃ¥gon informationJobbmigrering avbröts, kunde inte ladda in mäklare (%s)Jobbmigrering misslyckades för jobb (%s), inga tillämpbara targetJobb nr.Jobbplugin har inte installeratsJobbÃ¥terinsändning avbröts eftersom ingen resurs returnerade nÃ¥gon informationJobbÃ¥terinsändning misslyckades: Kunde inte ladda in mäklare (%s)JobbÃ¥terinsändningssammanfattning:Jobbet Ã¥terupptogs framgÃ¥ngsriktJobbinsändningssammanfattning:Jobbinsändningsanvändare kan inte vara rootJobbinsändningsanvändare %s (%i:%i)Jobb insänt med jobb-id: %sJobbets klockslag tolkades framgÃ¥ngsrikt som %sJobb: %sJobb: %s : Begäran att avbryta satt och meddelad till tjänstenJobb: %s : Begäran att avbryta satt men misslyckades att meddela tjänstenJobb: %s : Begäran om borttagning satt och meddelad till tjänstenJobb: %s : Begäran om borttagning satt men misslyckades att meddela tjänstenJobb: %s : Fel : Misslyckades med att sätta avbrytsmarkeringJobb: %s : Fel : Misslyckades med att sätta borttagningsmarkeringJobb: %s : Fel : Ingen lokal information.Jobb: %s : Fel : Okänt tillstÃ¥ndJobControllerPlugin %s kunde inte skapasHittade inte JobControllerPlugin-plugin "%s".JobDescription-klass är inte ett objektJobDescriptionParserPlugin %s kunde inte skapasHittade inte JobDescriptionParserPlugin-plugin "%s".Kvarvarande jobb att frÃ¥ga: %dJobb som saknar information kommer inte att tas bort!Jobb behandlade: %d, borttagna; %dJobb behandlade: %d, förnyade %dJobb behandlade: %d, Ã¥terupptagna: %dJobb behandlade: %d, framgÃ¥ngsrikt avbrutna %dJobb behandlade: %d, framgÃ¥ngsrikt avbrutna %d, framgÃ¥ngsrikt borttagna %dJobb behandlade: %d, framgÃ¥ngsrikt hämtade: %dJobb behandlade: %d, framgÃ¥ngsrikt hämtade: %d, framgÃ¥ngsrikt borttagna: %dSkräp i slutet pÃ¥ RSLSkräp i sessiondir-kommandoAvbrytande misslyckadesDödar med signal: Dödar förbindelse pÃ¥ grund av timeoutLCMAPS returnerade ingen GIDLCMAPS returnerade ingen UIDLCMAPS har getCredentialDataLCMAPS har lcmaps_runLCMAPS returnerade UID som saknar användarnamn: %uLCMAPS returnerade ogiltig GID: %uLCMAPS returnerade ogiltig UID: %uLDAP-förbindelse är redan öppen till %sLDAP-förfrÃ¥gan avbröts pÃ¥ grund av timeout: %sLDAPQuery: ErhÃ¥ller resultat frÃ¥n %sLDAPQuery: Initierar förbindelse till %s:%dLDAPQuery: FrÃ¥gar %sLIST/MLST misslyckadesLIST/MLST misslyckades: %sSprÃ¥k (%s) känns inte igen av nÃ¥gon jobbeskrivningstolk.Sista steget av registrering till indextjänst misslyckadesLatitud: %fLdap bind timeout (%s)Vänster operand för RSL-konkatenering utvärderas inte till en strängLegacy-alternativ för att definiera targetLegacyMap: inga inställningsblock definieradeLegacyPDP: ARC Legacy Sec Attribute känns inte igen.LegacyPDP: Det finns inga %s-säkerhetsattribut definierade. 
ARC Legacy Sec Handler troligen inte inställd eller har misslyckats.LegacySecHandler: inställningsfil inte angivenBibliotek: %sRad %d.%d i attributen returnerade: %sLänkar MCC %s(%s) till MCC (%s) under %sLänkar MCC %s(%s) till Plexer (%s) under %sLänkar MCC %s(%s) till tjänst (%s) under %sLänkar Plexer %s till MCC (%s) under %sLänkar Plexer %s till Plexer (%s) under %sLänkar Plexer %s till tjänst (%s) under %sLänkar lokal filLänkar mappad filLänkar/kopierar cachad filLänkar/kopierar cachad fil till %sListning misslyckadesListfunktionalitet stöds inte av RESTful-VOMS-gränssnittetListfunktionalitet stöds inte av legacy-VOMS-gränssnittetList kommer att göra stat pÃ¥ URL %sListFiles: letar efter metadata: %sLyssnande misslyckadesLyssnande avslutadBörjade lyssnaLyssnar pÃ¥ %s:%s(%s)Lyssnar pÃ¥ TCP-port %s(%s)Listning av jobb lyckades, hittade %d jobbListning av lokala jobb lyckades, hittade %d lokala jobbInladdningsbar modul %s innehÃ¥ller inte begärd plugin %s av typen %sLaddade in %sLaddade in %s %sLaddade in JobControllerPlugin %sLaddar in JobDescriptionParserPlugin %sLaddade in MCC %s(%s)Laddade in Plexer %sLaddade in tjänst %s(%s)Laddade in SubmitterPlugin %sLaddar %ue Python-tjänstenInladdning av OToken misslyckades - ignorerar dess närvaroLaddar in Python-mäklare (%i)Laddar in inställningar (%s)Lokal grupp %s existerar inteAntal lokala körande jobb: %iAntal lokala suspenderade jobb: %iLokal användare %s existerar inteAntal lokala väntade jobb: %iLocation URI för fil %s är ogiltigPlats existerar redanPlatser saknas i destinations-LFC-URLLÃ¥s %s ägs av en annan värd (%s)LÃ¥sfil %s existerar inteLongitud: %fLetar efter nuvarande jobbSlÃ¥r upp URL: %sSlÃ¥r upp källreplikorMCC %s(%s) - nästa %s(%s) saknar targetMIME är inte lämplig för SOAP: %sMSLD stöds inte - försöker med NLSTMLST stöds inte - försöker med LISTPythons huvudtrÃ¥d är inte initieradPythons huvudtrÃ¥d initierades inteHuvudminnesstorlek: %iHittade felaktig ARCHERY-post (ändpunktstyp är inte definierad): %sHittade felaktig ARCHERY-post (ändpunkts-URL är inte definierad): %sFelaktigt VOMS-AC-attribut %sMappningsfil pÃ¥ %s kan inte öppnas.Mappad till lokalt grupp-id: %iMappad till lokalt gruppnamn: %sMappad till lokalt id: %iMappad till körande användare: %sMappad användares hem: %sHittade inte mappad användare:grupp (%s:%s)Mappar %s till %sMappningspolicyalternativ har tomt värdeMappningspolicy:Mappar till kö: %sMatcha utfärdare: %sMatcha vo: %sMatchad ingentingMatchad: %s %s %sMatchad: %s %s %s %sMatchmaking, %s (%d) är %s än %s (%d) publicerat av ExecutionTarget.Matchmaking, Benchmark %s är inte publicerat av ExecutionTarget.Matchmaking, CacheTotal-problem, ExecutionTarget: %d MB (CacheTotal); JobDescription: %d MB (CacheDiskSpace)Matchmaking, Beräkningsändpunktsvillkor inte uppfyllt. 
ExecutionTarget: %sMatchmaking, ConnectivityIn-problem, ExecutionTarget: %s (ConnectivityIn) JobDescription: %s (InBound)Matchmaking, ConnectivityOut-problem, ExecutionTarget: %s (ConnectivityOut) JobDescription: %s (OutBound)Matchmaking, ExecutionTarget: %s, OperatingSystem är inte definieratMatchmaking, ExecutionTarget: %s, CacheTotal är inte definieratMatchmaking, ExecutionTarget: %s, HealthState är inte definieratMatchmaking, ExecutionTarget: %s, ImplementationName är inte definieratMatchmaking, ExecutionTarget: %s, MaxDiskSpace och WorkingAreaFree är inte definieradeMatchmaking, ExecutionTarget: %s, varken MaxTotalCPUTime eller MaxCPUTime är definierad, antar ingen CPU-tidsgränsMatchmaking, ExecutionTarget: %s, MinCPUTime inte definierat, antar ingen CPU-tidsgränsMatchmaking, ExecutionTarget: %s, NetworkInfo är inte definieradMatchmaking, ExecutionTarget: %s, Platform är inte definieradMatchmaking, ExecutionTarget: %s, RunTimeEnvironment-villkor är inte uppfylltMatchmaking, ExecutionTarget: %s, TotalSlots och MaxSlotsPerJob är inte definieradeMatchmaking, ExecutionTarget: %s, WorkingAreaLifeTime är inte definieradMatchmaking, ExecutionTarget: %s matchar jobbeskrivningMatchmaking, ExecutionTarget: %s, ApplicationEnvironments är inte definieradeMatchmaking, ExecutionTarget: %s, MaxMainMemory och MainMemorySize är inte definieradeMatchmaking, ExecutionTarget: %s, MaxVirtualMemory är inte definieratMatchmaking, ExecutionTarget: %s, OperatingSystem-villkor är inte uppfylltMatchmaking, MainMemorySize-problem, ExecutionTarget: %d (MainMemorySize), JobDescription: %d (IndividualPhysicalMemory)Matchmaking, MaxCPUTime-problem, ExecutionTarget: %d (MaxCPUTime), JobDescription: %d (TotalCPUTime/NumberOfSlots)Matchmaking, MaxDiskSpace-problem, ExecutionTarget: %d MB (MaxDiskSpace); JobDescription: %d MB (DiskSpace)Matchmaking, MaxDiskSpace-problem, ExecutionTarget: %d MB (MaxDiskSpace); JobDescription: %d MB (SessionDiskSpace)Matchmaking, MaxMainMemory-problem, ExecutionTarget: %d (MaxMainMemory), JobDescription: %d (IndividualPhysicalMemory)Matchmaking, MaxSlotsPerJob-problem, ExecutionTarget: %d (MaxSlotsPerJob) JobDescription: %d (NumberOfProcesses)Matchmaking, MaxTotalCPUTime-problem, ExecutionTarget: %d (MaxTotalCPUTime), JobDescription: %d (TotalCPUTime)Matchmaking, MaxVirtualMemory-problem, ExecutionTarget: %d (MaxVirtualMemory), JobDescription: %d (IndividualVirtualMemory)Matchmaking, MinCPUTime-problem, ExecutionTarget: %d (MinCPUTime), JobDescription: %d (TotalCPUTime/NumberOfSlots)Matchmaking, NetworkInfo-begäran inte uppfylld, ExecutionTarget stöder inte %s, angiven i jobbeskrivning.Matchmaking, Platform-problem, ExecutionTarget: %s (Platform) JobDescription: %s (Platform)Matchmaking, %s skalad %s (%d) är %s än %s (%d) publicerat av ExecutionTarget.Matchmaking, TotalSlots-problem, ExecutionTarget: %d (TotalSlots) JobDescription: %d (NumberOfProcesses)Matchmaking, WorkingAreaFree-problem, ExecutionTarget: %d MB (WorkingAreaFree); JobDescription: %d MB (DiskSpace)Matchmaking, WorkingAreaFree-problem, ExecutionTarget: %d MB (WorkingAreaFree); JobDescription: %d MB (SessionDiskSpace)Matchmaking, WorkingAreaLifeTime-problem, ExecutionTarget: %s (WorkingAreaLifeTime) JobDescription: %s (SessionLifeTime)Största CPU-tid: %sStörsta diskutrymme: %iStörsta minne: %iStörsta antal pre-LRMS-väntade jobb: %iStörsta antal körande jobb: %iStörsta antal slottar per jobb: %iStörsta antal stage-in-strömmar: %iStörsta antal stage-out-strömmar: %iStörsta totalt antal jobb: %iStörsta totala klocktid: 
%sStörsta antal körande jobb för användaren: %iStörsta virtuella minne: %iStörsta antal väntade jobb: %iStörsta klocktid: %sMaximalt antal trÃ¥dar kör - ställer ny begäran i köMinnesallokeringsfelMeddelande-klass är inte ett objektMeddelande skickat till VOMS-server %s är: %sMetainformation för källa och plats stämmer inte överens för %sMetadata för replika och indextjänst skiljer sig Ã¥tKällans och destinationens metadata är olikaKällans metadata stämmer inte överens med existerande destination. Använd alternativet --force för att överstyra detta.Minsta CPU-tid: %sMinsta klocktid: %sSaknat CA-subjekt i Globus signeringspolicySaknat CertificatePath-element eller ProxyPath-element, eller saknasVärd saknas i Connect-elementPort saknas i Connect-elementPort saknas i Listen-elementVO saknas i inställningarSaknat argumentPublik saknas i inställningarSaknad autentiseringsinformationSaknar authgroup-namn i allowaccessSaknar authgroup-namn i denyaccessSaknar cancel-%s-job - avbrytande av jobb kanske inte fungerarFörmÃ¥ga saknas i inställningarSaknade villkorssubjekt i Globus signeringspolicySaknar data i DER-kodat PROXY_CERT_INFO_EXTENSION-tilläggSaknar katalog i controldir-kommandoSaknat filnamn i [arex/jura] loggfilSaknat sista svar: %sGrupp saknas i inställningarSaknad information i svar: %sUtfärdare saknas i inställningarNamn pÃ¥ LCAS-biblioteket saknasNamn pÃ¥ LCMAPS-bibliotek saknasSaknar nummer i maxjobsSaknat alternativ för kommandot logreopenCertificatePath-element saknas eller är tomtSaknat eller tomt CertificatePath- eller CACertificatesDir-elementCertificatePath- eller CACertificatesDir-element saknas eller är tomt; kommer endast att kontrollera signatur, kommer ej att göra meddelandeautentiseringKeyPath-element saknas eller är tomtSaknat eller tomt KeyPath-element, eller saknasPasswordSource-element saknas eller är tomtUsername-element saknas eller är tomtSökväg till referensfil saknasSaknar referens till fabrik och/eller modul. För närvarande stöds inte säker utladdning av LDAP-DMC. Rapportera till utvecklare.Saknar referens till fabrik och/eller modul. Det är osäkert att använda Globus i icke-persistent mode - (Grid)FTP-koden är deaktiverad. Rapportera till utvecklare.Saknar referens till fabrik och/eller modul. Det är osäkert att använda Globus i icke-persistent mode - insändnings-plugin för GRIDFTPJOB är deaktiverad. Rapportera till utvecklare.Saknar referens till fabrik och/eller modul. Det är osäkert att använda Xrootd i icke-persistent mode - Xrootd-koden är deaktiverad. Rapportera till utvecklare.Saknat svar frÃ¥n delegeringstjänst.Roll saknas i inställningarSaknar scan-%s-job - kan missa när jobb har slutat exekveraSchema saknas! 
Hoppar över validering...Scope saknas i inställningarSäkerhetsobjekt saknas i meddelandeSubjekt saknas i inställningarSubjektnamn saknasSaknar submit-%s-job - insändning av jobb till LRMS kanske inte fungerarModul %s innehÃ¥ller ingen plugin %sModul %s innehÃ¥ller inte begärd plugin %s av typen %sModul %s innehÃ¥ller inte en plugin(er) av angiven typModul %s kunde inte laddas in igen (%s)Modulen %s är inte en ARC-plugin (%s)Modulhanterare initModulhanterare init av ModuleManager::setCfgModulnamn: %sMonteringspunkt: %sHoppar till slutet av datastagingFlerjobbsoperator endast tillÃ¥ten pÃ¥ toppnivÃ¥nMer än ett %s-attribut i inställningsfil (%s)MyProxy-fel: %sMyproxy-server returnerade inte proxy med VOMS AC inkluderatNYTT: put nytt jobb: gränsen för max totalt antal jobb nÃ¥ddNYTT: put nytt jobb: det finns inge nyttolastNLST/MLSD misslyckadesNLST/MLSD misslyckades: %sNSS-databas som kommer att användas: %s NSS-initiering misslyckades pÃ¥ certifikatdatabas: %sNULL BIO skickad till InquireRequestNULL-callback för %sNamn pÃ¥ grami-filNamn: %sNegativa rättigheter stöds inte i Globus signeringspolicyVarken källa eller destination är indextjänster, kommer att hoppa över uppslagning av replikorVarken källa eller destination stagades, hoppar över frigörande av begärningarNätverksinformation:Ny förbindelseNy ändpunkt skapas (%s) frÃ¥n den med ej angivet gränssnitt (%s)Nytt jobb accepterat med id %sNy gräns för vektorförfrÃ¥gningar returnerad av EMI-ES-tjänst: %dLivstid för ny proxy gÃ¥r ut %sDen nya proxyns livslängd är inte längre än den gamla proxyns, förnyar inte proxyHittade ingen A-REX-inställningsfil i candypond-inställningarnaInget attribut existerar som kan hantera typen: %sInget Connect-element angivetHittade inget FQAN. Använde None som användar-FQAN-värdeInget HTTP-svar frÃ¥n VOMS-serverInget LRMS satt i inställningarnaHittade inget RSL-innehÃ¥ll i jobbeskrivningInget SOAP-svarInget SOAP-svar frÃ¥n leveranstjänst %sInget SOAP-svar frÃ¥n leveranstjänstIngen aktiv DTR %sInget aktivt jobb-id: %sInga argument har tilldelats extern processInget auktoriseringssvar returneradesIngen cachekatalog angivenInga cachekataloger hittade/konfigurerade för beräkning av fritt utrymme.Inga cacher definierade i inställningarIngen callback för %s definieradIngen information om checksumma frÃ¥n serverIngen information om checksumma möjligIngen information om checksumma returnerad i Rucio-svar för %sIngen verifiering av checksumma möjligIngen inställningfil kunde laddas in.Ingen kontrollkatalog satt i inställningarnaInga kontroll- eller sessionskataloger definierade i inställningarInga referenser tillhandahÃ¥llnaInga delegerade referenser skickades medInga delegeringspolicyer i denna kontext och meddelande - passerar igenomInget delegeringstoken i begäranIngen leveransändpunkt tillgänglig, kommer försöka senareIngen destination definieradIngen draining-cachekatalog angivenInget felInga filer att hämta för jobb %sIngen information om filstorlek returnerad i Rucio-svar för %sIngen grupp %i för mappad användareInget jobb-id tillhandahÃ¥lletIngen jobbeskrivningsfil tillhandahölls.Ingen jobbeskrivning angivenIngen jobbeskrivningstolk kunde tolka jobbeskrivningInga jobbeskrivningstolkar tillgängligInga jobbeskrivningstolkar lämpliga att hantera '%s'-sprÃ¥ket är tillgängligaIngen jobbeskrivning resulterade vid %d testInga jobbInga jobb hittades, försök senareInga jobb angivnaInga jobb att Ã¥terinsända med den angivna statusenIngen vänsteroperand för konkateneringsoperatorInga lyssnande portar initieradeInget lokalt 
kontonamn angivetHittade ingen lokal användarmappningInga platser definierade för %sInga platser för %sHittade inga platser för destinationen som skiljer sig frÃ¥n källanHittade inga platser för destinationen som skiljer sig frÃ¥n källan: %sHittade inga platser för destination: %sHittade inga platser för källa: %sHittade inga platser - troligen inga fler fysiska instanserInga platser hittade för %sInga platser kvar för %sHittade ingen match i cacheÃ¥tkomstregler för %sInga fler %s-replikorInga fler gränssnitt att prova för ändpunkt %s.Inga fler replikor, kommer att använda %sBehöver inte staga källa eller destination, hoppar över stagingInget nytt informationsdokument tilldelatIngen nästa MCC eller tjänst pÃ¥ sökväg "%s"Inget nästa element i kedjanIngen non-draining sessionskatalog tillgängligInga non-draining sessionskataloger tillgängligIngen överskrivning begärd eller tillÃ¥ten, hoppar över förstädningInga fysiska filnamn (PFN) returnerade i Rucio-svar: %sHittade inga fysiska filer för destinationHittade inga platser för destinationHittade ingen pid-fil '%s'. Troligen kör inte A-REX.Ingen plugin är inställd eller auktoriserad för den begärda sökvägen %sIngen policyfil eller DN angiven för simplelist.pdp, ange ett location-attribut eller Ã¥tminstone ett DN-element i simplelist-PDP-noden i inställningarna.Ingen port lyckades för %sIngen privat nyckel med smeknamn %s existerar i NSS-databasenHittade ingen proxyIngen proxy tillhandahÃ¥llenInget könamn givet i queue-blocknamnIngen readonly-cachekatalog angivenInga fjärrleveranstjänster kan användas, tvingar lokal leveransHittade inga replikor för %sInget begäran-token angivet!Hittade inga begäran-tokenIngen begärd säkerhetsinformation samlades inInget svar frÃ¥n %sInget svar frÃ¥n AA-tjänst %sInget svar returnerades: %sInga resultat returnerade frÃ¥n statIngen högeroperand för konkateneringsoperatorIngen säkerhetsprocessering/kontroll begärd för '%s'Ingen serverinställningsdel i inställningsfilenInga tjänster angivna. 
Konfigurera förvalda tjänster i användarinställningarna, eller ange ett kluster eller index (alternativ -c eller -g, se arcsync -h).Hittade ingen sessionkatalog i inställningar.Hittade ingen sessionskatalogIngen sessionskatalog satt i inställningarnaIngen källa definieradInget spacetoken angivetHittade inget spacetoken som matchar beskrivning %sIngen stage-in-URL tillhandahÃ¥llenInget ström-svar frÃ¥n VOMS-serverIngen sÃ¥dan DTR %sIngen sÃ¥dan fil eller katalogIngen sÃ¥dan grupp: %sIngen sÃ¥dan användare: %sInget target tillgängligt inuti policynInget target tillgängligt inuti regelnHittade inget test-jobb med ID %d.Inget test-jobb, med ID "%d"Inga användbara cacherHittade inget certifikat med smeknamn %sHittade inget användarcertifikatInget användarnamn tillhandahÃ¥lletHittade inga giltiga cachar i inställningar, cachning är avstängdHittade inga giltiga referenser, avslutarInget giltigt handtag kvar att lyssna pÃ¥Ingen giltig jobbidentifierare returnerades av EMI-ESIngen giltig plats tillgängligInget giltigt svar frÃ¥n VOMS-server: %sInget värde tillhandahÃ¥llet för subjektattribut %s, hoppar överIcke-homogen resursInget av de begärda överföringsprotokollen stödsInte auktoriseradInte auktoriserad enligt begäran: %sInte auktoriserad av arc.pdp - kunde inte fÃ¥ svar frÃ¥n utvärderareEj auktoriserad av arc.pdp - nÃ¥gra av RequestItem-elementen uppfyller inte policyEj auktoriserad av simplelist.pdp: %sEj tillräckligt antal parametrar i copyurlEj tillräckligt antal parametrar i linkurlInte tillräckligt med utrymme för att lagra filHittade inte %s i cacheHämtar inte checksumma för zip-komponentLyssnar inte pÃ¥ nÃ¥gontingAnvänder inte leveranstjänst %s pÃ¥ grund av tidigare felAnvänder inte leveranstjänst pÃ¥ %s eftersom den är fullOgiltig destinationOgiltig källaInget att göra: du mÃ¥ste antingen ange ett test-jobb-id med -J (--job) eller frÃ¥ga om information om certifikaten med -E (--certificate) Avisering misslyckadesKopierar nu (frÃ¥n -> till)Nummer %d är med smeknamn: %s%sNummer %d är: %sAntalet ComputingService-element som erhölls frÃ¥n hela dokumentet och frÃ¥n XPath-förfrÃ¥gan stämmer inte överens: %d != %dAntalet SSL-lÃ¥s ändrades - Ã¥terinitierarAntalet källor och destinationer stämmer inte överensALTERNATIV...OS-familj: %sOS-namn: %sOS-version %sOTokens: Attr: %s = %sOTokens: Attr: meddelandeOTokens: Attr: token: %sOTokens: Attr: token: bärare: %sOTokens: HandtagOTokens: Handtag: attribut skapade: subjekt = %sOTokens: Handtag: meddelandeObjekt är inte lämpligt för listningObjekt ej initierat (internt fel)ErhÃ¥llen XML: %sErhÃ¥llen värd och adress kan inte accepterasMisslyckades med att erhÃ¥lla informationMisslyckades med att erhÃ¥lla statusEndast POST stöds i CandyPondEndast POST stöds i dataleveranstjänstEndast raw-buffer-nyttolast stöds för utmatningEndast globusrättigheter stöds i Globus signeringspolicy - %s stöds inteEndast signeringsrättigheter stöds i Globus signeringspolicy - %s stöds inteEndast standard input är för närvarande giltig för lösenordskälla.Endast användare '.' 
för hjälpprogram stödsOpenSSL-fel -- %sOpenSSL-felsträng: %sOperativsystem-felOperation avbröts framgÃ¥ngsriktOperation avslutades framgÃ¥ngsriktOperationen stöds inte för denna typ av URLOperation pÃ¥ sökväg "%s"OptimizedInformationContainer skapade temporär fil: %sOptimizedInformationContainer misslyckades med att skapa temporär filOptimizedInformationContainer misslyckades med att tolka XMLOptimizedInformationContainer misslyckades med att byta namn pÃ¥ temporär filOptimizedInformationContainer misslyckades med att lagra XML-dokument till temporär filAlternativ: %sAlternativen 'p' och 'n' kan inte användas samtidigtAlternativgrupp %s:Alternativ för plugin saknasUrsprunglig jobbeskrivning visas nedan:Föräldralöst delegeringslÃ¥s detekterat (%s) - städarÖvriga flaggorMinnet tog slut när slump-serienummer genereradesSlut pÃ¥ försökSlut pÃ¥ försök vid allokering av nytt jobb-idSlut pÃ¥ försök vid allokering av nytt jobb-id i %sUtgÃ¥ende meddelande är inte SOAPSkriv ut EEC-certifikatetUtdataformateringsmodifierareSkriv ut proxycertifikatetÖverskrivning begärd - kommer att för-städa destinationÄgare: %sPASV misslyckadesPASV misslyckades: %sPDP: %s (%s)PDP: %s (%s) kan inte laddas inPDP: %s kan inte laddas inPDP: %s namnattribut saknasPDPD-plats saknasPDPD-plats: %sPEM_read_bio_X509_REQ misslyckadesPEM_write_bio_X509_REQ misslyckadesPEPD-plats saknasPEPD-plats: %sPKCS12 lägg till lösenordsintegritet misslyckadesPKCS12 output-lösenord inte tillhandahÃ¥lletPOST-begäran pÃ¥ specialsökväg stöds inteTolkade domäner: %uSkapande av tolkningskontext misslyckades!Tolk misslyckades med felkod %i.Tolkar .local-fil för att erhÃ¥lla jobb-specifika identifierare och infoTolkar VOMS-AC för att fÃ¥ FQAN-informationLösenordskodningstyp stöds inte: %sSökväg %s är ogiltig, skapar nödvändiga katalogerSökväg till .local jobbstatusfil är obligatorisk.Sökväg till användarens proxyfil ska angesPeer-namn: %sPer-jobb POST/SOAP-begäran stöds inteUtför /*-förfrÃ¥ganUtför /ComputingService-förfrÃ¥ganUtför /Services/ComputingService-förfrÃ¥ganUtför matchmaking mot target (%s).Utför varken sortering eller matchningPermanent felPermanent tjänstefelÃ…tkomsträttighetskontroll inte godkänd, kommer att försöka ladda ned utan att använda cacheÃ…tkomsträttighetskontroll inte godkänd: %sÃ…tkomsträttighetskontroll pÃ¥ ursprunglig URL inte godkänd: %sÃ…tkomsträttighetskontroll godkändÃ…tkomsträttighetskontroll godkänd för URL %sPlockar upp lämnade jobbOrt: %sPlattform: %sVälj den NSS-databas du vill använda (1-%d): Välj det som du vill använda (1-%d): Plexer (%s) - nästa %s(%s) saknar targetPlexerns (%s) nästa har inget id-attribut definieratPlugin %s fel: %sPlugin %s fel: %uPlugin %s misslyckades med att köraPlugin %s misslyckades med att startaPlugin %s för Ã¥tkomstpunkt %s acquire misslyckades (ska aldrig hända).Plugin %s för Ã¥tkomstpunkt %s är trasig.Plugin %s skrev ut: %sPlugin %s skrev ut: %uPlugin %s returnerade ingen mappningPlugin %s returnerade inget användarnamnPlugin %s returnerade för mycket: %sPlugin %s returnerade: %uPlugin %s timeout efter %u sekunderPlugin (användarmappning) kommando är tomtPlugin (användarmappning) timeout är inte ett nummer: %sPlugin (användarmappning) timeout är felaktigt nummer: %s<Pluginsvar: %sPolicy-besluts-tjänst-anrop misslyckadesPolicy är tomPolicy är inte gaclpolicyrad: %sPolicy-subjekt: %sPolicyId: %s Alg inuti denna policy är:-- %sPostnummer: %sAntal pre-LRMS-väntade jobb: %iFörstädning misslyckades, kommer fortfarande att försöka kopieraFörregisterar destinationFörregisterar 
destination i indextjänstFörbereder att staga destinationFörberedar att staga källaProblem att komma Ã¥t cachefil %s: %sProblem med att skapa katalog (källa %s, destination %s)Problem med att ladda in plugin %s, hoppar över den.Problem med indextjänst, kommer att hoppa till slutet av datastagingProblem med indextjänst, kommer att frigöra cachelÃ¥sBehandlar en %s-begäranProcesseringstrÃ¥d avbröts pÃ¥ grund av timeout. Startar om DTRProcesseringstyp stöds inte: %sProcessingStartTime (%s) angiven i jobbeskrivning ligger inom targets driftstoppsperiod [ %s - %s ].Tillgängliga protokollpluginer:Protokoll stöds inte - kontrollera att relevanta gfal2-pluginer har installerats (gfal2-plugin-* paket)Proxycertifikatinformation:Proxyns livstid har gÃ¥tt utProxyns livstid har gÃ¥tt ut. Jobbinsändning avbruten. Kör 'arcproxy'!Proxygenerering misslyckades: Certifikatets giltighetstid har gÃ¥tt ut.Proxygenerering misslyckades: Certifikatet är inte giltigt än.Proxygenerering misslyckades: Misslyckades med att skapa temporär fil.Proxygenerering misslyckades: Misslyckades med att hämta VOMS-information.Proxygenerering misslyckades: Hittade inget giltigt certifikat.Proxygenerering misslyckades: Hittade ingen giltig privat nyckel.Proxygenerering lyckadesProxyns livstid har gÃ¥tt utProxyns nyckellängd: %iProxysökväg: %sProxysignatur: %sProxy lagrad pÃ¥ %sProxysubjekt: %sProxytyp: %sProxy med ARC-policyProxy med alla rättigheter ärvdaProxy med tom policy - misslyckades pÃ¥ grund av okänd policyProxy med specifik policy: %sProxy med okänd policy - misslyckades pÃ¥ grund av okänd policyProxy-subjekt: %sProxy/referenser lagrade pÃ¥ %sProxy: %sBegäran att spara %s är fortfarande i kö, ska vänta %i sekunderPython-wrapper-konstruktor anropadPython-wrapper-destruktor (%d)Python-mäklarens konstruktor anropad (%d)Python-mäklarens destruktor anropad (%d)Pythontolkare lÃ¥stPythontolkare frigjordPython-wrapper-process anropadPython-mäklare initKvalitetsnivÃ¥: %sFörfrÃ¥gan returnerade inga elementFörfrÃ¥gan returnerade oväntat element: %s:%sFrÃ¥gar ACIX-server pÃ¥ %sFrÃ¥gar WSRF-GLUE2-beräknings-REST-ändpunkt.FrÃ¥gebatch med %d jobbMassfrÃ¥gar källreplikorFrÃ¥gar efter status för stagingbegäranKöinformation:REST: process %s pÃ¥ %sREST:CLEAN jobb %s - %sREST:GET jobb %s - %sREST:KILL jobb %s - %sREST:PUT jobb %s: fil %s: det finns ingen nyttolastREST:RESTART jobb %s - %sRESTful och gammalt VOMS-kommunikationsprotokoll kan inte begäras samtidigt.RSA_generate_key_ex misslyckadesRSL-substitution är inte en sekvensRSL-substitutions-sekvens har inte längden 2RSL-substitutionsvariabelnamn utvärderas inte till en strängRSL-substitutionsvariabelvärde utvärderas inte till en strängSlumpvis sorteringRÃ¥-kommando: %sÃ…terskapar en EMI-ES-klientLäste %i byteÃ…tkomstkontroll för läsning inte godkändÃ…tkomst för läsning inte tillÃ¥ten för %s: %sLäs-kommandon i authenticate misslyckadesLäste begäran frÃ¥n en filLäste begäran frÃ¥n en strängLäser %u byte frÃ¥n byte %lluReell överföring frÃ¥n %s till %sAnledning: %sFick tillbaka DTR %s frÃ¥n schemaläggare i tillstÃ¥nd %sMottog DTR %s under generatoravstängning - kan inte processerasMottog ogiltig DTRMottog meddelande out-of-band (inte kritiskt, ERROR-nivÃ¥ är bara för debuggningsändamÃ¥l)Mottog ingen DTRMottog försök igen för DTR %s som fortfarande överförÃ…teransluterPost om nytt jobb framgÃ¥ngsrikt tillagd till databasen (%s)Omdirigerar till %sOmdirigerar till ny URL: %sVägrar förbindelse: Förbindelsegräns nÃ¥ddRegistrerar destinationsreplikaRegistrerar katalog: %s med plugin: 
%sRegistrerar dummy-katalog: %sRegistrering av Globus-FTP-buffer misslyckades - avbryter kontrollRelationsoperator förväntadesFrigör destinationFrigör begärningar som gjordes under stagingFrigör begärningarFrigör källaOmmappad till lokal grupp-id: %iOmmappad till lokalt gruppnamn: %sOmmappad till lokalt id: %iOmmappad till lokal användare: %sOmmappad användares hem: %sRemove: tar bort: %sTar bort %sTar bort ändpunkt %s: Den har ett icke begärt gränssnitt (%s).Tar bort logisk fil frÃ¥n metadata %sTar bort metadata i %sTar bort förregistrerad destination i indextjänstRename: globus_ftp_client_move misslyckadesRename: timeout vid väntan pÃ¥ att operationen ska slutförasByter namn pÃ¥ %s till %sFörnyelse av referenser lyckadesFörnyar referenser för jobb: %sFörnyas proxy för jobb %sByter ut DTR %s i tillstÃ¥nd %s med ny begäranByter ut existerande token för %s i Rucios token-cacheByter ut gammal SRM-info mot by för URL %sByter ut kö '%s' mot '%s'Replika %s matchar inte föredraget mönster eller URL-mapReplika %s har hög latency, men inga fler källor existerar sÃ¥ kommer att använda dennaReplika %s har hög latency, prövar nästa källaReplika %s har lÃ¥ng latency, provar nästa replikaReplika %s har mappatsReplika %s matchar värd-mönster %sReplika %s matchar mönster %sBegäran misslyckadesBegäran misslyckades: Inget svar frÃ¥n IdPBegäran misslyckades: Inget svar frÃ¥n IdP när autentisering görsBegäran misslyckades: Inget svar frÃ¥n IdP när omdirigering görsBegäran misslyckades: Inget svar frÃ¥n SP-tjänsten när SAML-assertion sänds till SPBegäran misslyckades: Inget svar frÃ¥n SPServiceBegäran misslyckades: svar frÃ¥n IdP är inte som förväntat när autentisering görsBegäran misslyckades: svar frÃ¥n IdP är inte som förväntat när omdirigering görsBegäran misslyckades: svar frÃ¥n SP-tjänsten är inte som förväntat när SAML-assertion sänds till SPBegäran misslyckades: svar frÃ¥n SPService är inte som förväntatBegäran är tomBegäran stöds inte - %sBegäran rapporteras som avbruten, men alla filer är färdigaBegäran rapporteras som avbruten, eftersom den avbrötsBegäran rapporteras som avbruten. Orsak: %sBegäran lyckades!!!Begäran avbröts pÃ¥ grund av timeoutBegäran att öppna fil medan den hÃ¥ller pÃ¥ att tas emotBegäran att pusha till okänd ägare - %uBegäran: %sBegärda slottar: %iBegärt att hoppa över resurssökning. 
Kommer att försöka med direkt insändning till %s och %s insändningsändpunktstyperBegär ComputingService-element frÃ¥n resursbeskrivning pÃ¥ %sAtt begära rekursion och --nolist saknar meningBegär att avsluta jobbprocesseringVillkor "%s %s" INTE uppfyllt.Villkor "%s %s" uppfyllt av "%s".Villkor "%s %s" uppfyllt.Reserveringspolicy: %sSlÃ¥r upp destinationsreplikorUppslagning av indextjänst för destination misslyckadesUppslagning av indextjänst för källa misslyckadesMassuppslagning av källreplikorResursbeskrivning innehÃ¥ller oväntat element: %s:%sResursbeskrivning är tomResursbeskrivning tillhandahÃ¥ller URL för gränssnitt %s: %sResursbeskrivning tillhandahÃ¥ller inga URLer för gränssnittResursbeskrivningsförfrÃ¥gansvalidering lyckadesResursbeskrivningsvalidering enligt GLUE2-schema misslyckades: Resursbeskrivningsvalidering lyckadesResursinformationstillhandahÃ¥llare misslyckades med att köraResursinformationstillhandahÃ¥llare misslyckades med att startaResursinformationstillhandahÃ¥llare misslyckades med avslutningsstatus: %i %sResursinformationstillhandahÃ¥llarlogg: %sResursinformationstillhandahÃ¥llare: %sResurshanterare: %sResursförfrÃ¥gan misslyckadesSvaret är inte SOAPSvaret är inte XMLSvar sände felSvar: %sSvar: %sStartar om efter segmenteringsfel.Ã…terinsändning av jobb (%s) lyckades, men borttagandet av jobbet misslyckades - det kommer fortfarande att synas i jobblistanÃ…terinsändning av jobb (%s) lyckades, men avbrytandet av jobbet misslyckades - det kommer fortfarande att synas i jobblistanResultatvärde (0=TillÃ¥t, 1=Vägra, 2=Obestämd, 3=Ej applicerbar): %dResultat lagrade i: %sÃ…terupptar jobb: %s i tillstÃ¥nd: %s (%s)Hämtar fil %sAtt hämta jobbeskrivning för ett EMI-ES-jobb stöds inteAtt hämta jobbeskrivning för INTERNAL jobb stöds inteReturnerat meddelande frÃ¥n VOMS-server %s är: %s Returnerat meddelande frÃ¥n VOMS-server: %sÃ…tervänder till generatorÃ…teranvänder förbindelseHöger operand för RSL-konkatenering utvärderas inte till en strängRucio returnerade: %sRucios token för %s har gÃ¥tt ut eller är pÃ¥ väg att gÃ¥ utRegel: %sRegel: publik: %sRegel: förmÃ¥ga: %sRegel: grupp: %sRegel: utfärdare: %sRegel: roll: %sRegel: scope: %sRegel: subjekt: %sRegel: vo: %sKör kommando: %sAntal körande jobb: %iKör e-postsändar-kommando (%s)Körande användare har inget namnSAML-tokenhanteraren har ej ställts inSAML2SSO-process misslyckadesSASL-växelverkanSOAP-begäran till AA-tjänst %s misslyckadesSOAP-fel frÃ¥n leveranstjänst pÃ¥ %s: %sSOAP-fel: %sSOAP-anrop misslyckadesSOAP-process stöds inte: %sSOAP-begäran: %sSOAP-svar: %sSOAP med SAML2SSO-anrop misslyckadesAnvänd SQL-sats: %sSQLite-databasfel: %sSRM-klientstatus: %sSRM returnerade inte nÃ¥gon informationSRM returnerade inte nÃ¥gon användbar informationSRM returnerade inga användbara överförings-URLer: %sSSHFS-monteringspunkt för cachekatalogen (%s) är trasig - väntar pÃ¥ Ã¥teruppkoppling ...<SSHFS-monteringspunkt för runtimekatalogen (%s) är trasig - väntar pÃ¥ Ã¥teruppkoppling ...SSHFS-monteringspunkt för sessionskatalogen (%s) är trasig - väntar pÃ¥ Ã¥teruppkoppling ...SSL-fel: %d - %s:%s:%sSSL-fel: %s, bibliotek: %s, funktion: %s, anledning: %sSSL ser ut att inte vara initieratSchemaläggarinställningar:Schemalägger mottog NULL-DTRSchemaläggare mottog ogiltig DTRSchemaläggaren mottog en ny DTR %s med källa: %s och destination: %s, tilldelad att överföra andel %s med prioritet %dSchemaläggare startarSchemaläggare stoppar, avstutarSchemaläggningsspolicy: %sSchemavalideringsfelSchema: %sSecHandler-inställningar är inte 
definieradeSäkerhetshanterare saknar inställningarSäkerhetshanterare har inget namnattribut definieratSecHandler: %s(%s)Säkerhetshanterare %s(%s) kunde inte skapasSäkerhetshanterarprocessering misslyckadesSäkerhetshanterarprocessering misslyckades: %sSäkerhetskontroll misslyckades för inkommande TLS-meddelandeSäkerhetskontroll misslyckades för utgÃ¥ende TLS-meddelandeSäkerhetskontroll misslyckades i SOAP-MCC för inkommande meddelandeSäkerhetskontroll misslyckades i SOAP-MCC för inkommande meddelande: %sSäkerhetskontroll misslyckades i SOAP-MCC för utgÃ¥ende meddelandeSäkerhetskontroll misslyckades i SOAP-MCC för utgÃ¥ende meddelande: %sSäkerhetskontroll misslyckades i TLS-MCC för inkommande meddelandeSäkerhetsprocessering/kontroll misslyckades: %sSäkerhetsprocessering/kontroll för '%s' misslyckades: %sSäkerhetsprocessering/kontroll för '%s' OKSäkerhetsprocessering/kontroll OKSelect misslyckades: %sSjälvsignerat certifikatSändning av svar misslyckades: %sSendCommand: Kommando: %sSendCommand: Misslyckades: %sSendCommand: Svar: %sSendCommand: Avbröts pÃ¥ grund av timeout efter %d msServer-SRM-version: %sServer-implementering: %sServer stoppadTjänsten %s(%s) kunde inte skapasTjänsteloop: Ändpunkt %sTjänsteändpunkt %s (typ %s) lagd till i listan för direktinsändningTjänsteändpunkt %s (typ %s) lagd till i listan för resurssökningTjänsten har inget id-attribut definieratTjänsten har inget namnattribut definieratTjänsteinformation:Tjänsten väntar pÃ¥ begärningarTjänstesidans MCCer har laddats inBetjäningstillstÃ¥nd: %sSessionkatalog %s ägs av %i, men nuvarande mappade användare är %iSessionskatalog '%s' innehÃ¥ller användarspecifika substitutioner - hoppar överSessionskatalog att användaSessions-rotkatalog saknasSessionskatalog %s: Fritt utrymme %f GBSätter förbindelsegräns till %i, förbindelse över gränsen kommer att %sSätter pbsz till %luSätter status (%s) för ändpunkt: %sSätter status (STARTED) för ändpunkt: %sSätter subjekt-namnSätter userRequestDescription till %sAndelsinformation:Ska vänta pÃ¥ att destination förberedsSka vänta pÃ¥ att källa förberedsVisa %s hjälpalternativVisa hjälpalternativStänger av demonStänger ner dataleveranstjänstStänger ner datastaging-trÃ¥darStänger ner schemaläggareSimpleMap: %sSimpleMap: fick ny unmap-tid pÃ¥ %u sekunderSimpleMap: felaktigt nummer i unmaptime-kommandoHoppar över %s replika %sHoppar över beräkningsändpukt '%s', eftersom den har '%s'-gränssnitt i stället för det begärda '%s'.Hoppar över ogiltigt URL-alternativ: %sHoppar över policyAuthority VOMS-AC-attributHoppar över replika pÃ¥ lokal värd: %sHoppar över hämtat jobb (%s) eftersom det sändes in via ett annat gränssnitt (%s).Hoppar över tjänst: hittade ingen SchemaPath!Hoppar över tjänst: hittade ingen ServicePath!Socketkonvertering misslyckas: %sSocketar passar inte ihop vid avslut %i != %iNÃ¥gra adresser misslyckades. 
Lyssnar pÃ¥ %u av %u.NÃ¥gra överföringar misslyckadesSorterar efter lediga slottar i köSorterar efter indatas tillgänglighet pÃ¥ targetSorterar efter angivet benchmark (förval "specint2000")Sortera replikor enligt URL-mappSorterar replikor enligt föredraget mönster %sKäll-URL saknasKäll-URL stöds inte: %sKäll-URL är inte giltig: %sKälla och/eller destination är indextjänst, kommer att slÃ¥ upp replikorKontroll av källa begärd men misslyckades: %sKälla är inte en giltig URLKälla mappas till %sKälla är inte redo, kommer att vänta %u sekunderkällan är densamma som destinationenKällans ändringstid: %sKälla eller destination kräver stagingKälla: %sAngiven modul hittades inte i cacheAngiven överlagringsfil (%s) existerar inte.Antal jobb som laddar ned/upp: %iStagingbegäran avbröts pÃ¥ grund av timeout, kommer att frigöra begäranLaddar ned/upp: %sStartar i förgrundenPÃ¥börja testBörja vänta 10 sek...StartReadingStartReading: Fil förbereddes inte pÃ¥ rätt sättStartWritingStartWriting: Fil förbereddes inte pÃ¥ rätt sättStartade fjärrleverans pÃ¥ %sStartar DTR-trÃ¥darStartar kontrollerad processStartar datastaging-trÃ¥darStartar hjälpprocess: %sStartar jobbprocesserings-trÃ¥dStartar jobbmonitoreringStartar ny DTR för %sBörjar frÃ¥ga suspenderad ändpunkt (%s) - ingen annan ändpunkt för denna tjänst hÃ¥ller pÃ¥ att frÃ¥gas eller har blivit frÃ¥gad framgÃ¥ngsrikt.Startar under-trÃ¥d för att frÃ¥ga ändpunkten pÃ¥ %sStartar trÃ¥d för att frÃ¥ga ändpunkten pÃ¥ %sStat: erhÃ¥llen ändringstid %sStat: erhÃ¥llen storlek: %lluTillstÃ¥ndsnamn för plugin saknasStatus för tjänsteändpunkt "%s" är satt till inaktiv i ARCHERY. Hoppar över.FrÃ¥gade om status för %d jobb, %d jobb returnerade informationStopReading slutar vänta pÃ¥ transfer_condition.StopReading börjar vänta pÃ¥ transfer_condition.StopReading: avbryter förbindelseStopWriting slutar vänta pÃ¥ transfer_condition.StopWriting börjar vänta pÃ¥ transfer_condition.StopWriting: Beräkna checksumma %sStopWriting: avbryter förbindelseStopWriting: letar efter för checksumma för %sAvslutade jobbprocesseringStoppar hjälpprocess: %sStoppar jobbprocesseringstrÃ¥dStoppar serverLagrar fil %sSparar port %i för %sLagrar temporär proxy pÃ¥ %sSträng framgÃ¥ngsrikt tolkad som %s.Subjektattribut %s har ingen känd NID, hoppar överSubjekt börjar inte med '/'Subjekt-namn: %sBegärans subjekt är nullSubjekt att matcha: %sSubjekt: %sInsändningsändpunktInsändning misslyckadesSubmit: Misslyckades med att sända CWD-kommandoSubmit: Misslyckades med att sända CWD new-kommandoSubmit: Misslyckades med att sända jobbeskrivningSubmit: Misslyckades med att koppla uppSubmit: Misslyckades med att ladda upp lokala indatafilerSubmit: tjänsten har inga lämpliga informationsgränssnitt - behöver org.nordugrid.ldapngSubmitterPlugin %s kunde inte skapasHittade inte SubmitterPlugin-plugin "%s".Sänder in jobb Sänder in test-jobb %d:Lyckades lägga till Independent-OID, tagg %d returneradesLyckades lägga till RFC-proxy-OID, tagg %d returneradesLyckades lägga till VOMS-AC-sekvens-OID, tagg %d returneradesLyckades lägga till annyLanguage-OID, tagg %d returneradesLyckades lägga till inheritAll-OID, tagg %d returneradesLyckades med att autentisera SAML-tokenLyckades med att autentisera användarnamnstokenLyckades med att autentisera X509-tokenLyckades med att ändra lösenord pÃ¥ myproxy-serverLyckades ändra tillit till: %sLyckades konvertera PrivateKeyInfo till EVP_PKEYLyckades med att ta bort referens pÃ¥ myproxy-serverLyckades exportera PKCS12Lyckades generera publik/privat nyckelparLyckades med att hämta en 
proxy i %s frÃ¥n myproxy-server %sLyckades erhÃ¥lla referensLyckades med att hämta information frÃ¥n myproxy-serverLyckades importera certifikatLyckades importera privat nyckelLyckades med att initiera NSSLyckades ladda in PrivateKeyInfoLyckades skriva ut certifikat till %sLyckades skriva ut certifikatbegäran till %sLyckades med att lägga upp en proxy pÃ¥ myproxy-serverLyckades sända DelegationService: %s och DelegationID: %s info till peer-tjänstLyckades signera proxycertifikatetLyckades verifiera signaturen under Lyckades verifiera signaturen under Lyckades verifiera det signerade certifikatetDet tillhandahÃ¥llna användarnamnet %s matchar inte det mappade användarnamnet %sProfiler som stöds:Dessa begränsningar stöds: validityStart=tid (t.ex. 2008-05-29T10:20:30Z; om ej angivet, börjar giltighetstiden nu) validityEnd=tid validityPeriod=tid (t.ex. 43200 eller 12h eller 12H; om varken validityPeriod eller validityEnd angivet, är förval 12 timmar för lokal proxy och 168 timmar för delegerad proxy pÃ¥ myproxy-server) vomsACvalidityPeriod=tid (t.ex. 43200 eller 12h eller 12H; om ej angivet, är förval minimum av 12 timmar och validityPeriod) myproxyvalidityPeriod=tid (livstid för proxyer som delegeras av myproxy-server, t.ex. 43200 eller 12h eller 12H; om ej angivet, är förval minimum av 12 timmar och validityPeriod (vilket är livstiden för den delegerade proxyn pÃ¥ myproxy-servern)) proxyPolicy=policy-text proxyPolicyFile=policy-fil keybits=nummer - längd för den genererade nyckeln, Förval är 2048 bitar. Särskilt värde 'inherit' betyder att det signerande certifikatets nyckellängd används. signingAlgorithm=namn - signeringsalgoritm att använda för att signera proxyns publika nyckel. Möjliga värden är sha1, sha2 (alias för sha256), sha224, sha256, sha384, sha512 och inherit (använd det signerande certifikatets algoritm). Förval är inherit. PÃ¥ gamla system, är endast sha1 möjligt. Informationspunker som stöds är: subject - proxycertifikatets subjektnamn. identity - proxycertifikatets identitets-subjektnamn. issuer - proxycertifikatets utfärdar-subjektnamn. ca - subjektnamn för CA som utfärdade det ursprungliga certifikatet. path - filsystem-sökväg till fil som innehÃ¥ller proxyn. type - typ av proxycertifikat. validityStart - klockslag dÃ¥ proxyns giltighetstid börjar. validityEnd - klockslag dÃ¥ proxyns giltighetstid slutar. validityPeriod - längd pÃ¥ proxyns giltighetstid i sekunder. validityLeft - kvarvarande längd pÃ¥ proxyns giltighetstid i sekunder. vomsVO - VO-namn representerat av VOMS-attribut vomsSubject - subjekt för certifikat för vilket VOMS-attribut utfärdats vomsIssuer - subjekt för tjänst som utfärdat VOMS-certifikat vomsACvalidityStart - klockslag dÃ¥ VOMS-attributets giltighetstid börjar. vomsACvalidityEnd - klockslag dÃ¥ VOMS-attributets giltighetstid slutar. vomsACvalidityPeriod - längd pÃ¥ VOMS-attributets giltighetstid i sekunder. vomsACvalidityLeft - kvarvarande längd pÃ¥ VOMS-attributets giltighetstid i sekunder. proxyPolicy keybits - proxycertifikatets nyckellängd i bitar. signingAlgorithm - algoritm som användes för att signera proxycertifikatet. Informationspunkterna skrivs i begärd ordning separerade av nyrad. Om en punkt har mer än ett värde skrivs dessa pÃ¥ samma rad separerade av |. Lösenordsdestinationer som stöds är: key - för att läsa privat nyckel myproxy - för att komma Ã¥t referens pÃ¥ myproxy-tjänst myproxynew - för att skapa referens pÃ¥ myproxy-tjänst all - för alla användningsomrÃ¥den. 
Lösenordskällor som stöds är: quoted string ("lösenord") - explicit angivet lösenord int - interaktiv begäran av lösenord frÃ¥n konsol stdin - läs lösenord frÃ¥n standard input avgränsat av nyrad file:filnamn - läs lösenord frÃ¥n fil med namn filnamn stream:# - läs lösenord frÃ¥n input stream nummer #. För närvarande stöds endast 0 (standard input). Stöder framtida reserveringStöder massinsändningStöder preemptionAntal suspenderade jobb: %iSuspenderar frÃ¥gandet av ändpunkt (%s) eftersom tjänsten pÃ¥ ändpunkten redan hÃ¥ller pÃ¥ att frÃ¥gas eller har frÃ¥gats.Att synkronisera den lokal listan med aktiva jobb med informationen i informationssystemet kan resultera i bristande överensstämmelse. Nyligen insända jobb kan ännu saknas i informationssystemet, medan jobb som nyligen schemalagts för borttagning fortfarande kan finnas kvar.Syntaxfel i 'notify'-attributvärde ('%s'), det innehÃ¥ller okända tillstÃ¥ndsflaggorSyntaxfel i 'notify'-attributvärde ('%s'), det mÃ¥ste innehÃ¥lla en e-postadressSyntaxfel i 'notify'-attributvärde ('%s'), det fÃ¥r endast innehÃ¥lla e-postadresser efter tillstÃ¥ndsflagg(a/or)Systeminställningsfil (%s eller %s) existerar inte.Systeminställningsfil (%s) innehÃ¥ller fel.Systeminställningsfil (%s) existerar inte.TCP-klientprocess anropadTCP-exekverare tas bortTLS tillhandahÃ¥ller ingen identitet, försöker med OTokens.TURL %s kan inte hanterasTarget %s matchar inte begär(t/da) gränssnitt.Target %s borttaget av FastestQueueBroker, rapporterar inte antal lediga slottarTarget %s borttaget av FastestQueueBroker, rapporterar inte totalt antal jobbTarget %s borttaget av FastestQueueBroker, rapporterar inte antal väntande jobbTeknologi: %sTemporärt tjänstefelTestet avbröts eftersom ingen resurs returnerade nÃ¥gon informationTest misslyckades, inga fler möjliga targetTest insänt med jobb-id: %sTest definierades med ID %d, men nÃ¥got fel uppstod när det tolkades.Attributet "FreeSlotsWithDuration" är felaktigt formatterat. Ignorerar det."FreeSlotsWithDuration"-attributet publicerat av "%s" är felformatterat. 
Ignorerar det.Flaggorna 'sort' och 'rsort' kan inte anges samtidigt.Utdata-BIO är NULLCA-certifikat-katalogen behövs för att kontakta VOMS- och myproxy-servrar.CA-utfärdaren (%s) för referenserna (%s) är inte betrodd av target (%s).Beräkningsändpunkten tillkännager inte sin kvalitetsnivÃ¥.Beräkningsändpunkten tillkännager inte sitt servicetillstÃ¥nd.Beräkningsändpunkten har ingen URL.Beräkningstjänsten tillkännager inte ditt gränssnitt.Beräkningstjänsten tillkännager inte sin kvalitetsnivÃ¥.Myproxy-perioden du angivit: %s kan inte tolkas.NSS-databasen kan inte upptäckas i Firefox-profilenSvaret kommer inte till denna ändeTjänsten tillkännaget inte sitt hälsotillstÃ¥nd.Tjänsten tillkännager inte sin typ.Statuskoden är SuccessVOMS-AC-perioden du angivit: %s kan inte tolkas.VOMS-servern med informationen: %s kan inte nÃ¥s, se till att den är tillgängligVOMS-servern med informationen: %s kan inte nÃ¥s, säkerställ att den är tillgänglig.[vo]-sektionen märkt '%s' har inga associerade filer och kan inte användas för matchningarccat-kommandot utför cat-kommandot pÃ¥ jobbets stdout, stderr eller gridmanager-fellogg.arcclean-kommandot tar bort ett jobb frÃ¥n en beräkningresurs.arccp-kommandot kopierar filer till, frÃ¥n och mellan gridlagringsresurser.arcget-kommandot används för att hämta resultatet av ett jobb.arcinfo-kommandot används för att erhÃ¥lla statusen pÃ¥ beräkningsresurser pÃ¥ griden.arckill-kommandot används för att avbryta körande jobb.arcls-kommandot används för att lista filer pÃ¥ gridlagringsresurser och i filindexkataloger.arcmkdir-kommandot skapar kataloger pÃ¥ gridlagringsresurser och kataloger.arcproxy-kommandot skapar en proxy frÃ¥n ett nyckel/certifikat-par som sedan kan användas för att komma Ã¥t gridresurser.arcrename-kommandot byter namn pÃ¥ filer pÃ¥ gridlagringsresurser.arcrm-kommandot tar bort filer pÃ¥ gridlagringsresurser.arcstat-kommandot används för att erhÃ¥lla statusen pÃ¥ jobb som sänts in till gridresurser.arcsub-kommandot används för att sända in jobb till beräkningsresurser pÃ¥ griden.arcsync-kommandot synkroniserar din lokala jobblista med information frÃ¥n de angivna resurserna eller indexservrarna.arctest-kommandot används för att testa kluster som resurser.Attributinformationen frÃ¥n VOMS-server: %s listas som följer:Giltighetstiden för den tillgängliga CRLen har gÃ¥tt utDen tillgängliga CRLen är inte giltig änbrokerarguments-attributet kan endast användas i kombination med brokername-attributetCertifikatet med subjekt %s är inte giltigtXRSL-attributet cluster stöds för närvarande inte.Referensen som skall signeras innehÃ¥ller ingen begäranReferensen som skall signeras är NULLReferensens privata nyckel har redan initieratsDen förvalda inställningsfilen (%s) är inte en vanlig fil.Delegerade referensen erhÃ¥llen frÃ¥n delegeringstjänsten lagras till sökväg: %sDelegerade referensen erhÃ¥llen frÃ¥n sökväg: %sTargets (%s) driftstopp har inte publicerats. 
BehÃ¥ller target.Sluttiden som du angivit: %s kan inte tolkas.Sluttiden du angivit: %s är före starttiden: %s.Ändpunkten (%s) stöds inte av denna plugin (%s)Delegeringstjänstens ändpunkt ska ställas inFilen %s är för tillfället lÃ¥st med ett giltigt lÃ¥sFörsta gränssnittet som stöds an pluginen %s är en tom sträng, hoppar över pluginen.Följande %d Ã¥terinsändes inteFöljande jobb sändes inte in:Gränssnittet för denna ändpunkt (%s) är ej angivet, kommer att prova alla möjliga pluginerJobbeskrivningen kan ocksÃ¥ vara en fil eller en sträng i ADL- eller XRSL-format.Begränsningen av antalet bitar i nyckeln är felaktig: %s.Namnen pÃ¥ den privata nyckeln som ska tas bort är tomtDe gamla GSI-proxyerna stöds inte längre. Använd inte alternativ -O/--old.Nyttolasten i inkommande meddelande är tomNyttolasten i utgÃ¥ende meddelande är tom,Längden som du angivit: %s kan inte tolkas.Pluginen %s stöder inte nÃ¥got gränssnitt, hoppar över den.Policyfilen angiven för simplelist.pdp existerar inte, kontrollera location-attributet i simplelist-PDP-noden i tjänsteinställningarnaPolicy-sprÃ¥ket: %s stöds intePrivata nyckeln för signering har inte initieratsProcessen som äger lÃ¥set pÃ¥ %s kör inte längre, kommer att ta bort lÃ¥sBegäran har passerat policyutvärderingenSigneringsalgoritmen %s är ej tillÃ¥ten, den skall vara SHA1 eller SHA2 för att signera certifikatbegärningarDet angivna Globus-attributet (%s) stöds inte. %s ignoreras.Starttiden som du angivit: %s kan inte tolkas.Början, slut och längd kan inte användas samtidigtSubjekt matchar inte utfärdarnamn + proxy-CN-postGiltighetstiden för VOMS-AC har kortats frÃ¥n %s till %s, pÃ¥ grund av giltighetsvillkoret pÃ¥ VOMS-serversidan. Värdet pÃ¥ XRSL-attributet acl är inte giltig XML.Värdet pÃ¥ ftpthreads-attributet mÃ¥ste vara ett nummer frÃ¥n 1 till 10Värdet pÃ¥ keysize-attributet i inställningsfilen (%s) tolkades endast delvisVärdet pÃ¥ timeout-attributet i inställningsfilen (%s) tolkades endast delvisDet finns %d NSS-baskataloger där certifikat, nycklar och moduldatabaser finnsDet finns %d RequestItemDet finns %d kommandon till samma VOMS-server %sDet finns %d begärningar som uppfyller Ã¥tminstone en policyDet finns %d servrar med samma namn: %s i din vomses-fil, men ingen av dem kan nÃ¥s eller returnera ett giltigt meddelande.Det finns %d servrar med samma namn: %s i din vomses-fil, men ingen av dem kan nÃ¥s eller returnera ett giltigt meddelande. Men proxy utan VOMS-AC-tillägg kommer fortfarande att genereras.Det finns %d användarcertifikat i NSS-databasenDet finns inga ändpunkter i registret som matchar den begärda informationsändpunktstypen.Det finns inga ändpunkter i registret som matchar den begärda insändningsändpunktstypenDet finns %d subjekt som uppfyller Ã¥tminstone en policyDet finns inget delegerat X509-token i svaretDet finns inget delegerat Format-token i svaretDet finns ingen Format-begäran i svaretDet finns inget Id- eller X509-begäran-värde i svaretDet finns inget Id- eller X509-token-värde i svaretDet finns ingen SOAP-förbindelse-kedja i inställningarnaDet finns inget SOAP-svarDet finns inget UpdateCredentialsResponse in svaretDet finns ingen X509-begäran i svaretDet finns inget certifikat med namn %s, certifikatet kan tas bort när CSR genererasDet finns inget digest i utfärdarens privata-nyckel-objektDet finns inget lokalt LRMS-ID. 
Meddelande kommer inte att skrivas till BLAH-logg.Det finns inget svarDet uppstod ett problem under destinationens efter-överförings-hantering efter felet: %sDet uppstod ett problem under källans efter-överförings-hantering: %sDet fanns inget HTTP-svarDet fanns inget SOAP-svarInget SOAP-svar returnerades frÃ¥n PDP-server: %sTredjepartsöverföring stöds inte för dessa ändpunkterTredjepartsöverföring begärdes men motsvarande plugin kunde inte laddas in. Är GFAL-plugin installerad? Om inte, installera paketen 'nordugrid-arc-plugins-gfal' och 'gfal2-all'. Beroende pÃ¥ din typ av installation kan paketnamnen variera.Detta INFO-meddelande borde ocksÃ¥ sesDetta INFO-meddelande borde sesDetta VERBOSE-meddelande borde inte sesDetta VERBOSE-meddelande borde nu sesDenna instans har redan tagits bortDetta jobb skickades nyligen in och har kanske inte nÃ¥tt informationssystemet ännuDetta meddelande gÃ¥r till ursprungliga destinationenDetta meddelande gÃ¥r till per-trÃ¥d-destinationenDenna process äger redan lÃ¥set pÃ¥ %sDetta verkar vara ett tillfälligt fel, försök igen senareDetta lilla verktyg kan användas för att testa JobDescription-klassens konverteringsmöjligheter.Denna användare nekas att sända in nya jobb.TrÃ¥d avslutades med Glib-fil: %sTrÃ¥d avslutades med Glib-undantag: %sTrÃ¥d avslutades med generellt undantag: %sKvarvarande tid för AC: %sKvarvarande tid för AC: ACs giltighetstid har gÃ¥tt utKvarvarande tid för AC: AC är inte giltig änKvarvarande tid för proxy: %sKvarvarande tid för proxy: Proxyns giltighetstid har gÃ¥tt utKvarvarande tid för proxy: Proxyn är inte giltig änTid spenderad i väntan pÃ¥ disk: %.3f msTid spenderad i väntan pÃ¥ nätverk: %.3f msAvbröts pÃ¥ grund av timeout under väntan pÃ¥ cachelÃ¥sTimeout vid uppkoppling till %s(%s):%i - %i sTimeout har passerat, kommer att ta bort lÃ¥sfil %sTimeout vid väntan pÃ¥ Globus callback - läcker förbindelseTimeout vid väntan pÃ¥ mkdirFör att Ã¥terställa saknade jobb, kör arcsyncFör mÃ¥nga argument i inställningarFör mÃ¥nga förbindelse - tappar en nyFör mÃ¥nga förbindelse - ställer ny i köFör mÃ¥nga fel för att erhÃ¥lla checksumma - ger upFör mÃ¥nga filer i en begäran - försök igen med färre filerVerktyg för att skriva grami-filrepresentationen av en jobbeskrivningsfil.Totalt antal jobb: %iTotalt antal logiska CPUer: %iTotalt antal hittade jobb: Totalt antal hittade nya jobb: Totalt antal fysiska CPUer: %iTotalt antal slottar: %iÖverföring MISSLYCKADES: %sÖverföring avbröts framgÃ¥ngsriktÖverföring slutfördÖverföring misslyckadesÖverföring misslyckades: %sÖverföring avslutad: %llu byteer överförda %sÖverföring frÃ¥n %s till %sÖverföring avbröts efter %i sekunder utan kommunikationÖverföring lyckadesÖverföring avbröts pÃ¥ grund av timeoutBetrodda CA:Försök att hämta attribut frÃ¥n VOMS-server i ordning: %sProvar alla tillgängliga gränssnittFörsöker med nästa replikaFörsöker kontrollera X509-certifikat med check_cert_typeFörsöker koppla upp %s(%s):%dFörsöker lyssna pÃ¥ %s:%s(%s)Försöker lyssna pÃ¥ TCP-port %s(%s)Försöker migrera till %s: Migrering till ett %s-gränssnitt stöds inte.Försöker hämta jobbeskrivning för %s frÃ¥n beräkningsresursFörsöker starta suspenderad ändpunkt (%s)Försöker sända in direkt till ändpunkt (%s)Försöker sända in till ändpunkt (%s) med gränssnitt (%s) med plugin (%s).TvÃ¥ indatafiler har identiska namn '%s'.Typ är katalog, anropar srmRmDirTyp är fil, anropar srmRmTyp: %sTyper av beräkningstjänster som %s kan sända in jobb till:Typer av lokala informationstjänster som %s kan samla in information frÃ¥n:Typer av 
[Swedish (sv) gettext message catalogue for nordugrid-arc: translated log and UI strings for the client tools, A-REX, data transfer, VOMS and GridFTP components, stored here with the msgid keys stripped and the UTF-8 character "å" mis-encoded as "Ã¥".]
nordugrid-arc-6.14.0/PaxHeaders.30264/aclocal.m40000644000000000000000000000013214152153414017175 xustar000000000000000030 mtime=1638455052.951864538 30 atime=1638455053.054866086 30 ctime=1638455095.722507187 nordugrid-arc-6.14.0/aclocal.m40000644000175000002070000013506314152153414017172 0ustar00mockbuildmock00000000000000# generated automatically by aclocal 1.13.4 -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE.
m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])]) m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.69],, [m4_warning([this file was generated for autoconf 2.69. You have another version of autoconf. It may work, but is not guaranteed to. If you have problems, you may need to regenerate the build system entirely. To do so, use the procedure documented by the package, typically 'autoreconf'.])]) # pkg.m4 - Macros to locate and utilise pkg-config. -*- Autoconf -*- # serial 1 (pkg-config-0.24) # # Copyright © 2004 Scott James Remnant . # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # PKG_PROG_PKG_CONFIG([MIN-VERSION]) # ---------------------------------- AC_DEFUN([PKG_PROG_PKG_CONFIG], [m4_pattern_forbid([^_?PKG_[A-Z_]+$]) m4_pattern_allow([^PKG_CONFIG(_(PATH|LIBDIR|SYSROOT_DIR|ALLOW_SYSTEM_(CFLAGS|LIBS)))?$]) m4_pattern_allow([^PKG_CONFIG_(DISABLE_UNINSTALLED|TOP_BUILD_DIR|DEBUG_SPEW)$]) AC_ARG_VAR([PKG_CONFIG], [path to pkg-config utility]) AC_ARG_VAR([PKG_CONFIG_PATH], [directories to add to pkg-config's search path]) AC_ARG_VAR([PKG_CONFIG_LIBDIR], [path overriding pkg-config's built-in search path]) if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then AC_PATH_TOOL([PKG_CONFIG], [pkg-config]) fi if test -n "$PKG_CONFIG"; then _pkg_min_version=m4_default([$1], [0.9.0]) AC_MSG_CHECKING([pkg-config is at least version $_pkg_min_version]) if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) PKG_CONFIG="" fi fi[]dnl ])# PKG_PROG_PKG_CONFIG # PKG_CHECK_EXISTS(MODULES, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) # # Check to see whether a particular set of modules exists. Similar # to PKG_CHECK_MODULES(), but does not set variables or print errors. 
# # Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG]) # only at the first occurence in configure.ac, so if the first place # it's called might be skipped (such as if it is within an "if", you # have to call PKG_CHECK_EXISTS manually # -------------------------------------------------------------- AC_DEFUN([PKG_CHECK_EXISTS], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl if test -n "$PKG_CONFIG" && \ AC_RUN_LOG([$PKG_CONFIG --exists --print-errors "$1"]); then m4_default([$2], [:]) m4_ifvaln([$3], [else $3])dnl fi]) # _PKG_CONFIG([VARIABLE], [COMMAND], [MODULES]) # --------------------------------------------- m4_define([_PKG_CONFIG], [if test -n "$$1"; then pkg_cv_[]$1="$$1" elif test -n "$PKG_CONFIG"; then PKG_CHECK_EXISTS([$3], [pkg_cv_[]$1=`$PKG_CONFIG --[]$2 "$3" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes ], [pkg_failed=yes]) else pkg_failed=untried fi[]dnl ])# _PKG_CONFIG # _PKG_SHORT_ERRORS_SUPPORTED # ----------------------------- AC_DEFUN([_PKG_SHORT_ERRORS_SUPPORTED], [AC_REQUIRE([PKG_PROG_PKG_CONFIG]) if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi[]dnl ])# _PKG_SHORT_ERRORS_SUPPORTED # PKG_CHECK_MODULES(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND], # [ACTION-IF-NOT-FOUND]) # # # Note that if there is a possibility the first call to # PKG_CHECK_MODULES might not happen, you should be sure to include an # explicit call to PKG_PROG_PKG_CONFIG in your configure.ac # # # -------------------------------------------------------------- AC_DEFUN([PKG_CHECK_MODULES], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl pkg_failed=no AC_MSG_CHECKING([for $1]) _PKG_CONFIG([$1][_CFLAGS], [cflags], [$2]) _PKG_CONFIG([$1][_LIBS], [libs], [$2]) m4_define([_PKG_TEXT], [Alternatively, you may set the environment variables $1[]_CFLAGS and $1[]_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details.]) if test $pkg_failed = yes; then AC_MSG_RESULT([no]) _PKG_SHORT_ERRORS_SUPPORTED if test $_pkg_short_errors_supported = yes; then $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1` else $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD m4_default([$4], [AC_MSG_ERROR( [Package requirements ($2) were not met: $$1_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. _PKG_TEXT])[]dnl ]) elif test $pkg_failed = untried; then AC_MSG_RESULT([no]) m4_default([$4], [AC_MSG_FAILURE( [The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. _PKG_TEXT To get pkg-config, see .])[]dnl ]) else $1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS $1[]_LIBS=$pkg_cv_[]$1[]_LIBS AC_MSG_RESULT([yes]) $3 fi[]dnl ])# PKG_CHECK_MODULES # PKG_INSTALLDIR(DIRECTORY) # ------------------------- # Substitutes the variable pkgconfigdir as the location where a module # should install pkg-config .pc files. By default the directory is # $libdir/pkgconfig, but the default can be changed by passing # DIRECTORY. The user can override through the --with-pkgconfigdir # parameter. 
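# Illustrative usage sketch for the pkg-config macros defined above (the
# module name "libfoo" and prefix "LIBFOO" are placeholders, not part of
# this package). A configure.ac would typically contain:
#
#   PKG_PROG_PKG_CONFIG
#   PKG_CHECK_MODULES([LIBFOO], [libfoo >= 1.0],
#     [AC_DEFINE([HAVE_LIBFOO], [1], [Define if libfoo is available])],
#     [AC_MSG_WARN([libfoo not found - building without it])])
#
# On success PKG_CHECK_MODULES substitutes LIBFOO_CFLAGS and LIBFOO_LIBS,
# which Makefile.am can then reference as $(LIBFOO_CFLAGS) and $(LIBFOO_LIBS).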
AC_DEFUN([PKG_INSTALLDIR], [m4_pushdef([pkg_default], [m4_default([$1], ['${libdir}/pkgconfig'])]) m4_pushdef([pkg_description], [pkg-config installation directory @<:@]pkg_default[@:>@]) AC_ARG_WITH([pkgconfigdir], [AS_HELP_STRING([--with-pkgconfigdir], pkg_description)],, [with_pkgconfigdir=]pkg_default) AC_SUBST([pkgconfigdir], [$with_pkgconfigdir]) m4_popdef([pkg_default]) m4_popdef([pkg_description]) ]) dnl PKG_INSTALLDIR # PKG_NOARCH_INSTALLDIR(DIRECTORY) # ------------------------- # Substitutes the variable noarch_pkgconfigdir as the location where a # module should install arch-independent pkg-config .pc files. By # default the directory is $datadir/pkgconfig, but the default can be # changed by passing DIRECTORY. The user can override through the # --with-noarch-pkgconfigdir parameter. AC_DEFUN([PKG_NOARCH_INSTALLDIR], [m4_pushdef([pkg_default], [m4_default([$1], ['${datadir}/pkgconfig'])]) m4_pushdef([pkg_description], [pkg-config arch-independent installation directory @<:@]pkg_default[@:>@]) AC_ARG_WITH([noarch-pkgconfigdir], [AS_HELP_STRING([--with-noarch-pkgconfigdir], pkg_description)],, [with_noarch_pkgconfigdir=]pkg_default) AC_SUBST([noarch_pkgconfigdir], [$with_noarch_pkgconfigdir]) m4_popdef([pkg_default]) m4_popdef([pkg_description]) ]) dnl PKG_NOARCH_INSTALLDIR # Copyright (C) 2002-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_AUTOMAKE_VERSION(VERSION) # ---------------------------- # Automake X.Y traces this macro to ensure aclocal.m4 has been # generated from the m4 files accompanying Automake X.Y. # (This private macro should not be called outside this file.) AC_DEFUN([AM_AUTOMAKE_VERSION], [am__api_version='1.13' dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to dnl require some minimum version. Point them to the right macro. m4_if([$1], [1.13.4], [], [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl ]) # _AM_AUTOCONF_VERSION(VERSION) # ----------------------------- # aclocal traces this macro to find the Autoconf version. # This is a private macro too. Using m4_define simplifies # the logic in aclocal, which can simply ignore this definition. m4_define([_AM_AUTOCONF_VERSION], []) # AM_SET_CURRENT_AUTOMAKE_VERSION # ------------------------------- # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. # This function is AC_REQUIREd by AM_INIT_AUTOMAKE. AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], [AM_AUTOMAKE_VERSION([1.13.4])dnl m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) # AM_AUX_DIR_EXPAND -*- Autoconf -*- # Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets # $ac_aux_dir to '$srcdir/foo'. In other projects, it is set to # '$srcdir', '$srcdir/..', or '$srcdir/../..'. # # Of course, Automake must honor this variable whenever it calls a # tool from the auxiliary directory. The problem is that $srcdir (and # therefore $ac_aux_dir as well) can be either absolute or relative, # depending on how configure is run. 
This is pretty annoying, since # it makes $ac_aux_dir quite unusable in subdirectories: in the top # source directory, any form will work fine, but in subdirectories a # relative path needs to be adjusted first. # # $ac_aux_dir/missing # fails when called from a subdirectory if $ac_aux_dir is relative # $top_srcdir/$ac_aux_dir/missing # fails if $ac_aux_dir is absolute, # fails when called from a subdirectory in a VPATH build with # a relative $ac_aux_dir # # The reason of the latter failure is that $top_srcdir and $ac_aux_dir # are both prefixed by $srcdir. In an in-source build this is usually # harmless because $srcdir is '.', but things will broke when you # start a VPATH build or use an absolute $srcdir. # # So we could use something similar to $top_srcdir/$ac_aux_dir/missing, # iff we strip the leading $srcdir from $ac_aux_dir. That would be: # am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"` # and then we would define $MISSING as # MISSING="\${SHELL} $am_aux_dir/missing" # This will work as long as MISSING is not called from configure, because # unfortunately $(top_srcdir) has no meaning in configure. # However there are other variables, like CC, which are often used in # configure, and could therefore not use this "fixed" $ac_aux_dir. # # Another solution, used here, is to always expand $ac_aux_dir to an # absolute PATH. The drawback is that using absolute paths prevent a # configured tree to be moved without reconfiguration. AC_DEFUN([AM_AUX_DIR_EXPAND], [dnl Rely on autoconf to set up CDPATH properly. AC_PREREQ([2.50])dnl # expand $ac_aux_dir to an absolute path am_aux_dir=`cd $ac_aux_dir && pwd` ]) # AM_CONDITIONAL -*- Autoconf -*- # Copyright (C) 1997-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_CONDITIONAL(NAME, SHELL-CONDITION) # ------------------------------------- # Define a conditional. AC_DEFUN([AM_CONDITIONAL], [AC_PREREQ([2.52])dnl m4_if([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl AC_SUBST([$1_TRUE])dnl AC_SUBST([$1_FALSE])dnl _AM_SUBST_NOTMAKE([$1_TRUE])dnl _AM_SUBST_NOTMAKE([$1_FALSE])dnl m4_define([_AM_COND_VALUE_$1], [$2])dnl if $2; then $1_TRUE= $1_FALSE='#' else $1_TRUE='#' $1_FALSE= fi AC_CONFIG_COMMANDS_PRE( [if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then AC_MSG_ERROR([[conditional "$1" was never defined. Usually this means the macro was only invoked conditionally.]]) fi])]) # Copyright (C) 1999-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # There are a few dirty hacks below to avoid letting 'AC_PROG_CC' be # written in clear, in which case automake, when reading aclocal.m4, # will think it sees a *use*, and therefore will trigger all it's # C support machinery. Also note that it means that autoscan, seeing # CC etc. in the Makefile, will ask for an AC_PROG_CC use... # _AM_DEPENDENCIES(NAME) # ---------------------- # See how the compiler implements dependency checking. # NAME is "CC", "CXX", "OBJC", "OBJCXX", "UPC", or "GJC". # We try a few techniques and use that to set a single cache variable. 
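# Illustrative usage sketch for AM_CONDITIONAL defined above (the
# "--enable-foo" switch and the ENABLE_FOO name are placeholders). In
# configure.ac:
#
#   AC_ARG_ENABLE([foo], [AS_HELP_STRING([--enable-foo], [build foo support])])
#   AM_CONDITIONAL([ENABLE_FOO], [test "x$enable_foo" = xyes])
#
# and in Makefile.am:
#
#   if ENABLE_FOO
#   bin_PROGRAMS = foo
#   endif
#
# Note that AM_CONDITIONAL must be invoked unconditionally; the
# AC_CONFIG_COMMANDS_PRE check above reports an error otherwise.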
# # We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was # modified to invoke _AM_DEPENDENCIES(CC); we would have a circular # dependency, and given that the user is not expected to run this macro, # just rely on AC_PROG_CC. AC_DEFUN([_AM_DEPENDENCIES], [AC_REQUIRE([AM_SET_DEPDIR])dnl AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl AC_REQUIRE([AM_MAKE_INCLUDE])dnl AC_REQUIRE([AM_DEP_TRACK])dnl m4_if([$1], [CC], [depcc="$CC" am_compiler_list=], [$1], [CXX], [depcc="$CXX" am_compiler_list=], [$1], [OBJC], [depcc="$OBJC" am_compiler_list='gcc3 gcc'], [$1], [OBJCXX], [depcc="$OBJCXX" am_compiler_list='gcc3 gcc'], [$1], [UPC], [depcc="$UPC" am_compiler_list=], [$1], [GCJ], [depcc="$GCJ" am_compiler_list='gcc3 gcc'], [depcc="$$1" am_compiler_list=]) AC_CACHE_CHECK([dependency style of $depcc], [am_cv_$1_dependencies_compiler_type], [if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_$1_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp` fi am__universal=false m4_case([$1], [CC], [case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac], [CXX], [case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac]) for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. 
These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_$1_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_$1_dependencies_compiler_type=none fi ]) AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type]) AM_CONDITIONAL([am__fastdep$1], [ test "x$enable_dependency_tracking" != xno \ && test "$am_cv_$1_dependencies_compiler_type" = gcc3]) ]) # AM_SET_DEPDIR # ------------- # Choose a directory name for dependency files. # This macro is AC_REQUIREd in _AM_DEPENDENCIES. AC_DEFUN([AM_SET_DEPDIR], [AC_REQUIRE([AM_SET_LEADING_DOT])dnl AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl ]) # AM_DEP_TRACK # ------------ AC_DEFUN([AM_DEP_TRACK], [AC_ARG_ENABLE([dependency-tracking], [dnl AS_HELP_STRING( [--enable-dependency-tracking], [do not reject slow dependency extractors]) AS_HELP_STRING( [--disable-dependency-tracking], [speeds up one-time build])]) if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' am__nodep='_no' fi AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno]) AC_SUBST([AMDEPBACKSLASH])dnl _AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl AC_SUBST([am__nodep])dnl _AM_SUBST_NOTMAKE([am__nodep])dnl ]) # Generate code to set up dependency tracking. -*- Autoconf -*- # Copyright (C) 1999-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_OUTPUT_DEPENDENCY_COMMANDS # ------------------------------ AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS], [{ # Older Autoconf quotes --file arguments for eval, but not when files # are listed without --file. Let's play safe and only enable the eval # if we detect the quoting. case $CONFIG_FILES in *\'*) eval set x "$CONFIG_FILES" ;; *) set x $CONFIG_FILES ;; esac shift for mf do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named 'Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # Grep'ing the whole file is not good either: AIX grep has a line # limit of 2048, but all sed's we know have understand at least 4000. 
if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then dirpart=`AS_DIRNAME("$mf")` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running 'make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "$am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`AS_DIRNAME(["$file"])` AS_MKDIR_P([$dirpart/$fdir]) # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done } ])# _AM_OUTPUT_DEPENDENCY_COMMANDS # AM_OUTPUT_DEPENDENCY_COMMANDS # ----------------------------- # This macro should only be invoked once -- use via AC_REQUIRE. # # This code is only required when automatic dependency tracking # is enabled. FIXME. This creates each '.P' file that we will # need in order to bootstrap the dependency handling code. AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS], [AC_CONFIG_COMMANDS([depfiles], [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS], [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"]) ]) # Do all the work for Automake. -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This macro actually does too much. Some checks are only needed if # your package does certain things. But this isn't really a big deal. # AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) # AM_INIT_AUTOMAKE([OPTIONS]) # ----------------------------------------------- # The call with PACKAGE and VERSION arguments is the old style # call (pre autoconf-2.50), which is being phased out. PACKAGE # and VERSION should now be passed to AC_INIT and removed from # the call to AM_INIT_AUTOMAKE. # We support both call styles for the transition. After # the next Automake release, Autoconf can make the AC_INIT # arguments mandatory, and then we can depend on a new Autoconf # release and drop the old call support. AC_DEFUN([AM_INIT_AUTOMAKE], [AC_PREREQ([2.65])dnl dnl Autoconf wants to disallow AM_ names. We explicitly allow dnl the ones we care about. m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl AC_REQUIRE([AC_PROG_INSTALL])dnl if test "`cd $srcdir && pwd`" != "`pwd`"; then # Use -I$(srcdir) only when $(srcdir) != ., so that make's output # is not polluted with repeated "-I." AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl # test to see if srcdir already configured if test -f $srcdir/config.status; then AC_MSG_ERROR([source directory already configured; run "make distclean" there first]) fi fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi AC_SUBST([CYGPATH_W]) # Define the identity of the package. dnl Distinguish between old-style and new-style calls. 
m4_ifval([$2], [AC_DIAGNOSE([obsolete], [$0: two- and three-arguments forms are deprecated.]) m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl AC_SUBST([PACKAGE], [$1])dnl AC_SUBST([VERSION], [$2])], [_AM_SET_OPTIONS([$1])dnl dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT. m4_if( m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]), [ok:ok],, [m4_fatal([AC_INIT should be called with package and version arguments])])dnl AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl _AM_IF_OPTION([no-define],, [AC_DEFINE_UNQUOTED([PACKAGE], ["$PACKAGE"], [Name of package]) AC_DEFINE_UNQUOTED([VERSION], ["$VERSION"], [Version number of package])])dnl # Some tools Automake needs. AC_REQUIRE([AM_SANITY_CHECK])dnl AC_REQUIRE([AC_ARG_PROGRAM])dnl AM_MISSING_PROG([ACLOCAL], [aclocal-${am__api_version}]) AM_MISSING_PROG([AUTOCONF], [autoconf]) AM_MISSING_PROG([AUTOMAKE], [automake-${am__api_version}]) AM_MISSING_PROG([AUTOHEADER], [autoheader]) AM_MISSING_PROG([MAKEINFO], [makeinfo]) AC_REQUIRE([AM_PROG_INSTALL_SH])dnl AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl AC_REQUIRE([AC_PROG_MKDIR_P])dnl # For better backward compatibility. To be removed once Automake 1.9.x # dies out for good. For more background, see: # # AC_SUBST([mkdir_p], ['$(MKDIR_P)']) # We need awk for the "check" target. The system "awk" is bad on # some platforms. AC_REQUIRE([AC_PROG_AWK])dnl AC_REQUIRE([AC_PROG_MAKE_SET])dnl AC_REQUIRE([AM_SET_LEADING_DOT])dnl _AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])], [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], [_AM_PROG_TAR([v7])])]) _AM_IF_OPTION([no-dependencies],, [AC_PROVIDE_IFELSE([AC_PROG_CC], [_AM_DEPENDENCIES([CC])], [m4_define([AC_PROG_CC], m4_defn([AC_PROG_CC])[_AM_DEPENDENCIES([CC])])])dnl AC_PROVIDE_IFELSE([AC_PROG_CXX], [_AM_DEPENDENCIES([CXX])], [m4_define([AC_PROG_CXX], m4_defn([AC_PROG_CXX])[_AM_DEPENDENCIES([CXX])])])dnl AC_PROVIDE_IFELSE([AC_PROG_OBJC], [_AM_DEPENDENCIES([OBJC])], [m4_define([AC_PROG_OBJC], m4_defn([AC_PROG_OBJC])[_AM_DEPENDENCIES([OBJC])])])dnl AC_PROVIDE_IFELSE([AC_PROG_OBJCXX], [_AM_DEPENDENCIES([OBJCXX])], [m4_define([AC_PROG_OBJCXX], m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl ]) AC_REQUIRE([AM_SILENT_RULES])dnl dnl The testsuite driver may need to know about EXEEXT, so add the dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below. AC_CONFIG_COMMANDS_PRE(dnl [m4_provide_if([_AM_COMPILER_EXEEXT], [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl ]) dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion. Do not dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further dnl mangled by Autoconf and run in a shell conditional statement. m4_define([_AC_COMPILER_EXEEXT], m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) # When config.status generates a header, we must update the stamp-h file. # This file resides in the same directory as the config header # that is generated. The stamp files are numbered to have different names. # Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the # loop where config.status creates the headers, so we can generate # our stamp files there. AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK], [# Compute $1's index in $config_headers. 
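# (Editorial note, not part of the upstream macro: for a header that is,
# say, the second entry in $config_headers, the loop below ends with
# _am_stamp_count=2, and the hook then writes a stamp-h2 file next to that
# header; "second" is just an illustrative position.)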
_am_arg=$1 _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $_am_arg | $_am_arg:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count]) # Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_INSTALL_SH # ------------------ # Define $install_sh. AC_DEFUN([AM_PROG_INSTALL_SH], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl if test x"${install_sh}" != xset; then case $am_aux_dir in *\ * | *\ *) install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; *) install_sh="\${SHELL} $am_aux_dir/install-sh" esac fi AC_SUBST([install_sh])]) # Copyright (C) 2003-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # Check whether the underlying file-system supports filenames # with a leading dot. For instance MS-DOS doesn't. AC_DEFUN([AM_SET_LEADING_DOT], [rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null AC_SUBST([am__leading_dot])]) # Check to see how 'make' treats includes. -*- Autoconf -*- # Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_MAKE_INCLUDE() # ----------------- # Check to see how make treats includes. AC_DEFUN([AM_MAKE_INCLUDE], [am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo this is the am__doit target .PHONY: am__doit END # If we don't find an include directive, just comment out the code. AC_MSG_CHECKING([for style of include used by $am_make]) am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # Ignore all kinds of additional output from 'make'. case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=include am__quote= _am_result=GNU ;; esac # Now try BSD make style include. if test "$am__include" = "#"; then echo '.include "confinc"' > confmf case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=.include am__quote="\"" _am_result=BSD ;; esac fi AC_SUBST([am__include]) AC_SUBST([am__quote]) AC_MSG_RESULT([$_am_result]) rm -f confinc confmf ]) # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- # Copyright (C) 1997-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_MISSING_PROG(NAME, PROGRAM) # ------------------------------ AC_DEFUN([AM_MISSING_PROG], [AC_REQUIRE([AM_MISSING_HAS_RUN]) $1=${$1-"${am_missing_run}$2"} AC_SUBST($1)]) # AM_MISSING_HAS_RUN # ------------------ # Define MISSING if not defined so far and test if it is modern enough. # If it is, set am_missing_run to use it, otherwise, to nothing. 
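# (Editorial note, not part of the upstream macros: the net effect of
# AM_MISSING_PROG plus the check below is a substitution of the form
#   ACLOCAL = ${SHELL} /path/to/build-aux/missing aclocal-1.13
# where the aux directory path and the "1.13" API version are purely
# illustrative values.)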
AC_DEFUN([AM_MISSING_HAS_RUN], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl AC_REQUIRE_AUX_FILE([missing])dnl if test x"${MISSING+set}" != xset; then case $am_aux_dir in *\ * | *\ *) MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; *) MISSING="\${SHELL} $am_aux_dir/missing" ;; esac fi # Use eval to expand $SHELL if eval "$MISSING --is-lightweight"; then am_missing_run="$MISSING " else am_missing_run= AC_MSG_WARN(['missing' script is too old or missing]) fi ]) # Copyright (C) 2003-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_MKDIR_P # --------------- # Check for 'mkdir -p'. AC_DEFUN([AM_PROG_MKDIR_P], [AC_PREREQ([2.60])dnl AC_REQUIRE([AC_PROG_MKDIR_P])dnl dnl FIXME we are no longer going to remove this! adjust warning dnl FIXME message accordingly. AC_DIAGNOSE([obsolete], [$0: this macro is deprecated, and will soon be removed. You should use the Autoconf-provided 'AC][_PROG_MKDIR_P' macro instead, and use '$(MKDIR_P)' instead of '$(mkdir_p)'in your Makefile.am files.]) dnl Automake 1.8 to 1.9.6 used to define mkdir_p. We now use MKDIR_P, dnl while keeping a definition of mkdir_p for backward compatibility. dnl @MKDIR_P@ is magic: AC_OUTPUT adjusts its value for each Makefile. dnl However we cannot define mkdir_p as $(MKDIR_P) for the sake of dnl Makefile.ins that do not define MKDIR_P, so we do our own dnl adjustment using top_builddir (which is defined more often than dnl MKDIR_P). AC_SUBST([mkdir_p], ["$MKDIR_P"])dnl case $mkdir_p in [[\\/$]]* | ?:[[\\/]]*) ;; */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; esac ]) # Helper functions for option handling. -*- Autoconf -*- # Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_MANGLE_OPTION(NAME) # ----------------------- AC_DEFUN([_AM_MANGLE_OPTION], [[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) # _AM_SET_OPTION(NAME) # -------------------- # Set option NAME. Presently that only means defining a flag for this option. AC_DEFUN([_AM_SET_OPTION], [m4_define(_AM_MANGLE_OPTION([$1]), [1])]) # _AM_SET_OPTIONS(OPTIONS) # ------------------------ # OPTIONS is a space-separated list of Automake options. AC_DEFUN([_AM_SET_OPTIONS], [m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) # _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) # ------------------------------------------- # Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. AC_DEFUN([_AM_IF_OPTION], [m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) # Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_RUN_LOG(COMMAND) # ------------------- # Run COMMAND, save the exit status in ac_status, and log it. # (This has been adapted from Autoconf's _AC_RUN_LOG macro.) AC_DEFUN([AM_RUN_LOG], [{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD (exit $ac_status); }]) # Check to make sure that the build environment is sane. -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. 
# # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_SANITY_CHECK # --------------- AC_DEFUN([AM_SANITY_CHECK], [AC_MSG_CHECKING([whether build environment is sane]) # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' ' case `pwd` in *[[\\\"\#\$\&\'\`$am_lf]]*) AC_MSG_ERROR([unsafe absolute working directory name]);; esac case $srcdir in *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*) AC_MSG_ERROR([unsafe srcdir value: '$srcdir']);; esac # Do 'set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( am_has_slept=no for am_try in 1 2; do echo "timestamp, slept: $am_has_slept" > conftest.file set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$[*]" = "X"; then # -L didn't work. set X `ls -t "$srcdir/configure" conftest.file` fi if test "$[*]" != "X $srcdir/configure conftest.file" \ && test "$[*]" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken alias in your environment]) fi if test "$[2]" = conftest.file || test $am_try -eq 2; then break fi # Just in case. sleep 1 am_has_slept=yes done test "$[2]" = conftest.file ) then # Ok. : else AC_MSG_ERROR([newly created file is older than distributed files! Check your system clock]) fi AC_MSG_RESULT([yes]) # If we didn't sleep, we still need to ensure time stamps of config.status and # generated files are strictly newer. am_sleep_pid= if grep 'slept: no' conftest.file >/dev/null 2>&1; then ( sleep 1 ) & am_sleep_pid=$! fi AC_CONFIG_COMMANDS_PRE( [AC_MSG_CHECKING([that generated files are newer than configure]) if test -n "$am_sleep_pid"; then # Hide warnings about reused PIDs. wait $am_sleep_pid 2>/dev/null fi AC_MSG_RESULT([done])]) rm -f conftest.file ]) # Copyright (C) 2009-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_SILENT_RULES([DEFAULT]) # -------------------------- # Enable less verbose build rules; with the default set to DEFAULT # ("yes" being less verbose, "no" or empty being verbose). AC_DEFUN([AM_SILENT_RULES], [AC_ARG_ENABLE([silent-rules], [dnl AS_HELP_STRING( [--enable-silent-rules], [less verbose build output (undo: "make V=1")]) AS_HELP_STRING( [--disable-silent-rules], [verbose build output (undo: "make V=0")])dnl ]) case $enable_silent_rules in @%:@ ((( yes) AM_DEFAULT_VERBOSITY=0;; no) AM_DEFAULT_VERBOSITY=1;; *) AM_DEFAULT_VERBOSITY=m4_if([$1], [yes], [0], [1]);; esac dnl dnl A few 'make' implementations (e.g., NonStop OS and NextStep) dnl do not support nested variable expansions. dnl See automake bug#9928 and bug#10237. 
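dnl (Editorial note, not part of the upstream macro: the "nested variable"
dnl feature probed below is make's ability to expand
dnl   TRUE=$(BAR$(V))
dnl so that V=0/V=1 selects BAR0 or BAR1; GNU make handles this, while the
dnl implementations named above do not.)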
am_make=${MAKE-make} AC_CACHE_CHECK([whether $am_make supports nested variables], [am_cv_make_support_nested_variables], [if AS_ECHO([['TRUE=$(BAR$(V)) BAR0=false BAR1=true V=1 am__doit: @$(TRUE) .PHONY: am__doit']]) | $am_make -f - >/dev/null 2>&1; then am_cv_make_support_nested_variables=yes else am_cv_make_support_nested_variables=no fi]) if test $am_cv_make_support_nested_variables = yes; then dnl Using '$V' instead of '$(V)' breaks IRIX make. AM_V='$(V)' AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' else AM_V=$AM_DEFAULT_VERBOSITY AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY fi AC_SUBST([AM_V])dnl AM_SUBST_NOTMAKE([AM_V])dnl AC_SUBST([AM_DEFAULT_V])dnl AM_SUBST_NOTMAKE([AM_DEFAULT_V])dnl AC_SUBST([AM_DEFAULT_VERBOSITY])dnl AM_BACKSLASH='\' AC_SUBST([AM_BACKSLASH])dnl _AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl ]) # Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_INSTALL_STRIP # --------------------- # One issue with vendor 'install' (even GNU) is that you can't # specify the program used to strip binaries. This is especially # annoying in cross-compiling environments, where the build's strip # is unlikely to handle the host's binaries. # Fortunately install-sh will honor a STRIPPROG variable, so we # always use install-sh in "make install-strip", and initialize # STRIPPROG with the value of the STRIP variable (set by the user). AC_DEFUN([AM_PROG_INSTALL_STRIP], [AC_REQUIRE([AM_PROG_INSTALL_SH])dnl # Installed binaries are usually stripped using 'strip' when the user # run "make install-strip". However 'strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the 'STRIP' environment variable to overrule this program. dnl Don't test for $cross_compiling = yes, because it might be 'maybe'. if test "$cross_compiling" != no; then AC_CHECK_TOOL([STRIP], [strip], :) fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" AC_SUBST([INSTALL_STRIP_PROGRAM])]) # Copyright (C) 2006-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_SUBST_NOTMAKE(VARIABLE) # --------------------------- # Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. # This macro is traced by Automake. AC_DEFUN([_AM_SUBST_NOTMAKE]) # AM_SUBST_NOTMAKE(VARIABLE) # -------------------------- # Public sister of _AM_SUBST_NOTMAKE. AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)]) # Check how to create a tarball. -*- Autoconf -*- # Copyright (C) 2004-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_PROG_TAR(FORMAT) # -------------------- # Check how to create a tarball in format FORMAT. # FORMAT should be one of 'v7', 'ustar', or 'pax'. # # Substitute a variable $(am__tar) that is a command # writing to stdout a FORMAT-tarball containing the directory # $tardir. # tardir=directory && $(am__tar) > result.tar # # Substitute a variable $(am__untar) that extract such # a tarball read from stdin. # $(am__untar) < result.tar # AC_DEFUN([_AM_PROG_TAR], [# Always define AMTAR for backward compatibility. 
Yes, it's still used # in the wild :-( We should find a proper way to deprecate it ... AC_SUBST([AMTAR], ['$${TAR-tar}']) # We'll loop over all known methods to create a tar archive until one works. _am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' m4_if([$1], [v7], [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], [m4_case([$1], [ustar], [# The POSIX 1988 'ustar' format is defined with fixed-size fields. # There is notably a 21 bits limit for the UID and the GID. In fact, # the 'pax' utility can hang on bigger UID/GID (see automake bug#8343 # and bug#13588). am_max_uid=2097151 # 2^21 - 1 am_max_gid=$am_max_uid # The $UID and $GID variables are not portable, so we need to resort # to the POSIX-mandated id(1) utility. Errors in the 'id' calls # below are definitely unexpected, so allow the users to see them # (that is, avoid stderr redirection). am_uid=`id -u || echo unknown` am_gid=`id -g || echo unknown` AC_MSG_CHECKING([whether UID '$am_uid' is supported by ustar format]) if test $am_uid -le $am_max_uid; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) _am_tools=none fi AC_MSG_CHECKING([whether GID '$am_gid' is supported by ustar format]) if test $am_gid -le $am_max_gid; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) _am_tools=none fi], [pax], [], [m4_fatal([Unknown tar format])]) AC_MSG_CHECKING([how to create a $1 tar archive]) # Go ahead even if we have the value already cached. We do so because we # need to set the values for the 'am__tar' and 'am__untar' variables. _am_tools=${am_cv_prog_tar_$1-$_am_tools} for _am_tool in $_am_tools; do case $_am_tool in gnutar) for _am_tar in tar gnutar gtar; do AM_RUN_LOG([$_am_tar --version]) && break done am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' am__untar="$_am_tar -xf -" ;; plaintar) # Must skip GNU tar: if it does not support --format= it doesn't create # ustar tarball either. (tar --version) >/dev/null 2>&1 && continue am__tar='tar chf - "$$tardir"' am__tar_='tar chf - "$tardir"' am__untar='tar xf -' ;; pax) am__tar='pax -L -x $1 -w "$$tardir"' am__tar_='pax -L -x $1 -w "$tardir"' am__untar='pax -r' ;; cpio) am__tar='find "$$tardir" -print | cpio -o -H $1 -L' am__tar_='find "$tardir" -print | cpio -o -H $1 -L' am__untar='cpio -i -H $1 -d' ;; none) am__tar=false am__tar_=false am__untar=false ;; esac # If the value was cached, stop now. We just wanted to have am__tar # and am__untar set. test -n "${am_cv_prog_tar_$1}" && break # tar/untar a dummy directory, and stop if the command works. 
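# (Editorial note, not part of the upstream macro: the round trip below is
# essentially
#   tardir=conftest.dir && eval $am__tar_ > conftest.tar
#   $am__untar < conftest.tar && grep GrepMe conftest.dir/file
# i.e. pack a dummy directory with the candidate tool, unpack it again,
# and accept the tool only if the dummy file survives.)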
rm -rf conftest.dir mkdir conftest.dir echo GrepMe > conftest.dir/file AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) rm -rf conftest.dir if test -s conftest.tar; then AM_RUN_LOG([$am__untar <conftest.tar]) AM_RUN_LOG([cat conftest.dir/file]) grep GrepMe conftest.dir/file >/dev/null 2>&1 && break fi done rm -rf conftest.dir AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) AC_MSG_RESULT([$am_cv_prog_tar_$1])]) AC_SUBST([am__tar]) AC_SUBST([am__untar]) ]) # _AM_PROG_TAR m4_include([m4/ac_cxx_have_dbdeadlockexception.m4]) m4_include([m4/ac_cxx_have_sstream.m4]) m4_include([m4/ac_cxx_namespaces.m4]) m4_include([m4/arc_api.m4]) m4_include([m4/arc_paths.m4]) m4_include([m4/fsusage.m4]) m4_include([m4/gettext.m4]) m4_include([m4/gpt.m4]) m4_include([m4/iconv.m4]) m4_include([m4/intlmacosx.m4]) m4_include([m4/lib-ld.m4]) m4_include([m4/lib-link.m4]) m4_include([m4/lib-prefix.m4]) m4_include([m4/libtool.m4]) m4_include([m4/ltoptions.m4]) m4_include([m4/ltsugar.m4]) m4_include([m4/ltversion.m4]) m4_include([m4/lt~obsolete.m4]) m4_include([m4/nls.m4]) m4_include([m4/po.m4]) m4_include([m4/progtest.m4]) nordugrid-arc-6.14.0/PaxHeaders.30264/include0000644000000000000000000000013214152153475016712 xustar000000000000000030 mtime=1638455101.387592307 30 atime=1638455103.995631494 30 ctime=1638455101.387592307 nordugrid-arc-6.14.0/include/0000755000175000002070000000000014152153475016754 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/PaxHeaders.30264/arc0000644000000000000000000000013214152153475017457 xustar000000000000000030 mtime=1638455101.415592728 30 atime=1638455103.995631494 30 ctime=1638455101.415592728 nordugrid-arc-6.14.0/include/arc/0000755000175000002070000000000014152153475017521 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/job0000644000000000000000000000013214152153475020231 xustar000000000000000030 mtime=1638455101.486593795 30 atime=1638455103.995631494 30 ctime=1638455101.486593795 nordugrid-arc-6.14.0/include/arc/job/0000755000175000002070000000000014152153475020273 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/job/PaxHeaders.30264/error.h0000644000000000000000000000013214152153376021610 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.485593779 nordugrid-arc-6.14.0/include/arc/job/error.h0000644000175000002070000000005514152153376021575 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/job/error.h" nordugrid-arc-6.14.0/include/arc/job/PaxHeaders.30264/runtimeenvironment.h0000644000000000000000000000013214152153376024427 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.486593795 nordugrid-arc-6.14.0/include/arc/job/runtimeenvironment.h0000644000175000002070000000007214152153376024413 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/job/runtimeenvironment.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/ArcLocation.h0000644000000000000000000000013214152153376022103 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.390592352 nordugrid-arc-6.14.0/include/arc/ArcLocation.h0000644000175000002070000000006314152153376022067 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/ArcLocation.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/ArcConfigIni.h0000644000000000000000000000013214152153376022200 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.389592337
nordugrid-arc-6.14.0/include/arc/ArcConfigIni.h0000644000175000002070000000006414152153376022165 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/ArcConfigIni.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/data0000644000000000000000000000013214152153475020370 xustar000000000000000030 mtime=1638455101.475593629 30 atime=1638455103.995631494 30 ctime=1638455101.475593629 nordugrid-arc-6.14.0/include/arc/data/0000755000175000002070000000000014152153475020432 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataPointDelegate.h0000644000000000000000000000013214152153376024134 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.467593509 nordugrid-arc-6.14.0/include/arc/data/DataPointDelegate.h0000644000175000002070000000007214152153376024120 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataPointDelegate.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataExternalHelper.h0000644000000000000000000000013214152153376024332 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.464593464 nordugrid-arc-6.14.0/include/arc/data/DataExternalHelper.h0000644000175000002070000000007314152153376024317 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataExternalHelper.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/MkDirRecursive.h0000644000000000000000000000013214152153376023514 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.474593614 nordugrid-arc-6.14.0/include/arc/data/MkDirRecursive.h0000644000175000002070000000006714152153376023504 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/MkDirRecursive.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataSpeed.h0000644000000000000000000000013214152153376022450 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.470593554 nordugrid-arc-6.14.0/include/arc/data/DataSpeed.h0000644000175000002070000000006214152153376022433 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataSpeed.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataBuffer.h0000644000000000000000000000013214152153376022621 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.461593419 nordugrid-arc-6.14.0/include/arc/data/DataBuffer.h0000644000175000002070000000006314152153376022605 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataBuffer.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataMover.h0000644000000000000000000000013214152153376022500 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.465593479 nordugrid-arc-6.14.0/include/arc/data/DataMover.h0000644000175000002070000000006214152153376022463 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataMover.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/FileCacheHash.h0000644000000000000000000000013214152153376023225 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.472593584 nordugrid-arc-6.14.0/include/arc/data/FileCacheHash.h0000644000175000002070000000006614152153376023214 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/FileCacheHash.h" 
nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/URLMap.h0000644000000000000000000000013214152153376021716 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.475593629 nordugrid-arc-6.14.0/include/arc/data/URLMap.h0000644000175000002070000000005714152153376021705 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/URLMap.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataHandle.h0000644000000000000000000000013214152153376022603 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.464593464 nordugrid-arc-6.14.0/include/arc/data/DataHandle.h0000644000175000002070000000006314152153376022567 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataHandle.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataPointDirect.h0000644000000000000000000000013214152153376023634 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.468593524 nordugrid-arc-6.14.0/include/arc/data/DataPointDirect.h0000644000175000002070000000007014152153376023616 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataPointDirect.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataPointIndex.h0000644000000000000000000000013214152153376023471 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.469593539 nordugrid-arc-6.14.0/include/arc/data/DataPointIndex.h0000644000175000002070000000006714152153376023461 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataPointIndex.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/FileCache.h0000644000000000000000000000013214152153376022421 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.472593584 nordugrid-arc-6.14.0/include/arc/data/FileCache.h0000644000175000002070000000006214152153376022404 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/FileCache.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataExternalComm.h0000644000000000000000000000013214152153376024006 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.463593449 nordugrid-arc-6.14.0/include/arc/data/DataExternalComm.h0000644000175000002070000000007114152153376023771 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataExternalComm.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataCallback.h0000644000000000000000000000013214152153376023104 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.462593434 nordugrid-arc-6.14.0/include/arc/data/DataCallback.h0000644000175000002070000000006514152153376023072 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataCallback.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataStatus.h0000644000000000000000000000013214152153376022673 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.471593569 nordugrid-arc-6.14.0/include/arc/data/DataStatus.h0000644000175000002070000000006314152153376022657 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataStatus.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/DataPoint.h0000644000000000000000000000013214152153376022501 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 
30 ctime=1638455101.466593494 nordugrid-arc-6.14.0/include/arc/data/DataPoint.h0000644000175000002070000000006214152153376022464 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/DataPoint.h" nordugrid-arc-6.14.0/include/arc/data/PaxHeaders.30264/FileInfo.h0000644000000000000000000000013214152153376022311 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.473593599 nordugrid-arc-6.14.0/include/arc/data/FileInfo.h0000644000175000002070000000006114152153376022273 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/data/FileInfo.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/JobPerfLog.h0000644000000000000000000000013214152153376021676 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.404592562 nordugrid-arc-6.14.0/include/arc/JobPerfLog.h0000644000175000002070000000006214152153376021661 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/JobPerfLog.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/loader0000644000000000000000000000013014152153475020723 xustar000000000000000029 mtime=1638455101.48959384 30 atime=1638455103.995631494 29 ctime=1638455101.48959384 nordugrid-arc-6.14.0/include/arc/loader/0000755000175000002070000000000014152153475020767 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/loader/PaxHeaders.30264/ModuleManager.h0000644000000000000000000000013214152153376023673 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.488593825 nordugrid-arc-6.14.0/include/arc/loader/ModuleManager.h0000644000175000002070000000007014152153376023655 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/loader/ModuleManager.h" nordugrid-arc-6.14.0/include/arc/loader/PaxHeaders.30264/FinderLoader.h0000644000000000000000000000013214152153376023511 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.486593795 nordugrid-arc-6.14.0/include/arc/loader/FinderLoader.h0000644000175000002070000000006714152153376023501 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/loader/FinderLoader.h" nordugrid-arc-6.14.0/include/arc/loader/PaxHeaders.30264/Loader.h0000644000000000000000000000013214152153376022361 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.487593809 nordugrid-arc-6.14.0/include/arc/loader/Loader.h0000644000175000002070000000006114152153376022343 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/loader/Loader.h" nordugrid-arc-6.14.0/include/arc/loader/PaxHeaders.30264/Plugin.h0000644000000000000000000000013114152153376022410 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 29 ctime=1638455101.48959384 nordugrid-arc-6.14.0/include/arc/loader/Plugin.h0000644000175000002070000000006114152153376022373 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/loader/Plugin.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/DateTime.h0000644000000000000000000000013214152153376021401 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.396592442 nordugrid-arc-6.14.0/include/arc/DateTime.h0000644000175000002070000000006014152153376021362 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/DateTime.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/data-staging0000644000000000000000000000013214152153475022022 
xustar000000000000000030 mtime=1638455101.460593404 30 atime=1638455103.995631494 30 ctime=1638455101.460593404 nordugrid-arc-6.14.0/include/arc/data-staging/0000755000175000002070000000000014152153475022064 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/data-staging/PaxHeaders.30264/DTRList.h0000644000000000000000000000013214152153376023535 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.454593314 nordugrid-arc-6.14.0/include/arc/data-staging/DTRList.h0000644000175000002070000000006414152153376023522 0ustar00mockbuildmock00000000000000#include "../../../src/libs/data-staging/DTRList.h" nordugrid-arc-6.14.0/include/arc/data-staging/PaxHeaders.30264/Processor.h0000644000000000000000000000013214152153376024227 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.458593374 nordugrid-arc-6.14.0/include/arc/data-staging/Processor.h0000644000175000002070000000006614152153376024216 0ustar00mockbuildmock00000000000000#include "../../../src/libs/data-staging/Processor.h" nordugrid-arc-6.14.0/include/arc/data-staging/PaxHeaders.30264/Generator.h0000644000000000000000000000013214152153376024176 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.458593374 nordugrid-arc-6.14.0/include/arc/data-staging/Generator.h0000644000175000002070000000006614152153376024165 0ustar00mockbuildmock00000000000000#include "../../../src/libs/data-staging/Generator.h" nordugrid-arc-6.14.0/include/arc/data-staging/PaxHeaders.30264/DataDeliveryComm.h0000644000000000000000000000013214152153376025441 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.457593359 nordugrid-arc-6.14.0/include/arc/data-staging/DataDeliveryComm.h0000644000175000002070000000007514152153376025430 0ustar00mockbuildmock00000000000000#include "../../../src/libs/data-staging/DataDeliveryComm.h" nordugrid-arc-6.14.0/include/arc/data-staging/PaxHeaders.30264/DataDelivery.h0000644000000000000000000000013214152153376024625 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.456593344 nordugrid-arc-6.14.0/include/arc/data-staging/DataDelivery.h0000644000175000002070000000007114152153376024610 0ustar00mockbuildmock00000000000000#include "../../../src/libs/data-staging/DataDelivery.h" nordugrid-arc-6.14.0/include/arc/data-staging/PaxHeaders.30264/TransferShares.h0000644000000000000000000000013214152153376025202 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.460593404 nordugrid-arc-6.14.0/include/arc/data-staging/TransferShares.h0000644000175000002070000000007314152153376025167 0ustar00mockbuildmock00000000000000#include "../../../src/libs/data-staging/TransferShares.h" nordugrid-arc-6.14.0/include/arc/data-staging/PaxHeaders.30264/DTRStatus.h0000644000000000000000000000013214152153376024105 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.455593329 nordugrid-arc-6.14.0/include/arc/data-staging/DTRStatus.h0000644000175000002070000000006614152153376024074 0ustar00mockbuildmock00000000000000#include "../../../src/libs/data-staging/DTRStatus.h" nordugrid-arc-6.14.0/include/arc/data-staging/PaxHeaders.30264/DTR.h0000644000000000000000000000013214152153376022701 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.453593299 
nordugrid-arc-6.14.0/include/arc/data-staging/DTR.h0000644000175000002070000000006014152153376022662 0ustar00mockbuildmock00000000000000#include "../../../src/libs/data-staging/DTR.h" nordugrid-arc-6.14.0/include/arc/data-staging/PaxHeaders.30264/Scheduler.h0000644000000000000000000000013214152153376024166 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.459593389 nordugrid-arc-6.14.0/include/arc/data-staging/Scheduler.h0000644000175000002070000000006614152153376024155 0ustar00mockbuildmock00000000000000#include "../../../src/libs/data-staging/Scheduler.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/XMLNode.h0000644000000000000000000000013214152153376021153 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.415592728 nordugrid-arc-6.14.0/include/arc/XMLNode.h0000644000175000002070000000005714152153376021142 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/XMLNode.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/ArcVersion.h.in0000644000000000000000000000013214152153376022365 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455095.723507202 nordugrid-arc-6.14.0/include/arc/ArcVersion.h.in0000644000175000002070000000006214152153376022350 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/ArcVersion.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/MysqlWrapper.h0000644000000000000000000000013214152153376022353 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.406592592 nordugrid-arc-6.14.0/include/arc/MysqlWrapper.h0000644000175000002070000000006414152153376022340 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/MysqlWrapper.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/User.h0000644000000000000000000000013214152153376020623 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.412592683 nordugrid-arc-6.14.0/include/arc/User.h0000644000175000002070000000005414152153376020607 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/User.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/communication0000644000000000000000000000013214152153475022324 xustar000000000000000030 mtime=1638455101.418592773 30 atime=1638455103.995631494 30 ctime=1638455101.418592773 nordugrid-arc-6.14.0/include/arc/communication/0000755000175000002070000000000014152153475022366 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/communication/PaxHeaders.30264/ClientInterface.h0000644000000000000000000000013214152153376025611 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.416592743 nordugrid-arc-6.14.0/include/arc/communication/ClientInterface.h0000644000175000002070000000010114152153376025566 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/communication/ClientInterface.h" nordugrid-arc-6.14.0/include/arc/communication/PaxHeaders.30264/ClientSAML2SSO.h0000644000000000000000000000013214152153376025114 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.417592758 nordugrid-arc-6.14.0/include/arc/communication/ClientSAML2SSO.h0000644000175000002070000000010014152153376025070 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/communication/ClientSAML2SSO.h" 
nordugrid-arc-6.14.0/include/arc/communication/PaxHeaders.30264/ClientX509Delegation.h0000644000000000000000000000013214152153376026352 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.418592773 nordugrid-arc-6.14.0/include/arc/communication/ClientX509Delegation.h0000644000175000002070000000010614152153376026334 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/communication/ClientX509Delegation.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/credential0000644000000000000000000000013214152153475021571 xustar000000000000000030 mtime=1638455101.449593238 30 atime=1638455103.995631494 30 ctime=1638455101.449593238 nordugrid-arc-6.14.0/include/arc/credential/0000755000175000002070000000000014152153475021633 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/credential/PaxHeaders.30264/Proxycertinfo.h0000644000000000000000000000013214152153376024672 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.446593193 nordugrid-arc-6.14.0/include/arc/credential/Proxycertinfo.h0000644000175000002070000000007414152153376024660 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/credential/Proxycertinfo.h" nordugrid-arc-6.14.0/include/arc/credential/PaxHeaders.30264/VOMSAttribute.h0000644000000000000000000000013214152153376024467 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.447593209 nordugrid-arc-6.14.0/include/arc/credential/VOMSAttribute.h0000644000175000002070000000007414152153376024455 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/credential/VOMSAttribute.h" nordugrid-arc-6.14.0/include/arc/credential/PaxHeaders.30264/PasswordSource.h0000644000000000000000000000013214152153376025002 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.445593178 nordugrid-arc-6.14.0/include/arc/credential/PasswordSource.h0000644000175000002070000000007614152153376024772 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/credential/PasswordSource.h" nordugrid-arc-6.14.0/include/arc/credential/PaxHeaders.30264/NSSUtil.h0000644000000000000000000000013214152153376023320 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.444593164 nordugrid-arc-6.14.0/include/arc/credential/NSSUtil.h0000644000175000002070000000006614152153376023307 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/credential/NSSUtil.h" nordugrid-arc-6.14.0/include/arc/credential/PaxHeaders.30264/VOMSConfig.h0000644000000000000000000000013214152153376023731 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.448593224 nordugrid-arc-6.14.0/include/arc/credential/VOMSConfig.h0000644000175000002070000000007114152153376023714 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/credential/VOMSConfig.h" nordugrid-arc-6.14.0/include/arc/credential/PaxHeaders.30264/Credential.h0000644000000000000000000000013214152153376024071 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.444593164 nordugrid-arc-6.14.0/include/arc/credential/Credential.h0000644000175000002070000000007214152153376024055 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/credential/Credential.h" 
nordugrid-arc-6.14.0/include/arc/credential/PaxHeaders.30264/VOMSUtil.h0000644000000000000000000000013214152153376023441 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.449593238 nordugrid-arc-6.14.0/include/arc/credential/VOMSUtil.h0000644000175000002070000000006714152153376023431 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/credential/VOMSUtil.h" nordugrid-arc-6.14.0/include/arc/credential/PaxHeaders.30264/CertUtil.h0000644000000000000000000000013214152153376023552 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.443593148 nordugrid-arc-6.14.0/include/arc/credential/CertUtil.h0000644000175000002070000000006714152153376023542 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/credential/CertUtil.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/ws-security0000644000000000000000000000013214152153475021755 xustar000000000000000030 mtime=1638455101.538594576 30 atime=1638455103.995631494 30 ctime=1638455101.538594576 nordugrid-arc-6.14.0/include/arc/ws-security/0000755000175000002070000000000014152153475022017 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/ws-security/PaxHeaders.30264/SAMLToken.h0000644000000000000000000000013214152153376023740 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.537594561 nordugrid-arc-6.14.0/include/arc/ws-security/SAMLToken.h0000644000175000002070000000007114152153376023723 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/ws-security/SAMLToken.h" nordugrid-arc-6.14.0/include/arc/ws-security/PaxHeaders.30264/X509Token.h0000644000000000000000000000013214152153376023651 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.538594576 nordugrid-arc-6.14.0/include/arc/ws-security/X509Token.h0000644000175000002070000000007114152153376023634 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/ws-security/X509Token.h" nordugrid-arc-6.14.0/include/arc/ws-security/PaxHeaders.30264/UsernameToken.h0000644000000000000000000000013214152153376024763 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.537594561 nordugrid-arc-6.14.0/include/arc/ws-security/UsernameToken.h0000644000175000002070000000007514152153376024752 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/ws-security/UsernameToken.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/ArcConfigFile.h0000644000000000000000000000013214152153376022340 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.388592322 nordugrid-arc-6.14.0/include/arc/ArcConfigFile.h0000644000175000002070000000006514152153376022326 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/ArcConfigFile.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/xmlsec0000644000000000000000000000013214152153475020752 xustar000000000000000030 mtime=1638455101.544594666 30 atime=1638455103.995631494 30 ctime=1638455101.544594666 nordugrid-arc-6.14.0/include/arc/xmlsec/0000755000175000002070000000000014152153475021014 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/xmlsec/PaxHeaders.30264/XmlSecUtils.h0000644000000000000000000000013214152153376023414 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.543594651 
nordugrid-arc-6.14.0/include/arc/xmlsec/XmlSecUtils.h0000644000175000002070000000006614152153376023403 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/xmlsec/XmlSecUtils.h" nordugrid-arc-6.14.0/include/arc/xmlsec/PaxHeaders.30264/XMLSecNode.h0000644000000000000000000000013214152153376023101 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.542594636 nordugrid-arc-6.14.0/include/arc/xmlsec/XMLSecNode.h0000644000175000002070000000006514152153376023067 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/xmlsec/XMLSecNode.h" nordugrid-arc-6.14.0/include/arc/xmlsec/PaxHeaders.30264/saml_util.h0000644000000000000000000000013214152153376023171 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.544594666 nordugrid-arc-6.14.0/include/arc/xmlsec/saml_util.h0000644000175000002070000000006414152153376023156 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/xmlsec/saml_util.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/ArcConfig.h0000644000000000000000000000013214152153376021540 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.387592307 nordugrid-arc-6.14.0/include/arc/ArcConfig.h0000644000175000002070000000006114152153376021522 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/ArcConfig.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/Profile.h0000644000000000000000000000013214152153376021305 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.407592607 nordugrid-arc-6.14.0/include/arc/Profile.h0000644000175000002070000000005714152153376021274 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/Profile.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/IniConfig.h0000644000000000000000000000013214152153376021552 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.402592532 nordugrid-arc-6.14.0/include/arc/IniConfig.h0000644000175000002070000000006114152153376021534 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/IniConfig.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/external0000644000000000000000000000013214152153475021301 xustar000000000000000030 mtime=1638455101.383592247 30 atime=1638455103.995631494 30 ctime=1638455101.383592247 nordugrid-arc-6.14.0/include/arc/external/0000755000175000002070000000000014152153475021343 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/external/PaxHeaders.30264/cJSON0000644000000000000000000000013214152153475022215 xustar000000000000000030 mtime=1638455101.477593659 30 atime=1638455103.995631494 30 ctime=1638455101.477593659 nordugrid-arc-6.14.0/include/arc/external/cJSON/0000755000175000002070000000000014152153475022257 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/external/cJSON/PaxHeaders.30264/cJSON.h0000644000000000000000000000013214152153376023357 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.477593659 nordugrid-arc-6.14.0/include/arc/external/cJSON/cJSON.h0000644000175000002070000000006214152153376023342 0ustar00mockbuildmock00000000000000#include "../../../../src/external/cJSON/cJSON.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/JSON.h0000644000000000000000000000013214152153376020456 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 
ctime=1638455101.403592547 nordugrid-arc-6.14.0/include/arc/JSON.h0000644000175000002070000000005414152153376020442 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/JSON.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/UserConfig.h0000644000000000000000000000013214152153376021751 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.413592697 nordugrid-arc-6.14.0/include/arc/UserConfig.h0000644000175000002070000000006214152153376021734 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/UserConfig.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/IString.h0000644000000000000000000000013214152153376021264 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.401592517 nordugrid-arc-6.14.0/include/arc/IString.h0000644000175000002070000000005714152153376021253 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/IString.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/Watchdog.h0000644000000000000000000000013214152153376021445 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.414592713 nordugrid-arc-6.14.0/include/arc/Watchdog.h0000644000175000002070000000006014152153376021426 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/Watchdog.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/wsrf0000644000000000000000000000013214152153475020440 xustar000000000000000030 mtime=1638455101.541594621 30 atime=1638455103.995631494 30 ctime=1638455101.541594621 nordugrid-arc-6.14.0/include/arc/wsrf/0000755000175000002070000000000014152153475020502 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/wsrf/PaxHeaders.30264/WSResourceProperties.h0000644000000000000000000000013214152153376025004 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.541594621 nordugrid-arc-6.14.0/include/arc/wsrf/WSResourceProperties.h0000644000175000002070000000007514152153376024773 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/wsrf/WSResourceProperties.h" nordugrid-arc-6.14.0/include/arc/wsrf/PaxHeaders.30264/WSRFBaseFault.h0000644000000000000000000000013214152153376023236 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.540594606 nordugrid-arc-6.14.0/include/arc/wsrf/WSRFBaseFault.h0000644000175000002070000000006614152153376023225 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/wsrf/WSRFBaseFault.h" nordugrid-arc-6.14.0/include/arc/wsrf/PaxHeaders.30264/WSRF.h0000644000000000000000000000013214152153376021447 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.539594591 nordugrid-arc-6.14.0/include/arc/wsrf/WSRF.h0000644000175000002070000000005514152153376021434 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/wsrf/WSRF.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/infosys0000644000000000000000000000013214152153475021151 xustar000000000000000030 mtime=1638455101.484593764 30 atime=1638455103.995631494 30 ctime=1638455101.484593764 nordugrid-arc-6.14.0/include/arc/infosys/0000755000175000002070000000000014152153475021213 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/infosys/PaxHeaders.30264/InformationInterface.h0000644000000000000000000000013214152153376025505 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 
30 ctime=1638455101.483593749 nordugrid-arc-6.14.0/include/arc/infosys/InformationInterface.h0000644000175000002070000000010014152153376025461 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/infosys/InformationInterface.h" nordugrid-arc-6.14.0/include/arc/infosys/PaxHeaders.30264/InfoCache.h0000644000000000000000000000013214152153376023216 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.480593704 nordugrid-arc-6.14.0/include/arc/infosys/InfoCache.h0000644000175000002070000000006514152153376023204 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/infosys/InfoCache.h" nordugrid-arc-6.14.0/include/arc/infosys/PaxHeaders.30264/InfoRegister.h0000644000000000000000000000013214152153376023777 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.482593734 nordugrid-arc-6.14.0/include/arc/infosys/InfoRegister.h0000644000175000002070000000007014152153376023761 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/infosys/InfoRegister.h" nordugrid-arc-6.14.0/include/arc/infosys/PaxHeaders.30264/InfoFilter.h0000644000000000000000000000013214152153376023440 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.481593719 nordugrid-arc-6.14.0/include/arc/infosys/InfoFilter.h0000644000175000002070000000006614152153376023427 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/infosys/InfoFilter.h" nordugrid-arc-6.14.0/include/arc/infosys/PaxHeaders.30264/RegisteredService.h0000644000000000000000000000013214152153376025015 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.484593764 nordugrid-arc-6.14.0/include/arc/infosys/RegisteredService.h0000644000175000002070000000007514152153376025004 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/infosys/RegisteredService.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/Thread.h0000644000000000000000000000013214152153376021114 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.410592652 nordugrid-arc-6.14.0/include/arc/Thread.h0000644000175000002070000000005614152153376021102 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/Thread.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/Logger.h0000644000000000000000000000013214152153376021124 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.405592578 nordugrid-arc-6.14.0/include/arc/Logger.h0000644000175000002070000000005614152153376021112 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/Logger.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/credentialstore0000644000000000000000000000013214152153475022646 xustar000000000000000030 mtime=1638455101.451593269 30 atime=1638455103.995631494 30 ctime=1638455101.451593269 nordugrid-arc-6.14.0/include/arc/credentialstore/0000755000175000002070000000000014152153475022710 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/credentialstore/PaxHeaders.30264/ClientVOMS.h0000644000000000000000000000013214152153376025017 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.450593254 nordugrid-arc-6.14.0/include/arc/credentialstore/ClientVOMS.h0000644000175000002070000000007614152153376025007 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/credentialstore/ClientVOMS.h" 
nordugrid-arc-6.14.0/include/arc/credentialstore/PaxHeaders.30264/CredentialStore.h0000644000000000000000000000013214152153376026163 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.451593269 nordugrid-arc-6.14.0/include/arc/credentialstore/CredentialStore.h0000644000175000002070000000010414152153376026143 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/credentialstore/CredentialStore.h" nordugrid-arc-6.14.0/include/arc/credentialstore/PaxHeaders.30264/ClientVOMSRESTful.h0000644000000000000000000000013214152153376026224 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.451593269 nordugrid-arc-6.14.0/include/arc/credentialstore/ClientVOMSRESTful.h0000644000175000002070000000010514152153376026205 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/credentialstore/ClientVOMSRESTful.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/security0000644000000000000000000000013214152153475021326 xustar000000000000000030 mtime=1638455101.535594531 30 atime=1638455103.995631494 30 ctime=1638455101.535594531 nordugrid-arc-6.14.0/include/arc/security/0000755000175000002070000000000014152153475021370 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/security/PaxHeaders.30264/PDP.h0000644000000000000000000000013214152153376022177 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.534594516 nordugrid-arc-6.14.0/include/arc/security/PDP.h0000644000175000002070000000006014152153376022160 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/security/PDP.h" nordugrid-arc-6.14.0/include/arc/security/PaxHeaders.30264/ArcPDP0000644000000000000000000000012614152153475022402 xustar000000000000000028 mtime=1638455101.5135942 30 atime=1638455103.995631494 28 ctime=1638455101.5135942 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/0000755000175000002070000000000014152153475022441 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/alg0000644000000000000000000000013214152153475023142 xustar000000000000000030 mtime=1638455101.518594275 30 atime=1638455103.995631494 30 ctime=1638455101.518594275 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/alg/0000755000175000002070000000000014152153475023204 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/security/ArcPDP/alg/PaxHeaders.30264/AlgFactory.h0000644000000000000000000000013214152153376025423 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.462646831 30 ctime=1638455101.514594215 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/alg/AlgFactory.h0000644000175000002070000000011014152153376025400 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/alg/AlgFactory.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/alg/PaxHeaders.30264/PermitOverridesAlg.h0000644000000000000000000000013214152153376027137 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.462646831 30 ctime=1638455101.518594275 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/alg/PermitOverridesAlg.h0000644000175000002070000000012014152153376027115 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/alg/PermitOverridesAlg.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/alg/PaxHeaders.30264/DenyOverridesAlg.h0000644000000000000000000000013214152153376026576 xustar000000000000000030 
mtime=1638455038.277644051 30 atime=1638455038.462646831 30 ctime=1638455101.516594245 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/alg/DenyOverridesAlg.h0000644000175000002070000000011614152153376026561 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/alg/DenyOverridesAlg.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/alg/PaxHeaders.30264/OrderedAlg.h0000644000000000000000000000013114152153376025377 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.462646831 29 ctime=1638455101.51759426 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/alg/OrderedAlg.h0000644000175000002070000000011014152153376025355 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/alg/OrderedAlg.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/alg/PaxHeaders.30264/CombiningAlg.h0000644000000000000000000000013114152153376025720 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.462646831 29 ctime=1638455101.51559423 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/alg/CombiningAlg.h0000644000175000002070000000011214152153376025700 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/alg/CombiningAlg.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/EvaluationCtx.h0000644000000000000000000000013114152153376025412 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.461646816 29 ctime=1638455101.50559408 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/EvaluationCtx.h0000644000175000002070000000010414152153376025373 0ustar00mockbuildmock00000000000000#include "../../../../src/hed/libs/security/ArcPDP/EvaluationCtx.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/RequestItem.h0000644000000000000000000000013114152153376025073 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.462646831 29 ctime=1638455101.51159417 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/RequestItem.h0000644000175000002070000000010214152153376025052 0ustar00mockbuildmock00000000000000#include "../../../../src/hed/libs/security/ArcPDP/RequestItem.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/EvaluatorLoader.h0000644000000000000000000000013114152153376025715 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.461646816 29 ctime=1638455101.50759411 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/EvaluatorLoader.h0000644000175000002070000000010614152153376025700 0ustar00mockbuildmock00000000000000#include "../../../../src/hed/libs/security/ArcPDP/EvaluatorLoader.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/Evaluator.h0000644000000000000000000000013214152153376024567 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.461646816 30 ctime=1638455101.506594095 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/Evaluator.h0000644000175000002070000000010014152153376024543 0ustar00mockbuildmock00000000000000#include "../../../../src/hed/libs/security/ArcPDP/Evaluator.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/Source.h0000644000000000000000000000013214152153376024065 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.462646831 30 ctime=1638455101.514594215 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/Source.h0000644000175000002070000000007514152153376024054 0ustar00mockbuildmock00000000000000#include "../../../../src/hed/libs/security/ArcPDP/Source.h" 
nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/fn0000644000000000000000000000013214152153475023002 xustar000000000000000030 mtime=1638455101.531594471 30 atime=1638455103.995631494 30 ctime=1638455101.531594471 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/fn/0000755000175000002070000000000014152153475023044 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/security/ArcPDP/fn/PaxHeaders.30264/FnFactory.h0000644000000000000000000000013114152153376025122 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 29 ctime=1638455101.52959444 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/fn/FnFactory.h0000644000175000002070000000010614152153376025105 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/fn/FnFactory.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/fn/PaxHeaders.30264/EqualFunction.h0000644000000000000000000000013214152153376026005 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.528594426 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/fn/EqualFunction.h0000644000175000002070000000011214152153376025764 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/fn/EqualFunction.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/fn/PaxHeaders.30264/MatchFunction.h0000644000000000000000000000013214152153376025772 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.531594471 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/fn/MatchFunction.h0000644000175000002070000000011214152153376025751 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/fn/MatchFunction.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/fn/PaxHeaders.30264/Function.h0000644000000000000000000000013214152153376025015 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.530594456 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/fn/Function.h0000644000175000002070000000010514152153376024776 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/fn/Function.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/fn/PaxHeaders.30264/InRangeFunction.h0000644000000000000000000000013214152153376026261 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.530594456 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/fn/InRangeFunction.h0000644000175000002070000000011414152153376026242 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/fn/InRangeFunction.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/Request.h0000644000000000000000000000013214152153376024255 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.461646816 30 ctime=1638455101.510594155 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/Request.h0000644000175000002070000000007614152153376024245 0ustar00mockbuildmock00000000000000#include "../../../../src/hed/libs/security/ArcPDP/Request.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/Result.h0000644000000000000000000000013014152153376024101 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.462646831 28 ctime=1638455101.5135942 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/Result.h0000644000175000002070000000007514152153376024072 0ustar00mockbuildmock00000000000000#include 
"../../../../src/hed/libs/security/ArcPDP/Result.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/Response.h0000644000000000000000000000013214152153376024423 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.462646831 30 ctime=1638455101.512594185 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/Response.h0000644000175000002070000000007714152153376024414 0ustar00mockbuildmock00000000000000#include "../../../../src/hed/libs/security/ArcPDP/Response.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/policy0000644000000000000000000000013214152153475023676 xustar000000000000000030 mtime=1638455101.532594486 30 atime=1638455103.995631494 30 ctime=1638455101.532594486 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/policy/0000755000175000002070000000000014152153475023740 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/security/ArcPDP/policy/PaxHeaders.30264/Policy.h0000644000000000000000000000013214152153376025363 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.532594486 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/policy/Policy.h0000644000175000002070000000010714152153376025346 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/policy/Policy.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/PolicyParser.h0000644000000000000000000000013214152153376025241 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.461646816 30 ctime=1638455101.508594125 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PolicyParser.h0000644000175000002070000000010314152153376025220 0ustar00mockbuildmock00000000000000#include "../../../../src/hed/libs/security/ArcPDP/PolicyParser.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/PolicyStore.h0000644000000000000000000000013114152153376025100 xustar000000000000000030 mtime=1638455038.277644051 30 atime=1638455038.461646816 29 ctime=1638455101.50959414 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PolicyStore.h0000644000175000002070000000010214152153376025057 0ustar00mockbuildmock00000000000000#include "../../../../src/hed/libs/security/ArcPDP/PolicyStore.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/PaxHeaders.30264/attr0000644000000000000000000000013214152153475023351 xustar000000000000000030 mtime=1638455101.527594411 30 atime=1638455103.995631494 30 ctime=1638455101.527594411 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/0000755000175000002070000000000014152153475023413 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/PaxHeaders.30264/AttributeFactory.h0000644000000000000000000000013214152153376027072 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.520594305 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/AttributeFactory.h0000644000175000002070000000011714152153376027056 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/attr/AttributeFactory.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/PaxHeaders.30264/AnyURIAttribute.h0000644000000000000000000000013114152153376026571 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 29 ctime=1638455101.51959429 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/AnyURIAttribute.h0000644000175000002070000000011614152153376026555 0ustar00mockbuildmock00000000000000#include 
"../../../../../src/hed/libs/security/ArcPDP/attr/AnyURIAttribute.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/PaxHeaders.30264/RequestAttribute.h0000644000000000000000000000013214152153376027113 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.525594381 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/RequestAttribute.h0000644000175000002070000000011714152153376027077 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/attr/RequestAttribute.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/PaxHeaders.30264/DateTimeAttribute.h0000644000000000000000000000013214152153376027157 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.524594366 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/DateTimeAttribute.h0000644000175000002070000000012014152153376027135 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/attr/DateTimeAttribute.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/PaxHeaders.30264/StringAttribute.h0000644000000000000000000000013214152153376026731 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.526594395 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/StringAttribute.h0000644000175000002070000000011614152153376026714 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/attr/StringAttribute.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/PaxHeaders.30264/GenericAttribute.h0000644000000000000000000000013214152153376027037 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.524594366 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/GenericAttribute.h0000644000175000002070000000011714152153376027023 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/attr/GenericAttribute.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/PaxHeaders.30264/AttributeProxy.h0000644000000000000000000000013114152153376026603 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 29 ctime=1638455101.52159432 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/AttributeProxy.h0000644000175000002070000000011514152153376026566 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/attr/AttributeProxy.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/PaxHeaders.30264/X500NameAttribute.h0000644000000000000000000000013214152153376026720 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.527594411 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/X500NameAttribute.h0000644000175000002070000000012014152153376026676 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/attr/X500NameAttribute.h" nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/PaxHeaders.30264/AttributeValue.h0000644000000000000000000000013214152153376026537 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.522594335 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/AttributeValue.h0000644000175000002070000000011514152153376026521 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/attr/AttributeValue.h" 
nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/PaxHeaders.30264/BooleanAttribute.h0000644000000000000000000000013114152153376027041 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 29 ctime=1638455101.52359435 nordugrid-arc-6.14.0/include/arc/security/ArcPDP/attr/BooleanAttribute.h0000644000175000002070000000011714152153376027026 0ustar00mockbuildmock00000000000000#include "../../../../../src/hed/libs/security/ArcPDP/attr/BooleanAttribute.h" nordugrid-arc-6.14.0/include/arc/security/PaxHeaders.30264/ClassLoader.h0000644000000000000000000000013214152153376023750 xustar000000000000000030 mtime=1638455038.278644066 30 atime=1638455038.462646831 30 ctime=1638455101.533594501 nordugrid-arc-6.14.0/include/arc/security/ClassLoader.h0000644000175000002070000000007014152153376023732 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/security/ClassLoader.h" nordugrid-arc-6.14.0/include/arc/security/PaxHeaders.30264/Security.h0000644000000000000000000000013214152153376023363 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.535594531 nordugrid-arc-6.14.0/include/arc/security/Security.h0000644000175000002070000000006514152153376023351 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/security/Security.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/GUID.h0000644000000000000000000000013214152153376020435 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.399592487 nordugrid-arc-6.14.0/include/arc/GUID.h0000644000175000002070000000005414152153376020421 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/GUID.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/FileLock.h0000644000000000000000000000013214152153376021375 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.397592457 nordugrid-arc-6.14.0/include/arc/FileLock.h0000644000175000002070000000006014152153376021356 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/FileLock.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/DBInterface.h0000644000000000000000000000013214152153376022013 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.395592427 nordugrid-arc-6.14.0/include/arc/DBInterface.h0000644000175000002070000000006314152153376021777 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/DBInterface.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/Run.h0000644000000000000000000000013214152153376020451 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.408592623 nordugrid-arc-6.14.0/include/arc/Run.h0000644000175000002070000000005314152153376020434 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/Run.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/Base64.h0000644000000000000000000000013214152153376020731 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.392592382 nordugrid-arc-6.14.0/include/arc/Base64.h0000644000175000002070000000005614152153376020717 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/Base64.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/OptionParser.h0000644000000000000000000000013214152153376022332 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.407592607 
nordugrid-arc-6.14.0/include/arc/OptionParser.h0000644000175000002070000000006414152153376022317 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/OptionParser.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/ArcRegex.h0000644000000000000000000000013214152153376021405 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.391592367 nordugrid-arc-6.14.0/include/arc/ArcRegex.h0000644000175000002070000000006014152153376021366 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/ArcRegex.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/FileUtils.h0000644000000000000000000000013214152153376021605 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.398592472 nordugrid-arc-6.14.0/include/arc/FileUtils.h0000644000175000002070000000006114152153376021567 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/FileUtils.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/crypto0000644000000000000000000000013214152153475020777 xustar000000000000000030 mtime=1638455101.452593283 30 atime=1638455103.995631494 30 ctime=1638455101.452593283 nordugrid-arc-6.14.0/include/arc/crypto/0000755000175000002070000000000014152153475021041 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/crypto/PaxHeaders.30264/OpenSSL.h0000644000000000000000000000013214152153376022510 xustar000000000000000030 mtime=1638455038.273643991 30 atime=1638455038.461646816 30 ctime=1638455101.452593283 nordugrid-arc-6.14.0/include/arc/crypto/OpenSSL.h0000644000175000002070000000006214152153376022473 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/crypto/OpenSSL.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/HostnameResolver.h0000644000000000000000000000013214152153376023205 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.400592502 nordugrid-arc-6.14.0/include/arc/HostnameResolver.h0000644000175000002070000000007014152153376023167 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/HostnameResolver.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/IntraProcessCounter.h0000644000000000000000000000013214152153376023661 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.402592532 nordugrid-arc-6.14.0/include/arc/IntraProcessCounter.h0000644000175000002070000000007314152153376023646 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/IntraProcessCounter.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/FileAccess.h0000644000000000000000000000013214152153376021706 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.396592442 nordugrid-arc-6.14.0/include/arc/FileAccess.h0000644000175000002070000000006214152153376021671 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/FileAccess.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/CheckSum.h0000644000000000000000000000013214152153376021407 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.393592397 nordugrid-arc-6.14.0/include/arc/CheckSum.h0000644000175000002070000000006014152153376021370 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/CheckSum.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/Utils.h0000644000000000000000000000013214152153376021005 xustar000000000000000030 mtime=1638455038.270643946 30 
atime=1638455038.461646816 30 ctime=1638455101.413592697 nordugrid-arc-6.14.0/include/arc/Utils.h0000644000175000002070000000005514152153376020772 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/Utils.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/globusutils0000644000000000000000000000013214152153475022033 xustar000000000000000030 mtime=1638455101.479593689 30 atime=1638455103.995631494 30 ctime=1638455101.479593689 nordugrid-arc-6.14.0/include/arc/globusutils/0000755000175000002070000000000014152153475022075 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/globusutils/PaxHeaders.30264/GSSCredential.h0000644000000000000000000000013214152153376024710 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.478593674 nordugrid-arc-6.14.0/include/arc/globusutils/GSSCredential.h0000644000175000002070000000007514152153376024677 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/globusutils/GSSCredential.h" nordugrid-arc-6.14.0/include/arc/globusutils/PaxHeaders.30264/GlobusErrorUtils.h0000644000000000000000000000013214152153376025547 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.479593689 nordugrid-arc-6.14.0/include/arc/globusutils/GlobusErrorUtils.h0000644000175000002070000000010014152153376025523 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/globusutils/GlobusErrorUtils.h" nordugrid-arc-6.14.0/include/arc/globusutils/PaxHeaders.30264/GlobusWorkarounds.h0000644000000000000000000000013214152153376025753 xustar000000000000000030 mtime=1638455038.275644021 30 atime=1638455038.461646816 30 ctime=1638455101.479593689 nordugrid-arc-6.14.0/include/arc/globusutils/GlobusWorkarounds.h0000644000175000002070000000010114152153376025730 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/globusutils/GlobusWorkarounds.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/message0000644000000000000000000000013214152153475021103 xustar000000000000000030 mtime=1638455101.502594035 30 atime=1638455103.995631494 30 ctime=1638455101.502594035 nordugrid-arc-6.14.0/include/arc/message/0000755000175000002070000000000014152153475021145 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/MCC_Status.h0000644000000000000000000000013214152153376023276 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 30 ctime=1638455101.492593885 nordugrid-arc-6.14.0/include/arc/message/MCC_Status.h0000644000175000002070000000006614152153376023265 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/MCC_Status.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/PayloadStream.h0000644000000000000000000000013114152153376024075 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 29 ctime=1638455101.49759396 nordugrid-arc-6.14.0/include/arc/message/PayloadStream.h0000644000175000002070000000007114152153376024061 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/PayloadStream.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/PayloadSOAP.h0000644000000000000000000000013214152153376023405 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 30 ctime=1638455101.496593945 nordugrid-arc-6.14.0/include/arc/message/PayloadSOAP.h0000644000175000002070000000006714152153376023375 0ustar00mockbuildmock00000000000000#include 
"../../../src/hed/libs/message/PayloadSOAP.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/SecHandler.h0000644000000000000000000000013114152153376023340 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 29 ctime=1638455101.50159402 nordugrid-arc-6.14.0/include/arc/message/SecHandler.h0000644000175000002070000000006614152153376023330 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/SecHandler.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/Plexer.h0000644000000000000000000000013214152153376022570 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 30 ctime=1638455101.498593975 nordugrid-arc-6.14.0/include/arc/message/Plexer.h0000644000175000002070000000006214152153376022553 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/Plexer.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/MessageAuth.h0000644000000000000000000000013214152153376023537 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 30 ctime=1638455101.494593915 nordugrid-arc-6.14.0/include/arc/message/MessageAuth.h0000644000175000002070000000006714152153376023527 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/MessageAuth.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/SecAttr.h0000644000000000000000000000013114152153376022675 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 29 ctime=1638455101.50159402 nordugrid-arc-6.14.0/include/arc/message/SecAttr.h0000644000175000002070000000006314152153376022662 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/SecAttr.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/Message.h0000644000000000000000000000013014152153376022713 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 28 ctime=1638455101.4935939 nordugrid-arc-6.14.0/include/arc/message/Message.h0000644000175000002070000000006314152153376022701 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/Message.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/MCCLoader.h0000644000000000000000000000013114152153376023061 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 29 ctime=1638455101.49159387 nordugrid-arc-6.14.0/include/arc/message/MCCLoader.h0000644000175000002070000000006514152153376023050 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/MCCLoader.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/SOAPMessage.h0000644000000000000000000000013214152153376023400 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 30 ctime=1638455101.500594005 nordugrid-arc-6.14.0/include/arc/message/SOAPMessage.h0000644000175000002070000000006714152153376023370 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/SOAPMessage.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/MessageAttributes.h0000644000000000000000000000013014152153376024762 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 28 ctime=1638455101.4935939 nordugrid-arc-6.14.0/include/arc/message/MessageAttributes.h0000644000175000002070000000007514152153376024753 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/MessageAttributes.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/MCC.h0000644000000000000000000000013214152153376021733 
xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 30 ctime=1638455101.490593854 nordugrid-arc-6.14.0/include/arc/message/MCC.h0000644000175000002070000000005714152153376021722 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/MCC.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/PayloadRaw.h0000644000000000000000000000013114152153376023373 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 29 ctime=1638455101.49559393 nordugrid-arc-6.14.0/include/arc/message/PayloadRaw.h0000644000175000002070000000006614152153376023363 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/PayloadRaw.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/SOAPEnvelope.h0000644000000000000000000000013114152153376023570 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 29 ctime=1638455101.49959399 nordugrid-arc-6.14.0/include/arc/message/SOAPEnvelope.h0000644000175000002070000000007014152153376023553 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/SOAPEnvelope.h" nordugrid-arc-6.14.0/include/arc/message/PaxHeaders.30264/Service.h0000644000000000000000000000013214152153376022731 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 30 ctime=1638455101.502594035 nordugrid-arc-6.14.0/include/arc/message/Service.h0000644000175000002070000000006314152153376022715 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/message/Service.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/delegation0000644000000000000000000000013214152153475021572 xustar000000000000000030 mtime=1638455101.476593644 30 atime=1638455103.996631509 30 ctime=1638455101.476593644 nordugrid-arc-6.14.0/include/arc/delegation/0000755000175000002070000000000014152153475021634 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/delegation/PaxHeaders.30264/DelegationInterface.h0000644000000000000000000000013214152153376025714 xustar000000000000000030 mtime=1638455038.274644006 30 atime=1638455038.461646816 30 ctime=1638455101.476593644 nordugrid-arc-6.14.0/include/arc/delegation/DelegationInterface.h0000644000175000002070000000010214152153376025672 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/delegation/DelegationInterface.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/ws-addressing0000644000000000000000000000013214152153475022231 xustar000000000000000030 mtime=1638455101.536594546 30 atime=1638455103.996631509 30 ctime=1638455101.536594546 nordugrid-arc-6.14.0/include/arc/ws-addressing/0000755000175000002070000000000014152153475022273 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/ws-addressing/PaxHeaders.30264/WSA.h0000644000000000000000000000013214152153376023111 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455101.536594546 nordugrid-arc-6.14.0/include/arc/ws-addressing/WSA.h0000644000175000002070000000006514152153376023077 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/ws-addressing/WSA.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/URL.h0000644000000000000000000000013214152153376020347 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.411592668 nordugrid-arc-6.14.0/include/arc/URL.h0000644000175000002070000000005314152153376020332 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/URL.h" 
nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/otokens0000644000000000000000000000013214152153475021141 xustar000000000000000030 mtime=1638455101.504594065 30 atime=1638455103.996631509 30 ctime=1638455101.504594065 nordugrid-arc-6.14.0/include/arc/otokens/0000755000175000002070000000000014152153475021203 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/otokens/PaxHeaders.30264/otokens.h0000644000000000000000000000013214152153376023051 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 30 ctime=1638455101.504594065 nordugrid-arc-6.14.0/include/arc/otokens/otokens.h0000644000175000002070000000006314152153376023035 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/otokens/otokens.h" nordugrid-arc-6.14.0/include/arc/otokens/PaxHeaders.30264/openid_metadata.h0000644000000000000000000000013114152153376024504 xustar000000000000000030 mtime=1638455038.276644036 30 atime=1638455038.461646816 29 ctime=1638455101.50359405 nordugrid-arc-6.14.0/include/arc/otokens/openid_metadata.h0000644000175000002070000000007314152153376024472 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/otokens/openid_metadata.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/compute0000644000000000000000000000013214152153475021133 xustar000000000000000030 mtime=1638455101.442593133 30 atime=1638455103.996631509 30 ctime=1638455101.442593133 nordugrid-arc-6.14.0/include/arc/compute/0000755000175000002070000000000014152153475021175 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/SubmissionStatus.h0000644000000000000000000000013214152153376024720 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.438593073 nordugrid-arc-6.14.0/include/arc/compute/SubmissionStatus.h0000644000175000002070000000007414152153376024706 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/SubmissionStatus.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/JobControllerPlugin.h0000644000000000000000000000013214152153376025316 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.429592938 nordugrid-arc-6.14.0/include/arc/compute/JobControllerPlugin.h0000644000175000002070000000007714152153376025307 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/JobControllerPlugin.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/RSLParser.h0000644000000000000000000000013214152153376023176 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.437593058 nordugrid-arc-6.14.0/include/arc/compute/RSLParser.h0000644000175000002070000000006514152153376023164 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/RSLParser.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/GLUE2.h0000644000000000000000000000013214152153376022177 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.426592893 nordugrid-arc-6.14.0/include/arc/compute/GLUE2.h0000644000175000002070000000006114152153376022161 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/GLUE2.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/JobDescriptionParserPlugin.h0000644000000000000000000000013214152153376026633 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.430592953 
nordugrid-arc-6.14.0/include/arc/compute/JobDescriptionParserPlugin.h0000644000175000002070000000010614152153376026615 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/JobDescriptionParserPlugin.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/JobInformationStorage.h0000644000000000000000000000013214152153376025626 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.431592968 nordugrid-arc-6.14.0/include/arc/compute/JobInformationStorage.h0000644000175000002070000000010114152153376025603 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/JobInformationStorage.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/GLUE2Entity.h0000644000000000000000000000013214152153376023374 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.427592908 nordugrid-arc-6.14.0/include/arc/compute/GLUE2Entity.h0000644000175000002070000000006714152153376023364 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/GLUE2Entity.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/JobDescription.h0000644000000000000000000000013214152153376024277 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.430592953 nordugrid-arc-6.14.0/include/arc/compute/JobDescription.h0000644000175000002070000000007214152153376024263 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/JobDescription.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/ComputingServiceRetriever.h0000644000000000000000000000013214152153376026537 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.420592803 nordugrid-arc-6.14.0/include/arc/compute/ComputingServiceRetriever.h0000644000175000002070000000010514152153376026520 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/ComputingServiceRetriever.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/EndpointQueryingStatus.h0000644000000000000000000000013214152153376026071 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.422592833 nordugrid-arc-6.14.0/include/arc/compute/EndpointQueryingStatus.h0000644000175000002070000000010214152153376026047 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/EndpointQueryingStatus.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/Broker.h0000644000000000000000000000013214152153376022605 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.419592788 nordugrid-arc-6.14.0/include/arc/compute/Broker.h0000644000175000002070000000006214152153376022570 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/Broker.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/SubmitterPlugin.h0000644000000000000000000000013214152153376024516 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.440593103 nordugrid-arc-6.14.0/include/arc/compute/SubmitterPlugin.h0000644000175000002070000000007314152153376024503 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/SubmitterPlugin.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/JobInformationStorageXML.h0000644000000000000000000000013214152153376026207 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 
ctime=1638455101.434593013 nordugrid-arc-6.14.0/include/arc/compute/JobInformationStorageXML.h0000644000175000002070000000010414152153376026167 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/JobInformationStorageXML.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/ExecutionTarget.h0000644000000000000000000000013214152153376024473 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.425592878 nordugrid-arc-6.14.0/include/arc/compute/ExecutionTarget.h0000644000175000002070000000007314152153376024460 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/ExecutionTarget.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/JobInformationStorageBDB.h0000644000000000000000000000013214152153376026136 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.432592983 nordugrid-arc-6.14.0/include/arc/compute/JobInformationStorageBDB.h0000644000175000002070000000010414152153376026116 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/JobInformationStorageBDB.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/Job.h0000644000000000000000000000013214152153376022073 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.428592923 nordugrid-arc-6.14.0/include/arc/compute/Job.h0000644000175000002070000000005714152153376022062 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/Job.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/JobSupervisor.h0000644000000000000000000000013214152153376024175 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.436593043 nordugrid-arc-6.14.0/include/arc/compute/JobSupervisor.h0000644000175000002070000000007114152153376024160 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/JobSupervisor.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/JobInformationStorageSQLite.h0000644000000000000000000000013214152153376026710 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.433592998 nordugrid-arc-6.14.0/include/arc/compute/JobInformationStorageSQLite.h0000644000175000002070000000010714152153376026673 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/JobInformationStorageSQLite.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/Submitter.h0000644000000000000000000000013214152153376023337 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.439593088 nordugrid-arc-6.14.0/include/arc/compute/Submitter.h0000644000175000002070000000006514152153376023325 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/Submitter.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/EntityRetriever.h0000644000000000000000000000013214152153376024525 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.423592848 nordugrid-arc-6.14.0/include/arc/compute/EntityRetriever.h0000644000175000002070000000007314152153376024512 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/EntityRetriever.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/JobState.h0000644000000000000000000000013214152153376023074 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.435593028 
nordugrid-arc-6.14.0/include/arc/compute/JobState.h0000644000175000002070000000006414152153376023061 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/JobState.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/WSCommonPlugin.h0000644000000000000000000000013214152153376024242 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.442593133 nordugrid-arc-6.14.0/include/arc/compute/WSCommonPlugin.h0000644000175000002070000000007214152153376024226 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/WSCommonPlugin.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/Endpoint.h0000644000000000000000000000013214152153376023141 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.421592818 nordugrid-arc-6.14.0/include/arc/compute/Endpoint.h0000644000175000002070000000006414152153376023126 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/Endpoint.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/BrokerPlugin.h0000644000000000000000000000013214152153376023764 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.419592788 nordugrid-arc-6.14.0/include/arc/compute/BrokerPlugin.h0000644000175000002070000000007014152153376023746 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/BrokerPlugin.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/Software.h0000644000000000000000000000013214152153376023153 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.437593058 nordugrid-arc-6.14.0/include/arc/compute/Software.h0000644000175000002070000000006414152153376023140 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/Software.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/TestACCControl.h0000644000000000000000000000013214152153376024150 xustar000000000000000030 mtime=1638455038.272643976 30 atime=1638455038.461646816 30 ctime=1638455101.441593118 nordugrid-arc-6.14.0/include/arc/compute/TestACCControl.h0000644000175000002070000000007214152153376024134 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/TestACCControl.h" nordugrid-arc-6.14.0/include/arc/compute/PaxHeaders.30264/EntityRetrieverPlugin.h0000644000000000000000000000013214152153376025704 xustar000000000000000030 mtime=1638455038.271643961 30 atime=1638455038.461646816 30 ctime=1638455101.424592863 nordugrid-arc-6.14.0/include/arc/compute/EntityRetrieverPlugin.h0000644000175000002070000000010114152153376025661 0ustar00mockbuildmock00000000000000#include "../../../src/hed/libs/compute/EntityRetrieverPlugin.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/Counter.h0000644000000000000000000000013214152153376021324 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.394592412 nordugrid-arc-6.14.0/include/arc/Counter.h0000644000175000002070000000005714152153376021313 0ustar00mockbuildmock00000000000000#include "../../src/hed/libs/common/Counter.h" nordugrid-arc-6.14.0/include/arc/PaxHeaders.30264/StringConv.h0000644000000000000000000000013214152153376022001 xustar000000000000000030 mtime=1638455038.270643946 30 atime=1638455038.461646816 30 ctime=1638455101.409592638 nordugrid-arc-6.14.0/include/arc/StringConv.h0000644000175000002070000000006214152153376021764 0ustar00mockbuildmock00000000000000#include 
"../../src/hed/libs/common/StringConv.h" nordugrid-arc-6.14.0/include/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376021023 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.386592292 nordugrid-arc-6.14.0/include/Makefile.am0000644000175000002070000000100114152153376021000 0ustar00mockbuildmock00000000000000HEADERFILESCHECK: ./$(DEPDIR)/HEADERFILES: HEADERFILESCHECK echo "HEADERFILES = \\" > HEADERFILES find $(srcdir) -name \*.h -a ! -name ArcVersion.h -print | sort | \ sed -e 's|^$(srcdir)/||' -e 's/$$/ \\/' >> HEADERFILES echo "./$(DEPDIR)/HEADERFILES" >> HEADERFILES if diff ./$(DEPDIR)/HEADERFILES HEADERFILES >/dev/null 2>&1 ; then \ rm -f HEADERFILES ; \ else \ mkdir -p ./$(DEPDIR) ; \ mv HEADERFILES ./$(DEPDIR)/HEADERFILES ; \ fi include ./$(DEPDIR)/HEADERFILES EXTRA_DIST = $(HEADERFILES) nordugrid-arc-6.14.0/include/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153421021023 xustar000000000000000030 mtime=1638455057.045926053 30 atime=1638455092.023451607 30 ctime=1638455101.385592277 nordugrid-arc-6.14.0/include/Makefile.in0000644000175000002070000004543214152153421021020 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = include DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = 
@ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = 
@GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ 
am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ EXTRA_DIST = $(HEADERFILES) all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign include/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign include/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags-am uninstall uninstall-am HEADERFILESCHECK: ./$(DEPDIR)/HEADERFILES: HEADERFILESCHECK echo "HEADERFILES = \\" > HEADERFILES find $(srcdir) -name \*.h -a ! -name ArcVersion.h -print | sort | \ sed -e 's|^$(srcdir)/||' -e 's/$$/ \\/' >> HEADERFILES echo "./$(DEPDIR)/HEADERFILES" >> HEADERFILES if diff ./$(DEPDIR)/HEADERFILES HEADERFILES >/dev/null 2>&1 ; then \ rm -f HEADERFILES ; \ else \ mkdir -p ./$(DEPDIR) ; \ mv HEADERFILES ./$(DEPDIR)/HEADERFILES ; \ fi include ./$(DEPDIR)/HEADERFILES # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/include/PaxHeaders.30264/.deps0000644000000000000000000000013214152153475017723 xustar000000000000000030 mtime=1638455101.544594666 30 atime=1638455103.996631509 30 ctime=1638455101.544594666 nordugrid-arc-6.14.0/include/.deps/0000755000175000002070000000000014152153475017765 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/include/.deps/PaxHeaders.30264/HEADERFILES0000644000000000000000000000013214152153475021436 xustar000000000000000030 mtime=1638455101.364591961 30 atime=1638455101.368592021 30 ctime=1638455101.544594666 nordugrid-arc-6.14.0/include/.deps/HEADERFILES0000644000175000002070000001217114152153475021425 0ustar00mockbuildmock00000000000000HEADERFILES = \ arc/ArcConfig.h \ arc/ArcConfigFile.h \ arc/ArcConfigIni.h \ arc/ArcLocation.h \ arc/ArcRegex.h \ arc/Base64.h \ arc/CheckSum.h \ arc/Counter.h \ arc/DBInterface.h \ arc/DateTime.h \ arc/FileAccess.h \ arc/FileLock.h \ arc/FileUtils.h \ arc/GUID.h \ arc/HostnameResolver.h \ arc/IString.h \ arc/IniConfig.h \ arc/IntraProcessCounter.h \ arc/JSON.h \ arc/JobPerfLog.h \ arc/Logger.h \ arc/MysqlWrapper.h \ arc/OptionParser.h \ arc/Profile.h \ arc/Run.h \ arc/StringConv.h \ arc/Thread.h \ arc/URL.h \ arc/User.h \ arc/UserConfig.h \ arc/Utils.h \ arc/Watchdog.h \ arc/XMLNode.h \ arc/communication/ClientInterface.h \ arc/communication/ClientSAML2SSO.h \ arc/communication/ClientX509Delegation.h \ arc/compute/Broker.h \ arc/compute/BrokerPlugin.h \ arc/compute/ComputingServiceRetriever.h \ arc/compute/Endpoint.h \ arc/compute/EndpointQueryingStatus.h \ arc/compute/EntityRetriever.h \ arc/compute/EntityRetrieverPlugin.h \ arc/compute/ExecutionTarget.h \ arc/compute/GLUE2.h \ arc/compute/GLUE2Entity.h \ arc/compute/Job.h \ arc/compute/JobControllerPlugin.h \ arc/compute/JobDescription.h \ arc/compute/JobDescriptionParserPlugin.h \ arc/compute/JobInformationStorage.h \ arc/compute/JobInformationStorageBDB.h \ arc/compute/JobInformationStorageSQLite.h \ arc/compute/JobInformationStorageXML.h \ arc/compute/JobState.h \ arc/compute/JobSupervisor.h \ arc/compute/RSLParser.h \ arc/compute/Software.h \ arc/compute/SubmissionStatus.h \ arc/compute/Submitter.h \ arc/compute/SubmitterPlugin.h \ arc/compute/TestACCControl.h \ arc/compute/WSCommonPlugin.h \ arc/credential/CertUtil.h \ arc/credential/Credential.h \ arc/credential/NSSUtil.h \ arc/credential/PasswordSource.h \ arc/credential/Proxycertinfo.h \ arc/credential/VOMSAttribute.h \ arc/credential/VOMSConfig.h \ arc/credential/VOMSUtil.h \ arc/credentialstore/ClientVOMS.h \ arc/credentialstore/ClientVOMSRESTful.h \ arc/credentialstore/CredentialStore.h \ arc/crypto/OpenSSL.h \ arc/data-staging/DTR.h \ arc/data-staging/DTRList.h \ arc/data-staging/DTRStatus.h \ arc/data-staging/DataDelivery.h \ arc/data-staging/DataDeliveryComm.h \ arc/data-staging/Generator.h \ arc/data-staging/Processor.h \ arc/data-staging/Scheduler.h \ arc/data-staging/TransferShares.h \ arc/data/DataBuffer.h \ arc/data/DataCallback.h \ arc/data/DataExternalComm.h \ arc/data/DataExternalHelper.h \ arc/data/DataHandle.h \ arc/data/DataMover.h \ arc/data/DataPoint.h \ arc/data/DataPointDelegate.h \ arc/data/DataPointDirect.h \ arc/data/DataPointIndex.h \ arc/data/DataSpeed.h \ arc/data/DataStatus.h \ arc/data/FileCache.h \ arc/data/FileCacheHash.h \ arc/data/FileInfo.h \ arc/data/MkDirRecursive.h \ arc/data/URLMap.h \ arc/delegation/DelegationInterface.h \ arc/external/cJSON/cJSON.h \ arc/globusutils/GSSCredential.h \ arc/globusutils/GlobusErrorUtils.h \ 
arc/globusutils/GlobusWorkarounds.h \ arc/infosys/InfoCache.h \ arc/infosys/InfoFilter.h \ arc/infosys/InfoRegister.h \ arc/infosys/InformationInterface.h \ arc/infosys/RegisteredService.h \ arc/job/error.h \ arc/job/runtimeenvironment.h \ arc/loader/FinderLoader.h \ arc/loader/Loader.h \ arc/loader/ModuleManager.h \ arc/loader/Plugin.h \ arc/message/MCC.h \ arc/message/MCCLoader.h \ arc/message/MCC_Status.h \ arc/message/Message.h \ arc/message/MessageAttributes.h \ arc/message/MessageAuth.h \ arc/message/PayloadRaw.h \ arc/message/PayloadSOAP.h \ arc/message/PayloadStream.h \ arc/message/Plexer.h \ arc/message/SOAPEnvelope.h \ arc/message/SOAPMessage.h \ arc/message/SecAttr.h \ arc/message/SecHandler.h \ arc/message/Service.h \ arc/otokens/openid_metadata.h \ arc/otokens/otokens.h \ arc/security/ArcPDP/EvaluationCtx.h \ arc/security/ArcPDP/Evaluator.h \ arc/security/ArcPDP/EvaluatorLoader.h \ arc/security/ArcPDP/PolicyParser.h \ arc/security/ArcPDP/PolicyStore.h \ arc/security/ArcPDP/Request.h \ arc/security/ArcPDP/RequestItem.h \ arc/security/ArcPDP/Response.h \ arc/security/ArcPDP/Result.h \ arc/security/ArcPDP/Source.h \ arc/security/ArcPDP/alg/AlgFactory.h \ arc/security/ArcPDP/alg/CombiningAlg.h \ arc/security/ArcPDP/alg/DenyOverridesAlg.h \ arc/security/ArcPDP/alg/OrderedAlg.h \ arc/security/ArcPDP/alg/PermitOverridesAlg.h \ arc/security/ArcPDP/attr/AnyURIAttribute.h \ arc/security/ArcPDP/attr/AttributeFactory.h \ arc/security/ArcPDP/attr/AttributeProxy.h \ arc/security/ArcPDP/attr/AttributeValue.h \ arc/security/ArcPDP/attr/BooleanAttribute.h \ arc/security/ArcPDP/attr/DateTimeAttribute.h \ arc/security/ArcPDP/attr/GenericAttribute.h \ arc/security/ArcPDP/attr/RequestAttribute.h \ arc/security/ArcPDP/attr/StringAttribute.h \ arc/security/ArcPDP/attr/X500NameAttribute.h \ arc/security/ArcPDP/fn/EqualFunction.h \ arc/security/ArcPDP/fn/FnFactory.h \ arc/security/ArcPDP/fn/Function.h \ arc/security/ArcPDP/fn/InRangeFunction.h \ arc/security/ArcPDP/fn/MatchFunction.h \ arc/security/ArcPDP/policy/Policy.h \ arc/security/ClassLoader.h \ arc/security/PDP.h \ arc/security/Security.h \ arc/ws-addressing/WSA.h \ arc/ws-security/SAMLToken.h \ arc/ws-security/UsernameToken.h \ arc/ws-security/X509Token.h \ arc/wsrf/WSRF.h \ arc/wsrf/WSRFBaseFault.h \ arc/wsrf/WSResourceProperties.h \ arc/xmlsec/XMLSecNode.h \ arc/xmlsec/XmlSecUtils.h \ arc/xmlsec/saml_util.h \ ./.deps/HEADERFILES nordugrid-arc-6.14.0/include/PaxHeaders.30264/README0000644000000000000000000000013214152153376017647 xustar000000000000000030 mtime=1638455038.269643931 30 atime=1638455038.460646801 30 ctime=1638455101.387592307 nordugrid-arc-6.14.0/include/README0000644000175000002070000000200314152153376017631 0ustar00mockbuildmock00000000000000 This directory contains a tree of header files referring to files with the same names located in the source directory src/ - the actual header files. The referred-to files constitute the API of the ARC HED software. The location of files in this tree is defined by how the API is presented to external developers. The actual header files are located next to the corresponding source files, and their location is defined by the convenience of the ARC HED developers. These files are not used during the installation procedure. Upon installation the actual header files are laid out in the same way as the files in this directory. Files in this directory are used throughout all ARC code. All source and header files of the entire source tree must refer to the actual header files through these ones. A minimal illustration of such an include is sketched below.
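For illustration only (this example is not part of the build; the file name, URL and compile command below are hypothetical): a C++ source file refers to a header from this layout through the arc/ prefix, for instance

    // url_example.cpp - minimal sketch; assumes the ARC development headers
    // (this include/ tree or the installed copy) are on the include path.
    #include <iostream>
    #include <arc/URL.h>

    int main() {
      Arc::URL url("https://example.org:443/arex");  // hypothetical endpoint
      std::cout << url.str() << std::endl;           // print the parsed URL
      return 0;
    }

A possible compile line against an installed ARC (the prefix and the assumed library name, libarccommon, depend on the installation):

    c++ url_example.cpp -I<prefix>/include -L<prefix>/lib -larccommon -o url_example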
Code being built outside source tree should refer either to installed actual header files or to those located in this directory. Due to similar layout switch between two options does not require changes in source and header files of code. Only building procedure need to be changed. nordugrid-arc-6.14.0/PaxHeaders.30264/ABOUT-NLS0000644000000000000000000000013214152153400016557 xustar000000000000000030 mtime=1638455040.942684094 30 atime=1638455040.942684094 30 ctime=1638455095.728507277 nordugrid-arc-6.14.0/ABOUT-NLS0000644000175000002070000022532614152153400016556 0ustar00mockbuildmock000000000000001 Notes on the Free Translation Project *************************************** Free software is going international! The Free Translation Project is a way to get maintainers of free software, translators, and users all together, so that free software will gradually become able to speak many languages. A few packages already provide translations for their messages. If you found this `ABOUT-NLS' file inside a distribution, you may assume that the distributed package does use GNU `gettext' internally, itself available at your nearest GNU archive site. But you do _not_ need to install GNU `gettext' prior to configuring, installing or using this package with messages translated. Installers will find here some useful hints. These notes also explain how users should proceed for getting the programs to use the available translations. They tell how people wanting to contribute and work on translations can contact the appropriate team. When reporting bugs in the `intl/' directory or bugs which may be related to internationalization, you should tell about the version of `gettext' which is used. The information can be found in the `intl/VERSION' file, in internationalized packages. 1.1 Quick configuration advice ============================== If you want to exploit the full power of internationalization, you should configure it using ./configure --with-included-gettext to force usage of internationalizing routines provided within this package, despite the existence of internationalizing capabilities in the operating system where this package is being installed. So far, only the `gettext' implementation in the GNU C library version 2 provides as many features (such as locale alias, message inheritance, automatic charset conversion or plural form handling) as the implementation here. It is also not possible to offer this additional functionality on top of a `catgets' implementation. Future versions of GNU `gettext' will very likely convey even more functionality. So it might be a good idea to change to GNU `gettext' as soon as possible. So you need _not_ provide this option if you are using GNU libc 2 or you have installed a recent copy of the GNU gettext package with the included `libintl'. 1.2 INSTALL Matters =================== Some packages are "localizable" when properly installed; the programs they contain can be made to speak your own native language. Most such packages use GNU `gettext'. Other packages have their own ways to internationalization, predating GNU `gettext'. By default, this package will be installed to allow translation of messages. It will automatically detect whether the system already provides the GNU `gettext' functions. If not, the included GNU `gettext' library will be used. This library is wholly contained within this package, usually in the `intl/' subdirectory, so prior installation of the GNU `gettext' package is _not_ required. 
Installers may use special options at configuration time for changing the default behaviour. The commands: ./configure --with-included-gettext ./configure --disable-nls will, respectively, bypass any pre-existing `gettext' to use the internationalizing routines provided within this package, or else, _totally_ disable translation of messages. When you already have GNU `gettext' installed on your system and run configure without an option for your new package, `configure' will probably detect the previously built and installed `libintl.a' file and will decide to use this. This might not be desirable. You should use the more recent version of the GNU `gettext' library. I.e. if the file `intl/VERSION' shows that the library which comes with this package is more recent, you should use ./configure --with-included-gettext to prevent auto-detection. The configuration process will not test for the `catgets' function and therefore it will not be used. The reason is that even an emulation of `gettext' on top of `catgets' could not provide all the extensions of the GNU `gettext' library. Internationalized packages usually have many `po/LL.po' files, where LL gives an ISO 639 two-letter code identifying the language. Unless translations have been forbidden at `configure' time by using the `--disable-nls' switch, all available translations are installed together with the package. However, the environment variable `LINGUAS' may be set, prior to configuration, to limit the installed set. `LINGUAS' should then contain a space separated list of two-letter codes, stating which languages are allowed. 1.3 Using This Package ====================== As a user, if your language has been installed for this package, you only have to set the `LANG' environment variable to the appropriate `LL_CC' combination. If you happen to have the `LC_ALL' or some other `LC_xxx' environment variables set, you should unset them before setting `LANG', otherwise the setting of `LANG' will not have the desired effect. Here `LL' is an ISO 639 two-letter language code, and `CC' is an ISO 3166 two-letter country code. For example, let's suppose that you speak German and live in Germany. At the shell prompt, merely execute `setenv LANG de_DE' (in `csh'), `export LANG; LANG=de_DE' (in `sh') or `export LANG=de_DE' (in `bash'). This can be done from your `.login' or `.profile' file, once and for all. You might think that the country code specification is redundant. But in fact, some languages have dialects in different countries. For example, `de_AT' is used for Austria, and `pt_BR' for Brazil. The country code serves to distinguish the dialects. The locale naming convention of `LL_CC', with `LL' denoting the language and `CC' denoting the country, is the one use on systems based on GNU libc. On other systems, some variations of this scheme are used, such as `LL' or `LL_CC.ENCODING'. You can get the list of locales supported by your system for your language by running the command `locale -a | grep '^LL''. Not all programs have translations for all languages. By default, an English message is shown in place of a nonexistent translation. If you understand other languages, you can set up a priority list of languages. This is done through a different environment variable, called `LANGUAGE'. GNU `gettext' gives preference to `LANGUAGE' over `LANG' for the purpose of message handling, but you still need to have `LANG' set to the primary language; this is required by other parts of the system libraries. 
For example, some Swedish users who would rather read translations in German than English for when Swedish is not available, set `LANGUAGE' to `sv:de' while leaving `LANG' to `sv_SE'. Special advice for Norwegian users: The language code for Norwegian bokma*l changed from `no' to `nb' recently (in 2003). During the transition period, while some message catalogs for this language are installed under `nb' and some older ones under `no', it's recommended for Norwegian users to set `LANGUAGE' to `nb:no' so that both newer and older translations are used. In the `LANGUAGE' environment variable, but not in the `LANG' environment variable, `LL_CC' combinations can be abbreviated as `LL' to denote the language's main dialect. For example, `de' is equivalent to `de_DE' (German as spoken in Germany), and `pt' to `pt_PT' (Portuguese as spoken in Portugal) in this context. 1.4 Translating Teams ===================== For the Free Translation Project to be a success, we need interested people who like their own language and write it well, and who are also able to synergize with other translators speaking the same language. Each translation team has its own mailing list. The up-to-date list of teams can be found at the Free Translation Project's homepage, `http://translationproject.org/', in the "Teams" area. If you'd like to volunteer to _work_ at translating messages, you should become a member of the translating team for your own language. The subscribing address is _not_ the same as the list itself, it has `-request' appended. For example, speakers of Swedish can send a message to `sv-request@li.org', having this message body: subscribe Keep in mind that team members are expected to participate _actively_ in translations, or at solving translational difficulties, rather than merely lurking around. If your team does not exist yet and you want to start one, or if you are unsure about what to do or how to get started, please write to `coordinator@translationproject.org' to reach the coordinator for all translator teams. The English team is special. It works at improving and uniformizing the terminology in use. Proven linguistic skills are praised more than programming skills, here. 1.5 Available Packages ====================== Languages are not equally supported in all packages. The following matrix shows the current state of internationalization, as of November 2007. The matrix shows, in regard of each package, for which languages PO files have been submitted to translation coordination, with a translation percentage of at least 50%. 
Ready PO files af am ar az be bg bs ca cs cy da de el en en_GB eo +----------------------------------------------------+ Compendium | [] [] [] [] | a2ps | [] [] [] [] [] | aegis | () | ant-phone | () | anubis | [] | ap-utils | | aspell | [] [] [] [] [] | bash | [] | bfd | | bibshelf | [] | binutils | | bison | [] [] | bison-runtime | [] | bluez-pin | [] [] [] [] [] | cflow | [] | clisp | [] [] [] | console-tools | [] [] | coreutils | [] [] [] [] | cpio | | cpplib | [] [] [] | cryptonit | [] | dialog | | diffutils | [] [] [] [] [] [] | doodle | [] | e2fsprogs | [] [] | enscript | [] [] [] [] | fetchmail | [] [] () [] [] | findutils | [] | findutils_stable | [] [] [] | flex | [] [] [] | fslint | | gas | | gawk | [] [] [] | gcal | [] | gcc | [] | gettext-examples | [] [] [] [] [] | gettext-runtime | [] [] [] [] [] | gettext-tools | [] [] | gip | [] | gliv | [] [] | glunarclock | [] | gmult | [] [] | gnubiff | () | gnucash | [] [] () () [] | gnuedu | | gnulib | [] | gnunet | | gnunet-gtk | | gnutls | [] | gpe-aerial | [] [] | gpe-beam | [] [] | gpe-calendar | | gpe-clock | [] [] | gpe-conf | [] [] | gpe-contacts | | gpe-edit | [] | gpe-filemanager | | gpe-go | [] | gpe-login | [] [] | gpe-ownerinfo | [] [] | gpe-package | | gpe-sketchbook | [] [] | gpe-su | [] [] | gpe-taskmanager | [] [] | gpe-timesheet | [] | gpe-today | [] [] | gpe-todo | | gphoto2 | [] [] [] [] | gprof | [] [] | gpsdrive | | gramadoir | [] [] | grep | [] [] | gretl | () | gsasl | | gss | | gst-plugins-bad | [] [] | gst-plugins-base | [] [] | gst-plugins-good | [] [] [] | gst-plugins-ugly | [] [] | gstreamer | [] [] [] [] [] [] [] | gtick | () | gtkam | [] [] [] [] | gtkorphan | [] [] | gtkspell | [] [] [] [] | gutenprint | [] | hello | [] [] [] [] [] | herrie | [] | hylafax | | idutils | [] [] | indent | [] [] [] [] | iso_15924 | | iso_3166 | [] [] [] [] [] [] [] [] [] [] [] | iso_3166_2 | | iso_4217 | [] [] [] | iso_639 | [] [] [] [] | jpilot | [] | jtag | | jwhois | | kbd | [] [] [] [] | keytouch | [] [] | keytouch-editor | [] | keytouch-keyboa... | [] | latrine | () | ld | [] | leafpad | [] [] [] [] [] | libc | [] [] [] [] | libexif | [] | libextractor | [] | libgpewidget | [] [] [] | libgpg-error | [] | libgphoto2 | [] [] | libgphoto2_port | [] [] | libgsasl | | libiconv | [] [] | libidn | [] [] [] | lifelines | [] () | lilypond | [] | lingoteach | | lprng | | lynx | [] [] [] [] | m4 | [] [] [] [] | mailfromd | | mailutils | [] | make | [] [] | man-db | [] [] [] | minicom | [] [] [] | nano | [] [] [] | opcodes | [] | parted | [] [] | pilot-qof | | popt | [] [] [] | psmisc | [] | pwdutils | | qof | | radius | [] | recode | [] [] [] [] [] [] | rpm | [] | screem | | scrollkeeper | [] [] [] [] [] [] [] [] | sed | [] [] [] | shared-mime-info | [] [] [] [] () [] [] [] | sharutils | [] [] [] [] [] [] | shishi | | skencil | [] () | solfege | | soundtracker | [] [] | sp | [] | system-tools-ba... | [] [] [] [] [] [] [] [] [] | tar | [] [] | texinfo | [] [] [] | tin | () () | tuxpaint | [] [] [] [] [] [] | unicode-han-tra... | | unicode-transla... 
| | util-linux | [] [] [] [] | util-linux-ng | [] [] [] [] | vorbis-tools | [] | wastesedge | () | wdiff | [] [] [] [] | wget | [] [] [] | xchat | [] [] [] [] [] [] [] | xkeyboard-config | [] | xpad | [] [] [] | +----------------------------------------------------+ af am ar az be bg bs ca cs cy da de el en en_GB eo 6 0 2 1 8 26 2 40 48 2 56 88 15 1 15 18 es et eu fa fi fr ga gl gu he hi hr hu id is it +--------------------------------------------------+ Compendium | [] [] [] [] [] | a2ps | [] [] [] () | aegis | | ant-phone | [] | anubis | [] | ap-utils | [] [] | aspell | [] [] [] | bash | [] | bfd | [] [] | bibshelf | [] [] [] | binutils | [] [] [] | bison | [] [] [] [] [] [] | bison-runtime | [] [] [] [] [] | bluez-pin | [] [] [] [] [] | cflow | [] | clisp | [] [] | console-tools | | coreutils | [] [] [] [] [] [] | cpio | [] [] [] | cpplib | [] [] | cryptonit | [] | dialog | [] [] [] | diffutils | [] [] [] [] [] [] [] [] [] | doodle | [] [] | e2fsprogs | [] [] [] | enscript | [] [] [] | fetchmail | [] | findutils | [] [] [] | findutils_stable | [] [] [] [] | flex | [] [] [] | fslint | | gas | [] [] | gawk | [] [] [] [] () | gcal | [] [] | gcc | [] | gettext-examples | [] [] [] [] [] [] [] | gettext-runtime | [] [] [] [] [] [] | gettext-tools | [] [] [] [] | gip | [] [] [] [] | gliv | () | glunarclock | [] [] [] | gmult | [] [] [] | gnubiff | () () | gnucash | () () () | gnuedu | [] | gnulib | [] [] [] | gnunet | | gnunet-gtk | | gnutls | | gpe-aerial | [] [] | gpe-beam | [] [] | gpe-calendar | | gpe-clock | [] [] [] [] | gpe-conf | [] | gpe-contacts | [] [] | gpe-edit | [] [] [] [] | gpe-filemanager | [] | gpe-go | [] [] [] | gpe-login | [] [] [] | gpe-ownerinfo | [] [] [] [] [] | gpe-package | [] | gpe-sketchbook | [] [] | gpe-su | [] [] [] [] | gpe-taskmanager | [] [] [] | gpe-timesheet | [] [] [] [] | gpe-today | [] [] [] [] | gpe-todo | [] | gphoto2 | [] [] [] [] [] | gprof | [] [] [] [] [] | gpsdrive | [] | gramadoir | [] [] | grep | [] [] [] | gretl | [] [] [] () | gsasl | [] [] | gss | [] [] | gst-plugins-bad | [] [] [] [] | gst-plugins-base | [] [] [] [] | gst-plugins-good | [] [] [] [] [] | gst-plugins-ugly | [] [] [] [] | gstreamer | [] [] [] | gtick | [] [] [] | gtkam | [] [] [] [] | gtkorphan | [] [] | gtkspell | [] [] [] [] [] [] [] | gutenprint | [] | hello | [] [] [] [] [] [] [] [] [] [] [] [] [] | herrie | [] | hylafax | | idutils | [] [] [] [] [] | indent | [] [] [] [] [] [] [] [] [] [] | iso_15924 | [] | iso_3166 | [] [] [] [] [] [] [] [] [] [] [] [] [] | iso_3166_2 | [] | iso_4217 | [] [] [] [] [] [] | iso_639 | [] [] [] [] [] [] | jpilot | [] [] | jtag | [] | jwhois | [] [] [] [] [] | kbd | [] [] | keytouch | [] [] [] | keytouch-editor | [] | keytouch-keyboa... 
| [] [] | latrine | [] [] | ld | [] [] [] [] | leafpad | [] [] [] [] [] [] | libc | [] [] [] [] [] | libexif | [] | libextractor | [] | libgpewidget | [] [] [] [] [] | libgpg-error | [] | libgphoto2 | [] [] [] | libgphoto2_port | [] [] | libgsasl | [] [] | libiconv | [] [] [] | libidn | [] [] | lifelines | () | lilypond | [] [] [] | lingoteach | [] [] [] | lprng | | lynx | [] [] [] | m4 | [] [] [] [] | mailfromd | | mailutils | [] [] | make | [] [] [] [] [] [] [] [] | man-db | [] | minicom | [] [] [] [] | nano | [] [] [] [] [] [] [] | opcodes | [] [] [] [] | parted | [] [] [] | pilot-qof | | popt | [] [] [] [] | psmisc | [] [] | pwdutils | | qof | [] | radius | [] [] | recode | [] [] [] [] [] [] [] [] | rpm | [] [] | screem | | scrollkeeper | [] [] [] | sed | [] [] [] [] [] | shared-mime-info | [] [] [] [] [] [] | sharutils | [] [] [] [] [] [] [] [] | shishi | [] | skencil | [] [] | solfege | [] | soundtracker | [] [] [] | sp | [] | system-tools-ba... | [] [] [] [] [] [] [] [] [] | tar | [] [] [] [] [] | texinfo | [] [] [] | tin | [] () | tuxpaint | [] [] | unicode-han-tra... | | unicode-transla... | [] [] | util-linux | [] [] [] [] [] [] [] | util-linux-ng | [] [] [] [] [] [] [] | vorbis-tools | | wastesedge | () | wdiff | [] [] [] [] [] [] [] [] | wget | [] [] [] [] [] [] [] [] | xchat | [] [] [] [] [] [] [] | xkeyboard-config | [] [] [] [] | xpad | [] [] [] | +--------------------------------------------------+ es et eu fa fi fr ga gl gu he hi hr hu id is it 85 22 14 2 48 101 61 12 2 8 2 6 53 29 1 52 ja ka ko ku ky lg lt lv mk mn ms mt nb ne nl nn +--------------------------------------------------+ Compendium | [] | a2ps | () [] [] | aegis | () | ant-phone | [] | anubis | [] [] [] | ap-utils | [] | aspell | [] [] | bash | [] | bfd | | bibshelf | [] | binutils | | bison | [] [] [] | bison-runtime | [] [] [] | bluez-pin | [] [] [] | cflow | | clisp | [] | console-tools | | coreutils | [] | cpio | [] | cpplib | [] | cryptonit | [] | dialog | [] [] | diffutils | [] [] [] | doodle | | e2fsprogs | [] | enscript | [] | fetchmail | [] [] | findutils | [] | findutils_stable | [] | flex | [] [] | fslint | | gas | | gawk | [] [] | gcal | | gcc | | gettext-examples | [] [] [] | gettext-runtime | [] [] [] | gettext-tools | [] [] | gip | [] [] | gliv | [] | glunarclock | [] [] | gmult | [] [] [] | gnubiff | | gnucash | () () () | gnuedu | | gnulib | [] [] | gnunet | | gnunet-gtk | | gnutls | [] | gpe-aerial | [] | gpe-beam | [] | gpe-calendar | [] | gpe-clock | [] [] [] | gpe-conf | [] [] [] | gpe-contacts | [] | gpe-edit | [] [] [] | gpe-filemanager | [] [] | gpe-go | [] [] [] | gpe-login | [] [] [] | gpe-ownerinfo | [] [] | gpe-package | [] [] | gpe-sketchbook | [] [] | gpe-su | [] [] [] | gpe-taskmanager | [] [] [] [] | gpe-timesheet | [] | gpe-today | [] [] | gpe-todo | [] | gphoto2 | [] [] | gprof | [] | gpsdrive | [] | gramadoir | () | grep | [] [] | gretl | | gsasl | [] | gss | | gst-plugins-bad | [] | gst-plugins-base | [] | gst-plugins-good | [] | gst-plugins-ugly | [] | gstreamer | [] | gtick | [] | gtkam | [] [] | gtkorphan | [] | gtkspell | [] [] | gutenprint | [] | hello | [] [] [] [] [] [] [] | herrie | [] | hylafax | | idutils | [] | indent | [] [] | iso_15924 | [] | iso_3166 | [] [] [] [] [] [] [] [] | iso_3166_2 | [] | iso_4217 | [] [] [] | iso_639 | [] [] [] [] | jpilot | () () | jtag | | jwhois | [] | kbd | [] | keytouch | [] | keytouch-editor | [] | keytouch-keyboa... 
| | latrine | [] | ld | | leafpad | [] [] | libc | [] [] [] | libexif | | libextractor | | libgpewidget | [] | libgpg-error | | libgphoto2 | [] | libgphoto2_port | [] | libgsasl | [] | libiconv | [] | libidn | [] [] | lifelines | [] | lilypond | [] | lingoteach | [] | lprng | | lynx | [] [] | m4 | [] [] | mailfromd | | mailutils | | make | [] [] [] | man-db | | minicom | [] | nano | [] [] [] | opcodes | [] | parted | [] [] | pilot-qof | | popt | [] [] [] | psmisc | [] [] [] | pwdutils | | qof | | radius | | recode | [] | rpm | [] [] | screem | [] | scrollkeeper | [] [] [] [] | sed | [] [] | shared-mime-info | [] [] [] [] [] [] [] | sharutils | [] [] | shishi | | skencil | | solfege | () () | soundtracker | | sp | () | system-tools-ba... | [] [] [] [] | tar | [] [] [] | texinfo | [] [] | tin | | tuxpaint | () [] [] | unicode-han-tra... | | unicode-transla... | | util-linux | [] [] | util-linux-ng | [] [] | vorbis-tools | | wastesedge | [] | wdiff | [] [] | wget | [] [] | xchat | [] [] [] [] | xkeyboard-config | [] [] [] | xpad | [] [] [] | +--------------------------------------------------+ ja ka ko ku ky lg lt lv mk mn ms mt nb ne nl nn 51 2 25 3 2 0 6 0 2 2 20 0 11 1 103 6 or pa pl pt pt_BR rm ro ru rw sk sl sq sr sv ta +--------------------------------------------------+ Compendium | [] [] [] [] [] | a2ps | () [] [] [] [] [] [] | aegis | () () | ant-phone | [] [] | anubis | [] [] [] | ap-utils | () | aspell | [] [] [] | bash | [] [] | bfd | | bibshelf | [] | binutils | [] [] | bison | [] [] [] [] [] | bison-runtime | [] [] [] [] [] | bluez-pin | [] [] [] [] [] [] [] [] [] | cflow | [] | clisp | [] | console-tools | [] | coreutils | [] [] [] [] | cpio | [] [] [] | cpplib | [] | cryptonit | [] [] | dialog | [] | diffutils | [] [] [] [] [] [] | doodle | [] [] | e2fsprogs | [] [] | enscript | [] [] [] [] [] | fetchmail | [] [] [] | findutils | [] [] [] | findutils_stable | [] [] [] [] [] [] | flex | [] [] [] [] [] | fslint | [] | gas | | gawk | [] [] [] [] | gcal | [] | gcc | [] [] | gettext-examples | [] [] [] [] [] [] [] [] | gettext-runtime | [] [] [] [] [] [] [] [] | gettext-tools | [] [] [] [] [] [] [] | gip | [] [] [] [] | gliv | [] [] [] [] [] [] | glunarclock | [] [] [] [] [] [] | gmult | [] [] [] [] | gnubiff | () [] | gnucash | () [] | gnuedu | | gnulib | [] [] [] | gnunet | | gnunet-gtk | [] | gnutls | [] [] | gpe-aerial | [] [] [] [] [] [] [] | gpe-beam | [] [] [] [] [] [] [] | gpe-calendar | [] [] [] [] | gpe-clock | [] [] [] [] [] [] [] [] | gpe-conf | [] [] [] [] [] [] [] | gpe-contacts | [] [] [] [] [] | gpe-edit | [] [] [] [] [] [] [] [] [] | gpe-filemanager | [] [] | gpe-go | [] [] [] [] [] [] [] [] | gpe-login | [] [] [] [] [] [] [] [] | gpe-ownerinfo | [] [] [] [] [] [] [] [] | gpe-package | [] [] | gpe-sketchbook | [] [] [] [] [] [] [] [] | gpe-su | [] [] [] [] [] [] [] [] | gpe-taskmanager | [] [] [] [] [] [] [] [] | gpe-timesheet | [] [] [] [] [] [] [] [] | gpe-today | [] [] [] [] [] [] [] [] | gpe-todo | [] [] [] [] | gphoto2 | [] [] [] [] [] [] | gprof | [] [] [] | gpsdrive | [] [] | gramadoir | [] [] | grep | [] [] [] [] | gretl | [] [] [] | gsasl | [] [] [] | gss | [] [] [] [] | gst-plugins-bad | [] [] [] | gst-plugins-base | [] [] | gst-plugins-good | [] [] | gst-plugins-ugly | [] [] [] | gstreamer | [] [] [] [] | gtick | [] | gtkam | [] [] [] [] [] | gtkorphan | [] | gtkspell | [] [] [] [] [] [] [] [] | gutenprint | [] | hello | [] [] [] [] [] [] [] [] | herrie | [] [] [] | hylafax | | idutils | [] [] [] [] [] | indent | [] [] [] [] [] [] [] | iso_15924 | | 
iso_3166 | [] [] [] [] [] [] [] [] [] [] [] [] [] | iso_3166_2 | | iso_4217 | [] [] [] [] [] [] [] | iso_639 | [] [] [] [] [] [] [] | jpilot | | jtag | [] | jwhois | [] [] [] [] | kbd | [] [] [] | keytouch | [] | keytouch-editor | [] | keytouch-keyboa... | [] | latrine | | ld | [] | leafpad | [] [] [] [] [] [] | libc | [] [] [] [] | libexif | [] [] | libextractor | [] [] | libgpewidget | [] [] [] [] [] [] [] [] | libgpg-error | [] [] [] | libgphoto2 | [] | libgphoto2_port | [] [] [] | libgsasl | [] [] [] [] | libiconv | [] [] [] | libidn | [] [] () | lifelines | [] [] | lilypond | | lingoteach | [] | lprng | [] | lynx | [] [] [] | m4 | [] [] [] [] [] | mailfromd | [] | mailutils | [] [] [] | make | [] [] [] [] | man-db | [] [] [] [] | minicom | [] [] [] [] [] | nano | [] [] [] [] | opcodes | [] [] | parted | [] | pilot-qof | | popt | [] [] [] [] | psmisc | [] [] | pwdutils | [] [] | qof | [] [] | radius | [] [] | recode | [] [] [] [] [] [] [] | rpm | [] [] [] [] | screem | | scrollkeeper | [] [] [] [] [] [] [] | sed | [] [] [] [] [] [] [] [] [] | shared-mime-info | [] [] [] [] [] [] | sharutils | [] [] [] [] | shishi | [] | skencil | [] [] [] | solfege | [] | soundtracker | [] [] | sp | | system-tools-ba... | [] [] [] [] [] [] [] [] [] | tar | [] [] [] [] | texinfo | [] [] [] [] | tin | () | tuxpaint | [] [] [] [] [] [] | unicode-han-tra... | | unicode-transla... | | util-linux | [] [] [] [] | util-linux-ng | [] [] [] [] | vorbis-tools | [] | wastesedge | | wdiff | [] [] [] [] [] [] [] | wget | [] [] [] [] | xchat | [] [] [] [] [] [] [] | xkeyboard-config | [] [] [] | xpad | [] [] [] | +--------------------------------------------------+ or pa pl pt pt_BR rm ro ru rw sk sl sq sr sv ta 0 5 77 31 53 4 58 72 3 45 46 9 45 122 3 tg th tk tr uk ven vi wa xh zh_CN zh_HK zh_TW zu +---------------------------------------------------+ Compendium | [] [] [] [] | 19 a2ps | [] [] [] | 19 aegis | [] | 1 ant-phone | [] [] | 6 anubis | [] [] [] | 11 ap-utils | () [] | 4 aspell | [] [] [] | 16 bash | [] | 6 bfd | | 2 bibshelf | [] | 7 binutils | [] [] [] [] | 9 bison | [] [] [] [] | 20 bison-runtime | [] [] [] [] | 18 bluez-pin | [] [] [] [] [] [] | 28 cflow | [] [] | 5 clisp | | 9 console-tools | [] [] | 5 coreutils | [] [] [] | 18 cpio | [] [] [] [] | 11 cpplib | [] [] [] [] [] | 12 cryptonit | [] | 6 dialog | [] [] [] | 9 diffutils | [] [] [] [] [] | 29 doodle | [] | 6 e2fsprogs | [] [] | 10 enscript | [] [] [] | 16 fetchmail | [] [] | 12 findutils | [] [] [] | 11 findutils_stable | [] [] [] [] | 18 flex | [] [] | 15 fslint | [] | 2 gas | [] | 3 gawk | [] [] [] | 16 gcal | [] | 5 gcc | [] [] [] | 7 gettext-examples | [] [] [] [] [] [] | 29 gettext-runtime | [] [] [] [] [] [] | 28 gettext-tools | [] [] [] [] [] | 20 gip | [] [] | 13 gliv | [] [] | 11 glunarclock | [] [] [] | 15 gmult | [] [] [] [] | 16 gnubiff | [] | 2 gnucash | () [] | 5 gnuedu | [] | 2 gnulib | [] | 10 gnunet | | 0 gnunet-gtk | [] [] | 3 gnutls | | 4 gpe-aerial | [] [] | 14 gpe-beam | [] [] | 14 gpe-calendar | [] [] | 7 gpe-clock | [] [] [] [] | 21 gpe-conf | [] [] [] | 16 gpe-contacts | [] [] | 10 gpe-edit | [] [] [] [] [] | 22 gpe-filemanager | [] [] | 7 gpe-go | [] [] [] [] | 19 gpe-login | [] [] [] [] [] | 21 gpe-ownerinfo | [] [] [] [] | 21 gpe-package | [] | 6 gpe-sketchbook | [] [] | 16 gpe-su | [] [] [] [] | 21 gpe-taskmanager | [] [] [] [] | 21 gpe-timesheet | [] [] [] [] | 18 gpe-today | [] [] [] [] [] | 21 gpe-todo | [] [] | 8 gphoto2 | [] [] [] [] | 21 gprof | [] [] | 13 gpsdrive | [] | 5 gramadoir | [] | 7 grep | [] | 12 
gretl | | 6 gsasl | [] [] [] | 9 gss | [] | 7 gst-plugins-bad | [] [] [] | 13 gst-plugins-base | [] [] | 11 gst-plugins-good | [] [] [] [] [] | 16 gst-plugins-ugly | [] [] [] | 13 gstreamer | [] [] [] | 18 gtick | [] [] | 7 gtkam | [] | 16 gtkorphan | [] | 7 gtkspell | [] [] [] [] [] [] | 27 gutenprint | | 4 hello | [] [] [] [] [] | 38 herrie | [] [] | 8 hylafax | | 0 idutils | [] [] | 15 indent | [] [] [] [] [] | 28 iso_15924 | [] [] | 4 iso_3166 | [] [] [] [] [] [] [] [] [] | 54 iso_3166_2 | [] [] | 4 iso_4217 | [] [] [] [] [] | 24 iso_639 | [] [] [] [] [] | 26 jpilot | [] [] [] [] | 7 jtag | [] | 3 jwhois | [] [] [] | 13 kbd | [] [] [] | 13 keytouch | [] | 8 keytouch-editor | [] | 5 keytouch-keyboa... | [] | 5 latrine | [] [] | 5 ld | [] [] [] [] | 10 leafpad | [] [] [] [] [] | 24 libc | [] [] [] | 19 libexif | [] | 5 libextractor | [] | 5 libgpewidget | [] [] [] | 20 libgpg-error | [] | 6 libgphoto2 | [] [] | 9 libgphoto2_port | [] [] [] | 11 libgsasl | [] | 8 libiconv | [] [] | 11 libidn | [] [] | 11 lifelines | | 4 lilypond | [] | 6 lingoteach | [] | 6 lprng | [] | 2 lynx | [] [] [] | 15 m4 | [] [] [] | 18 mailfromd | [] [] | 3 mailutils | [] [] | 8 make | [] [] [] | 20 man-db | [] | 9 minicom | [] | 14 nano | [] [] [] | 20 opcodes | [] [] | 10 parted | [] [] [] | 11 pilot-qof | [] | 1 popt | [] [] [] [] | 18 psmisc | [] [] | 10 pwdutils | [] | 3 qof | [] | 4 radius | [] [] | 7 recode | [] [] [] | 25 rpm | [] [] [] [] | 13 screem | [] | 2 scrollkeeper | [] [] [] [] | 26 sed | [] [] [] [] | 23 shared-mime-info | [] [] [] | 29 sharutils | [] [] [] | 23 shishi | [] | 3 skencil | [] | 7 solfege | [] | 3 soundtracker | [] [] | 9 sp | [] | 3 system-tools-ba... | [] [] [] [] [] [] [] | 38 tar | [] [] [] | 17 texinfo | [] [] [] | 15 tin | | 1 tuxpaint | [] [] [] | 19 unicode-han-tra... | | 0 unicode-transla... | | 2 util-linux | [] [] [] | 20 util-linux-ng | [] [] [] | 20 vorbis-tools | [] [] | 4 wastesedge | | 1 wdiff | [] [] | 23 wget | [] [] [] | 20 xchat | [] [] [] [] | 29 xkeyboard-config | [] [] [] | 14 xpad | [] [] [] | 15 +---------------------------------------------------+ 76 teams tg th tk tr uk ven vi wa xh zh_CN zh_HK zh_TW zu 163 domains 0 3 1 74 51 0 143 21 1 57 7 45 0 2036 Some counters in the preceding matrix are higher than the number of visible blocks let us expect. This is because a few extra PO files are used for implementing regional variants of languages, or language dialects. For a PO file in the matrix above to be effective, the package to which it applies should also have been internationalized and distributed as such by its maintainer. There might be an observable lag between the mere existence a PO file and its wide availability in a distribution. If November 2007 seems to be old, you may fetch a more recent copy of this `ABOUT-NLS' file on most GNU archive sites. The most up-to-date matrix with full percentage details can be found at `http://translationproject.org/extra/matrix.html'. 1.6 Using `gettext' in new packages =================================== If you are writing a freely available program and want to internationalize it you are welcome to use GNU `gettext' in your package. Of course you have to respect the GNU Library General Public License which covers the use of the GNU `gettext' library. This means in particular that even non-free programs can use `libintl' as a shared library, whereas only free software can use `libintl' as a static library or use modified versions of `libintl'. 
Once the sources are changed appropriately and the setup can handle the use of `gettext' the only thing missing are the translations. The Free Translation Project is also available for packages which are not developed inside the GNU project. Therefore the information given above applies also for every other Free Software Project. Contact `coordinator@translationproject.org' to make the `.pot' files available to the translation teams. nordugrid-arc-6.14.0/PaxHeaders.30264/missing0000644000000000000000000000013214152153420016726 xustar000000000000000030 mtime=1638455056.861923288 30 atime=1638455071.482142963 30 ctime=1638455095.734507367 nordugrid-arc-6.14.0/missing0000755000175000002070000001533114152153420016721 0ustar00mockbuildmock00000000000000#! /bin/sh # Common wrapper for a few potentially missing GNU programs. scriptversion=2012-06-26.16; # UTC # Copyright (C) 1996-2013 Free Software Foundation, Inc. # Originally written by Fran,cois Pinard , 1996. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. if test $# -eq 0; then echo 1>&2 "Try '$0 --help' for more information" exit 1 fi case $1 in --is-lightweight) # Used by our autoconf macros to check whether the available missing # script is modern enough. exit 0 ;; --run) # Back-compat with the calling convention used by older automake. shift ;; -h|--h|--he|--hel|--help) echo "\ $0 [OPTION]... PROGRAM [ARGUMENT]... Run 'PROGRAM [ARGUMENT]...', returning a proper advice when this fails due to PROGRAM being missing or too old. Options: -h, --help display this help and exit -v, --version output version information and exit Supported PROGRAM values: aclocal autoconf autoheader autom4te automake makeinfo bison yacc flex lex help2man Version suffixes to PROGRAM as well as the prefixes 'gnu-', 'gnu', and 'g' are ignored when checking the name. Send bug reports to ." exit $? ;; -v|--v|--ve|--ver|--vers|--versi|--versio|--version) echo "missing $scriptversion (GNU Automake)" exit $? ;; -*) echo 1>&2 "$0: unknown '$1' option" echo 1>&2 "Try '$0 --help' for more information" exit 1 ;; esac # Run the given program, remember its exit status. "$@"; st=$? # If it succeeded, we are done. test $st -eq 0 && exit 0 # Also exit now if we it failed (or wasn't found), and '--version' was # passed; such an option is passed most likely to detect whether the # program is present and works. case $2 in --version|--help) exit $st;; esac # Exit code 63 means version mismatch. This often happens when the user # tries to use an ancient version of a tool on a file that requires a # minimum version. if test $st -eq 63; then msg="probably too old" elif test $st -eq 127; then # Program was missing. msg="missing on your system" else # Program was found and executed, but failed. 
Give up. exit $st fi perl_URL=http://www.perl.org/ flex_URL=http://flex.sourceforge.net/ gnu_software_URL=http://www.gnu.org/software program_details () { case $1 in aclocal|automake) echo "The '$1' program is part of the GNU Automake package:" echo "<$gnu_software_URL/automake>" echo "It also requires GNU Autoconf, GNU m4 and Perl in order to run:" echo "<$gnu_software_URL/autoconf>" echo "<$gnu_software_URL/m4/>" echo "<$perl_URL>" ;; autoconf|autom4te|autoheader) echo "The '$1' program is part of the GNU Autoconf package:" echo "<$gnu_software_URL/autoconf/>" echo "It also requires GNU m4 and Perl in order to run:" echo "<$gnu_software_URL/m4/>" echo "<$perl_URL>" ;; esac } give_advice () { # Normalize program name to check for. normalized_program=`echo "$1" | sed ' s/^gnu-//; t s/^gnu//; t s/^g//; t'` printf '%s\n' "'$1' is $msg." configure_deps="'configure.ac' or m4 files included by 'configure.ac'" case $normalized_program in autoconf*) echo "You should only need it if you modified 'configure.ac'," echo "or m4 files included by it." program_details 'autoconf' ;; autoheader*) echo "You should only need it if you modified 'acconfig.h' or" echo "$configure_deps." program_details 'autoheader' ;; automake*) echo "You should only need it if you modified 'Makefile.am' or" echo "$configure_deps." program_details 'automake' ;; aclocal*) echo "You should only need it if you modified 'acinclude.m4' or" echo "$configure_deps." program_details 'aclocal' ;; autom4te*) echo "You might have modified some maintainer files that require" echo "the 'automa4te' program to be rebuilt." program_details 'autom4te' ;; bison*|yacc*) echo "You should only need it if you modified a '.y' file." echo "You may want to install the GNU Bison package:" echo "<$gnu_software_URL/bison/>" ;; lex*|flex*) echo "You should only need it if you modified a '.l' file." echo "You may want to install the Fast Lexical Analyzer package:" echo "<$flex_URL>" ;; help2man*) echo "You should only need it if you modified a dependency" \ "of a man page." echo "You may want to install the GNU Help2man package:" echo "<$gnu_software_URL/help2man/>" ;; makeinfo*) echo "You should only need it if you modified a '.texi' file, or" echo "any other file indirectly affecting the aspect of the manual." echo "You might want to install the Texinfo package:" echo "<$gnu_software_URL/texinfo/>" echo "The spurious makeinfo call might also be the consequence of" echo "using a buggy 'make' (AIX, DU, IRIX), in which case you might" echo "want to install GNU make:" echo "<$gnu_software_URL/make/>" ;; *) echo "You might have modified some files without having the proper" echo "tools for further handling them. Check the 'README' file, it" echo "often tells you about the needed prerequisites for installing" echo "this package. You may also peek at any GNU archive site, in" echo "case some other package contains this missing '$1' program." ;; esac } give_advice "$1" | sed -e '1s/^/WARNING: /' \ -e '2,$s/^/ /' >&2 # Propagate the correct exit status (expected to be 127 for a program # not found, 63 for a program that failed due to version mismatch). 
exit $st # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-time-zone: "UTC" # time-stamp-end: "; # UTC" # End: nordugrid-arc-6.14.0/PaxHeaders.30264/debian0000644000000000000000000000013214152153477016513 xustar000000000000000030 mtime=1638455103.992631449 30 atime=1638455103.996631509 30 ctime=1638455103.992631449 nordugrid-arc-6.14.0/debian/0000755000175000002070000000000014152153477016555 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-nordugridmap.install0000644000000000000000000000013214152153376025407 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.950630818 nordugrid-arc-6.14.0/debian/nordugrid-arc-nordugridmap.install0000644000175000002070000000016114152153376025372 0ustar00mockbuildmock00000000000000debian/tmp/usr/sbin/nordugridmap debian/tmp/etc/cron.d/nordugridmap debian/tmp/usr/share/man/man8/nordugridmap.8 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376020622 xustar000000000000000030 mtime=1638455038.263643841 30 atime=1638455038.460646801 30 ctime=1638455103.926630457 nordugrid-arc-6.14.0/debian/Makefile.am0000644000175000002070000000071214152153376020607 0ustar00mockbuildmock00000000000000EXTRA_DIST = changelog compat control copyright rules watch source/format \ $(srcdir)/*.install $(srcdir)/*.docs $(srcdir)/*.dirs \ $(srcdir)/*.enable $(srcdir)/*.no-enable $(srcdir)/*.logrotate \ $(srcdir)/*.postinst $(srcdir)/*.postrm $(srcdir)/*.prerm \ $(srcdir)/*.lintian-overrides README.Debian README.source changelog: changelog.deb cp -p changelog.deb changelog MAINTAINERCLEANFILES = changelog nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-acix-index.arc-acix-index.default.enable0000644000000000000000000000013214152153376030743 xustar000000000000000030 mtime=1638455038.264643856 30 atime=1638455038.460646801 30 ctime=1638455103.964631028 nordugrid-arc-6.14.0/debian/nordugrid-arc-acix-index.arc-acix-index.default.enable0000644000175000002070000000022314152153376030725 0ustar00mockbuildmock00000000000000# To enable arc-acix-index, i.e. to indicate that a readily usable # configuration is in place, comment out or delete the # following line. RUN=no nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-arex.postinst0000644000000000000000000000013214152153376024070 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.987631374 nordugrid-arc-6.14.0/debian/nordugrid-arc-arex.postinst0000644000175000002070000000037614152153376024063 0ustar00mockbuildmock00000000000000#!/bin/sh set -e if [ "$1" = "configure" ] ; then # check hostcert is already generated (update vs install) if [ ! 
-f /etc/grid-security/testCA-hostcert.pem ] ; then arcctl test-ca init arcctl test-ca hostcert fi fi #DEBHELPER# nordugrid-arc-6.14.0/debian/PaxHeaders.30264/python-nordugrid-arc.install0000644000000000000000000000013214152153376024235 xustar000000000000000030 mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.959630953 nordugrid-arc-6.14.0/debian/python-nordugrid-arc.install0000644000175000002070000000111714152153376024222 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/python2.*/site-packages/_arc.*so debian/tmp/usr/lib/python2.*/site-packages/arc/common.py* debian/tmp/usr/lib/python2.*/site-packages/arc/communication.py* debian/tmp/usr/lib/python2.*/site-packages/arc/compute.py* debian/tmp/usr/lib/python2.*/site-packages/arc/credential.py* debian/tmp/usr/lib/python2.*/site-packages/arc/data.py* debian/tmp/usr/lib/python2.*/site-packages/arc/delegation.py* debian/tmp/usr/lib/python2.*/site-packages/arc/loader.py* debian/tmp/usr/lib/python2.*/site-packages/arc/message.py* debian/tmp/usr/lib/python2.*/site-packages/arc/security.py* nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-arcctl.install0000644000000000000000000000013214152153376024164 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.939630653 nordugrid-arc-6.14.0/debian/nordugrid-arc-arcctl.install0000644000175000002070000000077114152153376024156 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/python?.*/site-packages/arc/control/__init__.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/CertificateGenerator.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/ControlCommon.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/OSPackage.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/TestCA.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/ThirdPartyDeployment.py* debian/tmp/usr/share/man/man1/arcctl.1 debian/tmp/usr/sbin/arcctl nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-acix-scanner.arc-acix-scanner.default.ena0000644000000000000000000000013214152153376031124 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.965631043 nordugrid-arc-6.14.0/debian/nordugrid-arc-acix-scanner.arc-acix-scanner.default.enable0000644000175000002070000000022514152153376031573 0ustar00mockbuildmock00000000000000# To enable arc-acix-scanner, i.e. to indicate that a readily usable # configuration is in place, comment out or delete the # following line. RUN=no nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-datadelivery-service.preinst.no-enable0000644000000000000000000000013214152153376030704 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.977631224 nordugrid-arc-6.14.0/debian/nordugrid-arc-datadelivery-service.preinst.no-enable0000644000175000002070000000133614152153376030674 0ustar00mockbuildmock00000000000000#!/bin/sh set -e # Version 6.7.0 starts using defaults-disabled # Remove rc.d links if upgrading from an older version and the service is # disabled in /etc/default if [ -n "$2" ] ; then if dpkg --compare-versions "$2" "<<" "6.7.0" ; then [ -r /etc/default/arc-datadelivery-service ] && . 
/etc/default/arc-datadelivery-service if [ -n "${RUN}" ] && [ "${RUN}" != "yes" ] ; then update-rc.d -f arc-datadelivery-service remove >/dev/null if [ -x "/usr/bin/deb-systemd-helper" ]; then deb-systemd-helper purge 'arc-datadelivery-service.service' >/dev/null || true deb-systemd-helper unmask 'arc-datadelivery-service.service' >/dev/null || true fi fi fi fi #DEBHELPER# nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-plugins-arcrest.install0000644000000000000000000000013214152153376026036 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.951630833 nordugrid-arc-6.14.0/debian/nordugrid-arc-plugins-arcrest.install0000644000175000002070000000012114152153376026015 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/arc/libaccARCREST.so debian/tmp/usr/lib/arc/libaccARCREST.apd nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-community-rtes.install0000644000000000000000000000013214152153376025713 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.943630712 nordugrid-arc-6.14.0/debian/nordugrid-arc-community-rtes.install0000644000175000002070000000016314152153376025700 0ustar00mockbuildmock00000000000000debian/tmp/usr/share/arc/community_rtes.sh debian/tmp/usr/lib/python?.*/site-packages/arc/control/CommunityRTE.py* nordugrid-arc-6.14.0/debian/PaxHeaders.30264/rules0000644000000000000000000000013214152153376017643 xustar000000000000000030 mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.931630532 nordugrid-arc-6.14.0/debian/rules0000755000175000002070000002317514152153376017643 0ustar00mockbuildmock00000000000000#!/usr/bin/make -f # Filter out -Wl,-Bsymbolic-functions from default Ubuntu LDFLAGS export DEB_LDFLAGS_MAINT_STRIP=-Wl,-Bsymbolic-functions ifeq ($(shell pkg-config --atleast-version 2.6 sigc++-2.0 && echo 1),1) ifeq ($(shell echo __GNUC__ | gcc -E - | tail -1),5) # Workaround for too new libsigc++/glibmm, too old gcc combination export DEB_CXXFLAGS_MAINT_APPEND=-std=c++11 endif endif ifeq ($(shell perl -mInline::Python < /dev/null 2>/dev/null && echo 1),1) N = else N = -Nnordugrid-arc-arex-python-lrms endif ifeq ($(shell dpkg-query -W -f '$${Depends}' libinline-python-perl 2>/dev/null | grep -q python3 && echo 1),1) PYVER = 3 WITH_ALT_PYTHON = P = -Npython-nordugrid-arc else PYVER = 2 WITH_ALT_PYTHON = --with-altpython=python3 P = --with python3 endif ifeq ($(shell type dh_systemd_enable > /dev/null 2> /dev/null && echo 1),1) WSD = --with systemd else WSD = endif ifeq ($(shell test -r /usr/include/systemd/sd-daemon.h && echo 1),1) ifeq ($(DEB_HOST_ARCH_OS),linux) SYSTEMD = yes else SYSTEMD = no endif else SYSTEMD = no endif ifeq ($(shell grep -q no-enable /usr/bin/dh_installinit 2>/dev/null && echo 1),1) SYSVNOENBL = --no-enable else SYSVNOENBL = endif ifeq ($(shell grep -q no-enable /usr/bin/dh_systemd_enable 2>/dev/null && echo 1),1) SYSDNOENBL = --no-enable else SYSDNOENBL = endif %: dh $@ --with autoreconf $(WSD) --with python$(PYVER) $(P) $(N) override_dh_auto_configure: if [ ! 
-f po/POTFILES.in.save ] ; then \ cp -p po/POTFILES.in po/POTFILES.in.save ; \ fi LDFLAGS="$(LDFLAGS) -Wl,-z,defs" \ dh_auto_configure -- \ --disable-doc \ --libdir=/usr/lib \ --libexecdir=/usr/lib \ --docdir=/usr/share/doc/nordugrid-arc \ --enable-internal \ --enable-systemd=$(SYSTEMD) \ --with-systemd-units-location=/lib/systemd/system \ --with-sysv-scripts-location=/etc/init.d \ --with-python=python$(PYVER) \ $(WITH_ALT_PYTHON) override_dh_auto_clean: dh_auto_clean rm -f debian/nordugrid-arc-arex.arc-arex.init rm -f debian/nordugrid-arc-arex.arc-arex-ws.init rm -f debian/nordugrid-arc-hed.arched.init rm -f debian/nordugrid-arc-datadelivery-service.arc-datadelivery-service.init rm -f debian/nordugrid-arc-gridftpd.arc-gridftpd.init rm -f debian/nordugrid-arc-infosys-ldap.arc-infosys-ldap.init rm -f debian/nordugrid-arc-acix-scanner.arc-acix-scanner.init rm -f debian/nordugrid-arc-acix-index.arc-acix-index.init rm -f debian/nordugrid-arc-arex.arc-arex.service rm -f debian/nordugrid-arc-arex.arc-arex-ws.service rm -f debian/nordugrid-arc-hed.arched.service rm -f debian/nordugrid-arc-datadelivery-service.arc-datadelivery-service.service rm -f debian/nordugrid-arc-gridftpd.arc-gridftpd.service rm -f debian/nordugrid-arc-infosys-ldap.arc-infosys-ldap.service rm -f debian/nordugrid-arc-infosys-ldap.arc-infosys-ldap-slapd.service rm -f debian/nordugrid-arc-acix-scanner.arc-acix-scanner.service rm -f debian/nordugrid-arc-acix-index.arc-acix-index.service rm -f debian/*.default rm -f debian/*.maintscript rm -f debian/*.preinst if [ -f po/POTFILES.in.save ] ; then \ mv po/POTFILES.in.save po/POTFILES.in ; \ fi find python src -name *.pyc -exec rm {} ';' find python src -depth -name __pycache__ -exec rm -rf {} ';' rm -rf src/services/acix/*/test/_trial_temp rm -rf src/services/a-rex/infoproviders/_Inline rm -rf src/services/a-rex/infoproviders/test/_Inline override_dh_auto_install: dh_auto_install find debian/tmp -name \*.la -exec rm -fv '{}' ';' rm -f debian/tmp/usr/lib/arc/*.a rm -f debian/tmp/usr/lib/libarcglobusutils.so find debian/tmp -depth -name __pycache__ -exec rm -rf '{}' ';' if [ -d debian/tmp/etc/bash_completion.d ]; then \ mkdir -p debian/tmp/usr/share/bash-completion; \ mv debian/tmp/etc/bash_completion.d \ debian/tmp/usr/share/bash-completion/completions; \ fi if [ -z "$(SYSVNOENBL)" ] ; then \ for x in debian/*.enable ; do \ cp -p $$x $${x%.enable} ; \ done ; \ else \ for x in debian/*.no-enable ; do \ cp -p $$x $${x%.no-enable} ; \ done ; \ fi mv debian/tmp/etc/init.d/arc-arex \ debian/nordugrid-arc-arex.arc-arex.init mv debian/tmp/etc/init.d/arc-arex-ws \ debian/nordugrid-arc-arex.arc-arex-ws.init mv debian/tmp/etc/init.d/arched \ debian/nordugrid-arc-hed.arched.init mv debian/tmp/etc/init.d/arc-datadelivery-service \ debian/nordugrid-arc-datadelivery-service.arc-datadelivery-service.init mv debian/tmp/etc/init.d/arc-gridftpd \ debian/nordugrid-arc-gridftpd.arc-gridftpd.init mv debian/tmp/etc/init.d/arc-infosys-ldap \ debian/nordugrid-arc-infosys-ldap.arc-infosys-ldap.init mv debian/tmp/etc/init.d/arc-acix-scanner \ debian/nordugrid-arc-acix-scanner.arc-acix-scanner.init mv debian/tmp/etc/init.d/arc-acix-index \ debian/nordugrid-arc-acix-index.arc-acix-index.init mv debian/tmp/lib/systemd/system/arc-arex.service \ debian/nordugrid-arc-arex.arc-arex.service mv debian/tmp/lib/systemd/system/arc-arex-ws.service \ debian/nordugrid-arc-arex.arc-arex-ws.service mv debian/tmp/lib/systemd/system/arched.service \ debian/nordugrid-arc-hed.arched.service mv 
debian/tmp/lib/systemd/system/arc-datadelivery-service.service \ debian/nordugrid-arc-datadelivery-service.arc-datadelivery-service.service mv debian/tmp/lib/systemd/system/arc-gridftpd.service \ debian/nordugrid-arc-gridftpd.arc-gridftpd.service mv debian/tmp/lib/systemd/system/arc-infosys-ldap.service \ debian/nordugrid-arc-infosys-ldap.arc-infosys-ldap.service mv debian/tmp/lib/systemd/system/arc-infosys-ldap-slapd.service \ debian/nordugrid-arc-infosys-ldap.arc-infosys-ldap-slapd.service mv debian/tmp/lib/systemd/system/arc-acix-scanner.service \ debian/nordugrid-arc-acix-scanner.arc-acix-scanner.service mv debian/tmp/lib/systemd/system/arc-acix-index.service \ debian/nordugrid-arc-acix-index.arc-acix-index.service chmod 4755 debian/tmp/usr/bin/arc-job-cgroup override_dh_install: dh_install -p libarccommon3 \ debian/tmp/usr/lib/python$(PYVER).*/site-packages/arc/__init__.py\* if [ $(PYVER) -eq 2 ] ; then \ dh_install -p python3-nordugrid-arc \ debian/tmp/usr/lib/python3.*/site-packages/arc/__init__.py\* ; fi rm debian/tmp/usr/lib/python?.*/site-packages/arc/__init__.py* dh_install --remaining-packages --fail-missing override_dh_installinit: dh_installinit $(SYSVNOENBL) -p nordugrid-arc-hed --name arched dh_installinit $(SYSVNOENBL) -p nordugrid-arc-arex --name arc-arex dh_installinit $(SYSVNOENBL) -p nordugrid-arc-arex --name arc-arex-ws dh_installinit $(SYSVNOENBL) -p nordugrid-arc-datadelivery-service --name arc-datadelivery-service dh_installinit $(SYSVNOENBL) -p nordugrid-arc-gridftpd --name arc-gridftpd dh_installinit $(SYSVNOENBL) -p nordugrid-arc-infosys-ldap --name arc-infosys-ldap dh_installinit $(SYSVNOENBL) -p nordugrid-arc-acix-scanner --name arc-acix-scanner dh_installinit $(SYSVNOENBL) -p nordugrid-arc-acix-index --name arc-acix-index override_dh_systemd_enable: dh_systemd_enable $(SYSDNOENBL) -p nordugrid-arc-hed --name arched dh_systemd_enable $(SYSDNOENBL) -p nordugrid-arc-arex --name arc-arex dh_systemd_enable $(SYSDNOENBL) -p nordugrid-arc-arex --name arc-arex-ws dh_systemd_enable $(SYSDNOENBL) -p nordugrid-arc-datadelivery-service --name arc-datadelivery-service dh_systemd_enable $(SYSDNOENBL) -p nordugrid-arc-gridftpd --name arc-gridftpd dh_systemd_enable $(SYSDNOENBL) -p nordugrid-arc-infosys-ldap --name arc-infosys-ldap dh_systemd_enable $(SYSDNOENBL) -p nordugrid-arc-infosys-ldap --name arc-infosys-ldap-slapd dh_systemd_enable $(SYSDNOENBL) -p nordugrid-arc-acix-scanner --name arc-acix-scanner dh_systemd_enable $(SYSDNOENBL) -p nordugrid-arc-acix-index --name arc-acix-index override_dh_systemd_start: dh_systemd_start -p nordugrid-arc-hed --name arched dh_systemd_start -p nordugrid-arc-arex --name arc-arex dh_systemd_start -p nordugrid-arc-arex --name arc-arex-ws dh_systemd_start -p nordugrid-arc-datadelivery-service --name arc-datadelivery-service dh_systemd_start -p nordugrid-arc-gridftpd --name arc-gridftpd dh_systemd_start -p nordugrid-arc-infosys-ldap --name arc-infosys-ldap dh_systemd_start -p nordugrid-arc-infosys-ldap --name arc-infosys-ldap-slapd dh_systemd_start -p nordugrid-arc-acix-scanner --name arc-acix-scanner dh_systemd_start -p nordugrid-arc-acix-index --name arc-acix-index override_dh_fixperms: dh_fixperms -X /usr/bin/arc-job-cgroup ifeq ($(PYVER),2) override_dh_python2: dh_python2 echo "pydeps:Depends=python-ldap, python-isodate" \ >> debian/nordugrid-arc-arex.substvars echo "pydeps:Depends=python-dns" \ >> debian/nordugrid-arc-community-rtes.substvars echo "pydeps:Depends=python-twisted, python-openssl" \ >> 
debian/nordugrid-arc-acix-core.substvars echo "pydeps:Depends=python-ldap, python-dns" \ >> debian/nordugrid-arc-archery-manage.substvars echo "pydeps:Depends=python-nordugrid-arc (= \$${binary:Version})" \ >> debian/nordugrid-arc-plugins-python.substvars echo "pydeps:Depends=python-nordugrid-arc (= \$${binary:Version})" \ >> debian/nordugrid-arc-arex-python-lrms.substvars endif ifeq ($(PYVER),3) override_dh_python3: dh_python3 echo "pydeps:Depends=python3-ldap, python3-isodate" \ >> debian/nordugrid-arc-arex.substvars echo "pydeps:Depends=python3-dns" \ >> debian/nordugrid-arc-community-rtes.substvars echo "pydeps:Depends=python3-twisted, python3-openssl" \ >> debian/nordugrid-arc-acix-core.substvars echo "pydeps:Depends=python3-ldap, python3-dns" \ >> debian/nordugrid-arc-archery-manage.substvars echo "pydeps:Depends=python3-nordugrid-arc (= \$${binary:Version})" \ >> debian/nordugrid-arc-plugins-python.substvars echo "pydeps:Depends=python3-nordugrid-arc (= \$${binary:Version})" \ >> debian/nordugrid-arc-arex-python-lrms.substvars endif nordugrid-arc-6.14.0/debian/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153420020621 xustar000000000000000030 mtime=1638455056.999925362 30 atime=1638455092.036451802 30 ctime=1638455103.925630442 nordugrid-arc-6.14.0/debian/Makefile.in0000644000175000002070000004572014152153420020616 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = debian DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/changelog.deb.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = changelog.deb CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ 
ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = 
@GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ 
am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ EXTRA_DIST = changelog compat control copyright rules watch source/format \ $(srcdir)/*.install $(srcdir)/*.docs $(srcdir)/*.dirs \ $(srcdir)/*.enable $(srcdir)/*.no-enable $(srcdir)/*.logrotate \ $(srcdir)/*.postinst $(srcdir)/*.postrm $(srcdir)/*.prerm \ $(srcdir)/*.lintian-overrides README.Debian README.source MAINTAINERCLEANFILES = changelog all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign debian/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign debian/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): changelog.deb: $(top_builddir)/config.status $(srcdir)/changelog.deb.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
-test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES) clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags-am uninstall uninstall-am changelog: changelog.deb cp -p changelog.deb changelog # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-archery-manage.install0000644000000000000000000000013214152153376025577 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.940630667 nordugrid-arc-6.14.0/debian/nordugrid-arc-archery-manage.install0000644000175000002070000000004314152153376025561 0ustar00mockbuildmock00000000000000debian/tmp/usr/sbin/archery-manage nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-arex.logrotate0000644000000000000000000000013214152153376024205 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.983631314 nordugrid-arc-6.14.0/debian/nordugrid-arc-arex.logrotate0000644000175000002070000000204314152153376024171 0ustar00mockbuildmock00000000000000# # Logs written by A-REX # /var/log/arc/arex.log /var/log/arc/arex-jobs.log /var/log/arc/job.helper.errors /var/log/arc/datastaging.log { missingok compress delaycompress daily rotate 14 create sharedscripts postrotate kill -HUP `cat /run/arched-arex.pid 2> /dev/null` 2> /dev/null || true endscript } # # Logs written by A-REX WS interface # /var/log/arc/ws-interface.log { missingok compress delaycompress daily rotate 14 create sharedscripts postrotate kill -HUP `cat /run/arched-arex-ws.pid 2> /dev/null` 2> /dev/null || true endscript } # # External processes spawned by A-REX # /var/log/arc/infoprovider.log /var/log/arc/cache-cleaner.log /var/log/arc/jura.log { missingok compress delaycompress daily rotate 14 create } # # Extra services coupled with A-REX # /var/log/arc/nordugridmap.log { missingok compress delaycompress daily rotate 14 create } # # Developer-enabled performance logging # /var/log/arc/perfdata/*.perflog { missingok compress daily rotate 14 create } 
nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-infosys-ldap.logrotate0000644000000000000000000000013214152153376025656 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.986631359 nordugrid-arc-6.14.0/debian/nordugrid-arc-infosys-ldap.logrotate0000644000175000002070000000014114152153376025637 0ustar00mockbuildmock00000000000000/var/log/arc/bdii/bdii-update.log { missingok compress daily rotate 14 copytruncate } nordugrid-arc-6.14.0/debian/PaxHeaders.30264/changelog.deb.in0000644000000000000000000000013214152153376021576 xustar000000000000000030 mtime=1638455038.264643856 30 atime=1638455038.460646801 30 ctime=1638455103.927630472 nordugrid-arc-6.14.0/debian/changelog.deb.in0000644000175000002070000002160714152153376021571 0ustar00mockbuildmock00000000000000nordugrid-arc (@debianversion@-1) unstable; urgency=low * Unofficial build. -- Anders Waananen @DATER@ nordugrid-arc (4.0.0~rc2-1) unstable; urgency=low * 4.0.0 Release Candidate 2 -- Anders Waananen Thu, 07 Nov 2013 11:01:24 +0100 nordugrid-arc (4.0.0~rc1-1) unstable; urgency=low * 4.0.0 Release Candidate 1 -- Anders Waananen Tue, 29 Oct 2013 23:22:57 +0100 nordugrid-arc (3.0.3-1) unstable; urgency=low * 3.0.3 Final Release -- Anders Waananen Fri, 19 Jul 2013 12:05:50 +0200 nordugrid-arc (3.0.2-1) unstable; urgency=low * 3.0.2 Final Release -- Anders Waananen Wed, 12 Jun 2013 15:09:59 +0200 nordugrid-arc (3.0.1-1) unstable; urgency=low * 3.0.1 Final Release -- Anders Waananen Tue, 30 Apr 2013 00:47:43 +0200 nordugrid-arc (3.0.1~rc2-1) unstable; urgency=low * 3.0.1 Release Candidate 2 -- Anders Waananen Fri, 12 Apr 2013 16:56:03 +0200 nordugrid-arc (3.0.1~rc1-1) unstable; urgency=low * 3.0.1 Release Candidate 1 -- Anders Waananen Fri, 12 Apr 2013 13:50:41 +0200 nordugrid-arc (3.0.0-1) unstable; urgency=low * 3.0.0 Final Release -- Anders Waananen Fri, 22 Mar 2013 12:32:51 +0100 nordugrid-arc (3.0.0~rc5-1) unstable; urgency=low * 3.0.0 Release Candidate 5 -- Anders Waananen Wed, 06 Feb 2013 12:12:48 +0100 nordugrid-arc (3.0.0~rc4-1) unstable; urgency=low * 3.0.0 Release Candidate 4 -- Anders Waananen Sat, 02 Feb 2013 01:00:33 +0100 nordugrid-arc (3.0.0~rc3-1) unstable; urgency=low * 3.0.0 Release Candidate 3 -- Anders Waananen Wed, 30 Jan 2013 09:02:17 +0100 nordugrid-arc (3.0.0~rc2-1) unstable; urgency=low * 3.0.0 Release Candidate 2 -- Anders Waananen Mon, 28 Jan 2013 07:55:14 +0100 nordugrid-arc (3.0.0~rc1-1) unstable; urgency=low * 3.0.0 Release Candidate 1 -- Anders Waananen Thu, 06 Dec 2012 22:05:31 +0100 nordugrid-arc (2.0.1-1) unstable; urgency=low * 2.0.1 Final Release -- Anders Waananen Thu, 22 Nov 2012 23:47:19 +0100 nordugrid-arc (2.0.1rc2) unstable; urgency=low * 2.0.1rc2 Release Candidate 2 -- Anders Waananen Thu, 25 Oct 2012 13:00:02 +0200 nordugrid-arc (2.0.1rc1) unstable; urgency=low * 2.0.1rc1 Release Candidate 1 -- Anders Waananen Mon, 27 Aug 2012 13:26:30 +0200 nordugrid-arc (2.0.0-1) unstable; urgency=low * 2.0.0 Final Release -- Mattias Ellert Wed, 23 May 2012 19:27:47 +0200 nordugrid-arc (2.0.0~rc4-1) unstable; urgency=low * 2.0.0 Release Candidate 4 -- Mattias Ellert Mon, 02 Apr 2012 16:06:45 +0200 nordugrid-arc (2.0.0~rc3.1-1) unstable; urgency=low * 2.0.0 Release Candidate 3.1 -- Mattias Ellert Tue, 27 Mar 2012 10:30:23 +0200 nordugrid-arc (2.0.0~rc3-1) unstable; urgency=low * 2.0.0 Release Candidate 3 -- Mattias Ellert Mon, 05 Mar 2012 16:27:32 +0100 nordugrid-arc (2.0.0~rc2-1) unstable; urgency=low * 2.0.0 Release Candidate 2 -- Mattias Ellert Wed, 
15 Feb 2012 13:54:17 +0100 nordugrid-arc (1.1.0-1) unstable; urgency=low * 1.1.0 Final Release -- Mattias Ellert Mon, 03 Oct 2011 14:30:45 +0200 nordugrid-arc (1.1.0~rc2-1) unstable; urgency=low * 1.1.0 Release Candidate 2 -- Mattias Ellert Sun, 25 Sep 2011 05:42:22 +0200 nordugrid-arc (1.1.0~rc1-1) unstable; urgency=low * 1.1.0 Release Candidate 1 -- Mattias Ellert Sun, 11 Sep 2011 20:08:33 +0200 nordugrid-arc (1.0.1-1) unstable; urgency=low * 1.0.0 Final Release -- Mattias Ellert Sat, 23 Jul 2011 09:32:53 +0200 nordugrid-arc (1.0.1~rc4-1) unstable; urgency=low * 1.0.1 Release Candidate 4 -- Mattias Ellert Tue, 19 Jul 2011 15:17:05 +0200 nordugrid-arc (1.0.1~rc1-1) unstable; urgency=low * 1.0.1 Release Candidate 1 -- Mattias Ellert Sat, 18 Jun 2011 18:29:09 +0200 nordugrid-arc (1.0.0-1) unstable; urgency=low * 1.0.0 Final Release -- Mattias Ellert Mon, 18 Apr 2011 08:59:55 +0200 nordugrid-arc (1.0.0~b5-1) unstable; urgency=low * 1.0.0 Beta Release 5 -- Mattias Ellert Wed, 06 Apr 2011 14:08:52 +0200 nordugrid-arc (1.0.0~b4-1) unstable; urgency=low * 1.0.0 Beta Release 4 -- Mattias Ellert Wed, 23 Mar 2011 15:19:08 +0100 nordugrid-arc (1.0.0~b3-1) unstable; urgency=low * 1.0.0 Beta Release 3 -- Mattias Ellert Thu, 10 Mar 2011 17:05:28 +0100 nordugrid-arc (1.0.0~b2-1) unstable; urgency=low * 1.0.0 Beta Release 2 -- Mattias Ellert Mon, 07 Mar 2011 05:12:30 +0100 nordugrid-arc (1.0.0~b1-1) unstable; urgency=low * 1.0.0 Beta Release 1 -- Mattias Ellert Mon, 14 Feb 2011 17:19:04 +0100 nordugrid-arc-nox (1.2.1-1) unstable; urgency=low * 1.2.1 Final release -- Mattias Ellert Tue, 21 Dec 2010 22:34:02 +0100 nordugrid-arc-nox (1.2.1~rc2-1) unstable; urgency=low * 1.2.1 Release Candidate 2 -- Mattias Ellert Tue, 21 Dec 2010 09:36:46 +0100 nordugrid-arc-nox (1.2.1~rc1-1) unstable; urgency=low * 1.2.1 Release Candidate 1 -- Mattias Ellert Wed, 08 Dec 2010 15:30:37 +0100 nordugrid-arc-nox (1.2.0-1) unstable; urgency=low * 1.2.0 Final release -- Mattias Ellert Fri, 22 Oct 2010 15:25:07 +0200 nordugrid-arc-nox (1.2.0~rc2-1) unstable; urgency=low * 1.2.0 Release Candidate 2 -- Mattias Ellert Thu, 30 Sep 2010 10:11:14 +0200 nordugrid-arc-nox (1.2.0~rc1-1) unstable; urgency=low * 1.2.0 Release Candidate 1 -- Mattias Ellert Mon, 13 Sep 2010 11:14:51 +0200 nordugrid-arc-nox (1.1.0-1) unstable; urgency=low * 1.1.0 Final release -- Mattias Ellert Wed, 05 May 2010 18:31:59 +0200 nordugrid-arc-nox (1.1.0~rc6-1) unstable; urgency=low * 1.1.0 Release Candidate 6 -- Mattias Ellert Mon, 08 Mar 2010 20:36:00 +0100 nordugrid-arc-nox (1.1.0~rc5-2) unstable; urgency=low * Rebuild for Globus Toolkit 5 -- Mattias Ellert Fri, 26 Feb 2010 16:25:39 +0100 nordugrid-arc-nox (1.1.0~rc5-1) unstable; urgency=low * 1.1.0 Release Candidate 5 -- Mattias Ellert Fri, 26 Feb 2010 15:07:39 +0100 nordugrid-arc-nox (1.1.0~rc4-1) unstable; urgency=low * 1.1.0 release candidate 4 -- Mattias Ellert Wed, 24 Feb 2010 12:34:41 +0100 nordugrid-arc-nox (1.1.0~rc3-1) unstable; urgency=low * 1.1.0 release candidate 3 -- Mattias Ellert Mon, 22 Feb 2010 10:20:27 +0100 nordugrid-arc-nox (1.1.0~rc2-1) unstable; urgency=low * 1.1.0 release candidate 2 -- Mattias Ellert Mon, 15 Feb 2010 19:08:07 +0100 nordugrid-arc-nox (1.1.0~rc1-1) unstable; urgency=low * 1.1.0 release candidate 1 -- Mattias Ellert Thu, 11 Feb 2010 19:48:01 +0100 nordugrid-arc-nox (1.0.0-1) unstable; urgency=low * 1.0.0 Final release -- Mattias Ellert Sun, 29 Nov 2009 23:13:41 +0100 nordugrid-arc-nox (1.0.0~rc7-1) unstable; urgency=low * 1.0.0 release candidate 7 -- Mattias Ellert Thu, 19 
Nov 2009 15:30:32 +0100 nordugrid-arc-nox (1.0.0~rc6-1) unstable; urgency=low * 1.0.0 release candidate 6 -- Mattias Ellert Thu, 12 Nov 2009 10:12:45 +0100 nordugrid-arc-nox (1.0.0~rc5-1) unstable; urgency=low * 1.0.0 release candidate 5 -- Mattias Ellert Wed, 04 Nov 2009 16:45:22 +0100 nordugrid-arc1 (0.9.4~rc4-1) unstable; urgency=low * 0.9.3 release candidate 4 -- Mattias Ellert Mon, 26 Oct 2009 23:19:55 +0100 nordugrid-arc1 (0.9.4~rc3-1) unstable; urgency=low * 0.9.3 release candidate 3 -- Mattias Ellert Thu, 22 Oct 2009 19:22:31 +0200 nordugrid-arc1 (0.9.4~rc2-1) unstable; urgency=low * 0.9.3 release candidate 2 -- Mattias Ellert Thu, 15 Oct 2009 09:04:24 +0200 nordugrid-arc1 (0.9.3-1) unstable; urgency=low * Final 0.9.3 release -- Mattias Ellert Sun, 27 Sep 2009 01:27:31 +0200 nordugrid-arc1 (0.9.3~rc3-1) unstable; urgency=low * Initial release -- Mattias Ellert Mon, 5 Nov 2007 10:12:49 -0400 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-client.install0000644000000000000000000000013214152153376024172 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.943630712 nordugrid-arc-6.14.0/debian/nordugrid-arc-client.install0000644000175000002070000000250514152153376024161 0ustar00mockbuildmock00000000000000debian/tmp/usr/bin/arccat debian/tmp/usr/bin/arcclean debian/tmp/usr/bin/arccp debian/tmp/usr/bin/arcget debian/tmp/usr/bin/arcinfo debian/tmp/usr/bin/arckill debian/tmp/usr/bin/arcls debian/tmp/usr/bin/arcmkdir debian/tmp/usr/bin/arcproxy debian/tmp/usr/bin/arcrename debian/tmp/usr/bin/arcrenew debian/tmp/usr/bin/arcresub debian/tmp/usr/bin/arcresume debian/tmp/usr/bin/arcrm debian/tmp/usr/bin/arcstat debian/tmp/usr/bin/arcsub debian/tmp/usr/bin/arcsync debian/tmp/usr/bin/arctest debian/tmp/etc/arc/client.conf debian/tmp/usr/share/arc/examples/client.conf debian/tmp/usr/share/man/man1/arccat.1 debian/tmp/usr/share/man/man1/arcclean.1 debian/tmp/usr/share/man/man1/arccp.1 debian/tmp/usr/share/man/man1/arcget.1 debian/tmp/usr/share/man/man1/arcinfo.1 debian/tmp/usr/share/man/man1/arckill.1 debian/tmp/usr/share/man/man1/arcls.1 debian/tmp/usr/share/man/man1/arcmkdir.1 debian/tmp/usr/share/man/man1/arcproxy.1 debian/tmp/usr/share/man/man1/arcrename.1 debian/tmp/usr/share/man/man1/arcrenew.1 debian/tmp/usr/share/man/man1/arcresub.1 debian/tmp/usr/share/man/man1/arcresume.1 debian/tmp/usr/share/man/man1/arcrm.1 debian/tmp/usr/share/man/man1/arcstat.1 debian/tmp/usr/share/man/man1/arcsub.1 debian/tmp/usr/share/man/man1/arcsync.1 debian/tmp/usr/share/man/man1/arctest.1 debian/tmp/usr/share/bash-completion/completions/arc-client-tools nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-hed.maintscript.no-enable0000644000000000000000000000013214152153376026202 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.980631269 nordugrid-arc-6.14.0/debian/nordugrid-arc-hed.maintscript.no-enable0000644000175000002070000000004614152153376026167 0ustar00mockbuildmock00000000000000rm_conffile /etc/default/arched 6.7.0 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/source0000644000000000000000000000013214152153477020013 xustar000000000000000030 mtime=1638455103.933630562 30 atime=1638455103.996631509 30 ctime=1638455103.933630562 nordugrid-arc-6.14.0/debian/source/0000755000175000002070000000000014152153477020055 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/debian/source/PaxHeaders.30264/format0000644000000000000000000000013214152153376021301 xustar000000000000000030 
mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.933630562 nordugrid-arc-6.14.0/debian/source/format0000644000175000002070000000001414152153376021261 0ustar00mockbuildmock000000000000003.0 (quilt) nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-arex.install0000644000000000000000000000013214152153376023653 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.942630698 nordugrid-arc-6.14.0/debian/nordugrid-arc-arex.install0000644000175000002070000000643114152153376023644 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/arc/arc-blahp-logger debian/tmp/usr/lib/arc/arc-config-check debian/tmp/usr/lib/arc/cache-clean debian/tmp/usr/lib/arc/cache-list debian/tmp/usr/lib/arc/gm-* debian/tmp/usr/lib/arc/inputcheck debian/tmp/usr/lib/arc/jura-ng debian/tmp/usr/lib/arc/smtp-send debian/tmp/usr/lib/arc/smtp-send.sh debian/tmp/usr/share/arc/cancel-*-job debian/tmp/usr/share/arc/scan-*-job debian/tmp/usr/share/arc/submit-*-job debian/tmp/usr/share/arc/perferator debian/tmp/usr/share/arc/PerfData.pl debian/tmp/usr/share/arc/arc-arex-start debian/tmp/usr/share/arc/arc-arex-ws-start debian/tmp/usr/lib/arc/libarex.so debian/tmp/usr/lib/arc/libarex.apd debian/tmp/usr/lib/arc/libcandypond.so debian/tmp/usr/lib/arc/libcandypond.apd debian/tmp/usr/share/arc/CEinfo.pl debian/tmp/usr/share/arc/ARC0mod.pm debian/tmp/usr/share/arc/FORKmod.pm debian/tmp/usr/share/arc/Fork.pm debian/tmp/usr/share/arc/SGEmod.pm debian/tmp/usr/share/arc/SGE.pm debian/tmp/usr/share/arc/LL.pm debian/tmp/usr/share/arc/LSF.pm debian/tmp/usr/share/arc/PBS.pm debian/tmp/usr/share/arc/PBSPRO.pm debian/tmp/usr/share/arc/Condor.pm debian/tmp/usr/share/arc/SLURMmod.pm debian/tmp/usr/share/arc/SLURM.pm debian/tmp/usr/share/arc/Boinc.pm debian/tmp/usr/share/arc/XmlPrinter.pm debian/tmp/usr/share/arc/InfosysHelper.pm debian/tmp/usr/share/arc/LdifPrinter.pm debian/tmp/usr/share/arc/GLUE2xmlPrinter.pm debian/tmp/usr/share/arc/GLUE2ldifPrinter.pm debian/tmp/usr/share/arc/NGldifPrinter.pm debian/tmp/usr/share/arc/ARC0ClusterInfo.pm debian/tmp/usr/share/arc/ARC1ClusterInfo.pm debian/tmp/usr/share/arc/ConfigCentral.pm debian/tmp/usr/share/arc/GMJobsInfo.pm debian/tmp/usr/share/arc/HostInfo.pm debian/tmp/usr/share/arc/RTEInfo.pm debian/tmp/usr/share/arc/InfoChecker.pm debian/tmp/usr/share/arc/IniParser.pm debian/tmp/usr/share/arc/LRMSInfo.pm debian/tmp/usr/share/arc/Sysinfo.pm debian/tmp/usr/share/arc/LogUtils.pm debian/tmp/usr/share/arc/cancel_common.sh debian/tmp/usr/share/arc/condor_env.pm debian/tmp/usr/share/arc/configure-*-env.sh debian/tmp/usr/share/arc/submit_common.sh debian/tmp/usr/share/arc/scan_common.sh debian/tmp/usr/share/arc/lrms_common.sh debian/tmp/usr/share/arc/sql-schema/arex_accounting_db_schema_v1.sql debian/tmp/usr/share/man/man1/arc-config-check.1 debian/tmp/usr/share/man/man1/cache-clean.1 debian/tmp/usr/share/man/man1/cache-list.1 debian/tmp/usr/share/man/man8/a-rex-backtrace-collect.8 debian/tmp/usr/share/man/man8/arc-blahp-logger.8 debian/tmp/usr/share/man/man8/gm-*.8 debian/tmp/usr/lib/python?.*/site-packages/arc/control/AccountingDB.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/AccountingPublishing.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/Accounting.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/Cache.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/DataStaging.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/Jobs.py* 
debian/tmp/usr/lib/python?.*/site-packages/arc/control/RunTimeEnvironment.py* debian/tmp/usr/lib/arc/arccandypond debian/tmp/usr/share/arc/rte/ENV/LRMS-SCRATCH debian/tmp/usr/share/arc/rte/ENV/PROXY debian/tmp/usr/share/arc/rte/ENV/RTE debian/tmp/usr/share/arc/rte/ENV/CANDYPOND debian/tmp/usr/share/arc/rte/ENV/SINGULARITY debian/tmp/usr/share/arc/rte/ENV/CONDOR/DOCKER debian/tmp/usr/sbin/a-rex-backtrace-collect debian/tmp/etc/arc.conf nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-arex.maintscript.no-enable0000644000000000000000000000013214152153376026401 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.974631178 nordugrid-arc-6.14.0/debian/nordugrid-arc-arex.maintscript.no-enable0000644000175000002070000000005014152153376026361 0ustar00mockbuildmock00000000000000rm_conffile /etc/default/arc-arex 6.7.0 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-hed.install0000644000000000000000000000013214152153376023454 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.948630788 nordugrid-arc-6.14.0/debian/nordugrid-arc-hed.install0000644000175000002070000000051014152153376023435 0ustar00mockbuildmock00000000000000debian/tmp/usr/sbin/arched debian/tmp/usr/lib/arc/libecho.so debian/tmp/usr/lib/arc/libecho.apd debian/tmp/usr/share/man/man8/arched.8 debian/tmp/usr/share/man/man5/arc.conf.5 debian/tmp/usr/share/arc/arched-start debian/tmp/usr/share/arc/profiles debian/tmp/usr/share/arc/examples/config debian/tmp/usr/share/arc/examples/echo nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-hed.arched.default.enable0000644000000000000000000000013214152153376026104 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.969631103 nordugrid-arc-6.14.0/debian/nordugrid-arc-hed.arched.default.enable0000644000175000002070000000021314152153376026065 0ustar00mockbuildmock00000000000000# To enable arched, i.e. to indicate that a readily usable # configuration is in place, comment out or delete the # following line. 
RUN=no nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-test-utils.install0000644000000000000000000000013214152153376025031 xustar000000000000000030 mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.958630938 nordugrid-arc-6.14.0/debian/nordugrid-arc-test-utils.install0000644000175000002070000000023014152153376025011 0ustar00mockbuildmock00000000000000debian/tmp/usr/bin/arcemiestest debian/tmp/usr/bin/arcperftest debian/tmp/usr/share/man/man1/arcemiestest.1 debian/tmp/usr/share/man/man1/arcperftest.1 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-dev.docs0000644000000000000000000000013214152153376022754 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.962630998 nordugrid-arc-6.14.0/debian/nordugrid-arc-dev.docs0000644000175000002070000000003114152153376022733 0ustar00mockbuildmock00000000000000src/hed/shc/arcpdp/*.xsd nordugrid-arc-6.14.0/debian/PaxHeaders.30264/copyright0000644000000000000000000000013214152153376020521 xustar000000000000000030 mtime=1638455038.264643856 30 atime=1638455038.460646801 30 ctime=1638455103.930630517 nordugrid-arc-6.14.0/debian/copyright0000644000175000002070000001205114152153376020505 0ustar00mockbuildmock00000000000000Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: nordugrid-arc Upstream-Contact: contact@nordigrd.org Source: http://download.nordugrid.org/packages/nordugrid-arc/releases Files: * Copyright: 2006-2020 David Cameron Péter Dóbé Mattias Ellert Thomas Frågåt Ali Gholami Michael Glodek Jørgen Beck Hansen Henrik Thostrup Jensen Daniel Johansson Johan Jönemo Dmytro Karpenko Tamás Kazinczy Marek Kočan Aleksandr Konstantinov Balázs Kónya Hajo Nils Krabbenhöft Andrew Lahiff Juha Lento Peter Lundgaard Rosendahl Iván Márton Luca Mazzaferro Bjarte Mohn Steffen Möller Zsombor Nagy Aleksei Nazarov Jon Kerr Nilsen Markus Nordén Weizhong Qiang Gábor Rőczei Florido Paganelli Andrii Salnikov Martin Savko Martin Skou Andersen Oxana Smirnova Ferenc Szalai Gábor Szigeti Christian Ulrik Søttrup Adrian Taga Salman Zubair Toor Olli Tourunen Petter Urkedal Wenjing Wu Anders Wäänänen Thomas Zangerl . University of Copenhagen (Denmark) NORDUnet - Nordic Infrastructure for Research and Education (Denmark) CSC - IT Center for Science Ltd (Finland) University of Lübeck (Germany) NIIFI - National Information Infrastructure Development Institute (Hungary) University of Oslo (Norway) NordForsk (Norway) Pavol Jozef Šafárik University in Košice (Slovakia) Linköping University (Sweden) Lund University (Sweden) Royal Institute of Technology (Sweden) Uppsala University (Sweden) Taras Shevchenko National University of Kyiv (Ukraine) License: Apache-2.0 Files: src/hed/acc/EMIES/schema/GLUE2.xsd Copyright: 2011 Sergio Andreozzi (EGI.eu) Balazs Konya (Lund University) Shiraz Memon (Forschungszentrum Juelich GmbH) Paul Millar (DESY) Adrian Taga (Oslo University) License: Apache-2.0 Files: src/hed/libs/data/cache-clean Copyright: 2008 Niklas Edmundsson, Tomas Ögren, David Cameron License: Apache-2.0 Files: src/services/a-rex/infoproviders/glite-info-provider-ldap Copyright: Members of the EGEE Collaboration 2004 License: Apache-2.0 Files: src/services/acix/core/hashes.py Copyright: Arash Partow - 2002 License: CPL Free use of the General Purpose Hash Function Algorithms Library is permitted under the guidelines and in accordance with the most current version of the Common Public License.
http://www.opensource.org/licenses/cpl1.0.php Files: src/external/cJSON/cJSON.c src/external/cJSON/cJSON.h Copyright: 2009 Dave Gamble License: MIT Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: . The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. . THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. License: Apache-2.0 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at . http://www.apache.org/licenses/LICENSE-2.0 . Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. . On Debian systems, the complete text of the Apache version 2.0 license can be found in /usr/share/common-licenses/Apache-2.0. nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-gridftpd.arc-gridftpd.default.enable0000644000000000000000000000013214152153376030267 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.968631088 nordugrid-arc-6.14.0/debian/nordugrid-arc-gridftpd.arc-gridftpd.default.enable0000644000175000002070000000022114152153376030247 0ustar00mockbuildmock00000000000000# To enable arc-gridftpd, i.e. to indicate that a readily usable # configuration is in place, comment out or delete the # following line. RUN=no nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-datadelivery-service.logrotate0000644000000000000000000000013214152153376027361 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.984631329 nordugrid-arc-6.14.0/debian/nordugrid-arc-datadelivery-service.logrotate0000644000175000002070000000034014152153376027343 0ustar00mockbuildmock00000000000000/var/log/arc/datadelivery-service.log { missingok compress delaycompress daily rotate 14 create postrotate kill -HUP `cat /run/arched-datadelivery-service.pid 2> /dev/null` 2> /dev/null || true endscript } nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-infosys-ldap.arc-infosys-ldap.default.ena0000644000000000000000000000013214152153376031216 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.969631103 nordugrid-arc-6.14.0/debian/nordugrid-arc-infosys-ldap.arc-infosys-ldap.default.enable0000644000175000002070000000022514152153376031665 0ustar00mockbuildmock00000000000000# To enable arc-infosys-ldap, i.e. 
to indicate that a readily usable # configuration is in place, comment out or delete the # following line. RUN=no nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-plugins-needed.install0000644000000000000000000000013214152153376025617 xustar000000000000000030 mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.956630908 nordugrid-arc-6.14.0/debian/nordugrid-arc-plugins-needed.install0000644000175000002070000000333614152153376025611 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/arc/libaccBroker.so debian/tmp/usr/lib/arc/libaccEMIES.so debian/tmp/usr/lib/arc/libaccJobDescriptionParser.so debian/tmp/usr/lib/arc/libaccARCHERY.so debian/tmp/usr/lib/arc/libaccLDAP.so debian/tmp/usr/lib/arc/test/libaccTEST.so debian/tmp/usr/lib/arc/libarcshclegacy.so debian/tmp/usr/lib/arc/libarcshcotokens.so debian/tmp/usr/lib/arc/libarcshc.so debian/tmp/usr/lib/arc/libdmcfile.so debian/tmp/usr/lib/arc/libdmchttp.so debian/tmp/usr/lib/arc/libdmcldap.so debian/tmp/usr/lib/arc/libdmcsrm.so debian/tmp/usr/lib/arc/libdmcrucio.so debian/tmp/usr/lib/arc/libdmcacix.so debian/tmp/usr/lib/arc/libidentitymap.so debian/tmp/usr/lib/arc/libarguspdpclient.so debian/tmp/usr/lib/arc/libmcchttp.so debian/tmp/usr/lib/arc/libmccmsgvalidator.so debian/tmp/usr/lib/arc/libmccsoap.so debian/tmp/usr/lib/arc/libmcctcp.so debian/tmp/usr/lib/arc/libmcctls.so debian/tmp/usr/lib/arc/libaccBroker.apd debian/tmp/usr/lib/arc/libaccEMIES.apd debian/tmp/usr/lib/arc/libaccJobDescriptionParser.apd debian/tmp/usr/lib/arc/libaccARCHERY.apd debian/tmp/usr/lib/arc/libaccLDAP.apd debian/tmp/usr/lib/arc/test/libaccTEST.apd debian/tmp/usr/lib/arc/libarcshclegacy.apd debian/tmp/usr/lib/arc/libarcshcotokens.apd debian/tmp/usr/lib/arc/libarcshc.apd debian/tmp/usr/lib/arc/libdmcfile.apd debian/tmp/usr/lib/arc/libdmchttp.apd debian/tmp/usr/lib/arc/libmccmsgvalidator.apd debian/tmp/usr/lib/arc/libdmcldap.apd debian/tmp/usr/lib/arc/libdmcsrm.apd debian/tmp/usr/lib/arc/libdmcrucio.apd debian/tmp/usr/lib/arc/libdmcacix.apd debian/tmp/usr/lib/arc/libidentitymap.apd debian/tmp/usr/lib/arc/libarguspdpclient.apd debian/tmp/usr/lib/arc/libmcchttp.apd debian/tmp/usr/lib/arc/libmccsoap.apd debian/tmp/usr/lib/arc/libmcctcp.apd debian/tmp/usr/lib/arc/libmcctls.apd nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-infosys-ldap.install0000644000000000000000000000013214152153376025324 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.948630788 nordugrid-arc-6.14.0/debian/nordugrid-arc-infosys-ldap.install0000644000175000002070000000033314152153376025310 0ustar00mockbuildmock00000000000000debian/tmp/usr/share/arc/create-bdii-config debian/tmp/usr/share/arc/create-slapd-config debian/tmp/usr/share/arc/glue-generator.pl debian/tmp/usr/share/arc/glite-info-provider-ldap debian/tmp/usr/share/arc/ldap-schema nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-plugins-internal.install0000644000000000000000000000013214152153376026207 xustar000000000000000030 mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.954630878 nordugrid-arc-6.14.0/debian/nordugrid-arc-plugins-internal.install0000644000175000002070000000012314152153376026170 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/arc/libaccINTERNAL.so debian/tmp/usr/lib/arc/libaccINTERNAL.apd nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-arex.arc-arex.default.enable0000644000000000000000000000013214152153376026557 xustar000000000000000030 mtime=1638455038.265643871 30 
atime=1638455038.460646801 30 ctime=1638455103.966631058 nordugrid-arc-6.14.0/debian/nordugrid-arc-arex.arc-arex.default.enable0000644000175000002070000000021514152153376026542 0ustar00mockbuildmock00000000000000# To enable arc-arex, i.e. to indicate that a readily usable # configuration is in place, comment out or delete the # following line. RUN=no nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-infosys-ldap.postinst0000644000000000000000000000013214152153376025541 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.988631389 nordugrid-arc-6.14.0/debian/nordugrid-arc-infosys-ldap.postinst0000644000175000002070000000223514152153376025530 0ustar00mockbuildmock00000000000000#!/bin/sh set -e if [ "$1" = "configure" ] ; then APP_PROFILE=/etc/apparmor.d/usr.sbin.slapd LOCAL_APP_PROFILE=/etc/apparmor.d/local/usr.sbin.slapd if [ ! -r "$LOCAL_APP_PROFILE" ] ; then # Create the local profile if it does not yet exist tmp=`mktemp` cat <<EOM > "$tmp" # Site-specific additions and overrides for usr.sbin.slapd. # For more details, please see /etc/apparmor.d/local/README. EOM mkdir -p `dirname $LOCAL_APP_PROFILE` 2>/dev/null || true mv -f "$tmp" "$LOCAL_APP_PROFILE" chmod 644 "$LOCAL_APP_PROFILE" fi grep -q "AppArmor profile for NorduGrid ARC ARIS" "$LOCAL_APP_PROFILE" || \ cat <<EOM >> "$LOCAL_APP_PROFILE" # AppArmor profile for NorduGrid ARC ARIS START #include /etc/bdii/* r, /usr/share/arc/ldap-schema/* r, /{,var/}run/arc/infosys/bdii-slapd.conf r, /var/lib/arc/bdii/db/** rwk, /{,var/}run/arc/bdii/db/* w, # AppArmor profile for NorduGrid ARC ARIS END EOM if [ -r "$APP_PROFILE" ] ; then # Reload the profile if aa-status --enabled 2>/dev/null ; then apparmor_parser -r -T -W "$APP_PROFILE" || true fi fi fi #DEBHELPER# nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-datadelivery-service.install0000644000000000000000000000013214152153376027027 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.944630728 nordugrid-arc-6.14.0/debian/nordugrid-arc-datadelivery-service.install0000644000175000002070000000023314152153376027012 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/arc/libdatadeliveryservice.so debian/tmp/usr/lib/arc/libdatadeliveryservice.apd debian/tmp/usr/share/arc/arc-datadelivery-service-start nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-plugins-lcas-lcmaps.install0000644000000000000000000000013214152153376026572 xustar000000000000000030 mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.955630893 nordugrid-arc-6.14.0/debian/nordugrid-arc-plugins-lcas-lcmaps.install0000644000175000002070000000010214152153376026550 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/arc/arc-lcas debian/tmp/usr/lib/arc/arc-lcmaps nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-plugins-gridftpjob.install0000644000000000000000000000013214152153376026525 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.953630863 nordugrid-arc-6.14.0/debian/nordugrid-arc-plugins-gridftpjob.install0000644000175000002070000000012714152153376026512 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/arc/libaccGRIDFTPJOB.so debian/tmp/usr/lib/arc/libaccGRIDFTPJOB.apd nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-acix-index.maintscript.no-enable0000644000000000000000000000013214152153376027473 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.970631118
nordugrid-arc-6.14.0/debian/nordugrid-arc-acix-index.maintscript.no-enable0000644000175000002070000000005614152153376027461 0ustar00mockbuildmock00000000000000rm_conffile /etc/default/arc-acix-index 6.7.0 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-arex.preinst.no-enable0000644000000000000000000000013214152153376025530 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.975631193 nordugrid-arc-6.14.0/debian/nordugrid-arc-arex.preinst.no-enable0000644000175000002070000000121614152153376025515 0ustar00mockbuildmock00000000000000#!/bin/sh set -e # Version 6.7.0 starts using defaults-disabled # Remove rc.d links if upgrading from an older version and the service is # disabled in /etc/default if [ -n "$2" ] ; then if dpkg --compare-versions "$2" "<<" "6.7.0" ; then [ -r /etc/default/arc-arex ] && . /etc/default/arc-arex if [ -n "${RUN}" ] && [ "${RUN}" != "yes" ] ; then update-rc.d -f arc-arex remove >/dev/null if [ -x "/usr/bin/deb-systemd-helper" ]; then deb-systemd-helper purge 'arc-arex.service' >/dev/null || true deb-systemd-helper unmask 'arc-arex.service' >/dev/null || true fi fi fi fi #DEBHELPER# nordugrid-arc-6.14.0/debian/PaxHeaders.30264/python3-nordugrid-arc.install0000644000000000000000000000013214152153376024320 xustar000000000000000030 mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.960630968 nordugrid-arc-6.14.0/debian/python3-nordugrid-arc.install0000644000175000002070000000111714152153376024305 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/python3.*/site-packages/_arc.*so debian/tmp/usr/lib/python3.*/site-packages/arc/common.py* debian/tmp/usr/lib/python3.*/site-packages/arc/communication.py* debian/tmp/usr/lib/python3.*/site-packages/arc/compute.py* debian/tmp/usr/lib/python3.*/site-packages/arc/credential.py* debian/tmp/usr/lib/python3.*/site-packages/arc/data.py* debian/tmp/usr/lib/python3.*/site-packages/arc/delegation.py* debian/tmp/usr/lib/python3.*/site-packages/arc/loader.py* debian/tmp/usr/lib/python3.*/site-packages/arc/message.py* debian/tmp/usr/lib/python3.*/site-packages/arc/security.py* nordugrid-arc-6.14.0/debian/PaxHeaders.30264/compat0000644000000000000000000000013214152153376017774 xustar000000000000000030 mtime=1638455038.264643856 30 atime=1638455038.460646801 30 ctime=1638455103.928630487 nordugrid-arc-6.14.0/debian/compat0000644000175000002070000000000214152153376017751 0ustar00mockbuildmock000000000000009 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-arex.prerm0000644000000000000000000000013214152153376023332 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.990631419 nordugrid-arc-6.14.0/debian/nordugrid-arc-arex.prerm0000644000175000002070000000030214152153376023312 0ustar00mockbuildmock00000000000000#!/bin/sh set -e #DEBHELPER# if [ "$1" = "remove" ] ; then arcctl test-ca cleanup rmdir /etc/grid-security/certificates 2>/dev/null || : rmdir /etc/grid-security 2>/dev/null || : fi nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-gridftpd.maintscript.no-enable0000644000000000000000000000013214152153376027245 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.978631238 nordugrid-arc-6.14.0/debian/nordugrid-arc-gridftpd.maintscript.no-enable0000644000175000002070000000005414152153376027231 0ustar00mockbuildmock00000000000000rm_conffile /etc/default/arc-gridftpd 6.7.0 
nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-acix-index.preinst.no-enable0000644000000000000000000000013214152153376026622 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.971631133 nordugrid-arc-6.14.0/debian/nordugrid-arc-acix-index.preinst.no-enable0000644000175000002070000000125414152153376026611 0ustar00mockbuildmock00000000000000#!/bin/sh set -e # Version 6.7.0 starts using defaults-disabled # Remove rc.d links if upgrading from an older version and the service is # disabled in /etc/default if [ -n "$2" ] ; then if dpkg --compare-versions "$2" "<<" "6.7.0" ; then [ -r /etc/default/arc-acix-index ] && . /etc/default/arc-acix-index if [ -n "${RUN}" ] && [ "${RUN}" != "yes" ] ; then update-rc.d -f arc-acix-index remove >/dev/null if [ -x "/usr/bin/deb-systemd-helper" ]; then deb-systemd-helper purge 'arc-acix-index.service' >/dev/null || true deb-systemd-helper unmask 'arc-acix-index.service' >/dev/null || true fi fi fi fi #DEBHELPER# nordugrid-arc-6.14.0/debian/PaxHeaders.30264/libarccommon3.install0000644000000000000000000000013214152153376022706 xustar000000000000000030 mtime=1638455038.264643856 30 atime=1638455038.460646801 30 ctime=1638455103.934630577 nordugrid-arc-6.14.0/debian/libarccommon3.install0000644000175000002070000000250514152153376022675 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/libarccompute.so.* debian/tmp/usr/lib/libarccommunication.so.* debian/tmp/usr/lib/libarccommon.so.* debian/tmp/usr/lib/libarccredential.so.* debian/tmp/usr/lib/libarccredentialstore.so.* debian/tmp/usr/lib/libarccrypto.so.* debian/tmp/usr/lib/libarcdata.so.* debian/tmp/usr/lib/libarcdatastaging.so.* debian/tmp/usr/lib/libarcloader.so.* debian/tmp/usr/lib/libarcmessage.so.* debian/tmp/usr/lib/libarcsecurity.so.* debian/tmp/usr/lib/libarcotokens.so.* debian/tmp/usr/lib/libarcinfosys.so.* debian/tmp/usr/lib/libarcwsaddressing.so.* debian/tmp/usr/lib/libarcwssecurity.so.* debian/tmp/usr/lib/libarcxmlsec.so.* debian/tmp/usr/lib/arc/libmodcrypto.so debian/tmp/usr/lib/arc/libmodcredential.so debian/tmp/usr/lib/arc/libmodcrypto.apd debian/tmp/usr/lib/arc/libmodcredential.apd debian/tmp/usr/lib/arc/arc-file-access debian/tmp/usr/lib/arc/arc-hostname-resolver debian/tmp/usr/lib/arc/DataStagingDelivery debian/tmp/usr/lib/arc/arcconfig-parser debian/tmp/usr/lib/arc/arc-dmc debian/tmp/usr/lib/python?.*/site-packages/arc/paths.py* debian/tmp/usr/lib/python?.*/site-packages/arc/paths_dist.py* debian/tmp/usr/lib/python?.*/site-packages/arc/utils debian/tmp/usr/share/arc/arc.parser.defaults debian/tmp/usr/share/arc/schema debian/tmp/usr/share/locale/*/LC_MESSAGES/nordugrid-arc.mo debian/tmp/usr/share/arc/test-jobs/test-job-* nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-hed.preinst.no-enable0000644000000000000000000000013214152153376025331 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.980631269 nordugrid-arc-6.14.0/debian/nordugrid-arc-hed.preinst.no-enable0000644000175000002070000000120414152153376025313 0ustar00mockbuildmock00000000000000#!/bin/sh set -e # Version 6.7.0 starts using defaults-disabled # Remove rc.d links if upgrading from an older version and the service is # disabled in /etc/default if [ -n "$2" ] ; then if dpkg --compare-versions "$2" "<<" "6.7.0" ; then [ -r /etc/default/arched ] && . 
/etc/default/arched if [ -n "${RUN}" ] && [ "${RUN}" != "yes" ] ; then update-rc.d -f arched remove >/dev/null if [ -x "/usr/bin/deb-systemd-helper" ]; then deb-systemd-helper purge 'arched.service' >/dev/null || true deb-systemd-helper unmask 'arched.service' >/dev/null || true fi fi fi fi #DEBHELPER# nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-gridftpd.install0000644000000000000000000000013214152153376024517 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.946630758 nordugrid-arc-6.14.0/debian/nordugrid-arc-gridftpd.install0000644000175000002070000000027514152153376024510 0ustar00mockbuildmock00000000000000debian/tmp/usr/sbin/gridftpd debian/tmp/usr/share/man/man8/gridftpd.8 debian/tmp/usr/lib/arc/filedirplugin.* debian/tmp/usr/lib/arc/jobsplugin.* debian/tmp/usr/share/arc/arc-gridftpd-start nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-dev.install0000644000000000000000000000013214152153376023472 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.945630743 nordugrid-arc-6.14.0/debian/nordugrid-arc-dev.install0000644000175000002070000000050314152153376023455 0ustar00mockbuildmock00000000000000debian/tmp/usr/include/arc debian/tmp/usr/lib/lib*.so debian/tmp/usr/bin/wsdl2hed debian/tmp/usr/share/man/man1/wsdl2hed.1 debian/tmp/usr/bin/arcplugin debian/tmp/usr/share/man/man1/arcplugin.1 debian/tmp/usr/share/arc/examples/sdk/*.cpp debian/tmp/usr/share/arc/examples/sdk/*.h debian/tmp/usr/share/arc/examples/sdk/*.py nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-datadelivery-service.arc-datadelivery-ser0000644000000000000000000000031214152153376031370 xustar0000000000000000112 path=nordugrid-arc-6.14.0/debian/nordugrid-arc-datadelivery-service.arc-datadelivery-service.default.enable 30 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.967631073 nordugrid-arc-6.14.0/debian/nordugrid-arc-datadelivery-service.arc-datadelivery-service.default.enab0000644000175000002070000000023514152153376034553 0ustar00mockbuildmock00000000000000# To enable arc-datadelivery-service, i.e. to indicate that a readily usable # configuration is in place, comment out or delete the # following line. RUN=no nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-acix-scanner.maintscript.no-enable0000644000000000000000000000013214152153376030015 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.972631148 nordugrid-arc-6.14.0/debian/nordugrid-arc-acix-scanner.maintscript.no-enable0000644000175000002070000000006014152153376027776 0ustar00mockbuildmock00000000000000rm_conffile /etc/default/arc-acix-scanner 6.7.0 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-infosys-ldap.preinst.no-enable0000644000000000000000000000013214152153376027201 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.982631298 nordugrid-arc-6.14.0/debian/nordugrid-arc-infosys-ldap.preinst.no-enable0000644000175000002070000000126614152153376027173 0ustar00mockbuildmock00000000000000#!/bin/sh set -e # Version 6.7.0 starts using defaults-disabled # Remove rc.d links if upgrading from an older version and the service is # disabled in /etc/default if [ -n "$2" ] ; then if dpkg --compare-versions "$2" "<<" "6.7.0" ; then [ -r /etc/default/arc-infosys-ldap ] && . 
/etc/default/arc-infosys-ldap if [ -n "${RUN}" ] && [ "${RUN}" != "yes" ] ; then update-rc.d -f arc-infosys-ldap remove >/dev/null if [ -x "/usr/bin/deb-systemd-helper" ]; then deb-systemd-helper purge 'arc-infosys-ldap.service' >/dev/null || true deb-systemd-helper unmask 'arc-infosys-ldap.service' >/dev/null || true fi fi fi fi #DEBHELPER# nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-acix-index.install0000644000000000000000000000013214152153376024745 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.936630607 nordugrid-arc-6.14.0/debian/nordugrid-arc-acix-index.install0000644000175000002070000000015214152153376024730 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/python?.*/site-packages/acix/indexserver debian/tmp/usr/share/arc/arc-acix-index-start nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-monitor.install0000644000000000000000000000013214152153376024403 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.949630803 nordugrid-arc-6.14.0/debian/nordugrid-arc-monitor.install0000644000175000002070000000011514152153376024365 0ustar00mockbuildmock00000000000000debian/tmp/usr/share/arc/monitor/* debian/tmp/usr/share/man/man7/monitor.7* nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-plugins-python.install0000644000000000000000000000013214152153376025714 xustar000000000000000030 mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.957630923 nordugrid-arc-6.14.0/debian/nordugrid-arc-plugins-python.install0000644000175000002070000000041714152153376025703 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/arc/libaccPythonBroker.so debian/tmp/usr/lib/arc/libpythonservice.so debian/tmp/usr/lib/arc/libaccPythonBroker.apd debian/tmp/usr/lib/arc/libpythonservice.apd debian/tmp/usr/share/arc/examples/PythonBroker debian/tmp/usr/share/arc/examples/echo_python nordugrid-arc-6.14.0/debian/PaxHeaders.30264/README.source0000644000000000000000000000013214152153376020745 xustar000000000000000030 mtime=1638455038.264643856 30 atime=1638455038.460646801 30 ctime=1638455103.992631449 nordugrid-arc-6.14.0/debian/README.source0000644000175000002070000000130614152153376020732 0ustar00mockbuildmock00000000000000The source code presented as .orig.tar.gz to Debian is functionally complete. The sources of all images shall be distributed with it. There is, however, the possibility that some files exist in the subversion repository that are not distributed further. This shall be considered a regular consequence of the development process, i.e. the Debian packages are not expected to be built from the development branch. The upstream developers, who are also maintaining this package, invite everyone to contribute actively to the future development of the ARC middleware and suggest inspecting http://svn.nordugrid.org for first steps.
-- Steffen Moeller Thu, 14 Jan 2010 12:13:30 +0000 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/README.Debian0000644000000000000000000000013214152153376020627 xustar000000000000000030 mtime=1638455038.264643856 30 atime=1638455038.460646801 30 ctime=1638455103.991631434 nordugrid-arc-6.14.0/debian/README.Debian0000644000175000002070000000361114152153376020615 0ustar00mockbuildmock00000000000000nordugrid-arc ============= Open Source, Linux distributions and grid computing --------------------------------------------------- Grid Computing is all about having a community that is full of trust in the integrity of its contributors. Everything is logged - but you would not expect anyone intentionally evil amongst yourselves. The Debian Society is a forerunner in the formal representation of such collaborative flocks of individuals and is much respected throughout the scientific community for its achievement that maintains most of today's computational grids. The development of this second generation of the Advanced Resource Connector was mostly funded by the EU project "KnowARC". The aim to appeal to smaller and/or more heterogeneous communities than the traditional High Energy Physics is key to the project. It was foreseen from the beginning to disseminate the development to the Linux community. The developers of ARC are found on the mailing list of NorduGrid (http://www.nordugrid.org), where the project has its roots. You may also be interested in the wiki pages (http://wiki.nordugrid.org) for a summary of first steps for you to adopt the technology. If you are interested in preparing your own Campus Grid, are working in a larger company with CPU time to harvest for your computations, or just want to join with your own cluster, then please join in. Comments on the packaging ------------------------- ARC-1 was developed with Debian in mind. No special adaptations were required. For PDF generation, doxygen needs the texlive-extra-utils, texlive-latex-base, texlive-latex-recommended and texlive-latex-extra packages, which might come as a surprise to some. -- Mattias Ellert, Steffen Möller, Balazs Konya, Farid Ould-Saada, Anders Wäänänen, Aleksander Konstantinov, Peter Stefan and all other contributors of the ARC grid middleware.
Wed, 09 Dec 2009 13:34:52 +0100 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/watch0000644000000000000000000000013214152153376017617 xustar000000000000000030 mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.932630547 nordugrid-arc-6.14.0/debian/watch0000644000175000002070000000042414152153376017604 0ustar00mockbuildmock00000000000000version = 3 http://download.nordugrid.org/packages/nordugrid-arc/releases/(\d\.\d\.\d)/src/nordugrid-arc-(\d\.\d\.\d)\.tar\.gz debian uupdate #opts=pasv ftp://download.nordugrid.org/packages/nordugrid-arc/releases/([\d\.]+)/src/nordugrid-arc-([\d\.]+)\.tar\.gz debian uupdate nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-arex-python-lrms.install0000644000000000000000000000013214152153376026145 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.941630683 nordugrid-arc-6.14.0/debian/nordugrid-arc-arex-python-lrms.install0000644000175000002070000000025414152153376026133 0ustar00mockbuildmock00000000000000debian/tmp/usr/share/arc/job_script.stubs debian/tmp/usr/share/arc/SLURMPYmod.pm debian/tmp/usr/lib/arc/arc-sshfs-mount debian/tmp/usr/lib/python?.*/site-packages/arc/lrms nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-infosys-ldap.postrm0000644000000000000000000000013214152153376025202 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.989631404 nordugrid-arc-6.14.0/debian/nordugrid-arc-infosys-ldap.postrm0000644000175000002070000000144614152153376025174 0ustar00mockbuildmock00000000000000#!/bin/sh set -e #DEBHELPER# if [ "$1" = "purge" ] ; then APP_PROFILE=/etc/apparmor.d/usr.sbin.slapd LOCAL_APP_PROFILE=/etc/apparmor.d/local/usr.sbin.slapd if [ -r "$LOCAL_APP_PROFILE" ] ; then sed '/# AppArmor profile for NorduGrid ARC ARIS START/,/# AppArmor profile for NorduGrid ARC ARIS END/d' -i "$LOCAL_APP_PROFILE" fi if [ ! 
-r "$APP_PROFILE" ] ; then if [ -r "$LOCAL_APP_PROFILE" ] ; then if [ -z "`sed '/^#/d' $LOCAL_APP_PROFILE`" ] ; then rm -f "$LOCAL_APP_PROFILE" || true fi fi rmdir /etc/apparmor.d/local 2>/dev/null || true rmdir /etc/apparmor.d 2>/dev/null || true fi if [ -r "$APP_PROFILE" ] ; then # Reload the profile if aa-status --enabled 2>/dev/null ; then apparmor_parser -r -T -W "$APP_PROFILE" || true fi fi fi nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-wn.lintian-overrides0000644000000000000000000000013214152153376025330 xustar000000000000000030 mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.991631434 nordugrid-arc-6.14.0/debian/nordugrid-arc-wn.lintian-overrides0000644000175000002070000000010614152153376025312 0ustar00mockbuildmock00000000000000nordugrid-arc-wn: setuid-binary usr/bin/arc-job-cgroup 4755 root/root nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-arex.dirs0000644000000000000000000000013214152153376023146 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.963631013 nordugrid-arc-6.14.0/debian/nordugrid-arc-arex.dirs0000644000175000002070000000006214152153376023131 0ustar00mockbuildmock00000000000000var/spool/arc var/spool/arc/ssm var/spool/arc/urs nordugrid-arc-6.14.0/debian/PaxHeaders.30264/control0000644000000000000000000000013214152153376020171 xustar000000000000000030 mtime=1638455038.264643856 30 atime=1638455038.460646801 30 ctime=1638455103.929630502 nordugrid-arc-6.14.0/debian/control0000644000175000002070000005265414152153376020172 0ustar00mockbuildmock00000000000000Source: nordugrid-arc Section: net Priority: optional Maintainer: Mattias Ellert Uploaders: Anders Waananen Build-Depends: debhelper (>= 9), dh-autoreconf, autopoint, dh-python | python-support, dh-systemd | debhelper (>= 9.20160709) | debhelper (<< 9.20130630), libxml2-dev (>= 2.4.0), libssl-dev, libglibmm-2.4-dev, libltdl-dev, libldap2-dev, uuid-dev, libcppunit-dev, pkg-config, libdb++-dev, libxmlsec1-dev (>= 1.2.4), libglobus-common-dev, libglobus-gssapi-gsi-dev, libglobus-ftp-client-dev, libglobus-ftp-control-dev, openssl, swig, libnss3-dev, pylint, libjson-xs-perl, libxml-simple-perl, libdbi-perl, libsqlite3-dev (>= 3.6), libldns-dev, libsystemd-dev [linux-any] | debhelper (<< 9.20150101) [linux-any], libinline-python-perl | dpkg (<< 1.18), bash-completion, python-dev | libinline-python-perl (>= 0.56-2~), python-twisted | libinline-python-perl (>= 0.56-2~), python-openssl | libinline-python-perl (>= 0.56-2~), python3-dev, python3-twisted | libinline-python-perl (<< 0.56-2~) | dpkg (<< 1.18), python3-openssl | libinline-python-perl (<< 0.56-2~) | dpkg (<< 1.18) XS-Python-Version: current Standards-Version: 4.4.1 Homepage: http://www.nordugrid.org Package: libarccommon3 Replaces: nordugrid-arc-hed (<< 1.0.1~rc2~), nordugrid-arc-arex (<< 2.0.1~), libarccommon0, libarccommon1, libarccommon2 Conflicts: nordugrid-arc-chelonia (<< 2.0.0~), nordugrid-arc-hopi (<< 2.0.0~), nordugrid-arc-isis (<< 2.0.0~), nordugrid-arc-janitor (<< 2.0.0~), nordugrid-arc-doxygen (<< 4.0.0~), nordugrid-arc-arcproxyalt (<< 6.0.0~), nordugrid-arc-java (<< 6.0.0~), nordugrid-arc-egiis (<< 6.0.0~) Breaks: nordugrid-arc-hed (<< 1.0.1~rc2~), nordugrid-arc-arex (<< 2.0.1~), libarccommon0, libarccommon1, libarccommon2 Architecture: any Section: libs Depends: ${shlibs:Depends}, ${misc:Depends}, ${python:Depends}, ${python3:Depends}, openssl XB-Python-Version: ${python:Versions} Description: Advanced Resource Connector Middleware NorduGrid is 
a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . The ARC middleware is a software solution that uses distributed computing technologies to enable sharing and federation of computing resources across different administrative and application domains. ARC is used to create distributed infrastructures of various scope and complexity, from campus to national and global deployments. Package: nordugrid-arc-client Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}), nordugrid-arc-plugins-needed (= ${binary:Version}) Description: ARC command line clients NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . This client package contains all the CLI tools that are needed to operate with x509 proxies, submit and manage jobs and handle data transfers. Package: nordugrid-arc-hed Replaces: nordugrid-arc-client (<< 1.0.1~rc2~) Breaks: nordugrid-arc-client (<< 1.0.1~rc2~) Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}), lsb-base (>= 3.0-6) Description: ARC Hosting Environment Daemon NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . The ARC Hosting Environment Daemon (HED) is a Web Service container for ARC services. Package: nordugrid-arc-gridftpd Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}), nordugrid-arc-arcctl-service (= ${source:Version}), nordugrid-arc-plugins-gridftp (= ${binary:Version}), lsb-base (>= 3.0-6) Description: ARC GridFTP server NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . This package contains the ARC gridftp server which can be used as a custom job submission interface in front of an ARC enabled computing cluster or as a low-level dedicated gridftp file server. Package: nordugrid-arc-datadelivery-service Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}), nordugrid-arc-hed (= ${binary:Version}), nordugrid-arc-arcctl-service (= ${source:Version}), nordugrid-arc-plugins-needed (= ${binary:Version}), lsb-base (>= 3.0-6) Description: ARC data delivery service NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . This package contains the ARC data delivery service. Package: nordugrid-arc-infosys-ldap Provides: nordugrid-arc-ldap-infosys, nordugrid-arc-aris Replaces: nordugrid-arc-ldap-infosys (<< 6.0.0~), nordugrid-arc-aris (<< 6.0.0~), nordugrid-arc-ldap-egiis (<< 6.0.0~) Conflicts: nordugrid-arc-ldap-infosys (<< 6.0.0~), nordugrid-arc-aris (<< 6.0.0~), nordugrid-arc-ldap-egiis (<< 6.0.0~) Architecture: all Depends: ${misc:Depends}, ${perl:Depends}, slapd, bdii, nordugrid-arc-arcctl-service (= ${source:Version}), lsb-base (>= 3.0-6) Description: ARC LDAP-based information services NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . This package contains the ARC information services relying on BDII and LDAP technologies to publish ARC CE information according to various LDAP schemas. 
Please note that the information collectors are part of another package, the nordugrid-arc-arex. Package: nordugrid-arc-monitor Replaces: nordugrid-arc-ldap-monitor (<< 6.0.0~), nordugrid-arc-ws-monitor (<< 6.0.0~) Conflicts: nordugrid-arc-ldap-monitor (<< 6.0.0~), nordugrid-arc-ws-monitor (<< 6.0.0~) Architecture: all Depends: ${misc:Depends}, php-common | php5-common, php-ldap | php5-ldap, php-gd | php5-gd Description: ARC LDAP monitor web application NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . This package contains the PHP web application that is used to set up a web-based monitor which pulls information from the LDAP information system and visualizes it. Package: nordugrid-arc-arcctl Replaces: libarccommon3 (<< 6.5.0~), nordugrid-arc-arex (<< 6.5.0~) Breaks: libarccommon3 (<< 6.5.0~), nordugrid-arc-arex (<< 6.5.0~) Architecture: all Depends: ${misc:Depends}, ${python:Depends}, ${python3:Depends}, libarccommon3 (>= ${source:Version}) XB-Python-Version: ${python:Versions} Description: ARC Control Tool NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . This package contains the ARC Control Tool with a basic set of control modules suitable for both the server and client side. Package: nordugrid-arc-arcctl-service Replaces: libarccommon3 (<< 6.5.0~), nordugrid-arc-arcctl (<< 6.6.0~), nordugrid-arc-arex (<< 6.6.0~) Breaks: libarccommon3 (<< 6.5.0~), nordugrid-arc-arcctl (<< 6.6.0~), nordugrid-arc-arex (<< 6.6.0~) Architecture: all Depends: ${misc:Depends}, ${python:Depends}, ${python3:Depends}, libarccommon3 (>= ${source:Version}), nordugrid-arc-arcctl (= ${source:Version}) XB-Python-Version: ${python:Versions} Description: ARC Control Tool - service control modules NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . This package contains the service control modules for the ARC Control Tool that allow working with the server-side configuration and managing ARC services. Package: nordugrid-arc-arex Provides: nordugrid-arc-cache-service, nordugrid-arc-candypond Replaces: nordugrid-arc-cache-service (<< 6.0.0~), nordugrid-arc-candypond (<< 6.0.0~), nordugrid-arc-aris (<< 6.0.0~), nordugrid-arc-infosys-ldap (<< 6.3.0~), libarccommon3 (<< 6.5.0~) Conflicts: nordugrid-arc-cache-service (<< 6.0.0~), nordugrid-arc-candypond (<< 6.0.0~) Breaks: nordugrid-arc-aris (<< 6.0.0~), nordugrid-arc-infosys-ldap (<< 6.3.0~), libarccommon3 (<< 6.5.0~) Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, ${perl:Depends}, ${python:Depends}, ${python3:Depends}, libarccommon3 (= ${binary:Version}), nordugrid-arc-hed (= ${binary:Version}), nordugrid-arc-arcctl (= ${source:Version}), nordugrid-arc-arcctl-service (= ${source:Version}), nordugrid-arc-plugins-needed (= ${binary:Version}), libjson-xs-perl, libxml-simple-perl, ${pydeps:Depends}, lsb-base (>= 3.0-6) XB-Python-Version: ${python:Versions} Description: ARC Resource-coupled EXecution service NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . The ARC Resource-coupled EXecution service (AREX) is the Computing Element of the ARC middleware.
AREX offers a full-featured middle layer to manage computational tasks including interfacing to local batch systems, taking care of complex environments such as data staging, data caching, software environment provisioning, information collection and exposure, accounting information gathering and publishing. Package: nordugrid-arc-arex-python-lrms Replaces: nordugrid-arc-arex (<< 6.0.0~) Breaks: nordugrid-arc-arex (<< 6.0.0~) Architecture: any Depends: ${misc:Depends}, ${python:Depends}, ${python3:Depends}, libinline-python-perl, nordugrid-arc-arex (= ${binary:Version}), ${pydeps:Depends} XB-Python-Version: ${python:Versions} Description: ARC Resource-coupled EXecution service - Python LRMS backends NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . The Python LRMS backends are a new implementation of the AREX LRMS backend scripts written in Python. Currently only the SLURM LRMS is supported. It is released as a technology preview. Package: nordugrid-arc-community-rtes Architecture: all Depends: ${misc:Depends}, ${python:Depends}, ${python3:Depends}, nordugrid-arc-arex (>= ${source:Version}), nordugrid-arc-arcctl (= ${source:Version}), ${pydeps:Depends} XB-Python-Version: ${python:Versions} Description: ARC Resource-coupled EXecution service - Community RTEs framework NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . Community RTEs is the framework that allows deploying software packages (tarballs, containers, etc) provided by trusted communities to ARC CE using simple arcctl commands. It is released as a technology preview. Package: nordugrid-arc-plugins-needed Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}) Description: ARC base plugins NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . ARC base plugins. This includes the Message Chain Components (MCCs) and Data Manager Components (DMCs). Package: nordugrid-arc-plugins-globus Architecture: all Section: oldlibs Depends: ${misc:Depends}, nordugrid-arc-plugins-gridftp (>= ${source:Version}), nordugrid-arc-plugins-gridftpjob (>= ${source:Version}), nordugrid-arc-plugins-lcas-lcmaps (>= ${source:Version}) Description: ARC Globus plugins (transitional package) NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . ARC Globus plugins. This compat metapackage brings all Globus dependent plugins at once, including: Data Manager Components (DMCs), Client plugin and LCAS/LCMAPS tools. . This is a transitional package. It is meant to allow smooth transition and will be removed from the upcoming releases. Package: libarcglobusutils3 Replaces: nordugrid-arc-plugins-globus (<< 6.5.0~) Breaks: nordugrid-arc-plugins-globus (<< 6.5.0~) Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}) Description: ARC Globus plugins common libraries NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . ARC Globus plugins common libraries package includes the bundle of necessary Globus libraries needed for all other globus-dependent ARC components. 
Package: nordugrid-arc-plugins-gridftp Replaces: nordugrid-arc-plugins-globus (<< 6.5.0~) Breaks: nordugrid-arc-plugins-globus (<< 6.5.0~) Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}), libarcglobusutils3 (= ${binary:Version}) Description: ARC Globus dependent DMCs NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . ARC Globus GridFTP plugins. These allow access to data through the gridftp protocol. Package: nordugrid-arc-plugins-lcas-lcmaps Replaces: nordugrid-arc-plugins-globus (<< 6.5.0~) Breaks: nordugrid-arc-plugins-globus (<< 6.5.0~) Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}), libarcglobusutils3 (= ${binary:Version}) Description: ARC LCAS/LCMAPS plugins NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . ARC LCAS/LCMAPS tools allow configuring ARC CE to use LCAS/LCMAPS services for authorization and mapping. Package: nordugrid-arc-plugins-gridftpjob Replaces: nordugrid-arc-plugins-globus (<< 6.5.0~) Breaks: nordugrid-arc-plugins-globus (<< 6.5.0~) Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}), libarcglobusutils3 (= ${binary:Version}), nordugrid-arc-plugins-gridftp (= ${binary:Version}) Description: ARC GRIDFTPJOB client plugin NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . ARC GRIDFTPJOB plugin allows submitting jobs via the gridftpd interface. Package: nordugrid-arc-plugins-internal Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}), nordugrid-arc-arex (= ${binary:Version}) Description: ARC internal plugin NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . The ARC internal plugin. A special interface aimed for restrictive HPC sites, to be used with a local installation of the ARC Control Tower. Package: nordugrid-arc-plugins-arcrest Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}) Description: ARC REST plugin NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . ARC plugin for ARC REST interface technology preview. Package: nordugrid-arc-plugins-python Replaces: nordugrid-arc-python (<< 6.6.0~) Breaks: nordugrid-arc-python (<< 6.6.0~) Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}), ${pydeps:Depends} Description: ARC Python dependent plugin NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . ARC plugins dependent on Python. Package: nordugrid-arc-acix-core Architecture: all Depends: ${misc:Depends}, ${python:Depends}, ${python3:Depends}, ${pydeps:Depends} XB-Python-Version: ${python:Versions} Description: ARC cache index - core components NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . Core components of the ARC Cache Index (ACIX). 
Package: nordugrid-arc-acix-scanner Provides: nordugrid-arc-acix-cache Replaces: nordugrid-arc-acix-cache (<< 6.0.0~) Conflicts: nordugrid-arc-acix-cache (<< 6.0.0~) Architecture: all Depends: ${misc:Depends}, ${python:Depends}, ${python3:Depends}, libarccommon3 (>= ${source:Version}), nordugrid-arc-acix-core (= ${source:Version}), nordugrid-arc-arcctl-service (= ${source:Version}), lsb-base (>= 3.0-6) XB-Python-Version: ${python:Versions} Description: ARC cache index - scanner server NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . Cache scanner component of the ARC Cache Index (ACIX), usually installed alongside A-REX. This component collects information on the content of an A-REX cache. Package: nordugrid-arc-acix-index Architecture: all Depends: ${misc:Depends}, ${python:Depends}, ${python3:Depends}, libarccommon3 (>= ${source:Version}), nordugrid-arc-acix-core (= ${source:Version}), nordugrid-arc-arcctl-service (= ${source:Version}), lsb-base (>= 3.0-6) XB-Python-Version: ${python:Versions} Description: ARC cache index - index server NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . Index server component of the ARC Cache Index (ACIX), usually installed independently of any A-REX installation. This component pulls cache content from ACIX cache scanner servers and can be queried by clients for the location of cached files. Package: nordugrid-arc-dev Replaces: nordugrid-arc-python (<< 6.6.0~) Breaks: nordugrid-arc-python (<< 6.6.0~) Architecture: any Section: libdevel Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}), libxml2-dev (>= 2.4.0), libssl-dev, libglibmm-2.4-dev Description: ARC development files NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . Header files and libraries needed to develop applications using ARC. Package: nordugrid-arc-python Architecture: all Section: oldlibs Depends: ${misc:Depends}, nordugrid-arc-plugins-python (>= ${source:Version}), Description: ARC Python wrapper (transitional package) NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . This is a transitional package. It is meant to allow smooth transition and will be removed from the upcoming releases. Package: python-nordugrid-arc Replaces: nordugrid-arc-python (<< 6.6.0~) Breaks: nordugrid-arc-python (<< 6.6.0~) Architecture: any Section: python Depends: ${shlibs:Depends}, ${misc:Depends}, ${python:Depends}, libarccommon3 (= ${binary:Version}) XB-Python-Version: ${python:Versions} Description: ARC Python 2 wrapper NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . Python 2 bindings for ARC. Package: python3-nordugrid-arc Provides: ${python3:Provides} Replaces: nordugrid-arc-python (<< 6.6.0~) Breaks: nordugrid-arc-python (<< 6.6.0~) Architecture: any Section: python Depends: ${shlibs:Depends}, ${misc:Depends}, ${python3:Depends}, libarccommon3 (= ${binary:Version}) Description: ARC Python 3 wrapper NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . Python 3 bindings for ARC. 
Package: nordugrid-arc-nordugridmap Provides: nordugrid-arc-gridmap-utils Replaces: nordugrid-arc-gridmap-utils (<< 6.0.0~) Conflicts: nordugrid-arc-gridmap-utils (<< 6.0.0~) Architecture: all Depends: liblwp-protocol-https-perl, libsoap-lite-perl, liburi-perl, libxml-dom-perl, ${perl:Depends}, ${misc:Depends} Recommends: cron Description: ARC's nordugridmap tool NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . A simple tool to fetch list of users and eventually generate gridmap files. Package: nordugrid-arc-test-utils Provides: nordugrid-arc-misc-utils Replaces: nordugrid-arc-misc-utils (<< 6.0.0~) Conflicts: nordugrid-arc-misc-utils (<< 6.0.0~) Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, libarccommon3 (= ${binary:Version}), nordugrid-arc-plugins-needed (= ${binary:Version}) Description: ARC test tools NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . This package contains a few utilities useful to test various ARC subsystems. The package is not required by users or sysadmins and it is mainly for developers. Package: nordugrid-arc-archery-manage Architecture: all Depends: ${misc:Depends}, ${python:Depends}, ${python3:Depends}, ${pydeps:Depends} Description: ARCHERY administration tool NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . This package contains the archery-manage utility for administration of an ARCHERY DNS-embedded service endpoint registry. Package: nordugrid-arc-wn Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends} Description: ARC optional worker nodes components NorduGrid is a collaboration aiming at development, maintenance and support of the middleware, known as the Advanced Resource Connector (ARC). . This package contains the optional components that provide new job management features on the worker nodes (WN). 
nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-plugins-gridftp.install0000644000000000000000000000013214152153376026032 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.952630848 nordugrid-arc-6.14.0/debian/nordugrid-arc-plugins-gridftp.install0000644000175000002070000000020114152153376026010 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/arc/libdmcgridftpdeleg.so debian/tmp/usr/lib/arc/libdmcgridftpdeleg.apd debian/tmp/usr/lib/arc/arc-dmcgridftp nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-acix-scanner.install0000644000000000000000000000013214152153376025267 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.937630622 nordugrid-arc-6.14.0/debian/nordugrid-arc-acix-scanner.install0000644000175000002070000000015014152153376025250 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/python?.*/site-packages/acix/scanner debian/tmp/usr/share/arc/arc-acix-scanner-start nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-wn.install0000644000000000000000000000013214152153376023340 xustar000000000000000030 mtime=1638455038.268643916 30 atime=1638455038.460646801 30 ctime=1638455103.959630953 nordugrid-arc-6.14.0/debian/nordugrid-arc-wn.install0000644000175000002070000000004214152153376023321 0ustar00mockbuildmock00000000000000debian/tmp/usr/bin/arc-job-cgroup nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-arcctl-service.install0000644000000000000000000000013214152153376025622 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.938630638 nordugrid-arc-6.14.0/debian/nordugrid-arc-arcctl-service.install0000644000175000002070000000053114152153376025606 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/python?.*/site-packages/arc/control/Config.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/ServiceCommon.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/Services.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/OSService.py* debian/tmp/usr/lib/python?.*/site-packages/arc/control/Validator.py* nordugrid-arc-6.14.0/debian/PaxHeaders.30264/changelog0000644000000000000000000000013214152153464020436 xustar000000000000000030 mtime=1638455092.051452027 30 atime=1638455095.502503881 30 ctime=1638455103.928630487 nordugrid-arc-6.14.0/debian/changelog0000644000175000002070000002162614152153464020432 0ustar00mockbuildmock00000000000000nordugrid-arc (6.14.0-1) unstable; urgency=low * Unofficial build. 
-- Anders Waananen Thu, 02 Dec 2021 15:24:47 +0100 nordugrid-arc (4.0.0~rc2-1) unstable; urgency=low * 4.0.0 Release Candidate 2 -- Anders Waananen Thu, 07 Nov 2013 11:01:24 +0100 nordugrid-arc (4.0.0~rc1-1) unstable; urgency=low * 4.0.0 Release Candidate 1 -- Anders Waananen Tue, 29 Oct 2013 23:22:57 +0100 nordugrid-arc (3.0.3-1) unstable; urgency=low * 3.0.3 Final Release -- Anders Waananen Fri, 19 Jul 2013 12:05:50 +0200 nordugrid-arc (3.0.2-1) unstable; urgency=low * 3.0.2 Final Release -- Anders Waananen Wed, 12 Jun 2013 15:09:59 +0200 nordugrid-arc (3.0.1-1) unstable; urgency=low * 3.0.1 Final Release -- Anders Waananen Tue, 30 Apr 2013 00:47:43 +0200 nordugrid-arc (3.0.1~rc2-1) unstable; urgency=low * 3.0.1 Release Candidate 2 -- Anders Waananen Fri, 12 Apr 2013 16:56:03 +0200 nordugrid-arc (3.0.1~rc1-1) unstable; urgency=low * 3.0.1 Release Candidate 1 -- Anders Waananen Fri, 12 Apr 2013 13:50:41 +0200 nordugrid-arc (3.0.0-1) unstable; urgency=low * 3.0.0 Final Release -- Anders Waananen Fri, 22 Mar 2013 12:32:51 +0100 nordugrid-arc (3.0.0~rc5-1) unstable; urgency=low * 3.0.0 Release Candidate 5 -- Anders Waananen Wed, 06 Feb 2013 12:12:48 +0100 nordugrid-arc (3.0.0~rc4-1) unstable; urgency=low * 3.0.0 Release Candidate 4 -- Anders Waananen Sat, 02 Feb 2013 01:00:33 +0100 nordugrid-arc (3.0.0~rc3-1) unstable; urgency=low * 3.0.0 Release Candidate 3 -- Anders Waananen Wed, 30 Jan 2013 09:02:17 +0100 nordugrid-arc (3.0.0~rc2-1) unstable; urgency=low * 3.0.0 Release Candidate 2 -- Anders Waananen Mon, 28 Jan 2013 07:55:14 +0100 nordugrid-arc (3.0.0~rc1-1) unstable; urgency=low * 3.0.0 Release Candidate 1 -- Anders Waananen Thu, 06 Dec 2012 22:05:31 +0100 nordugrid-arc (2.0.1-1) unstable; urgency=low * 2.0.1 Final Release -- Anders Waananen Thu, 22 Nov 2012 23:47:19 +0100 nordugrid-arc (2.0.1rc2) unstable; urgency=low * 2.0.1rc2 Release Candidate 2 -- Anders Waananen Thu, 25 Oct 2012 13:00:02 +0200 nordugrid-arc (2.0.1rc1) unstable; urgency=low * 2.0.1rc1 Release Candidate 1 -- Anders Waananen Mon, 27 Aug 2012 13:26:30 +0200 nordugrid-arc (2.0.0-1) unstable; urgency=low * 2.0.0 Final Release -- Mattias Ellert Wed, 23 May 2012 19:27:47 +0200 nordugrid-arc (2.0.0~rc4-1) unstable; urgency=low * 2.0.0 Release Candidate 4 -- Mattias Ellert Mon, 02 Apr 2012 16:06:45 +0200 nordugrid-arc (2.0.0~rc3.1-1) unstable; urgency=low * 2.0.0 Release Candidate 3.1 -- Mattias Ellert Tue, 27 Mar 2012 10:30:23 +0200 nordugrid-arc (2.0.0~rc3-1) unstable; urgency=low * 2.0.0 Release Candidate 3 -- Mattias Ellert Mon, 05 Mar 2012 16:27:32 +0100 nordugrid-arc (2.0.0~rc2-1) unstable; urgency=low * 2.0.0 Release Candidate 2 -- Mattias Ellert Wed, 15 Feb 2012 13:54:17 +0100 nordugrid-arc (1.1.0-1) unstable; urgency=low * 1.1.0 Final Release -- Mattias Ellert Mon, 03 Oct 2011 14:30:45 +0200 nordugrid-arc (1.1.0~rc2-1) unstable; urgency=low * 1.1.0 Release Candidate 2 -- Mattias Ellert Sun, 25 Sep 2011 05:42:22 +0200 nordugrid-arc (1.1.0~rc1-1) unstable; urgency=low * 1.1.0 Release Candidate 1 -- Mattias Ellert Sun, 11 Sep 2011 20:08:33 +0200 nordugrid-arc (1.0.1-1) unstable; urgency=low * 1.0.0 Final Release -- Mattias Ellert Sat, 23 Jul 2011 09:32:53 +0200 nordugrid-arc (1.0.1~rc4-1) unstable; urgency=low * 1.0.1 Release Candidate 4 -- Mattias Ellert Tue, 19 Jul 2011 15:17:05 +0200 nordugrid-arc (1.0.1~rc1-1) unstable; urgency=low * 1.0.1 Release Candidate 1 -- Mattias Ellert Sat, 18 Jun 2011 18:29:09 +0200 nordugrid-arc (1.0.0-1) unstable; urgency=low * 1.0.0 Final Release -- Mattias Ellert Mon, 18 Apr 2011 
08:59:55 +0200 nordugrid-arc (1.0.0~b5-1) unstable; urgency=low * 1.0.0 Beta Release 5 -- Mattias Ellert Wed, 06 Apr 2011 14:08:52 +0200 nordugrid-arc (1.0.0~b4-1) unstable; urgency=low * 1.0.0 Beta Release 4 -- Mattias Ellert Wed, 23 Mar 2011 15:19:08 +0100 nordugrid-arc (1.0.0~b3-1) unstable; urgency=low * 1.0.0 Beta Release 3 -- Mattias Ellert Thu, 10 Mar 2011 17:05:28 +0100 nordugrid-arc (1.0.0~b2-1) unstable; urgency=low * 1.0.0 Beta Release 2 -- Mattias Ellert Mon, 07 Mar 2011 05:12:30 +0100 nordugrid-arc (1.0.0~b1-1) unstable; urgency=low * 1.0.0 Beta Release 1 -- Mattias Ellert Mon, 14 Feb 2011 17:19:04 +0100 nordugrid-arc-nox (1.2.1-1) unstable; urgency=low * 1.2.1 Final release -- Mattias Ellert Tue, 21 Dec 2010 22:34:02 +0100 nordugrid-arc-nox (1.2.1~rc2-1) unstable; urgency=low * 1.2.1 Release Candidate 2 -- Mattias Ellert Tue, 21 Dec 2010 09:36:46 +0100 nordugrid-arc-nox (1.2.1~rc1-1) unstable; urgency=low * 1.2.1 Release Candidate 1 -- Mattias Ellert Wed, 08 Dec 2010 15:30:37 +0100 nordugrid-arc-nox (1.2.0-1) unstable; urgency=low * 1.2.0 Final release -- Mattias Ellert Fri, 22 Oct 2010 15:25:07 +0200 nordugrid-arc-nox (1.2.0~rc2-1) unstable; urgency=low * 1.2.0 Release Candidate 2 -- Mattias Ellert Thu, 30 Sep 2010 10:11:14 +0200 nordugrid-arc-nox (1.2.0~rc1-1) unstable; urgency=low * 1.2.0 Release Candidate 1 -- Mattias Ellert Mon, 13 Sep 2010 11:14:51 +0200 nordugrid-arc-nox (1.1.0-1) unstable; urgency=low * 1.1.0 Final release -- Mattias Ellert Wed, 05 May 2010 18:31:59 +0200 nordugrid-arc-nox (1.1.0~rc6-1) unstable; urgency=low * 1.1.0 Release Candidate 6 -- Mattias Ellert Mon, 08 Mar 2010 20:36:00 +0100 nordugrid-arc-nox (1.1.0~rc5-2) unstable; urgency=low * Rebuild for Globus Toolkit 5 -- Mattias Ellert Fri, 26 Feb 2010 16:25:39 +0100 nordugrid-arc-nox (1.1.0~rc5-1) unstable; urgency=low * 1.1.0 Release Candidate 5 -- Mattias Ellert Fri, 26 Feb 2010 15:07:39 +0100 nordugrid-arc-nox (1.1.0~rc4-1) unstable; urgency=low * 1.1.0 release candidate 4 -- Mattias Ellert Wed, 24 Feb 2010 12:34:41 +0100 nordugrid-arc-nox (1.1.0~rc3-1) unstable; urgency=low * 1.1.0 release candidate 3 -- Mattias Ellert Mon, 22 Feb 2010 10:20:27 +0100 nordugrid-arc-nox (1.1.0~rc2-1) unstable; urgency=low * 1.1.0 release candidate 2 -- Mattias Ellert Mon, 15 Feb 2010 19:08:07 +0100 nordugrid-arc-nox (1.1.0~rc1-1) unstable; urgency=low * 1.1.0 release candidate 1 -- Mattias Ellert Thu, 11 Feb 2010 19:48:01 +0100 nordugrid-arc-nox (1.0.0-1) unstable; urgency=low * 1.0.0 Final release -- Mattias Ellert Sun, 29 Nov 2009 23:13:41 +0100 nordugrid-arc-nox (1.0.0~rc7-1) unstable; urgency=low * 1.0.0 release candidate 7 -- Mattias Ellert Thu, 19 Nov 2009 15:30:32 +0100 nordugrid-arc-nox (1.0.0~rc6-1) unstable; urgency=low * 1.0.0 release candidate 6 -- Mattias Ellert Thu, 12 Nov 2009 10:12:45 +0100 nordugrid-arc-nox (1.0.0~rc5-1) unstable; urgency=low * 1.0.0 release candidate 5 -- Mattias Ellert Wed, 04 Nov 2009 16:45:22 +0100 nordugrid-arc1 (0.9.4~rc4-1) unstable; urgency=low * 0.9.3 release candidate 4 -- Mattias Ellert Mon, 26 Oct 2009 23:19:55 +0100 nordugrid-arc1 (0.9.4~rc3-1) unstable; urgency=low * 0.9.3 release candidate 3 -- Mattias Ellert Thu, 22 Oct 2009 19:22:31 +0200 nordugrid-arc1 (0.9.4~rc2-1) unstable; urgency=low * 0.9.3 release candidate 2 -- Mattias Ellert Thu, 15 Oct 2009 09:04:24 +0200 nordugrid-arc1 (0.9.3-1) unstable; urgency=low * Final 0.9.3 release -- Mattias Ellert Sun, 27 Sep 2009 01:27:31 +0200 nordugrid-arc1 (0.9.3~rc3-1) unstable; urgency=low * Initial release -- Mattias 
Ellert Mon, 5 Nov 2007 10:12:49 -0400 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/libarccommon3.docs0000644000000000000000000000013214152153376022170 xustar000000000000000030 mtime=1638455038.264643856 30 atime=1638455038.460646801 30 ctime=1638455103.961630983 nordugrid-arc-6.14.0/debian/libarccommon3.docs0000644000175000002070000000015114152153376022152 0ustar00mockbuildmock00000000000000README AUTHORS NOTICE src/doc/arc.conf.reference src/doc/arc.conf.DELETED src/doc/arc.conf.DELETED-6.8.0 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-acix-scanner.preinst.no-enable0000644000000000000000000000013214152153376027144 xustar000000000000000030 mtime=1638455038.265643871 30 atime=1638455038.460646801 30 ctime=1638455103.973631163 nordugrid-arc-6.14.0/debian/nordugrid-arc-acix-scanner.preinst.no-enable0000644000175000002070000000126614152153376027136 0ustar00mockbuildmock00000000000000#!/bin/sh set -e # Version 6.7.0 starts using defaults-disabled # Remove rc.d links if upgrading from an older version and the service is # disabled in /etc/default if [ -n "$2" ] ; then if dpkg --compare-versions "$2" "<<" "6.7.0" ; then [ -r /etc/default/arc-acix-scanner ] && . /etc/default/arc-acix-scanner if [ -n "${RUN}" ] && [ "${RUN}" != "yes" ] ; then update-rc.d -f arc-acix-scanner remove >/dev/null if [ -x "/usr/bin/deb-systemd-helper" ]; then deb-systemd-helper purge 'arc-acix-scanner.service' >/dev/null || true deb-systemd-helper unmask 'arc-acix-scanner.service' >/dev/null || true fi fi fi fi #DEBHELPER# nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-infosys-ldap.maintscript.no-enable0000644000000000000000000000013214152153376030052 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.981631284 nordugrid-arc-6.14.0/debian/nordugrid-arc-infosys-ldap.maintscript.no-enable0000644000175000002070000000006014152153376030033 0ustar00mockbuildmock00000000000000rm_conffile /etc/default/arc-infosys-ldap 6.7.0 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-datadelivery-service.maintscript.no-enabl0000644000000000000000000000013214152153376031410 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.976631208 nordugrid-arc-6.14.0/debian/nordugrid-arc-datadelivery-service.maintscript.no-enable0000644000175000002070000000007014152153376031537 0ustar00mockbuildmock00000000000000rm_conffile /etc/default/arc-datadelivery-service 6.7.0 nordugrid-arc-6.14.0/debian/PaxHeaders.30264/libarcglobusutils3.install0000644000000000000000000000013214152153376023772 xustar000000000000000030 mtime=1638455038.264643856 30 atime=1638455038.460646801 30 ctime=1638455103.935630592 nordugrid-arc-6.14.0/debian/libarcglobusutils3.install0000644000175000002070000000005214152153376023754 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/libarcglobusutils.so.* nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-gridftpd.logrotate0000644000000000000000000000013214152153376025051 xustar000000000000000030 mtime=1638455038.266643886 30 atime=1638455038.460646801 30 ctime=1638455103.985631343 nordugrid-arc-6.14.0/debian/nordugrid-arc-gridftpd.logrotate0000644000175000002070000000030014152153376025027 0ustar00mockbuildmock00000000000000/var/log/arc/gridftpd.log { missingok compress delaycompress daily rotate 14 create postrotate kill -HUP `cat /run/gridftpd.pid 2> /dev/null` 2> /dev/null || true endscript } 
nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-acix-core.install0000644000000000000000000000013214152153376024566 xustar000000000000000030 mtime=1638455038.264643856 30 atime=1638455038.460646801 30 ctime=1638455103.935630592 nordugrid-arc-6.14.0/debian/nordugrid-arc-acix-core.install0000644000175000002070000000016214152153376024552 0ustar00mockbuildmock00000000000000debian/tmp/usr/lib/python?.*/site-packages/acix/__init__.py* debian/tmp/usr/lib/python?.*/site-packages/acix/core nordugrid-arc-6.14.0/debian/PaxHeaders.30264/nordugrid-arc-gridftpd.preinst.no-enable0000644000000000000000000000013214152153376026374 xustar000000000000000030 mtime=1638455038.267643901 30 atime=1638455038.460646801 30 ctime=1638455103.979631253 nordugrid-arc-6.14.0/debian/nordugrid-arc-gridftpd.preinst.no-enable0000644000175000002070000000124214152153376026360 0ustar00mockbuildmock00000000000000#!/bin/sh set -e # Version 6.7.0 starts using defaults-disabled # Remove rc.d links if upgrading from an older version and the service is # disabled in /etc/default if [ -n "$2" ] ; then if dpkg --compare-versions "$2" "<<" "6.7.0" ; then [ -r /etc/default/arc-gridftpd ] && . /etc/default/arc-gridftpd if [ -n "${RUN}" ] && [ "${RUN}" != "yes" ] ; then update-rc.d -f arc-gridftpd remove >/dev/null if [ -x "/usr/bin/deb-systemd-helper" ]; then deb-systemd-helper purge 'arc-gridftpd.service' >/dev/null || true deb-systemd-helper unmask 'arc-gridftpd.service' >/dev/null || true fi fi fi fi #DEBHELPER# nordugrid-arc-6.14.0/PaxHeaders.30264/config.guess0000644000000000000000000000013214152153420017647 xustar000000000000000030 mtime=1638455056.854923183 30 atime=1638455073.240169379 30 ctime=1638455095.730507307 nordugrid-arc-6.14.0/config.guess0000755000175000002070000013036114152153420017643 0ustar00mockbuildmock00000000000000#! /bin/sh # Attempt to guess a canonical system name. # Copyright 1992-2013 Free Software Foundation, Inc. timestamp='2013-06-10' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see . # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that # program. This Exception is an additional permission under section 7 # of the GNU General Public License, version 3 ("GPLv3"). # # Originally written by Per Bothner. # # You can get the latest version of this script from: # http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD # # Please send patches with a ChangeLog entry to config-patches@gnu.org. me=`echo "$0" | sed -e 's,.*/,,'` usage="\ Usage: $0 [OPTION] Output the configuration name of the system \`$me' is run on. Operation modes: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit Report bugs and patches to ." 
version="\ GNU config.guess ($timestamp) Originally written by Per Bothner. Copyright 1992-2013 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." help=" Try \`$me --help' for more information." # Parse command line while test $# -gt 0 ; do case $1 in --time-stamp | --time* | -t ) echo "$timestamp" ; exit ;; --version | -v ) echo "$version" ; exit ;; --help | --h* | -h ) echo "$usage"; exit ;; -- ) # Stop option processing shift; break ;; - ) # Use stdin as input. break ;; -* ) echo "$me: invalid option $1$help" >&2 exit 1 ;; * ) break ;; esac done if test $# != 0; then echo "$me: too many arguments$help" >&2 exit 1 fi trap 'exit 1' 1 2 15 # CC_FOR_BUILD -- compiler used by this script. Note that the use of a # compiler to aid in system detection is discouraged as it requires # temporary files to be created and, as you can see below, it is a # headache to deal with in a portable fashion. # Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still # use `HOST_CC' if defined, but it is deprecated. # Portable tmp directory creation inspired by the Autoconf team. set_cc_for_build=' trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; : ${TMPDIR=/tmp} ; { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; dummy=$tmp/dummy ; tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; case $CC_FOR_BUILD,$HOST_CC,$CC in ,,) echo "int x;" > $dummy.c ; for c in cc gcc c89 c99 ; do if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then CC_FOR_BUILD="$c"; break ; fi ; done ; if test x"$CC_FOR_BUILD" = x ; then CC_FOR_BUILD=no_compiler_found ; fi ;; ,,*) CC_FOR_BUILD=$CC ;; ,*,*) CC_FOR_BUILD=$HOST_CC ;; esac ; set_cc_for_build= ;' # This is needed to find uname on a Pyramid OSx when run in the BSD universe. # (ghazi@noc.rutgers.edu 1994-08-24) if (test -f /.attbin/uname) >/dev/null 2>&1 ; then PATH=$PATH:/.attbin ; export PATH fi UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown case "${UNAME_SYSTEM}" in Linux|GNU|GNU/*) # If the system lacks a compiler, then just pick glibc. # We could probably try harder. LIBC=gnu eval $set_cc_for_build cat <<-EOF > $dummy.c #include #if defined(__UCLIBC__) LIBC=uclibc #elif defined(__dietlibc__) LIBC=dietlibc #else LIBC=gnu #endif EOF eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'` ;; esac # Note: order is significant - the case branches are not exclusive. case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in *:NetBSD:*:*) # NetBSD (nbsd) targets should (where applicable) match one or # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*, # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently # switched to ELF, *-*-netbsd* would select the old # object file format. 
This provides both forward # compatibility and a consistent mechanism for selecting the # object file format. # # Note: NetBSD doesn't particularly care about the vendor # portion of the name. We always set it to "unknown". sysctl="sysctl -n hw.machine_arch" UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \ /usr/sbin/$sysctl 2>/dev/null || echo unknown)` case "${UNAME_MACHINE_ARCH}" in armeb) machine=armeb-unknown ;; arm*) machine=arm-unknown ;; sh3el) machine=shl-unknown ;; sh3eb) machine=sh-unknown ;; sh5el) machine=sh5le-unknown ;; *) machine=${UNAME_MACHINE_ARCH}-unknown ;; esac # The Operating System including object format, if it has switched # to ELF recently, or will in the future. case "${UNAME_MACHINE_ARCH}" in arm*|i386|m68k|ns32k|sh3*|sparc|vax) eval $set_cc_for_build if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ELF__ then # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). # Return netbsd for either. FIX? os=netbsd else os=netbsdelf fi ;; *) os=netbsd ;; esac # The OS release # Debian GNU/NetBSD machines have a different userland, and # thus, need a distinct triplet. However, they do not need # kernel version information, so it can be replaced with a # suitable tag, in the style of linux-gnu. case "${UNAME_VERSION}" in Debian*) release='-gnu' ;; *) release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'` ;; esac # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: # contains redundant information, the shorter form: # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. echo "${machine}-${os}${release}" exit ;; *:Bitrig:*:*) UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'` echo ${UNAME_MACHINE_ARCH}-unknown-bitrig${UNAME_RELEASE} exit ;; *:OpenBSD:*:*) UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} exit ;; *:ekkoBSD:*:*) echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE} exit ;; *:SolidBSD:*:*) echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE} exit ;; macppc:MirBSD:*:*) echo powerpc-unknown-mirbsd${UNAME_RELEASE} exit ;; *:MirBSD:*:*) echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE} exit ;; alpha:OSF1:*:*) case $UNAME_RELEASE in *4.0) UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` ;; *5.*) UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` ;; esac # According to Compaq, /usr/sbin/psrinfo has been available on # OSF/1 and Tru64 systems produced since 1995. I hope that # covers most systems running today. This code pipes the CPU # types through head -n 1, so we only detect the type of CPU 0. ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` case "$ALPHA_CPU_TYPE" in "EV4 (21064)") UNAME_MACHINE="alpha" ;; "EV4.5 (21064)") UNAME_MACHINE="alpha" ;; "LCA4 (21066/21068)") UNAME_MACHINE="alpha" ;; "EV5 (21164)") UNAME_MACHINE="alphaev5" ;; "EV5.6 (21164A)") UNAME_MACHINE="alphaev56" ;; "EV5.6 (21164PC)") UNAME_MACHINE="alphapca56" ;; "EV5.7 (21164PC)") UNAME_MACHINE="alphapca57" ;; "EV6 (21264)") UNAME_MACHINE="alphaev6" ;; "EV6.7 (21264A)") UNAME_MACHINE="alphaev67" ;; "EV6.8CB (21264C)") UNAME_MACHINE="alphaev68" ;; "EV6.8AL (21264B)") UNAME_MACHINE="alphaev68" ;; "EV6.8CX (21264D)") UNAME_MACHINE="alphaev68" ;; "EV6.9A (21264/EV69A)") UNAME_MACHINE="alphaev69" ;; "EV7 (21364)") UNAME_MACHINE="alphaev7" ;; "EV7.9 (21364A)") UNAME_MACHINE="alphaev79" ;; esac # A Pn.n version is a patched version. # A Vn.n version is a released version. # A Tn.n version is a released field test version. 
# A Xn.n version is an unreleased experimental baselevel. # 1.2 uses "1.2" for uname -r. echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` # Reset EXIT trap before exiting to avoid spurious non-zero exit code. exitcode=$? trap '' 0 exit $exitcode ;; Alpha\ *:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? # Should we change UNAME_MACHINE based on the output of uname instead # of the specific Alpha model? echo alpha-pc-interix exit ;; 21064:Windows_NT:50:3) echo alpha-dec-winnt3.5 exit ;; Amiga*:UNIX_System_V:4.0:*) echo m68k-unknown-sysv4 exit ;; *:[Aa]miga[Oo][Ss]:*:*) echo ${UNAME_MACHINE}-unknown-amigaos exit ;; *:[Mm]orph[Oo][Ss]:*:*) echo ${UNAME_MACHINE}-unknown-morphos exit ;; *:OS/390:*:*) echo i370-ibm-openedition exit ;; *:z/VM:*:*) echo s390-ibm-zvmoe exit ;; *:OS400:*:*) echo powerpc-ibm-os400 exit ;; arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) echo arm-acorn-riscix${UNAME_RELEASE} exit ;; arm*:riscos:*:*|arm*:RISCOS:*:*) echo arm-unknown-riscos exit ;; SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) echo hppa1.1-hitachi-hiuxmpp exit ;; Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. if test "`(/bin/universe) 2>/dev/null`" = att ; then echo pyramid-pyramid-sysv3 else echo pyramid-pyramid-bsd fi exit ;; NILE*:*:*:dcosx) echo pyramid-pyramid-svr4 exit ;; DRS?6000:unix:4.0:6*) echo sparc-icl-nx6 exit ;; DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) case `/usr/bin/uname -p` in sparc) echo sparc-icl-nx7; exit ;; esac ;; s390x:SunOS:*:*) echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4H:SunOS:5.*:*) echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) echo i386-pc-auroraux${UNAME_RELEASE} exit ;; i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) eval $set_cc_for_build SUN_ARCH="i386" # If there is a compiler, see if it is configured for 64-bit objects. # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. # This test works for both compilers. if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ grep IS_64BIT_ARCH >/dev/null then SUN_ARCH="x86_64" fi fi echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:6*:*) # According to config.sub, this is the proper way to canonicalize # SunOS6. Hard to guess exactly what SunOS6 will be like, but # it's likely to be more like Solaris than SunOS4. echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:*:*) case "`/usr/bin/arch -k`" in Series*|S4*) UNAME_RELEASE=`uname -v` ;; esac # Japanese Language versions have a version number like `4.1.3-JL'. 
echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` exit ;; sun3*:SunOS:*:*) echo m68k-sun-sunos${UNAME_RELEASE} exit ;; sun*:*:4.2BSD:*) UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3 case "`/bin/arch`" in sun3) echo m68k-sun-sunos${UNAME_RELEASE} ;; sun4) echo sparc-sun-sunos${UNAME_RELEASE} ;; esac exit ;; aushp:SunOS:*:*) echo sparc-auspex-sunos${UNAME_RELEASE} exit ;; # The situation for MiNT is a little confusing. The machine name # can be virtually everything (everything which is not # "atarist" or "atariste" at least should have a processor # > m68000). The system name ranges from "MiNT" over "FreeMiNT" # to the lowercase version "mint" (or "freemint"). Finally # the system name "TOS" denotes a system which is actually not # MiNT. But MiNT is downward compatible to TOS, so this should # be no problem. atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) echo m68k-milan-mint${UNAME_RELEASE} exit ;; hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) echo m68k-hades-mint${UNAME_RELEASE} exit ;; *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) echo m68k-unknown-mint${UNAME_RELEASE} exit ;; m68k:machten:*:*) echo m68k-apple-machten${UNAME_RELEASE} exit ;; powerpc:machten:*:*) echo powerpc-apple-machten${UNAME_RELEASE} exit ;; RISC*:Mach:*:*) echo mips-dec-mach_bsd4.3 exit ;; RISC*:ULTRIX:*:*) echo mips-dec-ultrix${UNAME_RELEASE} exit ;; VAX*:ULTRIX*:*:*) echo vax-dec-ultrix${UNAME_RELEASE} exit ;; 2020:CLIX:*:* | 2430:CLIX:*:*) echo clipper-intergraph-clix${UNAME_RELEASE} exit ;; mips:*:*:UMIPS | mips:*:*:RISCos) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #ifdef __cplusplus #include /* for printf() prototype */ int main (int argc, char *argv[]) { #else int main (argc, argv) int argc; char *argv[]; { #endif #if defined (host_mips) && defined (MIPSEB) #if defined (SYSTYPE_SYSV) printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_SVR4) printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); #endif #endif exit (-1); } EOF $CC_FOR_BUILD -o $dummy $dummy.c && dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` && SYSTEM_NAME=`$dummy $dummyarg` && { echo "$SYSTEM_NAME"; exit; } echo mips-mips-riscos${UNAME_RELEASE} exit ;; Motorola:PowerMAX_OS:*:*) echo powerpc-motorola-powermax exit ;; Motorola:*:4.3:PL8-*) echo powerpc-harris-powermax exit ;; Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) echo powerpc-harris-powermax exit ;; Night_Hawk:Power_UNIX:*:*) echo powerpc-harris-powerunix exit ;; m88k:CX/UX:7*:*) echo m88k-harris-cxux7 exit ;; m88k:*:4*:R4*) echo m88k-motorola-sysv4 exit ;; m88k:*:3*:R3*) echo m88k-motorola-sysv3 exit ;; AViiON:dgux:*:*) # DG/UX returns AViiON for all architectures UNAME_PROCESSOR=`/usr/bin/uname -p` if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] then if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ [ ${TARGET_BINARY_INTERFACE}x = x ] then echo m88k-dg-dgux${UNAME_RELEASE} else echo m88k-dg-dguxbcs${UNAME_RELEASE} fi else echo i586-dg-dgux${UNAME_RELEASE} fi exit 
;; M88*:DolphinOS:*:*) # DolphinOS (SVR3) echo m88k-dolphin-sysv3 exit ;; M88*:*:R3*:*) # Delta 88k system running SVR3 echo m88k-motorola-sysv3 exit ;; XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) echo m88k-tektronix-sysv3 exit ;; Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) echo m68k-tektronix-bsd exit ;; *:IRIX*:*:*) echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` exit ;; ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id exit ;; # Note that: echo "'`uname -s`'" gives 'AIX ' i*86:AIX:*:*) echo i386-ibm-aix exit ;; ia64:AIX:*:*) if [ -x /usr/bin/oslevel ] ; then IBM_REV=`/usr/bin/oslevel` else IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} fi echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} exit ;; *:AIX:2:3) if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include main() { if (!__power_pc()) exit(1); puts("powerpc-ibm-aix3.2.5"); exit(0); } EOF if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` then echo "$SYSTEM_NAME" else echo rs6000-ibm-aix3.2.5 fi elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then echo rs6000-ibm-aix3.2.4 else echo rs6000-ibm-aix3.2 fi exit ;; *:AIX:*:[4567]) IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then IBM_ARCH=rs6000 else IBM_ARCH=powerpc fi if [ -x /usr/bin/oslevel ] ; then IBM_REV=`/usr/bin/oslevel` else IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} fi echo ${IBM_ARCH}-ibm-aix${IBM_REV} exit ;; *:AIX:*:*) echo rs6000-ibm-aix exit ;; ibmrt:4.4BSD:*|romp-ibm:BSD:*) echo romp-ibm-bsd4.4 exit ;; ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to exit ;; # report: romp-ibm BSD 4.3 *:BOSX:*:*) echo rs6000-bull-bosx exit ;; DPX/2?00:B.O.S.:*:*) echo m68k-bull-sysv3 exit ;; 9000/[34]??:4.3bsd:1.*:*) echo m68k-hp-bsd exit ;; hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) echo m68k-hp-bsd4.4 exit ;; 9000/[34678]??:HP-UX:*:*) HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` case "${UNAME_MACHINE}" in 9000/31? ) HP_ARCH=m68000 ;; 9000/[34]?? 
) HP_ARCH=m68k ;; 9000/[678][0-9][0-9]) if [ -x /usr/bin/getconf ]; then sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` case "${sc_cpu_version}" in 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1 532) # CPU_PA_RISC2_0 case "${sc_kernel_bits}" in 32) HP_ARCH="hppa2.0n" ;; 64) HP_ARCH="hppa2.0w" ;; '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20 esac ;; esac fi if [ "${HP_ARCH}" = "" ]; then eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #define _HPUX_SOURCE #include #include int main () { #if defined(_SC_KERNEL_BITS) long bits = sysconf(_SC_KERNEL_BITS); #endif long cpu = sysconf (_SC_CPU_VERSION); switch (cpu) { case CPU_PA_RISC1_0: puts ("hppa1.0"); break; case CPU_PA_RISC1_1: puts ("hppa1.1"); break; case CPU_PA_RISC2_0: #if defined(_SC_KERNEL_BITS) switch (bits) { case 64: puts ("hppa2.0w"); break; case 32: puts ("hppa2.0n"); break; default: puts ("hppa2.0"); break; } break; #else /* !defined(_SC_KERNEL_BITS) */ puts ("hppa2.0"); break; #endif default: puts ("hppa1.0"); break; } exit (0); } EOF (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` test -z "$HP_ARCH" && HP_ARCH=hppa fi ;; esac if [ ${HP_ARCH} = "hppa2.0w" ] then eval $set_cc_for_build # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler # generating 64-bit code. GNU and HP use different nomenclature: # # $ CC_FOR_BUILD=cc ./config.guess # => hppa2.0w-hp-hpux11.23 # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess # => hppa64-hp-hpux11.23 if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | grep -q __LP64__ then HP_ARCH="hppa2.0w" else HP_ARCH="hppa64" fi fi echo ${HP_ARCH}-hp-hpux${HPUX_REV} exit ;; ia64:HP-UX:*:*) HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` echo ia64-hp-hpux${HPUX_REV} exit ;; 3050*:HI-UX:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include int main () { long cpu = sysconf (_SC_CPU_VERSION); /* The order matters, because CPU_IS_HP_MC68K erroneously returns true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct results, however. 
*/ if (CPU_IS_PA_RISC (cpu)) { switch (cpu) { case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; default: puts ("hppa-hitachi-hiuxwe2"); break; } } else if (CPU_IS_HP_MC68K (cpu)) puts ("m68k-hitachi-hiuxwe2"); else puts ("unknown-hitachi-hiuxwe2"); exit (0); } EOF $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` && { echo "$SYSTEM_NAME"; exit; } echo unknown-hitachi-hiuxwe2 exit ;; 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) echo hppa1.1-hp-bsd exit ;; 9000/8??:4.3bsd:*:*) echo hppa1.0-hp-bsd exit ;; *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) echo hppa1.0-hp-mpeix exit ;; hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) echo hppa1.1-hp-osf exit ;; hp8??:OSF1:*:*) echo hppa1.0-hp-osf exit ;; i*86:OSF1:*:*) if [ -x /usr/sbin/sysversion ] ; then echo ${UNAME_MACHINE}-unknown-osf1mk else echo ${UNAME_MACHINE}-unknown-osf1 fi exit ;; parisc*:Lites*:*:*) echo hppa1.1-hp-lites exit ;; C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) echo c1-convex-bsd exit ;; C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*) if getsysinfo -f scalar_acc then echo c32-convex-bsd else echo c2-convex-bsd fi exit ;; C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) echo c34-convex-bsd exit ;; C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) echo c38-convex-bsd exit ;; C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) echo c4-convex-bsd exit ;; CRAY*Y-MP:*:*:*) echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*[A-Z]90:*:*:*) echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ -e 's/\.[^.]*$/.X/' exit ;; CRAY*TS:*:*:*) echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*T3E:*:*:*) echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*SV1:*:*:*) echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; *:UNICOS/mp:*:*) echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit ;; 5000:UNIX_System_V:4.*:*) FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'` echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit ;; i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} exit ;; sparc*:BSD/OS:*:*) echo sparc-unknown-bsdi${UNAME_RELEASE} exit ;; *:BSD/OS:*:*) echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} exit ;; *:FreeBSD:*:*) UNAME_PROCESSOR=`/usr/bin/uname -p` case ${UNAME_PROCESSOR} in amd64) echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; *) echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; esac exit ;; i*:CYGWIN*:*) echo ${UNAME_MACHINE}-pc-cygwin exit ;; *:MINGW64*:*) echo ${UNAME_MACHINE}-pc-mingw64 exit ;; *:MINGW*:*) echo ${UNAME_MACHINE}-pc-mingw32 exit ;; i*:MSYS*:*) echo ${UNAME_MACHINE}-pc-msys exit ;; i*:windows32*:*) # uname -m includes "-pc" on this system. 
echo ${UNAME_MACHINE}-mingw32 exit ;; i*:PW*:*) echo ${UNAME_MACHINE}-pc-pw32 exit ;; *:Interix*:*) case ${UNAME_MACHINE} in x86) echo i586-pc-interix${UNAME_RELEASE} exit ;; authenticamd | genuineintel | EM64T) echo x86_64-unknown-interix${UNAME_RELEASE} exit ;; IA64) echo ia64-unknown-interix${UNAME_RELEASE} exit ;; esac ;; [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) echo i${UNAME_MACHINE}-pc-mks exit ;; 8664:Windows_NT:*) echo x86_64-pc-mks exit ;; i*:Windows_NT*:* | Pentium*:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we # UNAME_MACHINE based on the output of uname instead of i386? echo i586-pc-interix exit ;; i*:UWIN*:*) echo ${UNAME_MACHINE}-pc-uwin exit ;; amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) echo x86_64-unknown-cygwin exit ;; p*:CYGWIN*:*) echo powerpcle-unknown-cygwin exit ;; prep*:SunOS:5.*:*) echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; *:GNU:*:*) # the GNU system echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-${LIBC}`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` exit ;; *:GNU/*:*:*) # other systems with GNU libc and userland echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-${LIBC} exit ;; i*86:Minix:*:*) echo ${UNAME_MACHINE}-pc-minix exit ;; aarch64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; aarch64_be:Linux:*:*) UNAME_MACHINE=aarch64_be echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; alpha:Linux:*:*) case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in EV5) UNAME_MACHINE=alphaev5 ;; EV56) UNAME_MACHINE=alphaev56 ;; PCA56) UNAME_MACHINE=alphapca56 ;; PCA57) UNAME_MACHINE=alphapca56 ;; EV6) UNAME_MACHINE=alphaev6 ;; EV67) UNAME_MACHINE=alphaev67 ;; EV68*) UNAME_MACHINE=alphaev68 ;; esac objdump --private-headers /bin/sh | grep -q ld.so.1 if test "$?" 
= 0 ; then LIBC="gnulibc1" ; fi echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; arc:Linux:*:* | arceb:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; arm*:Linux:*:*) eval $set_cc_for_build if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ARM_EABI__ then echo ${UNAME_MACHINE}-unknown-linux-${LIBC} else if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ARM_PCS_VFP then echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabi else echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabihf fi fi exit ;; avr32*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; cris:Linux:*:*) echo ${UNAME_MACHINE}-axis-linux-${LIBC} exit ;; crisv32:Linux:*:*) echo ${UNAME_MACHINE}-axis-linux-${LIBC} exit ;; frv:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; hexagon:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; i*86:Linux:*:*) echo ${UNAME_MACHINE}-pc-linux-${LIBC} exit ;; ia64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; m32r*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; m68*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; mips:Linux:*:* | mips64:Linux:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #undef CPU #undef ${UNAME_MACHINE} #undef ${UNAME_MACHINE}el #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) CPU=${UNAME_MACHINE}el #else #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) CPU=${UNAME_MACHINE} #else CPU= #endif #endif EOF eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'` test x"${CPU}" != x && { echo "${CPU}-unknown-linux-${LIBC}"; exit; } ;; or1k:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; or32:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; padre:Linux:*:*) echo sparc-unknown-linux-${LIBC} exit ;; parisc64:Linux:*:* | hppa64:Linux:*:*) echo hppa64-unknown-linux-${LIBC} exit ;; parisc:Linux:*:* | hppa:Linux:*:*) # Look for CPU level case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in PA7*) echo hppa1.1-unknown-linux-${LIBC} ;; PA8*) echo hppa2.0-unknown-linux-${LIBC} ;; *) echo hppa-unknown-linux-${LIBC} ;; esac exit ;; ppc64:Linux:*:*) echo powerpc64-unknown-linux-${LIBC} exit ;; ppc:Linux:*:*) echo powerpc-unknown-linux-${LIBC} exit ;; ppc64le:Linux:*:*) echo powerpc64le-unknown-linux-${LIBC} exit ;; ppcle:Linux:*:*) echo powerpcle-unknown-linux-${LIBC} exit ;; s390:Linux:*:* | s390x:Linux:*:*) echo ${UNAME_MACHINE}-ibm-linux-${LIBC} exit ;; sh64*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; sh*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; sparc:Linux:*:* | sparc64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; tile*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; vax:Linux:*:*) echo ${UNAME_MACHINE}-dec-linux-${LIBC} exit ;; x86_64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; xtensa*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; i*86:DYNIX/ptx:4*:*) # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. # earlier versions are messed up and put the nodename in both # sysname and nodename. echo i386-sequent-sysv4 exit ;; i*86:UNIX_SV:4.2MP:2.*) # Unixware is an offshoot of SVR4, but it has its own version # number series starting with 2... # I am not positive that other SVR4 systems won't match this, # I just have to hope. -- rms. # Use sysv4.2uw... so that sysv4* matches it. 
echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} exit ;; i*86:OS/2:*:*) # If we were able to find `uname', then EMX Unix compatibility # is probably installed. echo ${UNAME_MACHINE}-pc-os2-emx exit ;; i*86:XTS-300:*:STOP) echo ${UNAME_MACHINE}-unknown-stop exit ;; i*86:atheos:*:*) echo ${UNAME_MACHINE}-unknown-atheos exit ;; i*86:syllable:*:*) echo ${UNAME_MACHINE}-pc-syllable exit ;; i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) echo i386-unknown-lynxos${UNAME_RELEASE} exit ;; i*86:*DOS:*:*) echo ${UNAME_MACHINE}-pc-msdosdjgpp exit ;; i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} else echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} fi exit ;; i*86:*:5:[678]*) # UnixWare 7.x, OpenUNIX and OpenServer 6. case `/bin/uname -X | grep "^Machine"` in *486*) UNAME_MACHINE=i486 ;; *Pentium) UNAME_MACHINE=i586 ;; *Pent*|*Celeron) UNAME_MACHINE=i686 ;; esac echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} exit ;; i*86:*:3.2:*) if test -f /usr/options/cb.name; then UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ && UNAME_MACHINE=i586 (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ && UNAME_MACHINE=i686 (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ && UNAME_MACHINE=i686 echo ${UNAME_MACHINE}-pc-sco$UNAME_REL else echo ${UNAME_MACHINE}-pc-sysv32 fi exit ;; pc:*:*:*) # Left here for compatibility: # uname -m prints for DJGPP always 'pc', but it prints nothing about # the processor, so we play safe by assuming i586. # Note: whatever this is, it MUST be the same as what config.sub # prints for the "djgpp" host, or else GDB configury will decide that # this is a cross-build. echo i586-pc-msdosdjgpp exit ;; Intel:Mach:3*:*) echo i386-pc-mach3 exit ;; paragon:*:*:*) echo i860-intel-osf1 exit ;; i860:*:4.*:*) # i860-SVR4 if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 else # Add other i860-SVR4 vendors below as they are discovered. 
echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 fi exit ;; mini*:CTIX:SYS*5:*) # "miniframe" echo m68010-convergent-sysv exit ;; mc68k:UNIX:SYSTEM5:3.51m) echo m68k-convergent-sysv exit ;; M680?0:D-NIX:5.3:*) echo m68k-diab-dnix exit ;; M68*:*:R3V[5678]*:*) test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) OS_REL='' test -r /etc/.relid \ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4.3${OS_REL}; exit; } /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4; exit; } ;; NCR*:*:4.2:* | MPRAS*:*:4.2:*) OS_REL='.3' test -r /etc/.relid \ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4.3${OS_REL}; exit; } /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) echo m68k-unknown-lynxos${UNAME_RELEASE} exit ;; mc68030:UNIX_System_V:4.*:*) echo m68k-atari-sysv4 exit ;; TSUNAMI:LynxOS:2.*:*) echo sparc-unknown-lynxos${UNAME_RELEASE} exit ;; rs6000:LynxOS:2.*:*) echo rs6000-unknown-lynxos${UNAME_RELEASE} exit ;; PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) echo powerpc-unknown-lynxos${UNAME_RELEASE} exit ;; SM[BE]S:UNIX_SV:*:*) echo mips-dde-sysv${UNAME_RELEASE} exit ;; RM*:ReliantUNIX-*:*:*) echo mips-sni-sysv4 exit ;; RM*:SINIX-*:*:*) echo mips-sni-sysv4 exit ;; *:SINIX-*:*:*) if uname -p 2>/dev/null >/dev/null ; then UNAME_MACHINE=`(uname -p) 2>/dev/null` echo ${UNAME_MACHINE}-sni-sysv4 else echo ns32k-sni-sysv fi exit ;; PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort # says echo i586-unisys-sysv4 exit ;; *:UNIX_System_V:4*:FTX*) # From Gerald Hewes . # How about differentiating between stratus architectures? -djm echo hppa1.1-stratus-sysv4 exit ;; *:*:*:FTX*) # From seanf@swdc.stratus.com. echo i860-stratus-sysv4 exit ;; i*86:VOS:*:*) # From Paul.Green@stratus.com. echo ${UNAME_MACHINE}-stratus-vos exit ;; *:VOS:*:*) # From Paul.Green@stratus.com. echo hppa1.1-stratus-vos exit ;; mc68*:A/UX:*:*) echo m68k-apple-aux${UNAME_RELEASE} exit ;; news*:NEWS-OS:6*:*) echo mips-sony-newsos6 exit ;; R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) if [ -d /usr/nec ]; then echo mips-nec-sysv${UNAME_RELEASE} else echo mips-unknown-sysv${UNAME_RELEASE} fi exit ;; BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. echo powerpc-be-beos exit ;; BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. echo powerpc-apple-beos exit ;; BePC:BeOS:*:*) # BeOS running on Intel PC compatible. echo i586-pc-beos exit ;; BePC:Haiku:*:*) # Haiku running on Intel PC compatible. 
echo i586-pc-haiku exit ;; x86_64:Haiku:*:*) echo x86_64-unknown-haiku exit ;; SX-4:SUPER-UX:*:*) echo sx4-nec-superux${UNAME_RELEASE} exit ;; SX-5:SUPER-UX:*:*) echo sx5-nec-superux${UNAME_RELEASE} exit ;; SX-6:SUPER-UX:*:*) echo sx6-nec-superux${UNAME_RELEASE} exit ;; SX-7:SUPER-UX:*:*) echo sx7-nec-superux${UNAME_RELEASE} exit ;; SX-8:SUPER-UX:*:*) echo sx8-nec-superux${UNAME_RELEASE} exit ;; SX-8R:SUPER-UX:*:*) echo sx8r-nec-superux${UNAME_RELEASE} exit ;; Power*:Rhapsody:*:*) echo powerpc-apple-rhapsody${UNAME_RELEASE} exit ;; *:Rhapsody:*:*) echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} exit ;; *:Darwin:*:*) UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown eval $set_cc_for_build if test "$UNAME_PROCESSOR" = unknown ; then UNAME_PROCESSOR=powerpc fi if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ grep IS_64BIT_ARCH >/dev/null then case $UNAME_PROCESSOR in i386) UNAME_PROCESSOR=x86_64 ;; powerpc) UNAME_PROCESSOR=powerpc64 ;; esac fi fi echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} exit ;; *:procnto*:*:* | *:QNX:[0123456789]*:*) UNAME_PROCESSOR=`uname -p` if test "$UNAME_PROCESSOR" = "x86"; then UNAME_PROCESSOR=i386 UNAME_MACHINE=pc fi echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} exit ;; *:QNX:*:4*) echo i386-pc-qnx exit ;; NEO-?:NONSTOP_KERNEL:*:*) echo neo-tandem-nsk${UNAME_RELEASE} exit ;; NSE-*:NONSTOP_KERNEL:*:*) echo nse-tandem-nsk${UNAME_RELEASE} exit ;; NSR-?:NONSTOP_KERNEL:*:*) echo nsr-tandem-nsk${UNAME_RELEASE} exit ;; *:NonStop-UX:*:*) echo mips-compaq-nonstopux exit ;; BS2000:POSIX*:*:*) echo bs2000-siemens-sysv exit ;; DS/*:UNIX_System_V:*:*) echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} exit ;; *:Plan9:*:*) # "uname -m" is not consistent, so use $cputype instead. 386 # is converted to i386 for consistency with other x86 # operating systems. if test "$cputype" = "386"; then UNAME_MACHINE=i386 else UNAME_MACHINE="$cputype" fi echo ${UNAME_MACHINE}-unknown-plan9 exit ;; *:TOPS-10:*:*) echo pdp10-unknown-tops10 exit ;; *:TENEX:*:*) echo pdp10-unknown-tenex exit ;; KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) echo pdp10-dec-tops20 exit ;; XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) echo pdp10-xkl-tops20 exit ;; *:TOPS-20:*:*) echo pdp10-unknown-tops20 exit ;; *:ITS:*:*) echo pdp10-unknown-its exit ;; SEI:*:*:SEIUX) echo mips-sei-seiux${UNAME_RELEASE} exit ;; *:DragonFly:*:*) echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` exit ;; *:*VMS:*:*) UNAME_MACHINE=`(uname -p) 2>/dev/null` case "${UNAME_MACHINE}" in A*) echo alpha-dec-vms ; exit ;; I*) echo ia64-dec-vms ; exit ;; V*) echo vax-dec-vms ; exit ;; esac ;; *:XENIX:*:SysV) echo i386-pc-xenix exit ;; i*86:skyos:*:*) echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//' exit ;; i*86:rdos:*:*) echo ${UNAME_MACHINE}-pc-rdos exit ;; i*86:AROS:*:*) echo ${UNAME_MACHINE}-pc-aros exit ;; x86_64:VMkernel:*:*) echo ${UNAME_MACHINE}-unknown-esx exit ;; esac eval $set_cc_for_build cat >$dummy.c < # include #endif main () { #if defined (sony) #if defined (MIPSEB) /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed, I don't know.... 
*/ printf ("mips-sony-bsd\n"); exit (0); #else #include printf ("m68k-sony-newsos%s\n", #ifdef NEWSOS4 "4" #else "" #endif ); exit (0); #endif #endif #if defined (__arm) && defined (__acorn) && defined (__unix) printf ("arm-acorn-riscix\n"); exit (0); #endif #if defined (hp300) && !defined (hpux) printf ("m68k-hp-bsd\n"); exit (0); #endif #if defined (NeXT) #if !defined (__ARCHITECTURE__) #define __ARCHITECTURE__ "m68k" #endif int version; version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`; if (version < 4) printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version); else printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version); exit (0); #endif #if defined (MULTIMAX) || defined (n16) #if defined (UMAXV) printf ("ns32k-encore-sysv\n"); exit (0); #else #if defined (CMU) printf ("ns32k-encore-mach\n"); exit (0); #else printf ("ns32k-encore-bsd\n"); exit (0); #endif #endif #endif #if defined (__386BSD__) printf ("i386-pc-bsd\n"); exit (0); #endif #if defined (sequent) #if defined (i386) printf ("i386-sequent-dynix\n"); exit (0); #endif #if defined (ns32000) printf ("ns32k-sequent-dynix\n"); exit (0); #endif #endif #if defined (_SEQUENT_) struct utsname un; uname(&un); if (strncmp(un.version, "V2", 2) == 0) { printf ("i386-sequent-ptx2\n"); exit (0); } if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */ printf ("i386-sequent-ptx1\n"); exit (0); } printf ("i386-sequent-ptx\n"); exit (0); #endif #if defined (vax) # if !defined (ultrix) # include # if defined (BSD) # if BSD == 43 printf ("vax-dec-bsd4.3\n"); exit (0); # else # if BSD == 199006 printf ("vax-dec-bsd4.3reno\n"); exit (0); # else printf ("vax-dec-bsd\n"); exit (0); # endif # endif # else printf ("vax-dec-bsd\n"); exit (0); # endif # else printf ("vax-dec-ultrix\n"); exit (0); # endif #endif #if defined (alliant) && defined (i860) printf ("i860-alliant-bsd\n"); exit (0); #endif exit (1); } EOF $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` && { echo "$SYSTEM_NAME"; exit; } # Apollos put the system type in the environment. test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; } # Convex versions that predate uname can use getsysinfo(1) if [ -x /usr/convex/getsysinfo ] then case `getsysinfo -f cpu_type` in c1*) echo c1-convex-bsd exit ;; c2*) if getsysinfo -f scalar_acc then echo c32-convex-bsd else echo c2-convex-bsd fi exit ;; c34*) echo c34-convex-bsd exit ;; c38*) echo c38-convex-bsd exit ;; c4*) echo c4-convex-bsd exit ;; esac fi cat >&2 < in order to provide the needed information to handle your system. 
config.guess timestamp = $timestamp uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` /bin/uname -X = `(/bin/uname -X) 2>/dev/null` hostinfo = `(hostinfo) 2>/dev/null` /bin/universe = `(/bin/universe) 2>/dev/null` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` /bin/arch = `(/bin/arch) 2>/dev/null` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` UNAME_MACHINE = ${UNAME_MACHINE} UNAME_RELEASE = ${UNAME_RELEASE} UNAME_SYSTEM = ${UNAME_SYSTEM} UNAME_VERSION = ${UNAME_VERSION} EOF exit 1 # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" # End: nordugrid-arc-6.14.0/PaxHeaders.30264/ltmain.sh0000644000000000000000000000013214152153407017157 xustar000000000000000030 mtime=1638455047.993790041 30 atime=1638455095.533504347 30 ctime=1638455095.735507382 nordugrid-arc-6.14.0/ltmain.sh0000644000175000002070000105152214152153407017152 0ustar00mockbuildmock00000000000000 # libtool (GNU libtool) 2.4.2 # Written by Gordon Matzigkeit , 1996 # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, # 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc. # This is free software; see the source for copying conditions. There is NO # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # GNU Libtool is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # As a special exception to the GNU General Public License, # if you distribute this file as part of a program or library that # is built using GNU Libtool, you may include this file under the # same distribution terms that you use for the rest of that program. # # GNU Libtool is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Libtool; see the file COPYING. If not, a copy # can be downloaded from http://www.gnu.org/licenses/gpl.html, # or obtained by writing to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # Usage: $progname [OPTION]... [MODE-ARG]... # # Provide generalized library-building support services. 
# # --config show all configuration variables # --debug enable verbose shell tracing # -n, --dry-run display commands without modifying any files # --features display basic configuration information and exit # --mode=MODE use operation mode MODE # --preserve-dup-deps don't remove duplicate dependency libraries # --quiet, --silent don't print informational messages # --no-quiet, --no-silent # print informational messages (default) # --no-warn don't display warning messages # --tag=TAG use configuration variables from tag TAG # -v, --verbose print more informational messages than default # --no-verbose don't print the extra informational messages # --version print version information # -h, --help, --help-all print short, long, or detailed help message # # MODE must be one of the following: # # clean remove files from the build directory # compile compile a source file into a libtool object # execute automatically set library path, then run a program # finish complete the installation of libtool libraries # install install libraries or executables # link create a library or an executable # uninstall remove libraries from an installed directory # # MODE-ARGS vary depending on the MODE. When passed as first option, # `--mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that. # Try `$progname --help --mode=MODE' for a more detailed description of MODE. # # When reporting a bug, please describe a test case to reproduce it and # include the following information: # # host-triplet: $host # shell: $SHELL # compiler: $LTCC # compiler flags: $LTCFLAGS # linker: $LD (gnu? $with_gnu_ld) # $progname: (GNU libtool) 2.4.2 # automake: $automake_version # autoconf: $autoconf_version # # Report bugs to . # GNU libtool home page: . # General help using GNU software: . PROGRAM=libtool PACKAGE=libtool VERSION=2.4.2 TIMESTAMP="" package_revision=1.3337 # Be Bourne compatible if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then emulate sh NULLCMD=: # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac fi BIN_SH=xpg4; export BIN_SH # for Tru64 DUALCASE=1; export DUALCASE # for MKS sh # A function that is used when there is no print builtin or printf. func_fallback_echo () { eval 'cat <<_LTECHO_EOF $1 _LTECHO_EOF' } # NLS nuisances: We save the old values to restore during execute mode. lt_user_locale= lt_safe_locale= for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES do eval "if test \"\${$lt_var+set}\" = set; then save_$lt_var=\$$lt_var $lt_var=C export $lt_var lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\" lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\" fi" done LC_ALL=C LANGUAGE=C export LANGUAGE LC_ALL $lt_unset CDPATH # Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh # is ksh but when the shell is invoked as "sh" and the current value of # the _XPG environment variable is not equal to 1 (one), the special # positional parameter $0, within a function call, is the name of the # function. progpath="$0" : ${CP="cp -f"} test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'} : ${MAKE="make"} : ${MKDIR="mkdir"} : ${MV="mv -f"} : ${RM="rm -f"} : ${SHELL="${CONFIG_SHELL-/bin/sh}"} : ${Xsed="$SED -e 1s/^X//"} # Global variables: EXIT_SUCCESS=0 EXIT_FAILURE=1 EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing. EXIT_SKIP=77 # $? 
= 77 is used to indicate a skipped test to automake. exit_status=$EXIT_SUCCESS # Make sure IFS has a sensible default lt_nl=' ' IFS=" $lt_nl" dirname="s,/[^/]*$,," basename="s,^.*/,," # func_dirname file append nondir_replacement # Compute the dirname of FILE. If nonempty, add APPEND to the result, # otherwise set result to NONDIR_REPLACEMENT. func_dirname () { func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` if test "X$func_dirname_result" = "X${1}"; then func_dirname_result="${3}" else func_dirname_result="$func_dirname_result${2}" fi } # func_dirname may be replaced by extended shell implementation # func_basename file func_basename () { func_basename_result=`$ECHO "${1}" | $SED "$basename"` } # func_basename may be replaced by extended shell implementation # func_dirname_and_basename file append nondir_replacement # perform func_basename and func_dirname in a single function # call: # dirname: Compute the dirname of FILE. If nonempty, # add APPEND to the result, otherwise set result # to NONDIR_REPLACEMENT. # value returned in "$func_dirname_result" # basename: Compute filename of FILE. # value retuned in "$func_basename_result" # Implementation must be kept synchronized with func_dirname # and func_basename. For efficiency, we do not delegate to # those functions but instead duplicate the functionality here. func_dirname_and_basename () { # Extract subdirectory from the argument. func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"` if test "X$func_dirname_result" = "X${1}"; then func_dirname_result="${3}" else func_dirname_result="$func_dirname_result${2}" fi func_basename_result=`$ECHO "${1}" | $SED -e "$basename"` } # func_dirname_and_basename may be replaced by extended shell implementation # func_stripname prefix suffix name # strip PREFIX and SUFFIX off of NAME. # PREFIX and SUFFIX must not contain globbing or regex special # characters, hashes, percent signs, but SUFFIX may contain a leading # dot (in which case that matches only a dot). # func_strip_suffix prefix name func_stripname () { case ${2} in .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; esac } # func_stripname may be replaced by extended shell implementation # These SED scripts presuppose an absolute path with a trailing slash. pathcar='s,^/\([^/]*\).*$,\1,' pathcdr='s,^/[^/]*,,' removedotparts=':dotsl s@/\./@/@g t dotsl s,/\.$,/,' collapseslashes='s@/\{1,\}@/@g' finalslash='s,/*$,/,' # func_normal_abspath PATH # Remove doubled-up and trailing slashes, "." path components, # and cancel out any ".." path components in PATH after making # it an absolute path. # value returned in "$func_normal_abspath_result" func_normal_abspath () { # Start from root dir and reassemble the path. func_normal_abspath_result= func_normal_abspath_tpath=$1 func_normal_abspath_altnamespace= case $func_normal_abspath_tpath in "") # Empty path, that just means $cwd. func_stripname '' '/' "`pwd`" func_normal_abspath_result=$func_stripname_result return ;; # The next three entries are used to spot a run of precisely # two leading slashes without using negated character classes; # we take advantage of case's first-match behaviour. ///*) # Unusual form of absolute path, do nothing. ;; //*) # Not necessarily an ordinary path; POSIX reserves leading '//' # and for example Cygwin uses it to access remote file shares # over CIFS/SMB, so we conserve a leading double slash if found. 
func_normal_abspath_altnamespace=/ ;; /*) # Absolute path, do nothing. ;; *) # Relative path, prepend $cwd. func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath ;; esac # Cancel out all the simple stuff to save iterations. We also want # the path to end with a slash for ease of parsing, so make sure # there is one (and only one) here. func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"` while :; do # Processed it all yet? if test "$func_normal_abspath_tpath" = / ; then # If we ascended to the root using ".." the result may be empty now. if test -z "$func_normal_abspath_result" ; then func_normal_abspath_result=/ fi break fi func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \ -e "$pathcar"` func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ -e "$pathcdr"` # Figure out what to do with it case $func_normal_abspath_tcomponent in "") # Trailing empty path component, ignore it. ;; ..) # Parent dir; strip last assembled component from result. func_dirname "$func_normal_abspath_result" func_normal_abspath_result=$func_dirname_result ;; *) # Actual path component, append it. func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent ;; esac done # Restore leading double-slash if one was found on entry. func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result } # func_relative_path SRCDIR DSTDIR # generates a relative path from SRCDIR to DSTDIR, with a trailing # slash if non-empty, suitable for immediately appending a filename # without needing to append a separator. # value returned in "$func_relative_path_result" func_relative_path () { func_relative_path_result= func_normal_abspath "$1" func_relative_path_tlibdir=$func_normal_abspath_result func_normal_abspath "$2" func_relative_path_tbindir=$func_normal_abspath_result # Ascend the tree starting from libdir while :; do # check if we have found a prefix of bindir case $func_relative_path_tbindir in $func_relative_path_tlibdir) # found an exact match func_relative_path_tcancelled= break ;; $func_relative_path_tlibdir*) # found a matching prefix func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir" func_relative_path_tcancelled=$func_stripname_result if test -z "$func_relative_path_result"; then func_relative_path_result=. fi break ;; *) func_dirname $func_relative_path_tlibdir func_relative_path_tlibdir=${func_dirname_result} if test "x$func_relative_path_tlibdir" = x ; then # Have to descend all the way to the root! func_relative_path_result=../$func_relative_path_result func_relative_path_tcancelled=$func_relative_path_tbindir break fi func_relative_path_result=../$func_relative_path_result ;; esac done # Now calculate path; take care to avoid doubling-up slashes. func_stripname '' '/' "$func_relative_path_result" func_relative_path_result=$func_stripname_result func_stripname '/' '/' "$func_relative_path_tcancelled" if test "x$func_stripname_result" != x ; then func_relative_path_result=${func_relative_path_result}/${func_stripname_result} fi # Normalisation. If bindir is libdir, return empty string, # else relative path ending with a slash; either way, target # file name can be directly appended. if test ! 
-z "$func_relative_path_result"; then func_stripname './' '' "$func_relative_path_result/" func_relative_path_result=$func_stripname_result fi } # The name of this program: func_dirname_and_basename "$progpath" progname=$func_basename_result # Make sure we have an absolute path for reexecution: case $progpath in [\\/]*|[A-Za-z]:\\*) ;; *[\\/]*) progdir=$func_dirname_result progdir=`cd "$progdir" && pwd` progpath="$progdir/$progname" ;; *) save_IFS="$IFS" IFS=${PATH_SEPARATOR-:} for progdir in $PATH; do IFS="$save_IFS" test -x "$progdir/$progname" && break done IFS="$save_IFS" test -n "$progdir" || progdir=`pwd` progpath="$progdir/$progname" ;; esac # Sed substitution that helps us do robust quoting. It backslashifies # metacharacters that are still active within double-quoted strings. Xsed="${SED}"' -e 1s/^X//' sed_quote_subst='s/\([`"$\\]\)/\\\1/g' # Same as above, but do not quote variable references. double_quote_subst='s/\(["`\\]\)/\\\1/g' # Sed substitution that turns a string into a regex matching for the # string literally. sed_make_literal_regex='s,[].[^$\\*\/],\\&,g' # Sed substitution that converts a w32 file name or path # which contains forward slashes, into one that contains # (escaped) backslashes. A very naive implementation. lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' # Re-`\' parameter expansions in output of double_quote_subst that were # `\'-ed in input to the same. If an odd number of `\' preceded a '$' # in input to double_quote_subst, that '$' was protected from expansion. # Since each input `\' is now two `\'s, look for any number of runs of # four `\'s followed by two `\'s and then a '$'. `\' that '$'. bs='\\' bs2='\\\\' bs4='\\\\\\\\' dollar='\$' sed_double_backslash="\ s/$bs4/&\\ /g s/^$bs2$dollar/$bs&/ s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g s/\n//g" # Standard options: opt_dry_run=false opt_help=false opt_quiet=false opt_verbose=false opt_warning=: # func_echo arg... # Echo program name prefixed message, along with the current mode # name if it has been set yet. func_echo () { $ECHO "$progname: ${opt_mode+$opt_mode: }$*" } # func_verbose arg... # Echo program name prefixed message in verbose mode only. func_verbose () { $opt_verbose && func_echo ${1+"$@"} # A bug in bash halts the script if the last line of a function # fails when set -e is in force, so we need another command to # work around that: : } # func_echo_all arg... # Invoke $ECHO with all args, space-separated. func_echo_all () { $ECHO "$*" } # func_error arg... # Echo program name prefixed message to standard error. func_error () { $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2 } # func_warning arg... # Echo program name prefixed warning message to standard error. func_warning () { $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2 # bash bug again: : } # func_fatal_error arg... # Echo program name prefixed message to standard error, and exit. func_fatal_error () { func_error ${1+"$@"} exit $EXIT_FAILURE } # func_fatal_help arg... # Echo program name prefixed message to standard error, followed by # a help hint, and exit. func_fatal_help () { func_error ${1+"$@"} func_fatal_error "$help" } help="Try \`$progname --help' for more information." ## default # func_grep expression filename # Check whether EXPRESSION matches any line of FILENAME, without output. func_grep () { $GREP "$1" "$2" >/dev/null 2>&1 } # func_mkdir_p directory-path # Make sure the entire path to DIRECTORY-PATH is available. 
func_mkdir_p () { my_directory_path="$1" my_dir_list= if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then # Protect directory names starting with `-' case $my_directory_path in -*) my_directory_path="./$my_directory_path" ;; esac # While some portion of DIR does not yet exist... while test ! -d "$my_directory_path"; do # ...make a list in topmost first order. Use a colon delimited # list incase some portion of path contains whitespace. my_dir_list="$my_directory_path:$my_dir_list" # If the last portion added has no slash in it, the list is done case $my_directory_path in */*) ;; *) break ;; esac # ...otherwise throw away the child directory and loop my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"` done my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'` save_mkdir_p_IFS="$IFS"; IFS=':' for my_dir in $my_dir_list; do IFS="$save_mkdir_p_IFS" # mkdir can fail with a `File exist' error if two processes # try to create one of the directories concurrently. Don't # stop in that case! $MKDIR "$my_dir" 2>/dev/null || : done IFS="$save_mkdir_p_IFS" # Bail out if we (or some other process) failed to create a directory. test -d "$my_directory_path" || \ func_fatal_error "Failed to create \`$1'" fi } # func_mktempdir [string] # Make a temporary directory that won't clash with other running # libtool processes, and avoids race conditions if possible. If # given, STRING is the basename for that directory. func_mktempdir () { my_template="${TMPDIR-/tmp}/${1-$progname}" if test "$opt_dry_run" = ":"; then # Return a directory name, but don't create it in dry-run mode my_tmpdir="${my_template}-$$" else # If mktemp works, use that first and foremost my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null` if test ! -d "$my_tmpdir"; then # Failing that, at least try and use $RANDOM to avoid a race my_tmpdir="${my_template}-${RANDOM-0}$$" save_mktempdir_umask=`umask` umask 0077 $MKDIR "$my_tmpdir" umask $save_mktempdir_umask fi # If we're not in dry-run mode, bomb out on failure test -d "$my_tmpdir" || \ func_fatal_error "cannot create temporary directory \`$my_tmpdir'" fi $ECHO "$my_tmpdir" } # func_quote_for_eval arg # Aesthetically quote ARG to be evaled later. # This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT # is double-quoted, suitable for a subsequent eval, whereas # FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters # which are still active within double quotes backslashified. func_quote_for_eval () { case $1 in *[\\\`\"\$]*) func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;; *) func_quote_for_eval_unquoted_result="$1" ;; esac case $func_quote_for_eval_unquoted_result in # Double-quote args containing shell metacharacters to delay # word splitting, command substitution and and variable # expansion for a subsequent eval. # Many Bourne shells cannot handle close brackets correctly # in scan sets, so we specify it separately. *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\"" ;; *) func_quote_for_eval_result="$func_quote_for_eval_unquoted_result" esac } # func_quote_for_expand arg # Aesthetically quote ARG to be evaled later; same as above, # but do not quote variable references. 
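# Illustrative sketch of the quoting helpers (hypothetical argument shown):
#   func_quote_for_eval 'a b$c'
# leaves $func_quote_for_eval_unquoted_result as   a b\$c
# and $func_quote_for_eval_result as               "a b\$c"
# i.e. metacharacters are backslashified and the whole word is double-quoted
# so it survives a later eval.  func_quote_for_expand below applies the same
# treatment but intentionally leaves `$' unescaped, so variable references
# still expand when the quoted command is echoed back to the user.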
func_quote_for_expand () { case $1 in *[\\\`\"]*) my_arg=`$ECHO "$1" | $SED \ -e "$double_quote_subst" -e "$sed_double_backslash"` ;; *) my_arg="$1" ;; esac case $my_arg in # Double-quote args containing shell metacharacters to delay # word splitting and command substitution for a subsequent eval. # Many Bourne shells cannot handle close brackets correctly # in scan sets, so we specify it separately. *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") my_arg="\"$my_arg\"" ;; esac func_quote_for_expand_result="$my_arg" } # func_show_eval cmd [fail_exp] # Unless opt_silent is true, then output CMD. Then, if opt_dryrun is # not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP # is given, then evaluate it. func_show_eval () { my_cmd="$1" my_fail_exp="${2-:}" ${opt_silent-false} || { func_quote_for_expand "$my_cmd" eval "func_echo $func_quote_for_expand_result" } if ${opt_dry_run-false}; then :; else eval "$my_cmd" my_status=$? if test "$my_status" -eq 0; then :; else eval "(exit $my_status); $my_fail_exp" fi fi } # func_show_eval_locale cmd [fail_exp] # Unless opt_silent is true, then output CMD. Then, if opt_dryrun is # not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP # is given, then evaluate it. Use the saved locale for evaluation. func_show_eval_locale () { my_cmd="$1" my_fail_exp="${2-:}" ${opt_silent-false} || { func_quote_for_expand "$my_cmd" eval "func_echo $func_quote_for_expand_result" } if ${opt_dry_run-false}; then :; else eval "$lt_user_locale $my_cmd" my_status=$? eval "$lt_safe_locale" if test "$my_status" -eq 0; then :; else eval "(exit $my_status); $my_fail_exp" fi fi } # func_tr_sh # Turn $1 into a string suitable for a shell variable name. # Result is stored in $func_tr_sh_result. All characters # not in the set a-zA-Z0-9_ are replaced with '_'. Further, # if $1 begins with a digit, a '_' is prepended as well. func_tr_sh () { case $1 in [0-9]* | *[!a-zA-Z0-9_]*) func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'` ;; * ) func_tr_sh_result=$1 ;; esac } # func_version # Echo version message to standard output and exit. func_version () { $opt_debug $SED -n '/(C)/!b go :more /\./!{ N s/\n# / / b more } :go /^# '$PROGRAM' (GNU /,/# warranty; / { s/^# // s/^# *$// s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/ p }' < "$progpath" exit $? } # func_usage # Echo short help message to standard output and exit. func_usage () { $opt_debug $SED -n '/^# Usage:/,/^# *.*--help/ { s/^# // s/^# *$// s/\$progname/'$progname'/ p }' < "$progpath" echo $ECHO "run \`$progname --help | more' for full usage" exit $? } # func_help [NOEXIT] # Echo long help message to standard output and exit, # unless 'noexit' is passed as argument. func_help () { $opt_debug $SED -n '/^# Usage:/,/# Report bugs to/ { :print s/^# // s/^# *$// s*\$progname*'$progname'* s*\$host*'"$host"'* s*\$SHELL*'"$SHELL"'* s*\$LTCC*'"$LTCC"'* s*\$LTCFLAGS*'"$LTCFLAGS"'* s*\$LD*'"$LD"'* s/\$with_gnu_ld/'"$with_gnu_ld"'/ s/\$automake_version/'"`(${AUTOMAKE-automake} --version) 2>/dev/null |$SED 1q`"'/ s/\$autoconf_version/'"`(${AUTOCONF-autoconf} --version) 2>/dev/null |$SED 1q`"'/ p d } /^# .* home page:/b print /^# General help using/b print ' < "$progpath" ret=$? if test -z "$1"; then exit $ret fi } # func_missing_arg argname # Echo program name prefixed message to standard error and set global # exit_cmd. func_missing_arg () { $opt_debug func_error "missing argument for $1." 
exit_cmd=exit } # func_split_short_opt shortopt # Set func_split_short_opt_name and func_split_short_opt_arg shell # variables after splitting SHORTOPT after the 2nd character. func_split_short_opt () { my_sed_short_opt='1s/^\(..\).*$/\1/;q' my_sed_short_rest='1s/^..\(.*\)$/\1/;q' func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"` func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"` } # func_split_short_opt may be replaced by extended shell implementation # func_split_long_opt longopt # Set func_split_long_opt_name and func_split_long_opt_arg shell # variables after splitting LONGOPT at the `=' sign. func_split_long_opt () { my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q' my_sed_long_arg='1s/^--[^=]*=//' func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"` func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"` } # func_split_long_opt may be replaced by extended shell implementation exit_cmd=: magic="%%%MAGIC variable%%%" magic_exe="%%%MAGIC EXE variable%%%" # Global variables. nonopt= preserve_args= lo2o="s/\\.lo\$/.${objext}/" o2lo="s/\\.${objext}\$/.lo/" extracted_archives= extracted_serial=0 # If this variable is set in any of the actions, the command in it # will be execed at the end. This prevents here-documents from being # left over by shells. exec_cmd= # func_append var value # Append VALUE to the end of shell variable VAR. func_append () { eval "${1}=\$${1}\${2}" } # func_append may be replaced by extended shell implementation # func_append_quoted var value # Quote VALUE and append to the end of shell variable VAR, separated # by a space. func_append_quoted () { func_quote_for_eval "${2}" eval "${1}=\$${1}\\ \$func_quote_for_eval_result" } # func_append_quoted may be replaced by extended shell implementation # func_arith arithmetic-term... func_arith () { func_arith_result=`expr "${@}"` } # func_arith may be replaced by extended shell implementation # func_len string # STRING may not start with a hyphen. func_len () { func_len_result=`expr "${1}" : ".*" 2>/dev/null || echo $max_cmd_len` } # func_len may be replaced by extended shell implementation # func_lo2o object func_lo2o () { func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` } # func_lo2o may be replaced by extended shell implementation # func_xform libobj-or-source func_xform () { func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` } # func_xform may be replaced by extended shell implementation # func_fatal_configuration arg... # Echo program name prefixed message to standard error, followed by # a configuration failure hint, and exit. func_fatal_configuration () { func_error ${1+"$@"} func_error "See the $PACKAGE documentation for more information." func_fatal_error "Fatal configuration error." } # func_config # Display the configuration for all the tags in this script. func_config () { re_begincf='^# ### BEGIN LIBTOOL' re_endcf='^# ### END LIBTOOL' # Default configuration. $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath" # Now print the configurations for the tags. for tagname in $taglist; do $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath" done exit $? } # func_features # Display the features supported by this script. func_features () { echo "host: $host" if test "$build_libtool_libs" = yes; then echo "enable shared libraries" else echo "disable shared libraries" fi if test "$build_old_libs" = yes; then echo "enable static libraries" else echo "disable static libraries" fi exit $? 
} # func_enable_tag tagname # Verify that TAGNAME is valid, and either flag an error and exit, or # enable the TAGNAME tag. We also add TAGNAME to the global $taglist # variable here. func_enable_tag () { # Global variable: tagname="$1" re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$" re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$" sed_extractcf="/$re_begincf/,/$re_endcf/p" # Validate tagname. case $tagname in *[!-_A-Za-z0-9,/]*) func_fatal_error "invalid tag name: $tagname" ;; esac # Don't test for the "default" C tag, as we know it's # there but not specially marked. case $tagname in CC) ;; *) if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then taglist="$taglist $tagname" # Evaluate the configuration. Be careful to quote the path # and the sed script, to avoid splitting on whitespace, but # also don't use non-portable quotes within backquotes within # quotes we have to do it in 2 steps: extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"` eval "$extractedcf" else func_error "ignoring unknown tag $tagname" fi ;; esac } # func_check_version_match # Ensure that we are using m4 macros, and libtool script from the same # release of libtool. func_check_version_match () { if test "$package_revision" != "$macro_revision"; then if test "$VERSION" != "$macro_version"; then if test -z "$macro_version"; then cat >&2 <<_LT_EOF $progname: Version mismatch error. This is $PACKAGE $VERSION, but the $progname: definition of this LT_INIT comes from an older release. $progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION $progname: and run autoconf again. _LT_EOF else cat >&2 <<_LT_EOF $progname: Version mismatch error. This is $PACKAGE $VERSION, but the $progname: definition of this LT_INIT comes from $PACKAGE $macro_version. $progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION $progname: and run autoconf again. _LT_EOF fi else cat >&2 <<_LT_EOF $progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, $progname: but the definition of this LT_INIT comes from revision $macro_revision. $progname: You should recreate aclocal.m4 with macros from revision $package_revision $progname: of $PACKAGE $VERSION and run autoconf again. _LT_EOF fi exit $EXIT_MISMATCH fi } # Shorthand for --mode=foo, only valid as the first argument case $1 in clean|clea|cle|cl) shift; set dummy --mode clean ${1+"$@"}; shift ;; compile|compil|compi|comp|com|co|c) shift; set dummy --mode compile ${1+"$@"}; shift ;; execute|execut|execu|exec|exe|ex|e) shift; set dummy --mode execute ${1+"$@"}; shift ;; finish|finis|fini|fin|fi|f) shift; set dummy --mode finish ${1+"$@"}; shift ;; install|instal|insta|inst|ins|in|i) shift; set dummy --mode install ${1+"$@"}; shift ;; link|lin|li|l) shift; set dummy --mode link ${1+"$@"}; shift ;; uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) shift; set dummy --mode uninstall ${1+"$@"}; shift ;; esac # Option defaults: opt_debug=: opt_dry_run=false opt_config=false opt_preserve_dup_deps=false opt_features=false opt_finish=false opt_help=false opt_help_all=false opt_silent=: opt_warning=: opt_verbose=: opt_silent=false opt_verbose=false # Parse options once, thoroughly. This comes as soon as possible in the # script to make things like `--version' happen as quickly as we can. 
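# Illustrative sketch: the mode shorthand table above means an invocation
# such as the hypothetical
#   $progname compile gcc -c foo.c
# is rewritten before option parsing into
#   $progname --mode compile gcc -c foo.c
# and the listed abbreviations (`co', `lin', `i', ...) are accepted as well.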
{ # this just eases exit handling while test $# -gt 0; do opt="$1" shift case $opt in --debug|-x) opt_debug='set -x' func_echo "enabling shell trace mode" $opt_debug ;; --dry-run|--dryrun|-n) opt_dry_run=: ;; --config) opt_config=: func_config ;; --dlopen|-dlopen) optarg="$1" opt_dlopen="${opt_dlopen+$opt_dlopen }$optarg" shift ;; --preserve-dup-deps) opt_preserve_dup_deps=: ;; --features) opt_features=: func_features ;; --finish) opt_finish=: set dummy --mode finish ${1+"$@"}; shift ;; --help) opt_help=: ;; --help-all) opt_help_all=: opt_help=': help-all' ;; --mode) test $# = 0 && func_missing_arg $opt && break optarg="$1" opt_mode="$optarg" case $optarg in # Valid mode arguments: clean|compile|execute|finish|install|link|relink|uninstall) ;; # Catch anything else as an error *) func_error "invalid argument for $opt" exit_cmd=exit break ;; esac shift ;; --no-silent|--no-quiet) opt_silent=false func_append preserve_args " $opt" ;; --no-warning|--no-warn) opt_warning=false func_append preserve_args " $opt" ;; --no-verbose) opt_verbose=false func_append preserve_args " $opt" ;; --silent|--quiet) opt_silent=: func_append preserve_args " $opt" opt_verbose=false ;; --verbose|-v) opt_verbose=: func_append preserve_args " $opt" opt_silent=false ;; --tag) test $# = 0 && func_missing_arg $opt && break optarg="$1" opt_tag="$optarg" func_append preserve_args " $opt $optarg" func_enable_tag "$optarg" shift ;; -\?|-h) func_usage ;; --help) func_help ;; --version) func_version ;; # Separate optargs to long options: --*=*) func_split_long_opt "$opt" set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"} shift ;; # Separate non-argument short options: -\?*|-h*|-n*|-v*) func_split_short_opt "$opt" set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"} shift ;; --) break ;; -*) func_fatal_help "unrecognized option \`$opt'" ;; *) set dummy "$opt" ${1+"$@"}; shift; break ;; esac done # Validate options: # save first non-option argument if test "$#" -gt 0; then nonopt="$opt" shift fi # preserve --debug test "$opt_debug" = : || func_append preserve_args " --debug" case $host in *cygwin* | *mingw* | *pw32* | *cegcc*) # don't eliminate duplications in $postdeps and $predeps opt_duplicate_compiler_generated_deps=: ;; *) opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps ;; esac $opt_help || { # Sanity checks first: func_check_version_match if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then func_fatal_configuration "not configured to build any kind of library" fi # Darwin sucks eval std_shrext=\"$shrext_cmds\" # Only execute mode is allowed to have -dlopen flags. if test -n "$opt_dlopen" && test "$opt_mode" != execute; then func_error "unrecognized option \`-dlopen'" $ECHO "$help" 1>&2 exit $EXIT_FAILURE fi # Change the help message to a mode-specific one. generic_help="$help" help="Try \`$progname --help --mode=$opt_mode' for more information." } # Bail if the options were screwed $exit_cmd $EXIT_FAILURE } ## ----------- ## ## Main. ## ## ----------- ## # func_lalib_p file # True iff FILE is a libtool `.la' library or `.lo' object file. # This function is only a basic sanity check; it will hardly flush out # determined imposters. func_lalib_p () { test -f "$1" && $SED -e 4q "$1" 2>/dev/null \ | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1 } # func_lalib_unsafe_p file # True iff FILE is a libtool `.la' library or `.lo' object file. # This function implements the same check as func_lalib_p without # resorting to external programs. 
To this end, it redirects stdin and # closes it afterwards, without saving the original file descriptor. # As a safety measure, use it only where a negative result would be # fatal anyway. Works if `file' does not exist. func_lalib_unsafe_p () { lalib_p=no if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then for lalib_p_l in 1 2 3 4 do read lalib_p_line case "$lalib_p_line" in \#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;; esac done exec 0<&5 5<&- fi test "$lalib_p" = yes } # func_ltwrapper_script_p file # True iff FILE is a libtool wrapper script # This function is only a basic sanity check; it will hardly flush out # determined imposters. func_ltwrapper_script_p () { func_lalib_p "$1" } # func_ltwrapper_executable_p file # True iff FILE is a libtool wrapper executable # This function is only a basic sanity check; it will hardly flush out # determined imposters. func_ltwrapper_executable_p () { func_ltwrapper_exec_suffix= case $1 in *.exe) ;; *) func_ltwrapper_exec_suffix=.exe ;; esac $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1 } # func_ltwrapper_scriptname file # Assumes file is an ltwrapper_executable # uses $file to determine the appropriate filename for a # temporary ltwrapper_script. func_ltwrapper_scriptname () { func_dirname_and_basename "$1" "" "." func_stripname '' '.exe' "$func_basename_result" func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" } # func_ltwrapper_p file # True iff FILE is a libtool wrapper script or wrapper executable # This function is only a basic sanity check; it will hardly flush out # determined imposters. func_ltwrapper_p () { func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1" } # func_execute_cmds commands fail_cmd # Execute tilde-delimited COMMANDS. # If FAIL_CMD is given, eval that upon failure. # FAIL_CMD may read-access the current command in variable CMD! func_execute_cmds () { $opt_debug save_ifs=$IFS; IFS='~' for cmd in $1; do IFS=$save_ifs eval cmd=\"$cmd\" func_show_eval "$cmd" "${2-:}" done IFS=$save_ifs } # func_source file # Source FILE, adding directory component if necessary. # Note that it is not necessary on cygwin/mingw to append a dot to # FILE even if both FILE and FILE.exe exist: automatic-append-.exe # behavior happens only for exec(3), not for open(2)! Also, sourcing # `FILE.' does not work on cygwin managed mounts. func_source () { $opt_debug case $1 in */* | *\\*) . "$1" ;; *) . "./$1" ;; esac } # func_resolve_sysroot PATH # Replace a leading = in PATH with a sysroot. Store the result into # func_resolve_sysroot_result func_resolve_sysroot () { func_resolve_sysroot_result=$1 case $func_resolve_sysroot_result in =*) func_stripname '=' '' "$func_resolve_sysroot_result" func_resolve_sysroot_result=$lt_sysroot$func_stripname_result ;; esac } # func_replace_sysroot PATH # If PATH begins with the sysroot, replace it with = and # store the result into func_replace_sysroot_result. func_replace_sysroot () { case "$lt_sysroot:$1" in ?*:"$lt_sysroot"*) func_stripname "$lt_sysroot" '' "$1" func_replace_sysroot_result="=$func_stripname_result" ;; *) # Including no sysroot. func_replace_sysroot_result=$1 ;; esac } # func_infer_tag arg # Infer tagged configuration to use if any are available and # if one wasn't chosen via the "--tag" command line option. # Only attempt this if the compiler in the base compile # command doesn't match the default compiler. # arg is usually of the form 'gcc ...' 
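# Illustrative sketch (hypothetical compiler names): if this script was
# configured with CC=gcc but is invoked as, say,
#   $progname --mode=compile g++ -c foo.cpp
# func_infer_tag scans the `### BEGIN LIBTOOL TAG CONFIG: ...' sections for a
# tagged configuration (e.g. CXX, if one was configured in) whose CC matches
# g++; if no tag matches, it aborts and asks for an explicit `--tag'.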
func_infer_tag () { $opt_debug if test -n "$available_tags" && test -z "$tagname"; then CC_quoted= for arg in $CC; do func_append_quoted CC_quoted "$arg" done CC_expanded=`func_echo_all $CC` CC_quoted_expanded=`func_echo_all $CC_quoted` case $@ in # Blanks in the command may have been stripped by the calling shell, # but not from the CC environment variable when configure was run. " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \ " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;; # Blanks at the start of $base_compile will cause this to fail # if we don't check for them as well. *) for z in $available_tags; do if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then # Evaluate the configuration. eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" CC_quoted= for arg in $CC; do # Double-quote args containing other shell metacharacters. func_append_quoted CC_quoted "$arg" done CC_expanded=`func_echo_all $CC` CC_quoted_expanded=`func_echo_all $CC_quoted` case "$@ " in " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \ " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) # The compiler in the base compile command matches # the one in the tagged configuration. # Assume this is the tagged configuration we want. tagname=$z break ;; esac fi done # If $tagname still isn't set, then no tagged configuration # was found and let the user know that the "--tag" command # line option must be used. if test -z "$tagname"; then func_echo "unable to infer tagged configuration" func_fatal_error "specify a tag with \`--tag'" # else # func_verbose "using $tagname tagged configuration" fi ;; esac fi } # func_write_libtool_object output_name pic_name nonpic_name # Create a libtool object file (analogous to a ".la" file), # but don't create it if we're doing a dry run. func_write_libtool_object () { write_libobj=${1} if test "$build_libtool_libs" = yes; then write_lobj=\'${2}\' else write_lobj=none fi if test "$build_old_libs" = yes; then write_oldobj=\'${3}\' else write_oldobj=none fi $opt_dry_run || { cat >${write_libobj}T </dev/null` if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" | $SED -e "$lt_sed_naive_backslashify"` else func_convert_core_file_wine_to_w32_result= fi fi } # end: func_convert_core_file_wine_to_w32 # func_convert_core_path_wine_to_w32 ARG # Helper function used by path conversion functions when $build is *nix, and # $host is mingw, cygwin, or some other w32 environment. Relies on a correctly # configured wine environment available, with the winepath program in $build's # $PATH. Assumes ARG has no leading or trailing path separator characters. # # ARG is path to be converted from $build format to win32. # Result is available in $func_convert_core_path_wine_to_w32_result. # Unconvertible file (directory) names in ARG are skipped; if no directory names # are convertible, then the result may be empty. 
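# Illustrative sketch (hypothetical paths): on a *nix $build targeting a w32
# $host, a search path such as
#   /usr/lib:/opt/foo/lib
# is split on `:', each component is converted through winepath by
# func_convert_core_file_wine_to_w32, and the convertible results are
# rejoined with `;', giving something like Z:\usr\lib;Z:\opt\foo\lib when
# wine maps the *nix root to drive Z:.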
func_convert_core_path_wine_to_w32 () { $opt_debug # unfortunately, winepath doesn't convert paths, only file names func_convert_core_path_wine_to_w32_result="" if test -n "$1"; then oldIFS=$IFS IFS=: for func_convert_core_path_wine_to_w32_f in $1; do IFS=$oldIFS func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f" if test -n "$func_convert_core_file_wine_to_w32_result" ; then if test -z "$func_convert_core_path_wine_to_w32_result"; then func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result" else func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result" fi fi done IFS=$oldIFS fi } # end: func_convert_core_path_wine_to_w32 # func_cygpath ARGS... # Wrapper around calling the cygpath program via LT_CYGPATH. This is used when # when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2) # $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or # (2), returns the Cygwin file name or path in func_cygpath_result (input # file name or path is assumed to be in w32 format, as previously converted # from $build's *nix or MSYS format). In case (3), returns the w32 file name # or path in func_cygpath_result (input file name or path is assumed to be in # Cygwin format). Returns an empty string on error. # # ARGS are passed to cygpath, with the last one being the file name or path to # be converted. # # Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH # environment variable; do not put it in $PATH. func_cygpath () { $opt_debug if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null` if test "$?" -ne 0; then # on failure, ensure result is empty func_cygpath_result= fi else func_cygpath_result= func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'" fi } #end: func_cygpath # func_convert_core_msys_to_w32 ARG # Convert file name or path ARG from MSYS format to w32 format. Return # result in func_convert_core_msys_to_w32_result. func_convert_core_msys_to_w32 () { $opt_debug # awkward: cmd appends spaces to result func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null | $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` } #end: func_convert_core_msys_to_w32 # func_convert_file_check ARG1 ARG2 # Verify that ARG1 (a file name in $build format) was converted to $host # format in ARG2. Otherwise, emit an error message, but continue (resetting # func_to_host_file_result to ARG1). func_convert_file_check () { $opt_debug if test -z "$2" && test -n "$1" ; then func_error "Could not determine host file name corresponding to" func_error " \`$1'" func_error "Continuing, but uninstalled executables may not work." # Fallback: func_to_host_file_result="$1" fi } # end func_convert_file_check # func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH # Verify that FROM_PATH (a path in $build format) was converted to $host # format in TO_PATH. Otherwise, emit an error message, but continue, resetting # func_to_host_file_result to a simplistic fallback value (see below). func_convert_path_check () { $opt_debug if test -z "$4" && test -n "$3"; then func_error "Could not determine the host path corresponding to" func_error " \`$3'" func_error "Continuing, but uninstalled executables may not work." # Fallback. This is a deliberately simplistic "conversion" and # should not be "improved". See libtool.info. 
if test "x$1" != "x$2"; then lt_replace_pathsep_chars="s|$1|$2|g" func_to_host_path_result=`echo "$3" | $SED -e "$lt_replace_pathsep_chars"` else func_to_host_path_result="$3" fi fi } # end func_convert_path_check # func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG # Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT # and appending REPL if ORIG matches BACKPAT. func_convert_path_front_back_pathsep () { $opt_debug case $4 in $1 ) func_to_host_path_result="$3$func_to_host_path_result" ;; esac case $4 in $2 ) func_append func_to_host_path_result "$3" ;; esac } # end func_convert_path_front_back_pathsep ################################################## # $build to $host FILE NAME CONVERSION FUNCTIONS # ################################################## # invoked via `$to_host_file_cmd ARG' # # In each case, ARG is the path to be converted from $build to $host format. # Result will be available in $func_to_host_file_result. # func_to_host_file ARG # Converts the file name ARG from $build format to $host format. Return result # in func_to_host_file_result. func_to_host_file () { $opt_debug $to_host_file_cmd "$1" } # end func_to_host_file # func_to_tool_file ARG LAZY # converts the file name ARG from $build format to toolchain format. Return # result in func_to_tool_file_result. If the conversion in use is listed # in (the comma separated) LAZY, no conversion takes place. func_to_tool_file () { $opt_debug case ,$2, in *,"$to_tool_file_cmd",*) func_to_tool_file_result=$1 ;; *) $to_tool_file_cmd "$1" func_to_tool_file_result=$func_to_host_file_result ;; esac } # end func_to_tool_file # func_convert_file_noop ARG # Copy ARG to func_to_host_file_result. func_convert_file_noop () { func_to_host_file_result="$1" } # end func_convert_file_noop # func_convert_file_msys_to_w32 ARG # Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic # conversion to w32 is not available inside the cwrapper. Returns result in # func_to_host_file_result. func_convert_file_msys_to_w32 () { $opt_debug func_to_host_file_result="$1" if test -n "$1"; then func_convert_core_msys_to_w32 "$1" func_to_host_file_result="$func_convert_core_msys_to_w32_result" fi func_convert_file_check "$1" "$func_to_host_file_result" } # end func_convert_file_msys_to_w32 # func_convert_file_cygwin_to_w32 ARG # Convert file name ARG from Cygwin to w32 format. Returns result in # func_to_host_file_result. func_convert_file_cygwin_to_w32 () { $opt_debug func_to_host_file_result="$1" if test -n "$1"; then # because $build is cygwin, we call "the" cygpath in $PATH; no need to use # LT_CYGPATH in this case. func_to_host_file_result=`cygpath -m "$1"` fi func_convert_file_check "$1" "$func_to_host_file_result" } # end func_convert_file_cygwin_to_w32 # func_convert_file_nix_to_w32 ARG # Convert file name ARG from *nix to w32 format. Requires a wine environment # and a working winepath. Returns result in func_to_host_file_result. func_convert_file_nix_to_w32 () { $opt_debug func_to_host_file_result="$1" if test -n "$1"; then func_convert_core_file_wine_to_w32 "$1" func_to_host_file_result="$func_convert_core_file_wine_to_w32_result" fi func_convert_file_check "$1" "$func_to_host_file_result" } # end func_convert_file_nix_to_w32 # func_convert_file_msys_to_cygwin ARG # Convert file name ARG from MSYS to Cygwin format. Requires LT_CYGPATH set. # Returns result in func_to_host_file_result. 
func_convert_file_msys_to_cygwin () { $opt_debug func_to_host_file_result="$1" if test -n "$1"; then func_convert_core_msys_to_w32 "$1" func_cygpath -u "$func_convert_core_msys_to_w32_result" func_to_host_file_result="$func_cygpath_result" fi func_convert_file_check "$1" "$func_to_host_file_result" } # end func_convert_file_msys_to_cygwin # func_convert_file_nix_to_cygwin ARG # Convert file name ARG from *nix to Cygwin format. Requires Cygwin installed # in a wine environment, working winepath, and LT_CYGPATH set. Returns result # in func_to_host_file_result. func_convert_file_nix_to_cygwin () { $opt_debug func_to_host_file_result="$1" if test -n "$1"; then # convert from *nix to w32, then use cygpath to convert from w32 to cygwin. func_convert_core_file_wine_to_w32 "$1" func_cygpath -u "$func_convert_core_file_wine_to_w32_result" func_to_host_file_result="$func_cygpath_result" fi func_convert_file_check "$1" "$func_to_host_file_result" } # end func_convert_file_nix_to_cygwin ############################################# # $build to $host PATH CONVERSION FUNCTIONS # ############################################# # invoked via `$to_host_path_cmd ARG' # # In each case, ARG is the path to be converted from $build to $host format. # The result will be available in $func_to_host_path_result. # # Path separators are also converted from $build format to $host format. If # ARG begins or ends with a path separator character, it is preserved (but # converted to $host format) on output. # # All path conversion functions are named using the following convention: # file name conversion function : func_convert_file_X_to_Y () # path conversion function : func_convert_path_X_to_Y () # where, for any given $build/$host combination the 'X_to_Y' value is the # same. If conversion functions are added for new $build/$host combinations, # the two new functions must follow this pattern, or func_init_to_host_path_cmd # will break. # func_init_to_host_path_cmd # Ensures that function "pointer" variable $to_host_path_cmd is set to the # appropriate value, based on the value of $to_host_file_cmd. to_host_path_cmd= func_init_to_host_path_cmd () { $opt_debug if test -z "$to_host_path_cmd"; then func_stripname 'func_convert_file_' '' "$to_host_file_cmd" to_host_path_cmd="func_convert_path_${func_stripname_result}" fi } # func_to_host_path ARG # Converts the path ARG from $build format to $host format. Return result # in func_to_host_path_result. func_to_host_path () { $opt_debug func_init_to_host_path_cmd $to_host_path_cmd "$1" } # end func_to_host_path # func_convert_path_noop ARG # Copy ARG to func_to_host_path_result. func_convert_path_noop () { func_to_host_path_result="$1" } # end func_convert_path_noop # func_convert_path_msys_to_w32 ARG # Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic # conversion to w32 is not available inside the cwrapper. Returns result in # func_to_host_path_result. func_convert_path_msys_to_w32 () { $opt_debug func_to_host_path_result="$1" if test -n "$1"; then # Remove leading and trailing path separator characters from ARG. MSYS # behavior is inconsistent here; cygpath turns them into '.;' and ';.'; # and winepath ignores them completely. 
func_stripname : : "$1" func_to_host_path_tmp1=$func_stripname_result func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" func_to_host_path_result="$func_convert_core_msys_to_w32_result" func_convert_path_check : ";" \ "$func_to_host_path_tmp1" "$func_to_host_path_result" func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" fi } # end func_convert_path_msys_to_w32 # func_convert_path_cygwin_to_w32 ARG # Convert path ARG from Cygwin to w32 format. Returns result in # func_to_host_file_result. func_convert_path_cygwin_to_w32 () { $opt_debug func_to_host_path_result="$1" if test -n "$1"; then # See func_convert_path_msys_to_w32: func_stripname : : "$1" func_to_host_path_tmp1=$func_stripname_result func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"` func_convert_path_check : ";" \ "$func_to_host_path_tmp1" "$func_to_host_path_result" func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" fi } # end func_convert_path_cygwin_to_w32 # func_convert_path_nix_to_w32 ARG # Convert path ARG from *nix to w32 format. Requires a wine environment and # a working winepath. Returns result in func_to_host_file_result. func_convert_path_nix_to_w32 () { $opt_debug func_to_host_path_result="$1" if test -n "$1"; then # See func_convert_path_msys_to_w32: func_stripname : : "$1" func_to_host_path_tmp1=$func_stripname_result func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" func_to_host_path_result="$func_convert_core_path_wine_to_w32_result" func_convert_path_check : ";" \ "$func_to_host_path_tmp1" "$func_to_host_path_result" func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" fi } # end func_convert_path_nix_to_w32 # func_convert_path_msys_to_cygwin ARG # Convert path ARG from MSYS to Cygwin format. Requires LT_CYGPATH set. # Returns result in func_to_host_file_result. func_convert_path_msys_to_cygwin () { $opt_debug func_to_host_path_result="$1" if test -n "$1"; then # See func_convert_path_msys_to_w32: func_stripname : : "$1" func_to_host_path_tmp1=$func_stripname_result func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" func_cygpath -u -p "$func_convert_core_msys_to_w32_result" func_to_host_path_result="$func_cygpath_result" func_convert_path_check : : \ "$func_to_host_path_tmp1" "$func_to_host_path_result" func_convert_path_front_back_pathsep ":*" "*:" : "$1" fi } # end func_convert_path_msys_to_cygwin # func_convert_path_nix_to_cygwin ARG # Convert path ARG from *nix to Cygwin format. Requires Cygwin installed in a # a wine environment, working winepath, and LT_CYGPATH set. Returns result in # func_to_host_file_result. func_convert_path_nix_to_cygwin () { $opt_debug func_to_host_path_result="$1" if test -n "$1"; then # Remove leading and trailing path separator characters from # ARG. msys behavior is inconsistent here, cygpath turns them # into '.;' and ';.', and winepath ignores them completely. func_stripname : : "$1" func_to_host_path_tmp1=$func_stripname_result func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result" func_to_host_path_result="$func_cygpath_result" func_convert_path_check : : \ "$func_to_host_path_tmp1" "$func_to_host_path_result" func_convert_path_front_back_pathsep ":*" "*:" : "$1" fi } # end func_convert_path_nix_to_cygwin # func_mode_compile arg... func_mode_compile () { $opt_debug # Get the compilation command and the source file. 
base_compile= srcfile="$nonopt" # always keep a non-empty value in "srcfile" suppress_opt=yes suppress_output= arg_mode=normal libobj= later= pie_flag= for arg do case $arg_mode in arg ) # do not "continue". Instead, add this to base_compile lastarg="$arg" arg_mode=normal ;; target ) libobj="$arg" arg_mode=normal continue ;; normal ) # Accept any command-line options. case $arg in -o) test -n "$libobj" && \ func_fatal_error "you cannot specify \`-o' more than once" arg_mode=target continue ;; -pie | -fpie | -fPIE) func_append pie_flag " $arg" continue ;; -shared | -static | -prefer-pic | -prefer-non-pic) func_append later " $arg" continue ;; -no-suppress) suppress_opt=no continue ;; -Xcompiler) arg_mode=arg # the next one goes into the "base_compile" arg list continue # The current "srcfile" will either be retained or ;; # replaced later. I would guess that would be a bug. -Wc,*) func_stripname '-Wc,' '' "$arg" args=$func_stripname_result lastarg= save_ifs="$IFS"; IFS=',' for arg in $args; do IFS="$save_ifs" func_append_quoted lastarg "$arg" done IFS="$save_ifs" func_stripname ' ' '' "$lastarg" lastarg=$func_stripname_result # Add the arguments to base_compile. func_append base_compile " $lastarg" continue ;; *) # Accept the current argument as the source file. # The previous "srcfile" becomes the current argument. # lastarg="$srcfile" srcfile="$arg" ;; esac # case $arg ;; esac # case $arg_mode # Aesthetically quote the previous argument. func_append_quoted base_compile "$lastarg" done # for arg case $arg_mode in arg) func_fatal_error "you must specify an argument for -Xcompile" ;; target) func_fatal_error "you must specify a target with \`-o'" ;; *) # Get the name of the library object. test -z "$libobj" && { func_basename "$srcfile" libobj="$func_basename_result" } ;; esac # Recognize several different file suffixes. # If the user specifies -o file.o, it is replaced with file.lo case $libobj in *.[cCFSifmso] | \ *.ada | *.adb | *.ads | *.asm | \ *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \ *.[fF][09]? | *.for | *.java | *.go | *.obj | *.sx | *.cu | *.cup) func_xform "$libobj" libobj=$func_xform_result ;; esac case $libobj in *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;; *) func_fatal_error "cannot determine name of library object from \`$libobj'" ;; esac func_infer_tag $base_compile for arg in $later; do case $arg in -shared) test "$build_libtool_libs" != yes && \ func_fatal_configuration "can not build a shared library" build_old_libs=no continue ;; -static) build_libtool_libs=no build_old_libs=yes continue ;; -prefer-pic) pic_mode=yes continue ;; -prefer-non-pic) pic_mode=no continue ;; esac done func_quote_for_eval "$libobj" test "X$libobj" != "X$func_quote_for_eval_result" \ && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"' &()|`$[]' \ && func_warning "libobj name \`$libobj' may not contain shell special characters." func_dirname_and_basename "$obj" "/" "" objname="$func_basename_result" xdir="$func_dirname_result" lobj=${xdir}$objdir/$objname test -z "$base_compile" && \ func_fatal_help "you must specify a compilation command" # Delete any leftover library objects. 
if test "$build_old_libs" = yes; then removelist="$obj $lobj $libobj ${libobj}T" else removelist="$lobj $libobj ${libobj}T" fi # On Cygwin there's no "real" PIC flag so we must build both object types case $host_os in cygwin* | mingw* | pw32* | os2* | cegcc*) pic_mode=default ;; esac if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then # non-PIC code in shared libraries is not supported pic_mode=default fi # Calculate the filename of the output object if compiler does # not support -o with -c if test "$compiler_c_o" = no; then output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.${objext} lockfile="$output_obj.lock" else output_obj= need_locks=no lockfile= fi # Lock this critical section if it is needed # We use this script file to make the link, it avoids creating a new file if test "$need_locks" = yes; then until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do func_echo "Waiting for $lockfile to be removed" sleep 2 done elif test "$need_locks" = warn; then if test -f "$lockfile"; then $ECHO "\ *** ERROR, $lockfile exists and contains: `cat $lockfile 2>/dev/null` This indicates that another process is trying to use the same temporary object file, and libtool could not work around it because your compiler does not support \`-c' and \`-o' together. If you repeat this compilation, it may succeed, by chance, but you had better avoid parallel builds (make -j) in this platform, or get a better compiler." $opt_dry_run || $RM $removelist exit $EXIT_FAILURE fi func_append removelist " $output_obj" $ECHO "$srcfile" > "$lockfile" fi $opt_dry_run || $RM $removelist func_append removelist " $lockfile" trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15 func_to_tool_file "$srcfile" func_convert_file_msys_to_w32 srcfile=$func_to_tool_file_result func_quote_for_eval "$srcfile" qsrcfile=$func_quote_for_eval_result # Only build a PIC object if we are building libtool libraries. if test "$build_libtool_libs" = yes; then # Without this assignment, base_compile gets emptied. fbsd_hideous_sh_bug=$base_compile if test "$pic_mode" != no; then command="$base_compile $qsrcfile $pic_flag" else # Don't build PIC code command="$base_compile $qsrcfile" fi func_mkdir_p "$xdir$objdir" if test -z "$output_obj"; then # Place PIC objects in $objdir func_append command " -o $lobj" fi func_show_eval_locale "$command" \ 'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE' if test "$need_locks" = warn && test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then $ECHO "\ *** ERROR, $lockfile contains: `cat $lockfile 2>/dev/null` but it should contain: $srcfile This indicates that another process is trying to use the same temporary object file, and libtool could not work around it because your compiler does not support \`-c' and \`-o' together. If you repeat this compilation, it may succeed, by chance, but you had better avoid parallel builds (make -j) in this platform, or get a better compiler." $opt_dry_run || $RM $removelist exit $EXIT_FAILURE fi # Just move the object if needed, then go on to compile the next one if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then func_show_eval '$MV "$output_obj" "$lobj"' \ 'error=$?; $opt_dry_run || $RM $removelist; exit $error' fi # Allow error messages only from the first compilation. if test "$suppress_opt" = yes; then suppress_output=' >/dev/null 2>&1' fi fi # Only build a position-dependent object if we build old libraries. 
if test "$build_old_libs" = yes; then if test "$pic_mode" != yes; then # Don't build PIC code command="$base_compile $qsrcfile$pie_flag" else command="$base_compile $qsrcfile $pic_flag" fi if test "$compiler_c_o" = yes; then func_append command " -o $obj" fi # Suppress compiler output if we already did a PIC compilation. func_append command "$suppress_output" func_show_eval_locale "$command" \ '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' if test "$need_locks" = warn && test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then $ECHO "\ *** ERROR, $lockfile contains: `cat $lockfile 2>/dev/null` but it should contain: $srcfile This indicates that another process is trying to use the same temporary object file, and libtool could not work around it because your compiler does not support \`-c' and \`-o' together. If you repeat this compilation, it may succeed, by chance, but you had better avoid parallel builds (make -j) in this platform, or get a better compiler." $opt_dry_run || $RM $removelist exit $EXIT_FAILURE fi # Just move the object if needed if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then func_show_eval '$MV "$output_obj" "$obj"' \ 'error=$?; $opt_dry_run || $RM $removelist; exit $error' fi fi $opt_dry_run || { func_write_libtool_object "$libobj" "$objdir/$objname" "$objname" # Unlock the critical section if it was locked if test "$need_locks" != no; then removelist=$lockfile $RM "$lockfile" fi } exit $EXIT_SUCCESS } $opt_help || { test "$opt_mode" = compile && func_mode_compile ${1+"$@"} } func_mode_help () { # We need to display help for each of the modes. case $opt_mode in "") # Generic help is extracted from the usage comments # at the start of this file. func_help ;; clean) $ECHO \ "Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE... Remove files from the build directory. RM is the name of the program to use to delete files associated with each FILE (typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed to RM. If FILE is a libtool library, object or program, all the files associated with it are deleted. Otherwise, only FILE itself is deleted using RM." ;; compile) $ECHO \ "Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE Compile a source file into a libtool library object. This mode accepts the following additional options: -o OUTPUT-FILE set the output file name to OUTPUT-FILE -no-suppress do not suppress compiler output for multiple passes -prefer-pic try to build PIC objects only -prefer-non-pic try to build non-PIC objects only -shared do not build a \`.o' file suitable for static linking -static only build a \`.o' file suitable for static linking -Wc,FLAG pass FLAG directly to the compiler COMPILE-COMMAND is a command to be used in creating a \`standard' object file from the given SOURCEFILE. The output file name is determined by removing the directory component from SOURCEFILE, then substituting the C source code suffix \`.c' with the library object suffix, \`.lo'." ;; execute) $ECHO \ "Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]... Automatically set library path, then run a program. This mode accepts the following additional options: -dlopen FILE add the directory containing FILE to the library path This mode sets the library path environment variable according to \`-dlopen' flags. 
If any of the ARGS are libtool executable wrappers, then they are translated into their corresponding uninstalled binary, and any of their required library directories are added to the library path. Then, COMMAND is executed, with ARGS as arguments." ;; finish) $ECHO \ "Usage: $progname [OPTION]... --mode=finish [LIBDIR]... Complete the installation of libtool libraries. Each LIBDIR is a directory that contains libtool libraries. The commands that this mode executes may require superuser privileges. Use the \`--dry-run' option if you just want to see what would be executed." ;; install) $ECHO \ "Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND... Install executables or libraries. INSTALL-COMMAND is the installation command. The first component should be either the \`install' or \`cp' program. The following components of INSTALL-COMMAND are treated specially: -inst-prefix-dir PREFIX-DIR Use PREFIX-DIR as a staging area for installation The rest of the components are interpreted as arguments to that command (only BSD-compatible install options are recognized)." ;; link) $ECHO \ "Usage: $progname [OPTION]... --mode=link LINK-COMMAND... Link object files or libraries together to form another library, or to create an executable program. LINK-COMMAND is a command using the C compiler that you would use to create a program from several object files. The following components of LINK-COMMAND are treated specially: -all-static do not do any dynamic linking at all -avoid-version do not add a version suffix if possible -bindir BINDIR specify path to binaries directory (for systems where libraries must be found in the PATH setting at runtime) -dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3) -export-symbols SYMFILE try to export only the symbols listed in SYMFILE -export-symbols-regex REGEX try to export only the symbols matching REGEX -LLIBDIR search LIBDIR for required installed libraries -lNAME OUTPUT-FILE requires the installed library libNAME -module build a library that can dlopened -no-fast-install disable the fast-install mode -no-install link a not-installable executable -no-undefined declare that a library does not refer to external symbols -o OUTPUT-FILE create OUTPUT-FILE from the specified objects -objectlist FILE Use a list of object files found in FILE to specify objects -precious-files-regex REGEX don't remove output files matching REGEX -release RELEASE specify package release information -rpath LIBDIR the created library will eventually be installed in LIBDIR -R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries -shared only do dynamic linking of libtool libraries -shrext SUFFIX override the standard shared library file extension -static do not do any dynamic linking of uninstalled libtool libraries -static-libtool-libs do not do any dynamic linking of libtool libraries -version-info CURRENT[:REVISION[:AGE]] specify library version info [each variable defaults to 0] -weak LIBNAME declare that the target provides the LIBNAME interface -Wc,FLAG -Xcompiler FLAG pass linker-specific FLAG directly to the compiler -Wl,FLAG -Xlinker FLAG pass linker-specific FLAG directly to the linker -XCClinker FLAG pass link-specific FLAG to the compiler driver (CC) All other options (arguments beginning with \`-') are ignored. Every other argument is treated as a filename. 
Files ending in \`.la' are treated as uninstalled libtool libraries, other files are standard or library object files. If the OUTPUT-FILE ends in \`.la', then a libtool library is created, only library objects (\`.lo' files) may be specified, and \`-rpath' is required, except when creating a convenience library. If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created using \`ar' and \`ranlib', or on Windows using \`lib'. If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file is created, otherwise an executable program is created." ;; uninstall) $ECHO \ "Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE... Remove libraries from an installation directory. RM is the name of the program to use to delete files associated with each FILE (typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed to RM. If FILE is a libtool library, all the files associated with it are deleted. Otherwise, only FILE itself is deleted using RM." ;; *) func_fatal_help "invalid operation mode \`$opt_mode'" ;; esac echo $ECHO "Try \`$progname --help' for more information about other modes." } # Now that we've collected a possible --mode arg, show help if necessary if $opt_help; then if test "$opt_help" = :; then func_mode_help else { func_help noexit for opt_mode in compile link execute install finish uninstall clean; do func_mode_help done } | sed -n '1p; 2,$s/^Usage:/ or: /p' { func_help noexit for opt_mode in compile link execute install finish uninstall clean; do echo func_mode_help done } | sed '1d /^When reporting/,/^Report/{ H d } $x /information about other modes/d /more detailed .*MODE/d s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/' fi exit $? fi # func_mode_execute arg... func_mode_execute () { $opt_debug # The first argument is the command name. cmd="$nonopt" test -z "$cmd" && \ func_fatal_help "you must specify a COMMAND" # Handle -dlopen flags immediately. for file in $opt_dlopen; do test -f "$file" \ || func_fatal_help "\`$file' is not a file" dir= case $file in *.la) func_resolve_sysroot "$file" file=$func_resolve_sysroot_result # Check to see that this really is a libtool archive. func_lalib_unsafe_p "$file" \ || func_fatal_help "\`$lib' is not a valid libtool archive" # Read the libtool library. dlname= library_names= func_source "$file" # Skip this library if it cannot be dlopened. if test -z "$dlname"; then # Warn if it was a shared library. test -n "$library_names" && \ func_warning "\`$file' was not linked with \`-export-dynamic'" continue fi func_dirname "$file" "" "." dir="$func_dirname_result" if test -f "$dir/$objdir/$dlname"; then func_append dir "/$objdir" else if test ! -f "$dir/$dlname"; then func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" fi fi ;; *.lo) # Just add the directory containing the .lo file. func_dirname "$file" "" "." dir="$func_dirname_result" ;; *) func_warning "\`-dlopen' is ignored for non-libtool libraries and objects" continue ;; esac # Get the absolute pathname. absdir=`cd "$dir" && pwd` test -n "$absdir" && dir="$absdir" # Now add the directory to shlibpath_var. if eval "test -z \"\$$shlibpath_var\""; then eval "$shlibpath_var=\"\$dir\"" else eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\"" fi done # This variable tells wrapper scripts just to set shlibpath_var # rather than running their programs. libtool_execute_magic="$magic" # Check if any of the arguments is a wrapper script. 
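# Illustrative sketch (hypothetical program names): an invocation such as
#   $progname --mode=execute gdb ./myprog
# where ./myprog is a libtool wrapper produced at link time, sources the
# wrapper to find the real uninstalled binary ($progdir/$program), swaps that
# path into the argument list below, exports $shlibpath_var when it has been
# populated (e.g. by `-dlopen'), and finally execs gdb on the real binary.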
args= for file do case $file in -* | *.la | *.lo ) ;; *) # Do a test to see if this is really a libtool program. if func_ltwrapper_script_p "$file"; then func_source "$file" # Transform arg to wrapped name. file="$progdir/$program" elif func_ltwrapper_executable_p "$file"; then func_ltwrapper_scriptname "$file" func_source "$func_ltwrapper_scriptname_result" # Transform arg to wrapped name. file="$progdir/$program" fi ;; esac # Quote arguments (to preserve shell metacharacters). func_append_quoted args "$file" done if test "X$opt_dry_run" = Xfalse; then if test -n "$shlibpath_var"; then # Export the shlibpath_var. eval "export $shlibpath_var" fi # Restore saved environment variables for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES do eval "if test \"\${save_$lt_var+set}\" = set; then $lt_var=\$save_$lt_var; export $lt_var else $lt_unset $lt_var fi" done # Now prepare to actually exec the command. exec_cmd="\$cmd$args" else # Display what would be done. if test -n "$shlibpath_var"; then eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\"" echo "export $shlibpath_var" fi $ECHO "$cmd$args" exit $EXIT_SUCCESS fi } test "$opt_mode" = execute && func_mode_execute ${1+"$@"} # func_mode_finish arg... func_mode_finish () { $opt_debug libs= libdirs= admincmds= for opt in "$nonopt" ${1+"$@"} do if test -d "$opt"; then func_append libdirs " $opt" elif test -f "$opt"; then if func_lalib_unsafe_p "$opt"; then func_append libs " $opt" else func_warning "\`$opt' is not a valid libtool archive" fi else func_fatal_error "invalid argument \`$opt'" fi done if test -n "$libs"; then if test -n "$lt_sysroot"; then sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"` sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;" else sysroot_cmd= fi # Remove sysroot references if $opt_dry_run; then for lib in $libs; do echo "removing references to $lt_sysroot and \`=' prefixes from $lib" done else tmpdir=`func_mktempdir` for lib in $libs; do sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \ > $tmpdir/tmp-la mv -f $tmpdir/tmp-la $lib done ${RM}r "$tmpdir" fi fi if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then for libdir in $libdirs; do if test -n "$finish_cmds"; then # Do each command in the finish commands. func_execute_cmds "$finish_cmds" 'admincmds="$admincmds '"$cmd"'"' fi if test -n "$finish_eval"; then # Do the single finish_eval. eval cmds=\"$finish_eval\" $opt_dry_run || eval "$cmds" || func_append admincmds " $cmds" fi done fi # Exit here if they wanted silent mode. 
$opt_silent && exit $EXIT_SUCCESS if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then echo "----------------------------------------------------------------------" echo "Libraries have been installed in:" for libdir in $libdirs; do $ECHO " $libdir" done echo echo "If you ever happen to want to link against installed libraries" echo "in a given directory, LIBDIR, you must either use libtool, and" echo "specify the full pathname of the library, or use the \`-LLIBDIR'" echo "flag during linking and do at least one of the following:" if test -n "$shlibpath_var"; then echo " - add LIBDIR to the \`$shlibpath_var' environment variable" echo " during execution" fi if test -n "$runpath_var"; then echo " - add LIBDIR to the \`$runpath_var' environment variable" echo " during linking" fi if test -n "$hardcode_libdir_flag_spec"; then libdir=LIBDIR eval flag=\"$hardcode_libdir_flag_spec\" $ECHO " - use the \`$flag' linker flag" fi if test -n "$admincmds"; then $ECHO " - have your system administrator run these commands:$admincmds" fi if test -f /etc/ld.so.conf; then echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" fi echo echo "See any operating system documentation about shared libraries for" case $host in solaris2.[6789]|solaris2.1[0-9]) echo "more information, such as the ld(1), crle(1) and ld.so(8) manual" echo "pages." ;; *) echo "more information, such as the ld(1) and ld.so(8) manual pages." ;; esac echo "----------------------------------------------------------------------" fi exit $EXIT_SUCCESS } test "$opt_mode" = finish && func_mode_finish ${1+"$@"} # func_mode_install arg... func_mode_install () { $opt_debug # There may be an optional sh(1) argument at the beginning of # install_prog (especially on Windows NT). if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh || # Allow the use of GNU shtool's install command. case $nonopt in *shtool*) :;; *) false;; esac; then # Aesthetically quote it. func_quote_for_eval "$nonopt" install_prog="$func_quote_for_eval_result " arg=$1 shift else install_prog= arg=$nonopt fi # The real first argument should be the name of the installation program. # Aesthetically quote it. func_quote_for_eval "$arg" func_append install_prog "$func_quote_for_eval_result" install_shared_prog=$install_prog case " $install_prog " in *[\\\ /]cp\ *) install_cp=: ;; *) install_cp=false ;; esac # We need to accept at least all the BSD install flags. dest= files= opts= prev= install_type= isdir=no stripme= no_mode=: for arg do arg2= if test -n "$dest"; then func_append files " $dest" dest=$arg continue fi case $arg in -d) isdir=yes ;; -f) if $install_cp; then :; else prev=$arg fi ;; -g | -m | -o) prev=$arg ;; -s) stripme=" -s" continue ;; -*) ;; *) # If the previous option needed an argument, then skip it. if test -n "$prev"; then if test "x$prev" = x-m && test -n "$install_override_mode"; then arg2=$install_override_mode no_mode=false fi prev= else dest=$arg continue fi ;; esac # Aesthetically quote the argument. 
func_quote_for_eval "$arg" func_append install_prog " $func_quote_for_eval_result" if test -n "$arg2"; then func_quote_for_eval "$arg2" fi func_append install_shared_prog " $func_quote_for_eval_result" done test -z "$install_prog" && \ func_fatal_help "you must specify an install program" test -n "$prev" && \ func_fatal_help "the \`$prev' option requires an argument" if test -n "$install_override_mode" && $no_mode; then if $install_cp; then :; else func_quote_for_eval "$install_override_mode" func_append install_shared_prog " -m $func_quote_for_eval_result" fi fi if test -z "$files"; then if test -z "$dest"; then func_fatal_help "no file or destination specified" else func_fatal_help "you must specify a destination" fi fi # Strip any trailing slash from the destination. func_stripname '' '/' "$dest" dest=$func_stripname_result # Check to see that the destination is a directory. test -d "$dest" && isdir=yes if test "$isdir" = yes; then destdir="$dest" destname= else func_dirname_and_basename "$dest" "" "." destdir="$func_dirname_result" destname="$func_basename_result" # Not a directory, so check to see that there is only one file specified. set dummy $files; shift test "$#" -gt 1 && \ func_fatal_help "\`$dest' is not a directory" fi case $destdir in [\\/]* | [A-Za-z]:[\\/]*) ;; *) for file in $files; do case $file in *.lo) ;; *) func_fatal_help "\`$destdir' must be an absolute directory name" ;; esac done ;; esac # This variable tells wrapper scripts just to set variables rather # than running their programs. libtool_install_magic="$magic" staticlibs= future_libdirs= current_libdirs= for file in $files; do # Do each installation. case $file in *.$libext) # Do the static libraries later. func_append staticlibs " $file" ;; *.la) func_resolve_sysroot "$file" file=$func_resolve_sysroot_result # Check to see that this really is a libtool archive. func_lalib_unsafe_p "$file" \ || func_fatal_help "\`$file' is not a valid libtool archive" library_names= old_library= relink_command= func_source "$file" # Add the libdir to current_libdirs if it is the destination. if test "X$destdir" = "X$libdir"; then case "$current_libdirs " in *" $libdir "*) ;; *) func_append current_libdirs " $libdir" ;; esac else # Note the libdir as a future libdir. case "$future_libdirs " in *" $libdir "*) ;; *) func_append future_libdirs " $libdir" ;; esac fi func_dirname "$file" "/" "" dir="$func_dirname_result" func_append dir "$objdir" if test -n "$relink_command"; then # Determine the prefix the user has applied to our future dir. inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"` # Don't allow the user to place us outside of our expected # location b/c this prevents finding dependent libraries that # are installed to the same prefix. # At present, this check doesn't affect windows .dll's that # are installed into $libdir/../bin (currently, that works fine) # but it's something to keep an eye on. test "$inst_prefix_dir" = "$destdir" && \ func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir" if test -n "$inst_prefix_dir"; then # Stick the inst_prefix_dir data into the link command. relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"` else relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"` fi func_warning "relinking \`$file'" func_show_eval "$relink_command" \ 'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"' fi # See the names of the shared library. 
set dummy $library_names; shift if test -n "$1"; then realname="$1" shift srcname="$realname" test -n "$relink_command" && srcname="$realname"T # Install the shared library and build the symlinks. func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \ 'exit $?' tstripme="$stripme" case $host_os in cygwin* | mingw* | pw32* | cegcc*) case $realname in *.dll.a) tstripme="" ;; esac ;; esac if test -n "$tstripme" && test -n "$striplib"; then func_show_eval "$striplib $destdir/$realname" 'exit $?' fi if test "$#" -gt 0; then # Delete the old symlinks, and create new ones. # Try `ln -sf' first, because the `ln' binary might depend on # the symlink we replace! Solaris /bin/ln does not understand -f, # so we also need to try rm && ln -s. for linkname do test "$linkname" != "$realname" \ && func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })" done fi # Do each command in the postinstall commands. lib="$destdir/$realname" func_execute_cmds "$postinstall_cmds" 'exit $?' fi # Install the pseudo-library for information purposes. func_basename "$file" name="$func_basename_result" instname="$dir/$name"i func_show_eval "$install_prog $instname $destdir/$name" 'exit $?' # Maybe install the static library, too. test -n "$old_library" && func_append staticlibs " $dir/$old_library" ;; *.lo) # Install (i.e. copy) a libtool object. # Figure out destination file name, if it wasn't already specified. if test -n "$destname"; then destfile="$destdir/$destname" else func_basename "$file" destfile="$func_basename_result" destfile="$destdir/$destfile" fi # Deduce the name of the destination old-style object file. case $destfile in *.lo) func_lo2o "$destfile" staticdest=$func_lo2o_result ;; *.$objext) staticdest="$destfile" destfile= ;; *) func_fatal_help "cannot copy a libtool object to \`$destfile'" ;; esac # Install the libtool object if requested. test -n "$destfile" && \ func_show_eval "$install_prog $file $destfile" 'exit $?' # Install the old object if enabled. if test "$build_old_libs" = yes; then # Deduce the name of the old-style object file. func_lo2o "$file" staticobj=$func_lo2o_result func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?' fi exit $EXIT_SUCCESS ;; *) # Figure out destination file name, if it wasn't already specified. if test -n "$destname"; then destfile="$destdir/$destname" else func_basename "$file" destfile="$func_basename_result" destfile="$destdir/$destfile" fi # If the file is missing, and there is a .exe on the end, strip it # because it is most likely a libtool script we actually want to # install stripped_ext="" case $file in *.exe) if test ! -f "$file"; then func_stripname '' '.exe' "$file" file=$func_stripname_result stripped_ext=".exe" fi ;; esac # Do a test to see if this is really a libtool program. case $host in *cygwin* | *mingw*) if func_ltwrapper_executable_p "$file"; then func_ltwrapper_scriptname "$file" wrapper=$func_ltwrapper_scriptname_result else func_stripname '' '.exe' "$file" wrapper=$func_stripname_result fi ;; *) wrapper=$file ;; esac if func_ltwrapper_script_p "$wrapper"; then notinst_deplibs= relink_command= func_source "$wrapper" # Check the variables that should have been set. test -z "$generated_by_libtool_version" && \ func_fatal_error "invalid libtool wrapper script \`$wrapper'" finalize=yes for lib in $notinst_deplibs; do # Check to see that each library is installed. 
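# Editor's note (illustrative sketch only, not part of the generated script):
# install mode wraps an ordinary install program. With a hypothetical
# libfoo.la, a typical call that exercises the branches above would be:
#
#   libtool --mode=install /usr/bin/install -c libfoo.la /usr/local/lib/libfoo.la
#
# libtool then installs the shared library, its symlinks and the .la file,
# and warns the caller to run `libtool --finish LIBDIR' for any future libdirs.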
libdir= if test -f "$lib"; then func_source "$lib" fi libfile="$libdir/"`$ECHO "$lib" | $SED 's%^.*/%%g'` ### testsuite: skip nested quoting test if test -n "$libdir" && test ! -f "$libfile"; then func_warning "\`$lib' has not been installed in \`$libdir'" finalize=no fi done relink_command= func_source "$wrapper" outputname= if test "$fast_install" = no && test -n "$relink_command"; then $opt_dry_run || { if test "$finalize" = yes; then tmpdir=`func_mktempdir` func_basename "$file$stripped_ext" file="$func_basename_result" outputname="$tmpdir/$file" # Replace the output file specification. relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'` $opt_silent || { func_quote_for_expand "$relink_command" eval "func_echo $func_quote_for_expand_result" } if eval "$relink_command"; then : else func_error "error: relink \`$file' with the above command before installing it" $opt_dry_run || ${RM}r "$tmpdir" continue fi file="$outputname" else func_warning "cannot relink \`$file'" fi } else # Install the binary that we compiled earlier. file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"` fi fi # remove .exe since cygwin /usr/bin/install will append another # one anyway case $install_prog,$host in */usr/bin/install*,*cygwin*) case $file:$destfile in *.exe:*.exe) # this is ok ;; *.exe:*) destfile=$destfile.exe ;; *:*.exe) func_stripname '' '.exe' "$destfile" destfile=$func_stripname_result ;; esac ;; esac func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?' $opt_dry_run || if test -n "$outputname"; then ${RM}r "$tmpdir" fi ;; esac done for file in $staticlibs; do func_basename "$file" name="$func_basename_result" # Set up the ranlib parameters. oldlib="$destdir/$name" func_to_tool_file "$oldlib" func_convert_file_msys_to_w32 tool_oldlib=$func_to_tool_file_result func_show_eval "$install_prog \$file \$oldlib" 'exit $?' if test -n "$stripme" && test -n "$old_striplib"; then func_show_eval "$old_striplib $tool_oldlib" 'exit $?' fi # Do each command in the postinstall commands. func_execute_cmds "$old_postinstall_cmds" 'exit $?' done test -n "$future_libdirs" && \ func_warning "remember to run \`$progname --finish$future_libdirs'" if test -n "$current_libdirs"; then # Maybe just do a dry run. $opt_dry_run && current_libdirs=" -n$current_libdirs" exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs' else exit $EXIT_SUCCESS fi } test "$opt_mode" = install && func_mode_install ${1+"$@"} # func_generate_dlsyms outputname originator pic_p # Extract symbols from dlprefiles and create ${outputname}S.o with # a dlpreopen symbol table. func_generate_dlsyms () { $opt_debug my_outputname="$1" my_originator="$2" my_pic_p="${3-no}" my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'` my_dlsyms= if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then if test -n "$NM" && test -n "$global_symbol_pipe"; then my_dlsyms="${my_outputname}S.c" else func_error "not configured to extract global symbols from dlpreopened files" fi fi if test -n "$my_dlsyms"; then case $my_dlsyms in "") ;; *.c) # Discover the nlist of each of the dlfiles. nlist="$output_objdir/${my_outputname}.nm" func_show_eval "$RM $nlist ${nlist}S ${nlist}T" # Parse the name list into a source file. func_verbose "creating $output_objdir/$my_dlsyms" $opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\ /* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. 
*/ /* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */ #ifdef __cplusplus extern \"C\" { #endif #if defined(__GNUC__) && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4)) #pragma GCC diagnostic ignored \"-Wstrict-prototypes\" #endif /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ #if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) /* DATA imports from DLLs on WIN32 con't be const, because runtime relocations are performed -- see ld's documentation on pseudo-relocs. */ # define LT_DLSYM_CONST #elif defined(__osf__) /* This system does not cope well with relocations in const data. */ # define LT_DLSYM_CONST #else # define LT_DLSYM_CONST const #endif /* External symbol declarations for the compiler. */\ " if test "$dlself" = yes; then func_verbose "generating symbol list for \`$output'" $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist" # Add our own program objects to the symbol list. progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP` for progfile in $progfiles; do func_to_tool_file "$progfile" func_convert_file_msys_to_w32 func_verbose "extracting global C symbols from \`$func_to_tool_file_result'" $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'" done if test -n "$exclude_expsyms"; then $opt_dry_run || { eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T' eval '$MV "$nlist"T "$nlist"' } fi if test -n "$export_symbols_regex"; then $opt_dry_run || { eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T' eval '$MV "$nlist"T "$nlist"' } fi # Prepare the list of exported symbols if test -z "$export_symbols"; then export_symbols="$output_objdir/$outputname.exp" $opt_dry_run || { $RM $export_symbols eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' case $host in *cygwin* | *mingw* | *cegcc* ) eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"' ;; esac } else $opt_dry_run || { eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"' eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T' eval '$MV "$nlist"T "$nlist"' case $host in *cygwin* | *mingw* | *cegcc* ) eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' eval 'cat "$nlist" >> "$output_objdir/$outputname.def"' ;; esac } fi fi for dlprefile in $dlprefiles; do func_verbose "extracting global C symbols from \`$dlprefile'" func_basename "$dlprefile" name="$func_basename_result" case $host in *cygwin* | *mingw* | *cegcc* ) # if an import library, we need to obtain dlname if func_win32_import_lib_p "$dlprefile"; then func_tr_sh "$dlprefile" eval "curr_lafile=\$libfile_$func_tr_sh_result" dlprefile_dlbasename="" if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then # Use subshell, to avoid clobbering current variable values dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"` if test -n "$dlprefile_dlname" ; then func_basename "$dlprefile_dlname" dlprefile_dlbasename="$func_basename_result" else # no lafile. user explicitly requested -dlpreopen . 
$sharedlib_from_linklib_cmd "$dlprefile" dlprefile_dlbasename=$sharedlib_from_linklib_result fi fi $opt_dry_run || { if test -n "$dlprefile_dlbasename" ; then eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"' else func_warning "Could not compute DLL name from $name" eval '$ECHO ": $name " >> "$nlist"' fi func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe | $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'" } else # not an import lib $opt_dry_run || { eval '$ECHO ": $name " >> "$nlist"' func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'" } fi ;; *) $opt_dry_run || { eval '$ECHO ": $name " >> "$nlist"' func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'" } ;; esac done $opt_dry_run || { # Make sure we have at least an empty file. test -f "$nlist" || : > "$nlist" if test -n "$exclude_expsyms"; then $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T $MV "$nlist"T "$nlist" fi # Try sorting and uniquifying the output. if $GREP -v "^: " < "$nlist" | if sort -k 3 /dev/null 2>&1; then sort -k 3 else sort +2 fi | uniq > "$nlist"S; then : else $GREP -v "^: " < "$nlist" > "$nlist"S fi if test -f "$nlist"S; then eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"' else echo '/* NONE */' >> "$output_objdir/$my_dlsyms" fi echo >> "$output_objdir/$my_dlsyms" "\ /* The mapping between symbol names and symbols. */ typedef struct { const char *name; void *address; } lt_dlsymlist; extern LT_DLSYM_CONST lt_dlsymlist lt_${my_prefix}_LTX_preloaded_symbols[]; LT_DLSYM_CONST lt_dlsymlist lt_${my_prefix}_LTX_preloaded_symbols[] = {\ { \"$my_originator\", (void *) 0 }," case $need_lib_prefix in no) eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms" ;; *) eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms" ;; esac echo >> "$output_objdir/$my_dlsyms" "\ {0, (void *) 0} }; /* This works around a problem in FreeBSD linker */ #ifdef FREEBSD_WORKAROUND static const void *lt_preloaded_setup() { return lt_${my_prefix}_LTX_preloaded_symbols; } #endif #ifdef __cplusplus } #endif\ " } # !$opt_dry_run pic_flag_for_symtable= case "$compile_command " in *" -static "*) ;; *) case $host in # compiling the symbol table file with pic_flag works around # a FreeBSD bug that causes programs to crash when -lm is # linked before any other PIC object. But we must not use # pic_flag when linking with -static. The problem exists in # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1. *-*-freebsd2.*|*-*-freebsd3.0*|*-*-freebsdelf3.0*) pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;; *-*-hpux*) pic_flag_for_symtable=" $pic_flag" ;; *) if test "X$my_pic_p" != Xno; then pic_flag_for_symtable=" $pic_flag" fi ;; esac ;; esac symtab_cflags= for arg in $LTCFLAGS; do case $arg in -pie | -fpie | -fPIE) ;; *) func_append symtab_cflags " $arg" ;; esac done # Now compile the dynamic symbol file. func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?' # Clean up the generated files. func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"' # Transform the symbol file into the correct name. 
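# Editor's note (illustrative sketch only, not part of the generated script):
# func_generate_dlsyms is triggered by -dlopen/-dlpreopen arguments in link
# mode. Assuming a hypothetical plugin libplugin.la, something like
#
#   libtool --mode=link cc -o myapp main.o -dlpreopen libplugin.la
#
# makes libtool emit and compile ${my_outputname}S.c, whose
# lt_<prefix>_LTX_preloaded_symbols[] table lets a dlopen() emulation such as
# libltdl resolve the plugin's symbols without a real dlopen() at run time.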
symfileobj="$output_objdir/${my_outputname}S.$objext" case $host in *cygwin* | *mingw* | *cegcc* ) if test -f "$output_objdir/$my_outputname.def"; then compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"` finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"` else compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"` finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"` fi ;; *) compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"` finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"` ;; esac ;; *) func_fatal_error "unknown suffix for \`$my_dlsyms'" ;; esac else # We keep going just in case the user didn't refer to # lt_preloaded_symbols. The linker will fail if global_symbol_pipe # really was required. # Nullify the symbol file. compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"` finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"` fi } # func_win32_libid arg # return the library type of file 'arg' # # Need a lot of goo to handle *both* DLLs and import libs # Has to be a shell function in order to 'eat' the argument # that is supplied when $file_magic_command is called. # Despite the name, also deal with 64 bit binaries. func_win32_libid () { $opt_debug win32_libid_type="unknown" win32_fileres=`file -L $1 2>/dev/null` case $win32_fileres in *ar\ archive\ import\ library*) # definitely import win32_libid_type="x86 archive import" ;; *ar\ archive*) # could be an import, or static # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD. if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then func_to_tool_file "$1" func_convert_file_msys_to_w32 win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" | $SED -n -e ' 1,100{ / I /{ s,.*,import, p q } }'` case $win32_nmres in import*) win32_libid_type="x86 archive import";; *) win32_libid_type="x86 archive static";; esac fi ;; *DLL*) win32_libid_type="x86 DLL" ;; *executable*) # but shell scripts are "executable" too... case $win32_fileres in *MS\ Windows\ PE\ Intel*) win32_libid_type="x86 DLL" ;; esac ;; esac $ECHO "$win32_libid_type" } # func_cygming_dll_for_implib ARG # # Platform-specific function to extract the # name of the DLL associated with the specified # import library ARG. # Invoked by eval'ing the libtool variable # $sharedlib_from_linklib_cmd # Result is available in the variable # $sharedlib_from_linklib_result func_cygming_dll_for_implib () { $opt_debug sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"` } # func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs # # The is the core of a fallback implementation of a # platform-specific function to extract the name of the # DLL associated with the specified import library LIBNAME. # # SECTION_NAME is either .idata$6 or .idata$7, depending # on the platform and compiler that created the implib. # # Echos the name of the DLL associated with the # specified import library. 
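# Editor's note (illustrative sketch only, not part of the generated script):
# the same information can be inspected by hand; for a hypothetical import
# library libfoo.dll.a one would dump the relevant .idata section, e.g.:
#
#   objdump -s --section '.idata$7' libfoo.dll.a
#
# which is essentially what the fallback core below automates before
# filtering the output down to the embedded DLL name.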
func_cygming_dll_for_implib_fallback_core () { $opt_debug match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"` $OBJDUMP -s --section "$1" "$2" 2>/dev/null | $SED '/^Contents of section '"$match_literal"':/{ # Place marker at beginning of archive member dllname section s/.*/====MARK====/ p d } # These lines can sometimes be longer than 43 characters, but # are always uninteresting /:[ ]*file format pe[i]\{,1\}-/d /^In archive [^:]*:/d # Ensure marker is printed /^====MARK====/p # Remove all lines with less than 43 characters /^.\{43\}/!d # From remaining lines, remove first 43 characters s/^.\{43\}//' | $SED -n ' # Join marker and all lines until next marker into a single line /^====MARK====/ b para H $ b para b :para x s/\n//g # Remove the marker s/^====MARK====// # Remove trailing dots and whitespace s/[\. \t]*$// # Print /./p' | # we now have a list, one entry per line, of the stringified # contents of the appropriate section of all members of the # archive which possess that section. Heuristic: eliminate # all those which have a first or second character that is # a '.' (that is, objdump's representation of an unprintable # character.) This should work for all archives with less than # 0x302f exports -- but will fail for DLLs whose name actually # begins with a literal '.' or a single character followed by # a '.'. # # Of those that remain, print the first one. $SED -e '/^\./d;/^.\./d;q' } # func_cygming_gnu_implib_p ARG # This predicate returns with zero status (TRUE) if # ARG is a GNU/binutils-style import library. Returns # with nonzero status (FALSE) otherwise. func_cygming_gnu_implib_p () { $opt_debug func_to_tool_file "$1" func_convert_file_msys_to_w32 func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'` test -n "$func_cygming_gnu_implib_tmp" } # func_cygming_ms_implib_p ARG # This predicate returns with zero status (TRUE) if # ARG is an MS-style import library. Returns # with nonzero status (FALSE) otherwise. func_cygming_ms_implib_p () { $opt_debug func_to_tool_file "$1" func_convert_file_msys_to_w32 func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'` test -n "$func_cygming_ms_implib_tmp" } # func_cygming_dll_for_implib_fallback ARG # Platform-specific function to extract the # name of the DLL associated with the specified # import library ARG. # # This fallback implementation is for use when $DLLTOOL # does not support the --identify-strict option. 
# Invoked by eval'ing the libtool variable # $sharedlib_from_linklib_cmd # Result is available in the variable # $sharedlib_from_linklib_result func_cygming_dll_for_implib_fallback () { $opt_debug if func_cygming_gnu_implib_p "$1" ; then # binutils import library sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"` elif func_cygming_ms_implib_p "$1" ; then # ms-generated import library sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"` else # unknown sharedlib_from_linklib_result="" fi } # func_extract_an_archive dir oldlib func_extract_an_archive () { $opt_debug f_ex_an_ar_dir="$1"; shift f_ex_an_ar_oldlib="$1" if test "$lock_old_archive_extraction" = yes; then lockfile=$f_ex_an_ar_oldlib.lock until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do func_echo "Waiting for $lockfile to be removed" sleep 2 done fi func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \ 'stat=$?; rm -f "$lockfile"; exit $stat' if test "$lock_old_archive_extraction" = yes; then $opt_dry_run || rm -f "$lockfile" fi if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then : else func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib" fi } # func_extract_archives gentop oldlib ... func_extract_archives () { $opt_debug my_gentop="$1"; shift my_oldlibs=${1+"$@"} my_oldobjs="" my_xlib="" my_xabs="" my_xdir="" for my_xlib in $my_oldlibs; do # Extract the objects. case $my_xlib in [\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;; *) my_xabs=`pwd`"/$my_xlib" ;; esac func_basename "$my_xlib" my_xlib="$func_basename_result" my_xlib_u=$my_xlib while :; do case " $extracted_archives " in *" $my_xlib_u "*) func_arith $extracted_serial + 1 extracted_serial=$func_arith_result my_xlib_u=lt$extracted_serial-$my_xlib ;; *) break ;; esac done extracted_archives="$extracted_archives $my_xlib_u" my_xdir="$my_gentop/$my_xlib_u" func_mkdir_p "$my_xdir" case $host in *-darwin*) func_verbose "Extracting $my_xabs" # Do not bother doing anything if just a dry run $opt_dry_run || { darwin_orig_dir=`pwd` cd $my_xdir || exit $? 
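# Editor's note (illustrative sketch only, not part of the generated script):
# on Darwin a "fat" archive is first split per architecture and later
# re-fattened, mirroring the lipo calls below. By hand, for a hypothetical
# libfoo.a, the steps would resemble:
#
#   lipo -info libfoo.a                         # list architectures
#   lipo -thin x86_64 -output thin.a libfoo.a   # extract one slice
#   lipo -create -output fat.o x86_64/foo.o arm64/foo.o
#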
darwin_archive=$my_xabs darwin_curdir=`pwd` darwin_base_archive=`basename "$darwin_archive"` darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true` if test -n "$darwin_arches"; then darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'` darwin_arch= func_verbose "$darwin_base_archive has multiple architectures $darwin_arches" for darwin_arch in $darwin_arches ; do func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}" $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}" cd "unfat-$$/${darwin_base_archive}-${darwin_arch}" func_extract_an_archive "`pwd`" "${darwin_base_archive}" cd "$darwin_curdir" $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" done # $darwin_arches ## Okay now we've a bunch of thin objects, gotta fatten them up :) darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u` darwin_file= darwin_files= for darwin_file in $darwin_filelist; do darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP` $LIPO -create -output "$darwin_file" $darwin_files done # $darwin_filelist $RM -rf unfat-$$ cd "$darwin_orig_dir" else cd $darwin_orig_dir func_extract_an_archive "$my_xdir" "$my_xabs" fi # $darwin_arches } # !$opt_dry_run ;; *) func_extract_an_archive "$my_xdir" "$my_xabs" ;; esac my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP` done func_extract_archives_result="$my_oldobjs" } # func_emit_wrapper [arg=no] # # Emit a libtool wrapper script on stdout. # Don't directly open a file because we may want to # incorporate the script contents within a cygwin/mingw # wrapper executable. Must ONLY be called from within # func_mode_link because it depends on a number of variables # set therein. # # ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR # variable will take. If 'yes', then the emitted script # will assume that the directory in which it is stored is # the $objdir directory. This is a cygwin/mingw-specific # behavior. func_emit_wrapper () { func_emit_wrapper_arg1=${1-no} $ECHO "\ #! $SHELL # $output - temporary wrapper script for $objdir/$outputname # Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION # # The $output program cannot be directly executed until all the libtool # libraries that it depends on are installed. # # This wrapper script should never be moved out of the build directory. # If it is, it will not operate correctly. # Sed substitution that helps us do robust quoting. It backslashifies # metacharacters that are still active within double-quoted strings. sed_quote_subst='$sed_quote_subst' # Be Bourne compatible if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then emulate sh NULLCMD=: # Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which # is contrary to our usage. Disable this feature. alias -g '\${1+\"\$@\"}'='\"\$@\"' setopt NO_GLOB_SUBST else case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac fi BIN_SH=xpg4; export BIN_SH # for Tru64 DUALCASE=1; export DUALCASE # for MKS sh # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH relink_command=\"$relink_command\" # This environment variable determines our operation mode. 
if test \"\$libtool_install_magic\" = \"$magic\"; then # install mode needs the following variables: generated_by_libtool_version='$macro_version' notinst_deplibs='$notinst_deplibs' else # When we are sourced in execute mode, \$file and \$ECHO are already set. if test \"\$libtool_execute_magic\" != \"$magic\"; then file=\"\$0\"" qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"` $ECHO "\ # A function that is used when there is no print builtin or printf. func_fallback_echo () { eval 'cat <<_LTECHO_EOF \$1 _LTECHO_EOF' } ECHO=\"$qECHO\" fi # Very basic option parsing. These options are (a) specific to # the libtool wrapper, (b) are identical between the wrapper # /script/ and the wrapper /executable/ which is used only on # windows platforms, and (c) all begin with the string "--lt-" # (application programs are unlikely to have options which match # this pattern). # # There are only two supported options: --lt-debug and # --lt-dump-script. There is, deliberately, no --lt-help. # # The first argument to this parsing function should be the # script's $0 value, followed by "$@". lt_option_debug= func_parse_lt_options () { lt_script_arg0=\$0 shift for lt_opt do case \"\$lt_opt\" in --lt-debug) lt_option_debug=1 ;; --lt-dump-script) lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\` test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=. lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\` cat \"\$lt_dump_D/\$lt_dump_F\" exit 0 ;; --lt-*) \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2 exit 1 ;; esac done # Print the debug banner immediately: if test -n \"\$lt_option_debug\"; then echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2 fi } # Used when --lt-debug. Prints its arguments to stdout # (redirection is the responsibility of the caller) func_lt_dump_args () { lt_dump_args_N=1; for lt_arg do \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\" lt_dump_args_N=\`expr \$lt_dump_args_N + 1\` done } # Core function for launching the target application func_exec_program_core () { " case $host in # Backslashes separate directories on plain windows *-*-mingw | *-*-os2* | *-cegcc*) $ECHO "\ if test -n \"\$lt_option_debug\"; then \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2 func_lt_dump_args \${1+\"\$@\"} 1>&2 fi exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} " ;; *) $ECHO "\ if test -n \"\$lt_option_debug\"; then \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2 func_lt_dump_args \${1+\"\$@\"} 1>&2 fi exec \"\$progdir/\$program\" \${1+\"\$@\"} " ;; esac $ECHO "\ \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2 exit 1 } # A function to encapsulate launching the target application # Strips options in the --lt-* namespace from \$@ and # launches target application with the remaining arguments. func_exec_program () { case \" \$* \" in *\\ --lt-*) for lt_wr_arg do case \$lt_wr_arg in --lt-*) ;; *) set x \"\$@\" \"\$lt_wr_arg\"; shift;; esac shift done ;; esac func_exec_program_core \${1+\"\$@\"} } # Parse options func_parse_lt_options \"\$0\" \${1+\"\$@\"} # Find the directory that this script lives in. thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\` test \"x\$thisdir\" = \"x\$file\" && thisdir=. # Follow symbolic links until we get to the real thisdir. 
file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\` while test -n \"\$file\"; do destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\` # If there was a directory component, then change thisdir. if test \"x\$destdir\" != \"x\$file\"; then case \"\$destdir\" in [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;; *) thisdir=\"\$thisdir/\$destdir\" ;; esac fi file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\` file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\` done # Usually 'no', except on cygwin/mingw when embedded into # the cwrapper. WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1 if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then # special case for '.' if test \"\$thisdir\" = \".\"; then thisdir=\`pwd\` fi # remove .libs from thisdir case \"\$thisdir\" in *[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;; $objdir ) thisdir=. ;; esac fi # Try to get the absolute directory name. absdir=\`cd \"\$thisdir\" && pwd\` test -n \"\$absdir\" && thisdir=\"\$absdir\" " if test "$fast_install" = yes; then $ECHO "\ program=lt-'$outputname'$exeext progdir=\"\$thisdir/$objdir\" if test ! -f \"\$progdir/\$program\" || { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\ test \"X\$file\" != \"X\$progdir/\$program\"; }; then file=\"\$\$-\$program\" if test ! -d \"\$progdir\"; then $MKDIR \"\$progdir\" else $RM \"\$progdir/\$file\" fi" $ECHO "\ # relink executable if necessary if test -n \"\$relink_command\"; then if relink_command_output=\`eval \$relink_command 2>&1\`; then : else $ECHO \"\$relink_command_output\" >&2 $RM \"\$progdir/\$file\" exit 1 fi fi $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null || { $RM \"\$progdir/\$program\"; $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; } $RM \"\$progdir/\$file\" fi" else $ECHO "\ program='$outputname' progdir=\"\$thisdir/$objdir\" " fi $ECHO "\ if test -f \"\$progdir/\$program\"; then" # fixup the dll searchpath if we need to. # # Fix the DLL searchpath if we need to. Do this before prepending # to shlibpath, because on Windows, both are PATH and uninstalled # libraries must come first. if test -n "$dllsearchpath"; then $ECHO "\ # Add the dll search path components to the executable PATH PATH=$dllsearchpath:\$PATH " fi # Export our shlibpath_var if we have one. if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then $ECHO "\ # Add our own library path to $shlibpath_var $shlibpath_var=\"$temp_rpath\$$shlibpath_var\" # Some systems cannot cope with colon-terminated $shlibpath_var # The second colon is a workaround for a bug in BeOS R4 sed $shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\` export $shlibpath_var " fi $ECHO "\ if test \"\$libtool_execute_magic\" != \"$magic\"; then # Run the actual program with our arguments. func_exec_program \${1+\"\$@\"} fi else # The program doesn't exist. \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2 \$ECHO \"This script is just a wrapper for \$program.\" 1>&2 \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2 exit 1 fi fi\ " } # func_emit_cwrapperexe_src # emit the source code for a wrapper executable on stdout # Must ONLY be called from within func_mode_link because # it depends on a number of variable set therein. 
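# Editor's note (illustrative sketch only, not part of the generated script):
# on cygwin/mingw the wrapper is a small C executable that embeds the shell
# wrapper emitted by func_emit_wrapper above (see lt_dump_script). The
# resulting binary understands the --lt-* options defined there, e.g. for a
# hypothetical ./myprog.exe:
#
#   ./myprog.exe --lt-dump-script   # print the embedded wrapper script
#   ./myprog.exe --lt-debug ...     # run with wrapper debugging output
#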
func_emit_cwrapperexe_src () { cat < #include #ifdef _MSC_VER # include # include # include #else # include # include # ifdef __CYGWIN__ # include # endif #endif #include #include #include #include #include #include #include #include /* declarations of non-ANSI functions */ #if defined(__MINGW32__) # ifdef __STRICT_ANSI__ int _putenv (const char *); # endif #elif defined(__CYGWIN__) # ifdef __STRICT_ANSI__ char *realpath (const char *, char *); int putenv (char *); int setenv (const char *, const char *, int); # endif /* #elif defined (other platforms) ... */ #endif /* portability defines, excluding path handling macros */ #if defined(_MSC_VER) # define setmode _setmode # define stat _stat # define chmod _chmod # define getcwd _getcwd # define putenv _putenv # define S_IXUSR _S_IEXEC # ifndef _INTPTR_T_DEFINED # define _INTPTR_T_DEFINED # define intptr_t int # endif #elif defined(__MINGW32__) # define setmode _setmode # define stat _stat # define chmod _chmod # define getcwd _getcwd # define putenv _putenv #elif defined(__CYGWIN__) # define HAVE_SETENV # define FOPEN_WB "wb" /* #elif defined (other platforms) ... */ #endif #if defined(PATH_MAX) # define LT_PATHMAX PATH_MAX #elif defined(MAXPATHLEN) # define LT_PATHMAX MAXPATHLEN #else # define LT_PATHMAX 1024 #endif #ifndef S_IXOTH # define S_IXOTH 0 #endif #ifndef S_IXGRP # define S_IXGRP 0 #endif /* path handling portability macros */ #ifndef DIR_SEPARATOR # define DIR_SEPARATOR '/' # define PATH_SEPARATOR ':' #endif #if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \ defined (__OS2__) # define HAVE_DOS_BASED_FILE_SYSTEM # define FOPEN_WB "wb" # ifndef DIR_SEPARATOR_2 # define DIR_SEPARATOR_2 '\\' # endif # ifndef PATH_SEPARATOR_2 # define PATH_SEPARATOR_2 ';' # endif #endif #ifndef DIR_SEPARATOR_2 # define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR) #else /* DIR_SEPARATOR_2 */ # define IS_DIR_SEPARATOR(ch) \ (((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2)) #endif /* DIR_SEPARATOR_2 */ #ifndef PATH_SEPARATOR_2 # define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR) #else /* PATH_SEPARATOR_2 */ # define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2) #endif /* PATH_SEPARATOR_2 */ #ifndef FOPEN_WB # define FOPEN_WB "w" #endif #ifndef _O_BINARY # define _O_BINARY 0 #endif #define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type))) #define XFREE(stale) do { \ if (stale) { free ((void *) stale); stale = 0; } \ } while (0) #if defined(LT_DEBUGWRAPPER) static int lt_debug = 1; #else static int lt_debug = 0; #endif const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */ void *xmalloc (size_t num); char *xstrdup (const char *string); const char *base_name (const char *name); char *find_executable (const char *wrapper); char *chase_symlinks (const char *pathspec); int make_executable (const char *path); int check_executable (const char *path); char *strendzap (char *str, const char *pat); void lt_debugprintf (const char *file, int line, const char *fmt, ...); void lt_fatal (const char *file, int line, const char *message, ...); static const char *nonnull (const char *s); static const char *nonempty (const char *s); void lt_setenv (const char *name, const char *value); char *lt_extend_str (const char *orig_value, const char *add, int to_end); void lt_update_exe_path (const char *name, const char *value); void lt_update_lib_path (const char *name, const char *value); char **prepare_spawn (char **argv); void lt_dump_script (FILE *f); EOF cat <= 0) && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH))) 
return 1; else return 0; } int make_executable (const char *path) { int rval = 0; struct stat st; lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n", nonempty (path)); if ((!path) || (!*path)) return 0; if (stat (path, &st) >= 0) { rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR); } return rval; } /* Searches for the full path of the wrapper. Returns newly allocated full path name if found, NULL otherwise Does not chase symlinks, even on platforms that support them. */ char * find_executable (const char *wrapper) { int has_slash = 0; const char *p; const char *p_next; /* static buffer for getcwd */ char tmp[LT_PATHMAX + 1]; int tmp_len; char *concat_name; lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n", nonempty (wrapper)); if ((wrapper == NULL) || (*wrapper == '\0')) return NULL; /* Absolute path? */ #if defined (HAVE_DOS_BASED_FILE_SYSTEM) if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':') { concat_name = xstrdup (wrapper); if (check_executable (concat_name)) return concat_name; XFREE (concat_name); } else { #endif if (IS_DIR_SEPARATOR (wrapper[0])) { concat_name = xstrdup (wrapper); if (check_executable (concat_name)) return concat_name; XFREE (concat_name); } #if defined (HAVE_DOS_BASED_FILE_SYSTEM) } #endif for (p = wrapper; *p; p++) if (*p == '/') { has_slash = 1; break; } if (!has_slash) { /* no slashes; search PATH */ const char *path = getenv ("PATH"); if (path != NULL) { for (p = path; *p; p = p_next) { const char *q; size_t p_len; for (q = p; *q; q++) if (IS_PATH_SEPARATOR (*q)) break; p_len = q - p; p_next = (*q == '\0' ? q : q + 1); if (p_len == 0) { /* empty path: current directory */ if (getcwd (tmp, LT_PATHMAX) == NULL) lt_fatal (__FILE__, __LINE__, "getcwd failed: %s", nonnull (strerror (errno))); tmp_len = strlen (tmp); concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); memcpy (concat_name, tmp, tmp_len); concat_name[tmp_len] = '/'; strcpy (concat_name + tmp_len + 1, wrapper); } else { concat_name = XMALLOC (char, p_len + 1 + strlen (wrapper) + 1); memcpy (concat_name, p, p_len); concat_name[p_len] = '/'; strcpy (concat_name + p_len + 1, wrapper); } if (check_executable (concat_name)) return concat_name; XFREE (concat_name); } } /* not found in PATH; assume curdir */ } /* Relative path | not found in path: prepend cwd */ if (getcwd (tmp, LT_PATHMAX) == NULL) lt_fatal (__FILE__, __LINE__, "getcwd failed: %s", nonnull (strerror (errno))); tmp_len = strlen (tmp); concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); memcpy (concat_name, tmp, tmp_len); concat_name[tmp_len] = '/'; strcpy (concat_name + tmp_len + 1, wrapper); if (check_executable (concat_name)) return concat_name; XFREE (concat_name); return NULL; } char * chase_symlinks (const char *pathspec) { #ifndef S_ISLNK return xstrdup (pathspec); #else char buf[LT_PATHMAX]; struct stat s; char *tmp_pathspec = xstrdup (pathspec); char *p; int has_symlinks = 0; while (strlen (tmp_pathspec) && !has_symlinks) { lt_debugprintf (__FILE__, __LINE__, "checking path component for symlinks: %s\n", tmp_pathspec); if (lstat (tmp_pathspec, &s) == 0) { if (S_ISLNK (s.st_mode) != 0) { has_symlinks = 1; break; } /* search backwards for last DIR_SEPARATOR */ p = tmp_pathspec + strlen (tmp_pathspec) - 1; while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p))) p--; if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p))) { /* no more DIR_SEPARATORS left */ break; } *p = '\0'; } else { lt_fatal (__FILE__, __LINE__, "error accessing file \"%s\": %s", tmp_pathspec, 
nonnull (strerror (errno))); } } XFREE (tmp_pathspec); if (!has_symlinks) { return xstrdup (pathspec); } tmp_pathspec = realpath (pathspec, buf); if (tmp_pathspec == 0) { lt_fatal (__FILE__, __LINE__, "could not follow symlinks for %s", pathspec); } return xstrdup (tmp_pathspec); #endif } char * strendzap (char *str, const char *pat) { size_t len, patlen; assert (str != NULL); assert (pat != NULL); len = strlen (str); patlen = strlen (pat); if (patlen <= len) { str += len - patlen; if (strcmp (str, pat) == 0) *str = '\0'; } return str; } void lt_debugprintf (const char *file, int line, const char *fmt, ...) { va_list args; if (lt_debug) { (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line); va_start (args, fmt); (void) vfprintf (stderr, fmt, args); va_end (args); } } static void lt_error_core (int exit_status, const char *file, int line, const char *mode, const char *message, va_list ap) { fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode); vfprintf (stderr, message, ap); fprintf (stderr, ".\n"); if (exit_status >= 0) exit (exit_status); } void lt_fatal (const char *file, int line, const char *message, ...) { va_list ap; va_start (ap, message); lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap); va_end (ap); } static const char * nonnull (const char *s) { return s ? s : "(null)"; } static const char * nonempty (const char *s) { return (s && !*s) ? "(empty)" : nonnull (s); } void lt_setenv (const char *name, const char *value) { lt_debugprintf (__FILE__, __LINE__, "(lt_setenv) setting '%s' to '%s'\n", nonnull (name), nonnull (value)); { #ifdef HAVE_SETENV /* always make a copy, for consistency with !HAVE_SETENV */ char *str = xstrdup (value); setenv (name, str, 1); #else int len = strlen (name) + 1 + strlen (value) + 1; char *str = XMALLOC (char, len); sprintf (str, "%s=%s", name, value); if (putenv (str) != EXIT_SUCCESS) { XFREE (str); } #endif } } char * lt_extend_str (const char *orig_value, const char *add, int to_end) { char *new_value; if (orig_value && *orig_value) { int orig_value_len = strlen (orig_value); int add_len = strlen (add); new_value = XMALLOC (char, add_len + orig_value_len + 1); if (to_end) { strcpy (new_value, orig_value); strcpy (new_value + orig_value_len, add); } else { strcpy (new_value, add); strcpy (new_value + add_len, orig_value); } } else { new_value = xstrdup (add); } return new_value; } void lt_update_exe_path (const char *name, const char *value) { lt_debugprintf (__FILE__, __LINE__, "(lt_update_exe_path) modifying '%s' by prepending '%s'\n", nonnull (name), nonnull (value)); if (name && *name && value && *value) { char *new_value = lt_extend_str (getenv (name), value, 0); /* some systems can't cope with a ':'-terminated path #' */ int len = strlen (new_value); while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1])) { new_value[len-1] = '\0'; } lt_setenv (name, new_value); XFREE (new_value); } } void lt_update_lib_path (const char *name, const char *value) { lt_debugprintf (__FILE__, __LINE__, "(lt_update_lib_path) modifying '%s' by prepending '%s'\n", nonnull (name), nonnull (value)); if (name && *name && value && *value) { char *new_value = lt_extend_str (getenv (name), value, 0); lt_setenv (name, new_value); XFREE (new_value); } } EOF case $host_os in mingw*) cat <<"EOF" /* Prepares an argument vector before calling spawn(). Note that spawn() does not by itself call the command interpreter (getenv ("COMSPEC") != NULL ? 
getenv ("COMSPEC") : ({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO); GetVersionEx(&v); v.dwPlatformId == VER_PLATFORM_WIN32_NT; }) ? "cmd.exe" : "command.com"). Instead it simply concatenates the arguments, separated by ' ', and calls CreateProcess(). We must quote the arguments since Win32 CreateProcess() interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a special way: - Space and tab are interpreted as delimiters. They are not treated as delimiters if they are surrounded by double quotes: "...". - Unescaped double quotes are removed from the input. Their only effect is that within double quotes, space and tab are treated like normal characters. - Backslashes not followed by double quotes are not special. - But 2*n+1 backslashes followed by a double quote become n backslashes followed by a double quote (n >= 0): \" -> " \\\" -> \" \\\\\" -> \\" */ #define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037" #define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037" char ** prepare_spawn (char **argv) { size_t argc; char **new_argv; size_t i; /* Count number of arguments. */ for (argc = 0; argv[argc] != NULL; argc++) ; /* Allocate new argument vector. */ new_argv = XMALLOC (char *, argc + 1); /* Put quoted arguments into the new argument vector. */ for (i = 0; i < argc; i++) { const char *string = argv[i]; if (string[0] == '\0') new_argv[i] = xstrdup ("\"\""); else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL) { int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL); size_t length; unsigned int backslashes; const char *s; char *quoted_string; char *p; length = 0; backslashes = 0; if (quote_around) length++; for (s = string; *s != '\0'; s++) { char c = *s; if (c == '"') length += backslashes + 1; length++; if (c == '\\') backslashes++; else backslashes = 0; } if (quote_around) length += backslashes + 1; quoted_string = XMALLOC (char, length + 1); p = quoted_string; backslashes = 0; if (quote_around) *p++ = '"'; for (s = string; *s != '\0'; s++) { char c = *s; if (c == '"') { unsigned int j; for (j = backslashes + 1; j > 0; j--) *p++ = '\\'; } *p++ = c; if (c == '\\') backslashes++; else backslashes = 0; } if (quote_around) { unsigned int j; for (j = backslashes; j > 0; j--) *p++ = '\\'; *p++ = '"'; } *p = '\0'; new_argv[i] = quoted_string; } else new_argv[i] = (char *) string; } new_argv[argc] = NULL; return new_argv; } EOF ;; esac cat <<"EOF" void lt_dump_script (FILE* f) { EOF func_emit_wrapper yes | $SED -n -e ' s/^\(.\{79\}\)\(..*\)/\1\ \2/ h s/\([\\"]\)/\\\1/g s/$/\\n/ s/\([^\n]*\).*/ fputs ("\1", f);/p g D' cat <<"EOF" } EOF } # end: func_emit_cwrapperexe_src # func_win32_import_lib_p ARG # True if ARG is an import lib, as indicated by $file_magic_cmd func_win32_import_lib_p () { $opt_debug case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in *import*) : ;; *) false ;; esac } # func_mode_link arg... func_mode_link () { $opt_debug case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) # It is impossible to link a dll without this setting, and # we shouldn't force the makefile maintainer to figure out # which system we are compiling for in order to pass an extra # flag for every libtool invocation. 
# allow_undefined=no # FIXME: Unfortunately, there are problems with the above when trying # to make a dll which has undefined symbols, in which case not # even a static library is built. For now, we need to specify # -no-undefined on the libtool link line when we can be certain # that all symbols are satisfied, otherwise we get a static library. allow_undefined=yes ;; *) allow_undefined=yes ;; esac libtool_args=$nonopt base_compile="$nonopt $@" compile_command=$nonopt finalize_command=$nonopt compile_rpath= finalize_rpath= compile_shlibpath= finalize_shlibpath= convenience= old_convenience= deplibs= old_deplibs= compiler_flags= linker_flags= dllsearchpath= lib_search_path=`pwd` inst_prefix_dir= new_inherited_linker_flags= avoid_version=no bindir= dlfiles= dlprefiles= dlself=no export_dynamic=no export_symbols= export_symbols_regex= generated= libobjs= ltlibs= module=no no_install=no objs= non_pic_objects= precious_files_regex= prefer_static_libs=no preload=no prev= prevarg= release= rpath= xrpath= perm_rpath= temp_rpath= thread_safe=no vinfo= vinfo_number=no weak_libs= single_module="${wl}-single_module" func_infer_tag $base_compile # We need to know -static, to get the right output filenames. for arg do case $arg in -shared) test "$build_libtool_libs" != yes && \ func_fatal_configuration "can not build a shared library" build_old_libs=no break ;; -all-static | -static | -static-libtool-libs) case $arg in -all-static) if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then func_warning "complete static linking is impossible in this configuration" fi if test -n "$link_static_flag"; then dlopen_self=$dlopen_self_static fi prefer_static_libs=yes ;; -static) if test -z "$pic_flag" && test -n "$link_static_flag"; then dlopen_self=$dlopen_self_static fi prefer_static_libs=built ;; -static-libtool-libs) if test -z "$pic_flag" && test -n "$link_static_flag"; then dlopen_self=$dlopen_self_static fi prefer_static_libs=yes ;; esac build_libtool_libs=no build_old_libs=yes break ;; esac done # See if our shared archives depend on static archives. test -n "$old_archive_from_new_cmds" && build_old_libs=yes # Go through the arguments, transforming them on the way. while test "$#" -gt 0; do arg="$1" shift func_quote_for_eval "$arg" qarg=$func_quote_for_eval_unquoted_result func_append libtool_args " $func_quote_for_eval_result" # If the previous option needs an argument, assign it. if test -n "$prev"; then case $prev in output) func_append compile_command " @OUTPUT@" func_append finalize_command " @OUTPUT@" ;; esac case $prev in bindir) bindir="$arg" prev= continue ;; dlfiles|dlprefiles) if test "$preload" = no; then # Add the symbol object into the linking commands. func_append compile_command " @SYMFILE@" func_append finalize_command " @SYMFILE@" preload=yes fi case $arg in *.la | *.lo) ;; # We handle these cases below. 
force) if test "$dlself" = no; then dlself=needless export_dynamic=yes fi prev= continue ;; self) if test "$prev" = dlprefiles; then dlself=yes elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then dlself=yes else dlself=needless export_dynamic=yes fi prev= continue ;; *) if test "$prev" = dlfiles; then func_append dlfiles " $arg" else func_append dlprefiles " $arg" fi prev= continue ;; esac ;; expsyms) export_symbols="$arg" test -f "$arg" \ || func_fatal_error "symbol file \`$arg' does not exist" prev= continue ;; expsyms_regex) export_symbols_regex="$arg" prev= continue ;; framework) case $host in *-*-darwin*) case "$deplibs " in *" $qarg.ltframework "*) ;; *) func_append deplibs " $qarg.ltframework" # this is fixed later ;; esac ;; esac prev= continue ;; inst_prefix) inst_prefix_dir="$arg" prev= continue ;; objectlist) if test -f "$arg"; then save_arg=$arg moreargs= for fil in `cat "$save_arg"` do # func_append moreargs " $fil" arg=$fil # A libtool-controlled object. # Check to see that this really is a libtool object. if func_lalib_unsafe_p "$arg"; then pic_object= non_pic_object= # Read the .lo file func_source "$arg" if test -z "$pic_object" || test -z "$non_pic_object" || test "$pic_object" = none && test "$non_pic_object" = none; then func_fatal_error "cannot find name of object for \`$arg'" fi # Extract subdirectory from the argument. func_dirname "$arg" "/" "" xdir="$func_dirname_result" if test "$pic_object" != none; then # Prepend the subdirectory the object is found in. pic_object="$xdir$pic_object" if test "$prev" = dlfiles; then if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then func_append dlfiles " $pic_object" prev= continue else # If libtool objects are unsupported, then we need to preload. prev=dlprefiles fi fi # CHECK ME: I think I busted this. -Ossama if test "$prev" = dlprefiles; then # Preload the old-style object. func_append dlprefiles " $pic_object" prev= fi # A PIC object. func_append libobjs " $pic_object" arg="$pic_object" fi # Non-PIC object. if test "$non_pic_object" != none; then # Prepend the subdirectory the object is found in. non_pic_object="$xdir$non_pic_object" # A standard non-PIC object func_append non_pic_objects " $non_pic_object" if test -z "$pic_object" || test "$pic_object" = none ; then arg="$non_pic_object" fi else # If the PIC object exists, use it instead. # $xdir was prepended to $pic_object above. non_pic_object="$pic_object" func_append non_pic_objects " $non_pic_object" fi else # Only an error if not doing a dry-run. if $opt_dry_run; then # Extract subdirectory from the argument. func_dirname "$arg" "/" "" xdir="$func_dirname_result" func_lo2o "$arg" pic_object=$xdir$objdir/$func_lo2o_result non_pic_object=$xdir$func_lo2o_result func_append libobjs " $pic_object" func_append non_pic_objects " $non_pic_object" else func_fatal_error "\`$arg' is not a valid libtool object" fi fi done else func_fatal_error "link input file \`$arg' does not exist" fi arg=$save_arg prev= continue ;; precious_regex) precious_files_regex="$arg" prev= continue ;; release) release="-$arg" prev= continue ;; rpath | xrpath) # We need an absolute path. 
case $arg in [\\/]* | [A-Za-z]:[\\/]*) ;; *) func_fatal_error "only absolute run-paths are allowed" ;; esac if test "$prev" = rpath; then case "$rpath " in *" $arg "*) ;; *) func_append rpath " $arg" ;; esac else case "$xrpath " in *" $arg "*) ;; *) func_append xrpath " $arg" ;; esac fi prev= continue ;; shrext) shrext_cmds="$arg" prev= continue ;; weak) func_append weak_libs " $arg" prev= continue ;; xcclinker) func_append linker_flags " $qarg" func_append compiler_flags " $qarg" prev= func_append compile_command " $qarg" func_append finalize_command " $qarg" continue ;; xcompiler) func_append compiler_flags " $qarg" prev= func_append compile_command " $qarg" func_append finalize_command " $qarg" continue ;; xlinker) func_append linker_flags " $qarg" func_append compiler_flags " $wl$qarg" prev= func_append compile_command " $wl$qarg" func_append finalize_command " $wl$qarg" continue ;; *) eval "$prev=\"\$arg\"" prev= continue ;; esac fi # test -n "$prev" prevarg="$arg" case $arg in -all-static) if test -n "$link_static_flag"; then # See comment for -static flag below, for more details. func_append compile_command " $link_static_flag" func_append finalize_command " $link_static_flag" fi continue ;; -allow-undefined) # FIXME: remove this flag sometime in the future. func_fatal_error "\`-allow-undefined' must not be used because it is the default" ;; -avoid-version) avoid_version=yes continue ;; -bindir) prev=bindir continue ;; -dlopen) prev=dlfiles continue ;; -dlpreopen) prev=dlprefiles continue ;; -export-dynamic) export_dynamic=yes continue ;; -export-symbols | -export-symbols-regex) if test -n "$export_symbols" || test -n "$export_symbols_regex"; then func_fatal_error "more than one -exported-symbols argument is not allowed" fi if test "X$arg" = "X-export-symbols"; then prev=expsyms else prev=expsyms_regex fi continue ;; -framework) prev=framework continue ;; -inst-prefix-dir) prev=inst_prefix continue ;; # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:* # so, if we see these flags be careful not to treat them like -L -L[A-Z][A-Z]*:*) case $with_gcc/$host in no/*-*-irix* | /*-*-irix*) func_append compile_command " $arg" func_append finalize_command " $arg" ;; esac continue ;; -L*) func_stripname "-L" '' "$arg" if test -z "$func_stripname_result"; then if test "$#" -gt 0; then func_fatal_error "require no space between \`-L' and \`$1'" else func_fatal_error "need path for \`-L' option" fi fi func_resolve_sysroot "$func_stripname_result" dir=$func_resolve_sysroot_result # We need an absolute path. 
case $dir in [\\/]* | [A-Za-z]:[\\/]*) ;; *) absdir=`cd "$dir" && pwd` test -z "$absdir" && \ func_fatal_error "cannot determine absolute directory name of \`$dir'" dir="$absdir" ;; esac case "$deplibs " in *" -L$dir "* | *" $arg "*) # Will only happen for absolute or sysroot arguments ;; *) # Preserve sysroot, but never include relative directories case $dir in [\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;; *) func_append deplibs " -L$dir" ;; esac func_append lib_search_path " $dir" ;; esac case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'` case :$dllsearchpath: in *":$dir:"*) ;; ::) dllsearchpath=$dir;; *) func_append dllsearchpath ":$dir";; esac case :$dllsearchpath: in *":$testbindir:"*) ;; ::) dllsearchpath=$testbindir;; *) func_append dllsearchpath ":$testbindir";; esac ;; esac continue ;; -l*) if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*) # These systems don't actually have a C or math library (as such) continue ;; *-*-os2*) # These systems don't actually have a C library (as such) test "X$arg" = "X-lc" && continue ;; *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) # Do not include libc due to us having libc/libc_r. test "X$arg" = "X-lc" && continue ;; *-*-rhapsody* | *-*-darwin1.[012]) # Rhapsody C and math libraries are in the System framework func_append deplibs " System.ltframework" continue ;; *-*-sco3.2v5* | *-*-sco5v6*) # Causes problems with __ctype test "X$arg" = "X-lc" && continue ;; *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) # Compiler inserts libc in the correct place for threads to work test "X$arg" = "X-lc" && continue ;; esac elif test "X$arg" = "X-lc_r"; then case $host in *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) # Do not include libc_r directly, use -pthread flag. continue ;; esac fi func_append deplibs " $arg" continue ;; -module) module=yes continue ;; # Tru64 UNIX uses -model [arg] to determine the layout of C++ # classes, name mangling, and exception handling. # Darwin uses the -arch flag to determine output architecture. -model|-arch|-isysroot|--sysroot) func_append compiler_flags " $arg" func_append compile_command " $arg" func_append finalize_command " $arg" prev=xcompiler continue ;; -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \ |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*) func_append compiler_flags " $arg" func_append compile_command " $arg" func_append finalize_command " $arg" case "$new_inherited_linker_flags " in *" $arg "*) ;; * ) func_append new_inherited_linker_flags " $arg" ;; esac continue ;; -multi_module) single_module="${wl}-multi_module" continue ;; -no-fast-install) fast_install=no continue ;; -no-install) case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*) # The PATH hackery in wrapper scripts is required on Windows # and Darwin in order for the loader to find any dlls it needs. func_warning "\`-no-install' is ignored for $host" func_warning "assuming \`-no-fast-install' instead" fast_install=no ;; *) no_install=yes ;; esac continue ;; -no-undefined) allow_undefined=no continue ;; -objectlist) prev=objectlist continue ;; -o) prev=output ;; -precious-files-regex) prev=precious_regex continue ;; -release) prev=release continue ;; -rpath) prev=rpath continue ;; -R) prev=xrpath continue ;; -R*) func_stripname '-R' '' "$arg" dir=$func_stripname_result # We need an absolute path. 
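# On the Windows-like hosts matched above, every -L directory is also
# recorded in $dllsearchpath together with its sibling "bin" directory,
# since DLLs usually live next to the import libraries rather than in
# lib/.  The $SED expression 's*/lib$*/bin*' (using '*' as the delimiter)
# performs the mapping; with illustrative paths:
#
#   dir=/opt/foo/lib   ->   testbindir=/opt/foo/bin
#   dllsearchpath=/opt/foo/lib:/opt/foo/bin
#
# Relative -L directories are first canonicalised with `cd "$dir" && pwd`,
# and duplicates are skipped by the *" -L$dir "* case pattern.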
case $dir in [\\/]* | [A-Za-z]:[\\/]*) ;; =*) func_stripname '=' '' "$dir" dir=$lt_sysroot$func_stripname_result ;; *) func_fatal_error "only absolute run-paths are allowed" ;; esac case "$xrpath " in *" $dir "*) ;; *) func_append xrpath " $dir" ;; esac continue ;; -shared) # The effects of -shared are defined in a previous loop. continue ;; -shrext) prev=shrext continue ;; -static | -static-libtool-libs) # The effects of -static are defined in a previous loop. # We used to do the same as -all-static on platforms that # didn't have a PIC flag, but the assumption that the effects # would be equivalent was wrong. It would break on at least # Digital Unix and AIX. continue ;; -thread-safe) thread_safe=yes continue ;; -version-info) prev=vinfo continue ;; -version-number) prev=vinfo vinfo_number=yes continue ;; -weak) prev=weak continue ;; -Wc,*) func_stripname '-Wc,' '' "$arg" args=$func_stripname_result arg= save_ifs="$IFS"; IFS=',' for flag in $args; do IFS="$save_ifs" func_quote_for_eval "$flag" func_append arg " $func_quote_for_eval_result" func_append compiler_flags " $func_quote_for_eval_result" done IFS="$save_ifs" func_stripname ' ' '' "$arg" arg=$func_stripname_result ;; -Wl,*) func_stripname '-Wl,' '' "$arg" args=$func_stripname_result arg= save_ifs="$IFS"; IFS=',' for flag in $args; do IFS="$save_ifs" func_quote_for_eval "$flag" func_append arg " $wl$func_quote_for_eval_result" func_append compiler_flags " $wl$func_quote_for_eval_result" func_append linker_flags " $func_quote_for_eval_result" done IFS="$save_ifs" func_stripname ' ' '' "$arg" arg=$func_stripname_result ;; -Xcompiler) prev=xcompiler continue ;; -Xlinker) prev=xlinker continue ;; -XCClinker) prev=xcclinker continue ;; # -msg_* for osf cc -msg_*) func_quote_for_eval "$arg" arg="$func_quote_for_eval_result" ;; # Flags to be passed through unchanged, with rationale: # -64, -mips[0-9] enable 64-bit mode for the SGI compiler # -r[0-9][0-9]* specify processor for the SGI compiler # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler # +DA*, +DD* enable 64-bit mode for the HP compiler # -q* compiler args for the IBM compiler # -m*, -t[45]*, -txscale* architecture-specific flags for GCC # -F/path path to uninstalled frameworks, gcc on darwin # -p, -pg, --coverage, -fprofile-* profiling flags for GCC # @file GCC response files # -tp=* Portland pgcc target processor selection # --sysroot=* for sysroot support # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \ -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \ -O*|-flto*|-fwhopr*|-fuse-linker-plugin) func_quote_for_eval "$arg" arg="$func_quote_for_eval_result" func_append compile_command " $arg" func_append finalize_command " $arg" func_append compiler_flags " $arg" continue ;; # Some other compiler flag. -* | +*) func_quote_for_eval "$arg" arg="$func_quote_for_eval_result" ;; *.$objext) # A standard object. func_append objs " $arg" ;; *.lo) # A libtool-controlled object. # Check to see that this really is a libtool object. if func_lalib_unsafe_p "$arg"; then pic_object= non_pic_object= # Read the .lo file func_source "$arg" if test -z "$pic_object" || test -z "$non_pic_object" || test "$pic_object" = none && test "$non_pic_object" = none; then func_fatal_error "cannot find name of object for \`$arg'" fi # Extract subdirectory from the argument. 
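# -Wc, and -Wl, arguments (handled above) are split on commas so each piece
# can be quoted on its own; -Wl, pieces are additionally prefixed with $wl,
# the compiler's pass-to-linker escape.  Illustrative expansion assuming
# wl="-Wl,":
#
#   input:  -Wl,-rpath,/opt/lib
#   linker_flags   += " -rpath /opt/lib"
#   compiler_flags += " -Wl,-rpath -Wl,/opt/lib"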
func_dirname "$arg" "/" "" xdir="$func_dirname_result" if test "$pic_object" != none; then # Prepend the subdirectory the object is found in. pic_object="$xdir$pic_object" if test "$prev" = dlfiles; then if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then func_append dlfiles " $pic_object" prev= continue else # If libtool objects are unsupported, then we need to preload. prev=dlprefiles fi fi # CHECK ME: I think I busted this. -Ossama if test "$prev" = dlprefiles; then # Preload the old-style object. func_append dlprefiles " $pic_object" prev= fi # A PIC object. func_append libobjs " $pic_object" arg="$pic_object" fi # Non-PIC object. if test "$non_pic_object" != none; then # Prepend the subdirectory the object is found in. non_pic_object="$xdir$non_pic_object" # A standard non-PIC object func_append non_pic_objects " $non_pic_object" if test -z "$pic_object" || test "$pic_object" = none ; then arg="$non_pic_object" fi else # If the PIC object exists, use it instead. # $xdir was prepended to $pic_object above. non_pic_object="$pic_object" func_append non_pic_objects " $non_pic_object" fi else # Only an error if not doing a dry-run. if $opt_dry_run; then # Extract subdirectory from the argument. func_dirname "$arg" "/" "" xdir="$func_dirname_result" func_lo2o "$arg" pic_object=$xdir$objdir/$func_lo2o_result non_pic_object=$xdir$func_lo2o_result func_append libobjs " $pic_object" func_append non_pic_objects " $non_pic_object" else func_fatal_error "\`$arg' is not a valid libtool object" fi fi ;; *.$libext) # An archive. func_append deplibs " $arg" func_append old_deplibs " $arg" continue ;; *.la) # A libtool-controlled library. func_resolve_sysroot "$arg" if test "$prev" = dlfiles; then # This library was specified with -dlopen. func_append dlfiles " $func_resolve_sysroot_result" prev= elif test "$prev" = dlprefiles; then # The library was specified with -dlpreopen. func_append dlprefiles " $func_resolve_sysroot_result" prev= else func_append deplibs " $func_resolve_sysroot_result" fi continue ;; # Some other compiler argument. *) # Unknown arguments in both finalize_command and compile_command need # to be aesthetically quoted because they are evaled later. func_quote_for_eval "$arg" arg="$func_quote_for_eval_result" ;; esac # arg # Now actually substitute the argument into the commands. if test -n "$arg"; then func_append compile_command " $arg" func_append finalize_command " $arg" fi done # argument parsing loop test -n "$prev" && \ func_fatal_help "the \`$prevarg' option requires an argument" if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then eval arg=\"$export_dynamic_flag_spec\" func_append compile_command " $arg" func_append finalize_command " $arg" fi oldlibs= # calculate the name of the file, without its directory func_basename "$output" outputname="$func_basename_result" libobjs_save="$libobjs" if test -n "$shlibpath_var"; then # get the directories listed in $shlibpath_var eval shlib_search_path=\`\$ECHO \"\${$shlibpath_var}\" \| \$SED \'s/:/ /g\'\` else shlib_search_path= fi eval sys_lib_search_path=\"$sys_lib_search_path_spec\" eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\" func_dirname "$output" "/" "" output_objdir="$func_dirname_result$objdir" func_to_tool_file "$output_objdir/" tool_output_objdir=$func_to_tool_file_result # Create the object directory. 
func_mkdir_p "$output_objdir" # Determine the type of output case $output in "") func_fatal_help "you must specify an output file" ;; *.$libext) linkmode=oldlib ;; *.lo | *.$objext) linkmode=obj ;; *.la) linkmode=lib ;; *) linkmode=prog ;; # Anything else should be a program. esac specialdeplibs= libs= # Find all interdependent deplibs by searching for libraries # that are linked more than once (e.g. -la -lb -la) for deplib in $deplibs; do if $opt_preserve_dup_deps ; then case "$libs " in *" $deplib "*) func_append specialdeplibs " $deplib" ;; esac fi func_append libs " $deplib" done if test "$linkmode" = lib; then libs="$predeps $libs $compiler_lib_search_path $postdeps" # Compute libraries that are listed more than once in $predeps # $postdeps and mark them as special (i.e., whose duplicates are # not to be eliminated). pre_post_deps= if $opt_duplicate_compiler_generated_deps; then for pre_post_dep in $predeps $postdeps; do case "$pre_post_deps " in *" $pre_post_dep "*) func_append specialdeplibs " $pre_post_deps" ;; esac func_append pre_post_deps " $pre_post_dep" done fi pre_post_deps= fi deplibs= newdependency_libs= newlib_search_path= need_relink=no # whether we're linking any uninstalled libtool libraries notinst_deplibs= # not-installed libtool libraries notinst_path= # paths that contain not-installed libtool libraries case $linkmode in lib) passes="conv dlpreopen link" for file in $dlfiles $dlprefiles; do case $file in *.la) ;; *) func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file" ;; esac done ;; prog) compile_deplibs= finalize_deplibs= alldeplibs=no newdlfiles= newdlprefiles= passes="conv scan dlopen dlpreopen link" ;; *) passes="conv" ;; esac for pass in $passes; do # The preopen pass in lib mode reverses $deplibs; put it back here # so that -L comes before libs that need it for instance... 
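# Summary of the output-type dispatch above (suffixes per $libext/$objext,
# typically .a and .o):
#
#   libfoo.a        -> linkmode=oldlib   (static archive only)
#   foo.lo / foo.o  -> linkmode=obj      (reloadable object)
#   libfoo.la       -> linkmode=lib      (libtool library)
#   anything else   -> linkmode=prog     (program)
#
# and the passes run for each mode: lib -> "conv dlpreopen link",
# prog -> "conv scan dlopen dlpreopen link", everything else -> "conv".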
if test "$linkmode,$pass" = "lib,link"; then ## FIXME: Find the place where the list is rebuilt in the wrong ## order, and fix it there properly tmp_deplibs= for deplib in $deplibs; do tmp_deplibs="$deplib $tmp_deplibs" done deplibs="$tmp_deplibs" fi if test "$linkmode,$pass" = "lib,link" || test "$linkmode,$pass" = "prog,scan"; then libs="$deplibs" deplibs= fi if test "$linkmode" = prog; then case $pass in dlopen) libs="$dlfiles" ;; dlpreopen) libs="$dlprefiles" ;; link) libs="$deplibs %DEPLIBS% $dependency_libs" ;; esac fi if test "$linkmode,$pass" = "lib,dlpreopen"; then # Collect and forward deplibs of preopened libtool libs for lib in $dlprefiles; do # Ignore non-libtool-libs dependency_libs= func_resolve_sysroot "$lib" case $lib in *.la) func_source "$func_resolve_sysroot_result" ;; esac # Collect preopened libtool deplibs, except any this library # has declared as weak libs for deplib in $dependency_libs; do func_basename "$deplib" deplib_base=$func_basename_result case " $weak_libs " in *" $deplib_base "*) ;; *) func_append deplibs " $deplib" ;; esac done done libs="$dlprefiles" fi if test "$pass" = dlopen; then # Collect dlpreopened libraries save_deplibs="$deplibs" deplibs= fi for deplib in $libs; do lib= found=no case $deplib in -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \ |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*) if test "$linkmode,$pass" = "prog,link"; then compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else func_append compiler_flags " $deplib" if test "$linkmode" = lib ; then case "$new_inherited_linker_flags " in *" $deplib "*) ;; * ) func_append new_inherited_linker_flags " $deplib" ;; esac fi fi continue ;; -l*) if test "$linkmode" != lib && test "$linkmode" != prog; then func_warning "\`-l' is ignored for archives/objects" continue fi func_stripname '-l' '' "$deplib" name=$func_stripname_result if test "$linkmode" = lib; then searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path" else searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path" fi for searchdir in $searchdirs; do for search_ext in .la $std_shrext .so .a; do # Search the libtool library lib="$searchdir/lib${name}${search_ext}" if test -f "$lib"; then if test "$search_ext" = ".la"; then found=yes else found=no fi break 2 fi done done if test "$found" != yes; then # deplib doesn't seem to be a libtool library if test "$linkmode,$pass" = "prog,link"; then compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else deplibs="$deplib $deplibs" test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" fi continue else # deplib is a libtool library # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib, # We need to do some special things here, and not later. if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then case " $predeps $postdeps " in *" $deplib "*) if func_lalib_p "$lib"; then library_names= old_library= func_source "$lib" for l in $old_library $library_names; do ll="$l" done if test "X$ll" = "X$old_library" ; then # only static version available found=no func_dirname "$lib" "" "." 
ladir="$func_dirname_result" lib=$ladir/$old_library if test "$linkmode,$pass" = "prog,link"; then compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else deplibs="$deplib $deplibs" test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" fi continue fi fi ;; *) ;; esac fi fi ;; # -l *.ltframework) if test "$linkmode,$pass" = "prog,link"; then compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else deplibs="$deplib $deplibs" if test "$linkmode" = lib ; then case "$new_inherited_linker_flags " in *" $deplib "*) ;; * ) func_append new_inherited_linker_flags " $deplib" ;; esac fi fi continue ;; -L*) case $linkmode in lib) deplibs="$deplib $deplibs" test "$pass" = conv && continue newdependency_libs="$deplib $newdependency_libs" func_stripname '-L' '' "$deplib" func_resolve_sysroot "$func_stripname_result" func_append newlib_search_path " $func_resolve_sysroot_result" ;; prog) if test "$pass" = conv; then deplibs="$deplib $deplibs" continue fi if test "$pass" = scan; then deplibs="$deplib $deplibs" else compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" fi func_stripname '-L' '' "$deplib" func_resolve_sysroot "$func_stripname_result" func_append newlib_search_path " $func_resolve_sysroot_result" ;; *) func_warning "\`-L' is ignored for archives/objects" ;; esac # linkmode continue ;; # -L -R*) if test "$pass" = link; then func_stripname '-R' '' "$deplib" func_resolve_sysroot "$func_stripname_result" dir=$func_resolve_sysroot_result # Make sure the xrpath contains only unique directories. case "$xrpath " in *" $dir "*) ;; *) func_append xrpath " $dir" ;; esac fi deplibs="$deplib $deplibs" continue ;; *.la) func_resolve_sysroot "$deplib" lib=$func_resolve_sysroot_result ;; *.$libext) if test "$pass" = conv; then deplibs="$deplib $deplibs" continue fi case $linkmode in lib) # Linking convenience modules into shared libraries is allowed, # but linking other static libraries is non-portable. case " $dlpreconveniencelibs " in *" $deplib "*) ;; *) valid_a_lib=no case $deplibs_check_method in match_pattern*) set dummy $deplibs_check_method; shift match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \ | $EGREP "$match_pattern_regex" > /dev/null; then valid_a_lib=yes fi ;; pass_all) valid_a_lib=yes ;; esac if test "$valid_a_lib" != yes; then echo $ECHO "*** Warning: Trying to link with static lib archive $deplib." echo "*** I have the capability to make that library automatically link in when" echo "*** you link to this library. But I can only do this if you have a" echo "*** shared version of the library, which you do not appear to have" echo "*** because the file extensions .$libext of this argument makes me believe" echo "*** that it is just a static archive that I should not use here." else echo $ECHO "*** Warning: Linking the shared library $output against the" $ECHO "*** static library $deplib is not portable!" 
deplibs="$deplib $deplibs" fi ;; esac continue ;; prog) if test "$pass" != link; then deplibs="$deplib $deplibs" else compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" fi continue ;; esac # linkmode ;; # *.$libext *.lo | *.$objext) if test "$pass" = conv; then deplibs="$deplib $deplibs" elif test "$linkmode" = prog; then if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then # If there is no dlopen support or we're linking statically, # we need to preload. func_append newdlprefiles " $deplib" compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else func_append newdlfiles " $deplib" fi fi continue ;; %DEPLIBS%) alldeplibs=yes continue ;; esac # case $deplib if test "$found" = yes || test -f "$lib"; then : else func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'" fi # Check to see that this really is a libtool archive. func_lalib_unsafe_p "$lib" \ || func_fatal_error "\`$lib' is not a valid libtool archive" func_dirname "$lib" "" "." ladir="$func_dirname_result" dlname= dlopen= dlpreopen= libdir= library_names= old_library= inherited_linker_flags= # If the library was installed with an old release of libtool, # it will not redefine variables installed, or shouldnotlink installed=yes shouldnotlink=no avoidtemprpath= # Read the .la file func_source "$lib" # Convert "-framework foo" to "foo.ltframework" if test -n "$inherited_linker_flags"; then tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'` for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do case " $new_inherited_linker_flags " in *" $tmp_inherited_linker_flag "*) ;; *) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";; esac done fi dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` if test "$linkmode,$pass" = "lib,link" || test "$linkmode,$pass" = "prog,scan" || { test "$linkmode" != prog && test "$linkmode" != lib; }; then test -n "$dlopen" && func_append dlfiles " $dlopen" test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen" fi if test "$pass" = conv; then # Only check for convenience libraries deplibs="$lib $deplibs" if test -z "$libdir"; then if test -z "$old_library"; then func_fatal_error "cannot find name of link library for \`$lib'" fi # It is a libtool convenience library, so add in its objects. func_append convenience " $ladir/$objdir/$old_library" func_append old_convenience " $ladir/$objdir/$old_library" elif test "$linkmode" != prog && test "$linkmode" != lib; then func_fatal_error "\`$lib' is not a convenience library" fi tmp_libs= for deplib in $dependency_libs; do deplibs="$deplib $deplibs" if $opt_preserve_dup_deps ; then case "$tmp_libs " in *" $deplib "*) func_append specialdeplibs " $deplib" ;; esac fi func_append tmp_libs " $deplib" done continue fi # $pass = conv # Get the name of the library we link against. linklib= if test -n "$old_library" && { test "$prefer_static_libs" = yes || test "$prefer_static_libs,$installed" = "built,no"; }; then linklib=$old_library else for l in $old_library $library_names; do linklib="$l" done fi if test -z "$linklib"; then func_fatal_error "cannot find name of link library for \`$lib'" fi # This library was specified with -dlopen. 
if test "$pass" = dlopen; then if test -z "$libdir"; then func_fatal_error "cannot -dlopen a convenience library: \`$lib'" fi if test -z "$dlname" || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then # If there is no dlname, no dlopen support or we're linking # statically, we need to preload. We also need to preload any # dependent libraries so libltdl's deplib preloader doesn't # bomb out in the load deplibs phase. func_append dlprefiles " $lib $dependency_libs" else func_append newdlfiles " $lib" fi continue fi # $pass = dlopen # We need an absolute path. case $ladir in [\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;; *) abs_ladir=`cd "$ladir" && pwd` if test -z "$abs_ladir"; then func_warning "cannot determine absolute directory name of \`$ladir'" func_warning "passing it literally to the linker, although it might fail" abs_ladir="$ladir" fi ;; esac func_basename "$lib" laname="$func_basename_result" # Find the relevant object directory and library name. if test "X$installed" = Xyes; then if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then func_warning "library \`$lib' was moved." dir="$ladir" absdir="$abs_ladir" libdir="$abs_ladir" else dir="$lt_sysroot$libdir" absdir="$lt_sysroot$libdir" fi test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes else if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then dir="$ladir" absdir="$abs_ladir" # Remove this search path later func_append notinst_path " $abs_ladir" else dir="$ladir/$objdir" absdir="$abs_ladir/$objdir" # Remove this search path later func_append notinst_path " $abs_ladir" fi fi # $installed = yes func_stripname 'lib' '.la' "$laname" name=$func_stripname_result # This library was specified with -dlpreopen. if test "$pass" = dlpreopen; then if test -z "$libdir" && test "$linkmode" = prog; then func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'" fi case "$host" in # special handling for platforms with PE-DLLs. *cygwin* | *mingw* | *cegcc* ) # Linker will automatically link against shared library if both # static and shared are present. Therefore, ensure we extract # symbols from the import library if a shared library is present # (otherwise, the dlopen module name will be incorrect). We do # this by putting the import library name into $newdlprefiles. # We recover the dlopen module name by 'saving' the la file # name in a special purpose variable, and (later) extracting the # dlname from the la file. if test -n "$dlname"; then func_tr_sh "$dir/$linklib" eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname" func_append newdlprefiles " $dir/$linklib" else func_append newdlprefiles " $dir/$old_library" # Keep a list of preopened convenience libraries to check # that they are being used correctly in the link pass. test -z "$libdir" && \ func_append dlpreconveniencelibs " $dir/$old_library" fi ;; * ) # Prefer using a static library (so that no silly _DYNAMIC symbols # are required to link). if test -n "$old_library"; then func_append newdlprefiles " $dir/$old_library" # Keep a list of preopened convenience libraries to check # that they are being used correctly in the link pass. test -z "$libdir" && \ func_append dlpreconveniencelibs " $dir/$old_library" # Otherwise, use the dlname, so that lt_dlopen finds it. 
elif test -n "$dlname"; then func_append newdlprefiles " $dir/$dlname" else func_append newdlprefiles " $dir/$linklib" fi ;; esac fi # $pass = dlpreopen if test -z "$libdir"; then # Link the convenience library if test "$linkmode" = lib; then deplibs="$dir/$old_library $deplibs" elif test "$linkmode,$pass" = "prog,link"; then compile_deplibs="$dir/$old_library $compile_deplibs" finalize_deplibs="$dir/$old_library $finalize_deplibs" else deplibs="$lib $deplibs" # used for prog,scan pass fi continue fi if test "$linkmode" = prog && test "$pass" != link; then func_append newlib_search_path " $ladir" deplibs="$lib $deplibs" linkalldeplibs=no if test "$link_all_deplibs" != no || test -z "$library_names" || test "$build_libtool_libs" = no; then linkalldeplibs=yes fi tmp_libs= for deplib in $dependency_libs; do case $deplib in -L*) func_stripname '-L' '' "$deplib" func_resolve_sysroot "$func_stripname_result" func_append newlib_search_path " $func_resolve_sysroot_result" ;; esac # Need to link against all dependency_libs? if test "$linkalldeplibs" = yes; then deplibs="$deplib $deplibs" else # Need to hardcode shared library paths # or/and link against static libraries newdependency_libs="$deplib $newdependency_libs" fi if $opt_preserve_dup_deps ; then case "$tmp_libs " in *" $deplib "*) func_append specialdeplibs " $deplib" ;; esac fi func_append tmp_libs " $deplib" done # for deplib continue fi # $linkmode = prog... if test "$linkmode,$pass" = "prog,link"; then if test -n "$library_names" && { { test "$prefer_static_libs" = no || test "$prefer_static_libs,$installed" = "built,yes"; } || test -z "$old_library"; }; then # We need to hardcode the library path if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then # Make sure the rpath contains only unique directories. case "$temp_rpath:" in *"$absdir:"*) ;; *) func_append temp_rpath "$absdir:" ;; esac fi # Hardcode the library path. # Skip directories that are in the system default run-time # search path. case " $sys_lib_dlsearch_path " in *" $absdir "*) ;; *) case "$compile_rpath " in *" $absdir "*) ;; *) func_append compile_rpath " $absdir" ;; esac ;; esac case " $sys_lib_dlsearch_path " in *" $libdir "*) ;; *) case "$finalize_rpath " in *" $libdir "*) ;; *) func_append finalize_rpath " $libdir" ;; esac ;; esac fi # $linkmode,$pass = prog,link... if test "$alldeplibs" = yes && { test "$deplibs_check_method" = pass_all || { test "$build_libtool_libs" = yes && test -n "$library_names"; }; }; then # We only need to search for static libraries continue fi fi link_static=no # Whether the deplib will be linked statically use_static_libs=$prefer_static_libs if test "$use_static_libs" = built && test "$installed" = yes; then use_static_libs=no fi if test -n "$library_names" && { test "$use_static_libs" = no || test -z "$old_library"; }; then case $host in *cygwin* | *mingw* | *cegcc*) # No point in relinking DLLs because paths are not encoded func_append notinst_deplibs " $lib" need_relink=no ;; *) if test "$installed" = no; then func_append notinst_deplibs " $lib" need_relink=yes fi ;; esac # This is a shared library # Warn about portability, can't link against -module's on some # systems (darwin). Don't bleat about dlopened modules though! 
dlopenmodule="" for dlpremoduletest in $dlprefiles; do if test "X$dlpremoduletest" = "X$lib"; then dlopenmodule="$dlpremoduletest" break fi done if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then echo if test "$linkmode" = prog; then $ECHO "*** Warning: Linking the executable $output against the loadable module" else $ECHO "*** Warning: Linking the shared library $output against the loadable module" fi $ECHO "*** $linklib is not portable!" fi if test "$linkmode" = lib && test "$hardcode_into_libs" = yes; then # Hardcode the library path. # Skip directories that are in the system default run-time # search path. case " $sys_lib_dlsearch_path " in *" $absdir "*) ;; *) case "$compile_rpath " in *" $absdir "*) ;; *) func_append compile_rpath " $absdir" ;; esac ;; esac case " $sys_lib_dlsearch_path " in *" $libdir "*) ;; *) case "$finalize_rpath " in *" $libdir "*) ;; *) func_append finalize_rpath " $libdir" ;; esac ;; esac fi if test -n "$old_archive_from_expsyms_cmds"; then # figure out the soname set dummy $library_names shift realname="$1" shift libname=`eval "\\$ECHO \"$libname_spec\""` # use dlname if we got it. it's perfectly good, no? if test -n "$dlname"; then soname="$dlname" elif test -n "$soname_spec"; then # bleh windows case $host in *cygwin* | mingw* | *cegcc*) func_arith $current - $age major=$func_arith_result versuffix="-$major" ;; esac eval soname=\"$soname_spec\" else soname="$realname" fi # Make a new name for the extract_expsyms_cmds to use soroot="$soname" func_basename "$soroot" soname="$func_basename_result" func_stripname 'lib' '.dll' "$soname" newlib=libimp-$func_stripname_result.a # If the library has no export list, then create one now if test -f "$output_objdir/$soname-def"; then : else func_verbose "extracting exported symbol list from \`$soname'" func_execute_cmds "$extract_expsyms_cmds" 'exit $?' fi # Create $newlib if test -f "$output_objdir/$newlib"; then :; else func_verbose "generating import library for \`$soname'" func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?' 
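# The branch above runs only when $old_archive_from_expsyms_cmds is set
# (PE hosts such as Cygwin/MinGW): the export list is extracted from the
# DLL and a fresh import library is generated so the link can proceed
# against it.  Name derivation with illustrative values:
#
#   library_names='libfoo-2.dll ...'  ->  realname=libfoo-2.dll
#   soname=libfoo-2.dll               ->  newlib=libimp-foo-2.a
#
# i.e. func_stripname drops the "lib" prefix and ".dll" suffix and the
# stem is wrapped as libimp-<stem>.a inside $output_objdir.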
fi # make sure the library variables are pointing to the new library dir=$output_objdir linklib=$newlib fi # test -n "$old_archive_from_expsyms_cmds" if test "$linkmode" = prog || test "$opt_mode" != relink; then add_shlibpath= add_dir= add= lib_linked=yes case $hardcode_action in immediate | unsupported) if test "$hardcode_direct" = no; then add="$dir/$linklib" case $host in *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;; *-*-sysv4*uw2*) add_dir="-L$dir" ;; *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \ *-*-unixware7*) add_dir="-L$dir" ;; *-*-darwin* ) # if the lib is a (non-dlopened) module then we can not # link against it, someone is ignoring the earlier warnings if /usr/bin/file -L $add 2> /dev/null | $GREP ": [^:]* bundle" >/dev/null ; then if test "X$dlopenmodule" != "X$lib"; then $ECHO "*** Warning: lib $linklib is a module, not a shared library" if test -z "$old_library" ; then echo echo "*** And there doesn't seem to be a static archive available" echo "*** The link will probably fail, sorry" else add="$dir/$old_library" fi elif test -n "$old_library"; then add="$dir/$old_library" fi fi esac elif test "$hardcode_minus_L" = no; then case $host in *-*-sunos*) add_shlibpath="$dir" ;; esac add_dir="-L$dir" add="-l$name" elif test "$hardcode_shlibpath_var" = no; then add_shlibpath="$dir" add="-l$name" else lib_linked=no fi ;; relink) if test "$hardcode_direct" = yes && test "$hardcode_direct_absolute" = no; then add="$dir/$linklib" elif test "$hardcode_minus_L" = yes; then add_dir="-L$absdir" # Try looking first in the location we're being installed to. if test -n "$inst_prefix_dir"; then case $libdir in [\\/]*) func_append add_dir " -L$inst_prefix_dir$libdir" ;; esac fi add="-l$name" elif test "$hardcode_shlibpath_var" = yes; then add_shlibpath="$dir" add="-l$name" else lib_linked=no fi ;; *) lib_linked=no ;; esac if test "$lib_linked" != yes; then func_fatal_configuration "unsupported hardcode properties" fi if test -n "$add_shlibpath"; then case :$compile_shlibpath: in *":$add_shlibpath:"*) ;; *) func_append compile_shlibpath "$add_shlibpath:" ;; esac fi if test "$linkmode" = prog; then test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs" test -n "$add" && compile_deplibs="$add $compile_deplibs" else test -n "$add_dir" && deplibs="$add_dir $deplibs" test -n "$add" && deplibs="$add $deplibs" if test "$hardcode_direct" != yes && test "$hardcode_minus_L" != yes && test "$hardcode_shlibpath_var" = yes; then case :$finalize_shlibpath: in *":$libdir:"*) ;; *) func_append finalize_shlibpath "$libdir:" ;; esac fi fi fi if test "$linkmode" = prog || test "$opt_mode" = relink; then add_shlibpath= add_dir= add= # Finalize command for both is simple: just hardcode it. if test "$hardcode_direct" = yes && test "$hardcode_direct_absolute" = no; then add="$libdir/$linklib" elif test "$hardcode_minus_L" = yes; then add_dir="-L$libdir" add="-l$name" elif test "$hardcode_shlibpath_var" = yes; then case :$finalize_shlibpath: in *":$libdir:"*) ;; *) func_append finalize_shlibpath "$libdir:" ;; esac add="-l$name" elif test "$hardcode_automatic" = yes; then if test -n "$inst_prefix_dir" && test -f "$inst_prefix_dir$libdir/$linklib" ; then add="$inst_prefix_dir$libdir/$linklib" else add="$libdir/$linklib" fi else # We cannot seem to hardcode it, guess we'll fake it. add_dir="-L$libdir" # Try looking first in the location we're being installed to. 
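# The hardcode_* capabilities recorded at configure time pick one of three
# ways of encoding the library location (shown here for the relink branch):
#
#   hardcode_direct=yes         ->  add="$dir/$linklib"            (full path)
#   hardcode_minus_L=yes        ->  add_dir="-L$absdir"  add="-l$name"
#   hardcode_shlibpath_var=yes  ->  add_shlibpath="$dir" add="-l$name"
#
# Anything else leaves lib_linked=no, which is reported as a fatal
# "unsupported hardcode properties" configuration error.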
if test -n "$inst_prefix_dir"; then case $libdir in [\\/]*) func_append add_dir " -L$inst_prefix_dir$libdir" ;; esac fi add="-l$name" fi if test "$linkmode" = prog; then test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs" test -n "$add" && finalize_deplibs="$add $finalize_deplibs" else test -n "$add_dir" && deplibs="$add_dir $deplibs" test -n "$add" && deplibs="$add $deplibs" fi fi elif test "$linkmode" = prog; then # Here we assume that one of hardcode_direct or hardcode_minus_L # is not unsupported. This is valid on all known static and # shared platforms. if test "$hardcode_direct" != unsupported; then test -n "$old_library" && linklib="$old_library" compile_deplibs="$dir/$linklib $compile_deplibs" finalize_deplibs="$dir/$linklib $finalize_deplibs" else compile_deplibs="-l$name -L$dir $compile_deplibs" finalize_deplibs="-l$name -L$dir $finalize_deplibs" fi elif test "$build_libtool_libs" = yes; then # Not a shared library if test "$deplibs_check_method" != pass_all; then # We're trying link a shared library against a static one # but the system doesn't support it. # Just print a warning and add the library to dependency_libs so # that the program can be linked against the static library. echo $ECHO "*** Warning: This system can not link to static lib archive $lib." echo "*** I have the capability to make that library automatically link in when" echo "*** you link to this library. But I can only do this if you have a" echo "*** shared version of the library, which you do not appear to have." if test "$module" = yes; then echo "*** But as you try to build a module library, libtool will still create " echo "*** a static module, that should work as long as the dlopening application" echo "*** is linked with the -dlopen flag to resolve symbols at runtime." if test -z "$global_symbol_pipe"; then echo echo "*** However, this would only work if libtool was able to extract symbol" echo "*** lists from a program, using \`nm' or equivalent, but libtool could" echo "*** not find such a program. So, this module is probably useless." echo "*** \`nm' from GNU binutils and a full rebuild may help." fi if test "$build_old_libs" = no; then build_libtool_libs=module build_old_libs=yes else build_libtool_libs=no fi fi else deplibs="$dir/$old_library $deplibs" link_static=yes fi fi # link shared/static library? if test "$linkmode" = lib; then if test -n "$dependency_libs" && { test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes || test "$link_static" = yes; }; then # Extract -R from dependency_libs temp_deplibs= for libdir in $dependency_libs; do case $libdir in -R*) func_stripname '-R' '' "$libdir" temp_xrpath=$func_stripname_result case " $xrpath " in *" $temp_xrpath "*) ;; *) func_append xrpath " $temp_xrpath";; esac;; *) func_append temp_deplibs " $libdir";; esac done dependency_libs="$temp_deplibs" fi func_append newlib_search_path " $absdir" # Link against this library test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs" # ... 
and its dependency_libs tmp_libs= for deplib in $dependency_libs; do newdependency_libs="$deplib $newdependency_libs" case $deplib in -L*) func_stripname '-L' '' "$deplib" func_resolve_sysroot "$func_stripname_result";; *) func_resolve_sysroot "$deplib" ;; esac if $opt_preserve_dup_deps ; then case "$tmp_libs " in *" $func_resolve_sysroot_result "*) func_append specialdeplibs " $func_resolve_sysroot_result" ;; esac fi func_append tmp_libs " $func_resolve_sysroot_result" done if test "$link_all_deplibs" != no; then # Add the search paths of all dependency libraries for deplib in $dependency_libs; do path= case $deplib in -L*) path="$deplib" ;; *.la) func_resolve_sysroot "$deplib" deplib=$func_resolve_sysroot_result func_dirname "$deplib" "" "." dir=$func_dirname_result # We need an absolute path. case $dir in [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;; *) absdir=`cd "$dir" && pwd` if test -z "$absdir"; then func_warning "cannot determine absolute directory name of \`$dir'" absdir="$dir" fi ;; esac if $GREP "^installed=no" $deplib > /dev/null; then case $host in *-*-darwin*) depdepl= eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` if test -n "$deplibrary_names" ; then for tmp in $deplibrary_names ; do depdepl=$tmp done if test -f "$absdir/$objdir/$depdepl" ; then depdepl="$absdir/$objdir/$depdepl" darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` if test -z "$darwin_install_name"; then darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` fi func_append compiler_flags " ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}" func_append linker_flags " -dylib_file ${darwin_install_name}:${depdepl}" path= fi fi ;; *) path="-L$absdir/$objdir" ;; esac else eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` test -z "$libdir" && \ func_fatal_error "\`$deplib' is not a valid libtool archive" test "$absdir" != "$libdir" && \ func_warning "\`$deplib' seems to be moved" path="-L$absdir" fi ;; esac case " $deplibs " in *" $path "*) ;; *) deplibs="$path $deplibs" ;; esac done fi # link_all_deplibs != no fi # linkmode = lib done # for deplib in $libs if test "$pass" = link; then if test "$linkmode" = "prog"; then compile_deplibs="$new_inherited_linker_flags $compile_deplibs" finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs" else compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` fi fi dependency_libs="$newdependency_libs" if test "$pass" = dlpreopen; then # Link the dlpreopened libraries before other libraries for deplib in $save_deplibs; do deplibs="$deplib $deplibs" done fi if test "$pass" != dlopen; then if test "$pass" != conv; then # Make sure lib_search_path contains only unique directories. 
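# In the Darwin branch above, a dependency that is not yet installed cannot
# be found through its final install path, so its install name is read from
# the freshly built dylib with otool and remapped for the linker.
# Schematic of the generated flags, with illustrative paths and wl="-Wl,":
#
#   darwin_install_name=`$OTOOL -L .libs/libbar.1.dylib | awk 'NR==2{print $1}'`
#   compiler_flags += " -Wl,-dylib_file -Wl,$darwin_install_name:.libs/libbar.1.dylib"
#   linker_flags   += " -dylib_file $darwin_install_name:.libs/libbar.1.dylib"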
lib_search_path= for dir in $newlib_search_path; do case "$lib_search_path " in *" $dir "*) ;; *) func_append lib_search_path " $dir" ;; esac done newlib_search_path= fi if test "$linkmode,$pass" != "prog,link"; then vars="deplibs" else vars="compile_deplibs finalize_deplibs" fi for var in $vars dependency_libs; do # Add libraries to $var in reverse order eval tmp_libs=\"\$$var\" new_libs= for deplib in $tmp_libs; do # FIXME: Pedantically, this is the right thing to do, so # that some nasty dependency loop isn't accidentally # broken: #new_libs="$deplib $new_libs" # Pragmatically, this seems to cause very few problems in # practice: case $deplib in -L*) new_libs="$deplib $new_libs" ;; -R*) ;; *) # And here is the reason: when a library appears more # than once as an explicit dependence of a library, or # is implicitly linked in more than once by the # compiler, it is considered special, and multiple # occurrences thereof are not removed. Compare this # with having the same library being listed as a # dependency of multiple other libraries: in this case, # we know (pedantically, we assume) the library does not # need to be listed more than once, so we keep only the # last copy. This is not always right, but it is rare # enough that we require users that really mean to play # such unportable linking tricks to link the library # using -Wl,-lname, so that libtool does not consider it # for duplicate removal. case " $specialdeplibs " in *" $deplib "*) new_libs="$deplib $new_libs" ;; *) case " $new_libs " in *" $deplib "*) ;; *) new_libs="$deplib $new_libs" ;; esac ;; esac ;; esac done tmp_libs= for deplib in $new_libs; do case $deplib in -L*) case " $tmp_libs " in *" $deplib "*) ;; *) func_append tmp_libs " $deplib" ;; esac ;; *) func_append tmp_libs " $deplib" ;; esac done eval $var=\"$tmp_libs\" done # for var fi # Last step: remove runtime libs from dependency_libs # (they stay in deplibs) tmp_libs= for i in $dependency_libs ; do case " $predeps $postdeps $compiler_lib_search_path " in *" $i "*) i="" ;; esac if test -n "$i" ; then func_append tmp_libs " $i" fi done dependency_libs=$tmp_libs done # for pass if test "$linkmode" = prog; then dlfiles="$newdlfiles" fi if test "$linkmode" = prog || test "$linkmode" = lib; then dlprefiles="$newdlprefiles" fi case $linkmode in oldlib) if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then func_warning "\`-dlopen' is ignored for archives" fi case " $deplibs" in *\ -l* | *\ -L*) func_warning "\`-l' and \`-L' are ignored for archives" ;; esac test -n "$rpath" && \ func_warning "\`-rpath' is ignored for archives" test -n "$xrpath" && \ func_warning "\`-R' is ignored for archives" test -n "$vinfo" && \ func_warning "\`-version-info/-version-number' is ignored for archives" test -n "$release" && \ func_warning "\`-release' is ignored for archives" test -n "$export_symbols$export_symbols_regex" && \ func_warning "\`-export-symbols' is ignored for archives" # Now set the variables for building old libraries. build_libtool_libs=no oldlibs="$output" func_append objs "$old_deplibs" ;; lib) # Make sure we only generate libraries of the form `libNAME.la'. 
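# The reversal loop above rebuilds each list so that an ordinary library
# appearing several times is collapsed to a single copy (the last one),
# while -L flags and anything recorded in $specialdeplibs keep every
# occurrence.  Libraries that genuinely need to be listed more than once
# can be passed as -Wl,-lname so that libtool leaves them alone, as the
# comment in the loop explains.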
case $outputname in lib*) func_stripname 'lib' '.la' "$outputname" name=$func_stripname_result eval shared_ext=\"$shrext_cmds\" eval libname=\"$libname_spec\" ;; *) test "$module" = no && \ func_fatal_help "libtool library \`$output' must begin with \`lib'" if test "$need_lib_prefix" != no; then # Add the "lib" prefix for modules if required func_stripname '' '.la' "$outputname" name=$func_stripname_result eval shared_ext=\"$shrext_cmds\" eval libname=\"$libname_spec\" else func_stripname '' '.la' "$outputname" libname=$func_stripname_result fi ;; esac if test -n "$objs"; then if test "$deplibs_check_method" != pass_all; then func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs" else echo $ECHO "*** Warning: Linking the shared library $output against the non-libtool" $ECHO "*** objects $objs is not portable!" func_append libobjs " $objs" fi fi test "$dlself" != no && \ func_warning "\`-dlopen self' is ignored for libtool libraries" set dummy $rpath shift test "$#" -gt 1 && \ func_warning "ignoring multiple \`-rpath's for a libtool library" install_libdir="$1" oldlibs= if test -z "$rpath"; then if test "$build_libtool_libs" = yes; then # Building a libtool convenience library. # Some compilers have problems with a `.al' extension so # convenience libraries should have the same extension an # archive normally would. oldlibs="$output_objdir/$libname.$libext $oldlibs" build_libtool_libs=convenience build_old_libs=yes fi test -n "$vinfo" && \ func_warning "\`-version-info/-version-number' is ignored for convenience libraries" test -n "$release" && \ func_warning "\`-release' is ignored for convenience libraries" else # Parse the version information argument. save_ifs="$IFS"; IFS=':' set dummy $vinfo 0 0 0 shift IFS="$save_ifs" test -n "$7" && \ func_fatal_help "too many parameters to \`-version-info'" # convert absolute version numbers to libtool ages # this retains compatibility with .la files and attempts # to make the code below a bit more comprehensible case $vinfo_number in yes) number_major="$1" number_minor="$2" number_revision="$3" # # There are really only two kinds -- those that # use the current revision as the major version # and those that subtract age and use age as # a minor version. But, then there is irix # which has an extra 1 added just for fun # case $version_type in # correct linux to gnu/linux during the next big refactor darwin|linux|osf|windows|none) func_arith $number_major + $number_minor current=$func_arith_result age="$number_minor" revision="$number_revision" ;; freebsd-aout|freebsd-elf|qnx|sunos) current="$number_major" revision="$number_minor" age="0" ;; irix|nonstopux) func_arith $number_major + $number_minor current=$func_arith_result age="$number_minor" revision="$number_minor" lt_irix_increment=no ;; esac ;; no) current="$1" revision="$2" age="$3" ;; esac # Check that each of the things are valid numbers. 
case $current in 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; *) func_error "CURRENT \`$current' must be a nonnegative integer" func_fatal_error "\`$vinfo' is not valid version information" ;; esac case $revision in 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; *) func_error "REVISION \`$revision' must be a nonnegative integer" func_fatal_error "\`$vinfo' is not valid version information" ;; esac case $age in 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; *) func_error "AGE \`$age' must be a nonnegative integer" func_fatal_error "\`$vinfo' is not valid version information" ;; esac if test "$age" -gt "$current"; then func_error "AGE \`$age' is greater than the current interface number \`$current'" func_fatal_error "\`$vinfo' is not valid version information" fi # Calculate the version variables. major= versuffix= verstring= case $version_type in none) ;; darwin) # Like Linux, but with the current version available in # verstring for coding it into the library header func_arith $current - $age major=.$func_arith_result versuffix="$major.$age.$revision" # Darwin ld doesn't like 0 for these options... func_arith $current + 1 minor_current=$func_arith_result xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision" verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" ;; freebsd-aout) major=".$current" versuffix=".$current.$revision"; ;; freebsd-elf) major=".$current" versuffix=".$current" ;; irix | nonstopux) if test "X$lt_irix_increment" = "Xno"; then func_arith $current - $age else func_arith $current - $age + 1 fi major=$func_arith_result case $version_type in nonstopux) verstring_prefix=nonstopux ;; *) verstring_prefix=sgi ;; esac verstring="$verstring_prefix$major.$revision" # Add in all the interfaces that we are compatible with. loop=$revision while test "$loop" -ne 0; do func_arith $revision - $loop iface=$func_arith_result func_arith $loop - 1 loop=$func_arith_result verstring="$verstring_prefix$major.$iface:$verstring" done # Before this point, $major must not contain `.'. major=.$major versuffix="$major.$revision" ;; linux) # correct to gnu/linux during the next big refactor func_arith $current - $age major=.$func_arith_result versuffix="$major.$age.$revision" ;; osf) func_arith $current - $age major=.$func_arith_result versuffix=".$current.$age.$revision" verstring="$current.$age.$revision" # Add in all the interfaces that we are compatible with. loop=$age while test "$loop" -ne 0; do func_arith $current - $loop iface=$func_arith_result func_arith $loop - 1 loop=$func_arith_result verstring="$verstring:${iface}.0" done # Make executables depend on our current version. func_append verstring ":${current}.0" ;; qnx) major=".$current" versuffix=".$current" ;; sunos) major=".$current" versuffix=".$current.$revision" ;; windows) # Use '-' rather than '.', since we only want one # extension on DOS 8.3 filesystems. func_arith $current - $age major=$func_arith_result versuffix="-$major" ;; *) func_fatal_configuration "unknown library version type \`$version_type'" ;; esac # Clear the version info if we defaulted, and they specified a release. 
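# Worked example of the version calculation above for version_type=linux
# (values illustrative):
#
#   -version-info 3:2:1    ->  current=3 revision=2 age=1
#                              major=.2          (current - age)
#                              versuffix=.2.1.2  (major.age.revision)
#                              typically giving libfoo.so.2.1.2 with
#                              soname libfoo.so.2
#
#   -version-number 1:2:3  ->  current=1+2=3 age=2 revision=3, i.e. the
#                              same scheme expressed as MAJOR:MINOR:REVISION
#                              instead of CURRENT:REVISION:AGE.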
if test -z "$vinfo" && test -n "$release"; then major= case $version_type in darwin) # we can't check for "0.0" in archive_cmds due to quoting # problems, so we reset it completely verstring= ;; *) verstring="0.0" ;; esac if test "$need_version" = no; then versuffix= else versuffix=".0.0" fi fi # Remove version info from name if versioning should be avoided if test "$avoid_version" = yes && test "$need_version" = no; then major= versuffix= verstring="" fi # Check to see if the archive will have undefined symbols. if test "$allow_undefined" = yes; then if test "$allow_undefined_flag" = unsupported; then func_warning "undefined symbols not allowed in $host shared libraries" build_libtool_libs=no build_old_libs=yes fi else # Don't allow undefined symbols. allow_undefined_flag="$no_undefined_flag" fi fi func_generate_dlsyms "$libname" "$libname" "yes" func_append libobjs " $symfileobj" test "X$libobjs" = "X " && libobjs= if test "$opt_mode" != relink; then # Remove our outputs, but don't remove object files since they # may have been created when compiling PIC objects. removelist= tempremovelist=`$ECHO "$output_objdir/*"` for p in $tempremovelist; do case $p in *.$objext | *.gcno) ;; $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*) if test "X$precious_files_regex" != "X"; then if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1 then continue fi fi func_append removelist " $p" ;; *) ;; esac done test -n "$removelist" && \ func_show_eval "${RM}r \$removelist" fi # Now set the variables for building old libraries. if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then func_append oldlibs " $output_objdir/$libname.$libext" # Transform .lo files to .o files. oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP` fi # Eliminate all temporary directories. #for path in $notinst_path; do # lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"` # deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"` # dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"` #done if test -n "$xrpath"; then # If the user specified any rpath flags, then add them. temp_xrpath= for libdir in $xrpath; do func_replace_sysroot "$libdir" func_append temp_xrpath " -R$func_replace_sysroot_result" case "$finalize_rpath " in *" $libdir "*) ;; *) func_append finalize_rpath " $libdir" ;; esac done if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then dependency_libs="$temp_xrpath $dependency_libs" fi fi # Make sure dlfiles contains only unique files that won't be dlpreopened old_dlfiles="$dlfiles" dlfiles= for lib in $old_dlfiles; do case " $dlprefiles $dlfiles " in *" $lib "*) ;; *) func_append dlfiles " $lib" ;; esac done # Make sure dlprefiles contains only unique files old_dlprefiles="$dlprefiles" dlprefiles= for lib in $old_dlprefiles; do case "$dlprefiles " in *" $lib "*) ;; *) func_append dlprefiles " $lib" ;; esac done if test "$build_libtool_libs" = yes; then if test -n "$rpath"; then case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*) # these systems don't actually have a c library (as such)! ;; *-*-rhapsody* | *-*-darwin1.[012]) # Rhapsody C library is in the System framework func_append deplibs " System.ltframework" ;; *-*-netbsd*) # Don't link with libc until the a.out ld.so is fixed. ;; *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) # Do not include libc due to us having libc/libc_r. 
;; *-*-sco3.2v5* | *-*-sco5v6*) # Causes problems with __ctype ;; *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) # Compiler inserts libc in the correct place for threads to work ;; *) # Add libc to deplibs on all other systems if necessary. if test "$build_libtool_need_lc" = "yes"; then func_append deplibs " -lc" fi ;; esac fi # Transform deplibs into only deplibs that can be linked in shared. name_save=$name libname_save=$libname release_save=$release versuffix_save=$versuffix major_save=$major # I'm not sure if I'm treating the release correctly. I think # release should show up in the -l (ie -lgmp5) so we don't want to # add it in twice. Is that correct? release="" versuffix="" major="" newdeplibs= droppeddeps=no case $deplibs_check_method in pass_all) # Don't check for shared/static. Everything works. # This might be a little naive. We might want to check # whether the library exists or not. But this is on # osf3 & osf4 and I'm not really sure... Just # implementing what was already the behavior. newdeplibs=$deplibs ;; test_compile) # This code stresses the "libraries are programs" paradigm to its # limits. Maybe even breaks it. We compile a program, linking it # against the deplibs as a proxy for the library. Then we can check # whether they linked in statically or dynamically with ldd. $opt_dry_run || $RM conftest.c cat > conftest.c </dev/null` $nocaseglob else potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null` fi for potent_lib in $potential_libs; do # Follow soft links. if ls -lLd "$potent_lib" 2>/dev/null | $GREP " -> " >/dev/null; then continue fi # The statement above tries to avoid entering an # endless loop below, in case of cyclic links. # We might still enter an endless loop, since a link # loop can be closed while we follow links, # but so what? potlib="$potent_lib" while test -h "$potlib" 2>/dev/null; do potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'` case $potliblink in [\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";; *) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";; esac done if eval $file_magic_cmd \"\$potlib\" 2>/dev/null | $SED -e 10q | $EGREP "$file_magic_regex" > /dev/null; then func_append newdeplibs " $a_deplib" a_deplib="" break 2 fi done done fi if test -n "$a_deplib" ; then droppeddeps=yes echo $ECHO "*** Warning: linker path does not have real file for library $a_deplib." echo "*** I have the capability to make that library automatically link in when" echo "*** you link to this library. But I can only do this if you have a" echo "*** shared version of the library, which you do not appear to have" echo "*** because I did check the linker path looking for a file starting" if test -z "$potlib" ; then $ECHO "*** with $libname but no candidates were found. (...for file magic test)" else $ECHO "*** with $libname and none of the candidates passed a file format test" $ECHO "*** using a file magic. Last file checked: $potlib" fi fi ;; *) # Add a -L argument. func_append newdeplibs " $a_deplib" ;; esac done # Gone through all deplibs. 
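# Recap of the $deplibs_check_method strategies handled by this case
# statement:
#
#   pass_all       - trust every dependency; newdeplibs=$deplibs
#   test_compile   - link a trivial conftest program against the deplibs
#                    and inspect the result to see what really resolved
#   file_magic ... - locate each -lNAME in the search path and run
#                    $file_magic_cmd on it, accepting it only when the
#                    output matches the configured regex
#   match_pattern  - as above, but match the file *name* against a regex
#   none/unknown   - drop all declared inter-library dependencies
#
# Anything dropped sets droppeddeps=yes and triggers the warnings below.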
;; match_pattern*) set dummy $deplibs_check_method; shift match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` for a_deplib in $deplibs; do case $a_deplib in -l*) func_stripname -l '' "$a_deplib" name=$func_stripname_result if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then case " $predeps $postdeps " in *" $a_deplib "*) func_append newdeplibs " $a_deplib" a_deplib="" ;; esac fi if test -n "$a_deplib" ; then libname=`eval "\\$ECHO \"$libname_spec\""` for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do potential_libs=`ls $i/$libname[.-]* 2>/dev/null` for potent_lib in $potential_libs; do potlib="$potent_lib" # see symlink-check above in file_magic test if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \ $EGREP "$match_pattern_regex" > /dev/null; then func_append newdeplibs " $a_deplib" a_deplib="" break 2 fi done done fi if test -n "$a_deplib" ; then droppeddeps=yes echo $ECHO "*** Warning: linker path does not have real file for library $a_deplib." echo "*** I have the capability to make that library automatically link in when" echo "*** you link to this library. But I can only do this if you have a" echo "*** shared version of the library, which you do not appear to have" echo "*** because I did check the linker path looking for a file starting" if test -z "$potlib" ; then $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)" else $ECHO "*** with $libname and none of the candidates passed a file format test" $ECHO "*** using a regex pattern. Last file checked: $potlib" fi fi ;; *) # Add a -L argument. func_append newdeplibs " $a_deplib" ;; esac done # Gone through all deplibs. ;; none | unknown | *) newdeplibs="" tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'` if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then for i in $predeps $postdeps ; do # can't use Xsed below, because $i might contain '/' tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s,$i,,"` done fi case $tmp_deplibs in *[!\ \ ]*) echo if test "X$deplibs_check_method" = "Xnone"; then echo "*** Warning: inter-library dependencies are not supported in this platform." else echo "*** Warning: inter-library dependencies are not known to be supported." fi echo "*** All declared inter-library dependencies are being dropped." droppeddeps=yes ;; esac ;; esac versuffix=$versuffix_save major=$major_save release=$release_save libname=$libname_save name=$name_save case $host in *-*-rhapsody* | *-*-darwin1.[012]) # On Rhapsody replace the C library with the System framework newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'` ;; esac if test "$droppeddeps" = yes; then if test "$module" = yes; then echo echo "*** Warning: libtool could not satisfy all declared inter-library" $ECHO "*** dependencies of module $libname. Therefore, libtool will create" echo "*** a static module, that should work as long as the dlopening" echo "*** application is linked with the -dlopen flag." if test -z "$global_symbol_pipe"; then echo echo "*** However, this would only work if libtool was able to extract symbol" echo "*** lists from a program, using \`nm' or equivalent, but libtool could" echo "*** not find such a program. So, this module is probably useless." echo "*** \`nm' from GNU binutils and a full rebuild may help." 
fi if test "$build_old_libs" = no; then oldlibs="$output_objdir/$libname.$libext" build_libtool_libs=module build_old_libs=yes else build_libtool_libs=no fi else echo "*** The inter-library dependencies that have been dropped here will be" echo "*** automatically added whenever a program is linked with this library" echo "*** or is declared to -dlopen it." if test "$allow_undefined" = no; then echo echo "*** Since this library must not contain undefined symbols," echo "*** because either the platform does not support them or" echo "*** it was explicitly requested with -no-undefined," echo "*** libtool will only create a static version of it." if test "$build_old_libs" = no; then oldlibs="$output_objdir/$libname.$libext" build_libtool_libs=module build_old_libs=yes else build_libtool_libs=no fi fi fi fi # Done checking deplibs! deplibs=$newdeplibs fi # Time to change all our "foo.ltframework" stuff back to "-framework foo" case $host in *-*-darwin*) newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` ;; esac # move library search paths that coincide with paths to not yet # installed libraries to the beginning of the library search list new_libs= for path in $notinst_path; do case " $new_libs " in *" -L$path/$objdir "*) ;; *) case " $deplibs " in *" -L$path/$objdir "*) func_append new_libs " -L$path/$objdir" ;; esac ;; esac done for deplib in $deplibs; do case $deplib in -L*) case " $new_libs " in *" $deplib "*) ;; *) func_append new_libs " $deplib" ;; esac ;; *) func_append new_libs " $deplib" ;; esac done deplibs="$new_libs" # All the library-specific variables (install_libdir is set above). library_names= old_library= dlname= # Test again, we may have decided not to build it any more if test "$build_libtool_libs" = yes; then # Remove ${wl} instances when linking with ld. # FIXME: should test the right _cmds variable. case $archive_cmds in *\$LD\ *) wl= ;; esac if test "$hardcode_into_libs" = yes; then # Hardcode the library paths hardcode_libdirs= dep_rpath= rpath="$finalize_rpath" test "$opt_mode" != relink && rpath="$compile_rpath$rpath" for libdir in $rpath; do if test -n "$hardcode_libdir_flag_spec"; then if test -n "$hardcode_libdir_separator"; then func_replace_sysroot "$libdir" libdir=$func_replace_sysroot_result if test -z "$hardcode_libdirs"; then hardcode_libdirs="$libdir" else # Just accumulate the unique libdirs. case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" func_append dep_rpath " $flag" fi elif test -n "$runpath_var"; then case "$perm_rpath " in *" $libdir "*) ;; *) func_append perm_rpath " $libdir" ;; esac fi done # Substitute the hardcoded libdirs into the rpath. if test -n "$hardcode_libdir_separator" && test -n "$hardcode_libdirs"; then libdir="$hardcode_libdirs" eval "dep_rpath=\"$hardcode_libdir_flag_spec\"" fi if test -n "$runpath_var" && test -n "$perm_rpath"; then # We should set the runpath_var. 
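# The rpath handling above accumulates each library directory only once and
# joins the result with $hardcode_libdir_separator before expanding
# $hardcode_libdir_flag_spec.  A minimal, self-contained sketch of that
# "append only if not already present" idiom (the helper name and the ':'
# separator are assumptions for the example):
func_example_append_unique_dir ()
{
  # $1 = current ':'-joined list, $2 = directory to add; prints the new list
  case ":$1:" in
    *":$2:"*) printf '%s\n' "$1" ;;        # already present
    "::")     printf '%s\n' "$2" ;;        # first entry
    *)        printf '%s\n' "$1:$2" ;;     # append
  esac
}
# Example (not executed here):
#   func_example_append_unique_dir "/usr/lib:/opt/lib" "/usr/lib"   # prints /usr/lib:/opt/lib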
rpath= for dir in $perm_rpath; do func_append rpath "$dir:" done eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var" fi test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs" fi shlibpath="$finalize_shlibpath" test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath" if test -n "$shlibpath"; then eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var" fi # Get the real and link names of the library. eval shared_ext=\"$shrext_cmds\" eval library_names=\"$library_names_spec\" set dummy $library_names shift realname="$1" shift if test -n "$soname_spec"; then eval soname=\"$soname_spec\" else soname="$realname" fi if test -z "$dlname"; then dlname=$soname fi lib="$output_objdir/$realname" linknames= for link do func_append linknames " $link" done # Use standard objects if they are pic test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP` test "X$libobjs" = "X " && libobjs= delfiles= if test -n "$export_symbols" && test -n "$include_expsyms"; then $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp" export_symbols="$output_objdir/$libname.uexp" func_append delfiles " $export_symbols" fi orig_export_symbols= case $host_os in cygwin* | mingw* | cegcc*) if test -n "$export_symbols" && test -z "$export_symbols_regex"; then # exporting using user supplied symfile if test "x`$SED 1q $export_symbols`" != xEXPORTS; then # and it's NOT already a .def file. Must figure out # which of the given symbols are data symbols and tag # them as such. So, trigger use of export_symbols_cmds. # export_symbols gets reassigned inside the "prepare # the list of exported symbols" if statement, so the # include_expsyms logic still works. orig_export_symbols="$export_symbols" export_symbols= always_export_symbols=yes fi fi ;; esac # Prepare the list of exported symbols if test -z "$export_symbols"; then if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then func_verbose "generating symbol list for \`$libname.la'" export_symbols="$output_objdir/$libname.exp" $opt_dry_run || $RM $export_symbols cmds=$export_symbols_cmds save_ifs="$IFS"; IFS='~' for cmd1 in $cmds; do IFS="$save_ifs" # Take the normal branch if the nm_file_list_spec branch # doesn't work or if tool conversion is not needed. case $nm_file_list_spec~$to_tool_file_cmd in *~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*) try_normal_branch=yes eval cmd=\"$cmd1\" func_len " $cmd" len=$func_len_result ;; *) try_normal_branch=no ;; esac if test "$try_normal_branch" = yes \ && { test "$len" -lt "$max_cmd_len" \ || test "$max_cmd_len" -le -1; } then func_show_eval "$cmd" 'exit $?' skipped_export=false elif test -n "$nm_file_list_spec"; then func_basename "$output" output_la=$func_basename_result save_libobjs=$libobjs save_output=$output output=${output_objdir}/${output_la}.nm func_to_tool_file "$output" libobjs=$nm_file_list_spec$func_to_tool_file_result func_append delfiles " $output" func_verbose "creating $NM input file list: $output" for obj in $save_libobjs; do func_to_tool_file "$obj" $ECHO "$func_to_tool_file_result" done > "$output" eval cmd=\"$cmd1\" func_show_eval "$cmd" 'exit $?' output=$save_output libobjs=$save_libobjs skipped_export=false else # The command line is too long to execute in one step. func_verbose "using reloadable object file for export list..." skipped_export=: # Break out early, otherwise skipped_export may be # set to false by a later but shorter cmd. 
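# Whether the export-symbol command above is run directly or routed through an
# input-file list depends on its length versus $max_cmd_len, where -1 means
# "no limit".  A minimal sketch of that decision with an illustrative helper
# name; the real code additionally accounts for tool path conversion on
# Windows-style hosts:
func_example_cmd_fits ()
{
  # $1 = command string, $2 = maximum length (-1 means "no limit"); true if it fits
  example_len=${#1}
  test "$2" -le -1 || test "$example_len" -lt "$2"
}
# Example (not executed here):
#   func_example_cmd_fits "$some_long_command" "$max_cmd_len" || echo "use a file list"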
break fi done IFS="$save_ifs" if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' func_show_eval '$MV "${export_symbols}T" "$export_symbols"' fi fi fi if test -n "$export_symbols" && test -n "$include_expsyms"; then tmp_export_symbols="$export_symbols" test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' fi if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then # The given exports_symbols file has to be filtered, so filter it. func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" # FIXME: $output_objdir/$libname.filter potentially contains lots of # 's' commands which not all seds can handle. GNU sed should be fine # though. Also, the filter scales superlinearly with the number of # global variables. join(1) would be nice here, but unfortunately # isn't a blessed tool. $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter func_append delfiles " $export_symbols $output_objdir/$libname.filter" export_symbols=$output_objdir/$libname.def $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols fi tmp_deplibs= for test_deplib in $deplibs; do case " $convenience " in *" $test_deplib "*) ;; *) func_append tmp_deplibs " $test_deplib" ;; esac done deplibs="$tmp_deplibs" if test -n "$convenience"; then if test -n "$whole_archive_flag_spec" && test "$compiler_needs_object" = yes && test -z "$libobjs"; then # extract the archives, so we have objects to list. # TODO: could optimize this to just extract one archive. whole_archive_flag_spec= fi if test -n "$whole_archive_flag_spec"; then save_libobjs=$libobjs eval libobjs=\"\$libobjs $whole_archive_flag_spec\" test "X$libobjs" = "X " && libobjs= else gentop="$output_objdir/${outputname}x" func_append generated " $gentop" func_extract_archives $gentop $convenience func_append libobjs " $func_extract_archives_result" test "X$libobjs" = "X " && libobjs= fi fi if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then eval flag=\"$thread_safe_flag_spec\" func_append linker_flags " $flag" fi # Make a backup of the uninstalled library when relinking if test "$opt_mode" = relink; then $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $? fi # Do each of the archive commands. if test "$module" = yes && test -n "$module_cmds" ; then if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then eval test_cmds=\"$module_expsym_cmds\" cmds=$module_expsym_cmds else eval test_cmds=\"$module_cmds\" cmds=$module_cmds fi else if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then eval test_cmds=\"$archive_expsym_cmds\" cmds=$archive_expsym_cmds else eval test_cmds=\"$archive_cmds\" cmds=$archive_cmds fi fi if test "X$skipped_export" != "X:" && func_len " $test_cmds" && len=$func_len_result && test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then : else # The command line is too long to link in one step, link piecewise # or, if using GNU ld and skipped_export is not :, use a linker # script. # Save the value of $output and $libobjs because we want to # use them later. 
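# The DATA-export filtering above works in two passes: the first sed turns
# every line of the symbol list that carries a DATA tag into a small sed
# substitution command, and the second sed applies that generated script to
# the original symbol list so the tags are re-attached.  A minimal sketch of
# the same "generate a sed script, then run it" pattern (file names are
# illustrative):
#
#   sed -e '/[ ,]DATA/!d;s,\(.*\)\([ ,].*\),s|^\1$|\1\2|,' \
#       < symbols.def > symbols.filter        # one s||| command per DATA symbol
#   sed -f symbols.filter < symbols.orig > symbols.new
#
# As the comment above notes, join(1) would express this more directly but is
# not relied upon.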
If we have whole_archive_flag_spec, we # want to use save_libobjs as it was before # whole_archive_flag_spec was expanded, because we can't # assume the linker understands whole_archive_flag_spec. # This may have to be revisited, in case too many # convenience libraries get linked in and end up exceeding # the spec. if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then save_libobjs=$libobjs fi save_output=$output func_basename "$output" output_la=$func_basename_result # Clear the reloadable object creation command queue and # initialize k to one. test_cmds= concat_cmds= objlist= last_robj= k=1 if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then output=${output_objdir}/${output_la}.lnkscript func_verbose "creating GNU ld script: $output" echo 'INPUT (' > $output for obj in $save_libobjs do func_to_tool_file "$obj" $ECHO "$func_to_tool_file_result" >> $output done echo ')' >> $output func_append delfiles " $output" func_to_tool_file "$output" output=$func_to_tool_file_result elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then output=${output_objdir}/${output_la}.lnk func_verbose "creating linker input file list: $output" : > $output set x $save_libobjs shift firstobj= if test "$compiler_needs_object" = yes; then firstobj="$1 " shift fi for obj do func_to_tool_file "$obj" $ECHO "$func_to_tool_file_result" >> $output done func_append delfiles " $output" func_to_tool_file "$output" output=$firstobj\"$file_list_spec$func_to_tool_file_result\" else if test -n "$save_libobjs"; then func_verbose "creating reloadable object files..." output=$output_objdir/$output_la-${k}.$objext eval test_cmds=\"$reload_cmds\" func_len " $test_cmds" len0=$func_len_result len=$len0 # Loop over the list of objects to be linked. for obj in $save_libobjs do func_len " $obj" func_arith $len + $func_len_result len=$func_arith_result if test "X$objlist" = X || test "$len" -lt "$max_cmd_len"; then func_append objlist " $obj" else # The command $test_cmds is almost too long, add a # command to the queue. if test "$k" -eq 1 ; then # The first file doesn't have a previous command to add. reload_objs=$objlist eval concat_cmds=\"$reload_cmds\" else # All subsequent reloadable object files will link in # the last one created. reload_objs="$objlist $last_robj" eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\" fi last_robj=$output_objdir/$output_la-${k}.$objext func_arith $k + 1 k=$func_arith_result output=$output_objdir/$output_la-${k}.$objext objlist=" $obj" func_len " $last_robj" func_arith $len0 + $func_len_result len=$func_arith_result fi done # Handle the remaining objects by creating one last # reloadable object file. All subsequent reloadable object # files will link in the last one created. test -z "$concat_cmds" || concat_cmds=$concat_cmds~ reload_objs="$objlist $last_robj" eval concat_cmds=\"\${concat_cmds}$reload_cmds\" if test -n "$last_robj"; then eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\" fi func_append delfiles " $output" else output= fi if ${skipped_export-false}; then func_verbose "generating symbol list for \`$libname.la'" export_symbols="$output_objdir/$libname.exp" $opt_dry_run || $RM $export_symbols libobjs=$output # Append the command to create the export file. 
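# When the link command would be too long and GNU ld is in use, the code above
# writes the object list into a small linker script and hands that single file
# to the linker instead of the objects themselves.  A minimal sketch of such a
# generator (the helper name and output path in the usage note are
# illustrative):
func_example_write_ld_script ()
{
  # $1 = output script path; remaining arguments = object files
  example_out=$1; shift
  echo 'INPUT (' > "$example_out"
  for example_obj
  do
    printf '%s\n' "$example_obj" >> "$example_out"
  done
  echo ')' >> "$example_out"
}
# Example (not executed here):
#   func_example_write_ld_script .libs/libfoo.lnkscript a.o b.o c.o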
test -z "$concat_cmds" || concat_cmds=$concat_cmds~ eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\" if test -n "$last_robj"; then eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\" fi fi test -n "$save_libobjs" && func_verbose "creating a temporary reloadable object file: $output" # Loop through the commands generated above and execute them. save_ifs="$IFS"; IFS='~' for cmd in $concat_cmds; do IFS="$save_ifs" $opt_silent || { func_quote_for_expand "$cmd" eval "func_echo $func_quote_for_expand_result" } $opt_dry_run || eval "$cmd" || { lt_exit=$? # Restore the uninstalled library and exit if test "$opt_mode" = relink; then ( cd "$output_objdir" && \ $RM "${realname}T" && \ $MV "${realname}U" "$realname" ) fi exit $lt_exit } done IFS="$save_ifs" if test -n "$export_symbols_regex" && ${skipped_export-false}; then func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' func_show_eval '$MV "${export_symbols}T" "$export_symbols"' fi fi if ${skipped_export-false}; then if test -n "$export_symbols" && test -n "$include_expsyms"; then tmp_export_symbols="$export_symbols" test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' fi if test -n "$orig_export_symbols"; then # The given exports_symbols file has to be filtered, so filter it. func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" # FIXME: $output_objdir/$libname.filter potentially contains lots of # 's' commands which not all seds can handle. GNU sed should be fine # though. Also, the filter scales superlinearly with the number of # global variables. join(1) would be nice here, but unfortunately # isn't a blessed tool. $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter func_append delfiles " $export_symbols $output_objdir/$libname.filter" export_symbols=$output_objdir/$libname.def $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols fi fi libobjs=$output # Restore the value of output. output=$save_output if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then eval libobjs=\"\$libobjs $whole_archive_flag_spec\" test "X$libobjs" = "X " && libobjs= fi # Expand the library linking commands again to reset the # value of $libobjs for piecewise linking. # Do each of the archive commands. if test "$module" = yes && test -n "$module_cmds" ; then if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then cmds=$module_expsym_cmds else cmds=$module_cmds fi else if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then cmds=$archive_expsym_cmds else cmds=$archive_cmds fi fi fi if test -n "$delfiles"; then # Append the command to remove temporary files to $cmds. eval cmds=\"\$cmds~\$RM $delfiles\" fi # Add any objects from preloaded convenience libraries if test -n "$dlprefiles"; then gentop="$output_objdir/${outputname}x" func_append generated " $gentop" func_extract_archives $gentop $dlprefiles func_append libobjs " $func_extract_archives_result" test "X$libobjs" = "X " && libobjs= fi save_ifs="$IFS"; IFS='~' for cmd in $cmds; do IFS="$save_ifs" eval cmd=\"$cmd\" $opt_silent || { func_quote_for_expand "$cmd" eval "func_echo $func_quote_for_expand_result" } $opt_dry_run || eval "$cmd" || { lt_exit=$? 
# Restore the uninstalled library and exit if test "$opt_mode" = relink; then ( cd "$output_objdir" && \ $RM "${realname}T" && \ $MV "${realname}U" "$realname" ) fi exit $lt_exit } done IFS="$save_ifs" # Restore the uninstalled library and exit if test "$opt_mode" = relink; then $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $? if test -n "$convenience"; then if test -z "$whole_archive_flag_spec"; then func_show_eval '${RM}r "$gentop"' fi fi exit $EXIT_SUCCESS fi # Create links to the real library. for linkname in $linknames; do if test "$realname" != "$linkname"; then func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?' fi done # If -module or -export-dynamic was specified, set the dlname. if test "$module" = yes || test "$export_dynamic" = yes; then # On all known operating systems, these are identical. dlname="$soname" fi fi ;; obj) if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then func_warning "\`-dlopen' is ignored for objects" fi case " $deplibs" in *\ -l* | *\ -L*) func_warning "\`-l' and \`-L' are ignored for objects" ;; esac test -n "$rpath" && \ func_warning "\`-rpath' is ignored for objects" test -n "$xrpath" && \ func_warning "\`-R' is ignored for objects" test -n "$vinfo" && \ func_warning "\`-version-info' is ignored for objects" test -n "$release" && \ func_warning "\`-release' is ignored for objects" case $output in *.lo) test -n "$objs$old_deplibs" && \ func_fatal_error "cannot build library object \`$output' from non-libtool objects" libobj=$output func_lo2o "$libobj" obj=$func_lo2o_result ;; *) libobj= obj="$output" ;; esac # Delete the old objects. $opt_dry_run || $RM $obj $libobj # Objects from convenience libraries. This assumes # single-version convenience libraries. Whenever we create # different ones for PIC/non-PIC, this we'll have to duplicate # the extraction. reload_conv_objs= gentop= # reload_cmds runs $LD directly, so let us get rid of # -Wl from whole_archive_flag_spec and hope we can get by with # turning comma into space.. wl= if test -n "$convenience"; then if test -n "$whole_archive_flag_spec"; then eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\" reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'` else gentop="$output_objdir/${obj}x" func_append generated " $gentop" func_extract_archives $gentop $convenience reload_conv_objs="$reload_objs $func_extract_archives_result" fi fi # If we're not building shared, we need to use non_pic_objs test "$build_libtool_libs" != yes && libobjs="$non_pic_objects" # Create the old-style object. reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test output="$obj" func_execute_cmds "$reload_cmds" 'exit $?' # Exit if we aren't doing a library object file. if test -z "$libobj"; then if test -n "$gentop"; then func_show_eval '${RM}r "$gentop"' fi exit $EXIT_SUCCESS fi if test "$build_libtool_libs" != yes; then if test -n "$gentop"; then func_show_eval '${RM}r "$gentop"' fi # Create an invalid libtool object if no PIC, so that we don't # accidentally link it into a program. # $show "echo timestamp > $libobj" # $opt_dry_run || eval "echo timestamp > $libobj" || exit $? exit $EXIT_SUCCESS fi if test -n "$pic_flag" || test "$pic_mode" != default; then # Only do commands if we really have different PIC objects. 
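# On a typical ELF platform the library_names computed earlier expand to the
# real file plus its compatibility links, and the loop above simply re-creates
# each link next to the real file.  An illustrative sketch of that step (the
# names are invented for the example):
#
#   cd .libs \
#     && rm -f libfoo.so.1 libfoo.so \
#     && ln -s libfoo.so.1.2.3 libfoo.so.1 \
#     && ln -s libfoo.so.1.2.3 libfoo.so
#
# The soname (libfoo.so.1 here) is what normally ends up recorded as the
# dlname in the .la file written further below.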
reload_objs="$libobjs $reload_conv_objs" output="$libobj" func_execute_cmds "$reload_cmds" 'exit $?' fi if test -n "$gentop"; then func_show_eval '${RM}r "$gentop"' fi exit $EXIT_SUCCESS ;; prog) case $host in *cygwin*) func_stripname '' '.exe' "$output" output=$func_stripname_result.exe;; esac test -n "$vinfo" && \ func_warning "\`-version-info' is ignored for programs" test -n "$release" && \ func_warning "\`-release' is ignored for programs" test "$preload" = yes \ && test "$dlopen_support" = unknown \ && test "$dlopen_self" = unknown \ && test "$dlopen_self_static" = unknown && \ func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support." case $host in *-*-rhapsody* | *-*-darwin1.[012]) # On Rhapsody replace the C library is the System framework compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'` finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'` ;; esac case $host in *-*-darwin*) # Don't allow lazy linking, it breaks C++ global constructors # But is supposedly fixed on 10.4 or later (yay!). if test "$tagname" = CXX ; then case ${MACOSX_DEPLOYMENT_TARGET-10.0} in 10.[0123]) func_append compile_command " ${wl}-bind_at_load" func_append finalize_command " ${wl}-bind_at_load" ;; esac fi # Time to change all our "foo.ltframework" stuff back to "-framework foo" compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` ;; esac # move library search paths that coincide with paths to not yet # installed libraries to the beginning of the library search list new_libs= for path in $notinst_path; do case " $new_libs " in *" -L$path/$objdir "*) ;; *) case " $compile_deplibs " in *" -L$path/$objdir "*) func_append new_libs " -L$path/$objdir" ;; esac ;; esac done for deplib in $compile_deplibs; do case $deplib in -L*) case " $new_libs " in *" $deplib "*) ;; *) func_append new_libs " $deplib" ;; esac ;; *) func_append new_libs " $deplib" ;; esac done compile_deplibs="$new_libs" func_append compile_command " $compile_deplibs" func_append finalize_command " $finalize_deplibs" if test -n "$rpath$xrpath"; then # If the user specified any rpath flags, then add them. for libdir in $rpath $xrpath; do # This is the magic to use -rpath. case "$finalize_rpath " in *" $libdir "*) ;; *) func_append finalize_rpath " $libdir" ;; esac done fi # Now hardcode the library paths rpath= hardcode_libdirs= for libdir in $compile_rpath $finalize_rpath; do if test -n "$hardcode_libdir_flag_spec"; then if test -n "$hardcode_libdir_separator"; then if test -z "$hardcode_libdirs"; then hardcode_libdirs="$libdir" else # Just accumulate the unique libdirs. 
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" func_append rpath " $flag" fi elif test -n "$runpath_var"; then case "$perm_rpath " in *" $libdir "*) ;; *) func_append perm_rpath " $libdir" ;; esac fi case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'` case :$dllsearchpath: in *":$libdir:"*) ;; ::) dllsearchpath=$libdir;; *) func_append dllsearchpath ":$libdir";; esac case :$dllsearchpath: in *":$testbindir:"*) ;; ::) dllsearchpath=$testbindir;; *) func_append dllsearchpath ":$testbindir";; esac ;; esac done # Substitute the hardcoded libdirs into the rpath. if test -n "$hardcode_libdir_separator" && test -n "$hardcode_libdirs"; then libdir="$hardcode_libdirs" eval rpath=\" $hardcode_libdir_flag_spec\" fi compile_rpath="$rpath" rpath= hardcode_libdirs= for libdir in $finalize_rpath; do if test -n "$hardcode_libdir_flag_spec"; then if test -n "$hardcode_libdir_separator"; then if test -z "$hardcode_libdirs"; then hardcode_libdirs="$libdir" else # Just accumulate the unique libdirs. case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" func_append rpath " $flag" fi elif test -n "$runpath_var"; then case "$finalize_perm_rpath " in *" $libdir "*) ;; *) func_append finalize_perm_rpath " $libdir" ;; esac fi done # Substitute the hardcoded libdirs into the rpath. if test -n "$hardcode_libdir_separator" && test -n "$hardcode_libdirs"; then libdir="$hardcode_libdirs" eval rpath=\" $hardcode_libdir_flag_spec\" fi finalize_rpath="$rpath" if test -n "$libobjs" && test "$build_old_libs" = yes; then # Transform all the library objects into standard objects. compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP` finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP` fi func_generate_dlsyms "$outputname" "@PROGRAM@" "no" # template prelinking step if test -n "$prelink_cmds"; then func_execute_cmds "$prelink_cmds" 'exit $?' fi wrappers_required=yes case $host in *cegcc* | *mingw32ce*) # Disable wrappers for cegcc and mingw32ce hosts, we are cross compiling anyway. wrappers_required=no ;; *cygwin* | *mingw* ) if test "$build_libtool_libs" != yes; then wrappers_required=no fi ;; *) if test "$need_relink" = no || test "$build_libtool_libs" != yes; then wrappers_required=no fi ;; esac if test "$wrappers_required" = no; then # Replace the output file specification. compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'` link_command="$compile_command$compile_rpath" # We have no uninstalled library dependencies, so finalize right now. exit_status=0 func_show_eval "$link_command" 'exit_status=$?' if test -n "$postlink_cmds"; then func_to_tool_file "$output" postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` func_execute_cmds "$postlink_cmds" 'exit $?' fi # Delete the generated files. 
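# For Windows-style hosts the loop above also derives a candidate bin
# directory from each library directory (the DLLs live next to the
# executables) by rewriting a trailing "/lib" into "/bin", and appends both
# directories to the DLL search path if not yet present.  A minimal sketch of
# that derivation (illustrative helper name; sed uses '*' as its delimiter,
# exactly as above):
func_example_bindir_for_libdir ()
{
  # $1 = library directory; prints the matching bin directory
  echo "$1" | sed -e 's*/lib$*/bin*'
}
# Example (not executed here):
#   func_example_bindir_for_libdir /usr/local/lib    # prints /usr/local/bin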
if test -f "$output_objdir/${outputname}S.${objext}"; then func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"' fi exit $exit_status fi if test -n "$compile_shlibpath$finalize_shlibpath"; then compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command" fi if test -n "$finalize_shlibpath"; then finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command" fi compile_var= finalize_var= if test -n "$runpath_var"; then if test -n "$perm_rpath"; then # We should set the runpath_var. rpath= for dir in $perm_rpath; do func_append rpath "$dir:" done compile_var="$runpath_var=\"$rpath\$$runpath_var\" " fi if test -n "$finalize_perm_rpath"; then # We should set the runpath_var. rpath= for dir in $finalize_perm_rpath; do func_append rpath "$dir:" done finalize_var="$runpath_var=\"$rpath\$$runpath_var\" " fi fi if test "$no_install" = yes; then # We don't need to create a wrapper script. link_command="$compile_var$compile_command$compile_rpath" # Replace the output file specification. link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'` # Delete the old output file. $opt_dry_run || $RM $output # Link the executable and exit func_show_eval "$link_command" 'exit $?' if test -n "$postlink_cmds"; then func_to_tool_file "$output" postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` func_execute_cmds "$postlink_cmds" 'exit $?' fi exit $EXIT_SUCCESS fi if test "$hardcode_action" = relink; then # Fast installation is not supported link_command="$compile_var$compile_command$compile_rpath" relink_command="$finalize_var$finalize_command$finalize_rpath" func_warning "this platform does not like uninstalled shared libraries" func_warning "\`$output' will be relinked during installation" else if test "$fast_install" != no; then link_command="$finalize_var$compile_command$finalize_rpath" if test "$fast_install" = yes; then relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'` else # fast_install is set to needless relink_command= fi else link_command="$compile_var$compile_command$compile_rpath" relink_command="$finalize_var$finalize_command$finalize_rpath" fi fi # Replace the output file specification. link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'` # Delete the old output files. $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname func_show_eval "$link_command" 'exit $?' if test -n "$postlink_cmds"; then func_to_tool_file "$output_objdir/$outputname" postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` func_execute_cmds "$postlink_cmds" 'exit $?' fi # Now create the wrapper script. func_verbose "creating $output" # Quote the relink command for shipping. 
if test -n "$relink_command"; then # Preserve any variables that may affect compiler behavior for var in $variables_saved_for_relink; do if eval test -z \"\${$var+set}\"; then relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command" elif eval var_value=\$$var; test -z "$var_value"; then relink_command="$var=; export $var; $relink_command" else func_quote_for_eval "$var_value" relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command" fi done relink_command="(cd `pwd`; $relink_command)" relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"` fi # Only actually do things if not in dry run mode. $opt_dry_run || { # win32 will think the script is a binary if it has # a .exe suffix, so we strip it off here. case $output in *.exe) func_stripname '' '.exe' "$output" output=$func_stripname_result ;; esac # test for cygwin because mv fails w/o .exe extensions case $host in *cygwin*) exeext=.exe func_stripname '' '.exe' "$outputname" outputname=$func_stripname_result ;; *) exeext= ;; esac case $host in *cygwin* | *mingw* ) func_dirname_and_basename "$output" "" "." output_name=$func_basename_result output_path=$func_dirname_result cwrappersource="$output_path/$objdir/lt-$output_name.c" cwrapper="$output_path/$output_name.exe" $RM $cwrappersource $cwrapper trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15 func_emit_cwrapperexe_src > $cwrappersource # The wrapper executable is built using the $host compiler, # because it contains $host paths and files. If cross- # compiling, it, like the target executable, must be # executed on the $host or under an emulation environment. $opt_dry_run || { $LTCC $LTCFLAGS -o $cwrapper $cwrappersource $STRIP $cwrapper } # Now, create the wrapper script for func_source use: func_ltwrapper_scriptname $cwrapper $RM $func_ltwrapper_scriptname_result trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15 $opt_dry_run || { # note: this script will not be executed, so do not chmod. if test "x$build" = "x$host" ; then $cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result else func_emit_wrapper no > $func_ltwrapper_scriptname_result fi } ;; * ) $RM $output trap "$RM $output; exit $EXIT_FAILURE" 1 2 15 func_emit_wrapper no > $output chmod +x $output ;; esac } exit $EXIT_SUCCESS ;; esac # See if we need to build an old-fashioned archive. for oldlib in $oldlibs; do if test "$build_libtool_libs" = convenience; then oldobjs="$libobjs_save $symfileobj" addlibs="$convenience" build_libtool_libs=no else if test "$build_libtool_libs" = module; then oldobjs="$libobjs_save" build_libtool_libs=no else oldobjs="$old_deplibs $non_pic_objects" if test "$preload" = yes && test -f "$symfileobj"; then func_append oldobjs " $symfileobj" fi fi addlibs="$old_convenience" fi if test -n "$addlibs"; then gentop="$output_objdir/${outputname}x" func_append generated " $gentop" func_extract_archives $gentop $addlibs func_append oldobjs " $func_extract_archives_result" fi # Do each command in the archive commands. if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then cmds=$old_archive_from_new_cmds else # Add any objects from preloaded convenience libraries if test -n "$dlprefiles"; then gentop="$output_objdir/${outputname}x" func_append generated " $gentop" func_extract_archives $gentop $dlprefiles func_append oldobjs " $func_extract_archives_result" fi # POSIX demands no paths to be encoded in archives. 
We have # to avoid creating archives with duplicate basenames if we # might have to extract them afterwards, e.g., when creating a # static archive out of a convenience library, or when linking # the entirety of a libtool archive into another (currently # not supported by libtool). if (for obj in $oldobjs do func_basename "$obj" $ECHO "$func_basename_result" done | sort | sort -uc >/dev/null 2>&1); then : else echo "copying selected object files to avoid basename conflicts..." gentop="$output_objdir/${outputname}x" func_append generated " $gentop" func_mkdir_p "$gentop" save_oldobjs=$oldobjs oldobjs= counter=1 for obj in $save_oldobjs do func_basename "$obj" objbase="$func_basename_result" case " $oldobjs " in " ") oldobjs=$obj ;; *[\ /]"$objbase "*) while :; do # Make sure we don't pick an alternate name that also # overlaps. newobj=lt$counter-$objbase func_arith $counter + 1 counter=$func_arith_result case " $oldobjs " in *[\ /]"$newobj "*) ;; *) if test ! -f "$gentop/$newobj"; then break; fi ;; esac done func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj" func_append oldobjs " $gentop/$newobj" ;; *) func_append oldobjs " $obj" ;; esac done fi func_to_tool_file "$oldlib" func_convert_file_msys_to_w32 tool_oldlib=$func_to_tool_file_result eval cmds=\"$old_archive_cmds\" func_len " $cmds" len=$func_len_result if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then cmds=$old_archive_cmds elif test -n "$archiver_list_spec"; then func_verbose "using command file archive linking..." for obj in $oldobjs do func_to_tool_file "$obj" $ECHO "$func_to_tool_file_result" done > $output_objdir/$libname.libcmd func_to_tool_file "$output_objdir/$libname.libcmd" oldobjs=" $archiver_list_spec$func_to_tool_file_result" cmds=$old_archive_cmds else # the command line is too long to link in one step, link in parts func_verbose "using piecewise archive linking..." save_RANLIB=$RANLIB RANLIB=: objlist= concat_cmds= save_oldobjs=$oldobjs oldobjs= # Is there a better way of finding the last object in the list? for obj in $save_oldobjs do last_oldobj=$obj done eval test_cmds=\"$old_archive_cmds\" func_len " $test_cmds" len0=$func_len_result len=$len0 for obj in $save_oldobjs do func_len " $obj" func_arith $len + $func_len_result len=$func_arith_result func_append objlist " $obj" if test "$len" -lt "$max_cmd_len"; then : else # the above command should be used before it gets too long oldobjs=$objlist if test "$obj" = "$last_oldobj" ; then RANLIB=$save_RANLIB fi test -z "$concat_cmds" || concat_cmds=$concat_cmds~ eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\" objlist= len=$len0 fi done RANLIB=$save_RANLIB oldobjs=$objlist if test "X$oldobjs" = "X" ; then eval cmds=\"\$concat_cmds\" else eval cmds=\"\$concat_cmds~\$old_archive_cmds\" fi fi fi func_execute_cmds "$cmds" 'exit $?' done test -n "$generated" && \ func_show_eval "${RM}r$generated" # Now create the libtool archive. 
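# Duplicate object basenames would clash inside a flat archive, so the code
# above detects them by listing every basename, sorting the list, and letting
# "sort -uc" fail if any name occurs twice.  A minimal sketch of the same
# check (illustrative helper name, plain basename instead of func_basename):
func_example_has_duplicate_basenames ()
{
  # arguments = object files; true if at least two share a basename
  for example_obj
  do
    basename "$example_obj"
  done | sort | sort -uc >/dev/null 2>&1 && return 1
  return 0
}
# Example (not executed here):
#   func_example_has_duplicate_basenames a/foo.o b/foo.o && echo "basename conflict"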
case $output in *.la) old_library= test "$build_old_libs" = yes && old_library="$libname.$libext" func_verbose "creating $output" # Preserve any variables that may affect compiler behavior for var in $variables_saved_for_relink; do if eval test -z \"\${$var+set}\"; then relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command" elif eval var_value=\$$var; test -z "$var_value"; then relink_command="$var=; export $var; $relink_command" else func_quote_for_eval "$var_value" relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command" fi done # Quote the link command for shipping. relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)" relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"` if test "$hardcode_automatic" = yes ; then relink_command= fi # Only create the output if not a dry run. $opt_dry_run || { for installed in no yes; do if test "$installed" = yes; then if test -z "$install_libdir"; then break fi output="$output_objdir/$outputname"i # Replace all uninstalled libtool libraries with the installed ones newdependency_libs= for deplib in $dependency_libs; do case $deplib in *.la) func_basename "$deplib" name="$func_basename_result" func_resolve_sysroot "$deplib" eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` test -z "$libdir" && \ func_fatal_error "\`$deplib' is not a valid libtool archive" func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name" ;; -L*) func_stripname -L '' "$deplib" func_replace_sysroot "$func_stripname_result" func_append newdependency_libs " -L$func_replace_sysroot_result" ;; -R*) func_stripname -R '' "$deplib" func_replace_sysroot "$func_stripname_result" func_append newdependency_libs " -R$func_replace_sysroot_result" ;; *) func_append newdependency_libs " $deplib" ;; esac done dependency_libs="$newdependency_libs" newdlfiles= for lib in $dlfiles; do case $lib in *.la) func_basename "$lib" name="$func_basename_result" eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` test -z "$libdir" && \ func_fatal_error "\`$lib' is not a valid libtool archive" func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name" ;; *) func_append newdlfiles " $lib" ;; esac done dlfiles="$newdlfiles" newdlprefiles= for lib in $dlprefiles; do case $lib in *.la) # Only pass preopened files to the pseudo-archive (for # eventual linking with the app. 
that links it) if we # didn't already link the preopened objects directly into # the library: func_basename "$lib" name="$func_basename_result" eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` test -z "$libdir" && \ func_fatal_error "\`$lib' is not a valid libtool archive" func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name" ;; esac done dlprefiles="$newdlprefiles" else newdlfiles= for lib in $dlfiles; do case $lib in [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; *) abs=`pwd`"/$lib" ;; esac func_append newdlfiles " $abs" done dlfiles="$newdlfiles" newdlprefiles= for lib in $dlprefiles; do case $lib in [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; *) abs=`pwd`"/$lib" ;; esac func_append newdlprefiles " $abs" done dlprefiles="$newdlprefiles" fi $RM $output # place dlname in correct position for cygwin # In fact, it would be nice if we could use this code for all target # systems that can't hard-code library paths into their executables # and that have no shared library path variable independent of PATH, # but it turns out we can't easily determine that from inspecting # libtool variables, so we have to hard-code the OSs to which it # applies here; at the moment, that means platforms that use the PE # object format with DLL files. See the long comment at the top of # tests/bindir.at for full details. tdlname=$dlname case $host,$output,$installed,$module,$dlname in *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll) # If a -bindir argument was supplied, place the dll there. if test "x$bindir" != x ; then func_relative_path "$install_libdir" "$bindir" tdlname=$func_relative_path_result$dlname else # Otherwise fall back on heuristic. tdlname=../bin/$dlname fi ;; esac $ECHO > $output "\ # $outputname - a libtool library file # Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION # # Please DO NOT delete this file! # It is necessary for linking the library. # The name that we can dlopen(3). dlname='$tdlname' # Names of this library. library_names='$library_names' # The name of the static archive. old_library='$old_library' # Linker flags that can not go in dependency_libs. inherited_linker_flags='$new_inherited_linker_flags' # Libraries that this one depends upon. dependency_libs='$dependency_libs' # Names of additional weak libraries provided by this library weak_library_names='$weak_libs' # Version information for $libname. current=$current age=$age revision=$revision # Is this an already installed library? installed=$installed # Should we warn about portability when linking against -modules? shouldnotlink=$module # Files to dlopen/dlpreopen dlopen='$dlfiles' dlpreopen='$dlprefiles' # Directory that this library needs to be installed in: libdir='$install_libdir'" if test "$installed" = no && test "$need_relink" = yes; then $ECHO >> $output "\ relink_command=\"$relink_command\"" fi done } # Do a symbolic link so that the libtool archive can be found in # LD_LIBRARY_PATH before the program is installed. func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?' ;; esac exit $EXIT_SUCCESS } { test "$opt_mode" = link || test "$opt_mode" = relink; } && func_mode_link ${1+"$@"} # func_mode_uninstall arg... func_mode_uninstall () { $opt_debug RM="$nonopt" files= rmforce= exit_status=0 # This variable tells wrapper scripts just to set variables rather # than running their programs. 
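# The block above is what actually writes the ".la" libtool archive: a small
# shell-readable text file describing the library.  For orientation, a typical
# result for an installed library might look like the following; every value
# is invented for illustration, but the field names match the template above
# and must not change, because func_source later reads the file back as shell
# assignments (see the uninstall mode below).
#
#   dlname='libfoo.so.1'
#   library_names='libfoo.so.1.2.3 libfoo.so.1 libfoo.so'
#   old_library='libfoo.a'
#   inherited_linker_flags=''
#   dependency_libs=' -L/usr/lib -lbar'
#   weak_library_names=''
#   current=3
#   age=2
#   revision=3
#   installed=yes
#   shouldnotlink=no
#   dlopen=''
#   dlpreopen=''
#   libdir='/usr/lib'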
libtool_install_magic="$magic" for arg do case $arg in -f) func_append RM " $arg"; rmforce=yes ;; -*) func_append RM " $arg" ;; *) func_append files " $arg" ;; esac done test -z "$RM" && \ func_fatal_help "you must specify an RM program" rmdirs= for file in $files; do func_dirname "$file" "" "." dir="$func_dirname_result" if test "X$dir" = X.; then odir="$objdir" else odir="$dir/$objdir" fi func_basename "$file" name="$func_basename_result" test "$opt_mode" = uninstall && odir="$dir" # Remember odir for removal later, being careful to avoid duplicates if test "$opt_mode" = clean; then case " $rmdirs " in *" $odir "*) ;; *) func_append rmdirs " $odir" ;; esac fi # Don't error if the file doesn't exist and rm -f was used. if { test -L "$file"; } >/dev/null 2>&1 || { test -h "$file"; } >/dev/null 2>&1 || test -f "$file"; then : elif test -d "$file"; then exit_status=1 continue elif test "$rmforce" = yes; then continue fi rmfiles="$file" case $name in *.la) # Possibly a libtool archive, so verify it. if func_lalib_p "$file"; then func_source $dir/$name # Delete the libtool libraries and symlinks. for n in $library_names; do func_append rmfiles " $odir/$n" done test -n "$old_library" && func_append rmfiles " $odir/$old_library" case "$opt_mode" in clean) case " $library_names " in *" $dlname "*) ;; *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;; esac test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i" ;; uninstall) if test -n "$library_names"; then # Do each command in the postuninstall commands. func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' fi if test -n "$old_library"; then # Do each command in the old_postuninstall commands. func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' fi # FIXME: should reinstall the best remaining shared library. ;; esac fi ;; *.lo) # Possibly a libtool object, so verify it. if func_lalib_p "$file"; then # Read the .lo file func_source $dir/$name # Add PIC object to the list of files to remove. if test -n "$pic_object" && test "$pic_object" != none; then func_append rmfiles " $dir/$pic_object" fi # Add non-PIC object to the list of files to remove. if test -n "$non_pic_object" && test "$non_pic_object" != none; then func_append rmfiles " $dir/$non_pic_object" fi fi ;; *) if test "$opt_mode" = clean ; then noexename=$name case $file in *.exe) func_stripname '' '.exe' "$file" file=$func_stripname_result func_stripname '' '.exe' "$name" noexename=$func_stripname_result # $file with .exe has already been added to rmfiles, # add $file without .exe func_append rmfiles " $file" ;; esac # Do a test to see if this is a libtool program. 
if func_ltwrapper_p "$file"; then if func_ltwrapper_executable_p "$file"; then func_ltwrapper_scriptname "$file" relink_command= func_source $func_ltwrapper_scriptname_result func_append rmfiles " $func_ltwrapper_scriptname_result" else relink_command= func_source $dir/$noexename fi # note $name still contains .exe if it was in $file originally # as does the version of $file that was added into $rmfiles func_append rmfiles " $odir/$name $odir/${name}S.${objext}" if test "$fast_install" = yes && test -n "$relink_command"; then func_append rmfiles " $odir/lt-$name" fi if test "X$noexename" != "X$name" ; then func_append rmfiles " $odir/lt-${noexename}.c" fi fi fi ;; esac func_show_eval "$RM $rmfiles" 'exit_status=1' done # Try to remove the ${objdir}s in the directories where we deleted files for dir in $rmdirs; do if test -d "$dir"; then func_show_eval "rmdir $dir >/dev/null 2>&1" fi done exit $exit_status } { test "$opt_mode" = uninstall || test "$opt_mode" = clean; } && func_mode_uninstall ${1+"$@"} test -z "$opt_mode" && { help="$generic_help" func_fatal_help "you must specify a MODE" } test -z "$exec_cmd" && \ func_fatal_help "invalid operation mode \`$opt_mode'" if test -n "$exec_cmd"; then eval exec "$exec_cmd" exit $EXIT_FAILURE fi exit $exit_status # The TAGs below are defined such that we never get into a situation # in which we disable both kinds of libraries. Given conflicting # choices, we go for a static library, that is the most portable, # since we can't tell whether shared libraries were disabled because # the user asked for that or because the platform doesn't support # them. This is particularly important on AIX, because we don't # support having both static and shared libraries enabled at the same # time on that platform, so we default to a shared-only configuration. # If a disable-shared tag is given, we'll fallback to a static-only # configuration. But we'll never go from static-only to shared-only. # ### BEGIN LIBTOOL TAG CONFIG: disable-shared build_libtool_libs=no build_old_libs=yes # ### END LIBTOOL TAG CONFIG: disable-shared # ### BEGIN LIBTOOL TAG CONFIG: disable-static build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac` # ### END LIBTOOL TAG CONFIG: disable-static # Local Variables: # mode:shell-script # sh-indentation:2 # End: # vi:sw=2 nordugrid-arc-6.14.0/PaxHeaders.30264/autogen.sh0000644000000000000000000000013214152153376017342 xustar000000000000000030 mtime=1638455038.262643825 30 atime=1638455038.459646786 30 ctime=1638455095.737507412 nordugrid-arc-6.14.0/autogen.sh0000755000175000002070000000170514152153376017335 0ustar00mockbuildmock00000000000000#!/bin/sh # # autogen.sh glue # # Requires: automake 1.9, autoconf 2.57+ # Conflicts: autoconf 2.13 set -x cleanup() { find . -type d -name autom4te.cache -print | xargs rm -rf \; find . -type f \( -name missing -o -name install-sh \ -o -name mkinstalldirs \ -o -name depcomp -o -name ltmain.sh -o -name configure \ -o -name config.sub -o -name config.guess \ -o -name Makefile.in -o -name config.h.in -o -name aclocal.m4 \ -o -name autoscan.log -o -name configure.scan -o -name config.log \ -o -name config.status -o -name config.h -o -name stamp-h1 \ -o -name Makefile -o -name libtool \) \ -print | xargs rm -f } if [ "x$1" = "xclean" ]; then cleanup exit fi # Refresh GNU autotools toolchain. echo Cleaning autotools files... cleanup type glibtoolize > /dev/null 2>&1 && export LIBTOOLIZE=glibtoolize echo Running autoreconf... 
autoreconf --verbose --force --install exit 0 nordugrid-arc-6.14.0/PaxHeaders.30264/m40000644000000000000000000000013214152153467015610 xustar000000000000000030 mtime=1638455095.719507142 30 atime=1638455103.996631509 30 ctime=1638455095.719507142 nordugrid-arc-6.14.0/m4/0000755000175000002070000000000014152153467015652 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/m4/PaxHeaders.30264/ac_cxx_have_dbdeadlockexception.m40000644000000000000000000000013214152153376024451 xustar000000000000000030 mtime=1638455038.279644081 30 atime=1638455038.462646831 30 ctime=1638455095.701506871 nordugrid-arc-6.14.0/m4/ac_cxx_have_dbdeadlockexception.m40000644000175000002070000000137714152153376024446 0ustar00mockbuildmock00000000000000dnl @synopsis AC_DBCXX_HAVE_DBDEADLOCKEXXCEPTION dnl dnl If the C++ library has a working stringstream, define HAVE_SSTREAM. dnl dnl @author Ben Stanley dnl @version $Id: ac_cxx_have_sstream.m4 3830 2005-06-24 07:01:15Z waananen $ dnl AC_DEFUN([AC_DBCXX_HAVE_DBDEADLOCKEXCEPTION], [AC_CACHE_CHECK(whether the Berkeley DB has DbDeadlockException, ac_cv_dbcxx_dbdeadlockexception, [ AC_LANG_SAVE AC_LANG_CPLUSPLUS AC_TRY_COMPILE([#include ],[try { } catch(DbDeadlockException&) { }; return 0;], ac_cv_dbcxx_have_dbdeadlockexception=yes, ac_cv_dbcxx_have_dbdeadlockexception=no) AC_LANG_RESTORE ]) if test "$ac_cv_dbcxx_have_dbdeadlockexception" = yes; then AC_DEFINE(HAVE_DBDEADLOCKEXCEPTION,,[define if the Berkeley DB has DbDeadLockException]) fi ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/gettext.m40000644000000000000000000000013214152153401017577 xustar000000000000000030 mtime=1638455041.047685672 30 atime=1638455042.434706513 30 ctime=1638455095.706506946 nordugrid-arc-6.14.0/m4/gettext.m40000644000175000002070000003457014152153401017575 0ustar00mockbuildmock00000000000000# gettext.m4 serial 60 (gettext-0.17) dnl Copyright (C) 1995-2007 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1995-2000. dnl Bruno Haible , 2000-2006. dnl Macro to add for using GNU gettext. dnl Usage: AM_GNU_GETTEXT([INTLSYMBOL], [NEEDSYMBOL], [INTLDIR]). dnl INTLSYMBOL can be one of 'external', 'no-libtool', 'use-libtool'. The dnl default (if it is not specified or empty) is 'no-libtool'. dnl INTLSYMBOL should be 'external' for packages with no intl directory, dnl and 'no-libtool' or 'use-libtool' for packages with an intl directory. dnl If INTLSYMBOL is 'use-libtool', then a libtool library dnl $(top_builddir)/intl/libintl.la will be created (shared and/or static, dnl depending on --{enable,disable}-{shared,static} and on the presence of dnl AM-DISABLE-SHARED). If INTLSYMBOL is 'no-libtool', a static library dnl $(top_builddir)/intl/libintl.a will be created. 
dnl If NEEDSYMBOL is specified and is 'need-ngettext', then GNU gettext dnl implementations (in libc or libintl) without the ngettext() function dnl will be ignored. If NEEDSYMBOL is specified and is dnl 'need-formatstring-macros', then GNU gettext implementations that don't dnl support the ISO C 99 formatstring macros will be ignored. dnl INTLDIR is used to find the intl libraries. If empty, dnl the value `$(top_builddir)/intl/' is used. dnl dnl The result of the configuration is one of three cases: dnl 1) GNU gettext, as included in the intl subdirectory, will be compiled dnl and used. dnl Catalog format: GNU --> install in $(datadir) dnl Catalog extension: .mo after installation, .gmo in source tree dnl 2) GNU gettext has been found in the system's C library. dnl Catalog format: GNU --> install in $(datadir) dnl Catalog extension: .mo after installation, .gmo in source tree dnl 3) No internationalization, always use English msgid. dnl Catalog format: none dnl Catalog extension: none dnl If INTLSYMBOL is 'external', only cases 2 and 3 can occur. dnl The use of .gmo is historical (it was needed to avoid overwriting the dnl GNU format catalogs when building on a platform with an X/Open gettext), dnl but we keep it in order not to force irrelevant filename changes on the dnl maintainers. dnl AC_DEFUN([AM_GNU_GETTEXT], [ dnl Argument checking. ifelse([$1], [], , [ifelse([$1], [external], , [ifelse([$1], [no-libtool], , [ifelse([$1], [use-libtool], , [errprint([ERROR: invalid first argument to AM_GNU_GETTEXT ])])])])]) ifelse([$2], [], , [ifelse([$2], [need-ngettext], , [ifelse([$2], [need-formatstring-macros], , [errprint([ERROR: invalid second argument to AM_GNU_GETTEXT ])])])]) define([gt_included_intl], ifelse([$1], [external], ifdef([AM_GNU_GETTEXT_][INTL_SUBDIR], [yes], [no]), [yes])) define([gt_libtool_suffix_prefix], ifelse([$1], [use-libtool], [l], [])) gt_NEEDS_INIT AM_GNU_GETTEXT_NEED([$2]) AC_REQUIRE([AM_PO_SUBDIRS])dnl ifelse(gt_included_intl, yes, [ AC_REQUIRE([AM_INTL_SUBDIR])dnl ]) dnl Prerequisites of AC_LIB_LINKFLAGS_BODY. AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) dnl Sometimes libintl requires libiconv, so first search for libiconv. dnl Ideally we would do this search only after the dnl if test "$USE_NLS" = "yes"; then dnl if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then dnl tests. But if configure.in invokes AM_ICONV after AM_GNU_GETTEXT dnl the configure script would need to contain the same shell code dnl again, outside any 'if'. There are two solutions: dnl - Invoke AM_ICONV_LINKFLAGS_BODY here, outside any 'if'. dnl - Control the expansions in more detail using AC_PROVIDE_IFELSE. dnl Since AC_PROVIDE_IFELSE is only in autoconf >= 2.52 and not dnl documented, we avoid it. ifelse(gt_included_intl, yes, , [ AC_REQUIRE([AM_ICONV_LINKFLAGS_BODY]) ]) dnl Sometimes, on MacOS X, libintl requires linking with CoreFoundation. gt_INTL_MACOSX dnl Set USE_NLS. AC_REQUIRE([AM_NLS]) ifelse(gt_included_intl, yes, [ BUILD_INCLUDED_LIBINTL=no USE_INCLUDED_LIBINTL=no ]) LIBINTL= LTLIBINTL= POSUB= dnl Add a version number to the cache macros. 
case " $gt_needs " in *" need-formatstring-macros "*) gt_api_version=3 ;; *" need-ngettext "*) gt_api_version=2 ;; *) gt_api_version=1 ;; esac gt_func_gnugettext_libc="gt_cv_func_gnugettext${gt_api_version}_libc" gt_func_gnugettext_libintl="gt_cv_func_gnugettext${gt_api_version}_libintl" dnl If we use NLS figure out what method if test "$USE_NLS" = "yes"; then gt_use_preinstalled_gnugettext=no ifelse(gt_included_intl, yes, [ AC_MSG_CHECKING([whether included gettext is requested]) AC_ARG_WITH(included-gettext, [ --with-included-gettext use the GNU gettext library included here], nls_cv_force_use_gnu_gettext=$withval, nls_cv_force_use_gnu_gettext=no) AC_MSG_RESULT($nls_cv_force_use_gnu_gettext) nls_cv_use_gnu_gettext="$nls_cv_force_use_gnu_gettext" if test "$nls_cv_force_use_gnu_gettext" != "yes"; then ]) dnl User does not insist on using GNU NLS library. Figure out what dnl to use. If GNU gettext is available we use this. Else we have dnl to fall back to GNU NLS library. if test $gt_api_version -ge 3; then gt_revision_test_code=' #ifndef __GNU_GETTEXT_SUPPORTED_REVISION #define __GNU_GETTEXT_SUPPORTED_REVISION(major) ((major) == 0 ? 0 : -1) #endif changequote(,)dnl typedef int array [2 * (__GNU_GETTEXT_SUPPORTED_REVISION(0) >= 1) - 1]; changequote([,])dnl ' else gt_revision_test_code= fi if test $gt_api_version -ge 2; then gt_expression_test_code=' + * ngettext ("", "", 0)' else gt_expression_test_code= fi AC_CACHE_CHECK([for GNU gettext in libc], [$gt_func_gnugettext_libc], [AC_TRY_LINK([#include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern int *_nl_domain_bindings;], [bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_domain_bindings], [eval "$gt_func_gnugettext_libc=yes"], [eval "$gt_func_gnugettext_libc=no"])]) if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then dnl Sometimes libintl requires libiconv, so first search for libiconv. ifelse(gt_included_intl, yes, , [ AM_ICONV_LINK ]) dnl Search for libintl and define LIBINTL, LTLIBINTL and INCINTL dnl accordingly. Don't use AC_LIB_LINKFLAGS_BODY([intl],[iconv]) dnl because that would add "-liconv" to LIBINTL and LTLIBINTL dnl even if libiconv doesn't exist. AC_LIB_LINKFLAGS_BODY([intl]) AC_CACHE_CHECK([for GNU gettext in libintl], [$gt_func_gnugettext_libintl], [gt_save_CPPFLAGS="$CPPFLAGS" CPPFLAGS="$CPPFLAGS $INCINTL" gt_save_LIBS="$LIBS" LIBS="$LIBS $LIBINTL" dnl Now see whether libintl exists and does not depend on libiconv. AC_TRY_LINK([#include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *);], [bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_expand_alias ("")], [eval "$gt_func_gnugettext_libintl=yes"], [eval "$gt_func_gnugettext_libintl=no"]) dnl Now see whether libintl exists and depends on libiconv. 
if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" != yes; } && test -n "$LIBICONV"; then LIBS="$LIBS $LIBICONV" AC_TRY_LINK([#include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *);], [bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_expand_alias ("")], [LIBINTL="$LIBINTL $LIBICONV" LTLIBINTL="$LTLIBINTL $LTLIBICONV" eval "$gt_func_gnugettext_libintl=yes" ]) fi CPPFLAGS="$gt_save_CPPFLAGS" LIBS="$gt_save_LIBS"]) fi dnl If an already present or preinstalled GNU gettext() is found, dnl use it. But if this macro is used in GNU gettext, and GNU dnl gettext is already preinstalled in libintl, we update this dnl libintl. (Cf. the install rule in intl/Makefile.in.) if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" = "yes"; } \ || { { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; } \ && test "$PACKAGE" != gettext-runtime \ && test "$PACKAGE" != gettext-tools; }; then gt_use_preinstalled_gnugettext=yes else dnl Reset the values set by searching for libintl. LIBINTL= LTLIBINTL= INCINTL= fi ifelse(gt_included_intl, yes, [ if test "$gt_use_preinstalled_gnugettext" != "yes"; then dnl GNU gettext is not found in the C library. dnl Fall back on included GNU gettext library. nls_cv_use_gnu_gettext=yes fi fi if test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Mark actions used to generate GNU NLS library. BUILD_INCLUDED_LIBINTL=yes USE_INCLUDED_LIBINTL=yes LIBINTL="ifelse([$3],[],\${top_builddir}/intl,[$3])/libintl.[]gt_libtool_suffix_prefix[]a $LIBICONV $LIBTHREAD" LTLIBINTL="ifelse([$3],[],\${top_builddir}/intl,[$3])/libintl.[]gt_libtool_suffix_prefix[]a $LTLIBICONV $LTLIBTHREAD" LIBS=`echo " $LIBS " | sed -e 's/ -lintl / /' -e 's/^ //' -e 's/ $//'` fi CATOBJEXT= if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Mark actions to use GNU gettext tools. CATOBJEXT=.gmo fi ]) if test -n "$INTL_MACOSX_LIBS"; then if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Some extra flags are needed during linking. LIBINTL="$LIBINTL $INTL_MACOSX_LIBS" LTLIBINTL="$LTLIBINTL $INTL_MACOSX_LIBS" fi fi if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then AC_DEFINE(ENABLE_NLS, 1, [Define to 1 if translation of program messages to the user's native language is requested.]) else USE_NLS=no fi fi AC_MSG_CHECKING([whether to use NLS]) AC_MSG_RESULT([$USE_NLS]) if test "$USE_NLS" = "yes"; then AC_MSG_CHECKING([where the gettext function comes from]) if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then gt_source="external libintl" else gt_source="libc" fi else gt_source="included intl directory" fi AC_MSG_RESULT([$gt_source]) fi if test "$USE_NLS" = "yes"; then if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then AC_MSG_CHECKING([how to link with libintl]) AC_MSG_RESULT([$LIBINTL]) AC_LIB_APPENDTOVAR([CPPFLAGS], [$INCINTL]) fi dnl For backward compatibility. Some packages may be using this. 
AC_DEFINE(HAVE_GETTEXT, 1, [Define if the GNU gettext() function is already present or preinstalled.]) AC_DEFINE(HAVE_DCGETTEXT, 1, [Define if the GNU dcgettext() function is already present or preinstalled.]) fi dnl We need to process the po/ directory. POSUB=po fi ifelse(gt_included_intl, yes, [ dnl If this is used in GNU gettext we have to set BUILD_INCLUDED_LIBINTL dnl to 'yes' because some of the testsuite requires it. if test "$PACKAGE" = gettext-runtime || test "$PACKAGE" = gettext-tools; then BUILD_INCLUDED_LIBINTL=yes fi dnl Make all variables we use known to autoconf. AC_SUBST(BUILD_INCLUDED_LIBINTL) AC_SUBST(USE_INCLUDED_LIBINTL) AC_SUBST(CATOBJEXT) dnl For backward compatibility. Some configure.ins may be using this. nls_cv_header_intl= nls_cv_header_libgt= dnl For backward compatibility. Some Makefiles may be using this. DATADIRNAME=share AC_SUBST(DATADIRNAME) dnl For backward compatibility. Some Makefiles may be using this. INSTOBJEXT=.mo AC_SUBST(INSTOBJEXT) dnl For backward compatibility. Some Makefiles may be using this. GENCAT=gencat AC_SUBST(GENCAT) dnl For backward compatibility. Some Makefiles may be using this. INTLOBJS= if test "$USE_INCLUDED_LIBINTL" = yes; then INTLOBJS="\$(GETTOBJS)" fi AC_SUBST(INTLOBJS) dnl Enable libtool support if the surrounding package wishes it. INTL_LIBTOOL_SUFFIX_PREFIX=gt_libtool_suffix_prefix AC_SUBST(INTL_LIBTOOL_SUFFIX_PREFIX) ]) dnl For backward compatibility. Some Makefiles may be using this. INTLLIBS="$LIBINTL" AC_SUBST(INTLLIBS) dnl Make all documented variables known to autoconf. AC_SUBST(LIBINTL) AC_SUBST(LTLIBINTL) AC_SUBST(POSUB) ]) dnl gt_NEEDS_INIT ensures that the gt_needs variable is initialized. m4_define([gt_NEEDS_INIT], [ m4_divert_text([DEFAULTS], [gt_needs=]) m4_define([gt_NEEDS_INIT], []) ]) dnl Usage: AM_GNU_GETTEXT_NEED([NEEDSYMBOL]) AC_DEFUN([AM_GNU_GETTEXT_NEED], [ m4_divert_text([INIT_PREPARE], [gt_needs="$gt_needs $1"]) ]) dnl Usage: AM_GNU_GETTEXT_VERSION([gettext-version]) AC_DEFUN([AM_GNU_GETTEXT_VERSION], []) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/lt~obsolete.m40000644000000000000000000000013214152153410020465 xustar000000000000000030 mtime=1638455048.342795285 30 atime=1638455048.474797268 30 ctime=1638455095.716507096 nordugrid-arc-6.14.0/m4/lt~obsolete.m40000644000175000002070000001375614152153410020466 0ustar00mockbuildmock00000000000000# lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*- # # Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc. # Written by Scott James Remnant, 2004. # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # serial 5 lt~obsolete.m4 # These exist entirely to fool aclocal when bootstrapping libtool. # # In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN) # which have later been changed to m4_define as they aren't part of the # exported API, or moved to Autoconf or Automake where they belong. # # The trouble is, aclocal is a bit thick. It'll see the old AC_DEFUN # in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us # using a macro with the same name in our local m4/libtool.m4 it'll # pull the old libtool.m4 in (it doesn't see our shiny new m4_define # and doesn't know about Autoconf macros at all.) # # So we provide this file, which has a silly filename so it's always # included after everything else. 
This provides aclocal with the # AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything # because those macros already exist, or will be overwritten later. # We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. # # Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here. # Yes, that means every name once taken will need to remain here until # we give up compatibility with versions before 1.7, at which point # we need to keep only those names which we still refer to. # This is to help aclocal find these macros, as it can't see m4_define. AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])]) m4_ifndef([AC_LIBTOOL_LINKER_OPTION], [AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])]) m4_ifndef([AC_PROG_EGREP], [AC_DEFUN([AC_PROG_EGREP])]) m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])]) m4_ifndef([_LT_AC_SHELL_INIT], [AC_DEFUN([_LT_AC_SHELL_INIT])]) m4_ifndef([_LT_AC_SYS_LIBPATH_AIX], [AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])]) m4_ifndef([_LT_PROG_LTMAIN], [AC_DEFUN([_LT_PROG_LTMAIN])]) m4_ifndef([_LT_AC_TAGVAR], [AC_DEFUN([_LT_AC_TAGVAR])]) m4_ifndef([AC_LTDL_ENABLE_INSTALL], [AC_DEFUN([AC_LTDL_ENABLE_INSTALL])]) m4_ifndef([AC_LTDL_PREOPEN], [AC_DEFUN([AC_LTDL_PREOPEN])]) m4_ifndef([_LT_AC_SYS_COMPILER], [AC_DEFUN([_LT_AC_SYS_COMPILER])]) m4_ifndef([_LT_AC_LOCK], [AC_DEFUN([_LT_AC_LOCK])]) m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE], [AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])]) m4_ifndef([_LT_AC_TRY_DLOPEN_SELF], [AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])]) m4_ifndef([AC_LIBTOOL_PROG_CC_C_O], [AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])]) m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])]) m4_ifndef([AC_LIBTOOL_OBJDIR], [AC_DEFUN([AC_LIBTOOL_OBJDIR])]) m4_ifndef([AC_LTDL_OBJDIR], [AC_DEFUN([AC_LTDL_OBJDIR])]) m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])]) m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP], [AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])]) m4_ifndef([AC_PATH_MAGIC], [AC_DEFUN([AC_PATH_MAGIC])]) m4_ifndef([AC_PROG_LD_GNU], [AC_DEFUN([AC_PROG_LD_GNU])]) m4_ifndef([AC_PROG_LD_RELOAD_FLAG], [AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])]) m4_ifndef([AC_DEPLIBS_CHECK_METHOD], [AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])]) m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])]) m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])]) m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])]) m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS], [AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])]) m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP], [AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])]) m4_ifndef([LT_AC_PROG_EGREP], [AC_DEFUN([LT_AC_PROG_EGREP])]) m4_ifndef([LT_AC_PROG_SED], [AC_DEFUN([LT_AC_PROG_SED])]) m4_ifndef([_LT_CC_BASENAME], [AC_DEFUN([_LT_CC_BASENAME])]) m4_ifndef([_LT_COMPILER_BOILERPLATE], [AC_DEFUN([_LT_COMPILER_BOILERPLATE])]) m4_ifndef([_LT_LINKER_BOILERPLATE], [AC_DEFUN([_LT_LINKER_BOILERPLATE])]) m4_ifndef([_AC_PROG_LIBTOOL], [AC_DEFUN([_AC_PROG_LIBTOOL])]) m4_ifndef([AC_LIBTOOL_SETUP], [AC_DEFUN([AC_LIBTOOL_SETUP])]) m4_ifndef([_LT_AC_CHECK_DLFCN], [AC_DEFUN([_LT_AC_CHECK_DLFCN])]) m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER], [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])]) m4_ifndef([_LT_AC_TAGCONFIG], [AC_DEFUN([_LT_AC_TAGCONFIG])]) m4_ifndef([AC_DISABLE_FAST_INSTALL], [AC_DEFUN([AC_DISABLE_FAST_INSTALL])]) m4_ifndef([_LT_AC_LANG_CXX], [AC_DEFUN([_LT_AC_LANG_CXX])]) m4_ifndef([_LT_AC_LANG_F77], [AC_DEFUN([_LT_AC_LANG_F77])]) 
m4_ifndef([_LT_AC_LANG_GCJ], [AC_DEFUN([_LT_AC_LANG_GCJ])]) m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])]) m4_ifndef([_LT_AC_LANG_C_CONFIG], [AC_DEFUN([_LT_AC_LANG_C_CONFIG])]) m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])]) m4_ifndef([_LT_AC_LANG_CXX_CONFIG], [AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])]) m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])]) m4_ifndef([_LT_AC_LANG_F77_CONFIG], [AC_DEFUN([_LT_AC_LANG_F77_CONFIG])]) m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])]) m4_ifndef([_LT_AC_LANG_GCJ_CONFIG], [AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])]) m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])]) m4_ifndef([_LT_AC_LANG_RC_CONFIG], [AC_DEFUN([_LT_AC_LANG_RC_CONFIG])]) m4_ifndef([AC_LIBTOOL_CONFIG], [AC_DEFUN([AC_LIBTOOL_CONFIG])]) m4_ifndef([_LT_AC_FILE_LTDLL_C], [AC_DEFUN([_LT_AC_FILE_LTDLL_C])]) m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS], [AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])]) m4_ifndef([_LT_AC_PROG_CXXCPP], [AC_DEFUN([_LT_AC_PROG_CXXCPP])]) m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS], [AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])]) m4_ifndef([_LT_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])]) m4_ifndef([_LT_PROG_F77], [AC_DEFUN([_LT_PROG_F77])]) m4_ifndef([_LT_PROG_FC], [AC_DEFUN([_LT_PROG_FC])]) m4_ifndef([_LT_PROG_CXX], [AC_DEFUN([_LT_PROG_CXX])]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/fsusage.m40000644000000000000000000000013214152153376017563 xustar000000000000000030 mtime=1638455038.280644096 30 atime=1638455038.462646831 30 ctime=1638455095.705506931 nordugrid-arc-6.14.0/m4/fsusage.m40000644000175000002070000001752214152153376017557 0ustar00mockbuildmock00000000000000#serial 23 # Obtaining file system usage information. # Copyright (C) 1997, 1998, 2000, 2001, 2003-2007 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # Written by Jim Meyering. AC_DEFUN([gl_FSUSAGE], [ AC_CHECK_HEADERS(sys/param.h) AC_CHECK_HEADERS(sys/vfs.h sys/fs_types.h) AC_CHECK_HEADERS(sys/mount.h, [], [], [AC_INCLUDES_DEFAULT [#if HAVE_SYS_PARAM_H #include #endif]]) gl_FILE_SYSTEM_USAGE([gl_cv_fs_space=yes], [gl_cv_fs_space=no]) if test $gl_cv_fs_space = yes; then AC_LIBOBJ(fsusage) gl_PREREQ_FSUSAGE_EXTRA fi ]) # Try to determine how a program can obtain file system usage information. # If successful, define the appropriate symbol (see fsusage.c) and # execute ACTION-IF-FOUND. Otherwise, execute ACTION-IF-NOT-FOUND. # # gl_FILE_SYSTEM_USAGE([ACTION-IF-FOUND[, ACTION-IF-NOT-FOUND]]) AC_DEFUN([gl_FILE_SYSTEM_USAGE], [ AC_MSG_NOTICE([checking how to get file system space usage]) ac_fsusage_space=no # Perform only the link test since it seems there are no variants of the # statvfs function. This check is more than just AC_CHECK_FUNCS(statvfs) # because that got a false positive on SCO OSR5. Adding the declaration # of a `struct statvfs' causes this test to fail (as it should) on such # systems. That system is reported to work fine with STAT_STATFS4 which # is what it gets when this test fails. 
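# --------------------------------------------------------------------
# Usage sketch (illustrative only, not part of upstream fsusage.m4):
# a configure.ac that bundles lib/fsusage.c would normally just invoke
# the wrapper macro
#
#   gl_FSUSAGE
#
# which calls gl_FILE_SYSTEM_USAGE([gl_cv_fs_space=yes], [gl_cv_fs_space=no])
# and, on success, adds fsusage to AC_LIBOBJ and runs the extra
# prerequisite checks.  Calling gl_FILE_SYSTEM_USAGE directly with a
# custom ACTION-IF-NOT-FOUND is also possible, e.g.
#
#   gl_FILE_SYSTEM_USAGE([], [AC_MSG_WARN([no file system usage support])])
#
# where the warning text is only an example.
# --------------------------------------------------------------------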
if test $ac_fsusage_space = no; then # SVR4 AC_CACHE_CHECK([for statvfs function (SVR4)], fu_cv_sys_stat_statvfs, [AC_TRY_LINK([#include #if defined __GLIBC__ && !defined __BEOS__ Do not use statvfs on systems with GNU libc, because that function stats all preceding entries in /proc/mounts, and that makes df hang if even one of the corresponding file systems is hard-mounted, but not available. statvfs in GNU libc on BeOS operates differently: it only makes a system call. #endif #ifdef __osf__ "Do not use Tru64's statvfs implementation" #endif #include ], [struct statvfs fsd; statvfs (0, &fsd);], fu_cv_sys_stat_statvfs=yes, fu_cv_sys_stat_statvfs=no)]) if test $fu_cv_sys_stat_statvfs = yes; then ac_fsusage_space=yes AC_DEFINE(STAT_STATVFS, 1, [ Define if there is a function named statvfs. (SVR4)]) fi fi if test $ac_fsusage_space = no; then # DEC Alpha running OSF/1 AC_MSG_CHECKING([for 3-argument statfs function (DEC OSF/1)]) AC_CACHE_VAL(fu_cv_sys_stat_statfs3_osf1, [AC_TRY_RUN([ #include #include #include int main () { struct statfs fsd; fsd.f_fsize = 0; return statfs (".", &fsd, sizeof (struct statfs)) != 0; }], fu_cv_sys_stat_statfs3_osf1=yes, fu_cv_sys_stat_statfs3_osf1=no, fu_cv_sys_stat_statfs3_osf1=no)]) AC_MSG_RESULT($fu_cv_sys_stat_statfs3_osf1) if test $fu_cv_sys_stat_statfs3_osf1 = yes; then ac_fsusage_space=yes AC_DEFINE(STAT_STATFS3_OSF1, 1, [ Define if statfs takes 3 args. (DEC Alpha running OSF/1)]) fi fi if test $ac_fsusage_space = no; then # AIX AC_MSG_CHECKING([for two-argument statfs with statfs.bsize dnl member (AIX, 4.3BSD)]) AC_CACHE_VAL(fu_cv_sys_stat_statfs2_bsize, [AC_TRY_RUN([ #ifdef HAVE_SYS_PARAM_H #include #endif #ifdef HAVE_SYS_MOUNT_H #include #endif #ifdef HAVE_SYS_VFS_H #include #endif int main () { struct statfs fsd; fsd.f_bsize = 0; return statfs (".", &fsd) != 0; }], fu_cv_sys_stat_statfs2_bsize=yes, fu_cv_sys_stat_statfs2_bsize=no, fu_cv_sys_stat_statfs2_bsize=no)]) AC_MSG_RESULT($fu_cv_sys_stat_statfs2_bsize) if test $fu_cv_sys_stat_statfs2_bsize = yes; then ac_fsusage_space=yes AC_DEFINE(STAT_STATFS2_BSIZE, 1, [ Define if statfs takes 2 args and struct statfs has a field named f_bsize. (4.3BSD, SunOS 4, HP-UX, AIX PS/2)]) fi fi if test $ac_fsusage_space = no; then # SVR3 AC_MSG_CHECKING([for four-argument statfs (AIX-3.2.5, SVR3)]) AC_CACHE_VAL(fu_cv_sys_stat_statfs4, [AC_TRY_RUN([#include #include int main () { struct statfs fsd; return statfs (".", &fsd, sizeof fsd, 0) != 0; }], fu_cv_sys_stat_statfs4=yes, fu_cv_sys_stat_statfs4=no, fu_cv_sys_stat_statfs4=no)]) AC_MSG_RESULT($fu_cv_sys_stat_statfs4) if test $fu_cv_sys_stat_statfs4 = yes; then ac_fsusage_space=yes AC_DEFINE(STAT_STATFS4, 1, [ Define if statfs takes 4 args. (SVR3, Dynix, Irix, Dolphin)]) fi fi if test $ac_fsusage_space = no; then # 4.4BSD and NetBSD AC_MSG_CHECKING([for two-argument statfs with statfs.fsize dnl member (4.4BSD and NetBSD)]) AC_CACHE_VAL(fu_cv_sys_stat_statfs2_fsize, [AC_TRY_RUN([#include #ifdef HAVE_SYS_PARAM_H #include #endif #ifdef HAVE_SYS_MOUNT_H #include #endif int main () { struct statfs fsd; fsd.f_fsize = 0; return statfs (".", &fsd) != 0; }], fu_cv_sys_stat_statfs2_fsize=yes, fu_cv_sys_stat_statfs2_fsize=no, fu_cv_sys_stat_statfs2_fsize=no)]) AC_MSG_RESULT($fu_cv_sys_stat_statfs2_fsize) if test $fu_cv_sys_stat_statfs2_fsize = yes; then ac_fsusage_space=yes AC_DEFINE(STAT_STATFS2_FSIZE, 1, [ Define if statfs takes 2 args and struct statfs has a field named f_fsize. 
(4.4BSD, NetBSD)]) fi fi if test $ac_fsusage_space = no; then # Ultrix AC_MSG_CHECKING([for two-argument statfs with struct fs_data (Ultrix)]) AC_CACHE_VAL(fu_cv_sys_stat_fs_data, [AC_TRY_RUN([#include #ifdef HAVE_SYS_PARAM_H #include #endif #ifdef HAVE_SYS_MOUNT_H #include #endif #ifdef HAVE_SYS_FS_TYPES_H #include #endif int main () { struct fs_data fsd; /* Ultrix's statfs returns 1 for success, 0 for not mounted, -1 for failure. */ return statfs (".", &fsd) != 1; }], fu_cv_sys_stat_fs_data=yes, fu_cv_sys_stat_fs_data=no, fu_cv_sys_stat_fs_data=no)]) AC_MSG_RESULT($fu_cv_sys_stat_fs_data) if test $fu_cv_sys_stat_fs_data = yes; then ac_fsusage_space=yes AC_DEFINE(STAT_STATFS2_FS_DATA, 1, [ Define if statfs takes 2 args and the second argument has type struct fs_data. (Ultrix)]) fi fi if test $ac_fsusage_space = no; then # SVR2 AC_TRY_CPP([#include ], AC_DEFINE(STAT_READ_FILSYS, 1, [Define if there is no specific function for reading file systems usage information and you have the header file. (SVR2)]) ac_fsusage_space=yes) fi AS_IF([test $ac_fsusage_space = yes], [$1], [$2]) ]) # Check for SunOS statfs brokenness wrt partitions 2GB and larger. # If exists and struct statfs has a member named f_spare, # enable the work-around code in fsusage.c. AC_DEFUN([gl_STATFS_TRUNCATES], [ AC_MSG_CHECKING([for statfs that truncates block counts]) AC_CACHE_VAL(fu_cv_sys_truncating_statfs, [AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ #if !defined(sun) && !defined(__sun) choke -- this is a workaround for a Sun-specific problem #endif #include #include ]], [[struct statfs t; long c = *(t.f_spare); if (c) return 0;]])], [fu_cv_sys_truncating_statfs=yes], [fu_cv_sys_truncating_statfs=no])]) if test $fu_cv_sys_truncating_statfs = yes; then AC_DEFINE(STATFS_TRUNCATES_BLOCK_COUNTS, 1, [Define if the block counts reported by statfs may be truncated to 2GB and the correct values may be stored in the f_spare array. (SunOS 4.1.2, 4.1.3, and 4.1.3_U1 are reported to have this problem. SunOS 4.1.1 seems not to be affected.)]) fi AC_MSG_RESULT($fu_cv_sys_truncating_statfs) ]) # Prerequisites of lib/fsusage.c not done by gl_FILE_SYSTEM_USAGE. AC_DEFUN([gl_PREREQ_FSUSAGE_EXTRA], [ AC_CHECK_HEADERS(dustat.h sys/fs/s5param.h sys/filsys.h sys/statfs.h) gl_STATFS_TRUNCATES ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/ltversion.m40000644000000000000000000000013214152153410020140 xustar000000000000000030 mtime=1638455048.265794128 30 atime=1638455048.476797298 30 ctime=1638455095.715507081 nordugrid-arc-6.14.0/m4/ltversion.m40000644000175000002070000000126214152153410020126 0ustar00mockbuildmock00000000000000# ltversion.m4 -- version numbers -*- Autoconf -*- # # Copyright (C) 2004 Free Software Foundation, Inc. # Written by Scott James Remnant, 2004 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. 
# @configure_input@ # serial 3337 ltversion.m4 # This file is part of GNU Libtool m4_define([LT_PACKAGE_VERSION], [2.4.2]) m4_define([LT_PACKAGE_REVISION], [1.3337]) AC_DEFUN([LTVERSION_VERSION], [macro_version='2.4.2' macro_revision='1.3337' _LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?]) _LT_DECL(, macro_revision, 0) ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/ac_cxx_have_sstream.m40000644000000000000000000000013214152153376022134 xustar000000000000000030 mtime=1638455038.280644096 30 atime=1638455038.462646831 30 ctime=1638455095.702506886 nordugrid-arc-6.14.0/m4/ac_cxx_have_sstream.m40000644000175000002070000000134114152153376022120 0ustar00mockbuildmock00000000000000dnl @synopsis AC_CXX_HAVE_SSTREAM dnl dnl If the C++ library has a working stringstream, define HAVE_SSTREAM. dnl dnl @author Ben Stanley dnl @version $Id: ac_cxx_have_sstream.m4 3830 2005-06-24 07:01:15Z waananen $ dnl AC_DEFUN([AC_CXX_HAVE_SSTREAM], [AC_CACHE_CHECK(whether the compiler has stringstream, ac_cv_cxx_have_sstream, [AC_REQUIRE([AC_CXX_NAMESPACES]) AC_LANG_SAVE AC_LANG_CPLUSPLUS AC_TRY_COMPILE([#include #ifdef HAVE_NAMESPACES using namespace std; #endif],[stringstream message; message << "Hello"; return 0;], ac_cv_cxx_have_sstream=yes, ac_cv_cxx_have_sstream=no) AC_LANG_RESTORE ]) if test "$ac_cv_cxx_have_sstream" = yes; then AC_DEFINE(HAVE_SSTREAM,,[define if the compiler has stringstream]) fi ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/gpt.m40000644000000000000000000000013214152153376016720 xustar000000000000000030 mtime=1638455038.280644096 30 atime=1638455038.462646831 30 ctime=1638455095.707506961 nordugrid-arc-6.14.0/m4/gpt.m40000644000175000002070000000615714152153376016716 0ustar00mockbuildmock00000000000000# globus.m4 -*- Autoconf -*- # Macros to for compiling and linking against globus/gpt packages AC_DEFUN([GPT_PROG_GPT_FLAVOR_CONFIGURATION], [ AC_ARG_VAR([GPT_FLAVOR_CONFIGURATION], [path to gpt-flavor-configuration]) if test "x$ac_cv_env_GPT_FLAVOR_CONFIGURATION_set" != "xset"; then AC_PATH_TOOL([GPT_FLAVOR_CONFIGURATION], [gpt-flavor-configuration], [], $PATH:/usr/sbin:/opt/gpt/sbin) fi if test -f "$GPT_FLAVOR_CONFIGURATION" && test "x$GPT_LOCATION" = "x"; then GPT_LOCATION=`dirname $GPT_FLAVOR_CONFIGURATION` GPT_LOCATION=`dirname $GPT_LOCATION` export GPT_LOCATION fi ]) AC_DEFUN([GPT_PROG_GPT_QUERY], [ AC_ARG_VAR([GPT_QUERY], [path to gpt-query]) if test "x$ac_cv_env_GPT_QUERY_set" != "xset"; then AC_PATH_TOOL([GPT_QUERY], [gpt-query], [], $PATH:/usr/sbin:/opt/gpt/sbin) fi if test -f "$GPT_QUERY" && test "x$GPT_LOCATION" = "x"; then GPT_LOCATION=`dirname $GPT_QUERY` GPT_LOCATION=`dirname $GPT_LOCATION` export GPT_LOCATION fi ]) AC_DEFUN([GPT_PROG_GLOBUS_MAKEFILE_HEADER], [ AC_ARG_VAR([GLOBUS_MAKEFILE_HEADER], [path to globus-makefile-header]) if test "x$ac_cv_env_GLOBUS_MAKEFILE_HEADER_set" != "xset"; then AC_PATH_TOOL([GLOBUS_MAKEFILE_HEADER], [globus-makefile-header], [], $PATH:/opt/globus/bin) fi if test -f "$GLOBUS_MAKEFILE_HEADER" && test "x$GLOBUS_LOCATION" = "x"; then GLOBUS_LOCATION=`dirname $GLOBUS_MAKEFILE_HEADER` GLOBUS_LOCATION=`dirname $GLOBUS_LOCATION` export GLOBUS_LOCATION fi ]) AC_DEFUN([GPT_ARG_GPT_FLAVOR], [ AC_REQUIRE([GPT_PROG_GPT_FLAVOR_CONFIGURATION]) AC_MSG_CHECKING([for gpt flavor]) AC_ARG_WITH([flavor], AC_HELP_STRING([--with-flavor=(flavor)], [Specify the gpt build flavor [[autodetect]]]), [GPT_FLAVOR=$withval], if test -n "$GPT_FLAVOR_CONFIGURATION" ; then [GPT_FLAVOR=`$GPT_FLAVOR_CONFIGURATION | \\ grep '^[[a-zA-Z]].*:$' | cut -f1 -d: | grep thr 
| tail -1`] fi) if test -n "$GPT_FLAVOR"; then AC_MSG_RESULT($GPT_FLAVOR) else AC_MSG_RESULT([none detected, is globus_core-devel installed?]) fi ]) AC_DEFUN([GPT_PKG_VERSION], [ AC_REQUIRE([GPT_PROG_GPT_QUERY]) AC_REQUIRE([GPT_ARG_GPT_FLAVOR]) if test -n "$GPT_QUERY" && test -n "$GPT_FLAVOR"; then gpt_cv_[]$1[]_version=`$GPT_QUERY $1[]-[]$GPT_FLAVOR[]-dev | \\ grep 'pkg version' | sed 's%.*: *%%'` fi ]) AC_DEFUN([GPT_PKG], [ AC_REQUIRE([GPT_PROG_GLOBUS_MAKEFILE_HEADER]) AC_REQUIRE([GPT_ARG_GPT_FLAVOR]) AC_MSG_CHECKING([for $1]) GPT_PKG_VERSION($1) if test -n "$gpt_cv_[]$1[]_version"; then if test -n "$GLOBUS_MAKEFILE_HEADER" && test -n "$GPT_FLAVOR" ; then gpt_cv_tmp=`$GLOBUS_MAKEFILE_HEADER --flavor=$GPT_FLAVOR $1 | \\ sed 's% *= *\(.*\)%="\1"%'` gpt_cv_[]$1[]_cflags=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_INCLUDES'` gpt_cv_[]$1[]_libs=`eval "$gpt_cv_tmp" \\ echo '$GLOBUS_LDFLAGS $GLOBUS_PKG_LIBS $GLOBUS_LIBS'` gpt_cv_tmp= fi fi if test -n "$gpt_cv_[]$1[]_version"; then AC_MSG_RESULT($gpt_cv_[]$1[]_version) m4_toupper([$1])[]_VERSION=$gpt_cv_[]$1[]_version m4_toupper([$1])[]_LIBS=$gpt_cv_[]$1[]_libs m4_toupper([$1])[]_CFLAGS=$gpt_cv_[]$1[]_cflags else AC_MSG_RESULT(no) fi ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/arc_api.m40000644000000000000000000000013214152153376017524 xustar000000000000000030 mtime=1638455038.280644096 30 atime=1638455038.462646831 30 ctime=1638455095.703506901 nordugrid-arc-6.14.0/m4/arc_api.m40000644000175000002070000000435214152153376017515 0ustar00mockbuildmock00000000000000 # # ARC Public API # AC_DEFUN([ARC_API], [ ARCCLIENT_LIBS='$(top_builddir)/src/hed/libs/compute/libarccompute.la' ARCCLIENT_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCCLIENT_LIBS) AC_SUBST(ARCCLIENT_CFLAGS) ARCCOMMON_LIBS='$(top_builddir)/src/hed/libs/common/libarccommon.la' ARCCOMMON_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCCOMMON_LIBS) AC_SUBST(ARCCOMMON_CFLAGS) ARCCREDENTIAL_LIBS='$(top_builddir)/src/hed/libs/credential/libarccredential.la' ARCCREDENTIAL_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCCREDENTIAL_LIBS) AC_SUBST(ARCCREDENTIAL_CFLAGS) ARCDATA_LIBS='$(top_builddir)/src/hed/libs/data/libarcdata.la' ARCDATA_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCDATA_LIBS) AC_SUBST(ARCDATA_CFLAGS) ARCJOB_LIBS='$(top_builddir)/src/hed/libs/job/libarcjob.la' ARCJOB_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCJOB_LIBS) AC_SUBST(ARCJOB_CFLAGS) ARCLOADER_LIBS='$(top_builddir)/src/hed/libs/loader/libarcloader.la' ARCLOADER_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCLOADER_LIBS) AC_SUBST(ARCLOADER_CFLAGS) ARCMESSAGE_LIBS='$(top_builddir)/src/hed/libs/message/libarcmessage.la' ARCMESSAGE_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCMESSAGE_LIBS) AC_SUBST(ARCMESSAGE_CFLAGS) ARCSECURITY_LIBS='$(top_builddir)/src/hed/libs/security/libarcsecurity.la' ARCSECURITY_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCSECURITY_LIBS) AC_SUBST(ARCSECURITY_CFLAGS) ARCOTOKENS_LIBS='$(top_builddir)/src/hed/libs/security/libarcotokens.la' ARCOTOKENS_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCOTOKENS_LIBS) AC_SUBST(ARCOTOKENS_CFLAGS) ARCINFOSYS_LIBS='$(top_builddir)/src/hed/libs/infosys/libarcinfosys.la' ARCINFOSYS_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCINFOSYS_LIBS) AC_SUBST(ARCINFOSYS_CFLAGS) ARCWSADDRESSING_LIBS='$(top_builddir)/src/hed/libs/ws-addressing/libarcwsaddressing.la' ARCWSADDRESSING_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCWSADDRESSING_LIBS) AC_SUBST(ARCWSADDRESSING_CFLAGS) ARCWSSECURITY_LIBS='$(top_builddir)/src/hed/libs/ws-security/libarcwssecurity.la' 
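# --------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the original macro): the
# top-level configure.ac would typically just invoke
#
#   ARC_API
#
# after which every *_LIBS / *_CFLAGS pair set here is AC_SUBSTituted
# and can be consumed from a component's Makefile.am, for instance
# (the program name mytool is hypothetical):
#
#   mytool_CXXFLAGS = $(ARCDATA_CFLAGS)
#   mytool_LDADD    = $(ARCDATA_LIBS)
#
# so the component builds against the in-tree libarcdata.la and the
# headers under $(top_srcdir)/include.
# --------------------------------------------------------------------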
ARCWSSECURITY_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCWSSECURITY_LIBS) AC_SUBST(ARCWSSECURITY_CFLAGS) ARCXMLSEC_LIBS='$(top_builddir)/src/hed/libs/xmlsec/libarcxmlsec.la' ARCXMLSEC_CFLAGS='-I$(top_srcdir)/include' AC_SUBST(ARCXMLSEC_LIBS) AC_SUBST(ARCXMLSEC_CFLAGS) ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/progtest.m40000644000000000000000000000013214152153401017762 xustar000000000000000030 mtime=1638455041.215688196 30 atime=1638455042.417706257 30 ctime=1638455095.719507142 nordugrid-arc-6.14.0/m4/progtest.m40000644000175000002070000000555014152153401017754 0ustar00mockbuildmock00000000000000# progtest.m4 serial 4 (gettext-0.14.2) dnl Copyright (C) 1996-2003, 2005 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1996. AC_PREREQ(2.50) # Search path for a program which passes the given test. dnl AM_PATH_PROG_WITH_TEST(VARIABLE, PROG-TO-CHECK-FOR, dnl TEST-PERFORMED-ON-FOUND_PROGRAM [, VALUE-IF-NOT-FOUND [, PATH]]) AC_DEFUN([AM_PATH_PROG_WITH_TEST], [ # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "$2", so it can be a program name with args. set dummy $2; ac_word=[$]2 AC_MSG_CHECKING([for $ac_word]) AC_CACHE_VAL(ac_cv_path_$1, [case "[$]$1" in [[\\/]]* | ?:[[\\/]]*) ac_cv_path_$1="[$]$1" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in ifelse([$5], , $PATH, [$5]); do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." >&AS_MESSAGE_LOG_FD if [$3]; then ac_cv_path_$1="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" dnl If no 4th arg is given, leave the cache variable unset, dnl so AC_PATH_PROGS will keep looking. 
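dnl --------------------------------------------------------------------
dnl Usage sketch (illustrative only, not part of upstream progtest.m4):
dnl a caller such as the gettext po machinery typically looks for a tool
dnl and accepts it only when a probe command succeeds; the probe shown
dnl here is just an example.
dnl
dnl   AM_PATH_PROG_WITH_TEST(MSGFMT, msgfmt,
dnl     [$ac_dir/$ac_word --version >/dev/null 2>&1], :)
dnl
dnl The test code (argument 3) runs with $ac_dir/$ac_word pointing at the
dnl candidate just found; when no candidate passes, VALUE-IF-NOT-FOUND
dnl (here ':') is stored in MSGFMT instead.
dnl --------------------------------------------------------------------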
ifelse([$4], , , [ test -z "[$]ac_cv_path_$1" && ac_cv_path_$1="$4" ])dnl ;; esac])dnl $1="$ac_cv_path_$1" if test ifelse([$4], , [-n "[$]$1"], ["[$]$1" != "$4"]); then AC_MSG_RESULT([$]$1) else AC_MSG_RESULT(no) fi AC_SUBST($1)dnl ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/ltsugar.m40000644000000000000000000000013214152153410017574 xustar000000000000000030 mtime=1638455048.190793001 30 atime=1638455048.476797298 30 ctime=1638455095.714507066 nordugrid-arc-6.14.0/m4/ltsugar.m40000644000175000002070000001042414152153410017562 0ustar00mockbuildmock00000000000000# ltsugar.m4 -- libtool m4 base layer. -*-Autoconf-*- # # Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc. # Written by Gary V. Vaughan, 2004 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # serial 6 ltsugar.m4 # This is to help aclocal find these macros, as it can't see m4_define. AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])]) # lt_join(SEP, ARG1, [ARG2...]) # ----------------------------- # Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their # associated separator. # Needed until we can rely on m4_join from Autoconf 2.62, since all earlier # versions in m4sugar had bugs. m4_define([lt_join], [m4_if([$#], [1], [], [$#], [2], [[$2]], [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])]) m4_define([_lt_join], [m4_if([$#$2], [2], [], [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])]) # lt_car(LIST) # lt_cdr(LIST) # ------------ # Manipulate m4 lists. # These macros are necessary as long as will still need to support # Autoconf-2.59 which quotes differently. m4_define([lt_car], [[$1]]) m4_define([lt_cdr], [m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])], [$#], 1, [], [m4_dquote(m4_shift($@))])]) m4_define([lt_unquote], $1) # lt_append(MACRO-NAME, STRING, [SEPARATOR]) # ------------------------------------------ # Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'. # Note that neither SEPARATOR nor STRING are expanded; they are appended # to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked). # No SEPARATOR is output if MACRO-NAME was previously undefined (different # than defined and empty). # # This macro is needed until we can rely on Autoconf 2.62, since earlier # versions of m4sugar mistakenly expanded SEPARATOR but not STRING. m4_define([lt_append], [m4_define([$1], m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])]) # lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...]) # ---------------------------------------------------------- # Produce a SEP delimited list of all paired combinations of elements of # PREFIX-LIST with SUFFIX1 through SUFFIXn. Each element of the list # has the form PREFIXmINFIXSUFFIXn. # Needed until we can rely on m4_combine added in Autoconf 2.62. m4_define([lt_combine], [m4_if(m4_eval([$# > 3]), [1], [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl [[m4_foreach([_Lt_prefix], [$2], [m4_foreach([_Lt_suffix], ]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[, [_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])]) # lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ]) # ----------------------------------------------------------------------- # Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited # by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ. 
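# --------------------------------------------------------------------
# Illustration (not part of upstream ltsugar.m4) of the lt_append
# behaviour documented above; the macro name _my_tags is hypothetical:
#
#   lt_append([_my_tags], [CXX], [ ])   # _my_tags expands to `CXX'
#   lt_append([_my_tags], [F77], [ ])   # _my_tags expands to `CXX F77'
#
# i.e. the separator is only emitted once the macro already has content.
# --------------------------------------------------------------------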
m4_define([lt_if_append_uniq], [m4_ifdef([$1], [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1], [lt_append([$1], [$2], [$3])$4], [$5])], [lt_append([$1], [$2], [$3])$4])]) # lt_dict_add(DICT, KEY, VALUE) # ----------------------------- m4_define([lt_dict_add], [m4_define([$1($2)], [$3])]) # lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE) # -------------------------------------------- m4_define([lt_dict_add_subkey], [m4_define([$1($2:$3)], [$4])]) # lt_dict_fetch(DICT, KEY, [SUBKEY]) # ---------------------------------- m4_define([lt_dict_fetch], [m4_ifval([$3], m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]), m4_ifdef([$1($2)], [m4_defn([$1($2)])]))]) # lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE]) # ----------------------------------------------------------------- m4_define([lt_if_dict_fetch], [m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4], [$5], [$6])]) # lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...]) # -------------------------------------------------------------- m4_define([lt_dict_filter], [m4_if([$5], [], [], [lt_join(m4_quote(m4_default([$4], [[, ]])), lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]), [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/lib-link.m40000644000000000000000000000013214152153401017614 xustar000000000000000030 mtime=1638455041.157687325 30 atime=1638455042.424706363 30 ctime=1638455095.710507006 nordugrid-arc-6.14.0/m4/lib-link.m40000644000175000002070000007205514152153401017612 0ustar00mockbuildmock00000000000000# lib-link.m4 serial 13 (gettext-0.17) dnl Copyright (C) 2001-2007 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl From Bruno Haible. AC_PREREQ(2.54) dnl AC_LIB_LINKFLAGS(name [, dependencies]) searches for libname and dnl the libraries corresponding to explicit and implicit dependencies. dnl Sets and AC_SUBSTs the LIB${NAME} and LTLIB${NAME} variables and dnl augments the CPPFLAGS variable. dnl Sets and AC_SUBSTs the LIB${NAME}_PREFIX variable to nonempty if libname dnl was found in ${LIB${NAME}_PREFIX}/$acl_libdirstem. AC_DEFUN([AC_LIB_LINKFLAGS], [ AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) define([Name],[translit([$1],[./-], [___])]) define([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) AC_CACHE_CHECK([how to link with lib[]$1], [ac_cv_lib[]Name[]_libs], [ AC_LIB_LINKFLAGS_BODY([$1], [$2]) ac_cv_lib[]Name[]_libs="$LIB[]NAME" ac_cv_lib[]Name[]_ltlibs="$LTLIB[]NAME" ac_cv_lib[]Name[]_cppflags="$INC[]NAME" ac_cv_lib[]Name[]_prefix="$LIB[]NAME[]_PREFIX" ]) LIB[]NAME="$ac_cv_lib[]Name[]_libs" LTLIB[]NAME="$ac_cv_lib[]Name[]_ltlibs" INC[]NAME="$ac_cv_lib[]Name[]_cppflags" LIB[]NAME[]_PREFIX="$ac_cv_lib[]Name[]_prefix" AC_LIB_APPENDTOVAR([CPPFLAGS], [$INC]NAME) AC_SUBST([LIB]NAME) AC_SUBST([LTLIB]NAME) AC_SUBST([LIB]NAME[_PREFIX]) dnl Also set HAVE_LIB[]NAME so that AC_LIB_HAVE_LINKFLAGS can reuse the dnl results of this search when this library appears as a dependency. 
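dnl --------------------------------------------------------------------
dnl Usage sketch (illustrative only, not part of upstream lib-link.m4):
dnl a configure.ac that needs to link against libiconv could call
dnl
dnl   AC_LIB_LINKFLAGS([iconv])
dnl
dnl which leaves the required link options in LIBICONV (and LTLIBICONV
dnl for libtool links), augments CPPFLAGS, and honours a user-supplied
dnl
dnl   ./configure --with-libiconv-prefix=/opt/local
dnl
dnl when searching; the /opt/local prefix is only an example.
dnl --------------------------------------------------------------------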
HAVE_LIB[]NAME=yes undefine([Name]) undefine([NAME]) ]) dnl AC_LIB_HAVE_LINKFLAGS(name, dependencies, includes, testcode) dnl searches for libname and the libraries corresponding to explicit and dnl implicit dependencies, together with the specified include files and dnl the ability to compile and link the specified testcode. If found, it dnl sets and AC_SUBSTs HAVE_LIB${NAME}=yes and the LIB${NAME} and dnl LTLIB${NAME} variables and augments the CPPFLAGS variable, and dnl #defines HAVE_LIB${NAME} to 1. Otherwise, it sets and AC_SUBSTs dnl HAVE_LIB${NAME}=no and LIB${NAME} and LTLIB${NAME} to empty. dnl Sets and AC_SUBSTs the LIB${NAME}_PREFIX variable to nonempty if libname dnl was found in ${LIB${NAME}_PREFIX}/$acl_libdirstem. AC_DEFUN([AC_LIB_HAVE_LINKFLAGS], [ AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) define([Name],[translit([$1],[./-], [___])]) define([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) dnl Search for lib[]Name and define LIB[]NAME, LTLIB[]NAME and INC[]NAME dnl accordingly. AC_LIB_LINKFLAGS_BODY([$1], [$2]) dnl Add $INC[]NAME to CPPFLAGS before performing the following checks, dnl because if the user has installed lib[]Name and not disabled its use dnl via --without-lib[]Name-prefix, he wants to use it. ac_save_CPPFLAGS="$CPPFLAGS" AC_LIB_APPENDTOVAR([CPPFLAGS], [$INC]NAME) AC_CACHE_CHECK([for lib[]$1], [ac_cv_lib[]Name], [ ac_save_LIBS="$LIBS" LIBS="$LIBS $LIB[]NAME" AC_TRY_LINK([$3], [$4], [ac_cv_lib[]Name=yes], [ac_cv_lib[]Name=no]) LIBS="$ac_save_LIBS" ]) if test "$ac_cv_lib[]Name" = yes; then HAVE_LIB[]NAME=yes AC_DEFINE([HAVE_LIB]NAME, 1, [Define if you have the $1 library.]) AC_MSG_CHECKING([how to link with lib[]$1]) AC_MSG_RESULT([$LIB[]NAME]) else HAVE_LIB[]NAME=no dnl If $LIB[]NAME didn't lead to a usable library, we don't need dnl $INC[]NAME either. CPPFLAGS="$ac_save_CPPFLAGS" LIB[]NAME= LTLIB[]NAME= LIB[]NAME[]_PREFIX= fi AC_SUBST([HAVE_LIB]NAME) AC_SUBST([LIB]NAME) AC_SUBST([LTLIB]NAME) AC_SUBST([LIB]NAME[_PREFIX]) undefine([Name]) undefine([NAME]) ]) dnl Determine the platform dependent parameters needed to use rpath: dnl acl_libext, dnl acl_shlibext, dnl acl_hardcode_libdir_flag_spec, dnl acl_hardcode_libdir_separator, dnl acl_hardcode_direct, dnl acl_hardcode_minus_L. AC_DEFUN([AC_LIB_RPATH], [ dnl Tell automake >= 1.10 to complain if config.rpath is missing. m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([config.rpath])]) AC_REQUIRE([AC_PROG_CC]) dnl we use $CC, $GCC, $LDFLAGS AC_REQUIRE([AC_LIB_PROG_LD]) dnl we use $LD, $with_gnu_ld AC_REQUIRE([AC_CANONICAL_HOST]) dnl we use $host AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT]) dnl we use $ac_aux_dir AC_CACHE_CHECK([for shared library run path origin], acl_cv_rpath, [ CC="$CC" GCC="$GCC" LDFLAGS="$LDFLAGS" LD="$LD" with_gnu_ld="$with_gnu_ld" \ ${CONFIG_SHELL-/bin/sh} "$ac_aux_dir/config.rpath" "$host" > conftest.sh . ./conftest.sh rm -f ./conftest.sh acl_cv_rpath=done ]) wl="$acl_cv_wl" acl_libext="$acl_cv_libext" acl_shlibext="$acl_cv_shlibext" acl_libname_spec="$acl_cv_libname_spec" acl_library_names_spec="$acl_cv_library_names_spec" acl_hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec" acl_hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator" acl_hardcode_direct="$acl_cv_hardcode_direct" acl_hardcode_minus_L="$acl_cv_hardcode_minus_L" dnl Determine whether the user wants rpath handling at all. 
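dnl --------------------------------------------------------------------
dnl Usage sketch (illustrative only, not part of upstream lib-link.m4)
dnl for AC_LIB_HAVE_LINKFLAGS as documented above; the choice of zlib
dnl and the probe code are hypothetical:
dnl
dnl   AC_LIB_HAVE_LINKFLAGS([z], [], [#include <zlib.h>], [zlibVersion();])
dnl
dnl On success this sets HAVE_LIBZ=yes, #defines HAVE_LIBZ and leaves the
dnl needed options in LIBZ/LTLIBZ; on failure HAVE_LIBZ=no and LIBZ,
dnl LTLIBZ stay empty.
dnl --------------------------------------------------------------------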
AC_ARG_ENABLE(rpath, [ --disable-rpath do not hardcode runtime library paths], :, enable_rpath=yes) ]) dnl AC_LIB_LINKFLAGS_BODY(name [, dependencies]) searches for libname and dnl the libraries corresponding to explicit and implicit dependencies. dnl Sets the LIB${NAME}, LTLIB${NAME} and INC${NAME} variables. dnl Also, sets the LIB${NAME}_PREFIX variable to nonempty if libname was found dnl in ${LIB${NAME}_PREFIX}/$acl_libdirstem. AC_DEFUN([AC_LIB_LINKFLAGS_BODY], [ AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) define([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) dnl Autoconf >= 2.61 supports dots in --with options. define([N_A_M_E],[m4_if(m4_version_compare(m4_defn([m4_PACKAGE_VERSION]),[2.61]),[-1],[translit([$1],[.],[_])],[$1])]) dnl By default, look in $includedir and $libdir. use_additional=yes AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) AC_LIB_ARG_WITH([lib]N_A_M_E[-prefix], [ --with-lib]N_A_M_E[-prefix[=DIR] search for lib$1 in DIR/include and DIR/lib --without-lib]N_A_M_E[-prefix don't search for lib$1 in includedir and libdir], [ if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" fi fi ]) dnl Search the library and its dependencies in $additional_libdir and dnl $LDFLAGS. Using breadth-first-seach. LIB[]NAME= LTLIB[]NAME= INC[]NAME= LIB[]NAME[]_PREFIX= rpathdirs= ltrpathdirs= names_already_handled= names_next_round='$1 $2' while test -n "$names_next_round"; do names_this_round="$names_next_round" names_next_round= for name in $names_this_round; do already_handled= for n in $names_already_handled; do if test "$n" = "$name"; then already_handled=yes break fi done if test -z "$already_handled"; then names_already_handled="$names_already_handled $name" dnl See if it was already located by an earlier AC_LIB_LINKFLAGS dnl or AC_LIB_HAVE_LINKFLAGS call. uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` eval value=\"\$HAVE_LIB$uppername\" if test -n "$value"; then if test "$value" = yes; then eval value=\"\$LIB$uppername\" test -z "$value" || LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$value" eval value=\"\$LTLIB$uppername\" test -z "$value" || LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$value" else dnl An earlier call to AC_LIB_HAVE_LINKFLAGS has determined dnl that this library doesn't exist. So just drop it. : fi else dnl Search the library lib$name in $additional_libdir and $LDFLAGS dnl and the already constructed $LIBNAME/$LTLIBNAME. found_dir= found_la= found_so= found_a= eval libname=\"$acl_libname_spec\" # typically: libname=lib$name if test -n "$acl_shlibext"; then shrext=".$acl_shlibext" # typically: shrext=.so else shrext= fi if test $use_additional = yes; then dir="$additional_libdir" dnl The same code as in the loop below: dnl First look for a shared library. if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi dnl Then look for a static library. if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi fi if test "X$found_dir" = "X"; then for x in $LDFLAGS $LTLIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) case "$x" in -L*) dir=`echo "X$x" | sed -e 's/^X-L//'` dnl First look for a shared library. if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi dnl Then look for a static library. if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi ;; esac if test "X$found_dir" != "X"; then break fi done fi if test "X$found_dir" != "X"; then dnl Found the library. LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$found_dir -l$name" if test "X$found_so" != "X"; then dnl Linking with a shared library. We attempt to hardcode its dnl directory into the executable's runpath, unless it's the dnl standard /usr/lib. if test "$enable_rpath" = no || test "X$found_dir" = "X/usr/$acl_libdirstem"; then dnl No hardcoding is needed. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else dnl Use an explicit option to hardcode DIR into the resulting dnl binary. dnl Potentially add DIR to ltrpathdirs. dnl The ltrpathdirs will be appended to $LTLIBNAME at the end. haveit= for x in $ltrpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $found_dir" fi dnl The hardcoding into $LIBNAME is system dependent. if test "$acl_hardcode_direct" = yes; then dnl Using DIR/libNAME.so during linking hardcodes DIR into the dnl resulting binary. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then dnl Use an explicit option to hardcode DIR into the resulting dnl binary. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" dnl Potentially add DIR to rpathdirs. dnl The rpathdirs will be appended to $LIBNAME at the end. haveit= for x in $rpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $found_dir" fi else dnl Rely on "-L$found_dir". 
dnl But don't add it if it's already contained in the LDFLAGS dnl or the already constructed $LIBNAME haveit= for x in $LDFLAGS $LIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir" fi if test "$acl_hardcode_minus_L" != no; then dnl FIXME: Not sure whether we should use dnl "-L$found_dir -l$name" or "-L$found_dir $found_so" dnl here. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else dnl We cannot use $acl_hardcode_runpath_var and LD_RUN_PATH dnl here, because this doesn't fit in flags passed to the dnl compiler. So give up. No hardcoding. This affects only dnl very old systems. dnl FIXME: Not sure whether we should use dnl "-L$found_dir -l$name" or "-L$found_dir $found_so" dnl here. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name" fi fi fi fi else if test "X$found_a" != "X"; then dnl Linking with a static library. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_a" else dnl We shouldn't come here, but anyway it's good to have a dnl fallback. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir -l$name" fi fi dnl Assume the include files are nearby. additional_includedir= case "$found_dir" in */$acl_libdirstem | */$acl_libdirstem/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` LIB[]NAME[]_PREFIX="$basedir" additional_includedir="$basedir/include" ;; esac if test "X$additional_includedir" != "X"; then dnl Potentially add $additional_includedir to $INCNAME. dnl But don't add it dnl 1. if it's the standard /usr/include, dnl 2. if it's /usr/local/include and we are using GCC on Linux, dnl 3. if it's already present in $CPPFLAGS or the already dnl constructed $INCNAME, dnl 4. if it doesn't exist as a directory. if test "X$additional_includedir" != "X/usr/include"; then haveit= if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then for x in $CPPFLAGS $INC[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_includedir"; then dnl Really add $additional_includedir to $INCNAME. INC[]NAME="${INC[]NAME}${INC[]NAME:+ }-I$additional_includedir" fi fi fi fi fi dnl Look for dependencies. if test -n "$found_la"; then dnl Read the .la file. It defines the variables dnl dlname, library_names, old_library, dependency_libs, current, dnl age, revision, installed, dlopen, dlpreopen, libdir. save_libdir="$libdir" case "$found_la" in */* | *\\*) . "$found_la" ;; *) . "./$found_la" ;; esac libdir="$save_libdir" dnl We use only dependency_libs. for dep in $dependency_libs; do case "$dep" in -L*) additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` dnl Potentially add $additional_libdir to $LIBNAME and $LTLIBNAME. dnl But don't add it dnl 1. if it's the standard /usr/lib, dnl 2. if it's /usr/local/lib and we are using GCC on Linux, dnl 3. if it's already present in $LDFLAGS or the already dnl constructed $LIBNAME, dnl 4. if it doesn't exist as a directory. 
if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then haveit= if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then haveit= for x in $LDFLAGS $LIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LIBNAME. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$additional_libdir" fi fi haveit= for x in $LDFLAGS $LTLIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LTLIBNAME. LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$additional_libdir" fi fi fi fi ;; -R*) dir=`echo "X$dep" | sed -e 's/^X-R//'` if test "$enable_rpath" != no; then dnl Potentially add DIR to rpathdirs. dnl The rpathdirs will be appended to $LIBNAME at the end. haveit= for x in $rpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $dir" fi dnl Potentially add DIR to ltrpathdirs. dnl The ltrpathdirs will be appended to $LTLIBNAME at the end. haveit= for x in $ltrpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $dir" fi fi ;; -l*) dnl Handle this in the next round. names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` ;; *.la) dnl Handle this in the next round. Throw away the .la's dnl directory; it is already contained in a preceding -L dnl option. names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` ;; *) dnl Most likely an immediate library name. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$dep" LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$dep" ;; esac done fi else dnl Didn't find the library; assume it is in the system directories dnl known to the linker and runtime loader. (All the system dnl directories known to the linker should also be known to the dnl runtime loader, otherwise the system is severely misconfigured.) LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name" LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-l$name" fi fi fi done done if test "X$rpathdirs" != "X"; then if test -n "$acl_hardcode_libdir_separator"; then dnl Weird platform: only the last -rpath option counts, the user must dnl pass all path elements in one option. We can arrange that for a dnl single library, but not when more than one $LIBNAMEs are used. alldirs= for found_dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" done dnl Note: acl_hardcode_libdir_flag_spec uses $libdir and $wl. acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" else dnl The -rpath options are cumulative. for found_dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$found_dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" done fi fi if test "X$ltrpathdirs" != "X"; then dnl When using libtool, the option that works for both libraries and dnl executables is -R. The -R options are cumulative. 
for found_dir in $ltrpathdirs; do LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-R$found_dir" done fi ]) dnl AC_LIB_APPENDTOVAR(VAR, CONTENTS) appends the elements of CONTENTS to VAR, dnl unless already present in VAR. dnl Works only for CPPFLAGS, not for LIB* variables because that sometimes dnl contains two or three consecutive elements that belong together. AC_DEFUN([AC_LIB_APPENDTOVAR], [ for element in [$2]; do haveit= for x in $[$1]; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then [$1]="${[$1]}${[$1]:+ }$element" fi done ]) dnl For those cases where a variable contains several -L and -l options dnl referring to unknown libraries and directories, this macro determines the dnl necessary additional linker options for the runtime path. dnl AC_LIB_LINKFLAGS_FROM_LIBS([LDADDVAR], [LIBSVALUE], [USE-LIBTOOL]) dnl sets LDADDVAR to linker options needed together with LIBSVALUE. dnl If USE-LIBTOOL evaluates to non-empty, linking with libtool is assumed, dnl otherwise linking without libtool is assumed. AC_DEFUN([AC_LIB_LINKFLAGS_FROM_LIBS], [ AC_REQUIRE([AC_LIB_RPATH]) AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) $1= if test "$enable_rpath" != no; then if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then dnl Use an explicit option to hardcode directories into the resulting dnl binary. rpathdirs= next= for opt in $2; do if test -n "$next"; then dir="$next" dnl No need to hardcode the standard /usr/lib. if test "X$dir" != "X/usr/$acl_libdirstem"; then rpathdirs="$rpathdirs $dir" fi next= else case $opt in -L) next=yes ;; -L*) dir=`echo "X$opt" | sed -e 's,^X-L,,'` dnl No need to hardcode the standard /usr/lib. if test "X$dir" != "X/usr/$acl_libdirstem"; then rpathdirs="$rpathdirs $dir" fi next= ;; *) next= ;; esac fi done if test "X$rpathdirs" != "X"; then if test -n ""$3""; then dnl libtool is used for linking. Use -R options. for dir in $rpathdirs; do $1="${$1}${$1:+ }-R$dir" done else dnl The linker is used for linking directly. if test -n "$acl_hardcode_libdir_separator"; then dnl Weird platform: only the last -rpath option counts, the user dnl must pass all path elements in one option. alldirs= for dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$dir" done acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" $1="$flag" else dnl The -rpath options are cumulative. for dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" $1="${$1}${$1:+ }$flag" done fi fi fi fi fi AC_SUBST([$1]) ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/intlmacosx.m40000644000000000000000000000013214152153401020274 xustar000000000000000030 mtime=1638455041.107686573 30 atime=1638455042.429706438 30 ctime=1638455095.709506991 nordugrid-arc-6.14.0/m4/intlmacosx.m40000644000175000002070000000456514152153401020273 0ustar00mockbuildmock00000000000000# intlmacosx.m4 serial 1 (gettext-0.17) dnl Copyright (C) 2004-2007 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. 
dnl dnl This file can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Checks for special options needed on MacOS X. dnl Defines INTL_MACOSX_LIBS. AC_DEFUN([gt_INTL_MACOSX], [ dnl Check for API introduced in MacOS X 10.2. AC_CACHE_CHECK([for CFPreferencesCopyAppValue], gt_cv_func_CFPreferencesCopyAppValue, [gt_save_LIBS="$LIBS" LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation" AC_TRY_LINK([#include <CoreFoundation/CFPreferences.h>], [CFPreferencesCopyAppValue(NULL, NULL)], [gt_cv_func_CFPreferencesCopyAppValue=yes], [gt_cv_func_CFPreferencesCopyAppValue=no]) LIBS="$gt_save_LIBS"]) if test $gt_cv_func_CFPreferencesCopyAppValue = yes; then AC_DEFINE([HAVE_CFPREFERENCESCOPYAPPVALUE], 1, [Define to 1 if you have the MacOS X function CFPreferencesCopyAppValue in the CoreFoundation framework.]) fi dnl Check for API introduced in MacOS X 10.3. AC_CACHE_CHECK([for CFLocaleCopyCurrent], gt_cv_func_CFLocaleCopyCurrent, [gt_save_LIBS="$LIBS" LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation" AC_TRY_LINK([#include <CoreFoundation/CFLocale.h>], [CFLocaleCopyCurrent();], [gt_cv_func_CFLocaleCopyCurrent=yes], [gt_cv_func_CFLocaleCopyCurrent=no]) LIBS="$gt_save_LIBS"]) if test $gt_cv_func_CFLocaleCopyCurrent = yes; then AC_DEFINE([HAVE_CFLOCALECOPYCURRENT], 1, [Define to 1 if you have the MacOS X function CFLocaleCopyCurrent in the CoreFoundation framework.]) fi INTL_MACOSX_LIBS= if test $gt_cv_func_CFPreferencesCopyAppValue = yes || test $gt_cv_func_CFLocaleCopyCurrent = yes; then INTL_MACOSX_LIBS="-Wl,-framework -Wl,CoreFoundation" fi AC_SUBST([INTL_MACOSX_LIBS]) ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/lib-prefix.m40000644000000000000000000000013214152153401020154 xustar000000000000000030 mtime=1638455041.165687445 30 atime=1638455042.423706347 30 ctime=1638455095.711507021 nordugrid-arc-6.14.0/m4/lib-prefix.m40000644000175000002070000001503614152153401020146 0ustar00mockbuildmock00000000000000# lib-prefix.m4 serial 5 (gettext-0.15) dnl Copyright (C) 2001-2005 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl From Bruno Haible. dnl AC_LIB_ARG_WITH is synonymous to AC_ARG_WITH in autoconf-2.13, and dnl similar to AC_ARG_WITH in autoconf 2.52...2.57 except that it doesn't dnl require excessive bracketing. ifdef([AC_HELP_STRING], [AC_DEFUN([AC_LIB_ARG_WITH], [AC_ARG_WITH([$1],[[$2]],[$3],[$4])])], [AC_DEFUN([AC_][LIB_ARG_WITH], [AC_ARG_WITH([$1],[$2],[$3],[$4])])]) dnl AC_LIB_PREFIX adds to the CPPFLAGS and LDFLAGS the flags that are needed dnl to access previously installed libraries. The basic assumption is that dnl a user will want packages to use other packages he previously installed dnl with the same --prefix option. dnl This macro is not needed if only AC_LIB_LINKFLAGS is used to locate dnl libraries, but is otherwise very convenient.
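dnl A minimal usage sketch (hypothetical configure.ac fragment; the package
dnl name "myapp" is made up for illustration):
dnl   AC_INIT(myapp, 1.0)
dnl   AC_PROG_CC
dnl   AC_LIB_PREFIX
dnl After the call, -I$prefix/include is appended to CPPFLAGS and
dnl -L$prefix/lib (or lib64) to LDFLAGS, unless these are standard system
dnl directories, do not exist, or are already present; the user may instead
dnl point elsewhere with --with-lib-prefix=DIR.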
AC_DEFUN([AC_LIB_PREFIX], [ AC_BEFORE([$0], [AC_LIB_LINKFLAGS]) AC_REQUIRE([AC_PROG_CC]) AC_REQUIRE([AC_CANONICAL_HOST]) AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) dnl By default, look in $includedir and $libdir. use_additional=yes AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) AC_LIB_ARG_WITH([lib-prefix], [ --with-lib-prefix[=DIR] search for libraries in DIR/include and DIR/lib --without-lib-prefix don't search for libraries in includedir and libdir], [ if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" fi fi ]) if test $use_additional = yes; then dnl Potentially add $additional_includedir to $CPPFLAGS. dnl But don't add it dnl 1. if it's the standard /usr/include, dnl 2. if it's already present in $CPPFLAGS, dnl 3. if it's /usr/local/include and we are using GCC on Linux, dnl 4. if it doesn't exist as a directory. if test "X$additional_includedir" != "X/usr/include"; then haveit= for x in $CPPFLAGS; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then if test -d "$additional_includedir"; then dnl Really add $additional_includedir to $CPPFLAGS. CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }-I$additional_includedir" fi fi fi fi dnl Potentially add $additional_libdir to $LDFLAGS. dnl But don't add it dnl 1. if it's the standard /usr/lib, dnl 2. if it's already present in $LDFLAGS, dnl 3. if it's /usr/local/lib and we are using GCC on Linux, dnl 4. if it doesn't exist as a directory. if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then haveit= for x in $LDFLAGS; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then if test -n "$GCC"; then case $host_os in linux*) haveit=yes;; esac fi fi if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LDFLAGS. LDFLAGS="${LDFLAGS}${LDFLAGS:+ }-L$additional_libdir" fi fi fi fi fi ]) dnl AC_LIB_PREPARE_PREFIX creates variables acl_final_prefix, dnl acl_final_exec_prefix, containing the values to which $prefix and dnl $exec_prefix will expand at the end of the configure script. AC_DEFUN([AC_LIB_PREPARE_PREFIX], [ dnl Unfortunately, prefix and exec_prefix get only finally determined dnl at the end of configure. if test "X$prefix" = "XNONE"; then acl_final_prefix="$ac_default_prefix" else acl_final_prefix="$prefix" fi if test "X$exec_prefix" = "XNONE"; then acl_final_exec_prefix='${prefix}' else acl_final_exec_prefix="$exec_prefix" fi acl_save_prefix="$prefix" prefix="$acl_final_prefix" eval acl_final_exec_prefix=\"$acl_final_exec_prefix\" prefix="$acl_save_prefix" ]) dnl AC_LIB_WITH_FINAL_PREFIX([statement]) evaluates statement, with the dnl variables prefix and exec_prefix bound to the values they will have dnl at the end of the configure script. 
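dnl Rough usage sketch (my_expanded_libdir is a placeholder variable name):
dnl   AC_LIB_WITH_FINAL_PREFIX([eval my_expanded_libdir=\"$libdir\"])
dnl Inside the bracketed statement, prefix and exec_prefix temporarily hold
dnl their final values, so a libdir of '${exec_prefix}/lib' expands to a real
dnl path instead of leaving '${exec_prefix}' behind; the original prefix
dnl values are restored afterwards.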
AC_DEFUN([AC_LIB_WITH_FINAL_PREFIX], [ acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" $1 exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" ]) dnl AC_LIB_PREPARE_MULTILIB creates a variable acl_libdirstem, containing dnl the basename of the libdir, either "lib" or "lib64". AC_DEFUN([AC_LIB_PREPARE_MULTILIB], [ dnl There is no formal standard regarding lib and lib64. The current dnl practice is that on a system supporting 32-bit and 64-bit instruction dnl sets or ABIs, 64-bit libraries go under $prefix/lib64 and 32-bit dnl libraries go under $prefix/lib. We determine the compiler's default dnl mode by looking at the compiler's library search path. If at least dnl of its elements ends in /lib64 or points to a directory whose absolute dnl pathname ends in /lib64, we assume a 64-bit ABI. Otherwise we use the dnl default, namely "lib". acl_libdirstem=lib searchpath=`(LC_ALL=C $CC -print-search-dirs) 2>/dev/null | sed -n -e 's,^libraries: ,,p' | sed -e 's,^=,,'` if test -n "$searchpath"; then acl_save_IFS="${IFS= }"; IFS=":" for searchdir in $searchpath; do if test -d "$searchdir"; then case "$searchdir" in */lib64/ | */lib64 ) acl_libdirstem=lib64 ;; *) searchdir=`cd "$searchdir" && pwd` case "$searchdir" in */lib64 ) acl_libdirstem=lib64 ;; esac ;; esac fi done IFS="$acl_save_IFS" fi ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/lib-ld.m40000644000000000000000000000013114152153401017255 xustar000000000000000029 mtime=1638455041.14868719 30 atime=1638455042.428706423 30 ctime=1638455095.710507006 nordugrid-arc-6.14.0/m4/lib-ld.m40000644000175000002070000000653114152153401017250 0ustar00mockbuildmock00000000000000# lib-ld.m4 serial 3 (gettext-0.13) dnl Copyright (C) 1996-2003 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl Subroutines of libtool.m4, dnl with replacements s/AC_/AC_LIB/ and s/lt_cv/acl_cv/ to avoid collision dnl with libtool.m4. dnl From libtool-1.4. Sets the variable with_gnu_ld to yes or no. AC_DEFUN([AC_LIB_PROG_LD_GNU], [AC_CACHE_CHECK([if the linker ($LD) is GNU ld], acl_cv_prog_gnu_ld, [# I'd rather use --version here, but apparently some GNU ld's only accept -v. case `$LD -v 2>&1 conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. AC_MSG_CHECKING([for ld used by GCC]) case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [[\\/]* | [A-Za-z]:[\\/]*)] [re_direlt='/[^/][^/]*/\.\./'] # Canonicalize the path of ld ac_prog=`echo $ac_prog| sed 's%\\\\%/%g'` while echo $ac_prog | grep "$re_direlt" > /dev/null 2>&1; do ac_prog=`echo $ac_prog| sed "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. 
with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then AC_MSG_CHECKING([for GNU ld]) else AC_MSG_CHECKING([for non-GNU ld]) fi AC_CACHE_VAL(acl_cv_path_LD, [if test -z "$LD"; then IFS="${IFS= }"; ac_save_ifs="$IFS"; IFS="${IFS}${PATH_SEPARATOR-:}" for ac_dir in $PATH; do test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then acl_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some GNU ld's only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$acl_cv_path_LD" -v 2>&1 < /dev/null` in *GNU* | *'with BFD'*) test "$with_gnu_ld" != no && break ;; *) test "$with_gnu_ld" != yes && break ;; esac fi done IFS="$ac_save_ifs" else acl_cv_path_LD="$LD" # Let the user override the test with a path. fi]) LD="$acl_cv_path_LD" if test -n "$LD"; then AC_MSG_RESULT($LD) else AC_MSG_RESULT(no) fi test -z "$LD" && AC_MSG_ERROR([no acceptable ld found in \$PATH]) AC_LIB_PROG_LD_GNU ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/arc_paths.m40000644000000000000000000000013214152153376020072 xustar000000000000000030 mtime=1638455038.280644096 30 atime=1638455038.462646831 30 ctime=1638455095.704506916 nordugrid-arc-6.14.0/m4/arc_paths.m40000644000175000002070000000522614152153376020064 0ustar00mockbuildmock00000000000000dnl dnl Substitite some relative paths dnl AC_DEFUN([ARC_RELATIVE_PATHS], [ AC_REQUIRE([ARC_RELATIVE_PATHS_INIT]) AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_LIB_WITH_FINAL_PREFIX([ eval instprefix="\"${exec_prefix}\"" eval arc_libdir="\"${libdir}\"" eval arc_bindir="\"${bindir}\"" eval arc_sbindir="\"${sbindir}\"" eval arc_pkglibdir="\"${libdir}/arc\"" eval arc_pkglibexecdir="\"${libexecdir}/arc\"" # It seems arc_datadir should be evaluated twice to be expanded fully. 
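# With Autoconf's default layout, datadir expands to '${datarootdir}' and
# datarootdir in turn to '${prefix}/share', so a single eval still leaves a
# '${prefix}' reference behind; the second eval below resolves it.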
eval arc_datadir="\"${datadir}/arc\"" eval arc_datadir="\"${arc_datadir}\"" ]) libsubdir=`get_relative_path "$instprefix" "$arc_libdir"` pkglibsubdir=`get_relative_path "$instprefix" "$arc_pkglibdir"` pkglibexecsubdir=`get_relative_path "$instprefix" "$arc_pkglibexecdir"` pkgdatasubdir=`get_relative_path "$instprefix" "$arc_datadir"` pkglibdir_rel_to_pkglibexecdir=`get_relative_path "$arc_pkglibexecdir" "$arc_pkglibdir"` sbindir_rel_to_pkglibexecdir=`get_relative_path "$arc_pkglibexecdir" "$arc_sbindir"` bindir_rel_to_pkglibexecdir=`get_relative_path "$arc_pkglibexecdir" "$arc_bindir"` pkgdatadir_rel_to_pkglibexecdir=`get_relative_path "$arc_pkglibexecdir" "$arc_datadir"` AC_MSG_NOTICE([pkglib subdirectory is: $pkglibsubdir]) AC_MSG_NOTICE([pkglibexec subdirectory is: $pkglibexecsubdir]) AC_MSG_NOTICE([relative path of pkglib to pkglibexec is: $pkglibdir_rel_to_pkglibexecdir]) AC_SUBST([libsubdir]) AC_SUBST([pkglibsubdir]) AC_SUBST([pkglibexecsubdir]) AC_SUBST([pkglibdir_rel_to_pkglibexecdir]) AC_SUBST([sbindir_rel_to_pkglibexecdir]) AC_SUBST([bindir_rel_to_pkglibexecdir]) AC_SUBST([pkgdatadir_rel_to_pkglibexecdir]) AC_SUBST([pkgdatasubdir]) AC_DEFINE_UNQUOTED([INSTPREFIX], ["${instprefix}"], [installation prefix]) AC_DEFINE_UNQUOTED([LIBSUBDIR], ["${libsubdir}"], [library installation subdirectory]) AC_DEFINE_UNQUOTED([PKGLIBSUBDIR], ["${pkglibsubdir}"], [plugin installation subdirectory]) AC_DEFINE_UNQUOTED([PKGLIBEXECSUBDIR], ["${pkglibexecsubdir}"], [helper programs installation subdirectory]) AC_DEFINE_UNQUOTED([PKGDATASUBDIR], ["${pkgdatasubdir}"], [package data subdirectory]) ]) AC_DEFUN([ARC_RELATIVE_PATHS_INIT], [ get_relative_path() { olddir=`echo $[]1 | sed -e 's|/+|/|g' -e 's|^/||' -e 's|/*$|/|'` newdir=`echo $[]2 | sed -e 's|/+|/|g' -e 's|^/||' -e 's|/*$|/|'` O_IFS=$IFS IFS=/ relative="" common="" for i in $olddir; do if echo "$newdir" | grep -q "^$common$i/"; then common="$common$i/" else relative="../$relative" fi done IFS=$O_IFS echo $newdir | sed "s|^$common|$relative|" | sed 's|/*$||' } ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/ltoptions.m40000644000000000000000000000013214152153410020146 xustar000000000000000030 mtime=1638455048.117791904 30 atime=1638455048.477797313 30 ctime=1638455095.713507051 nordugrid-arc-6.14.0/m4/ltoptions.m40000644000175000002070000003007314152153410020136 0ustar00mockbuildmock00000000000000# Helper functions for option handling. -*- Autoconf -*- # # Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, # Inc. # Written by Gary V. Vaughan, 2004 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # serial 7 ltoptions.m4 # This is to help aclocal find these macros, as it can't see m4_define. AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])]) # _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME) # ------------------------------------------ m4_define([_LT_MANGLE_OPTION], [[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])]) # _LT_SET_OPTION(MACRO-NAME, OPTION-NAME) # --------------------------------------- # Set option OPTION-NAME for macro MACRO-NAME, and if there is a # matching handler defined, dispatch to it. Other OPTION-NAMEs are # saved as a flag. 
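# For instance, a configure.ac containing the (illustrative) call
#   LT_INIT([dlopen win32-dll])
# makes _LT_SET_OPTIONS invoke _LT_SET_OPTION once per listed option; both
# options have handlers registered with LT_OPTION_DEFINE below, so those
# handlers run, while an option without a handler is merely remembered as a
# flag and reported with an m4 warning.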
m4_define([_LT_SET_OPTION], [m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]), _LT_MANGLE_DEFUN([$1], [$2]), [m4_warning([Unknown $1 option `$2'])])[]dnl ]) # _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET]) # ------------------------------------------------------------ # Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. m4_define([_LT_IF_OPTION], [m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])]) # _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET) # ------------------------------------------------------- # Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME # are set. m4_define([_LT_UNLESS_OPTIONS], [m4_foreach([_LT_Option], m4_split(m4_normalize([$2])), [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option), [m4_define([$0_found])])])[]dnl m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3 ])[]dnl ]) # _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST) # ---------------------------------------- # OPTION-LIST is a space-separated list of Libtool options associated # with MACRO-NAME. If any OPTION has a matching handler declared with # LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about # the unknown option and exit. m4_defun([_LT_SET_OPTIONS], [# Set options m4_foreach([_LT_Option], m4_split(m4_normalize([$2])), [_LT_SET_OPTION([$1], _LT_Option)]) m4_if([$1],[LT_INIT],[ dnl dnl Simply set some default values (i.e off) if boolean options were not dnl specified: _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no ]) _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no ]) dnl dnl If no reference was made to various pairs of opposing options, then dnl we run the default mode handler for the pair. For example, if neither dnl `shared' nor `disable-shared' was passed, we enable building of shared dnl archives by default: _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED]) _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC]) _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC]) _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install], [_LT_ENABLE_FAST_INSTALL]) ]) ])# _LT_SET_OPTIONS ## --------------------------------- ## ## Macros to handle LT_INIT options. ## ## --------------------------------- ## # _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME) # ----------------------------------------- m4_define([_LT_MANGLE_DEFUN], [[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])]) # LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE) # ----------------------------------------------- m4_define([LT_OPTION_DEFINE], [m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl ])# LT_OPTION_DEFINE # dlopen # ------ LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes ]) AU_DEFUN([AC_LIBTOOL_DLOPEN], [_LT_SET_OPTION([LT_INIT], [dlopen]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the `dlopen' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], []) # win32-dll # --------- # Declare package support for building win32 dll's. 
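# Illustrative use (not a quote from any particular configure.ac):
#   LT_INIT([win32-dll])
# On mingw/cygwin hosts this option makes libtool locate the `as', `dlltool'
# and `objdump' tools below, which are needed to create and inspect DLLs; on
# other hosts it only sets enable_win32_dll=yes.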
LT_OPTION_DEFINE([LT_INIT], [win32-dll], [enable_win32_dll=yes case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*) AC_CHECK_TOOL(AS, as, false) AC_CHECK_TOOL(DLLTOOL, dlltool, false) AC_CHECK_TOOL(OBJDUMP, objdump, false) ;; esac test -z "$AS" && AS=as _LT_DECL([], [AS], [1], [Assembler program])dnl test -z "$DLLTOOL" && DLLTOOL=dlltool _LT_DECL([], [DLLTOOL], [1], [DLL creation program])dnl test -z "$OBJDUMP" && OBJDUMP=objdump _LT_DECL([], [OBJDUMP], [1], [Object dumper program])dnl ])# win32-dll AU_DEFUN([AC_LIBTOOL_WIN32_DLL], [AC_REQUIRE([AC_CANONICAL_HOST])dnl _LT_SET_OPTION([LT_INIT], [win32-dll]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the `win32-dll' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], []) # _LT_ENABLE_SHARED([DEFAULT]) # ---------------------------- # implement the --enable-shared flag, and supports the `shared' and # `disable-shared' LT_INIT options. # DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. m4_define([_LT_ENABLE_SHARED], [m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl AC_ARG_ENABLE([shared], [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@], [build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])], [p=${PACKAGE-default} case $enableval in yes) enable_shared=yes ;; no) enable_shared=no ;; *) enable_shared=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_shared=yes fi done IFS="$lt_save_ifs" ;; esac], [enable_shared=]_LT_ENABLE_SHARED_DEFAULT) _LT_DECL([build_libtool_libs], [enable_shared], [0], [Whether or not to build shared libraries]) ])# _LT_ENABLE_SHARED LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])]) LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])]) # Old names: AC_DEFUN([AC_ENABLE_SHARED], [_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared]) ]) AC_DEFUN([AC_DISABLE_SHARED], [_LT_SET_OPTION([LT_INIT], [disable-shared]) ]) AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)]) AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AM_ENABLE_SHARED], []) dnl AC_DEFUN([AM_DISABLE_SHARED], []) # _LT_ENABLE_STATIC([DEFAULT]) # ---------------------------- # implement the --enable-static flag, and support the `static' and # `disable-static' LT_INIT options. # DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. m4_define([_LT_ENABLE_STATIC], [m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl AC_ARG_ENABLE([static], [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@], [build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])], [p=${PACKAGE-default} case $enableval in yes) enable_static=yes ;; no) enable_static=no ;; *) enable_static=no # Look at the argument we got. We use all the common list separators. 
lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_static=yes fi done IFS="$lt_save_ifs" ;; esac], [enable_static=]_LT_ENABLE_STATIC_DEFAULT) _LT_DECL([build_old_libs], [enable_static], [0], [Whether or not to build static libraries]) ])# _LT_ENABLE_STATIC LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])]) LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])]) # Old names: AC_DEFUN([AC_ENABLE_STATIC], [_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static]) ]) AC_DEFUN([AC_DISABLE_STATIC], [_LT_SET_OPTION([LT_INIT], [disable-static]) ]) AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)]) AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AM_ENABLE_STATIC], []) dnl AC_DEFUN([AM_DISABLE_STATIC], []) # _LT_ENABLE_FAST_INSTALL([DEFAULT]) # ---------------------------------- # implement the --enable-fast-install flag, and support the `fast-install' # and `disable-fast-install' LT_INIT options. # DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. m4_define([_LT_ENABLE_FAST_INSTALL], [m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl AC_ARG_ENABLE([fast-install], [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@], [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])], [p=${PACKAGE-default} case $enableval in yes) enable_fast_install=yes ;; no) enable_fast_install=no ;; *) enable_fast_install=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_fast_install=yes fi done IFS="$lt_save_ifs" ;; esac], [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT) _LT_DECL([fast_install], [enable_fast_install], [0], [Whether or not to optimize for fast installation])dnl ])# _LT_ENABLE_FAST_INSTALL LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])]) LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])]) # Old names: AU_DEFUN([AC_ENABLE_FAST_INSTALL], [_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the `fast-install' option into LT_INIT's first parameter.]) ]) AU_DEFUN([AC_DISABLE_FAST_INSTALL], [_LT_SET_OPTION([LT_INIT], [disable-fast-install]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the `disable-fast-install' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], []) dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], []) # _LT_WITH_PIC([MODE]) # -------------------- # implement the --with-pic flag, and support the `pic-only' and `no-pic' # LT_INIT options. # MODE is either `yes' or `no'. If omitted, it defaults to `both'. m4_define([_LT_WITH_PIC], [AC_ARG_WITH([pic], [AS_HELP_STRING([--with-pic@<:@=PKGS@:>@], [try to use only PIC/non-PIC objects @<:@default=use both@:>@])], [lt_p=${PACKAGE-default} case $withval in yes|no) pic_mode=$withval ;; *) pic_mode=default # Look at the argument we got. We use all the common list separators. 
lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for lt_pkg in $withval; do IFS="$lt_save_ifs" if test "X$lt_pkg" = "X$lt_p"; then pic_mode=yes fi done IFS="$lt_save_ifs" ;; esac], [pic_mode=default]) test -z "$pic_mode" && pic_mode=m4_default([$1], [default]) _LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl ])# _LT_WITH_PIC LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])]) LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])]) # Old name: AU_DEFUN([AC_LIBTOOL_PICMODE], [_LT_SET_OPTION([LT_INIT], [pic-only]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the `pic-only' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_PICMODE], []) ## ----------------- ## ## LTDL_INIT Options ## ## ----------------- ## m4_define([_LTDL_MODE], []) LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive], [m4_define([_LTDL_MODE], [nonrecursive])]) LT_OPTION_DEFINE([LTDL_INIT], [recursive], [m4_define([_LTDL_MODE], [recursive])]) LT_OPTION_DEFINE([LTDL_INIT], [subproject], [m4_define([_LTDL_MODE], [subproject])]) m4_define([_LTDL_TYPE], []) LT_OPTION_DEFINE([LTDL_INIT], [installable], [m4_define([_LTDL_TYPE], [installable])]) LT_OPTION_DEFINE([LTDL_INIT], [convenience], [m4_define([_LTDL_TYPE], [convenience])]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/libtool.m40000644000000000000000000000013214152153410017557 xustar000000000000000030 mtime=1638455048.041790762 30 atime=1638455048.482797388 30 ctime=1638455095.712507036 nordugrid-arc-6.14.0/m4/libtool.m40000644000175000002070000105743214152153410017560 0ustar00mockbuildmock00000000000000# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*- # # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, # 2006, 2007, 2008, 2009, 2010, 2011 Free Software # Foundation, Inc. # Written by Gordon Matzigkeit, 1996 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. m4_define([_LT_COPYING], [dnl # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, # 2006, 2007, 2008, 2009, 2010, 2011 Free Software # Foundation, Inc. # Written by Gordon Matzigkeit, 1996 # # This file is part of GNU Libtool. # # GNU Libtool is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of # the License, or (at your option) any later version. # # As a special exception to the GNU General Public License, # if you distribute this file as part of a program or library that # is built using GNU Libtool, you may include this file under the # same distribution terms that you use for the rest of that program. # # GNU Libtool is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Libtool; see the file COPYING. If not, a copy # can be downloaded from http://www.gnu.org/licenses/gpl.html, or # obtained by writing to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
]) # serial 57 LT_INIT # LT_PREREQ(VERSION) # ------------------ # Complain and exit if this libtool version is less that VERSION. m4_defun([LT_PREREQ], [m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1, [m4_default([$3], [m4_fatal([Libtool version $1 or higher is required], 63)])], [$2])]) # _LT_CHECK_BUILDDIR # ------------------ # Complain if the absolute build directory name contains unusual characters m4_defun([_LT_CHECK_BUILDDIR], [case `pwd` in *\ * | *\ *) AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;; esac ]) # LT_INIT([OPTIONS]) # ------------------ AC_DEFUN([LT_INIT], [AC_PREREQ([2.58])dnl We use AC_INCLUDES_DEFAULT AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl AC_BEFORE([$0], [LT_LANG])dnl AC_BEFORE([$0], [LT_OUTPUT])dnl AC_BEFORE([$0], [LTDL_INIT])dnl m4_require([_LT_CHECK_BUILDDIR])dnl dnl Autoconf doesn't catch unexpanded LT_ macros by default: m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4 dnl unless we require an AC_DEFUNed macro: AC_REQUIRE([LTOPTIONS_VERSION])dnl AC_REQUIRE([LTSUGAR_VERSION])dnl AC_REQUIRE([LTVERSION_VERSION])dnl AC_REQUIRE([LTOBSOLETE_VERSION])dnl m4_require([_LT_PROG_LTMAIN])dnl _LT_SHELL_INIT([SHELL=${CONFIG_SHELL-/bin/sh}]) dnl Parse OPTIONS _LT_SET_OPTIONS([$0], [$1]) # This can be used to rebuild libtool when needed LIBTOOL_DEPS="$ltmain" # Always use our own libtool. LIBTOOL='$(SHELL) $(top_builddir)/libtool' AC_SUBST(LIBTOOL)dnl _LT_SETUP # Only expand once: m4_define([LT_INIT]) ])# LT_INIT # Old names: AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT]) AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_PROG_LIBTOOL], []) dnl AC_DEFUN([AM_PROG_LIBTOOL], []) # _LT_CC_BASENAME(CC) # ------------------- # Calculate cc_basename. Skip known compiler wrappers and cross-prefix. m4_defun([_LT_CC_BASENAME], [for cc_temp in $1""; do case $cc_temp in compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;; distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;; \-*) ;; *) break;; esac done cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` ]) # _LT_FILEUTILS_DEFAULTS # ---------------------- # It is okay to use these file commands and assume they have been set # sensibly after `m4_require([_LT_FILEUTILS_DEFAULTS])'. 
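# A minimal sketch of the intended pattern (_MY_MACRO is a made-up name):
#   m4_defun([_MY_MACRO],
#   [m4_require([_LT_FILEUTILS_DEFAULTS])dnl
#   $RM conftest.tmp        # $RM now reliably defaults to "rm -f"
#   ])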
m4_defun([_LT_FILEUTILS_DEFAULTS], [: ${CP="cp -f"} : ${MV="mv -f"} : ${RM="rm -f"} ])# _LT_FILEUTILS_DEFAULTS # _LT_SETUP # --------- m4_defun([_LT_SETUP], [AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_CANONICAL_BUILD])dnl AC_REQUIRE([_LT_PREPARE_SED_QUOTE_VARS])dnl AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl _LT_DECL([], [PATH_SEPARATOR], [1], [The PATH separator for the build system])dnl dnl _LT_DECL([], [host_alias], [0], [The host system])dnl _LT_DECL([], [host], [0])dnl _LT_DECL([], [host_os], [0])dnl dnl _LT_DECL([], [build_alias], [0], [The build system])dnl _LT_DECL([], [build], [0])dnl _LT_DECL([], [build_os], [0])dnl dnl AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([LT_PATH_LD])dnl AC_REQUIRE([LT_PATH_NM])dnl dnl AC_REQUIRE([AC_PROG_LN_S])dnl test -z "$LN_S" && LN_S="ln -s" _LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl dnl AC_REQUIRE([LT_CMD_MAX_LEN])dnl _LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl _LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_CHECK_SHELL_FEATURES])dnl m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl m4_require([_LT_CMD_RELOAD])dnl m4_require([_LT_CHECK_MAGIC_METHOD])dnl m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl m4_require([_LT_CMD_OLD_ARCHIVE])dnl m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl m4_require([_LT_WITH_SYSROOT])dnl _LT_CONFIG_LIBTOOL_INIT([ # See if we are running on zsh, and set the options which allow our # commands through without removal of \ escapes INIT. if test -n "\${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi ]) if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi _LT_CHECK_OBJDIR m4_require([_LT_TAG_COMPILER])dnl case $host_os in aix3*) # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test "X${COLLECT_NAMES+set}" != Xset; then COLLECT_NAMES= export COLLECT_NAMES fi ;; esac # Global variables: ofile=libtool can_build_shared=yes # All known linkers require a `.a' archive for static linking (except MSVC, # which needs '.lib'). libext=a with_gnu_ld="$lt_cv_prog_gnu_ld" old_CC="$CC" old_CFLAGS="$CFLAGS" # Set sane defaults for various variables test -z "$CC" && CC=cc test -z "$LTCC" && LTCC=$CC test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS test -z "$LD" && LD=ld test -z "$ac_objext" && ac_objext=o _LT_CC_BASENAME([$compiler]) # Only perform the check for file, if the check method requires it test -z "$MAGIC_CMD" && MAGIC_CMD=file case $deplibs_check_method in file_magic*) if test "$file_magic_cmd" = '$MAGIC_CMD'; then _LT_PATH_MAGIC fi ;; esac # Use C for the default configuration in the libtool script LT_SUPPORTED_TAG([CC]) _LT_LANG_C_CONFIG _LT_LANG_DEFAULT_CONFIG _LT_CONFIG_COMMANDS ])# _LT_SETUP # _LT_PREPARE_SED_QUOTE_VARS # -------------------------- # Define a few sed substitution that help us do robust quoting. m4_defun([_LT_PREPARE_SED_QUOTE_VARS], [# Backslashify metacharacters that are still active within # double-quoted strings. sed_quote_subst='s/\([["`$\\]]\)/\\\1/g' # Same as above, but do not quote variable references. double_quote_subst='s/\([["`\\]]\)/\\\1/g' # Sed substitution to delay expansion of an escaped shell variable in a # double_quote_subst'ed string. delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' # Sed substitution to delay expansion of an escaped single quote. 
delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' # Sed substitution to avoid accidental globbing in evaled expressions no_glob_subst='s/\*/\\\*/g' ]) # _LT_PROG_LTMAIN # --------------- # Note that this code is called both from `configure', and `config.status' # now that we use AC_CONFIG_COMMANDS to generate libtool. Notably, # `config.status' has no value for ac_aux_dir unless we are using Automake, # so we pass a copy along to make sure it has a sensible value anyway. m4_defun([_LT_PROG_LTMAIN], [m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl _LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir']) ltmain="$ac_aux_dir/ltmain.sh" ])# _LT_PROG_LTMAIN ## ------------------------------------- ## ## Accumulate code for creating libtool. ## ## ------------------------------------- ## # So that we can recreate a full libtool script including additional # tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS # in macros and then make a single call at the end using the `libtool' # label. # _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS]) # ---------------------------------------- # Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later. m4_define([_LT_CONFIG_LIBTOOL_INIT], [m4_ifval([$1], [m4_append([_LT_OUTPUT_LIBTOOL_INIT], [$1 ])])]) # Initialize. m4_define([_LT_OUTPUT_LIBTOOL_INIT]) # _LT_CONFIG_LIBTOOL([COMMANDS]) # ------------------------------ # Register COMMANDS to be passed to AC_CONFIG_COMMANDS later. m4_define([_LT_CONFIG_LIBTOOL], [m4_ifval([$1], [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS], [$1 ])])]) # Initialize. m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS]) # _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS]) # ----------------------------------------------------- m4_defun([_LT_CONFIG_SAVE_COMMANDS], [_LT_CONFIG_LIBTOOL([$1]) _LT_CONFIG_LIBTOOL_INIT([$2]) ]) # _LT_FORMAT_COMMENT([COMMENT]) # ----------------------------- # Add leading comment marks to the start of each line, and a trailing # full-stop to the whole comment if one is not present already. m4_define([_LT_FORMAT_COMMENT], [m4_ifval([$1], [ m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])], [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.]) )]) ## ------------------------ ## ## FIXME: Eliminate VARNAME ## ## ------------------------ ## # _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?]) # ------------------------------------------------------------------- # CONFIGNAME is the name given to the value in the libtool script. # VARNAME is the (base) name used in the configure script. # VALUE may be 0, 1 or 2 for a computed quote escaped value based on # VARNAME. Any other value will be used directly. 
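# For example, the call used elsewhere in this file
#   _LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])
# stores the configure variable LN_S in the generated libtool script under
# the same name, using the quote-escaped form of its value (VALUE 1) and the
# given description; _LT_TAGDECL does the same but marks the variable as
# per-language (tagged).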
m4_define([_LT_DECL], [lt_if_append_uniq([lt_decl_varnames], [$2], [, ], [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name], [m4_ifval([$1], [$1], [$2])]) lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3]) m4_ifval([$4], [lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])]) lt_dict_add_subkey([lt_decl_dict], [$2], [tagged?], [m4_ifval([$5], [yes], [no])])]) ]) # _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION]) # -------------------------------------------------------- m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])]) # lt_decl_tag_varnames([SEPARATOR], [VARNAME1...]) # ------------------------------------------------ m4_define([lt_decl_tag_varnames], [_lt_decl_filter([tagged?], [yes], $@)]) # _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..]) # --------------------------------------------------------- m4_define([_lt_decl_filter], [m4_case([$#], [0], [m4_fatal([$0: too few arguments: $#])], [1], [m4_fatal([$0: too few arguments: $#: $1])], [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)], [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)], [lt_dict_filter([lt_decl_dict], $@)])[]dnl ]) # lt_decl_quote_varnames([SEPARATOR], [VARNAME1...]) # -------------------------------------------------- m4_define([lt_decl_quote_varnames], [_lt_decl_filter([value], [1], $@)]) # lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...]) # --------------------------------------------------- m4_define([lt_decl_dquote_varnames], [_lt_decl_filter([value], [2], $@)]) # lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...]) # --------------------------------------------------- m4_define([lt_decl_varnames_tagged], [m4_assert([$# <= 2])dnl _$0(m4_quote(m4_default([$1], [[, ]])), m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]), m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))]) m4_define([_lt_decl_varnames_tagged], [m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])]) # lt_decl_all_varnames([SEPARATOR], [VARNAME1...]) # ------------------------------------------------ m4_define([lt_decl_all_varnames], [_$0(m4_quote(m4_default([$1], [[, ]])), m4_if([$2], [], m4_quote(lt_decl_varnames), m4_quote(m4_shift($@))))[]dnl ]) m4_define([_lt_decl_all_varnames], [lt_join($@, lt_decl_varnames_tagged([$1], lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl ]) # _LT_CONFIG_STATUS_DECLARE([VARNAME]) # ------------------------------------ # Quote a variable value, and forward it to `config.status' so that its # declaration there will have the same value as in `configure'. VARNAME # must have a single quote delimited value for this to work. m4_define([_LT_CONFIG_STATUS_DECLARE], [$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`']) # _LT_CONFIG_STATUS_DECLARATIONS # ------------------------------ # We delimit libtool config variables with single quotes, so when # we write them to config.status, we have to be sure to quote all # embedded single quotes properly. 
In configure, this macro expands # each variable declared with _LT_DECL (and _LT_TAGDECL) into: # # ='`$ECHO "$" | $SED "$delay_single_quote_subst"`' m4_defun([_LT_CONFIG_STATUS_DECLARATIONS], [m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames), [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])]) # _LT_LIBTOOL_TAGS # ---------------- # Output comment and list of tags supported by the script m4_defun([_LT_LIBTOOL_TAGS], [_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl available_tags="_LT_TAGS"dnl ]) # _LT_LIBTOOL_DECLARE(VARNAME, [TAG]) # ----------------------------------- # Extract the dictionary values for VARNAME (optionally with TAG) and # expand to a commented shell variable setting: # # # Some comment about what VAR is for. # visible_name=$lt_internal_name m4_define([_LT_LIBTOOL_DECLARE], [_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [description])))[]dnl m4_pushdef([_libtool_name], m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])), [0], [_libtool_name=[$]$1], [1], [_libtool_name=$lt_[]$1], [2], [_libtool_name=$lt_[]$1], [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl ]) # _LT_LIBTOOL_CONFIG_VARS # ----------------------- # Produce commented declarations of non-tagged libtool config variables # suitable for insertion in the LIBTOOL CONFIG section of the `libtool' # script. Tagged libtool config variables (even for the LIBTOOL CONFIG # section) are produced by _LT_LIBTOOL_TAG_VARS. m4_defun([_LT_LIBTOOL_CONFIG_VARS], [m4_foreach([_lt_var], m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)), [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])]) # _LT_LIBTOOL_TAG_VARS(TAG) # ------------------------- m4_define([_LT_LIBTOOL_TAG_VARS], [m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames), [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])]) # _LT_TAGVAR(VARNAME, [TAGNAME]) # ------------------------------ m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])]) # _LT_CONFIG_COMMANDS # ------------------- # Send accumulated output to $CONFIG_STATUS. Thanks to the lists of # variables for single and double quote escaping we saved from calls # to _LT_DECL, we can put quote escaped variables declarations # into `config.status', and then the shell code to quote escape them in # for loops in `config.status'. Finally, any additional code accumulated # from calls to _LT_CONFIG_LIBTOOL_INIT is expanded. m4_defun([_LT_CONFIG_COMMANDS], [AC_PROVIDE_IFELSE([LT_OUTPUT], dnl If the libtool generation code has been placed in $CONFIG_LT, dnl instead of duplicating it all over again into config.status, dnl then we will have config.status run $CONFIG_LT later, so it dnl needs to know what name is stored there: [AC_CONFIG_COMMANDS([libtool], [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])], dnl If the libtool generation code is destined for config.status, dnl expand the accumulated commands and init code now: [AC_CONFIG_COMMANDS([libtool], [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])]) ])#_LT_CONFIG_COMMANDS # Initialize. m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT], [ # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. 
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH sed_quote_subst='$sed_quote_subst' double_quote_subst='$double_quote_subst' delay_variable_subst='$delay_variable_subst' _LT_CONFIG_STATUS_DECLARATIONS LTCC='$LTCC' LTCFLAGS='$LTCFLAGS' compiler='$compiler_DEFAULT' # A function that is used when there is no print builtin or printf. func_fallback_echo () { eval 'cat <<_LTECHO_EOF \$[]1 _LTECHO_EOF' } # Quote evaled strings. for var in lt_decl_all_varnames([[ \ ]], lt_decl_quote_varnames); do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[[\\\\\\\`\\"\\\$]]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done # Double-quote double-evaled strings. for var in lt_decl_all_varnames([[ \ ]], lt_decl_dquote_varnames); do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[[\\\\\\\`\\"\\\$]]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done _LT_OUTPUT_LIBTOOL_INIT ]) # _LT_GENERATED_FILE_INIT(FILE, [COMMENT]) # ------------------------------------ # Generate a child script FILE with all initialization necessary to # reuse the environment learned by the parent script, and make the # file executable. If COMMENT is supplied, it is inserted after the # `#!' sequence but before initialization text begins. After this # macro, additional text can be appended to FILE to form the body of # the child script. The macro ends with non-zero status if the # file could not be fully written (such as if the disk is full). m4_ifdef([AS_INIT_GENERATED], [m4_defun([_LT_GENERATED_FILE_INIT],[AS_INIT_GENERATED($@)])], [m4_defun([_LT_GENERATED_FILE_INIT], [m4_require([AS_PREPARE])]dnl [m4_pushdef([AS_MESSAGE_LOG_FD])]dnl [lt_write_fail=0 cat >$1 <<_ASEOF || lt_write_fail=1 #! $SHELL # Generated by $as_me. $2 SHELL=\${CONFIG_SHELL-$SHELL} export SHELL _ASEOF cat >>$1 <<\_ASEOF || lt_write_fail=1 AS_SHELL_SANITIZE _AS_PREPARE exec AS_MESSAGE_FD>&1 _ASEOF test $lt_write_fail = 0 && chmod +x $1[]dnl m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT # LT_OUTPUT # --------- # This macro allows early generation of the libtool script (before # AC_OUTPUT is called), incase it is used in configure for compilation # tests. AC_DEFUN([LT_OUTPUT], [: ${CONFIG_LT=./config.lt} AC_MSG_NOTICE([creating $CONFIG_LT]) _LT_GENERATED_FILE_INIT(["$CONFIG_LT"], [# Run this file to recreate a libtool stub with the current configuration.]) cat >>"$CONFIG_LT" <<\_LTEOF lt_cl_silent=false exec AS_MESSAGE_LOG_FD>>config.log { echo AS_BOX([Running $as_me.]) } >&AS_MESSAGE_LOG_FD lt_cl_help="\ \`$as_me' creates a local libtool stub from the current configuration, for use in further configure time tests before the real libtool is generated. Usage: $[0] [[OPTIONS]] -h, --help print this help, then exit -V, --version print version number, then exit -q, --quiet do not print progress messages -d, --debug don't remove temporary files Report bugs to ." lt_cl_version="\ m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION]) configured by $[0], generated by m4_PACKAGE_STRING. Copyright (C) 2011 Free Software Foundation, Inc. This config.lt script is free software; the Free Software Foundation gives unlimited permision to copy, distribute and modify it." 
while test $[#] != 0 do case $[1] in --version | --v* | -V ) echo "$lt_cl_version"; exit 0 ;; --help | --h* | -h ) echo "$lt_cl_help"; exit 0 ;; --debug | --d* | -d ) debug=: ;; --quiet | --q* | --silent | --s* | -q ) lt_cl_silent=: ;; -*) AC_MSG_ERROR([unrecognized option: $[1] Try \`$[0] --help' for more information.]) ;; *) AC_MSG_ERROR([unrecognized argument: $[1] Try \`$[0] --help' for more information.]) ;; esac shift done if $lt_cl_silent; then exec AS_MESSAGE_FD>/dev/null fi _LTEOF cat >>"$CONFIG_LT" <<_LTEOF _LT_OUTPUT_LIBTOOL_COMMANDS_INIT _LTEOF cat >>"$CONFIG_LT" <<\_LTEOF AC_MSG_NOTICE([creating $ofile]) _LT_OUTPUT_LIBTOOL_COMMANDS AS_EXIT(0) _LTEOF chmod +x "$CONFIG_LT" # configure is writing to config.log, but config.lt does its own redirection, # appending to config.log, which fails on DOS, as config.log is still kept # open by configure. Here we exec the FD to /dev/null, effectively closing # config.log, so it can be properly (re)opened and appended to by config.lt. lt_cl_success=: test "$silent" = yes && lt_config_lt_args="$lt_config_lt_args --quiet" exec AS_MESSAGE_LOG_FD>/dev/null $SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false exec AS_MESSAGE_LOG_FD>>config.log $lt_cl_success || AS_EXIT(1) ])# LT_OUTPUT # _LT_CONFIG(TAG) # --------------- # If TAG is the built-in tag, create an initial libtool script with a # default configuration from the untagged config vars. Otherwise add code # to config.status for appending the configuration named by TAG from the # matching tagged config vars. m4_defun([_LT_CONFIG], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl _LT_CONFIG_SAVE_COMMANDS([ m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl m4_if(_LT_TAG, [C], [ # See if we are running on zsh, and set the options which allow our # commands through without removal of \ escapes. if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi cfgfile="${ofile}T" trap "$RM \"$cfgfile\"; exit 1" 1 2 15 $RM "$cfgfile" cat <<_LT_EOF >> "$cfgfile" #! $SHELL # `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. # Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION # Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: # NOTE: Changes made to this file will be lost: look at ltmain.sh. # _LT_COPYING _LT_LIBTOOL_TAGS # ### BEGIN LIBTOOL CONFIG _LT_LIBTOOL_CONFIG_VARS _LT_LIBTOOL_TAG_VARS # ### END LIBTOOL CONFIG _LT_EOF case $host_os in aix3*) cat <<\_LT_EOF >> "$cfgfile" # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test "X${COLLECT_NAMES+set}" != Xset; then COLLECT_NAMES= export COLLECT_NAMES fi _LT_EOF ;; esac _LT_PROG_LTMAIN # We use sed instead of cat because bash on DJGPP gets confused if # if finds mixed CR/LF and LF-only lines. Since sed operates in # text mode, it properly converts lines to CR/LF. This bash problem # is reportedly fixed, but why not run on old versions too? sed '$q' "$ltmain" >> "$cfgfile" \ || (rm -f "$cfgfile"; exit 1) _LT_PROG_REPLACE_SHELLFNS mv -f "$cfgfile" "$ofile" || (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") chmod +x "$ofile" ], [cat <<_LT_EOF >> "$ofile" dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded dnl in a comment (ie after a #). 
# ### BEGIN LIBTOOL TAG CONFIG: $1 _LT_LIBTOOL_TAG_VARS(_LT_TAG) # ### END LIBTOOL TAG CONFIG: $1 _LT_EOF ])dnl /m4_if ], [m4_if([$1], [], [ PACKAGE='$PACKAGE' VERSION='$VERSION' TIMESTAMP='$TIMESTAMP' RM='$RM' ofile='$ofile'], []) ])dnl /_LT_CONFIG_SAVE_COMMANDS ])# _LT_CONFIG # LT_SUPPORTED_TAG(TAG) # --------------------- # Trace this macro to discover what tags are supported by the libtool # --tag option, using: # autoconf --trace 'LT_SUPPORTED_TAG:$1' AC_DEFUN([LT_SUPPORTED_TAG], []) # C support is built-in for now m4_define([_LT_LANG_C_enabled], []) m4_define([_LT_TAGS], []) # LT_LANG(LANG) # ------------- # Enable libtool support for the given language if not already enabled. AC_DEFUN([LT_LANG], [AC_BEFORE([$0], [LT_OUTPUT])dnl m4_case([$1], [C], [_LT_LANG(C)], [C++], [_LT_LANG(CXX)], [Go], [_LT_LANG(GO)], [Java], [_LT_LANG(GCJ)], [Fortran 77], [_LT_LANG(F77)], [Fortran], [_LT_LANG(FC)], [Windows Resource], [_LT_LANG(RC)], [m4_ifdef([_LT_LANG_]$1[_CONFIG], [_LT_LANG($1)], [m4_fatal([$0: unsupported language: "$1"])])])dnl ])# LT_LANG # _LT_LANG(LANGNAME) # ------------------ m4_defun([_LT_LANG], [m4_ifdef([_LT_LANG_]$1[_enabled], [], [LT_SUPPORTED_TAG([$1])dnl m4_append([_LT_TAGS], [$1 ])dnl m4_define([_LT_LANG_]$1[_enabled], [])dnl _LT_LANG_$1_CONFIG($1)])dnl ])# _LT_LANG m4_ifndef([AC_PROG_GO], [ ############################################################ # NOTE: This macro has been submitted for inclusion into # # GNU Autoconf as AC_PROG_GO. When it is available in # # a released version of Autoconf we should remove this # # macro and use it instead. # ############################################################ m4_defun([AC_PROG_GO], [AC_LANG_PUSH(Go)dnl AC_ARG_VAR([GOC], [Go compiler command])dnl AC_ARG_VAR([GOFLAGS], [Go compiler flags])dnl _AC_ARG_VAR_LDFLAGS()dnl AC_CHECK_TOOL(GOC, gccgo) if test -z "$GOC"; then if test -n "$ac_tool_prefix"; then AC_CHECK_PROG(GOC, [${ac_tool_prefix}gccgo], [${ac_tool_prefix}gccgo]) fi fi if test -z "$GOC"; then AC_CHECK_PROG(GOC, gccgo, gccgo, false) fi ])#m4_defun ])#m4_ifndef # _LT_LANG_DEFAULT_CONFIG # ----------------------- m4_defun([_LT_LANG_DEFAULT_CONFIG], [AC_PROVIDE_IFELSE([AC_PROG_CXX], [LT_LANG(CXX)], [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])]) AC_PROVIDE_IFELSE([AC_PROG_F77], [LT_LANG(F77)], [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])]) AC_PROVIDE_IFELSE([AC_PROG_FC], [LT_LANG(FC)], [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])]) dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal dnl pulling things in needlessly. 
AC_PROVIDE_IFELSE([AC_PROG_GCJ], [LT_LANG(GCJ)], [AC_PROVIDE_IFELSE([A][M_PROG_GCJ], [LT_LANG(GCJ)], [AC_PROVIDE_IFELSE([LT_PROG_GCJ], [LT_LANG(GCJ)], [m4_ifdef([AC_PROG_GCJ], [m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])]) m4_ifdef([A][M_PROG_GCJ], [m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])]) m4_ifdef([LT_PROG_GCJ], [m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])]) AC_PROVIDE_IFELSE([AC_PROG_GO], [LT_LANG(GO)], [m4_define([AC_PROG_GO], defn([AC_PROG_GO])[LT_LANG(GO)])]) AC_PROVIDE_IFELSE([LT_PROG_RC], [LT_LANG(RC)], [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])]) ])# _LT_LANG_DEFAULT_CONFIG # Obsolete macros: AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)]) AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)]) AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)]) AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)]) AU_DEFUN([AC_LIBTOOL_RC], [LT_LANG(Windows Resource)]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_CXX], []) dnl AC_DEFUN([AC_LIBTOOL_F77], []) dnl AC_DEFUN([AC_LIBTOOL_FC], []) dnl AC_DEFUN([AC_LIBTOOL_GCJ], []) dnl AC_DEFUN([AC_LIBTOOL_RC], []) # _LT_TAG_COMPILER # ---------------- m4_defun([_LT_TAG_COMPILER], [AC_REQUIRE([AC_PROG_CC])dnl _LT_DECL([LTCC], [CC], [1], [A C compiler])dnl _LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl _LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl _LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC ])# _LT_TAG_COMPILER # _LT_COMPILER_BOILERPLATE # ------------------------ # Check for compiler boilerplate output or warnings with # the simple compiler test code. m4_defun([_LT_COMPILER_BOILERPLATE], [m4_require([_LT_DECL_SED])dnl ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ])# _LT_COMPILER_BOILERPLATE # _LT_LINKER_BOILERPLATE # ---------------------- # Check for linker boilerplate output or warnings with # the simple link test code. 
m4_defun([_LT_LINKER_BOILERPLATE], [m4_require([_LT_DECL_SED])dnl ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* ])# _LT_LINKER_BOILERPLATE # _LT_REQUIRED_DARWIN_CHECKS # ------------------------- m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[ case $host_os in rhapsody* | darwin*) AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:]) AC_CHECK_TOOL([NMEDIT], [nmedit], [:]) AC_CHECK_TOOL([LIPO], [lipo], [:]) AC_CHECK_TOOL([OTOOL], [otool], [:]) AC_CHECK_TOOL([OTOOL64], [otool64], [:]) _LT_DECL([], [DSYMUTIL], [1], [Tool to manipulate archived DWARF debug symbol files on Mac OS X]) _LT_DECL([], [NMEDIT], [1], [Tool to change global to local symbols on Mac OS X]) _LT_DECL([], [LIPO], [1], [Tool to manipulate fat objects and archives on Mac OS X]) _LT_DECL([], [OTOOL], [1], [ldd/readelf like tool for Mach-O binaries on Mac OS X]) _LT_DECL([], [OTOOL64], [1], [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4]) AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod], [lt_cv_apple_cc_single_mod=no if test -z "${LT_MULTI_MODULE}"; then # By default we will add the -single_module flag. You can override # by either setting the environment variable LT_MULTI_MODULE # non-empty at configure time, or by adding -multi_module to the # link flags. rm -rf libconftest.dylib* echo "int foo(void){return 1;}" > conftest.c echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c 2>conftest.err _lt_result=$? # If there is a non-empty error log, and "single_module" # appears in it, assume the flag caused a linker warning if test -s conftest.err && $GREP single_module conftest.err; then cat conftest.err >&AS_MESSAGE_LOG_FD # Otherwise, if the output was created with a 0 exit code from # the compiler, it worked. elif test -f libconftest.dylib && test $_lt_result -eq 0; then lt_cv_apple_cc_single_mod=yes else cat conftest.err >&AS_MESSAGE_LOG_FD fi rm -rf libconftest.dylib* rm -f conftest.* fi]) AC_CACHE_CHECK([for -exported_symbols_list linker flag], [lt_cv_ld_exported_symbols_list], [lt_cv_ld_exported_symbols_list=no save_LDFLAGS=$LDFLAGS echo "_main" > conftest.sym LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], [lt_cv_ld_exported_symbols_list=yes], [lt_cv_ld_exported_symbols_list=no]) LDFLAGS="$save_LDFLAGS" ]) AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load], [lt_cv_ld_force_load=no cat > conftest.c << _LT_EOF int forced_loaded() { return 2;} _LT_EOF echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&AS_MESSAGE_LOG_FD $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD cat > conftest.c << _LT_EOF int main() { return 0;} _LT_EOF echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&AS_MESSAGE_LOG_FD $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err _lt_result=$? 
if test -s conftest.err && $GREP force_load conftest.err; then cat conftest.err >&AS_MESSAGE_LOG_FD elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then lt_cv_ld_force_load=yes else cat conftest.err >&AS_MESSAGE_LOG_FD fi rm -f conftest.err libconftest.a conftest conftest.c rm -rf conftest.dSYM ]) case $host_os in rhapsody* | darwin1.[[012]]) _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; darwin1.*) _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; darwin*) # darwin 5.x on # if running on 10.5 or later, the deployment target defaults # to the OS version, if on x86, and 10.4, the deployment # target defaults to 10.4. Don't you love it? case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in 10.0,*86*-darwin8*|10.0,*-darwin[[91]]*) _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; 10.[[012]]*) _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; 10.*) _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; esac ;; esac if test "$lt_cv_apple_cc_single_mod" = "yes"; then _lt_dar_single_mod='$single_module' fi if test "$lt_cv_ld_exported_symbols_list" = "yes"; then _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' else _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' fi if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then _lt_dsymutil='~$DSYMUTIL $lib || :' else _lt_dsymutil= fi ;; esac ]) # _LT_DARWIN_LINKER_FEATURES([TAG]) # --------------------------------- # Checks for linker and compiler features on darwin m4_defun([_LT_DARWIN_LINKER_FEATURES], [ m4_require([_LT_REQUIRED_DARWIN_CHECKS]) _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported if test "$lt_cv_ld_force_load" = "yes"; then _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' m4_case([$1], [F77], [_LT_TAGVAR(compiler_needs_object, $1)=yes], [FC], [_LT_TAGVAR(compiler_needs_object, $1)=yes]) else _LT_TAGVAR(whole_archive_flag_spec, $1)='' fi _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined" case $cc_basename in ifort*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test "$_lt_dar_can_shared" = "yes"; then output_verbose_link_cmd=func_echo_all _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" m4_if([$1], [CXX], [ if test "$lt_cv_apple_cc_single_mod" != "yes"; then _LT_TAGVAR(archive_cmds, 
$1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}" _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}" fi ],[]) else _LT_TAGVAR(ld_shlibs, $1)=no fi ]) # _LT_SYS_MODULE_PATH_AIX([TAGNAME]) # ---------------------------------- # Links a minimal program and checks the executable # for the system default hardcoded library path. In most cases, # this is /usr/lib:/lib, but when the MPI compilers are used # the location of the communication and MPI libs are included too. # If we don't find anything, use the default library path according # to the aix ld manual. # Store the results from the different compilers for each TAGNAME. # Allow to override them for all tags through lt_cv_aix_libpath. m4_defun([_LT_SYS_MODULE_PATH_AIX], [m4_require([_LT_DECL_SED])dnl if test "${lt_cv_aix_libpath+set}" = set; then aix_libpath=$lt_cv_aix_libpath else AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])], [AC_LINK_IFELSE([AC_LANG_PROGRAM],[ lt_aix_libpath_sed='[ /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }]' _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi],[]) if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib" fi ]) aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1]) fi ])# _LT_SYS_MODULE_PATH_AIX # _LT_SHELL_INIT(ARG) # ------------------- m4_define([_LT_SHELL_INIT], [m4_divert_text([M4SH-INIT], [$1 ])])# _LT_SHELL_INIT # _LT_PROG_ECHO_BACKSLASH # ----------------------- # Find how we can fake an echo command that does not interpret backslash. # In particular, with Autoconf 2.60 or later we add some code to the start # of the generated configure script which will find a shell with a builtin # printf (which we can use as an echo command). m4_defun([_LT_PROG_ECHO_BACKSLASH], [ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO AC_MSG_CHECKING([how to print strings]) # Test print first, because it will be a builtin if present. if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='print -r --' elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='printf %s\n' else # Use this function as a fallback that always works. func_fallback_echo () { eval 'cat <<_LTECHO_EOF $[]1 _LTECHO_EOF' } ECHO='func_fallback_echo' fi # func_echo_all arg... # Invoke $ECHO with all args, space-separated. 
func_echo_all () { $ECHO "$*" } case "$ECHO" in printf*) AC_MSG_RESULT([printf]) ;; print*) AC_MSG_RESULT([print -r]) ;; *) AC_MSG_RESULT([cat]) ;; esac m4_ifdef([_AS_DETECT_SUGGESTED], [_AS_DETECT_SUGGESTED([ test -n "${ZSH_VERSION+set}${BASH_VERSION+set}" || ( ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO PATH=/empty FPATH=/empty; export PATH FPATH test "X`printf %s $ECHO`" = "X$ECHO" \ || test "X`print -r -- $ECHO`" = "X$ECHO" )])]) _LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts]) _LT_DECL([], [ECHO], [1], [An echo program that protects backslashes]) ])# _LT_PROG_ECHO_BACKSLASH # _LT_WITH_SYSROOT # ---------------- AC_DEFUN([_LT_WITH_SYSROOT], [AC_MSG_CHECKING([for sysroot]) AC_ARG_WITH([sysroot], [ --with-sysroot[=DIR] Search for dependent libraries within DIR (or the compiler's sysroot if not specified).], [], [with_sysroot=no]) dnl lt_sysroot will always be passed unquoted. We quote it here dnl in case the user passed a directory name. lt_sysroot= case ${with_sysroot} in #( yes) if test "$GCC" = yes; then lt_sysroot=`$CC --print-sysroot 2>/dev/null` fi ;; #( /*) lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"` ;; #( no|'') ;; #( *) AC_MSG_RESULT([${with_sysroot}]) AC_MSG_ERROR([The sysroot must be an absolute path.]) ;; esac AC_MSG_RESULT([${lt_sysroot:-no}]) _LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl [dependent libraries, and in which our libraries should be installed.])]) # _LT_ENABLE_LOCK # --------------- m4_defun([_LT_ENABLE_LOCK], [AC_ARG_ENABLE([libtool-lock], [AS_HELP_STRING([--disable-libtool-lock], [avoid locking (might break parallel builds)])]) test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes # Some flags need to be propagated to the compiler or linker for good # libtool support. case $host in ia64-*-hpux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.$ac_objext` in *ELF-32*) HPUX_IA64_MODE="32" ;; *ELF-64*) HPUX_IA64_MODE="64" ;; esac fi rm -rf conftest* ;; *-*-irix6*) # Find out which ABI we are using. echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then if test "$lt_cv_prog_gnu_ld" = yes; then case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -melf32bsmip" ;; *N32*) LD="${LD-ld} -melf32bmipn32" ;; *64-bit*) LD="${LD-ld} -melf64bmip" ;; esac else case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -32" ;; *N32*) LD="${LD-ld} -n32" ;; *64-bit*) LD="${LD-ld} -64" ;; esac fi fi rm -rf conftest* ;; x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ s390*-*linux*|s390*-*tpf*|sparc*-*linux*) # Find out which ABI we are using. 
echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.o` in *32-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_i386_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_i386" ;; ppc64-*linux*|powerpc64-*linux*) LD="${LD-ld} -m elf32ppclinux" ;; s390x-*linux*) LD="${LD-ld} -m elf_s390" ;; sparc64-*linux*) LD="${LD-ld} -m elf32_sparc" ;; esac ;; *64-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_x86_64_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_x86_64" ;; ppc*-*linux*|powerpc*-*linux*) LD="${LD-ld} -m elf64ppc" ;; s390*-*linux*|s390*-*tpf*) LD="${LD-ld} -m elf64_s390" ;; sparc*-*linux*) LD="${LD-ld} -m elf64_sparc" ;; esac ;; esac fi rm -rf conftest* ;; *-*-sco3.2v5*) # On SCO OpenServer 5, we need -belf to get full-featured binaries. SAVE_CFLAGS="$CFLAGS" CFLAGS="$CFLAGS -belf" AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf, [AC_LANG_PUSH(C) AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no]) AC_LANG_POP]) if test x"$lt_cv_cc_needs_belf" != x"yes"; then # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf CFLAGS="$SAVE_CFLAGS" fi ;; *-*solaris*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.o` in *64-bit*) case $lt_cv_prog_gnu_ld in yes*) case $host in i?86-*-solaris*) LD="${LD-ld} -m elf_x86_64" ;; sparc*-*-solaris*) LD="${LD-ld} -m elf64_sparc" ;; esac # GNU ld 2.21 introduced _sol2 emulations. Use them if available. if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then LD="${LD-ld}_sol2" fi ;; *) if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then LD="${LD-ld} -64" fi ;; esac ;; esac fi rm -rf conftest* ;; esac need_locks="$enable_libtool_lock" ])# _LT_ENABLE_LOCK # _LT_PROG_AR # ----------- m4_defun([_LT_PROG_AR], [AC_CHECK_TOOLS(AR, [ar], false) : ${AR=ar} : ${AR_FLAGS=cru} _LT_DECL([], [AR], [1], [The archiver]) _LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive]) AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file], [lt_cv_ar_at_file=no AC_COMPILE_IFELSE([AC_LANG_PROGRAM], [echo conftest.$ac_objext > conftest.lst lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD' AC_TRY_EVAL([lt_ar_try]) if test "$ac_status" -eq 0; then # Ensure the archiver fails upon bogus file names. rm -f conftest.$ac_objext libconftest.a AC_TRY_EVAL([lt_ar_try]) if test "$ac_status" -ne 0; then lt_cv_ar_at_file=@ fi fi rm -f conftest.* libconftest.a ]) ]) if test "x$lt_cv_ar_at_file" = xno; then archiver_list_spec= else archiver_list_spec=$lt_cv_ar_at_file fi _LT_DECL([], [archiver_list_spec], [1], [How to feed a file listing to the archiver]) ])# _LT_PROG_AR # _LT_CMD_OLD_ARCHIVE # ------------------- m4_defun([_LT_CMD_OLD_ARCHIVE], [_LT_PROG_AR AC_CHECK_TOOL(STRIP, strip, :) test -z "$STRIP" && STRIP=: _LT_DECL([], [STRIP], [1], [A symbol stripping program]) AC_CHECK_TOOL(RANLIB, ranlib, :) test -z "$RANLIB" && RANLIB=: _LT_DECL([], [RANLIB], [1], [Commands used to install an old-style archive]) # Determine commands to create old-style static archives. 
old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' old_postinstall_cmds='chmod 644 $oldlib' old_postuninstall_cmds= if test -n "$RANLIB"; then case $host_os in openbsd*) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" ;; *) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib" ;; esac old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib" fi case $host_os in darwin*) lock_old_archive_extraction=yes ;; *) lock_old_archive_extraction=no ;; esac _LT_DECL([], [old_postinstall_cmds], [2]) _LT_DECL([], [old_postuninstall_cmds], [2]) _LT_TAGDECL([], [old_archive_cmds], [2], [Commands used to build an old-style archive]) _LT_DECL([], [lock_old_archive_extraction], [0], [Whether to use a lock for old archive extraction]) ])# _LT_CMD_OLD_ARCHIVE # _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, # [OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE]) # ---------------------------------------------------------------- # Check whether the given compiler option works AC_DEFUN([_LT_COMPILER_OPTION], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_SED])dnl AC_CACHE_CHECK([$1], [$2], [$2=no m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4]) echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$3" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&AS_MESSAGE_LOG_FD echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then $2=yes fi fi $RM conftest* ]) if test x"[$]$2" = xyes; then m4_if([$5], , :, [$5]) else m4_if([$6], , :, [$6]) fi ])# _LT_COMPILER_OPTION # Old name: AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], []) # _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, # [ACTION-SUCCESS], [ACTION-FAILURE]) # ---------------------------------------------------- # Check whether the given linker option works AC_DEFUN([_LT_LINKER_OPTION], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_SED])dnl AC_CACHE_CHECK([$1], [$2], [$2=no save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS $3" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. 
cat conftest.err 1>&AS_MESSAGE_LOG_FD $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then $2=yes fi else $2=yes fi fi $RM -r conftest* LDFLAGS="$save_LDFLAGS" ]) if test x"[$]$2" = xyes; then m4_if([$4], , :, [$4]) else m4_if([$5], , :, [$5]) fi ])# _LT_LINKER_OPTION # Old name: AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], []) # LT_CMD_MAX_LEN #--------------- AC_DEFUN([LT_CMD_MAX_LEN], [AC_REQUIRE([AC_CANONICAL_HOST])dnl # find the maximum length of command line arguments AC_MSG_CHECKING([the maximum length of command line arguments]) AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl i=0 teststring="ABCD" case $build_os in msdosdjgpp*) # On DJGPP, this test can blow up pretty badly due to problems in libc # (any single argument exceeding 2000 bytes causes a buffer overrun # during glob expansion). Even if it were fixed, the result of this # check would be larger than it should be. lt_cv_sys_max_cmd_len=12288; # 12K is about right ;; gnu*) # Under GNU Hurd, this test is not required because there is # no limit to the length of command line arguments. # Libtool will interpret -1 as no limit whatsoever lt_cv_sys_max_cmd_len=-1; ;; cygwin* | mingw* | cegcc*) # On Win9x/ME, this test blows up -- it succeeds, but takes # about 5 minutes as the teststring grows exponentially. # Worse, since 9x/ME are not pre-emptively multitasking, # you end up with a "frozen" computer, even though with patience # the test eventually succeeds (with a max line length of 256k). # Instead, let's just punt: use the minimum linelength reported by # all of the supported platforms: 8192 (on NT/2K/XP). lt_cv_sys_max_cmd_len=8192; ;; mint*) # On MiNT this can take a long time and run out of memory. lt_cv_sys_max_cmd_len=8192; ;; amigaos*) # On AmigaOS with pdksh, this test takes hours, literally. # So we just punt and use a minimum line length of 8192. lt_cv_sys_max_cmd_len=8192; ;; netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) # This has been around since 386BSD, at least. Likely further. if test -x /sbin/sysctl; then lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` elif test -x /usr/sbin/sysctl; then lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` else lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs fi # And add a safety zone lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` ;; interix*) # We know the value 262144 and hardcode it with a safety zone (like BSD) lt_cv_sys_max_cmd_len=196608 ;; os2*) # The test takes a long time on OS/2. lt_cv_sys_max_cmd_len=8192 ;; osf*) # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not # nice to cause kernel panics so lets avoid the loop below. # First set a reasonable default. 
lt_cv_sys_max_cmd_len=16384 # if test -x /sbin/sysconfig; then case `/sbin/sysconfig -q proc exec_disable_arg_limit` in *1*) lt_cv_sys_max_cmd_len=-1 ;; esac fi ;; sco3.2v5*) lt_cv_sys_max_cmd_len=102400 ;; sysv5* | sco5v6* | sysv4.2uw2*) kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` if test -n "$kargmax"; then lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[ ]]//'` else lt_cv_sys_max_cmd_len=32768 fi ;; *) lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` if test -n "$lt_cv_sys_max_cmd_len"; then lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` else # Make teststring a little bigger before we do anything with it. # a 1K string should be a reasonable start. for i in 1 2 3 4 5 6 7 8 ; do teststring=$teststring$teststring done SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} # If test is not a shell built-in, we'll probably end up computing a # maximum length that is only half of the actual maximum length, but # we can't tell. while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \ = "X$teststring$teststring"; } >/dev/null 2>&1 && test $i != 17 # 1/2 MB should be enough do i=`expr $i + 1` teststring=$teststring$teststring done # Only check the string length outside the loop. lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` teststring= # Add a significant safety factor because C++ compilers can tack on # massive amounts of additional arguments before passing them to the # linker. It appears as though 1/2 is a usable value. lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` fi ;; esac ]) if test -n $lt_cv_sys_max_cmd_len ; then AC_MSG_RESULT($lt_cv_sys_max_cmd_len) else AC_MSG_RESULT(none) fi max_cmd_len=$lt_cv_sys_max_cmd_len _LT_DECL([], [max_cmd_len], [0], [What is the maximum length of a command?]) ])# LT_CMD_MAX_LEN # Old name: AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], []) # _LT_HEADER_DLFCN # ---------------- m4_defun([_LT_HEADER_DLFCN], [AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl ])# _LT_HEADER_DLFCN # _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE, # ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING) # ---------------------------------------------------------------- m4_defun([_LT_TRY_DLOPEN_SELF], [m4_require([_LT_HEADER_DLFCN])dnl if test "$cross_compiling" = yes; then : [$4] else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF [#line $LINENO "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif /* When -fvisbility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. 
*/ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) int fnord () __attribute__((visibility("default"))); #endif int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else { if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; else puts (dlerror ()); } /* dlclose (self); */ } else puts (dlerror ()); return status; }] _LT_EOF if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null lt_status=$? case x$lt_status in x$lt_dlno_uscore) $1 ;; x$lt_dlneed_uscore) $2 ;; x$lt_dlunknown|x*) $3 ;; esac else : # compilation failed $3 fi fi rm -fr conftest* ])# _LT_TRY_DLOPEN_SELF # LT_SYS_DLOPEN_SELF # ------------------ AC_DEFUN([LT_SYS_DLOPEN_SELF], [m4_require([_LT_HEADER_DLFCN])dnl if test "x$enable_dlopen" != xyes; then enable_dlopen=unknown enable_dlopen_self=unknown enable_dlopen_self_static=unknown else lt_cv_dlopen=no lt_cv_dlopen_libs= case $host_os in beos*) lt_cv_dlopen="load_add_on" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ;; mingw* | pw32* | cegcc*) lt_cv_dlopen="LoadLibrary" lt_cv_dlopen_libs= ;; cygwin*) lt_cv_dlopen="dlopen" lt_cv_dlopen_libs= ;; darwin*) # if libdl is installed we need to link against it AC_CHECK_LIB([dl], [dlopen], [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[ lt_cv_dlopen="dyld" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ]) ;; *) AC_CHECK_FUNC([shl_load], [lt_cv_dlopen="shl_load"], [AC_CHECK_LIB([dld], [shl_load], [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"], [AC_CHECK_FUNC([dlopen], [lt_cv_dlopen="dlopen"], [AC_CHECK_LIB([dl], [dlopen], [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"], [AC_CHECK_LIB([svld], [dlopen], [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"], [AC_CHECK_LIB([dld], [dld_link], [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"]) ]) ]) ]) ]) ]) ;; esac if test "x$lt_cv_dlopen" != xno; then enable_dlopen=yes else enable_dlopen=no fi case $lt_cv_dlopen in dlopen) save_CPPFLAGS="$CPPFLAGS" test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" save_LDFLAGS="$LDFLAGS" wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" save_LIBS="$LIBS" LIBS="$lt_cv_dlopen_libs $LIBS" AC_CACHE_CHECK([whether a program can dlopen itself], lt_cv_dlopen_self, [dnl _LT_TRY_DLOPEN_SELF( lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes, lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross) ]) if test "x$lt_cv_dlopen_self" = xyes; then wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" AC_CACHE_CHECK([whether a statically linked program can dlopen itself], lt_cv_dlopen_self_static, [dnl _LT_TRY_DLOPEN_SELF( lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=no, lt_cv_dlopen_self_static=cross) ]) fi CPPFLAGS="$save_CPPFLAGS" LDFLAGS="$save_LDFLAGS" LIBS="$save_LIBS" ;; esac case $lt_cv_dlopen_self in yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; *) enable_dlopen_self=unknown ;; esac case $lt_cv_dlopen_self_static in yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; *) enable_dlopen_self_static=unknown ;; esac fi _LT_DECL([dlopen_support], [enable_dlopen], [0], [Whether dlopen is supported]) _LT_DECL([dlopen_self], [enable_dlopen_self], [0], [Whether dlopen of programs is supported]) _LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0], [Whether dlopen of statically linked programs is supported]) ])# 
LT_SYS_DLOPEN_SELF # Old name: AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], []) # _LT_COMPILER_C_O([TAGNAME]) # --------------------------- # Check to see if options -c and -o are simultaneously supported by compiler. # This macro does not hard code the compiler like AC_PROG_CC_C_O. m4_defun([_LT_COMPILER_C_O], [m4_require([_LT_DECL_SED])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_TAG_COMPILER])dnl AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext], [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)], [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&AS_MESSAGE_LOG_FD echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes fi fi chmod u+w . 2>&AS_MESSAGE_LOG_FD $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. 
$RM -r conftest $RM conftest* ]) _LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1], [Does compiler simultaneously support -c and -o options?]) ])# _LT_COMPILER_C_O # _LT_COMPILER_FILE_LOCKS([TAGNAME]) # ---------------------------------- # Check to see if we can do hard links to lock some files if needed m4_defun([_LT_COMPILER_FILE_LOCKS], [m4_require([_LT_ENABLE_LOCK])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl _LT_COMPILER_C_O([$1]) hard_links="nottested" if test "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user AC_MSG_CHECKING([if we can lock with hard links]) hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no AC_MSG_RESULT([$hard_links]) if test "$hard_links" = no; then AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe]) need_locks=warn fi else need_locks=no fi _LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?]) ])# _LT_COMPILER_FILE_LOCKS # _LT_CHECK_OBJDIR # ---------------- m4_defun([_LT_CHECK_OBJDIR], [AC_CACHE_CHECK([for objdir], [lt_cv_objdir], [rm -f .libs 2>/dev/null mkdir .libs 2>/dev/null if test -d .libs; then lt_cv_objdir=.libs else # MS-DOS does not allow filenames that begin with a dot. lt_cv_objdir=_libs fi rmdir .libs 2>/dev/null]) objdir=$lt_cv_objdir _LT_DECL([], [objdir], [0], [The name of the directory that contains temporary libtool files])dnl m4_pattern_allow([LT_OBJDIR])dnl AC_DEFINE_UNQUOTED(LT_OBJDIR, "$lt_cv_objdir/", [Define to the sub-directory in which libtool stores uninstalled libraries.]) ])# _LT_CHECK_OBJDIR # _LT_LINKER_HARDCODE_LIBPATH([TAGNAME]) # -------------------------------------- # Check hardcoding attributes. m4_defun([_LT_LINKER_HARDCODE_LIBPATH], [AC_MSG_CHECKING([how to hardcode library paths into programs]) _LT_TAGVAR(hardcode_action, $1)= if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" || test -n "$_LT_TAGVAR(runpath_var, $1)" || test "X$_LT_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then # We can hardcode non-existent directories. if test "$_LT_TAGVAR(hardcode_direct, $1)" != no && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" != no && test "$_LT_TAGVAR(hardcode_minus_L, $1)" != no; then # Linking always hardcodes the temporary library directory. _LT_TAGVAR(hardcode_action, $1)=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. _LT_TAGVAR(hardcode_action, $1)=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. 
_LT_TAGVAR(hardcode_action, $1)=unsupported fi AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)]) if test "$_LT_TAGVAR(hardcode_action, $1)" = relink || test "$_LT_TAGVAR(inherit_rpath, $1)" = yes; then # Fast installation is not supported enable_fast_install=no elif test "$shlibpath_overrides_runpath" = yes || test "$enable_shared" = no; then # Fast installation is not necessary enable_fast_install=needless fi _LT_TAGDECL([], [hardcode_action], [0], [How to hardcode a shared library path into an executable]) ])# _LT_LINKER_HARDCODE_LIBPATH # _LT_CMD_STRIPLIB # ---------------- m4_defun([_LT_CMD_STRIPLIB], [m4_require([_LT_DECL_EGREP]) striplib= old_striplib= AC_MSG_CHECKING([whether stripping libraries is possible]) if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" AC_MSG_RESULT([yes]) else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP" ; then striplib="$STRIP -x" old_striplib="$STRIP -S" AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) fi ;; *) AC_MSG_RESULT([no]) ;; esac fi _LT_DECL([], [old_striplib], [1], [Commands to strip libraries]) _LT_DECL([], [striplib], [1]) ])# _LT_CMD_STRIPLIB # _LT_SYS_DYNAMIC_LINKER([TAG]) # ----------------------------- # PORTME Fill in your ld.so characteristics m4_defun([_LT_SYS_DYNAMIC_LINKER], [AC_REQUIRE([AC_CANONICAL_HOST])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_OBJDUMP])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_CHECK_SHELL_FEATURES])dnl AC_MSG_CHECKING([dynamic linker characteristics]) m4_if([$1], [], [ if test "$GCC" = yes; then case $host_os in darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; *) lt_awk_arg="/^libraries:/" ;; esac case $host_os in mingw* | cegcc*) lt_sed_strip_eq="s,=\([[A-Za-z]]:\),\1,g" ;; *) lt_sed_strip_eq="s,=/,/,g" ;; esac lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` case $lt_search_path_spec in *\;*) # if the path contains ";" then we assume it to be the separator # otherwise default to the standard path separator (i.e. ":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'` ;; *) lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"` ;; esac # Ok, now we have the path, separated by spaces, we can step through it # and add multilib dir if necessary. 
lt_tmp_lt_search_path_spec= lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` for lt_sys_path in $lt_search_path_spec; do if test -d "$lt_sys_path/$lt_multi_os_dir"; then lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" else test -d "$lt_sys_path" && \ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" fi done lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' BEGIN {RS=" "; FS="/|\n";} { lt_foo=""; lt_count=0; for (lt_i = NF; lt_i > 0; lt_i--) { if ($lt_i != "" && $lt_i != ".") { if ($lt_i == "..") { lt_count++; } else { if (lt_count == 0) { lt_foo="/" $lt_i lt_foo; } else { lt_count--; } } } } if (lt_foo != "") { lt_freq[[lt_foo]]++; } if (lt_freq[[lt_foo]] == 1) { print lt_foo; } }'` # AWK program above erroneously prepends '/' to C:/dos/paths # for these hosts. case $host_os in mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ $SED 's,/\([[A-Za-z]]:\),\1,g'` ;; esac sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi]) library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=".so" postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='${libname}${release}${shared_ext}$major' ;; aix[[4-9]]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no hardcode_into_libs=yes if test "$host_cpu" = ia64; then # AIX 5 supports IA64 library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line `#! .'. This would cause the generated library to # depend on `.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[[01]] | aix4.[[01]].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # AIX (on Power*) has no versioning support, so currently we can not hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. if test "$aix_use_runtimelinking" = yes; then # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' else # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. 
library_names_spec='${libname}${release}.a $libname.a' soname_spec='${libname}${release}${shared_ext}$major' fi shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='${libname}${shared_ext}' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[[45]]*) version_type=linux # correct to gnu/linux during the next big refactor need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=".dll" need_version=no need_lib_prefix=no case $GCC,$cc_basename in yes,*) # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' m4_if([$1], [],[ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"]) ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' ;; esac dynamic_linker='Win32 ld.exe' ;; *,cl*) # Native MSVC libname_spec='$name' soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' library_names_spec='${libname}.dll.lib' case $build_os in mingw*) sys_lib_search_path_spec= lt_save_ifs=$IFS IFS=';' for lt_path in $LIB do IFS=$lt_save_ifs # Let DOS variable expansion print the short 8.3 style file name. 
lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" done IFS=$lt_save_ifs # Convert to MSYS style. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'` ;; cygwin*) # Convert to unix form, then to dos form, then back to unix form # but this time dos style (no spaces!) so that the unix form looks # like /cygdrive/c/PROGRA~1:/cygdr... sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ;; *) sys_lib_search_path_spec="$LIB" if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then # It is most probably a Windows format PATH. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi # FIXME: find the short name or the path components, as spaces are # common. (e.g. "Program Files" -> "PROGRA~1") ;; esac # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes dynamic_linker='Win32 link.exe' ;; *) # Assume MSVC wrapper library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib' dynamic_linker='Win32 ld.exe' ;; esac # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' m4_if([$1], [],[ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"]) sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. 
if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[[23]].*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2.*) shlibpath_overrides_runpath=yes ;; freebsd3.[[01]]* | freebsdelf3.[[01]]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \ freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; gnu*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; haiku*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no dynamic_linker="$host_os runtime_loader" library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LIBRARY_PATH shlibpath_overrides_runpath=yes sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' if test "X$HPUX_IA64_MODE" = X32; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" fi sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555, ... postinstall_cmds='chmod 555 $lib' # or fails outright, so override atomically: install_override_mode=555 ;; interix[[3-9]]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test "$lt_cv_prog_gnu_ld" = yes; then version_type=linux # correct to gnu/linux during the next big refactor else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; # This must be glibc/ELF. 
linux* | k*bsd*-gnu | kopensolaris*-gnu) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH AC_CACHE_VAL([lt_cv_shlibpath_overrides_runpath], [lt_cv_shlibpath_overrides_runpath=no save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \ LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\"" AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null], [lt_cv_shlibpath_overrides_runpath=yes])]) LDFLAGS=$save_LDFLAGS libdir=$save_libdir ]) shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes # Add ABI-specific directories to the system library path. sys_lib_dlsearch_path_spec="/lib64 /usr/lib64 /lib /usr/lib" # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. 
dynamic_linker='GNU/Linux ld.so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd*) version_type=sunos sys_lib_dlsearch_path_spec="/usr/lib" need_lib_prefix=no # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. case $host_os in openbsd3.3 | openbsd3.3.*) need_version=yes ;; *) need_version=no ;; esac library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then case $host_os in openbsd2.[[89]] | openbsd2.[[89]].*) shlibpath_overrides_runpath=no ;; *) shlibpath_overrides_runpath=yes ;; esac else shlibpath_overrides_runpath=yes fi ;; os2*) libname_spec='$name' shrext_cmds=".dll" need_lib_prefix=no library_names_spec='$libname${shared_ext} $libname.a' dynamic_linker='OS/2 ld.exe' shlibpath_var=LIBPATH ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test "$with_gnu_ld" = yes; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux # correct to gnu/linux during the next big refactor 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec ;then version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' soname_spec='$libname${shared_ext}.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=freebsd-elf need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test "$with_gnu_ld" = yes; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac AC_MSG_RESULT([$dynamic_linker]) test "$dynamic_linker" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test "$GCC" = yes; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" fi if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" fi _LT_DECL([], [variables_saved_for_relink], [1], [Variables whose values should be saved in libtool wrapper scripts and restored at link time]) _LT_DECL([], [need_lib_prefix], [0], [Do we need the "lib" prefix for modules?]) _LT_DECL([], [need_version], [0], [Do we need a version for libraries?]) _LT_DECL([], [version_type], [0], [Library versioning type]) _LT_DECL([], [runpath_var], [0], [Shared library runtime path variable]) _LT_DECL([], [shlibpath_var], [0],[Shared library path variable]) _LT_DECL([], [shlibpath_overrides_runpath], [0], [Is shlibpath searched before the hard-coded library search path?]) _LT_DECL([], [libname_spec], [1], [Format of library name prefix]) _LT_DECL([], [library_names_spec], [1], [[List of archive names. First name is the real one, the rest are links. 
The last name is the one that the linker finds with -lNAME]]) _LT_DECL([], [soname_spec], [1], [[The coded name of the library, if different from the real name]]) _LT_DECL([], [install_override_mode], [1], [Permission mode override for installation of shared libraries]) _LT_DECL([], [postinstall_cmds], [2], [Command to use after installation of a shared archive]) _LT_DECL([], [postuninstall_cmds], [2], [Command to use after uninstallation of a shared archive]) _LT_DECL([], [finish_cmds], [2], [Commands used to finish a libtool library installation in a directory]) _LT_DECL([], [finish_eval], [1], [[As "finish_cmds", except a single script fragment to be evaled but not shown]]) _LT_DECL([], [hardcode_into_libs], [0], [Whether we should hardcode library paths into libraries]) _LT_DECL([], [sys_lib_search_path_spec], [2], [Compile-time system search path for libraries]) _LT_DECL([], [sys_lib_dlsearch_path_spec], [2], [Run-time system search path for libraries]) ])# _LT_SYS_DYNAMIC_LINKER # _LT_PATH_TOOL_PREFIX(TOOL) # -------------------------- # find a file program which can recognize shared library AC_DEFUN([_LT_PATH_TOOL_PREFIX], [m4_require([_LT_DECL_EGREP])dnl AC_MSG_CHECKING([for $1]) AC_CACHE_VAL(lt_cv_path_MAGIC_CMD, [case $MAGIC_CMD in [[\\/*] | ?:[\\/]*]) lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD="$MAGIC_CMD" lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR dnl $ac_dummy forces splitting on constant user-supplied paths. dnl POSIX.2 word splitting is done only on the output of word expansions, dnl not every word. This closes a longstanding sh security hole. ac_dummy="m4_if([$2], , $PATH, [$2])" for ac_dir in $ac_dummy; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f $ac_dir/$1; then lt_cv_path_MAGIC_CMD="$ac_dir/$1" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. 
Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS="$lt_save_ifs" MAGIC_CMD="$lt_save_MAGIC_CMD" ;; esac]) MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if test -n "$MAGIC_CMD"; then AC_MSG_RESULT($MAGIC_CMD) else AC_MSG_RESULT(no) fi _LT_DECL([], [MAGIC_CMD], [0], [Used to examine libraries when file_magic_cmd begins with "file"])dnl ])# _LT_PATH_TOOL_PREFIX # Old name: AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], []) # _LT_PATH_MAGIC # -------------- # find a file program which can recognize a shared library m4_defun([_LT_PATH_MAGIC], [_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH) if test -z "$lt_cv_path_MAGIC_CMD"; then if test -n "$ac_tool_prefix"; then _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH) else MAGIC_CMD=: fi fi ])# _LT_PATH_MAGIC # LT_PATH_LD # ---------- # find the pathname to the GNU or non-GNU linker AC_DEFUN([LT_PATH_LD], [AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_CANONICAL_BUILD])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_PROG_ECHO_BACKSLASH])dnl AC_ARG_WITH([gnu-ld], [AS_HELP_STRING([--with-gnu-ld], [assume the C compiler uses GNU ld @<:@default=no@:>@])], [test "$withval" = no || with_gnu_ld=yes], [with_gnu_ld=no])dnl ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. AC_MSG_CHECKING([for ld used by $CC]) case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [[\\/]]* | ?:[[\\/]]*) re_direlt='/[[^/]][[^/]]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then AC_MSG_CHECKING([for GNU ld]) else AC_MSG_CHECKING([for non-GNU ld]) fi AC_CACHE_VAL(lt_cv_path_LD, [if test -z "$LD"; then lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 &1 /dev/null 2>&1; then lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' else # Keep this pattern in sync with the one in func_win32_libid. lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' lt_cv_file_magic_cmd='$OBJDUMP -f' fi ;; cegcc*) # use the weaker test based on 'objdump'. See mingw*. lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' 
lt_cv_file_magic_cmd='$OBJDUMP -f' ;; darwin* | rhapsody*) lt_cv_deplibs_check_method=pass_all ;; freebsd* | dragonfly*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then case $host_cpu in i*86 ) # Not sure whether the presence of OpenBSD here was a mistake. # Let's accept both of them until this is cleared up. lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` ;; esac else lt_cv_deplibs_check_method=pass_all fi ;; gnu*) lt_cv_deplibs_check_method=pass_all ;; haiku*) lt_cv_deplibs_check_method=pass_all ;; hpux10.20* | hpux11*) lt_cv_file_magic_cmd=/usr/bin/file case $host_cpu in ia64*) lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64' lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so ;; hppa*64*) [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]'] lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl ;; *) lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]]\.[[0-9]]) shared library' lt_cv_file_magic_test_file=/usr/lib/libc.sl ;; esac ;; interix[[3-9]]*) # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$' ;; irix5* | irix6* | nonstopux*) case $LD in *-32|*"-32 ") libmagic=32-bit;; *-n32|*"-n32 ") libmagic=N32;; *-64|*"-64 ") libmagic=64-bit;; *) libmagic=never-match;; esac lt_cv_deplibs_check_method=pass_all ;; # This must be glibc/ELF. linux* | k*bsd*-gnu | kopensolaris*-gnu) lt_cv_deplibs_check_method=pass_all ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$' fi ;; newos6*) lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=/usr/lib/libnls.so ;; *nto* | *qnx*) lt_cv_deplibs_check_method=pass_all ;; openbsd*) if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' fi ;; osf3* | osf4* | osf5*) lt_cv_deplibs_check_method=pass_all ;; rdos*) lt_cv_deplibs_check_method=pass_all ;; solaris*) lt_cv_deplibs_check_method=pass_all ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) lt_cv_deplibs_check_method=pass_all ;; sysv4 | sysv4.3*) case $host_vendor in motorola) lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]' lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` ;; ncr) lt_cv_deplibs_check_method=pass_all ;; sequent) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )' ;; sni) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib" lt_cv_file_magic_test_file=/lib/libc.so ;; siemens) lt_cv_deplibs_check_method=pass_all ;; pc) lt_cv_deplibs_check_method=pass_all ;; esac ;; tpf*) 
lt_cv_deplibs_check_method=pass_all ;; esac ]) file_magic_glob= want_nocaseglob=no if test "$build" = "$host"; then case $host_os in mingw* | pw32*) if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then want_nocaseglob=yes else file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"` fi ;; esac fi file_magic_cmd=$lt_cv_file_magic_cmd deplibs_check_method=$lt_cv_deplibs_check_method test -z "$deplibs_check_method" && deplibs_check_method=unknown _LT_DECL([], [deplibs_check_method], [1], [Method to check whether dependent libraries are shared objects]) _LT_DECL([], [file_magic_cmd], [1], [Command to use when deplibs_check_method = "file_magic"]) _LT_DECL([], [file_magic_glob], [1], [How to find potential files when deplibs_check_method = "file_magic"]) _LT_DECL([], [want_nocaseglob], [1], [Find potential files using nocaseglob when deplibs_check_method = "file_magic"]) ])# _LT_CHECK_MAGIC_METHOD # LT_PATH_NM # ---------- # find the pathname to a BSD- or MS-compatible name lister AC_DEFUN([LT_PATH_NM], [AC_REQUIRE([AC_PROG_CC])dnl AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM, [if test -n "$NM"; then # Let the user override the test. lt_cv_path_NM="$NM" else lt_nm_to_check="${ac_tool_prefix}nm" if test -n "$ac_tool_prefix" && test "$build" = "$host"; then lt_nm_to_check="$lt_nm_to_check nm" fi for lt_tmp_nm in $lt_nm_to_check; do lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. tmp_nm="$ac_dir/$lt_tmp_nm" if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then # Check to see if the nm accepts a BSD-compat flag. # Adding the `sed 1q' prevents false positives on HP-UX, which says: # nm: unknown option "B" ignored # Tru64's nm complains that /dev/null is an invalid object file case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in */dev/null* | *'Invalid file or object type'*) lt_cv_path_NM="$tmp_nm -B" break ;; *) case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in */dev/null*) lt_cv_path_NM="$tmp_nm -p" break ;; *) lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but continue # so that we can try to find one that supports BSD flags ;; esac ;; esac fi done IFS="$lt_save_ifs" done : ${lt_cv_path_NM=no} fi]) if test "$lt_cv_path_NM" != "no"; then NM="$lt_cv_path_NM" else # Didn't find any BSD compatible name lister, look for dumpbin. if test -n "$DUMPBIN"; then : # Let the user override the test. 
else AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :) case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in *COFF*) DUMPBIN="$DUMPBIN -symbols" ;; *) DUMPBIN=: ;; esac fi AC_SUBST([DUMPBIN]) if test "$DUMPBIN" != ":"; then NM="$DUMPBIN" fi fi test -z "$NM" && NM=nm AC_SUBST([NM]) _LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface], [lt_cv_nm_interface="BSD nm" echo "int some_variable = 0;" > conftest.$ac_ext (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$ac_compile" 2>conftest.err) cat conftest.err >&AS_MESSAGE_LOG_FD (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD) (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) cat conftest.err >&AS_MESSAGE_LOG_FD (eval echo "\"\$as_me:$LINENO: output\"" >&AS_MESSAGE_LOG_FD) cat conftest.out >&AS_MESSAGE_LOG_FD if $GREP 'External.*some_variable' conftest.out > /dev/null; then lt_cv_nm_interface="MS dumpbin" fi rm -f conftest*]) ])# LT_PATH_NM # Old names: AU_ALIAS([AM_PROG_NM], [LT_PATH_NM]) AU_ALIAS([AC_PROG_NM], [LT_PATH_NM]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AM_PROG_NM], []) dnl AC_DEFUN([AC_PROG_NM], []) # _LT_CHECK_SHAREDLIB_FROM_LINKLIB # -------------------------------- # how to determine the name of the shared library # associated with a specific link library. # -- PORTME fill in with the dynamic library characteristics m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB], [m4_require([_LT_DECL_EGREP]) m4_require([_LT_DECL_OBJDUMP]) m4_require([_LT_DECL_DLLTOOL]) AC_CACHE_CHECK([how to associate runtime and link libraries], lt_cv_sharedlib_from_linklib_cmd, [lt_cv_sharedlib_from_linklib_cmd='unknown' case $host_os in cygwin* | mingw* | pw32* | cegcc*) # two different shell functions defined in ltmain.sh # decide which to use based on capabilities of $DLLTOOL case `$DLLTOOL --help 2>&1` in *--identify-strict*) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ;; *) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ;; esac ;; *) # fallback: assume linklib IS sharedlib lt_cv_sharedlib_from_linklib_cmd="$ECHO" ;; esac ]) sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO _LT_DECL([], [sharedlib_from_linklib_cmd], [1], [Command to associate shared and link libraries]) ])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB # _LT_PATH_MANIFEST_TOOL # ---------------------- # locate the manifest tool m4_defun([_LT_PATH_MANIFEST_TOOL], [AC_CHECK_TOOL(MANIFEST_TOOL, mt, :) test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool], [lt_cv_path_mainfest_tool=no echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD $MANIFEST_TOOL '-?' 
2>conftest.err > conftest.out cat conftest.err >&AS_MESSAGE_LOG_FD if $GREP 'Manifest Tool' conftest.out > /dev/null; then lt_cv_path_mainfest_tool=yes fi rm -f conftest*]) if test "x$lt_cv_path_mainfest_tool" != xyes; then MANIFEST_TOOL=: fi _LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl ])# _LT_PATH_MANIFEST_TOOL # LT_LIB_M # -------- # check for math library AC_DEFUN([LT_LIB_M], [AC_REQUIRE([AC_CANONICAL_HOST])dnl LIBM= case $host in *-*-beos* | *-*-cegcc* | *-*-cygwin* | *-*-haiku* | *-*-pw32* | *-*-darwin*) # These system don't have libm, or don't need it ;; *-ncr-sysv4.3*) AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw") AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm") ;; *) AC_CHECK_LIB(m, cos, LIBM="-lm") ;; esac AC_SUBST([LIBM]) ])# LT_LIB_M # Old name: AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_CHECK_LIBM], []) # _LT_COMPILER_NO_RTTI([TAGNAME]) # ------------------------------- m4_defun([_LT_COMPILER_NO_RTTI], [m4_require([_LT_TAG_COMPILER])dnl _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= if test "$GCC" = yes; then case $cc_basename in nvcc*) _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;; *) _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' ;; esac _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions], lt_cv_prog_compiler_rtti_exceptions, [-fno-rtti -fno-exceptions], [], [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"]) fi _LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1], [Compiler flag to turn off builtin functions]) ])# _LT_COMPILER_NO_RTTI # _LT_CMD_GLOBAL_SYMBOLS # ---------------------- m4_defun([_LT_CMD_GLOBAL_SYMBOLS], [AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([AC_PROG_AWK])dnl AC_REQUIRE([LT_PATH_NM])dnl AC_REQUIRE([LT_PATH_LD])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_TAG_COMPILER])dnl # Check for command to grab the raw symbol name followed by C symbol from nm. AC_MSG_CHECKING([command to parse $NM output from $compiler object]) AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe], [ # These are sane defaults that work on at least a few old systems. # [They come from Ultrix. What could be older than Ultrix?!! ;)] # Character class describing NM global symbol codes. symcode='[[BCDEGRST]]' # Regexp to match symbols that can be accessed directly from C. sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)' # Define system-specific variables. case $host_os in aix*) symcode='[[BCDT]]' ;; cygwin* | mingw* | pw32* | cegcc*) symcode='[[ABCDGISTW]]' ;; hpux*) if test "$host_cpu" = ia64; then symcode='[[ABCDEGRST]]' fi ;; irix* | nonstopux*) symcode='[[BCDEGRST]]' ;; osf*) symcode='[[BCDEGQRST]]' ;; solaris*) symcode='[[BDRT]]' ;; sco3.2v5*) symcode='[[DT]]' ;; sysv4.2uw2*) symcode='[[DT]]' ;; sysv5* | sco5v6* | unixware* | OpenUNIX*) symcode='[[ABDT]]' ;; sysv4) symcode='[[DFNSTU]]' ;; esac # If we're using GNU nm, then use its standard symbol codes. case `$NM -V 2>&1` in *GNU* | *'with BFD'*) symcode='[[ABCDGIRSTW]]' ;; esac # Transform an extracted symbol line into a proper C declaration. # Some systems (esp. on ia64) link data and code symbols differently, # so use this general approach. 
lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" # Transform an extracted symbol line into symbol name and symbol address lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'" lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'" # Handle CRLF in mingw tool chain opt_cr= case $build_os in mingw*) opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp ;; esac # Try without a prefix underscore, then with it. for ac_symprfx in "" "_"; do # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. symxfrm="\\1 $ac_symprfx\\2 \\2" # Write the raw and C identifiers. if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Fake it for dumpbin and say T for any non-static function # and D for any global variable. # Also find C++ and __fastcall symbols from MSVC++, # which start with @ or ?. lt_cv_sys_global_symbol_pipe="$AWK ['"\ " {last_section=section; section=\$ 3};"\ " /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ " \$ 0!~/External *\|/{next};"\ " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ " {if(hide[section]) next};"\ " {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\ " {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ " s[1]~/^[@?]/{print s[1], s[1]; next};"\ " s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ " ' prfx=^$ac_symprfx]" else lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" fi lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" # Check to see that the pipe works correctly. pipe_works=no rm -f conftest* cat > conftest.$ac_ext <<_LT_EOF #ifdef __cplusplus extern "C" { #endif char nm_test_var; void nm_test_func(void); void nm_test_func(void){} #ifdef __cplusplus } #endif int main(){nm_test_var='a';nm_test_func();return(0);} _LT_EOF if AC_TRY_EVAL(ac_compile); then # Now try to grab the symbols. nlist=conftest.nm if AC_TRY_EVAL(NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) && test -s "$nlist"; then # Try sorting and uniquifying the output. if sort "$nlist" | uniq > "$nlist"T; then mv -f "$nlist"T "$nlist" else rm -f "$nlist"T fi # Make sure that we snagged all the symbols we need. if $GREP ' nm_test_var$' "$nlist" >/dev/null; then if $GREP ' nm_test_func$' "$nlist" >/dev/null; then cat <<_LT_EOF > conftest.$ac_ext /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ #if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) /* DATA imports from DLLs on WIN32 con't be const, because runtime relocations are performed -- see ld's documentation on pseudo-relocs. */ # define LT@&t@_DLSYM_CONST #elif defined(__osf__) /* This system does not cope well with relocations in const data. */ # define LT@&t@_DLSYM_CONST #else # define LT@&t@_DLSYM_CONST const #endif #ifdef __cplusplus extern "C" { #endif _LT_EOF # Now generate the symbol file. 
eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' cat <<_LT_EOF >> conftest.$ac_ext /* The mapping between symbol names and symbols. */ LT@&t@_DLSYM_CONST struct { const char *name; void *address; } lt__PROGRAM__LTX_preloaded_symbols[[]] = { { "@PROGRAM@", (void *) 0 }, _LT_EOF $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext cat <<\_LT_EOF >> conftest.$ac_ext {0, (void *) 0} }; /* This works around a problem in FreeBSD linker */ #ifdef FREEBSD_WORKAROUND static const void *lt_preloaded_setup() { return lt__PROGRAM__LTX_preloaded_symbols; } #endif #ifdef __cplusplus } #endif _LT_EOF # Now try linking the two files. mv conftest.$ac_objext conftstm.$ac_objext lt_globsym_save_LIBS=$LIBS lt_globsym_save_CFLAGS=$CFLAGS LIBS="conftstm.$ac_objext" CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)" if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then pipe_works=yes fi LIBS=$lt_globsym_save_LIBS CFLAGS=$lt_globsym_save_CFLAGS else echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD fi else echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD fi else echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD fi else echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD cat conftest.$ac_ext >&5 fi rm -rf conftest* conftst* # Do not use the global_symbol_pipe unless it works. if test "$pipe_works" = yes; then break else lt_cv_sys_global_symbol_pipe= fi done ]) if test -z "$lt_cv_sys_global_symbol_pipe"; then lt_cv_sys_global_symbol_to_cdecl= fi if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then AC_MSG_RESULT(failed) else AC_MSG_RESULT(ok) fi # Response file support. if test "$lt_cv_nm_interface" = "MS dumpbin"; then nm_file_list_spec='@' elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then nm_file_list_spec='@' fi _LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1], [Take the output of nm and produce a listing of raw symbols and C names]) _LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1], [Transform the output of nm in a proper C declaration]) _LT_DECL([global_symbol_to_c_name_address], [lt_cv_sys_global_symbol_to_c_name_address], [1], [Transform the output of nm in a C name address pair]) _LT_DECL([global_symbol_to_c_name_address_lib_prefix], [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1], [Transform the output of nm in a C name address pair when lib prefix is needed]) _LT_DECL([], [nm_file_list_spec], [1], [Specify filename containing input files for $NM]) ]) # _LT_CMD_GLOBAL_SYMBOLS # _LT_COMPILER_PIC([TAGNAME]) # --------------------------- m4_defun([_LT_COMPILER_PIC], [m4_require([_LT_TAG_COMPILER])dnl _LT_TAGVAR(lt_prog_compiler_wl, $1)= _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)= m4_if([$1], [CXX], [ # C++ specific cases for pic, static, wl, etc. if test "$GXX" = yes; then _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' case $host_os in aix*) # All AIX code is PIC. 
if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' ;; *djgpp*) # DJGPP does not support shared libraries at all _LT_TAGVAR(lt_prog_compiler_pic, $1)= ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. _LT_TAGVAR(lt_prog_compiler_static, $1)= ;; interix[[3-9]]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic fi ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac else case $host_os in aix[[4-9]]*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' else _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' fi ;; chorus*) case $cc_basename in cxch68*) # Green Hills C++ Compiler # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a" ;; esac ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). 
m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) ;; dgux*) case $cc_basename in ec++*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' ;; ghcx*) # Green Hills C++ Compiler _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; *) ;; esac ;; freebsd* | dragonfly*) # FreeBSD uses GNU C++ ;; hpux9* | hpux10* | hpux11*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' if test "$host_cpu" != ia64; then _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' fi ;; aCC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' ;; esac ;; *) ;; esac ;; interix*) # This is c89, which is MS Visual C++ (no shared libs) # Anyone wants to do a port? ;; irix5* | irix6* | nonstopux*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' # CC pic flag -KPIC is the default. ;; *) ;; esac ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in KCC*) # KAI C++ Compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; ecpc* ) # old Intel C++ for x86_64 which still supported -KPIC. _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; icpc* ) # Intel C++, used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; pgCC* | pgcpp*) # Portland Group C++ compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; cxx*) # Compaq C++ # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; xlc* | xlC* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL 8.0, 9.0 on PPC and BlueGene _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' ;; esac ;; esac ;; lynxos*) ;; m88k*) ;; mvs*) case $cc_basename in cxx*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall' ;; *) ;; esac ;; netbsd*) ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' ;; RCC*) # Rational C++ 2.4.1 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; cxx*) # Digital/Compaq C++ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. 
_LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; *) ;; esac ;; psos*) ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' ;; gcx*) # Green Hills C++ Compiler _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' ;; *) ;; esac ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; lcc*) # Lucid _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; *) ;; esac ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' ;; *) ;; esac ;; vxworks*) ;; *) _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; esac fi ], [ if test "$GCC" = yes; then _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' case $host_os in aix*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. _LT_TAGVAR(lt_prog_compiler_static, $1)= ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac ;; interix[[3-9]]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; msdosdjgpp*) # Just because we use GCC doesn't mean we suddenly get shared libraries # on systems that don't support them. _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no enable_shared=no ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. 
_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic fi ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac case $cc_basename in nvcc*) # Cuda Compiler Driver 2.2 _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Xlinker ' if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then _LT_TAGVAR(lt_prog_compiler_pic, $1)="-Xcompiler $_LT_TAGVAR(lt_prog_compiler_pic, $1)" fi ;; esac else # PORTME Check for flag to pass linker flags through the system compiler. case $host_os in aix*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' else _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' fi ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) ;; hpux9* | hpux10* | hpux11*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' ;; esac # Is there a better lt_prog_compiler_static that works with the bundled CC? _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' ;; irix5* | irix6* | nonstopux*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # PIC (with -KPIC) is the default. _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in # old Intel for x86_64 which still supported -KPIC. ecc*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; # icc used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. icc* | ifort*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; # Lahey Fortran 8.1. lf95*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared' _LT_TAGVAR(lt_prog_compiler_static, $1)='--static' ;; nagfor*) # NAG Fortran compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group compilers (*not* the Pentium gcc compiler, # which looks to be a dead project) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; ccc*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # All Alpha code is PIC. 
_LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; xl* | bgxl* | bgf* | mpixl*) # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [[1-7]].* | *Sun*Fortran*\ 8.[[0-3]]*) # Sun Fortran 8.3 passes all unrecognized flags to the linker _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='' ;; *Sun\ F* | *Sun*Fortran*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' ;; *Sun\ C*) # Sun C 5.9 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' ;; *Intel*\ [[CF]]*Compiler*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; *Portland\ Group*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; esac ;; esac ;; newsos6) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; osf3* | osf4* | osf5*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # All OSF/1 code is PIC. _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; rdos*) _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; solaris*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' case $cc_basename in f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';; *) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';; esac ;; sunos4*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; sysv4 | sysv4.2uw2* | sysv4.3*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; sysv4*MP*) if test -d /usr/nec ;then _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; unicos*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; uts4*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; *) _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; esac fi ]) case $host_os in # For platforms which do not support PIC, -DPIC is meaningless: *djgpp*) _LT_TAGVAR(lt_prog_compiler_pic, $1)= ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])" ;; esac AC_CACHE_CHECK([for $compiler option to produce PIC], [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)], [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)]) _LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1) # # Check to 
make sure the PIC flag actually works. # if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works], [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)], [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [], [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in "" | " "*) ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;; esac], [_LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no]) fi _LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1], [Additional compiler flags for building library objects]) _LT_TAGDECL([wl], [lt_prog_compiler_wl], [1], [How to pass a linker flag through the compiler]) # # Check to make sure the static flag actually works. # wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\" _LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works], _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1), $lt_tmp_static_flag, [], [_LT_TAGVAR(lt_prog_compiler_static, $1)=]) _LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1], [Compiler flag to prevent dynamic linking]) ])# _LT_COMPILER_PIC # _LT_LINKER_SHLIBS([TAGNAME]) # ---------------------------- # See if the linker supports building shared libraries. m4_defun([_LT_LINKER_SHLIBS], [AC_REQUIRE([LT_PATH_LD])dnl AC_REQUIRE([LT_PATH_NM])dnl m4_require([_LT_PATH_MANIFEST_TOOL])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl m4_require([_LT_TAG_COMPILER])dnl AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) m4_if([$1], [CXX], [ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] case $host_os in aix[[4-9]]*) # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to AIX nm, but means don't demangle with GNU nm # Also, AIX nm treats weak defined symbols like other global defined # symbols, whereas GNU nm marks them as "W". 
if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' else _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' fi ;; pw32*) _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds" ;; cygwin* | mingw* | cegcc*) case $cc_basename in cl*) _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' ;; *) _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] ;; esac ;; *) _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ;; esac ], [ runpath_var= _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_cmds, $1)= _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(compiler_needs_object, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(old_archive_from_new_cmds, $1)= _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)= _LT_TAGVAR(thread_safe_flag_spec, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= # include_expsyms should be a list of space-separated symbols to be *always* # included in the symbol list _LT_TAGVAR(include_expsyms, $1)= # exclude_expsyms can be an extended regexp of symbols to exclude # it will be wrapped by ` (' and `)$', so one must not match beginning or # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', # as well as any symbol that contains `d'. _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out # platforms (ab)use it in PIC code, but their linkers get confused if # the symbol is explicitly referenced. Since portable code cannot # rely on this symbol name, it's probably fine to never include it in # preloaded symbol tables. # Exclude shared library initialization/finalization symbols. dnl Note also adjust exclude_expsyms for C++ above. extract_expsyms_cmds= case $host_os in cygwin* | mingw* | pw32* | cegcc*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. 
if test "$GCC" != yes; then with_gnu_ld=no fi ;; interix*) # we just hope/assume this is gcc and not c89 (= MSVC++) with_gnu_ld=yes ;; openbsd*) with_gnu_ld=no ;; esac _LT_TAGVAR(ld_shlibs, $1)=yes # On some targets, GNU ld is compatible enough with the native linker # that we're better off using the native interface for both. lt_use_gnu_ld_interface=no if test "$with_gnu_ld" = yes; then case $host_os in aix*) # The AIX port of GNU ld has always aspired to compatibility # with the native linker. However, as the warning in the GNU ld # block says, versions before 2.19.5* couldn't really create working # shared libraries, regardless of the interface used. case `$LD -v 2>&1` in *\ \(GNU\ Binutils\)\ 2.19.5*) ;; *\ \(GNU\ Binutils\)\ 2.[[2-9]]*) ;; *\ \(GNU\ Binutils\)\ [[3-9]]*) ;; *) lt_use_gnu_ld_interface=yes ;; esac ;; *) lt_use_gnu_ld_interface=yes ;; esac fi if test "$lt_use_gnu_ld_interface" = yes; then # If archive_cmds runs LD, not CC, wlarc should be empty wlarc='${wl}' # Set some defaults for GNU ld with shared library support. These # are reset later if shared libraries are not supported. Putting them # here allows them to be overridden if necessary. runpath_var=LD_RUN_PATH _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' # ancient GNU ld didn't support --whole-archive et. al. if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else _LT_TAGVAR(whole_archive_flag_spec, $1)= fi supports_anon_versioning=no case `$LD -v 2>&1` in *GNU\ gold*) supports_anon_versioning=yes ;; *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11 *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... *\ 2.11.*) ;; # other 2.11 versions *) supports_anon_versioning=yes ;; esac # See if GNU ld supports shared libraries. case $host_os in aix[[3-9]]*) # On AIX/PPC, the GNU linker is very broken if test "$host_cpu" != ia64; then _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: the GNU linker, at least up to release 2.19, is reported *** to be unable to reliably create shared libraries on AIX. *** Therefore, libtool is disabling shared libraries support. If you *** really care for shared libraries, you may want to install binutils *** 2.20 or above, or modify your PATH so that a non-GNU linker is found. *** You will then need to restart the configuration process. _LT_EOF fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='' ;; m68k) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. 
This deserves some investigation. FIXME _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; cygwin* | mingw* | pw32* | cegcc*) # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, # as there is no search path for DLLs. _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; haiku*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(link_all_deplibs, $1)=yes ;; interix[[3-9]]*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) tmp_diet=no if test "$host_os" = linux-dietlibc; then case $cc_basename in diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) esac fi if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ && test "$tmp_diet" = no then tmp_addflag=' $pic_flag' tmp_sharedflag='-shared' case $cc_basename,$host_cpu in pgcc*) # Portland Group C compiler _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' tmp_addflag=' $pic_flag' ;; pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group f77 and f90 compilers _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' tmp_addflag=' $pic_flag -Mnomain' ;; ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 tmp_addflag=' -i_dynamic' ;; efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 tmp_addflag=' -i_dynamic -nofor_main' ;; ifc* | ifort*) # Intel Fortran compiler tmp_addflag=' -nofor_main' ;; lf95*) # Lahey Fortran 8.1 _LT_TAGVAR(whole_archive_flag_spec, $1)= tmp_sharedflag='--shared' ;; xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below) tmp_sharedflag='-qmkshrobj' tmp_addflag= ;; nvcc*) # Cuda Compiler Driver 2.2 _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' _LT_TAGVAR(compiler_needs_object, $1)=yes ;; esac case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C 5.9 _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' _LT_TAGVAR(compiler_needs_object, $1)=yes tmp_sharedflag='-G' ;; *Sun\ F*) # Sun Fortran 8.3 tmp_sharedflag='-G' ;; esac _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' if test "x$supports_anon_versioning" = xyes; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' fi case $cc_basename in xlf* | bgf* | bgxlf* | mpixlf*) # IBM XL Fortran 10.1 on PPC cannot create shared libs itself _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath 
${wl}$libdir' _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' if test "x$supports_anon_versioning" = xyes; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' fi ;; esac else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' fi ;; solaris*) if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: The releases 2.8.* of the GNU linker cannot reliably *** create shared libraries on Solaris systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.9.1 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*) _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not *** reliably create shared libraries on SCO systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.16.91.0.3 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF ;; *) # For security reasons, it is highly recommended that you always # use absolute paths for naming shared libraries, and exclude the # DT_RUNPATH tag from executables and libraries. But doing so # requires that you compile everything twice, which is a pain. 
if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; sunos4*) _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' wlarc= _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac if test "$_LT_TAGVAR(ld_shlibs, $1)" = no; then runpath_var= _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= fi else # PORTME fill in a description of your system's linker (not GNU ld) case $host_os in aix3*) _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=yes _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. _LT_TAGVAR(hardcode_minus_L, $1)=yes if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. _LT_TAGVAR(hardcode_direct, $1)=unsupported fi ;; aix[[4-9]]*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to AIX nm, but means don't demangle with GNU nm # Also, AIX nm treats weak defined symbols like other global # defined symbols, whereas GNU nm marks them as "W". if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' else _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' fi aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) for ld_flag in $LDFLAGS; do if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then aix_use_runtimelinking=yes break fi done ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. 
If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. _LT_TAGVAR(archive_cmds, $1)='' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(file_list_spec, $1)='${wl}-f,' if test "$GCC" = yes; then case $host_os in aix4.[[012]]|aix4.[[012]].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 _LT_TAGVAR(hardcode_direct, $1)=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)= fi ;; esac shared_flag='-shared' if test "$aix_use_runtimelinking" = yes; then shared_flag="$shared_flag "'${wl}-G' fi else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. _LT_TAGVAR(always_export_symbols, $1)=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. _LT_TAGVAR(allow_undefined_flag, $1)='-berok' # Determine the default libpath from the value encoded in an # empty executable. _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' if test "$with_gnu_ld" = yes; then # We only use this code for GNU lds that support --whole-archive. 
_LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' fi _LT_TAGVAR(archive_cmds_need_lc, $1)=yes # This is similar to how AIX traditionally builds its shared libraries. _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='' ;; m68k) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac ;; bsdi[[45]]*) _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic ;; cygwin* | mingw* | pw32* | cegcc*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. case $cc_basename in cl*) # Native MSVC _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=yes _LT_TAGVAR(file_list_spec, $1)='@' # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=".dll" # FIXME: Setting linknames here is a bad hack. _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; else sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. 
# _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' # Don't use ranlib _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile="$lt_outputfile.exe" lt_tool_outputfile="$lt_tool_outputfile.exe" ;; esac~ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # Assume MSVC wrapper _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=".dll" # FIXME: Setting linknames here is a bad hack. _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' # The linker will automatically build a .lib file if we build a DLL. _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' # FIXME: Should let the user specify the lib program. _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ;; esac ;; darwin* | rhapsody*) _LT_DARWIN_LINKER_FEATURES($1) ;; dgux*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor # support. Future versions do this automatically, but an explicit c++rt0.o # does not break anything, and helps significantly (at the cost of a little # extra space). freebsd2.2*) _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; # Unfortunately, older versions of FreeBSD 2 do not have this feature. freebsd2.*) _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
freebsd* | dragonfly*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; hpux9*) if test "$GCC" = yes; then _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_direct, $1)=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' ;; hpux10*) if test "$GCC" = yes && test "$with_gnu_ld" = no; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi if test "$with_gnu_ld" = no; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. _LT_TAGVAR(hardcode_minus_L, $1)=yes fi ;; hpux11*) if test "$GCC" = yes && test "$with_gnu_ld" = no; then case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) m4_if($1, [], [ # Older versions of the 11.00 compiler do not understand -b yet # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) _LT_LINKER_OPTION([if $CC understands -b], _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b], [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'], [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])], [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags']) ;; esac fi if test "$with_gnu_ld" = no; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: case $host_cpu in hppa*64*|ia64*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(hardcode_direct, $1)=yes 
_LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. _LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) if test "$GCC" = yes; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' # Try to use the -exported_symbol ld option, if it does not # work, assume that -exports_file does not work either and # implicitly export all symbols. # This should be the same for all languages, so no per-tag cache variable. AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol], [lt_cv_irix_exported_symbol], [save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" AC_LINK_IFELSE( [AC_LANG_SOURCE( [AC_LANG_CASE([C], [[int foo (void) { return 0; }]], [C++], [[int foo (void) { return 0; }]], [Fortran 77], [[ subroutine foo end]], [Fortran], [[ subroutine foo end]])])], [lt_cv_irix_exported_symbol=yes], [lt_cv_irix_exported_symbol=no]) LDFLAGS="$save_LDFLAGS"]) if test "$lt_cv_irix_exported_symbol" = yes; then _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' fi else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(inherit_rpath, $1)=yes _LT_TAGVAR(link_all_deplibs, $1)=yes ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else _LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; newsos6) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *nto* | *qnx*) ;; openbsd*) if test -f /usr/libexec/ld.so; then _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=yes if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags 
${wl}-retain-symbols-file,$export_symbols' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' else case $host_os in openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*) _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' ;; esac fi else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; os2*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' ;; osf3*) if test "$GCC" = yes; then _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: ;; osf4* | osf5*) # as osf3* with the addition of -msym flag if test "$GCC" = yes; then _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' else _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' # Both c and cxx compiler support -rpath directly _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' _LT_TAGVAR(hardcode_libdir_separator, 
$1)=: ;; solaris*) _LT_TAGVAR(no_undefined_flag, $1)=' -z defs' if test "$GCC" = yes; then wlarc='${wl}' _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' else case `$CC -V 2>&1` in *"Compilers 5.0"*) wlarc='' _LT_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' ;; *) wlarc='${wl}' _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ;; esac fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands `-z linker_flag'. GCC discards it without `$wl', # but is careful enough not to reorder. # Supported since Solaris 2.6 (maybe 2.5.1?) if test "$GCC" = yes; then _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' else _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' fi ;; esac _LT_TAGVAR(link_all_deplibs, $1)=yes ;; sunos4*) if test "x$host_vendor" = xsequent; then # Use $CC to link under sequent, because it throws in some extra .o # files that make .init and .fini sections work. _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; sysv4) case $host_vendor in sni) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true??? ;; siemens) ## LD is ld it makes a PLAMLIB ## CC just makes a GrossModule. 
_LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs' _LT_TAGVAR(hardcode_direct, $1)=no ;; motorola) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=no #Motorola manual says yes, but my tests say they lie ;; esac runpath_var='LD_RUN_PATH' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; sysv4.3*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport' ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var=LD_RUN_PATH hardcode_runpath_var=yes _LT_TAGVAR(ld_shlibs, $1)=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var='LD_RUN_PATH' if test "$GCC" = yes; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We can NOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. 
_LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' runpath_var='LD_RUN_PATH' if test "$GCC" = yes; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; uts4*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(ld_shlibs, $1)=no ;; esac if test x$host_vendor = xsni; then case $host in sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Blargedynsym' ;; esac fi fi ]) AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no _LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld _LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl _LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl _LT_DECL([], [extract_expsyms_cmds], [2], [The commands to extract the exported symbol list from a shared archive]) # # Do we need to explicitly link libc? # case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in x|xyes) # Assume -lc should be added _LT_TAGVAR(archive_cmds_need_lc, $1)=yes if test "$enable_shared" = yes && test "$GCC" = yes; then case $_LT_TAGVAR(archive_cmds, $1) in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. AC_CACHE_CHECK([whether -lc should be explicitly linked in], [lt_cv_]_LT_TAGVAR(archive_cmds_need_lc, $1), [$RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if AC_TRY_EVAL(ac_compile) 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1) compiler_flags=-v linker_flags=-v verstring= output_objdir=. 
libname=conftest lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1) _LT_TAGVAR(allow_undefined_flag, $1)= if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) then lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=no else lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=yes fi _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* ]) _LT_TAGVAR(archive_cmds_need_lc, $1)=$lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1) ;; esac fi ;; esac _LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0], [Whether or not to add -lc for building shared libraries]) _LT_TAGDECL([allow_libtool_libs_with_static_runtimes], [enable_shared_with_static_runtimes], [0], [Whether or not to disallow shared libs when runtime libs are static]) _LT_TAGDECL([], [export_dynamic_flag_spec], [1], [Compiler flag to allow reflexive dlopens]) _LT_TAGDECL([], [whole_archive_flag_spec], [1], [Compiler flag to generate shared objects directly from archives]) _LT_TAGDECL([], [compiler_needs_object], [1], [Whether the compiler copes with passing no objects directly]) _LT_TAGDECL([], [old_archive_from_new_cmds], [2], [Create an old-style archive from a shared archive]) _LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2], [Create a temporary old-style archive to link instead of a shared archive]) _LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive]) _LT_TAGDECL([], [archive_expsym_cmds], [2]) _LT_TAGDECL([], [module_cmds], [2], [Commands used to build a loadable module if different from building a shared archive.]) _LT_TAGDECL([], [module_expsym_cmds], [2]) _LT_TAGDECL([], [with_gnu_ld], [1], [Whether we are building with GNU ld or not]) _LT_TAGDECL([], [allow_undefined_flag], [1], [Flag that allows shared libraries with undefined symbols to be built]) _LT_TAGDECL([], [no_undefined_flag], [1], [Flag that enforces no undefined symbols]) _LT_TAGDECL([], [hardcode_libdir_flag_spec], [1], [Flag to hardcode $libdir into a binary during linking. 
This must work even if $libdir does not exist]) _LT_TAGDECL([], [hardcode_libdir_separator], [1], [Whether we need a single "-rpath" flag with a separated argument]) _LT_TAGDECL([], [hardcode_direct], [0], [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_direct_absolute], [0], [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes DIR into the resulting binary and the resulting library dependency is "absolute", i.e impossible to change by setting ${shlibpath_var} if the library is relocated]) _LT_TAGDECL([], [hardcode_minus_L], [0], [Set to "yes" if using the -LDIR flag during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_shlibpath_var], [0], [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_automatic], [0], [Set to "yes" if building a shared library automatically hardcodes DIR into the library and all subsequent libraries and executables linked against it]) _LT_TAGDECL([], [inherit_rpath], [0], [Set to yes if linker adds runtime paths of dependent libraries to runtime path list]) _LT_TAGDECL([], [link_all_deplibs], [0], [Whether libtool must link a program against all its dependency libraries]) _LT_TAGDECL([], [always_export_symbols], [0], [Set to "yes" if exported symbols are required]) _LT_TAGDECL([], [export_symbols_cmds], [2], [The commands to list exported symbols]) _LT_TAGDECL([], [exclude_expsyms], [1], [Symbols that should not be listed in the preloaded symbols]) _LT_TAGDECL([], [include_expsyms], [1], [Symbols that must always be exported]) _LT_TAGDECL([], [prelink_cmds], [2], [Commands necessary for linking programs (against libraries) with templates]) _LT_TAGDECL([], [postlink_cmds], [2], [Commands necessary for finishing linking programs]) _LT_TAGDECL([], [file_list_spec], [1], [Specify filename containing input files]) dnl FIXME: Not yet implemented dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1], dnl [Compiler flag to generate thread safe objects]) ])# _LT_LINKER_SHLIBS # _LT_LANG_C_CONFIG([TAG]) # ------------------------ # Ensure that the configuration variables for a C compiler are suitably # defined. These variables are subsequently used by _LT_CONFIG to write # the compiler configuration to `libtool'. m4_defun([_LT_LANG_C_CONFIG], [m4_require([_LT_DECL_EGREP])dnl lt_save_CC="$CC" AC_LANG_PUSH(C) # Source file extension for C test sources. ac_ext=c # Object file extension for compiled C test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(){return(0);}' _LT_TAG_COMPILER # Save the default compiler, since it gets overwritten when the other # tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. compiler_DEFAULT=$CC # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... 
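# The macro calls below probe the C compiler and linker and record the results in the
# per-tag libtool variables (PIC flags, -c -o support, shared-library commands, dynamic
# linker traits); _LT_CONFIG later writes those settings into the generated `libtool' script.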
if test -n "$compiler"; then _LT_COMPILER_NO_RTTI($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) LT_SYS_DLOPEN_SELF _LT_CMD_STRIPLIB # Report which library types will actually be built AC_MSG_CHECKING([if libtool supports shared libraries]) AC_MSG_RESULT([$can_build_shared]) AC_MSG_CHECKING([whether to build shared libraries]) test "$can_build_shared" = "no" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test "$enable_shared" = yes && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[[4-9]]*) if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then test "$enable_shared" = yes && enable_static=no fi ;; esac AC_MSG_RESULT([$enable_shared]) AC_MSG_CHECKING([whether to build static libraries]) # Make sure either enable_shared or enable_static is yes. test "$enable_shared" = yes || enable_static=yes AC_MSG_RESULT([$enable_static]) _LT_CONFIG($1) fi AC_LANG_POP CC="$lt_save_CC" ])# _LT_LANG_C_CONFIG # _LT_LANG_CXX_CONFIG([TAG]) # -------------------------- # Ensure that the configuration variables for a C++ compiler are suitably # defined. These variables are subsequently used by _LT_CONFIG to write # the compiler configuration to `libtool'. m4_defun([_LT_LANG_CXX_CONFIG], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_PATH_MANIFEST_TOOL])dnl if test -n "$CXX" && ( test "X$CXX" != "Xno" && ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || (test "X$CXX" != "Xg++"))) ; then AC_PROG_CXXCPP else _lt_caught_CXX_error=yes fi AC_LANG_PUSH(C++) _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(compiler_needs_object, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for C++ test sources. ac_ext=cpp # Object file extension for compiled C++ test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the CXX compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test "$_lt_caught_CXX_error" != yes; then # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. 
_LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_CFLAGS=$CFLAGS lt_save_LD=$LD lt_save_GCC=$GCC GCC=$GXX lt_save_with_gnu_ld=$with_gnu_ld lt_save_path_LD=$lt_cv_path_LD if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx else $as_unset lt_cv_prog_gnu_ld fi if test -n "${lt_cv_path_LDCXX+set}"; then lt_cv_path_LD=$lt_cv_path_LDCXX else $as_unset lt_cv_path_LD fi test -z "${LDCXX+set}" || LD=$LDCXX CC=${CXX-"c++"} CFLAGS=$CXXFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) if test -n "$compiler"; then # We don't want -fno-exception when compiling C++ code, so set the # no_builtin_flag separately if test "$GXX" = yes; then _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' else _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= fi if test "$GXX" = yes; then # Set up default GNU C++ configuration LT_PATH_LD # Check if GNU C++ uses GNU ld as the underlying linker, since the # archiving commands below assume that GNU ld is being used. if test "$with_gnu_ld" = yes; then _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' # If archive_cmds runs LD, not CC, wlarc should be empty # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to # investigate it a little bit more. (MM) wlarc='${wl}' # ancient GNU ld didn't support --whole-archive et. al. if eval "`$CC -print-prog-name=ld` --help 2>&1" | $GREP 'no-whole-archive' > /dev/null; then _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else _LT_TAGVAR(whole_archive_flag_spec, $1)= fi else with_gnu_ld=no wlarc= # A generic and very simple default shared library creation # command for GNU C++ for the case where it uses the native # linker, instead of GNU ld. If possible, this setting should # overridden to take advantage of the native linker features on # the platform it is being used on. _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' fi # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else GXX=no with_gnu_ld=no wlarc= fi # PORTME: fill in a description of your system's C++ link characteristics AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) _LT_TAGVAR(ld_shlibs, $1)=yes case $host_os in aix3*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aix[[4-9]]*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. 
If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) for ld_flag in $LDFLAGS; do case $ld_flag in *-brtl*) aix_use_runtimelinking=yes break ;; esac done ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. _LT_TAGVAR(archive_cmds, $1)='' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(file_list_spec, $1)='${wl}-f,' if test "$GXX" = yes; then case $host_os in aix4.[[012]]|aix4.[[012]].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 _LT_TAGVAR(hardcode_direct, $1)=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)= fi esac shared_flag='-shared' if test "$aix_use_runtimelinking" = yes; then shared_flag="$shared_flag "'${wl}-G' fi else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to # export. _LT_TAGVAR(always_export_symbols, $1)=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. _LT_TAGVAR(allow_undefined_flag, $1)='-berok' # Determine the default libpath from the value encoded in an empty # executable. _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. 
_LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' if test "$with_gnu_ld" = yes; then # We only use this code for GNU lds that support --whole-archive. _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' fi _LT_TAGVAR(archive_cmds_need_lc, $1)=yes # This is similar to how AIX traditionally builds its shared # libraries. _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; chorus*) case $cc_basename in *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; cygwin* | mingw* | pw32* | cegcc*) case $GXX,$cc_basename in ,cl* | no,cl*) # Native MSVC # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=yes _LT_TAGVAR(file_list_spec, $1)='@' # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=".dll" # FIXME: Setting linknames here is a bad hack. _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; else $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes # Don't use ranlib _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile="$lt_outputfile.exe" lt_tool_outputfile="$lt_tool_outputfile.exe" ;; esac~ func_to_tool_file "$lt_outputfile"~ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # g++ # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, # as there is no search path for DLLs. 
_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; darwin* | rhapsody*) _LT_DARWIN_LINKER_FEATURES($1) ;; dgux*) case $cc_basename in ec++*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; ghcx*) # Green Hills C++ Compiler # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; freebsd2.*) # C++ shared libraries reported to be fairly broken before # switch to ELF _LT_TAGVAR(ld_shlibs, $1)=no ;; freebsd-elf*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;; freebsd* | dragonfly*) # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF # conventions _LT_TAGVAR(ld_shlibs, $1)=yes ;; gnu*) ;; haiku*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(link_all_deplibs, $1)=yes ;; hpux9*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, # but as the default # location of the library. case $cc_basename in CC*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aCC*) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test "$GXX" = yes; then _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; hpux10*|hpux11*) if test $with_gnu_ld = no; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: case $host_cpu in hppa*64*|ia64*) ;; *) _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' ;; esac fi case $host_cpu in hppa*64*|ia64*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, # but as the default # location of the library. ;; esac case $cc_basename in CC*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aCC*) case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test "$GXX" = yes; then if test $with_gnu_ld = no; then case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac fi else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; interix[[3-9]]*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. 
# Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; irix5* | irix6*) case $cc_basename in CC*) # SGI C++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' # Archives containing C++ object files must be created using # "CC -ar", where "CC" is the IRIX C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs' ;; *) if test "$GXX" = yes; then if test "$with_gnu_ld" = no; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' fi fi _LT_TAGVAR(link_all_deplibs, $1)=yes ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(inherit_rpath, $1)=yes ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' # Archives containing C++ object files must be created using # "CC -Bstatic", where "CC" is the KAI C++ compiler. _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;; icpc* | ecpc* ) # Intel C++ with_gnu_ld=yes # version 8.0 and above of icpc choke on multiply defined symbols # if we add $predep_objects and $postdep_objects, however 7.1 and # earlier do not add the objects themselves. case `$CC -V 2>&1` in *"Version 7."*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 8.0 or newer tmp_idyn= case $host_cpu in ia64*) tmp_idyn=' -i_dynamic';; esac _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ;; esac _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' ;; pgCC* | pgcpp*) # Portland Group C++ compiler case `$CC -V` in *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*) _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ $RANLIB $oldlib' _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' ;; *) # Version 6 and above use weak symbols _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname 
${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' ;; cxx*) # Compaq C++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file $wl$export_symbols' runpath_var=LD_RUN_PATH _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed' ;; xl* | mpixl* | bgxl*) # IBM XL 8.0 on PPC, with GNU ld _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' if test "x$supports_anon_versioning" = xyes; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' fi ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' _LT_TAGVAR(compiler_needs_object, $1)=yes # Not sure whether something based on # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 # would be better. output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. 
_LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' ;; esac ;; esac ;; lynxos*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; m88k*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; mvs*) case $cc_basename in cxx*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags' wlarc= _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no fi # Workaround some broken pre-1.5 toolchains output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"' ;; *nto* | *qnx*) _LT_TAGVAR(ld_shlibs, $1)=yes ;; openbsd2*) # C++ shared libraries are fairly broken _LT_TAGVAR(ld_shlibs, $1)=no ;; openbsd*) if test -f /usr/libexec/ld.so; then _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' fi output_verbose_link_cmd=func_echo_all else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Archives containing C++ object files must be created using # the KAI C++ compiler. 
case $host in osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;; *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;; esac ;; RCC*) # Rational C++ 2.4.1 # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; cxx*) case $host in osf3*) _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && func_echo_all "${wl}-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' ;; *) _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ echo "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~ $RM $lib.exp' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' ;; esac _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test "$GXX" = yes && test "$with_gnu_ld" = no; then _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' case $host in osf3*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; psos*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; lcc*) # Lucid # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ _LT_TAGVAR(archive_cmds_need_lc,$1)=yes _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands `-z linker_flag'. # Supported since Solaris 2.6 (maybe 2.5.1?) _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' ;; esac _LT_TAGVAR(link_all_deplibs, $1)=yes output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' ;; gcx*) # Green Hills C++ Compiler _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' # The C++ compiler must be used to create the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs' ;; *) # GNU C++ compiler with Solaris linker if test "$GXX" = yes && test "$with_gnu_ld" = no; then _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs' if $CC --version | $GREP -v '^2\.7' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # g++ 2.7 appears to require `-G' NOT `-shared' on this # platform. 
_LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir' case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' ;; esac fi ;; esac ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var='LD_RUN_PATH' case $cc_basename in CC*) _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We can NOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. 
_LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' runpath_var='LD_RUN_PATH' case $cc_basename in CC*) _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~ '"$_LT_TAGVAR(old_archive_cmds, $1)" _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~ '"$_LT_TAGVAR(reload_cmds, $1)" ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; vxworks*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no _LT_TAGVAR(GCC, $1)="$GXX" _LT_TAGVAR(LD, $1)="$LD" ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... _LT_SYS_HIDDEN_LIBDEPS($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi # test -n "$compiler" CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS LDCXX=$LD LD=$lt_save_LD GCC=$lt_save_GCC with_gnu_ld=$lt_save_with_gnu_ld lt_cv_path_LDCXX=$lt_cv_path_LD lt_cv_path_LD=$lt_save_path_LD lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld fi # test "$_lt_caught_CXX_error" != yes AC_LANG_POP ])# _LT_LANG_CXX_CONFIG # _LT_FUNC_STRIPNAME_CNF # ---------------------- # func_stripname_cnf prefix suffix name # strip PREFIX and SUFFIX off of NAME. # PREFIX and SUFFIX must not contain globbing or regex special # characters, hashes, percent signs, but SUFFIX may contain a leading # dot (in which case that matches only a dot). # # This function is identical to the (non-XSI) version of func_stripname, # except this one can be used by m4 code that may be executed by configure, # rather than the libtool script. m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl AC_REQUIRE([_LT_DECL_SED]) AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH]) func_stripname_cnf () { case ${2} in .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; esac } # func_stripname_cnf ])# _LT_FUNC_STRIPNAME_CNF # _LT_SYS_HIDDEN_LIBDEPS([TAGNAME]) # --------------------------------- # Figure out "hidden" library dependencies from verbose # compiler output when linking a shared library. # Parse the compiler output and extract the necessary # objects, libraries and library flags. 
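# Illustrative sketch only (an assumption, not part of upstream libtool): on a
# hypothetical GNU/Linux host with g++, the verbose link line captured by
# $output_verbose_link_cmd for the conftest object might contain tokens such as
#   /usr/lib/crtbeginS.o -L/usr/lib/gcc/x86_64-linux-gnu/10 conftest.o -lstdc++ -lm -lgcc_s /usr/lib/crtendS.o
# which the parsing loop below would then classify roughly as
#   predep_objects           = /usr/lib/crtbeginS.o    (objects seen before conftest.o)
#   postdep_objects          = /usr/lib/crtendS.o      (objects seen after conftest.o)
#   compiler_lib_search_path = -L/usr/lib/gcc/x86_64-linux-gnu/10
#   postdeps                 = -lstdc++ -lm -lgcc_s
# The paths and library names above are examples only; the real values depend
# entirely on the compiler and platform probed at configure time.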
m4_defun([_LT_SYS_HIDDEN_LIBDEPS], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl # Dependencies to place before and after the object being linked: _LT_TAGVAR(predep_objects, $1)= _LT_TAGVAR(postdep_objects, $1)= _LT_TAGVAR(predeps, $1)= _LT_TAGVAR(postdeps, $1)= _LT_TAGVAR(compiler_lib_search_path, $1)= dnl we can't use the lt_simple_compile_test_code here, dnl because it contains code intended for an executable, dnl not a library. It's possible we should let each dnl tag define a new lt_????_link_test_code variable, dnl but it's only used here... m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF int a; void foo (void) { a = 0; } _LT_EOF ], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF class Foo { public: Foo (void) { a = 0; } private: int a; }; _LT_EOF ], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF subroutine foo implicit none integer*4 a a=0 return end _LT_EOF ], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF subroutine foo implicit none integer a a=0 return end _LT_EOF ], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF public class foo { private int a; public void bar (void) { a = 0; } }; _LT_EOF ], [$1], [GO], [cat > conftest.$ac_ext <<_LT_EOF package foo func foo() { } _LT_EOF ]) _lt_libdeps_save_CFLAGS=$CFLAGS case "$CC $CFLAGS " in #( *\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; *\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; *\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;; esac dnl Parse the compiler output and extract the necessary dnl objects, libraries and library flags. if AC_TRY_EVAL(ac_compile); then # Parse the compiler output and extract the necessary # objects, libraries and library flags. # Sentinel used to keep track of whether or not we are before # the conftest object file. pre_test_object_deps_done=no for p in `eval "$output_verbose_link_cmd"`; do case ${prev}${p} in -L* | -R* | -l*) # Some compilers place space between "-{L,R}" and the path. # Remove the space. if test $p = "-L" || test $p = "-R"; then prev=$p continue fi # Expand the sysroot to ease extracting the directories later. if test -z "$prev"; then case $p in -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; esac fi case $p in =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; esac if test "$pre_test_object_deps_done" = no; then case ${prev} in -L | -R) # Internal compiler library paths should come after those # provided the user. The postdeps already come after the # user supplied libs so there is no need to process them. if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then _LT_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}" else _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}" fi ;; # The "-l" case would never come before the object being # linked, so don't bother handling this case. esac else if test -z "$_LT_TAGVAR(postdeps, $1)"; then _LT_TAGVAR(postdeps, $1)="${prev}${p}" else _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}" fi fi prev= ;; *.lto.$objext) ;; # Ignore GCC LTO objects *.$objext) # This assumes that the test object file only shows up # once in the compiler output. 
if test "$p" = "conftest.$objext"; then pre_test_object_deps_done=yes continue fi if test "$pre_test_object_deps_done" = no; then if test -z "$_LT_TAGVAR(predep_objects, $1)"; then _LT_TAGVAR(predep_objects, $1)="$p" else _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p" fi else if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then _LT_TAGVAR(postdep_objects, $1)="$p" else _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p" fi fi ;; *) ;; # Ignore the rest. esac done # Clean up. rm -f a.out a.exe else echo "libtool.m4: error: problem compiling $1 test program" fi $RM -f confest.$objext CFLAGS=$_lt_libdeps_save_CFLAGS # PORTME: override above test on systems where it is broken m4_if([$1], [CXX], [case $host_os in interix[[3-9]]*) # Interix 3.5 installs completely hosed .la files for C++, so rather than # hack all around it, let's just trust "g++" to DTRT. _LT_TAGVAR(predep_objects,$1)= _LT_TAGVAR(postdep_objects,$1)= _LT_TAGVAR(postdeps,$1)= ;; linux*) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 # The more standards-conforming stlport4 library is # incompatible with the Cstd library. Avoid specifying # it if it's in CXXFLAGS. Ignore libCrun as # -library=stlport4 depends on it. case " $CXX $CXXFLAGS " in *" -library=stlport4 "*) solaris_use_stlport4=yes ;; esac if test "$solaris_use_stlport4" != yes; then _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' fi ;; esac ;; solaris*) case $cc_basename in CC* | sunCC*) # The more standards-conforming stlport4 library is # incompatible with the Cstd library. Avoid specifying # it if it's in CXXFLAGS. Ignore libCrun as # -library=stlport4 depends on it. case " $CXX $CXXFLAGS " in *" -library=stlport4 "*) solaris_use_stlport4=yes ;; esac # Adding this requires a known-good setup of shared libraries for # Sun compiler versions before 5.6, else PIC objects from an old # archive will be linked into the output, leading to subtle bugs. if test "$solaris_use_stlport4" != yes; then _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' fi ;; esac ;; esac ]) case " $_LT_TAGVAR(postdeps, $1) " in *" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;; esac _LT_TAGVAR(compiler_lib_search_dirs, $1)= if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'` fi _LT_TAGDECL([], [compiler_lib_search_dirs], [1], [The directories searched by this compiler when creating a shared library]) _LT_TAGDECL([], [predep_objects], [1], [Dependencies to place before and after the objects being linked to create a shared library]) _LT_TAGDECL([], [postdep_objects], [1]) _LT_TAGDECL([], [predeps], [1]) _LT_TAGDECL([], [postdeps], [1]) _LT_TAGDECL([], [compiler_lib_search_path], [1], [The library search path used internally by the compiler when linking a shared library]) ])# _LT_SYS_HIDDEN_LIBDEPS # _LT_LANG_F77_CONFIG([TAG]) # -------------------------- # Ensure that the configuration variables for a Fortran 77 compiler are # suitably defined. These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to `libtool'. 
m4_defun([_LT_LANG_F77_CONFIG], [AC_LANG_PUSH(Fortran 77) if test -z "$F77" || test "X$F77" = "Xno"; then _lt_disable_F77=yes fi _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for f77 test sources. ac_ext=f # Object file extension for compiled f77 test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the F77 compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test "$_lt_disable_F77" != yes; then # Code to be used in simple compile tests lt_simple_compile_test_code="\ subroutine t return end " # Code to be used in simple link tests lt_simple_link_test_code="\ program t end " # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC="$CC" lt_save_GCC=$GCC lt_save_CFLAGS=$CFLAGS CC=${F77-"f77"} CFLAGS=$FFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) GCC=$G77 if test -n "$compiler"; then AC_MSG_CHECKING([if libtool supports shared libraries]) AC_MSG_RESULT([$can_build_shared]) AC_MSG_CHECKING([whether to build shared libraries]) test "$can_build_shared" = "no" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test "$enable_shared" = yes && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[[4-9]]*) if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then test "$enable_shared" = yes && enable_static=no fi ;; esac AC_MSG_RESULT([$enable_shared]) AC_MSG_CHECKING([whether to build static libraries]) # Make sure either enable_shared or enable_static is yes. test "$enable_shared" = yes || enable_static=yes AC_MSG_RESULT([$enable_static]) _LT_TAGVAR(GCC, $1)="$G77" _LT_TAGVAR(LD, $1)="$LD" ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... 
_LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi # test -n "$compiler" GCC=$lt_save_GCC CC="$lt_save_CC" CFLAGS="$lt_save_CFLAGS" fi # test "$_lt_disable_F77" != yes AC_LANG_POP ])# _LT_LANG_F77_CONFIG # _LT_LANG_FC_CONFIG([TAG]) # ------------------------- # Ensure that the configuration variables for a Fortran compiler are # suitably defined. These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to `libtool'. m4_defun([_LT_LANG_FC_CONFIG], [AC_LANG_PUSH(Fortran) if test -z "$FC" || test "X$FC" = "Xno"; then _lt_disable_FC=yes fi _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for fc test sources. ac_ext=${ac_fc_srcext-f} # Object file extension for compiled fc test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the FC compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test "$_lt_disable_FC" != yes; then # Code to be used in simple compile tests lt_simple_compile_test_code="\ subroutine t return end " # Code to be used in simple link tests lt_simple_link_test_code="\ program t end " # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC="$CC" lt_save_GCC=$GCC lt_save_CFLAGS=$CFLAGS CC=${FC-"f95"} CFLAGS=$FCFLAGS compiler=$CC GCC=$ac_cv_fc_compiler_gnu _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) if test -n "$compiler"; then AC_MSG_CHECKING([if libtool supports shared libraries]) AC_MSG_RESULT([$can_build_shared]) AC_MSG_CHECKING([whether to build shared libraries]) test "$can_build_shared" = "no" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test "$enable_shared" = yes && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[[4-9]]*) if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then test "$enable_shared" = yes && enable_static=no fi ;; esac AC_MSG_RESULT([$enable_shared]) AC_MSG_CHECKING([whether to build static libraries]) # Make sure either enable_shared or enable_static is yes. 
test "$enable_shared" = yes || enable_static=yes AC_MSG_RESULT([$enable_static]) _LT_TAGVAR(GCC, $1)="$ac_cv_fc_compiler_gnu" _LT_TAGVAR(LD, $1)="$LD" ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... _LT_SYS_HIDDEN_LIBDEPS($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi # test -n "$compiler" GCC=$lt_save_GCC CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS fi # test "$_lt_disable_FC" != yes AC_LANG_POP ])# _LT_LANG_FC_CONFIG # _LT_LANG_GCJ_CONFIG([TAG]) # -------------------------- # Ensure that the configuration variables for the GNU Java Compiler compiler # are suitably defined. These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to `libtool'. m4_defun([_LT_LANG_GCJ_CONFIG], [AC_REQUIRE([LT_PROG_GCJ])dnl AC_LANG_SAVE # Source file extension for Java test sources. ac_ext=java # Object file extension for compiled Java test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="class foo {}" # Code to be used in simple link tests lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_CFLAGS=$CFLAGS lt_save_GCC=$GCC GCC=yes CC=${GCJ-"gcj"} CFLAGS=$GCJFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_TAGVAR(LD, $1)="$LD" _LT_CC_BASENAME([$compiler]) # GCJ did not exist at the time GCC didn't implicitly link libc in. _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... if test -n "$compiler"; then _LT_COMPILER_NO_RTTI($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi AC_LANG_RESTORE GCC=$lt_save_GCC CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS ])# _LT_LANG_GCJ_CONFIG # _LT_LANG_GO_CONFIG([TAG]) # -------------------------- # Ensure that the configuration variables for the GNU Go compiler # are suitably defined. These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to `libtool'. m4_defun([_LT_LANG_GO_CONFIG], [AC_REQUIRE([LT_PROG_GO])dnl AC_LANG_SAVE # Source file extension for Go test sources. ac_ext=go # Object file extension for compiled Go test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="package main; func main() { }" # Code to be used in simple link tests lt_simple_link_test_code='package main; func main() { }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. 
lt_save_CC=$CC lt_save_CFLAGS=$CFLAGS lt_save_GCC=$GCC GCC=yes CC=${GOC-"gccgo"} CFLAGS=$GOFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_TAGVAR(LD, $1)="$LD" _LT_CC_BASENAME([$compiler]) # Go did not exist at the time GCC didn't implicitly link libc in. _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... if test -n "$compiler"; then _LT_COMPILER_NO_RTTI($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi AC_LANG_RESTORE GCC=$lt_save_GCC CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS ])# _LT_LANG_GO_CONFIG # _LT_LANG_RC_CONFIG([TAG]) # ------------------------- # Ensure that the configuration variables for the Windows resource compiler # are suitably defined. These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to `libtool'. m4_defun([_LT_LANG_RC_CONFIG], [AC_REQUIRE([LT_PROG_RC])dnl AC_LANG_SAVE # Source file extension for RC test sources. ac_ext=rc # Object file extension for compiled RC test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # Code to be used in simple compile tests lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }' # Code to be used in simple link tests lt_simple_link_test_code="$lt_simple_compile_test_code" # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC="$CC" lt_save_CFLAGS=$CFLAGS lt_save_GCC=$GCC GCC= CC=${RC-"windres"} CFLAGS= compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes if test -n "$compiler"; then : _LT_CONFIG($1) fi GCC=$lt_save_GCC AC_LANG_RESTORE CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS ])# _LT_LANG_RC_CONFIG # LT_PROG_GCJ # ----------- AC_DEFUN([LT_PROG_GCJ], [m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ], [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ], [AC_CHECK_TOOL(GCJ, gcj,) test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2" AC_SUBST(GCJFLAGS)])])[]dnl ]) # Old name: AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([LT_AC_PROG_GCJ], []) # LT_PROG_GO # ---------- AC_DEFUN([LT_PROG_GO], [AC_CHECK_TOOL(GOC, gccgo,) ]) # LT_PROG_RC # ---------- AC_DEFUN([LT_PROG_RC], [AC_CHECK_TOOL(RC, windres,) ]) # Old name: AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([LT_AC_PROG_RC], []) # _LT_DECL_EGREP # -------------- # If we don't have a new enough Autoconf to choose the best grep # available, choose the one first in the user's PATH. m4_defun([_LT_DECL_EGREP], [AC_REQUIRE([AC_PROG_EGREP])dnl AC_REQUIRE([AC_PROG_FGREP])dnl test -z "$GREP" && GREP=grep _LT_DECL([], [GREP], [1], [A grep program that handles long lines]) _LT_DECL([], [EGREP], [1], [An ERE matcher]) _LT_DECL([], [FGREP], [1], [A literal string matcher]) dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too AC_SUBST([GREP]) ]) # _LT_DECL_OBJDUMP # -------------- # If we don't have a new enough Autoconf to choose the best objdump # available, choose the one first in the user's PATH. 
m4_defun([_LT_DECL_OBJDUMP], [AC_CHECK_TOOL(OBJDUMP, objdump, false) test -z "$OBJDUMP" && OBJDUMP=objdump _LT_DECL([], [OBJDUMP], [1], [An object symbol dumper]) AC_SUBST([OBJDUMP]) ]) # _LT_DECL_DLLTOOL # ---------------- # Ensure DLLTOOL variable is set. m4_defun([_LT_DECL_DLLTOOL], [AC_CHECK_TOOL(DLLTOOL, dlltool, false) test -z "$DLLTOOL" && DLLTOOL=dlltool _LT_DECL([], [DLLTOOL], [1], [DLL creation program]) AC_SUBST([DLLTOOL]) ]) # _LT_DECL_SED # ------------ # Check for a fully-functional sed program, that truncates # as few characters as possible. Prefer GNU sed if found. m4_defun([_LT_DECL_SED], [AC_PROG_SED test -z "$SED" && SED=sed Xsed="$SED -e 1s/^X//" _LT_DECL([], [SED], [1], [A sed program that does not truncate output]) _LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"], [Sed that helps us avoid accidentally triggering echo(1) options like -n]) ])# _LT_DECL_SED m4_ifndef([AC_PROG_SED], [ ############################################################ # NOTE: This macro has been submitted for inclusion into # # GNU Autoconf as AC_PROG_SED. When it is available in # # a released version of Autoconf we should remove this # # macro and use it instead. # ############################################################ m4_defun([AC_PROG_SED], [AC_MSG_CHECKING([for a sed that does not truncate output]) AC_CACHE_VAL(lt_cv_path_SED, [# Loop through the user's path and test for sed and gsed. # Then use that list of sed's as ones to test for truncation. as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for lt_ac_prog in sed gsed; do for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext" fi done done done IFS=$as_save_IFS lt_ac_max=0 lt_ac_count=0 # Add /usr/xpg4/bin/sed as it is typically found on Solaris # along with /bin/sed that truncates output. for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do test ! -f $lt_ac_sed && continue cat /dev/null > conftest.in lt_ac_count=0 echo $ECHO_N "0123456789$ECHO_C" >conftest.in # Check for GNU sed and select it if it is found. if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then lt_cv_path_SED=$lt_ac_sed break fi while true; do cat conftest.in conftest.in >conftest.tmp mv conftest.tmp conftest.in cp conftest.in conftest.nl echo >>conftest.nl $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break cmp -s conftest.out conftest.nl || break # 10000 chars as input seems more than enough test $lt_ac_count -gt 10 && break lt_ac_count=`expr $lt_ac_count + 1` if test $lt_ac_count -gt $lt_ac_max; then lt_ac_max=$lt_ac_count lt_cv_path_SED=$lt_ac_sed fi done done ]) SED=$lt_cv_path_SED AC_SUBST([SED]) AC_MSG_RESULT([$SED]) ])#AC_PROG_SED ])#m4_ifndef # Old name: AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([LT_AC_PROG_SED], []) # _LT_CHECK_SHELL_FEATURES # ------------------------ # Find out whether the shell is Bourne or XSI compatible, # or has some other useful features. 
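# For reference (an illustrative note, not taken from upstream libtool): the
# XSI probe below relies on parameter expansions and arithmetic that plain
# Bourne shells lack.  With _lt_dummy="a/b/c" the tested expressions yield
#   ${_lt_dummy##*/}  -> c      (strip the longest prefix up to the last '/')
#   ${_lt_dummy%/*}   -> a/b    (strip the shortest suffix from the last '/')
#   ${_lt_dummy#??}   -> b/c    (strip the first two characters)
#   $(( 1 + 1 ))      -> 2      (built-in shell arithmetic)
#   ${#_lt_dummy}     -> 5      (string length)
# A shell that evaluates all of these as shown is treated as XSI-capable and
# later receives the faster func_* replacements installed by
# _LT_PROG_REPLACE_SHELLFNS.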
m4_defun([_LT_CHECK_SHELL_FEATURES], [AC_MSG_CHECKING([whether the shell understands some XSI constructs]) # Try some XSI features xsi_shell=no ( _lt_dummy="a/b/c" test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ = c,a/b,b/c, \ && eval 'test $(( 1 + 1 )) -eq 2 \ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ && xsi_shell=yes AC_MSG_RESULT([$xsi_shell]) _LT_CONFIG_LIBTOOL_INIT([xsi_shell='$xsi_shell']) AC_MSG_CHECKING([whether the shell understands "+="]) lt_shell_append=no ( foo=bar; set foo baz; eval "$[1]+=\$[2]" && test "$foo" = barbaz ) \ >/dev/null 2>&1 \ && lt_shell_append=yes AC_MSG_RESULT([$lt_shell_append]) _LT_CONFIG_LIBTOOL_INIT([lt_shell_append='$lt_shell_append']) if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then lt_unset=unset else lt_unset=false fi _LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl # test EBCDIC or ASCII case `echo X|tr X '\101'` in A) # ASCII based system # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr lt_SP2NL='tr \040 \012' lt_NL2SP='tr \015\012 \040\040' ;; *) # EBCDIC based system lt_SP2NL='tr \100 \n' lt_NL2SP='tr \r\n \100\100' ;; esac _LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl _LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl ])# _LT_CHECK_SHELL_FEATURES # _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY) # ------------------------------------------------------ # In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and # '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY. m4_defun([_LT_PROG_FUNCTION_REPLACE], [dnl { sed -e '/^$1 ()$/,/^} # $1 /c\ $1 ()\ {\ m4_bpatsubsts([$2], [$], [\\], [^\([ ]\)], [\\\1]) } # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: ]) # _LT_PROG_REPLACE_SHELLFNS # ------------------------- # Replace existing portable implementations of several shell functions with # equivalent extended shell implementations where those features are available.. m4_defun([_LT_PROG_REPLACE_SHELLFNS], [if test x"$xsi_shell" = xyes; then _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl case ${1} in */*) func_dirname_result="${1%/*}${2}" ;; * ) func_dirname_result="${3}" ;; esac]) _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl func_basename_result="${1##*/}"]) _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl case ${1} in */*) func_dirname_result="${1%/*}${2}" ;; * ) func_dirname_result="${3}" ;; esac func_basename_result="${1##*/}"]) _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are # positional parameters, so assign one to ordinary parameter first. 
func_stripname_result=${3} func_stripname_result=${func_stripname_result#"${1}"} func_stripname_result=${func_stripname_result%"${2}"}]) _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl func_split_long_opt_name=${1%%=*} func_split_long_opt_arg=${1#*=}]) _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl func_split_short_opt_arg=${1#??} func_split_short_opt_name=${1%"$func_split_short_opt_arg"}]) _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl case ${1} in *.lo) func_lo2o_result=${1%.lo}.${objext} ;; *) func_lo2o_result=${1} ;; esac]) _LT_PROG_FUNCTION_REPLACE([func_xform], [ func_xform_result=${1%.*}.lo]) _LT_PROG_FUNCTION_REPLACE([func_arith], [ func_arith_result=$(( $[*] ))]) _LT_PROG_FUNCTION_REPLACE([func_len], [ func_len_result=${#1}]) fi if test x"$lt_shell_append" = xyes; then _LT_PROG_FUNCTION_REPLACE([func_append], [ eval "${1}+=\\${2}"]) _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl func_quote_for_eval "${2}" dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \ eval "${1}+=\\\\ \\$func_quote_for_eval_result"]) # Save a `func_append' function call where possible by direct use of '+=' sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: else # Save a `func_append' function call even when '+=' is not available sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ && mv -f "$cfgfile.tmp" "$cfgfile" \ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") test 0 -eq $? || _lt_function_replace_fail=: fi if test x"$_lt_function_replace_fail" = x":"; then AC_MSG_WARN([Unable to substitute extended shell functions in $ofile]) fi ]) # _LT_PATH_CONVERSION_FUNCTIONS # ----------------------------- # Determine which file name conversion functions should be used by # func_to_host_file (and, implicitly, by func_to_host_path). These are needed # for certain cross-compile configurations and native mingw. m4_defun([_LT_PATH_CONVERSION_FUNCTIONS], [AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_CANONICAL_BUILD])dnl AC_MSG_CHECKING([how to convert $build file names to $host format]) AC_CACHE_VAL(lt_cv_to_host_file_cmd, [case $host in *-*-mingw* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ;; *-*-cygwin* ) lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ;; * ) # otherwise, assume *nix lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ;; esac ;; *-*-cygwin* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ;; *-*-cygwin* ) lt_cv_to_host_file_cmd=func_convert_file_noop ;; * ) # otherwise, assume *nix lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ;; esac ;; * ) # unhandled hosts (and "normal" native builds) lt_cv_to_host_file_cmd=func_convert_file_noop ;; esac ]) to_host_file_cmd=$lt_cv_to_host_file_cmd AC_MSG_RESULT([$lt_cv_to_host_file_cmd]) _LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd], [0], [convert $build file names to $host format])dnl AC_MSG_CHECKING([how to convert $build file names to toolchain format]) AC_CACHE_VAL(lt_cv_to_tool_file_cmd, [#assume ordinary cross tools, or native build. 
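# Illustrative note (an assumption, not from upstream libtool): when the build
# shell is MSYS and the host is mingw, func_convert_file_msys_to_w32 would turn
# an MSYS-style path such as /c/build/lib into its native Windows spelling
# (roughly c:/build/lib) so that native toolchain programs can open it; on
# ordinary Unix builds the no-op converter selected below is kept instead.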
lt_cv_to_tool_file_cmd=func_convert_file_noop case $host in *-*-mingw* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ;; esac ;; esac ]) to_tool_file_cmd=$lt_cv_to_tool_file_cmd AC_MSG_RESULT([$lt_cv_to_tool_file_cmd]) _LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd], [0], [convert $build files to toolchain format])dnl ])# _LT_PATH_CONVERSION_FUNCTIONS nordugrid-arc-6.14.0/m4/PaxHeaders.30264/nls.m40000644000000000000000000000013214152153401016707 xustar000000000000000030 mtime=1638455041.190687821 30 atime=1638455042.420706302 30 ctime=1638455095.717507111 nordugrid-arc-6.14.0/m4/nls.m40000644000175000002070000000226614152153401016702 0ustar00mockbuildmock00000000000000# nls.m4 serial 3 (gettext-0.15) dnl Copyright (C) 1995-2003, 2005-2006 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1995-2000. dnl Bruno Haible , 2000-2003. AC_PREREQ(2.50) AC_DEFUN([AM_NLS], [ AC_MSG_CHECKING([whether NLS is requested]) dnl Default is enabled NLS AC_ARG_ENABLE(nls, [ --disable-nls do not use Native Language Support], USE_NLS=$enableval, USE_NLS=yes) AC_MSG_RESULT($USE_NLS) AC_SUBST(USE_NLS) ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/iconv.m40000644000000000000000000000013214152153401017231 xustar000000000000000030 mtime=1638455041.072686048 30 atime=1638455042.432706483 30 ctime=1638455095.708506976 nordugrid-arc-6.14.0/m4/iconv.m40000644000175000002070000001375314152153401017227 0ustar00mockbuildmock00000000000000# iconv.m4 serial AM6 (gettext-0.17) dnl Copyright (C) 2000-2002, 2007 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl From Bruno Haible. AC_DEFUN([AM_ICONV_LINKFLAGS_BODY], [ dnl Prerequisites of AC_LIB_LINKFLAGS_BODY. AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) dnl Search for libiconv and define LIBICONV, LTLIBICONV and INCICONV dnl accordingly. AC_LIB_LINKFLAGS_BODY([iconv]) ]) AC_DEFUN([AM_ICONV_LINK], [ dnl Some systems have iconv in libc, some have it in libiconv (OSF/1 and dnl those with the standalone portable GNU libiconv installed). AC_REQUIRE([AC_CANONICAL_HOST]) dnl for cross-compiles dnl Search for libiconv and define LIBICONV, LTLIBICONV and INCICONV dnl accordingly. AC_REQUIRE([AM_ICONV_LINKFLAGS_BODY]) dnl Add $INCICONV to CPPFLAGS before performing the following checks, dnl because if the user has installed libiconv and not disabled its use dnl via --without-libiconv-prefix, he wants to use it. The first dnl AC_TRY_LINK will then fail, the second AC_TRY_LINK will succeed. 
am_save_CPPFLAGS="$CPPFLAGS" AC_LIB_APPENDTOVAR([CPPFLAGS], [$INCICONV]) AC_CACHE_CHECK([for iconv], am_cv_func_iconv, [ am_cv_func_iconv="no, consider installing GNU libiconv" am_cv_lib_iconv=no AC_TRY_LINK([#include #include ], [iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd);], am_cv_func_iconv=yes) if test "$am_cv_func_iconv" != yes; then am_save_LIBS="$LIBS" LIBS="$LIBS $LIBICONV" AC_TRY_LINK([#include #include ], [iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd);], am_cv_lib_iconv=yes am_cv_func_iconv=yes) LIBS="$am_save_LIBS" fi ]) if test "$am_cv_func_iconv" = yes; then AC_CACHE_CHECK([for working iconv], am_cv_func_iconv_works, [ dnl This tests against bugs in AIX 5.1 and HP-UX 11.11. am_save_LIBS="$LIBS" if test $am_cv_lib_iconv = yes; then LIBS="$LIBS $LIBICONV" fi AC_TRY_RUN([ #include #include int main () { /* Test against AIX 5.1 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_utf8_to_88591 = iconv_open ("ISO8859-1", "UTF-8"); if (cd_utf8_to_88591 != (iconv_t)(-1)) { static const char input[] = "\342\202\254"; /* EURO SIGN */ char buf[10]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_utf8_to_88591, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) return 1; } } #if 0 /* This bug could be worked around by the caller. */ /* Test against HP-UX 11.11 bug: Positive return value instead of 0. */ { iconv_t cd_88591_to_utf8 = iconv_open ("utf8", "iso88591"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static const char input[] = "\304rger mit b\366sen B\374bchen ohne Augenma\337"; char buf[50]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_88591_to_utf8, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if ((int)res > 0) return 1; } } #endif /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is provided. */ if (/* Try standardized names. */ iconv_open ("UTF-8", "EUC-JP") == (iconv_t)(-1) /* Try IRIX, OSF/1 names. */ && iconv_open ("UTF-8", "eucJP") == (iconv_t)(-1) /* Try AIX names. */ && iconv_open ("UTF-8", "IBM-eucJP") == (iconv_t)(-1) /* Try HP-UX names. */ && iconv_open ("utf8", "eucJP") == (iconv_t)(-1)) return 1; return 0; }], [am_cv_func_iconv_works=yes], [am_cv_func_iconv_works=no], [case "$host_os" in aix* | hpux*) am_cv_func_iconv_works="guessing no" ;; *) am_cv_func_iconv_works="guessing yes" ;; esac]) LIBS="$am_save_LIBS" ]) case "$am_cv_func_iconv_works" in *no) am_func_iconv=no am_cv_lib_iconv=no ;; *) am_func_iconv=yes ;; esac else am_func_iconv=no am_cv_lib_iconv=no fi if test "$am_func_iconv" = yes; then AC_DEFINE(HAVE_ICONV, 1, [Define if you have the iconv() function and it works.]) fi if test "$am_cv_lib_iconv" = yes; then AC_MSG_CHECKING([how to link with libiconv]) AC_MSG_RESULT([$LIBICONV]) else dnl If $LIBICONV didn't lead to a usable library, we don't need $INCICONV dnl either. 
CPPFLAGS="$am_save_CPPFLAGS" LIBICONV= LTLIBICONV= fi AC_SUBST(LIBICONV) AC_SUBST(LTLIBICONV) ]) AC_DEFUN([AM_ICONV], [ AM_ICONV_LINK if test "$am_cv_func_iconv" = yes; then AC_MSG_CHECKING([for iconv declaration]) AC_CACHE_VAL(am_cv_proto_iconv, [ AC_TRY_COMPILE([ #include #include extern #ifdef __cplusplus "C" #endif #if defined(__STDC__) || defined(__cplusplus) size_t iconv (iconv_t cd, char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft); #else size_t iconv(); #endif ], [], am_cv_proto_iconv_arg1="", am_cv_proto_iconv_arg1="const") am_cv_proto_iconv="extern size_t iconv (iconv_t cd, $am_cv_proto_iconv_arg1 char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft);"]) am_cv_proto_iconv=`echo "[$]am_cv_proto_iconv" | tr -s ' ' | sed -e 's/( /(/'` AC_MSG_RESULT([$]{ac_t:- }[$]am_cv_proto_iconv) AC_DEFINE_UNQUOTED(ICONV_CONST, $am_cv_proto_iconv_arg1, [Define as const if the declaration of iconv() needs const.]) fi ]) nordugrid-arc-6.14.0/m4/PaxHeaders.30264/po.m40000644000000000000000000000013214152153401016531 xustar000000000000000030 mtime=1638455041.198687941 30 atime=1638455042.418706273 30 ctime=1638455095.718507126 nordugrid-arc-6.14.0/m4/po.m40000644000175000002070000004460614152153401016530 0ustar00mockbuildmock00000000000000# po.m4 serial 15 (gettext-0.17) dnl Copyright (C) 1995-2007 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1995-2000. dnl Bruno Haible , 2000-2003. AC_PREREQ(2.50) dnl Checks for all prerequisites of the po subdirectory. AC_DEFUN([AM_PO_SUBDIRS], [ AC_REQUIRE([AC_PROG_MAKE_SET])dnl AC_REQUIRE([AC_PROG_INSTALL])dnl AC_REQUIRE([AM_PROG_MKDIR_P])dnl defined by automake AC_REQUIRE([AM_NLS])dnl dnl Release version of the gettext macros. This is used to ensure that dnl the gettext macros and po/Makefile.in.in are in sync. AC_SUBST([GETTEXT_MACRO_VERSION], [0.17]) dnl Perform the following tests also if --disable-nls has been given, dnl because they are needed for "make dist" to work. dnl Search for GNU msgfmt in the PATH. dnl The first test excludes Solaris msgfmt and early GNU msgfmt versions. dnl The second test excludes FreeBSD msgfmt. AM_PATH_PROG_WITH_TEST(MSGFMT, msgfmt, [$ac_dir/$ac_word --statistics /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1 && (if $ac_dir/$ac_word --statistics /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi)], :) AC_PATH_PROG(GMSGFMT, gmsgfmt, $MSGFMT) dnl Test whether it is GNU msgfmt >= 0.15. 
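dnl
dnl Illustrative sketch, not part of upstream po.m4: the test below keeps
dnl only the first line of "--version" output and strips everything up to
dnl the first digit, so on a host with a reasonably recent GNU gettext one
dnl would see something like
dnl
dnl   $ msgfmt --version | sed 1q | sed -e 's,^[^0-9]*,,'
dnl   0.19.8.1
dnl
dnl (the exact number is installation dependent). Only results matching the
dnl patterns for versions older than 0.15 cause MSGFMT_015 to be set to ":"
dnl instead of the real program.
dnl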
changequote(,)dnl case `$MSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) MSGFMT_015=: ;; *) MSGFMT_015=$MSGFMT ;; esac changequote([,])dnl AC_SUBST([MSGFMT_015]) changequote(,)dnl case `$GMSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) GMSGFMT_015=: ;; *) GMSGFMT_015=$GMSGFMT ;; esac changequote([,])dnl AC_SUBST([GMSGFMT_015]) dnl Search for GNU xgettext 0.12 or newer in the PATH. dnl The first test excludes Solaris xgettext and early GNU xgettext versions. dnl The second test excludes FreeBSD xgettext. AM_PATH_PROG_WITH_TEST(XGETTEXT, xgettext, [$ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1 && (if $ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi)], :) dnl Remove leftover from FreeBSD xgettext call. rm -f messages.po dnl Test whether it is GNU xgettext >= 0.15. changequote(,)dnl case `$XGETTEXT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) XGETTEXT_015=: ;; *) XGETTEXT_015=$XGETTEXT ;; esac changequote([,])dnl AC_SUBST([XGETTEXT_015]) dnl Search for GNU msgmerge 0.11 or newer in the PATH. AM_PATH_PROG_WITH_TEST(MSGMERGE, msgmerge, [$ac_dir/$ac_word --update -q /dev/null /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1], :) dnl Installation directories. dnl Autoconf >= 2.60 defines localedir. For older versions of autoconf, we dnl have to define it here, so that it can be used in po/Makefile. test -n "$localedir" || localedir='${datadir}/locale' AC_SUBST([localedir]) dnl Support for AM_XGETTEXT_OPTION. test -n "${XGETTEXT_EXTRA_OPTIONS+set}" || XGETTEXT_EXTRA_OPTIONS= AC_SUBST([XGETTEXT_EXTRA_OPTIONS]) AC_CONFIG_COMMANDS([po-directories], [[ for ac_file in $CONFIG_FILES; do # Support "outfile[:infile[:infile...]]" case "$ac_file" in *:*) ac_file=`echo "$ac_file"|sed 's%:.*%%'` ;; esac # PO directories have a Makefile.in generated from Makefile.in.in. case "$ac_file" in */Makefile.in) # Adjust a relative srcdir. ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'` ac_dir_suffix="/`echo "$ac_dir"|sed 's%^\./%%'`" ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'` # In autoconf-2.13 it is called $ac_given_srcdir. # In autoconf-2.50 it is called $srcdir. test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir" case "$ac_given_srcdir" in .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;; /*) top_srcdir="$ac_given_srcdir" ;; *) top_srcdir="$ac_dots$ac_given_srcdir" ;; esac # Treat a directory as a PO directory if and only if it has a # POTFILES.in file. This allows packages to have multiple PO # directories under different names or in different locations. if test -f "$ac_given_srcdir/$ac_dir/POTFILES.in"; then rm -f "$ac_dir/POTFILES" test -n "$as_me" && echo "$as_me: creating $ac_dir/POTFILES" || echo "creating $ac_dir/POTFILES" cat "$ac_given_srcdir/$ac_dir/POTFILES.in" | sed -e "/^#/d" -e "/^[ ]*\$/d" -e "s,.*, $top_srcdir/& \\\\," | sed -e "\$s/\(.*\) \\\\/\1/" > "$ac_dir/POTFILES" POMAKEFILEDEPS="POTFILES.in" # ALL_LINGUAS, POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES depend # on $ac_dir but don't depend on user-specified configuration # parameters. if test -f "$ac_given_srcdir/$ac_dir/LINGUAS"; then # The LINGUAS file contains the set of available languages. 
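          # Illustrative note, not part of upstream po.m4: a po/LINGUAS file
          # is simply a whitespace/newline separated list of language codes,
          # for example
          #
          #     de es fr ru sv
          #
          # Lines starting with '#' and trailing '#' comments are removed by
          # the sed command a few lines below before ALL_LINGUAS_ is set.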
if test -n "$OBSOLETE_ALL_LINGUAS"; then test -n "$as_me" && echo "$as_me: setting ALL_LINGUAS in configure.in is obsolete" || echo "setting ALL_LINGUAS in configure.in is obsolete" fi ALL_LINGUAS_=`sed -e "/^#/d" -e "s/#.*//" "$ac_given_srcdir/$ac_dir/LINGUAS"` # Hide the ALL_LINGUAS assigment from automake < 1.5. eval 'ALL_LINGUAS''=$ALL_LINGUAS_' POMAKEFILEDEPS="$POMAKEFILEDEPS LINGUAS" else # The set of available languages was given in configure.in. # Hide the ALL_LINGUAS assigment from automake < 1.5. eval 'ALL_LINGUAS''=$OBSOLETE_ALL_LINGUAS' fi # Compute POFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).po) # Compute UPDATEPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).po-update) # Compute DUMMYPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).nop) # Compute GMOFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).gmo) case "$ac_given_srcdir" in .) srcdirpre= ;; *) srcdirpre='$(srcdir)/' ;; esac POFILES= UPDATEPOFILES= DUMMYPOFILES= GMOFILES= for lang in $ALL_LINGUAS; do POFILES="$POFILES $srcdirpre$lang.po" UPDATEPOFILES="$UPDATEPOFILES $lang.po-update" DUMMYPOFILES="$DUMMYPOFILES $lang.nop" GMOFILES="$GMOFILES $srcdirpre$lang.gmo" done # CATALOGS depends on both $ac_dir and the user's LINGUAS # environment variable. INST_LINGUAS= if test -n "$ALL_LINGUAS"; then for presentlang in $ALL_LINGUAS; do useit=no if test "%UNSET%" != "$LINGUAS"; then desiredlanguages="$LINGUAS" else desiredlanguages="$ALL_LINGUAS" fi for desiredlang in $desiredlanguages; do # Use the presentlang catalog if desiredlang is # a. equal to presentlang, or # b. a variant of presentlang (because in this case, # presentlang can be used as a fallback for messages # which are not translated in the desiredlang catalog). case "$desiredlang" in "$presentlang"*) useit=yes;; esac done if test $useit = yes; then INST_LINGUAS="$INST_LINGUAS $presentlang" fi done fi CATALOGS= if test -n "$INST_LINGUAS"; then for lang in $INST_LINGUAS; do CATALOGS="$CATALOGS $lang.gmo" done fi test -n "$as_me" && echo "$as_me: creating $ac_dir/Makefile" || echo "creating $ac_dir/Makefile" sed -e "/^POTFILES =/r $ac_dir/POTFILES" -e "/^# Makevars/r $ac_given_srcdir/$ac_dir/Makevars" -e "s|@POFILES@|$POFILES|g" -e "s|@UPDATEPOFILES@|$UPDATEPOFILES|g" -e "s|@DUMMYPOFILES@|$DUMMYPOFILES|g" -e "s|@GMOFILES@|$GMOFILES|g" -e "s|@CATALOGS@|$CATALOGS|g" -e "s|@POMAKEFILEDEPS@|$POMAKEFILEDEPS|g" "$ac_dir/Makefile.in" > "$ac_dir/Makefile" for f in "$ac_given_srcdir/$ac_dir"/Rules-*; do if test -f "$f"; then case "$f" in *.orig | *.bak | *~) ;; *) cat "$f" >> "$ac_dir/Makefile" ;; esac fi done fi ;; esac done]], [# Capture the value of obsolete ALL_LINGUAS because we need it to compute # POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES, CATALOGS. But hide it # from automake < 1.5. eval 'OBSOLETE_ALL_LINGUAS''="$ALL_LINGUAS"' # Capture the value of LINGUAS because we need it to compute CATALOGS. LINGUAS="${LINGUAS-%UNSET%}" ]) ]) dnl Postprocesses a Makefile in a directory containing PO files. AC_DEFUN([AM_POSTPROCESS_PO_MAKEFILE], [ # When this code is run, in config.status, two variables have already been # set: # - OBSOLETE_ALL_LINGUAS is the value of LINGUAS set in configure.in, # - LINGUAS is the value of the environment variable LINGUAS at configure # time. changequote(,)dnl # Adjust a relative srcdir. ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'` ac_dir_suffix="/`echo "$ac_dir"|sed 's%^\./%%'`" ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'` # In autoconf-2.13 it is called $ac_given_srcdir. 
# In autoconf-2.50 it is called $srcdir. test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir" case "$ac_given_srcdir" in .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;; /*) top_srcdir="$ac_given_srcdir" ;; *) top_srcdir="$ac_dots$ac_given_srcdir" ;; esac # Find a way to echo strings without interpreting backslash. if test "X`(echo '\t') 2>/dev/null`" = 'X\t'; then gt_echo='echo' else if test "X`(printf '%s\n' '\t') 2>/dev/null`" = 'X\t'; then gt_echo='printf %s\n' else echo_func () { cat < "$ac_file.tmp" if grep -l '@TCLCATALOGS@' "$ac_file" > /dev/null; then # Add dependencies that cannot be formulated as a simple suffix rule. for lang in $ALL_LINGUAS; do frobbedlang=`echo $lang | sed -e 's/\..*$//' -e 'y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/'` cat >> "$ac_file.tmp" < /dev/null; then # Add dependencies that cannot be formulated as a simple suffix rule. for lang in $ALL_LINGUAS; do frobbedlang=`echo $lang | sed -e 's/_/-/g' -e 's/^sr-CS/sr-SP/' -e 's/@latin$/-Latn/' -e 's/@cyrillic$/-Cyrl/' -e 's/^sr-SP$/sr-SP-Latn/' -e 's/^uz-UZ$/uz-UZ-Latn/'` cat >> "$ac_file.tmp" <> "$ac_file.tmp" <. # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # Originally written by Alexandre Oliva . case $1 in '') echo "$0: No command. Try '$0 --help' for more information." 1>&2 exit 1; ;; -h | --h*) cat <<\EOF Usage: depcomp [--help] [--version] PROGRAM [ARGS] Run PROGRAMS ARGS to compile a file, generating dependencies as side-effects. Environment variables: depmode Dependency tracking mode. source Source file read by 'PROGRAMS ARGS'. object Object file output by 'PROGRAMS ARGS'. DEPDIR directory where to store dependencies. depfile Dependency file to output. tmpdepfile Temporary file to use when outputting dependencies. libtool Whether libtool is used (yes/no). Report bugs to . EOF exit $? ;; -v | --v*) echo "depcomp $scriptversion" exit $? ;; esac # Get the directory component of the given path, and save it in the # global variables '$dir'. Note that this directory component will # be either empty or ending with a '/' character. This is deliberate. set_dir_from () { case $1 in */*) dir=`echo "$1" | sed -e 's|/[^/]*$|/|'`;; *) dir=;; esac } # Get the suffix-stripped basename of the given path, and save it the # global variable '$base'. set_base_from () { base=`echo "$1" | sed -e 's|^.*/||' -e 's/\.[^.]*$//'` } # If no dependency file was actually created by the compiler invocation, # we still have to create a dummy depfile, to avoid errors with the # Makefile "include basename.Plo" scheme. make_dummy_depfile () { echo "#dummy" > "$depfile" } # Factor out some common post-processing of the generated depfile. # Requires the auxiliary global variable '$tmpdepfile' to be set. aix_post_process_depfile () { # If the compiler actually managed to produce a dependency file, # post-process it. if test -f "$tmpdepfile"; then # Each line is of the form 'foo.o: dependency.h'. # Do two passes, one to just change these to # $object: dependency.h # and one to simply output # dependency.h: # which is needed to avoid the deleted-header problem. { sed -e "s,^.*\.[$lower]*:,$object:," < "$tmpdepfile" sed -e "s,^.*\.[$lower]*:[$tab ]*,," -e 's,$,:,' < "$tmpdepfile" } > "$depfile" rm -f "$tmpdepfile" else make_dummy_depfile fi } # A tabulation character. tab=' ' # A newline character. 
nl=' ' # Character ranges might be problematic outside the C locale. # These definitions help. upper=ABCDEFGHIJKLMNOPQRSTUVWXYZ lower=abcdefghijklmnopqrstuvwxyz digits=0123456789 alpha=${upper}${lower} if test -z "$depmode" || test -z "$source" || test -z "$object"; then echo "depcomp: Variables source, object and depmode must be set" 1>&2 exit 1 fi # Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po. depfile=${depfile-`echo "$object" | sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`} tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`} rm -f "$tmpdepfile" # Avoid interferences from the environment. gccflag= dashmflag= # Some modes work just like other modes, but use different flags. We # parameterize here, but still list the modes in the big case below, # to make depend.m4 easier to write. Note that we *cannot* use a case # here, because this file can only contain one case statement. if test "$depmode" = hp; then # HP compiler uses -M and no extra arg. gccflag=-M depmode=gcc fi if test "$depmode" = dashXmstdout; then # This is just like dashmstdout with a different argument. dashmflag=-xM depmode=dashmstdout fi cygpath_u="cygpath -u -f -" if test "$depmode" = msvcmsys; then # This is just like msvisualcpp but w/o cygpath translation. # Just convert the backslash-escaped backslashes to single forward # slashes to satisfy depend.m4 cygpath_u='sed s,\\\\,/,g' depmode=msvisualcpp fi if test "$depmode" = msvc7msys; then # This is just like msvc7 but w/o cygpath translation. # Just convert the backslash-escaped backslashes to single forward # slashes to satisfy depend.m4 cygpath_u='sed s,\\\\,/,g' depmode=msvc7 fi if test "$depmode" = xlc; then # IBM C/C++ Compilers xlc/xlC can output gcc-like dependency information. gccflag=-qmakedep=gcc,-MF depmode=gcc fi case "$depmode" in gcc3) ## gcc 3 implements dependency tracking that does exactly what ## we want. Yay! Note: for some reason libtool 1.4 doesn't like ## it if -MD -MP comes after the -MF stuff. Hmm. ## Unfortunately, FreeBSD c89 acceptance of flags depends upon ## the command line argument order; so add the flags where they ## appear in depend2.am. Note that the slowdown incurred here ## affects only configure: in makefiles, %FASTDEP% shortcuts this. for arg do case $arg in -c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;; *) set fnord "$@" "$arg" ;; esac shift # fnord shift # $arg done "$@" stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi mv "$tmpdepfile" "$depfile" ;; gcc) ## Note that this doesn't just cater to obsosete pre-3.x GCC compilers. ## but also to in-use compilers like IMB xlc/xlC and the HP C compiler. ## (see the conditional assignment to $gccflag above). ## There are various ways to get dependency output from gcc. Here's ## why we pick this rather obscure method: ## - Don't want to use -MD because we'd like the dependencies to end ## up in a subdir. Having to rename by hand is ugly. ## (We might end up doing this anyway to support other compilers.) ## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like ## -MM, not -M (despite what the docs say). Also, it might not be ## supported by the other compilers which use the 'gcc' depmode. ## - Using -M directly means running the compiler twice (even worse ## than renaming). if test -z "$gccflag"; then gccflag=-MD, fi "$@" -Wp,"$gccflag$tmpdepfile" stat=$? 
if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" echo "$object : \\" > "$depfile" # The second -e expression handles DOS-style file names with drive # letters. sed -e 's/^[^:]*: / /' \ -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile" ## This next piece of magic avoids the "deleted header file" problem. ## The problem is that when a header file which appears in a .P file ## is deleted, the dependency causes make to die (because there is ## typically no way to rebuild the header). We avoid this by adding ## dummy dependencies for each header file. Too bad gcc doesn't do ## this for us directly. ## Some versions of gcc put a space before the ':'. On the theory ## that the space means something, we add a space to the output as ## well. hp depmode also adds that space, but also prefixes the VPATH ## to the object. Take care to not repeat it in the output. ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. tr ' ' "$nl" < "$tmpdepfile" \ | sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \ | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; hp) # This case exists only to let depend.m4 do its work. It works by # looking at the text of this script. This case will never be run, # since it is checked for above. exit 1 ;; sgi) if test "$libtool" = yes; then "$@" "-Wp,-MDupdate,$tmpdepfile" else "$@" -MDupdate "$tmpdepfile" fi stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files echo "$object : \\" > "$depfile" # Clip off the initial element (the dependent). Don't try to be # clever and replace this with sed code, as IRIX sed won't handle # lines with more than a fixed number of characters (4096 in # IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines; # the IRIX cc adds comments like '#:fec' to the end of the # dependency line. tr ' ' "$nl" < "$tmpdepfile" \ | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' \ | tr "$nl" ' ' >> "$depfile" echo >> "$depfile" # The second pass generates a dummy entry for each header file. tr ' ' "$nl" < "$tmpdepfile" \ | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \ >> "$depfile" else make_dummy_depfile fi rm -f "$tmpdepfile" ;; xlc) # This case exists only to let depend.m4 do its work. It works by # looking at the text of this script. This case will never be run, # since it is checked for above. exit 1 ;; aix) # The C for AIX Compiler uses -M and outputs the dependencies # in a .u file. In older versions, this file always lives in the # current directory. Also, the AIX compiler puts '$object:' at the # start of each line; $object doesn't have directory information. # Version 6 uses the directory in both cases. set_dir_from "$object" set_base_from "$object" if test "$libtool" = yes; then tmpdepfile1=$dir$base.u tmpdepfile2=$base.u tmpdepfile3=$dir.libs/$base.u "$@" -Wc,-M else tmpdepfile1=$dir$base.u tmpdepfile2=$dir$base.u tmpdepfile3=$dir$base.u "$@" -M fi stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" exit $stat fi for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" do test -f "$tmpdepfile" && break done aix_post_process_depfile ;; tcc) # tcc (Tiny C Compiler) understand '-MD -MF file' since version 0.9.26 # FIXME: That version still under development at the moment of writing. 
# Make that this statement remains true also for stable, released # versions. # It will wrap lines (doesn't matter whether long or short) with a # trailing '\', as in: # # foo.o : \ # foo.c \ # foo.h \ # # It will put a trailing '\' even on the last line, and will use leading # spaces rather than leading tabs (at least since its commit 0394caf7 # "Emit spaces for -MD"). "$@" -MD -MF "$tmpdepfile" stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" # Each non-empty line is of the form 'foo.o : \' or ' dep.h \'. # We have to change lines of the first kind to '$object: \'. sed -e "s|.*:|$object :|" < "$tmpdepfile" > "$depfile" # And for each line of the second kind, we have to emit a 'dep.h:' # dummy dependency, to avoid the deleted-header problem. sed -n -e 's|^ *\(.*\) *\\$|\1:|p' < "$tmpdepfile" >> "$depfile" rm -f "$tmpdepfile" ;; ## The order of this option in the case statement is important, since the ## shell code in configure will try each of these formats in the order ## listed in this file. A plain '-MD' option would be understood by many ## compilers, so we must ensure this comes after the gcc and icc options. pgcc) # Portland's C compiler understands '-MD'. # Will always output deps to 'file.d' where file is the root name of the # source file under compilation, even if file resides in a subdirectory. # The object file name does not affect the name of the '.d' file. # pgcc 10.2 will output # foo.o: sub/foo.c sub/foo.h # and will wrap long lines using '\' : # foo.o: sub/foo.c ... \ # sub/foo.h ... \ # ... set_dir_from "$object" # Use the source, not the object, to determine the base name, since # that's sadly what pgcc will do too. set_base_from "$source" tmpdepfile=$base.d # For projects that build the same source file twice into different object # files, the pgcc approach of using the *source* file root name can cause # problems in parallel builds. Use a locking strategy to avoid stomping on # the same $tmpdepfile. lockdir=$base.d-lock trap " echo '$0: caught signal, cleaning up...' >&2 rmdir '$lockdir' exit 1 " 1 2 13 15 numtries=100 i=$numtries while test $i -gt 0; do # mkdir is a portable test-and-set. if mkdir "$lockdir" 2>/dev/null; then # This process acquired the lock. "$@" -MD stat=$? # Release the lock. rmdir "$lockdir" break else # If the lock is being held by a different process, wait # until the winning process is done or we timeout. while test -d "$lockdir" && test $i -gt 0; do sleep 1 i=`expr $i - 1` done fi i=`expr $i - 1` done trap - 1 2 13 15 if test $i -le 0; then echo "$0: failed to acquire lock after $numtries attempts" >&2 echo "$0: check lockdir '$lockdir'" >&2 exit 1 fi if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" # Each line is of the form `foo.o: dependent.h', # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'. # Do two passes, one to just change these to # `$object: dependent.h' and one to simply `dependent.h:'. sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile" # Some versions of the HPUX 10.20 sed can't process this invocation # correctly. Breaking it into two sed invocations is a workaround. sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" \ | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; hp2) # The "hp" stanza above does not work with aCC (C++) and HP's ia64 # compilers, which have integrated preprocessors. 
The correct option # to use with these is +Maked; it writes dependencies to a file named # 'foo.d', which lands next to the object file, wherever that # happens to be. # Much of this is similar to the tru64 case; see comments there. set_dir_from "$object" set_base_from "$object" if test "$libtool" = yes; then tmpdepfile1=$dir$base.d tmpdepfile2=$dir.libs/$base.d "$@" -Wc,+Maked else tmpdepfile1=$dir$base.d tmpdepfile2=$dir$base.d "$@" +Maked fi stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile1" "$tmpdepfile2" exit $stat fi for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" do test -f "$tmpdepfile" && break done if test -f "$tmpdepfile"; then sed -e "s,^.*\.[$lower]*:,$object:," "$tmpdepfile" > "$depfile" # Add 'dependent.h:' lines. sed -ne '2,${ s/^ *// s/ \\*$// s/$/:/ p }' "$tmpdepfile" >> "$depfile" else make_dummy_depfile fi rm -f "$tmpdepfile" "$tmpdepfile2" ;; tru64) # The Tru64 compiler uses -MD to generate dependencies as a side # effect. 'cc -MD -o foo.o ...' puts the dependencies into 'foo.o.d'. # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put # dependencies in 'foo.d' instead, so we check for that too. # Subdirectories are respected. set_dir_from "$object" set_base_from "$object" if test "$libtool" = yes; then # Libtool generates 2 separate objects for the 2 libraries. These # two compilations output dependencies in $dir.libs/$base.o.d and # in $dir$base.o.d. We have to check for both files, because # one of the two compilations can be disabled. We should prefer # $dir$base.o.d over $dir.libs/$base.o.d because the latter is # automatically cleaned when .libs/ is deleted, while ignoring # the former would cause a distcleancheck panic. tmpdepfile1=$dir$base.o.d # libtool 1.5 tmpdepfile2=$dir.libs/$base.o.d # Likewise. tmpdepfile3=$dir.libs/$base.d # Compaq CCC V6.2-504 "$@" -Wc,-MD else tmpdepfile1=$dir$base.d tmpdepfile2=$dir$base.d tmpdepfile3=$dir$base.d "$@" -MD fi stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" exit $stat fi for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" do test -f "$tmpdepfile" && break done # Same post-processing that is required for AIX mode. aix_post_process_depfile ;; msvc7) if test "$libtool" = yes; then showIncludes=-Wc,-showIncludes else showIncludes=-showIncludes fi "$@" $showIncludes > "$tmpdepfile" stat=$? grep -v '^Note: including file: ' "$tmpdepfile" if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" echo "$object : \\" > "$depfile" # The first sed program below extracts the file names and escapes # backslashes for cygpath. The second sed program outputs the file # name when reading, but also accumulates all include files in the # hold buffer in order to output them again at the end. This only # works with sed implementations that can handle large buffers. sed < "$tmpdepfile" -n ' /^Note: including file: *\(.*\)/ { s//\1/ s/\\/\\\\/g p }' | $cygpath_u | sort -u | sed -n ' s/ /\\ /g s/\(.*\)/'"$tab"'\1 \\/p s/.\(.*\) \\/\1:/ H $ { s/.*/'"$tab"'/ G p }' >> "$depfile" echo >> "$depfile" # make sure the fragment doesn't end with a backslash rm -f "$tmpdepfile" ;; msvc7msys) # This case exists only to let depend.m4 do its work. It works by # looking at the text of this script. This case will never be run, # since it is checked for above. exit 1 ;; #nosideeffect) # This comment above is used by automake to tell side-effect # dependency tracking mechanisms from slower ones. 
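# Illustrative sketch, not part of the upstream depcomp script: an
# automake-generated compile rule drives this script roughly as
#
#   source='foo.c' object='foo.o' libtool=no DEPDIR=.deps depmode=gcc3 \
#     /bin/sh ./depcomp gcc -I. -c -o foo.o foo.c
#
# (file names are placeholders). The compiler command line is passed through
# as "$@", the environment variables select one of the depmode branches in
# this case statement, and the resulting .deps/foo.Po file is then included
# by the generated Makefile.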
dashmstdout) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout, regardless of -o. "$@" || exit $? # Remove the call to Libtool. if test "$libtool" = yes; then while test "X$1" != 'X--mode=compile'; do shift done shift fi # Remove '-o $object'. IFS=" " for arg do case $arg in -o) shift ;; $object) shift ;; *) set fnord "$@" "$arg" shift # fnord shift # $arg ;; esac done test -z "$dashmflag" && dashmflag=-M # Require at least two characters before searching for ':' # in the target name. This is to cope with DOS-style filenames: # a dependency such as 'c:/foo/bar' could be seen as target 'c' otherwise. "$@" $dashmflag | sed "s|^[$tab ]*[^:$tab ][^:][^:]*:[$tab ]*|$object: |" > "$tmpdepfile" rm -f "$depfile" cat < "$tmpdepfile" > "$depfile" # Some versions of the HPUX 10.20 sed can't process this sed invocation # correctly. Breaking it into two sed invocations is a workaround. tr ' ' "$nl" < "$tmpdepfile" \ | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \ | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; dashXmstdout) # This case only exists to satisfy depend.m4. It is never actually # run, as this mode is specially recognized in the preamble. exit 1 ;; makedepend) "$@" || exit $? # Remove any Libtool call if test "$libtool" = yes; then while test "X$1" != 'X--mode=compile'; do shift done shift fi # X makedepend shift cleared=no eat=no for arg do case $cleared in no) set ""; shift cleared=yes ;; esac if test $eat = yes; then eat=no continue fi case "$arg" in -D*|-I*) set fnord "$@" "$arg"; shift ;; # Strip any option that makedepend may not understand. Remove # the object too, otherwise makedepend will parse it as a source file. -arch) eat=yes ;; -*|$object) ;; *) set fnord "$@" "$arg"; shift ;; esac done obj_suffix=`echo "$object" | sed 's/^.*\././'` touch "$tmpdepfile" ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@" rm -f "$depfile" # makedepend may prepend the VPATH from the source file name to the object. # No need to regex-escape $object, excess matching of '.' is harmless. sed "s|^.*\($object *:\)|\1|" "$tmpdepfile" > "$depfile" # Some versions of the HPUX 10.20 sed can't process the last invocation # correctly. Breaking it into two sed invocations is a workaround. sed '1,2d' "$tmpdepfile" \ | tr ' ' "$nl" \ | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \ | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" "$tmpdepfile".bak ;; cpp) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout. "$@" || exit $? # Remove the call to Libtool. if test "$libtool" = yes; then while test "X$1" != 'X--mode=compile'; do shift done shift fi # Remove '-o $object'. IFS=" " for arg do case $arg in -o) shift ;; $object) shift ;; *) set fnord "$@" "$arg" shift # fnord shift # $arg ;; esac done "$@" -E \ | sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \ -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \ | sed '$ s: \\$::' > "$tmpdepfile" rm -f "$depfile" echo "$object : \\" > "$depfile" cat < "$tmpdepfile" >> "$depfile" sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; msvisualcpp) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout. "$@" || exit $? # Remove the call to Libtool. 
if test "$libtool" = yes; then while test "X$1" != 'X--mode=compile'; do shift done shift fi IFS=" " for arg do case "$arg" in -o) shift ;; $object) shift ;; "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI") set fnord "$@" shift shift ;; *) set fnord "$@" "$arg" shift shift ;; esac done "$@" -E 2>/dev/null | sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile" rm -f "$depfile" echo "$object : \\" > "$depfile" sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::'"$tab"'\1 \\:p' >> "$depfile" echo "$tab" >> "$depfile" sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile" rm -f "$tmpdepfile" ;; msvcmsys) # This case exists only to let depend.m4 do its work. It works by # looking at the text of this script. This case will never be run, # since it is checked for above. exit 1 ;; none) exec "$@" ;; *) echo "Unknown depmode $depmode" 1>&2 exit 1 ;; esac exit 0 # Local Variables: # mode: shell-script # sh-indentation: 2 # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-time-zone: "UTC" # time-stamp-end: "; # UTC" # End: nordugrid-arc-6.14.0/PaxHeaders.30264/config.sub0000644000000000000000000000013214152153420017312 xustar000000000000000030 mtime=1638455056.857923228 30 atime=1638455073.230169229 30 ctime=1638455095.732507337 nordugrid-arc-6.14.0/config.sub0000755000175000002070000010531514152153420017307 0ustar00mockbuildmock00000000000000#! /bin/sh # Configuration validation subroutine script. # Copyright 1992-2013 Free Software Foundation, Inc. timestamp='2013-04-24' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see . # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that # program. This Exception is an additional permission under section 7 # of the GNU General Public License, version 3 ("GPLv3"). # Please send patches with a ChangeLog entry to config-patches@gnu.org. # # Configuration subroutine to validate and canonicalize a configuration type. # Supply the specified configuration type as an argument. # If it is invalid, we print an error message on stderr and exit with code 1. # Otherwise, we print the canonical config type on stdout and succeed. # You can get the latest version of this script from: # http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD # This file is supposed to be the same for all GNU packages # and recognize all the CPU types, system types and aliases # that are meaningful with *any* GNU software. # Each package is responsible for reporting which valid configurations # it does not support. The user should be able to distinguish # a failure to support a valid configuration from a meaningless # configuration. 
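# Illustrative note, not part of the upstream config.sub script: it is
# normally run indirectly via config.guess and the AC_CANONICAL_* macros,
# but it can also be exercised by hand, e.g.
#
#   $ sh ./config.sub x86_64-linux
#   x86_64-pc-linux-gnu
#   $ sh ./config.sub sun4
#   sparc-sun-sunos4.1.1
#
# i.e. an alias or partial triplet is expanded to the canonical form
# described just below.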
# The goal of this file is to map all the various variations of a given # machine specification into a single specification in the form: # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM # or in some cases, the newer four-part form: # CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM # It is wrong to echo any other type of specification. me=`echo "$0" | sed -e 's,.*/,,'` usage="\ Usage: $0 [OPTION] CPU-MFR-OPSYS $0 [OPTION] ALIAS Canonicalize a configuration name. Operation modes: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit Report bugs and patches to ." version="\ GNU config.sub ($timestamp) Copyright 1992-2013 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." help=" Try \`$me --help' for more information." # Parse command line while test $# -gt 0 ; do case $1 in --time-stamp | --time* | -t ) echo "$timestamp" ; exit ;; --version | -v ) echo "$version" ; exit ;; --help | --h* | -h ) echo "$usage"; exit ;; -- ) # Stop option processing shift; break ;; - ) # Use stdin as input. break ;; -* ) echo "$me: invalid option $1$help" exit 1 ;; *local*) # First pass through any local machine types. echo $1 exit ;; * ) break ;; esac done case $# in 0) echo "$me: missing argument$help" >&2 exit 1;; 1) ;; *) echo "$me: too many arguments$help" >&2 exit 1;; esac # Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). # Here we must recognize all the valid KERNEL-OS combinations. maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` case $maybe_os in nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ knetbsd*-gnu* | netbsd*-gnu* | \ kopensolaris*-gnu* | \ storm-chaos* | os2-emx* | rtmk-nova*) os=-$maybe_os basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` ;; android-linux) os=-linux-android basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown ;; *) basic_machine=`echo $1 | sed 's/-[^-]*$//'` if [ $basic_machine != $1 ] then os=`echo $1 | sed 's/.*-/-/'` else os=; fi ;; esac ### Let's recognize common machines as not being operating systems so ### that things like config.sub decstation-3100 work. We also ### recognize some manufacturers as not being operating systems, so we ### can provide default operating systems below. case $os in -sun*os*) # Prevent following clause from handling this invalid input. 
;; -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ -apple | -axis | -knuth | -cray | -microblaze*) os= basic_machine=$1 ;; -bluegene*) os=-cnk ;; -sim | -cisco | -oki | -wec | -winbond) os= basic_machine=$1 ;; -scout) ;; -wrs) os=-vxworks basic_machine=$1 ;; -chorusos*) os=-chorusos basic_machine=$1 ;; -chorusrdb) os=-chorusrdb basic_machine=$1 ;; -hiux*) os=-hiuxwe2 ;; -sco6) os=-sco5v6 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco5) os=-sco3.2v5 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco4) os=-sco3.2v4 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco3.2.[4-9]*) os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco3.2v[4-9]*) # Don't forget version if it is 3.2v4 or newer. basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco5v6*) # Don't forget version if it is 3.2v4 or newer. basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco*) os=-sco3.2v2 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -udk*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -isc) os=-isc2.2 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -clix*) basic_machine=clipper-intergraph ;; -isc*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -lynx*178) os=-lynxos178 ;; -lynx*5) os=-lynxos5 ;; -lynx*) os=-lynxos ;; -ptx*) basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` ;; -windowsnt*) os=`echo $os | sed -e 's/windowsnt/winnt/'` ;; -psos*) os=-psos ;; -mint | -mint[0-9]*) basic_machine=m68k-atari os=-mint ;; esac # Decode aliases for certain CPU-COMPANY combinations. case $basic_machine in # Recognize the basic CPU types without company name. # Some are omitted here because they have special meanings below. 
1750a | 580 \ | a29k \ | aarch64 | aarch64_be \ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ | am33_2.0 \ | arc | arceb \ | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv7[arm] \ | avr | avr32 \ | be32 | be64 \ | bfin \ | c4x | clipper \ | d10v | d30v | dlx | dsp16xx \ | epiphany \ | fido | fr30 | frv \ | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ | hexagon \ | i370 | i860 | i960 | ia64 \ | ip2k | iq2000 \ | le32 | le64 \ | lm32 \ | m32c | m32r | m32rle | m68000 | m68k | m88k \ | maxq | mb | microblaze | microblazeel | mcore | mep | metag \ | mips | mipsbe | mipseb | mipsel | mipsle \ | mips16 \ | mips64 | mips64el \ | mips64octeon | mips64octeonel \ | mips64orion | mips64orionel \ | mips64r5900 | mips64r5900el \ | mips64vr | mips64vrel \ | mips64vr4100 | mips64vr4100el \ | mips64vr4300 | mips64vr4300el \ | mips64vr5000 | mips64vr5000el \ | mips64vr5900 | mips64vr5900el \ | mipsisa32 | mipsisa32el \ | mipsisa32r2 | mipsisa32r2el \ | mipsisa64 | mipsisa64el \ | mipsisa64r2 | mipsisa64r2el \ | mipsisa64sb1 | mipsisa64sb1el \ | mipsisa64sr71k | mipsisa64sr71kel \ | mipsr5900 | mipsr5900el \ | mipstx39 | mipstx39el \ | mn10200 | mn10300 \ | moxie \ | mt \ | msp430 \ | nds32 | nds32le | nds32be \ | nios | nios2 | nios2eb | nios2el \ | ns16k | ns32k \ | open8 \ | or1k | or32 \ | pdp10 | pdp11 | pj | pjl \ | powerpc | powerpc64 | powerpc64le | powerpcle \ | pyramid \ | rl78 | rx \ | score \ | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ | sh64 | sh64le \ | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \ | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ | spu \ | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \ | ubicom32 \ | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \ | we32k \ | x86 | xc16x | xstormy16 | xtensa \ | z8k | z80) basic_machine=$basic_machine-unknown ;; c54x) basic_machine=tic54x-unknown ;; c55x) basic_machine=tic55x-unknown ;; c6x) basic_machine=tic6x-unknown ;; m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | picochip) basic_machine=$basic_machine-unknown os=-none ;; m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) ;; ms1) basic_machine=mt-unknown ;; strongarm | thumb | xscale) basic_machine=arm-unknown ;; xgate) basic_machine=$basic_machine-unknown os=-none ;; xscaleeb) basic_machine=armeb-unknown ;; xscaleel) basic_machine=armel-unknown ;; # We use `pc' rather than `unknown' # because (1) that's what they normally are, and # (2) the word "unknown" tends to confuse beginning users. i*86 | x86_64) basic_machine=$basic_machine-pc ;; # Object if more than one company name word. *-*-*) echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 exit 1 ;; # Recognize the basic CPU types with company name. 
580-* \ | a29k-* \ | aarch64-* | aarch64_be-* \ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ | alphapca5[67]-* | alpha64pca5[67]-* | arc-* | arceb-* \ | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ | avr-* | avr32-* \ | be32-* | be64-* \ | bfin-* | bs2000-* \ | c[123]* | c30-* | [cjt]90-* | c4x-* \ | clipper-* | craynv-* | cydra-* \ | d10v-* | d30v-* | dlx-* \ | elxsi-* \ | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ | h8300-* | h8500-* \ | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ | hexagon-* \ | i*86-* | i860-* | i960-* | ia64-* \ | ip2k-* | iq2000-* \ | le32-* | le64-* \ | lm32-* \ | m32c-* | m32r-* | m32rle-* \ | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ | m88110-* | m88k-* | maxq-* | mcore-* | metag-* \ | microblaze-* | microblazeel-* \ | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ | mips16-* \ | mips64-* | mips64el-* \ | mips64octeon-* | mips64octeonel-* \ | mips64orion-* | mips64orionel-* \ | mips64r5900-* | mips64r5900el-* \ | mips64vr-* | mips64vrel-* \ | mips64vr4100-* | mips64vr4100el-* \ | mips64vr4300-* | mips64vr4300el-* \ | mips64vr5000-* | mips64vr5000el-* \ | mips64vr5900-* | mips64vr5900el-* \ | mipsisa32-* | mipsisa32el-* \ | mipsisa32r2-* | mipsisa32r2el-* \ | mipsisa64-* | mipsisa64el-* \ | mipsisa64r2-* | mipsisa64r2el-* \ | mipsisa64sb1-* | mipsisa64sb1el-* \ | mipsisa64sr71k-* | mipsisa64sr71kel-* \ | mipsr5900-* | mipsr5900el-* \ | mipstx39-* | mipstx39el-* \ | mmix-* \ | mt-* \ | msp430-* \ | nds32-* | nds32le-* | nds32be-* \ | nios-* | nios2-* | nios2eb-* | nios2el-* \ | none-* | np1-* | ns16k-* | ns32k-* \ | open8-* \ | orion-* \ | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ | pyramid-* \ | rl78-* | romp-* | rs6000-* | rx-* \ | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ | sparclite-* \ | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx?-* \ | tahoe-* \ | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ | tile*-* \ | tron-* \ | ubicom32-* \ | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ | vax-* \ | we32k-* \ | x86-* | x86_64-* | xc16x-* | xps100-* \ | xstormy16-* | xtensa*-* \ | ymp-* \ | z8k-* | z80-*) ;; # Recognize the basic CPU types without company name, with glob match. xtensa*) basic_machine=$basic_machine-unknown ;; # Recognize the various machine names and aliases which stand # for a CPU type and a company and sometimes even an OS. 
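# (Illustrative example, not part of the upstream script: these aliases may
# set both the machine and the default OS at once, e.g.
#
#   $ sh ./config.sub sun4sol2
#   sparc-sun-solaris2
#
# because the "sun4sol2" entry below sets basic_machine=sparc-sun and
# os=-solaris2 before canonicalization continues.)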
386bsd) basic_machine=i386-unknown os=-bsd ;; 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) basic_machine=m68000-att ;; 3b*) basic_machine=we32k-att ;; a29khif) basic_machine=a29k-amd os=-udi ;; abacus) basic_machine=abacus-unknown ;; adobe68k) basic_machine=m68010-adobe os=-scout ;; alliant | fx80) basic_machine=fx80-alliant ;; altos | altos3068) basic_machine=m68k-altos ;; am29k) basic_machine=a29k-none os=-bsd ;; amd64) basic_machine=x86_64-pc ;; amd64-*) basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'` ;; amdahl) basic_machine=580-amdahl os=-sysv ;; amiga | amiga-*) basic_machine=m68k-unknown ;; amigaos | amigados) basic_machine=m68k-unknown os=-amigaos ;; amigaunix | amix) basic_machine=m68k-unknown os=-sysv4 ;; apollo68) basic_machine=m68k-apollo os=-sysv ;; apollo68bsd) basic_machine=m68k-apollo os=-bsd ;; aros) basic_machine=i386-pc os=-aros ;; aux) basic_machine=m68k-apple os=-aux ;; balance) basic_machine=ns32k-sequent os=-dynix ;; blackfin) basic_machine=bfin-unknown os=-linux ;; blackfin-*) basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'` os=-linux ;; bluegene*) basic_machine=powerpc-ibm os=-cnk ;; c54x-*) basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'` ;; c55x-*) basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'` ;; c6x-*) basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'` ;; c90) basic_machine=c90-cray os=-unicos ;; cegcc) basic_machine=arm-unknown os=-cegcc ;; convex-c1) basic_machine=c1-convex os=-bsd ;; convex-c2) basic_machine=c2-convex os=-bsd ;; convex-c32) basic_machine=c32-convex os=-bsd ;; convex-c34) basic_machine=c34-convex os=-bsd ;; convex-c38) basic_machine=c38-convex os=-bsd ;; cray | j90) basic_machine=j90-cray os=-unicos ;; craynv) basic_machine=craynv-cray os=-unicosmp ;; cr16 | cr16-*) basic_machine=cr16-unknown os=-elf ;; crds | unos) basic_machine=m68k-crds ;; crisv32 | crisv32-* | etraxfs*) basic_machine=crisv32-axis ;; cris | cris-* | etrax*) basic_machine=cris-axis ;; crx) basic_machine=crx-unknown os=-elf ;; da30 | da30-*) basic_machine=m68k-da30 ;; decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) basic_machine=mips-dec ;; decsystem10* | dec10*) basic_machine=pdp10-dec os=-tops10 ;; decsystem20* | dec20*) basic_machine=pdp10-dec os=-tops20 ;; delta | 3300 | motorola-3300 | motorola-delta \ | 3300-motorola | delta-motorola) basic_machine=m68k-motorola ;; delta88) basic_machine=m88k-motorola os=-sysv3 ;; dicos) basic_machine=i686-pc os=-dicos ;; djgpp) basic_machine=i586-pc os=-msdosdjgpp ;; dpx20 | dpx20-*) basic_machine=rs6000-bull os=-bosx ;; dpx2* | dpx2*-bull) basic_machine=m68k-bull os=-sysv3 ;; ebmon29k) basic_machine=a29k-amd os=-ebmon ;; elxsi) basic_machine=elxsi-elxsi os=-bsd ;; encore | umax | mmax) basic_machine=ns32k-encore ;; es1800 | OSE68k | ose68k | ose | OSE) basic_machine=m68k-ericsson os=-ose ;; fx2800) basic_machine=i860-alliant ;; genix) basic_machine=ns32k-ns ;; gmicro) basic_machine=tron-gmicro os=-sysv ;; go32) basic_machine=i386-pc os=-go32 ;; h3050r* | hiux*) basic_machine=hppa1.1-hitachi os=-hiuxwe2 ;; h8300hms) basic_machine=h8300-hitachi os=-hms ;; h8300xray) basic_machine=h8300-hitachi os=-xray ;; h8500hms) basic_machine=h8500-hitachi os=-hms ;; harris) basic_machine=m88k-harris os=-sysv3 ;; hp300-*) basic_machine=m68k-hp ;; hp300bsd) basic_machine=m68k-hp os=-bsd ;; hp300hpux) basic_machine=m68k-hp os=-hpux ;; hp3k9[0-9][0-9] | hp9[0-9][0-9]) basic_machine=hppa1.0-hp ;; hp9k2[0-9][0-9] | hp9k31[0-9]) 
basic_machine=m68000-hp ;; hp9k3[2-9][0-9]) basic_machine=m68k-hp ;; hp9k6[0-9][0-9] | hp6[0-9][0-9]) basic_machine=hppa1.0-hp ;; hp9k7[0-79][0-9] | hp7[0-79][0-9]) basic_machine=hppa1.1-hp ;; hp9k78[0-9] | hp78[0-9]) # FIXME: really hppa2.0-hp basic_machine=hppa1.1-hp ;; hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) # FIXME: really hppa2.0-hp basic_machine=hppa1.1-hp ;; hp9k8[0-9][13679] | hp8[0-9][13679]) basic_machine=hppa1.1-hp ;; hp9k8[0-9][0-9] | hp8[0-9][0-9]) basic_machine=hppa1.0-hp ;; hppa-next) os=-nextstep3 ;; hppaosf) basic_machine=hppa1.1-hp os=-osf ;; hppro) basic_machine=hppa1.1-hp os=-proelf ;; i370-ibm* | ibm*) basic_machine=i370-ibm ;; i*86v32) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv32 ;; i*86v4*) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv4 ;; i*86v) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv ;; i*86sol2) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-solaris2 ;; i386mach) basic_machine=i386-mach os=-mach ;; i386-vsta | vsta) basic_machine=i386-unknown os=-vsta ;; iris | iris4d) basic_machine=mips-sgi case $os in -irix*) ;; *) os=-irix4 ;; esac ;; isi68 | isi) basic_machine=m68k-isi os=-sysv ;; m68knommu) basic_machine=m68k-unknown os=-linux ;; m68knommu-*) basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'` os=-linux ;; m88k-omron*) basic_machine=m88k-omron ;; magnum | m3230) basic_machine=mips-mips os=-sysv ;; merlin) basic_machine=ns32k-utek os=-sysv ;; microblaze*) basic_machine=microblaze-xilinx ;; mingw64) basic_machine=x86_64-pc os=-mingw64 ;; mingw32) basic_machine=i386-pc os=-mingw32 ;; mingw32ce) basic_machine=arm-unknown os=-mingw32ce ;; miniframe) basic_machine=m68000-convergent ;; *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) basic_machine=m68k-atari os=-mint ;; mips3*-*) basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` ;; mips3*) basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown ;; monitor) basic_machine=m68k-rom68k os=-coff ;; morphos) basic_machine=powerpc-unknown os=-morphos ;; msdos) basic_machine=i386-pc os=-msdos ;; ms1-*) basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` ;; msys) basic_machine=i386-pc os=-msys ;; mvs) basic_machine=i370-ibm os=-mvs ;; nacl) basic_machine=le32-unknown os=-nacl ;; ncr3000) basic_machine=i486-ncr os=-sysv4 ;; netbsd386) basic_machine=i386-unknown os=-netbsd ;; netwinder) basic_machine=armv4l-rebel os=-linux ;; news | news700 | news800 | news900) basic_machine=m68k-sony os=-newsos ;; news1000) basic_machine=m68030-sony os=-newsos ;; news-3600 | risc-news) basic_machine=mips-sony os=-newsos ;; necv70) basic_machine=v70-nec os=-sysv ;; next | m*-next ) basic_machine=m68k-next case $os in -nextstep* ) ;; -ns2*) os=-nextstep2 ;; *) os=-nextstep3 ;; esac ;; nh3000) basic_machine=m68k-harris os=-cxux ;; nh[45]000) basic_machine=m88k-harris os=-cxux ;; nindy960) basic_machine=i960-intel os=-nindy ;; mon960) basic_machine=i960-intel os=-mon960 ;; nonstopux) basic_machine=mips-compaq os=-nonstopux ;; np1) basic_machine=np1-gould ;; neo-tandem) basic_machine=neo-tandem ;; nse-tandem) basic_machine=nse-tandem ;; nsr-tandem) basic_machine=nsr-tandem ;; op50n-* | op60c-*) basic_machine=hppa1.1-oki os=-proelf ;; openrisc | openrisc-*) basic_machine=or32-unknown ;; os400) basic_machine=powerpc-ibm os=-os400 ;; OSE68000 | ose68000) basic_machine=m68000-ericsson os=-ose ;; os68k) basic_machine=m68k-none os=-os68k ;; pa-hitachi) basic_machine=hppa1.1-hitachi os=-hiuxwe2 ;; paragon) 
basic_machine=i860-intel os=-osf ;; parisc) basic_machine=hppa-unknown os=-linux ;; parisc-*) basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'` os=-linux ;; pbd) basic_machine=sparc-tti ;; pbb) basic_machine=m68k-tti ;; pc532 | pc532-*) basic_machine=ns32k-pc532 ;; pc98) basic_machine=i386-pc ;; pc98-*) basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentium | p5 | k5 | k6 | nexgen | viac3) basic_machine=i586-pc ;; pentiumpro | p6 | 6x86 | athlon | athlon_*) basic_machine=i686-pc ;; pentiumii | pentium2 | pentiumiii | pentium3) basic_machine=i686-pc ;; pentium4) basic_machine=i786-pc ;; pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentiumpro-* | p6-* | 6x86-* | athlon-*) basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentium4-*) basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pn) basic_machine=pn-gould ;; power) basic_machine=power-ibm ;; ppc | ppcbe) basic_machine=powerpc-unknown ;; ppc-* | ppcbe-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppcle | powerpclittle | ppc-le | powerpc-little) basic_machine=powerpcle-unknown ;; ppcle-* | powerpclittle-*) basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppc64) basic_machine=powerpc64-unknown ;; ppc64-* | ppc64p7-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppc64le | powerpc64little | ppc64-le | powerpc64-little) basic_machine=powerpc64le-unknown ;; ppc64le-* | powerpc64little-*) basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ps2) basic_machine=i386-ibm ;; pw32) basic_machine=i586-unknown os=-pw32 ;; rdos | rdos64) basic_machine=x86_64-pc os=-rdos ;; rdos32) basic_machine=i386-pc os=-rdos ;; rom68k) basic_machine=m68k-rom68k os=-coff ;; rm[46]00) basic_machine=mips-siemens ;; rtpc | rtpc-*) basic_machine=romp-ibm ;; s390 | s390-*) basic_machine=s390-ibm ;; s390x | s390x-*) basic_machine=s390x-ibm ;; sa29200) basic_machine=a29k-amd os=-udi ;; sb1) basic_machine=mipsisa64sb1-unknown ;; sb1el) basic_machine=mipsisa64sb1el-unknown ;; sde) basic_machine=mipsisa32-sde os=-elf ;; sei) basic_machine=mips-sei os=-seiux ;; sequent) basic_machine=i386-sequent ;; sh) basic_machine=sh-hitachi os=-hms ;; sh5el) basic_machine=sh5le-unknown ;; sh64) basic_machine=sh64-unknown ;; sparclite-wrs | simso-wrs) basic_machine=sparclite-wrs os=-vxworks ;; sps7) basic_machine=m68k-bull os=-sysv2 ;; spur) basic_machine=spur-unknown ;; st2000) basic_machine=m68k-tandem ;; stratus) basic_machine=i860-stratus os=-sysv4 ;; strongarm-* | thumb-*) basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'` ;; sun2) basic_machine=m68000-sun ;; sun2os3) basic_machine=m68000-sun os=-sunos3 ;; sun2os4) basic_machine=m68000-sun os=-sunos4 ;; sun3os3) basic_machine=m68k-sun os=-sunos3 ;; sun3os4) basic_machine=m68k-sun os=-sunos4 ;; sun4os3) basic_machine=sparc-sun os=-sunos3 ;; sun4os4) basic_machine=sparc-sun os=-sunos4 ;; sun4sol2) basic_machine=sparc-sun os=-solaris2 ;; sun3 | sun3-*) basic_machine=m68k-sun ;; sun4) basic_machine=sparc-sun ;; sun386 | sun386i | roadrunner) basic_machine=i386-sun ;; sv1) basic_machine=sv1-cray os=-unicos ;; symmetry) basic_machine=i386-sequent os=-dynix ;; t3e) basic_machine=alphaev5-cray os=-unicos ;; t90) basic_machine=t90-cray os=-unicos ;; tile*) basic_machine=$basic_machine-unknown os=-linux-gnu ;; 
tx39) basic_machine=mipstx39-unknown ;; tx39el) basic_machine=mipstx39el-unknown ;; toad1) basic_machine=pdp10-xkl os=-tops20 ;; tower | tower-32) basic_machine=m68k-ncr ;; tpf) basic_machine=s390x-ibm os=-tpf ;; udi29k) basic_machine=a29k-amd os=-udi ;; ultra3) basic_machine=a29k-nyu os=-sym1 ;; v810 | necv810) basic_machine=v810-nec os=-none ;; vaxv) basic_machine=vax-dec os=-sysv ;; vms) basic_machine=vax-dec os=-vms ;; vpp*|vx|vx-*) basic_machine=f301-fujitsu ;; vxworks960) basic_machine=i960-wrs os=-vxworks ;; vxworks68) basic_machine=m68k-wrs os=-vxworks ;; vxworks29k) basic_machine=a29k-wrs os=-vxworks ;; w65*) basic_machine=w65-wdc os=-none ;; w89k-*) basic_machine=hppa1.1-winbond os=-proelf ;; xbox) basic_machine=i686-pc os=-mingw32 ;; xps | xps100) basic_machine=xps100-honeywell ;; xscale-* | xscalee[bl]-*) basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'` ;; ymp) basic_machine=ymp-cray os=-unicos ;; z8k-*-coff) basic_machine=z8k-unknown os=-sim ;; z80-*-coff) basic_machine=z80-unknown os=-sim ;; none) basic_machine=none-none os=-none ;; # Here we handle the default manufacturer of certain CPU types. It is in # some cases the only manufacturer, in others, it is the most popular. w89k) basic_machine=hppa1.1-winbond ;; op50n) basic_machine=hppa1.1-oki ;; op60c) basic_machine=hppa1.1-oki ;; romp) basic_machine=romp-ibm ;; mmix) basic_machine=mmix-knuth ;; rs6000) basic_machine=rs6000-ibm ;; vax) basic_machine=vax-dec ;; pdp10) # there are many clones, so DEC is not a safe bet basic_machine=pdp10-unknown ;; pdp11) basic_machine=pdp11-dec ;; we32k) basic_machine=we32k-att ;; sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele) basic_machine=sh-unknown ;; sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v) basic_machine=sparc-sun ;; cydra) basic_machine=cydra-cydrome ;; orion) basic_machine=orion-highlevel ;; orion105) basic_machine=clipper-highlevel ;; mac | mpw | mac-mpw) basic_machine=m68k-apple ;; pmac | pmac-mpw) basic_machine=powerpc-apple ;; *-unknown) # Make sure to match an already-canonicalized machine name. ;; *) echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 exit 1 ;; esac # Here we canonicalize certain aliases for manufacturers. case $basic_machine in *-digital*) basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` ;; *-commodore*) basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` ;; *) ;; esac # Decode manufacturer-specific aliases for certain operating systems. if [ x"$os" != x"" ] then case $os in # First match some system type aliases # that might get confused with valid system types. # -solaris* is a basic system type, with this one exception. -auroraux) os=-auroraux ;; -solaris1 | -solaris1.*) os=`echo $os | sed -e 's|solaris1|sunos4|'` ;; -solaris) os=-solaris2 ;; -svr4*) os=-sysv4 ;; -unixware*) os=-sysv4.2uw ;; -gnu/linux*) os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` ;; # First accept the basic system types. # The portable systems comes first. # Each alternative MUST END IN A *, to match a version number. # -sysv* is not here because it comes later, after sysvr4. 
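# (Illustrative example, not part of the upstream script: this section
# normalizes OS aliases, e.g.
#
#   $ sh ./config.sub arm-unknown-linux
#   arm-unknown-linux-gnu
#
# via the -linux* rule further down, which rewrites "linux" to "linux-gnu";
# -sunos5* is likewise mapped to the corresponding -solaris2 name.)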
-gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ | -sym* | -kopensolaris* | -plan9* \ | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ | -aos* | -aros* \ | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ | -bitrig* | -openbsd* | -solidbsd* \ | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ | -chorusos* | -chorusrdb* | -cegcc* \ | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ | -mingw32* | -mingw64* | -linux-gnu* | -linux-android* \ | -linux-newlib* | -linux-musl* | -linux-uclibc* \ | -uxpv* | -beos* | -mpeix* | -udk* \ | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*) # Remember, each alternative MUST END IN *, to match a version number. ;; -qnx*) case $basic_machine in x86-* | i*86-*) ;; *) os=-nto$os ;; esac ;; -nto-qnx*) ;; -nto*) os=`echo $os | sed -e 's|nto|nto-qnx|'` ;; -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \ | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) ;; -mac*) os=`echo $os | sed -e 's|mac|macos|'` ;; -linux-dietlibc) os=-linux-dietlibc ;; -linux*) os=`echo $os | sed -e 's|linux|linux-gnu|'` ;; -sunos5*) os=`echo $os | sed -e 's|sunos5|solaris2|'` ;; -sunos6*) os=`echo $os | sed -e 's|sunos6|solaris3|'` ;; -opened*) os=-openedition ;; -os400*) os=-os400 ;; -wince*) os=-wince ;; -osfrose*) os=-osfrose ;; -osf*) os=-osf ;; -utek*) os=-bsd ;; -dynix*) os=-bsd ;; -acis*) os=-aos ;; -atheos*) os=-atheos ;; -syllable*) os=-syllable ;; -386bsd) os=-bsd ;; -ctix* | -uts*) os=-sysv ;; -nova*) os=-rtmk-nova ;; -ns2 ) os=-nextstep2 ;; -nsk*) os=-nsk ;; # Preserve the version number of sinix5. -sinix5.*) os=`echo $os | sed -e 's|sinix|sysv|'` ;; -sinix*) os=-sysv4 ;; -tpf*) os=-tpf ;; -triton*) os=-sysv3 ;; -oss*) os=-sysv3 ;; -svr4) os=-sysv4 ;; -svr3) os=-sysv3 ;; -sysvr4) os=-sysv4 ;; # This must come after -sysvr4. -sysv*) ;; -ose*) os=-ose ;; -es1800*) os=-ose ;; -xenix) os=-xenix ;; -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) os=-mint ;; -aros*) os=-aros ;; -zvmoe) os=-zvmoe ;; -dicos*) os=-dicos ;; -nacl*) ;; -none) ;; *) # Get rid of the `-' at the beginning of $os. os=`echo $os | sed 's/[^-]*-//'` echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 exit 1 ;; esac else # Here we handle the default operating systems that come with various machines. # The value should be what the vendor currently ships out the door with their # machine or put another way, the most popular os provided with the machine. # Note that if you're going to try to match "-MANUFACTURER" here (say, # "-sun"), then you have to tell the case statement up towards the top # that MANUFACTURER isn't an operating system. 
Otherwise, code above # will signal an error saying that MANUFACTURER isn't an operating # system, and we'll never get to this point. case $basic_machine in score-*) os=-elf ;; spu-*) os=-elf ;; *-acorn) os=-riscix1.2 ;; arm*-rebel) os=-linux ;; arm*-semi) os=-aout ;; c4x-* | tic4x-*) os=-coff ;; hexagon-*) os=-elf ;; tic54x-*) os=-coff ;; tic55x-*) os=-coff ;; tic6x-*) os=-coff ;; # This must come before the *-dec entry. pdp10-*) os=-tops20 ;; pdp11-*) os=-none ;; *-dec | vax-*) os=-ultrix4.2 ;; m68*-apollo) os=-domain ;; i386-sun) os=-sunos4.0.2 ;; m68000-sun) os=-sunos3 ;; m68*-cisco) os=-aout ;; mep-*) os=-elf ;; mips*-cisco) os=-elf ;; mips*-*) os=-elf ;; or1k-*) os=-elf ;; or32-*) os=-coff ;; *-tti) # must be before sparc entry or we get the wrong os. os=-sysv3 ;; sparc-* | *-sun) os=-sunos4.1.1 ;; *-be) os=-beos ;; *-haiku) os=-haiku ;; *-ibm) os=-aix ;; *-knuth) os=-mmixware ;; *-wec) os=-proelf ;; *-winbond) os=-proelf ;; *-oki) os=-proelf ;; *-hp) os=-hpux ;; *-hitachi) os=-hiux ;; i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) os=-sysv ;; *-cbm) os=-amigaos ;; *-dg) os=-dgux ;; *-dolphin) os=-sysv3 ;; m68k-ccur) os=-rtu ;; m88k-omron*) os=-luna ;; *-next ) os=-nextstep ;; *-sequent) os=-ptx ;; *-crds) os=-unos ;; *-ns) os=-genix ;; i370-*) os=-mvs ;; *-next) os=-nextstep3 ;; *-gould) os=-sysv ;; *-highlevel) os=-bsd ;; *-encore) os=-bsd ;; *-sgi) os=-irix ;; *-siemens) os=-sysv4 ;; *-masscomp) os=-rtu ;; f30[01]-fujitsu | f700-fujitsu) os=-uxpv ;; *-rom68k) os=-coff ;; *-*bug) os=-coff ;; *-apple) os=-macos ;; *-atari*) os=-mint ;; *) os=-none ;; esac fi # Here we handle the case where we know the os, and the CPU type, but not the # manufacturer. We pick the logical manufacturer. vendor=unknown case $basic_machine in *-unknown) case $os in -riscix*) vendor=acorn ;; -sunos*) vendor=sun ;; -cnk*|-aix*) vendor=ibm ;; -beos*) vendor=be ;; -hpux*) vendor=hp ;; -mpeix*) vendor=hp ;; -hiux*) vendor=hitachi ;; -unos*) vendor=crds ;; -dgux*) vendor=dg ;; -luna*) vendor=omron ;; -genix*) vendor=ns ;; -mvs* | -opened*) vendor=ibm ;; -os400*) vendor=ibm ;; -ptx*) vendor=sequent ;; -tpf*) vendor=ibm ;; -vxsim* | -vxworks* | -windiss*) vendor=wrs ;; -aux*) vendor=apple ;; -hms*) vendor=hitachi ;; -mpw* | -macos*) vendor=apple ;; -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) vendor=atari ;; -vos*) vendor=stratus ;; esac basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` ;; esac echo $basic_machine$os exit # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" # End: nordugrid-arc-6.14.0/PaxHeaders.30264/configure.ac0000644000000000000000000000013214152153376017632 xustar000000000000000030 mtime=1638455038.263643841 30 atime=1638455038.459646786 30 ctime=1638455095.721507171 nordugrid-arc-6.14.0/configure.ac0000644000175000002070000026775614152153376017646 0ustar00mockbuildmock00000000000000# -*- Autoconf -*- # Process this file with autoconf to produce a configure script. 
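# Illustrative sketch (not part of the upstream configure.ac): how the
# base/pre-version split performed just below behaves for a hypothetical
# pre-release VERSION value. Inside configure.ac the sed character classes are
# written with doubled brackets because of m4 quoting; plain shell uses single
# brackets:
#
#   VERSION=6.14.0rc1
#   baseversion=`echo $VERSION | sed 's/[^0-9.].*//'`   # -> 6.14.0
#   preversion=`echo $VERSION | sed 's/^[0-9.]*//'`     # -> rc1
#
# which would yield fedorarelease=0.rc1, debianversion=6.14.0~rc1 and
# fedorasetupopts="-q -n %{name}-%{version}rc1".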
AC_PREREQ(2.56)
AC_INIT([nordugrid-arc],m4_normalize(m4_include(VERSION)),[http://bugzilla.nordugrid.org/])
dnl serial-tests is not recognized before 1.12, and required after 1.13
m4_define([serial_tests], [
    m4_esyscmd([case `${AUTOMAKE:-automake} --version | head -n 1` in
        *1.11.*|*1.10.*|*1.9.*);;
        *) echo serial-tests;;
    esac])
])
AM_INIT_AUTOMAKE([foreign 1.9 tar-pax] serial_tests)
AC_CONFIG_SRCDIR([Makefile.am])
AC_CONFIG_HEADERS([config.h])
baseversion=`echo $VERSION | sed 's/[[^0-9.]].*//'`
preversion=`echo $VERSION | sed 's/^[[0-9.]]*//'`
if test "x$baseversion" = "x" ; then
  baseversion=$VERSION
  preversion=""
fi
if test "x$preversion" = "x" ; then
  fedorarelease="1"
  fedorasetupopts="-q"
  debianversion="$baseversion"
else
  fedorarelease="0.$preversion"
  fedorasetupopts="-q -n %{name}-%{version}$preversion"
  debianversion="$baseversion~$preversion"
fi
# numeric ARC_VERSION_* used for the API fall back to the current release series
# (e.g. when 'master' is specified in the VERSION file, "6.0.0" will be used)
ARC_VERSION_MAJOR=`echo $VERSION | awk -F. '{print match($1, /^[[0-9]]+$/) ? $1 : "6"}'`
ARC_VERSION_MINOR=`echo $VERSION | awk -F. '{print match($2, /[[^ ]]/) ? $2 : "0"}'`
ARC_VERSION_PATCH=`echo $VERSION | awk -F. '{print match($3, /[[^ ]]/) ? $3 : "0"}'`
ARC_VERSION_NUM=`printf "0x%02x%02x%02x" $ARC_VERSION_MAJOR $ARC_VERSION_MINOR $ARC_VERSION_PATCH`
ARC_VERSION=`echo $ARC_VERSION_MAJOR.$ARC_VERSION_MINOR.$ARC_VERSION_PATCH`
AC_SUBST(baseversion)
AC_SUBST(preversion)
AC_SUBST(fedorarelease)
AC_SUBST(fedorasetupopts)
AC_SUBST(debianversion)
AC_SUBST(ARC_VERSION_MAJOR)
AC_SUBST(ARC_VERSION_MINOR)
AC_SUBST(ARC_VERSION_PATCH)
AC_SUBST(ARC_VERSION_NUM)
AC_SUBST(ARC_VERSION)
# This macro was introduced in autoconf 2.57g? but we currently only require 2.56
m4_ifdef([AC_CONFIG_MACRO_DIR], [AC_CONFIG_MACRO_DIR([m4])])
m4_pattern_allow([AC_PATH_PROG])
m4_pattern_allow([AC_MSG_WARN])
AC_PROG_CXX
AC_PROG_CC_STDC
AC_PROG_CPP
AC_GNU_SOURCE
AC_PROG_AWK
AC_PROG_INSTALL
AC_PROG_LN_S
AC_PROG_MAKE_SET
AC_DISABLE_STATIC
AM_PROG_LIBTOOL
AC_PATH_PROG(PERL, perl, /usr/bin/perl, :)
# EL-5 compatibility. $(mkdir_p) is now obsolete.
test -n "$MKDIR_P" || MKDIR_P="$mkdir_p"
AC_SUBST([MKDIR_P])
# Use arc for "pkgdir" instead of nordugrid-arc (@PACKAGE@)
pkgdatadir='${datadir}/arc'
pkgincludedir='${includedir}/arc'
pkglibdir='${libdir}/arc'
extpkglibdir='${libdir}/arc/external'
pkglibexecdir='${libexecdir}/arc'
AC_SUBST(pkgdatadir)
AC_SUBST(pkgincludedir)
AC_SUBST(pkglibdir)
AC_SUBST(extpkglibdir)
AC_SUBST(pkglibexecdir)
ARC_API
ARC_RELATIVE_PATHS
AC_ARG_WITH(systemd-units-location,
    AC_HELP_STRING([--with-systemd-units-location=],
        [Location of the systemd unit files. [[None]]]),
    [ unitsdir="$withval" ],
    [ unitsdir= ]
)
AC_MSG_RESULT($unitsdir)
AC_SUBST(unitsdir)
AM_CONDITIONAL([SYSTEMD_UNITS_ENABLED],[test "x$unitsdir" != "x"])
AC_ARG_WITH(sysv-scripts-location,
    AC_HELP_STRING([--with-sysv-scripts-location=],
        [Location of the SYSV init scripts. [[autodetect]]]),
    [ initddirauto="no"
      initddir="$withval" ],
    [ initddirauto="yes"
      initddir=
      case "${host}" in
        *linux* | *kfreebsd* | *gnu* )
          for i in init.d rc.d/init.d rc.d; do
            if test -d "/etc/$i" -a !
-h "/etc/$i" ; then initddir="$sysconfdir/$i" break fi done if test -z "$initddir"; then AC_MSG_WARN(could not find a suitable location for the SYSV init scripts - not installing) fi ;; esac ] ) AC_MSG_RESULT($initddir) AC_SUBST(initddir) AM_CONDITIONAL([SYSV_SCRIPTS_ENABLED],[ ( test "x$initddirauto" == "xno" || test "x$unitsdir" = "x" ) && test "x$initddir" != "x"]) AC_ARG_WITH(cron-scripts-prefix, AC_HELP_STRING([--with-cron-scripts-prefix=], [Specify the location of the cron directory. [[SYSCONFDIR/cron.d]]]), [ cronddir="$withval" ], [ cronddir="$sysconfdir/cron.d" ] ) AC_SUBST(cronddir) # gettext AM_GNU_GETTEXT([external]) AM_GNU_GETTEXT_VERSION([0.17]) [[ -r $srcdir/po/POTFILES.in ]] || touch $srcdir/po/POTFILES.in # Portable 64bit file offsets AC_SYS_LARGEFILE # pkg-config needed for many checks AC_PATH_TOOL(PKG_CONFIG, pkg-config, no) if test "x$PKG_CONFIG" = "xno"; then AC_MSG_ERROR([ *** pkg-config not found]) else pkgconfigdir=${libdir}/pkgconfig AC_SUBST(pkgconfigdir) fi # Default enable/disable switches # Features enables_ldap=yes enables_mysql=no enables_systemd=no enables_swig_python=yes # Features directly related to components enables_cppunit=yes enables_python=yes enables_altpython=yes enables_pylint=yes enables_mock_dmc=no enables_gfal=no enables_s3=no enables_xrootd=yes enables_argus=no enables_xmlsec1=yes enables_dbjstore=yes enables_sqlitejstore=yes enables_ldns=yes # Libraries and plugins # Currently no fine-grained choice is supported. # Also this variable is used to check if source # build is needed at all because no component can # be built without HED. enables_hed=yes # Services enables_a_rex_service=yes enables_internal=no enables_gridftpd_service=yes enables_ldap_service=yes enables_candypond=yes enables_datadelivery_service=yes enables_monitor=yes # Clients enables_compute_client=yes enables_credentials_client=yes enables_data_client=yes enables_emies_client=yes enables_arcrest_client=yes # Documentation enables_doc=yes # ACIX cache index enables_acix=yes # Handle group enable/disable switches AC_ARG_ENABLE(all, AC_HELP_STRING([--disable-all], [disables all buildable components. Can be overwritten with --enable-* for group or specific component. It is also possible to use --enable-all to overwrite defaults for most of components.]), [ enables_a_rex_service=$enableval enables_internal=$enableval enables_gridftpd_service=$enableval enables_ldap_service=$enableval enables_monitor=$enableval enables_candypond=$enableval enables_datadelivery_service=$enableval enables_compute_client=$enableval enables_credentials_client=$enableval enables_echo_client=$enableval enables_data_client=$enableval enables_emies_client=$enableval enables_arcrest_client=$enableval enables_hed=$enableval enables_python=$enableval enables_altpython=$enableval enables_pylint=$enableval enables_mock_dmc=$enableval enables_gfal=$enableval enables_s3=$enableval enables_xrootd=$enableval enables_xmlsec1=$enableval enables_argus=$enableval enables_cppunit=$enableval enables_doc=$enableval enables_acix=$enableval enables_dbjstore=$enableval enables_ldns=$enableval enables_sqlitejstore=$enableval ], []) AC_ARG_ENABLE(all-clients, AC_HELP_STRING([--disable-all-clients], [disables all buildable client components. Can be overwritten with --enable-* for specific component. 
It is also possible to use --enable-all-clients to overwrite defaults and --enable-all.]), [ enables_compute_client=$enableval enables_credentials_client=$enableval enables_echo_client=$enableval enables_data_client=$enableval enables_emies_client=$enableval enables_arcrest_client=$enableval enables_doc=$enableval ], []) AC_ARG_ENABLE(all-data-clients, AC_HELP_STRING([--disable-all-data-clients], [disables all buildable client components providing data handling abilities. Can be overwritten with --enable-* for specific component. It is also possible to use --enable-all-data-clients to overwrite defaults, --enable-all and --enable-all-clients.]), [ enables_data_client=$enableval ], []) AC_ARG_ENABLE(all-services, AC_HELP_STRING([--disable-all-services], [disables all buildable service componets. Can be overwritten with --enable-* for specific component. It is also possible to use --enable-all-services to overwrite defaults and --enable-all.]), [ enables_a_rex_service=$enableval enables_gridftpd_service=$enableval enables_ldap_service=$enableval enables_monitor=$enableval enables_candypond=$enableval enables_datadelivery_service=$enableval enables_acix=$enableval ], []) # Be pedantic about compiler warnings. AC_ARG_ENABLE(pedantic-compile, AC_HELP_STRING([--enable-pedantic-compile], [add pedantic compiler flags]), [enables_pedantic_compile="yes"], [enables_pedantic_compile="no"]) if test "x$enables_pedantic_compile" = "xyes"; then # This check need to be enhanced. It won't work in case of cross-compilation # and if path to compiler is explicitly specified. if test x"$CXX" = x"g++"; then # GNU C/C++ flags AM_CXXFLAGS="-Wall -Wextra -Werror -Wno-sign-compare -Wno-unused" SAVE_CPPFLAGS=$CPPFLAGS AC_LANG_SAVE AC_LANG_CPLUSPLUS CPPFLAGS="$CPPFLAGS -Wno-unused-result" AC_TRY_COMPILE([],[], [ AM_CXXFLAGS="$AM_CXXFLAGS -Wno-unused-result" ], [ AC_MSG_NOTICE([compilation flag -Wno-unused-result is not supported]) ] ) AC_LANG_RESTORE CPPFLAGS=$SAVE_CPPFLAGS else # TODO: set generic flags for generic compiler AM_CXXFLAGS="" fi AC_SUBST(AM_CXXFLAGS) fi AM_CONDITIONAL([PEDANTIC_COMPILE], [test "x$enables_pedantic_compile" = "xyes"]) # Enable/disable switches for third-party. # Swig AC_ARG_ENABLE(swig-python, AC_HELP_STRING([--disable-swig-python], [disable SWIG python bindings]), [enables_swig_python=$enableval],[]) AC_ARG_ENABLE(swig, AC_HELP_STRING([--disable-swig], [disable all bindings through SWIG]), [enables_swig_python=$enableval],[]) if test "$enables_swig_python" = "yes"; then AC_PATH_PROGS(SWIG, swig) if test "x$SWIG" = "x"; then enables_swig="no" else swigver=`$SWIG -version 2>&1 | grep Version | sed 's/.* //'` swigver1=`echo $swigver | cut -d. -f1` swigver2=`echo $swigver | cut -d. -f2` swigver3=`echo $swigver | cut -d. -f3` if test $swigver1 -lt 1 || ( test $swigver1 -eq 1 && ( \ test $swigver2 -lt 3 || ( test $swigver2 -eq 3 && ( \ test $swigver3 -lt 25 ) ) ) ) ; then AC_MSG_NOTICE([swig is too old (< 1.3.25)]) SWIG="" enables_swig="no" elif test $swigver1 -eq 1 && test $swigver2 -eq 3 && test $swigver3 -eq 38 ; then AC_MSG_NOTICE([swig version 1.3.38 has bug which prevents it from being used for this software. Please upgrade or downgrade.]) SWIG="" enables_swig="no" else SWIG2="no" if test $swigver1 -ge 2 then SWIG2="yes" fi AC_SUBST(SWIG2) SWIG_PYTHON_NAMING="SwigPy" # In SWIG version 1.3.37 naming was changed from "PySwig" to "SwigPy". 
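# Illustrative sketch (not part of the upstream configure.ac): how the SWIG
# version gating in this block treats a few assumed `swig -version` values.
#   1.3.24 -> rejected (older than 1.3.25)
#   1.3.36 -> accepted, SWIG2=no,  naming falls back to "PySwig" just below
#   1.3.38 -> rejected (known-broken release, see the notice above)
#   2.0.12 -> accepted, SWIG2=yes, naming stays "SwigPy"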
if test $swigver1 -lt 1 || ( test $swigver1 -eq 1 && ( \ test $swigver2 -lt 3 || ( test $swigver2 -eq 3 && ( \ test $swigver3 -lt 37 ) ) ) ) ; then SWIG_PYTHON_NAMING="PySwig" fi AC_SUBST(SWIG_PYTHON_NAMING) fi fi else SWIG="" fi AM_CONDITIONAL([SWIG_ENABLED],[test "x$enables_swig" = "xyes"]) AC_ARG_ENABLE(hed, AC_HELP_STRING([--disable-hed], [disable building HED libraries and plugins. Do not do that unless You do not want to build anything. Even in that case better use --disable-all.]), [enables_hed=$enableval],[]) # Python if test "$enables_hed" = "yes"; then AC_ARG_ENABLE(python, AC_HELP_STRING([--disable-python], [disable Python components]), [enables_python=$enableval enables_swig_python=$enableval], []) if test "$enables_python" = "yes"; then AC_ARG_WITH(python, AC_HELP_STRING([--with-python=(PYTHON)], [specify python program from PATH])) # We do not look for python binary when cross-compiling # but we need to make the variable non-empty if test "${build}" = "${host}"; then AC_PATH_PROGS(PYTHON, $with_python python) else PYTHON=/usr/bin/python fi if test "X$PYTHON" != "X"; then PYNAME=`basename $PYTHON` PKG_CHECK_MODULES(PYTHON, $PYNAME-embed, [ PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME-embed` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` ],[ PKG_CHECK_MODULES(PYTHON, $PYNAME, [ PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` ],[ PYNAME=python-`$PYTHON -c 'import sys; print(sys.version[[:3]])'` PKG_CHECK_MODULES(PYTHON, $PYNAME-embed, [ PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME-embed` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` ],[ PKG_CHECK_MODULES(PYTHON, $PYNAME, [ PYTHON_VERSION=`$PKG_CONFIG --modversion $PYNAME` PYTHON_MAJOR=`echo $PYTHON_VERSION|cut -f1 -d.` ],[ PYTHON_VERSION=`$PYTHON -c 'import sys; print(sys.version[[:3]])'` PYTHON_MAJOR=`$PYTHON -c 'import sys; print(sys.version_info[[0]])'` PYTHON_CFLAGS=-I`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` PY_LIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` PY_SYSLIBS=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` PY_LIBDEST=`$PYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` PYTHON_LIBS="$PY_LIBS $PY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$PYTHON_LIBS $LDFLAGS" AC_CHECK_LIB([python$PYTHON_VERSION], [Py_Initialize],[ AC_MSG_NOTICE([No additional path to python library needed]) PYTHON_LIBS="-lpython$PYTHON_VERSION $PYTHON_LIBS"],[ LDFLAGS="-L$PY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value AC_CHECK_LIB([python$PYTHON_VERSION], [Py_Finalize],[ AC_MSG_NOTICE([Adding path to python library]) PYTHON_LIBS="-L$PY_LIBDEST/config -lpython$PYTHON_VERSION $PYTHON_LIBS"],[ PYTHON_LIBS=""])]) LDFLAGS=$SAVE_LDFLAGS ])])])]) AC_SUBST(PYTHON_VERSION) AC_SUBST(PYTHON_CFLAGS) AC_SUBST(PYTHON_LIBS) if test "${build}" = "${host}"; then PYTHON_EXT_SUFFIX=`$PYTHON -c "from distutils import sysconfig; v = sysconfig.get_config_vars(); print(v.get('EXT_SUFFIX', v.get('SO')))" | sed s/None//` else PYTHON_EXT_SUFFIX="" fi AC_SUBST(PYTHON_EXT_SUFFIX) AC_ARG_WITH(python-site-arch, AC_HELP_STRING([--with-python-site-arch=directory], [Direcory where Python modules will be installed - defaults is to query the Python binary])) if test "X$PYTHON_SITE_ARCH" = "X"; then if test "${build}" = "${host}"; then 
PYTHON_SITE_ARCH=`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_lib(1,0,"${prefix}"))'` else PYTHON_SITE_ARCH="${libdir}/python${PYTHON_VERSION}/site-packages" fi fi AC_SUBST(PYTHON_SITE_ARCH) AC_ARG_WITH(python-site-lib, AC_HELP_STRING([--with-python-site-lib=directory], [Direcory where Python modules will be installed - defaults is to query the Python binary])) if test "X$PYTHON_SITE_LIB" = "X"; then if test "${build}" = "${host}"; then PYTHON_SITE_LIB=`$PYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_lib(0,0,"${prefix}"))'` else PYTHON_SITE_LIB="${libdir}/python${PYTHON_VERSION}/site-packages" fi fi AC_SUBST(PYTHON_SITE_LIB) SAVE_LDFLAGS=$LDFLAGS SAVE_CPPFLAGS=$CPPFLAGS LDFLAGS="$LDFLAGS $PYTHON_LIBS" CPPFLAGS="$CPPFLAGS $PYTHON_CFLAGS" AC_CHECK_HEADER(Python.h, [pythonh="yes"], [pythonh="no"]) AC_TRY_COMPILE([#include ], [Py_InitializeEx(0)],[ AC_MSG_NOTICE([Python includes functionality of skipping initialization registration of signal handlers]) AC_DEFINE(HAVE_PYTHON_INITIALIZE_EX, 1, [Define if you have Py_InitializeEx function]) enables_python_service="yes" ],[ AC_MSG_NOTICE([Python does not include functionality of skipping initialization registration of signal handlers, since its version is below 2.4]) enables_python_service="no" ]) LDFLAGS=$SAVE_LDFLAGS CPPFLAGS=$SAVE_CPPFLAGS fi if test "X$PYTHON" = "X"; then AC_MSG_NOTICE([Missing Python - skipping Python components]) enables_python=no elif test "X$PYTHON_SITE_ARCH" = "X" || test "X$PYTHON_SITE_LIB" = "X"; then AC_MSG_NOTICE([Missing python site packages location - skipping Python components]) enables_python=no else AC_MSG_NOTICE([Python available: $PYTHON_VERSION]) fi if test "x$enables_python" != "xyes"; then AC_MSG_NOTICE([Missing Python - skipping Python bindings]) enables_swig_python=no elif test "X$PYTHON_LIBS" = "X"; then AC_MSG_NOTICE([Missing Python library - skipping Python bindings]) enables_swig_python=no elif test "X$pythonh" != "Xyes"; then AC_MSG_NOTICE([Missing Python header - skipping Python bindings]) enables_swig_python=no elif ! 
test -f python/arc_wrap.cpp && test "x$enables_swig_python" != "xyes"; then AC_MSG_NOTICE([Missing pre-compiled Python wrapper and SWIG - skipping Python bindings]) enables_swig_python=no fi fi fi AC_MSG_NOTICE([Python enabled: $enables_python]) AC_MSG_NOTICE([Python SWIG bindings enabled: $enables_swig_python]) AM_CONDITIONAL([PYTHON_ENABLED],[test "x$enables_python" = "xyes"]) AM_CONDITIONAL([PYTHON3], [test "x$enables_python" = "xyes" && test "x$PYTHON_MAJOR" = "x3"]) AM_CONDITIONAL([PYTHON_SWIG_ENABLED],[test "x$enables_swig_python" = "xyes"]) AM_CONDITIONAL([PYTHON_SERVICE],[test "x$enables_swig_python" = "xyes" && test "x$enables_python_service" = "xyes"]) # Alternative Python if test "$enables_hed" = "yes"; then AC_ARG_ENABLE(altpython, AC_HELP_STRING([--disable-altpython], [enable alternative Python binding]), [enables_altpython=$enableval], []) if test "$enables_altpython" = "yes"; then AC_ARG_WITH(altpython, AC_HELP_STRING([--with-altpython=(PYTHON)], [specify alternative python program from PATH])) AC_PATH_PROGS(ALTPYTHON, $with_altpython) if test "X$ALTPYTHON" != "X"; then ALTPYNAME=`basename $ALTPYTHON` PKG_CHECK_MODULES(ALTPYTHON, $ALTPYNAME-embed, [ ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME-embed` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` ],[ PKG_CHECK_MODULES(ALTPYTHON, $ALTPYNAME, [ ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` ],[ ALTPYNAME=python-`$ALTPYTHON -c 'import sys; print(sys.version[[:3]])'` PKG_CHECK_MODULES(ALTPYTHON, $ALTPYNAME-embed, [ ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME-embed` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` ],[ PKG_CHECK_MODULES(ALTPYTHON, $ALTPYNAME, [ ALTPYTHON_VERSION=`$PKG_CONFIG --modversion $ALTPYNAME` ALTPYTHON_MAJOR=`echo $ALTPYTHON_VERSION|cut -f1 -d.` ],[ ALTPYTHON_VERSION=`$ALTPYTHON -c 'import sys; print(sys.version[[:3]])'` ALTPYTHON_MAJOR=`$ALTPYTHON -c 'import sys; print(sys.version_info[[0]])'` ALTPYTHON_CFLAGS=-I`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_inc())'` ALTPY_LIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBS'))" | sed s/None//` ALTPY_SYSLIBS=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('SYSLIBS'))" | sed s/None//` ALTPY_LIBDEST=`$ALTPYTHON -c "from distutils import sysconfig; print(sysconfig.get_config_vars().get('LIBDEST'))" | sed s/None//` ALTPYTHON_LIBS="$ALTPY_LIBS $ALTPY_SYSLIBS" SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$ALTPYTHON_LIBS $LDFLAGS" AC_CHECK_LIB([python$ALTPYTHON_VERSION], [Py_Initialize],[ AC_MSG_NOTICE([No additional path to python library needed]) ALTPYTHON_LIBS="-lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS"],[ LDFLAGS="-L$ALTPY_LIBDEST/config $LDFLAGS" # check a different symbol or else configure will used cached value AC_CHECK_LIB([python$ALTPYTHON_VERSION], [Py_Finalize],[ AC_MSG_NOTICE([Adding path to python library]) ALTPYTHON_LIBS="-L$ALTPY_LIBDEST/config -lpython$ALTPYTHON_VERSION $ALTPYTHON_LIBS"],[ ALTPYTHON_LIBS=""])]) LDFLAGS=$SAVE_LDFLAGS ])])])]) AC_SUBST(ALTPYTHON_VERSION) AC_SUBST(ALTPYTHON_CFLAGS) AC_SUBST(ALTPYTHON_LIBS) ALTPYTHON_EXT_SUFFIX=`$ALTPYTHON -c "from distutils import sysconfig; v = sysconfig.get_config_vars(); print(v.get('EXT_SUFFIX', v.get('SO')))" | sed s/None//` AC_SUBST(ALTPYTHON_EXT_SUFFIX) AC_ARG_WITH(altpython-site-arch, AC_HELP_STRING([--with-altpython-site-arch=directory], [Direcory where Python modules will be installed - defaults is 
to query the Python binary])) if test "X$ALTPYTHON_SITE_ARCH" = "X"; then ALTPYTHON_SITE_ARCH=`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_lib(1,0,"${prefix}"))'` fi AC_SUBST(ALTPYTHON_SITE_ARCH) AC_ARG_WITH(altpython-site-lib, AC_HELP_STRING([--with-altpython-site-lib=directory], [Direcory where Python modules will be installed - defaults is to query the Python binary])) if test "X$ALTPYTHON_SITE_LIB" = "X"; then ALTPYTHON_SITE_LIB=`$ALTPYTHON -c 'from distutils import sysconfig; print(sysconfig.get_python_lib(0,0,"${prefix}"))'` fi AC_SUBST(ALTPYTHON_SITE_LIB) SAVE_LDFLAGS=$LDFLAGS SAVE_CPPFLAGS=$CPPFLAGS LDFLAGS="$LDFLAGS $ALTPYTHON_LIBS" CPPFLAGS="$CPPFLAGS $ALTPYTHON_CFLAGS" AC_CHECK_HEADER(Python.h, [altpythonh="yes"], [altpythonh="no"]) LDFLAGS=$SAVE_LDFLAGS CPPFLAGS=$SAVE_CPPFLAGS fi if test "X$ALTPYTHON" = "X"; then AC_MSG_NOTICE([Missing alternative Python - skipping alternative Python]) enables_altpython=no elif test "X$ALTPYTHON_LIBS" = "X"; then AC_MSG_NOTICE([Missing alternative Python library - skipping alternative Python bindings]) enables_altpython=no elif test "X$altpythonh" != "Xyes"; then AC_MSG_NOTICE([Missing alternative Python header - skipping alternative Python bindings]) enables_altpython=no elif test "X$ALTPYTHON_SITE_ARCH" = "X" || test "X$ALTPYTHON_SITE_LIB" = "X"; then AC_MSG_NOTICE([Missing python site packages location - skipping Python bindings]) enables_altpython=no else AC_MSG_NOTICE([Alternative Python available: $ALTPYTHON_VERSION]) fi if test "x$enables_altpython" != "xyes"; then AC_MSG_NOTICE([Missing alternative Python - skipping alternative Python bindings]) enables_altpython=no elif ! test -f python/arc_wrap.cpp && test "x$enables_swig_python" != "xyes"; then AC_MSG_NOTICE([Missing pre-compiled Python wrapper and SWIG - skipping alternative Python bindings]) enables_altpython=no fi fi fi AC_MSG_NOTICE([Alternative Python enabled: $enables_altpython]) AM_CONDITIONAL([ALTPYTHON_ENABLED],[test "x$enables_altpython" = "xyes"]) AM_CONDITIONAL([ALTPYTHON3], [test "x$enables_altpython" = "xyes" && test "x$ALTPYTHON_MAJOR" = "x3"]) # check for pylint dnl Check if pylint is explicitly disabled. if test "$enables_hed" = "yes"; then AC_ARG_ENABLE(pylint, AC_HELP_STRING([--disable-pylint], [disable python example checking using pylint]), [enables_pylint=$enableval],[]) AC_PATH_PROGS(PYLINT, pylint) if test "x$PYLINT" = "x"; then enables_pylint="no" else PYLINT_VERSION=`$PYLINT --version 2> /dev/null | sed -n 's/^pylint \([[0-9.]]*\).*/\1/p'` # Check if pylint supports the following arguments, otherwise disable pylint (python example checking). # Do not generate report # Disable convention and recommendation messages - we are only interested in fatals, errors and warnings. PYLINT_ARGS="--reports=no --disable=C,R" if $PYLINT $PYLINT_ARGS /dev/null > /dev/null 2>&1 ; then AC_MSG_NOTICE([pylint version $PYLINT_VERSION found - version ok]) enables_pylint="yes" else AC_MSG_NOTICE([pylint version $PYLINT_VERSION found - bad version]) enables_pylint="no" PYLINT_ARGS="" fi AC_SUBST(PYLINT_ARGS) fi # Check if the --disable=W0221 option is supported # W0221: Disable arguments differ messages since Swig uses tuple syntax (*args). if test "$enables_pylint" = "yes"; then PYLINT_ARGS_ARGUMENTS_DIFFER="--disable=W0221" if ! 
$PYLINT $PYLINT_ARGS $PYLINT_ARGS_ARGUMENTS_DIFFER /dev/null > /dev/null 2>&1 ; then PYLINT_ARGS_ARGUMENTS_DIFFER="" fi AC_SUBST(PYLINT_ARGS_ARGUMENTS_DIFFER) fi fi AM_CONDITIONAL([PYLINT_ENABLED], [test "x$enables_pylint" = "xyes"]) AC_MSG_NOTICE([Python example checking with pylint enabled: $enables_pylint]) # check systemd daemon integration AC_ARG_ENABLE(systemd, AC_HELP_STRING([--enable-systemd], [enable use of the systemd daemon integration features]),[enables_systemd="$enableval"],[]) if test "x$enables_systemd" = "xyes"; then systemd_daemon_save_LIBS=$LIBS LIBS= AC_SEARCH_LIBS(sd_listen_fds,[systemd systemd-daemon], [have_sd_listen_fds=yes],[have_sd_listen_fds=no],$systemd_daemon_save_LIBS) AC_SEARCH_LIBS(sd_notify,[systemd systemd-daemon], [have_sd_notify=yes],[have_sd_notify=no],$systemd_daemon_save_LIBS) AC_CHECK_HEADERS(systemd/sd-daemon.h, [have_systemd_sd_daemon_h=yes],[have_systemd_sd_daemon_h=no]) if test x"$have_sd_listen_fds" = x"yes" && \ test x"$have_sd_notify" = x"yes" && \ test x"$have_systemd_sd_daemon_h" = x"yes"; then AC_DEFINE([HAVE_SYSTEMD_DAEMON],[1],[Define if you have systemd daemon]) SYSTEMD_DAEMON_LIBS=$LIBS else AC_MSG_FAILURE([--enable-systemd was given, but test for systemd libraries had failed]) fi LIBS=$systemd_daemon_save_LIBS fi AC_SUBST(SYSTEMD_DAEMON_LIBS) # check gthread if test "$enables_hed" = "yes"; then PKG_CHECK_MODULES(GTHREAD, [gthread-2.0 >= 2.4.7]) AC_SUBST(GTHREAD_CFLAGS) AC_SUBST(GTHREAD_LIBS) fi # check glibmm # check for giomm which became a part of glibmm as of version 2.16 if test "$enables_hed" = "yes"; then "$PKG_CONFIG" giomm-2.4 if test "$?" = '1'; then PKG_CHECK_MODULES(GLIBMM, [glibmm-2.4 >= 2.4.7]) else PKG_CHECK_MODULES(GLIBMM, [giomm-2.4]) AC_DEFINE(HAVE_GIOMM, 1, [define if giomm is supported in glibmm]) fi AC_SUBST(GLIBMM_CFLAGS) AC_SUBST(GLIBMM_LIBS) SAVE_CPPFLAGS=$CPPFLAGS AC_LANG_SAVE AC_LANG_CPLUSPLUS CPPFLAGS="$CPPFLAGS $GLIBMM_CFLAGS" AC_CHECK_HEADER([glibmm/optioncontext.h], [ AC_TRY_COMPILE([#include ], [Glib::OptionContext ctx; ctx.set_summary("summary")], [ AC_DEFINE(HAVE_GLIBMM_OPTIONCONTEXT_SET_SUMMARY, 1, [define if glibmm has Glib::OptionContext::set_summary()]) AC_MSG_NOTICE([using glibmm command line parsing]) ], [ AC_MSG_NOTICE([using getopt_long command line parsing]) ] ) AC_TRY_COMPILE([#include ], [Glib::OptionContext ctx; ctx.get_help();],[ AC_DEFINE(HAVE_GLIBMM_OPTIONCONTEXT_GET_HELP, 1, [define if glibmm has Glib::OptionContext::get_help()]) ], [ ] ) ]) AC_TRY_COMPILE([#include ],[Glib::ModuleFlags flags = Glib::MODULE_BIND_LOCAL;],[glibmm_bind_local=yes],[glibmm_bind_local=no]) if test "$glibmm_bind_local" = yes; then AC_DEFINE(HAVE_GLIBMM_BIND_LOCAL, 1, [define if glibmm have support local symbol resolution in shared libraries]) else AC_MSG_NOTICE([WARNING: glibmm has no way to limit scope of symbols of shared libraries. Make sure external libraries used by plugins have no conflicting symbols. HINT: use Globus compiled against system OpenSSL library.]) fi AC_TRY_COMPILE([#include ],[Glib::getenv("");],[glibmm_getenv=yes],[glibmm_getenv=no]) if test "$glibmm_getenv" = yes; then AC_DEFINE(HAVE_GLIBMM_GETENV, 1, [define if glibmm have getenv operations]) else AC_MSG_NOTICE([WARNING: glibmm has no support for getenv. 
Usage of libc getenv is unsafe in multi-threaded applications.]) fi AC_TRY_COMPILE([#include ],[Glib::setenv("", "");],[glibmm_setenv=yes],[glibmm_setenv=no]) if test "$glibmm_setenv" = yes; then AC_DEFINE(HAVE_GLIBMM_SETENV, 1, [define if glibmm have setenv operations]) else AC_MSG_NOTICE([WARNING: glibmm has no support for setenv. Usage of libc setenv may be unsafe in multi-threaded applications.]) fi AC_TRY_COMPILE([#include ],[Glib::unsetenv("");],[glibmm_unsetenv=yes],[glibmm_unsetenv=no]) if test "$glibmm_unsetenv" = yes; then AC_DEFINE(HAVE_GLIBMM_UNSETENV, 1, [define if glibmm have unsetenv operations]) else AC_MSG_NOTICE([WARNING: glibmm has no support for unsetenv. Usage of libc unsetenv may be unsafe in multi-threaded applications.]) fi AC_TRY_COMPILE([#include ],[Glib::listenv();],[glibmm_listenv=yes],[glibmm_listenv=no]) if test "$glibmm_listenv" = yes; then AC_DEFINE(HAVE_GLIBMM_LISTENV, 1, [define if glibmm have listenv operations]) else AC_MSG_NOTICE([WARNING: glibmm has no support for listenv. Usage of libc environ is unsafe in multi-threaded applications.]) fi AC_LANG_RESTORE CPPFLAGS=$SAVE_CPPFLAGS fi # check libxml if test "$enables_hed" = "yes"; then PKG_CHECK_MODULES(LIBXML2, [libxml-2.0 >= 2.4.0]) AC_SUBST(LIBXML2_CFLAGS) AC_SUBST(LIBXML2_LIBS) fi # check openssl if test "$enables_hed" = "yes"; then PKG_CHECK_MODULES(OPENSSL, [openssl >= 1.0.0]) PKG_CHECK_MODULES(OPENSSL_1_1, [openssl >= 1.1.0], [ OPENSSL_CFLAGS="$OPENSSL_CFLAGS -DOPENSSL_API_COMPAT=0x10100000L" AC_MSG_NOTICE([Forcing off deprecated functions for OpenSSL >= 1.1]) ], [ AC_MSG_NOTICE([OpenSSL is pre-1.1]) ]) AC_SUBST(OPENSSL_CFLAGS) AC_SUBST(OPENSSL_LIBS) fi # Check for available *_method functions in OpenSSL SAVE_CPPFLAGS=$CPPFLAGS SAVE_LIBS=$LIBS CPPFLAGS="$CPPFLAGS $OPENSSL_CFLAGS" LIBS="$LIBS $OPENSSL_LIBS" AC_LANG_PUSH([C++]) AC_COMPILE_IFELSE( [AC_LANG_PROGRAM([[ #include void _test(void) { (void)SSLv3_method(); } ]])], [AC_DEFINE(HAVE_SSLV3_METHOD,1,[define if SSLv3_method is available])], [AC_MSG_NOTICE([No SSLv3_method function avialable])]) AC_COMPILE_IFELSE( [AC_LANG_PROGRAM([[ #include void _test(void) { (void)TLSv1_method(); } ]])], [AC_DEFINE(HAVE_TLSV1_METHOD,1,[define if TLSv1_method is available])], [AC_MSG_NOTICE([No TLSv1_method function avialable])]) AC_COMPILE_IFELSE( [AC_LANG_PROGRAM([[ #include void _test(void) { (void)TLSv1_1_method(); } ]])], [AC_DEFINE(HAVE_TLSV1_1_METHOD,1,[define if TLSv1_1_method is available])], [AC_MSG_NOTICE([No TLSv1_1_method function avialable])]) AC_COMPILE_IFELSE( [AC_LANG_PROGRAM([[ #include void _test(void) { (void)TLSv1_2_method(); } ]])], [AC_DEFINE(HAVE_TLSV1_2_METHOD,1,[define if TLSv1_2_method is available])], [AC_MSG_NOTICE([No TLSv1_2_method function avialable])]) AC_COMPILE_IFELSE( [AC_LANG_PROGRAM([[ #include void _test(void) { (void)TLS_method(); } ]])], [AC_DEFINE(HAVE_TLS_METHOD,1,[define if TLS_method is available])], [AC_MSG_NOTICE([No TLS_method function avialable])]) AC_COMPILE_IFELSE( [AC_LANG_PROGRAM([[ #include void _test(void) { (void)DTLSv1_method(); } ]])], [AC_DEFINE(HAVE_DTLSV1_METHOD,1,[define if DTLSv1_method is available])], [AC_MSG_NOTICE([No DTLSv1_method function avialable])]) AC_COMPILE_IFELSE( [AC_LANG_PROGRAM([[ #include void _test(void) { (void)DTLSv1_2_method(); } ]])], [AC_DEFINE(HAVE_DTLSV1_2_METHOD,1,[define if DTLSv1_2_method is available])], [AC_MSG_NOTICE([No DTLSv1_2_method function avialable])]) AC_COMPILE_IFELSE( [AC_LANG_PROGRAM([[ #include void _test(void) { (void)DTLS_method(); } ]])], 
[AC_DEFINE(HAVE_DTLS_METHOD,1,[define if DTLS_method is available])], [AC_MSG_NOTICE([No DTLS_method function avialable])]) AC_LANG_POP([C++]) CPPFLAGS=$SAVE_CPPFLAGS LIBS=$SAVE_LIBS #check mozilla nss enables_nss=yes NSS_INSTALLED=no dnl Check if nss lib is explicitly enabled, default is disable. AC_ARG_ENABLE(nss, AC_HELP_STRING([--disable-nss], [disable use of the mozilla nss library]),[enables_nss="$enableval"],[]) if test "$enables_nss" = "yes"; then PKG_CHECK_MODULES(NSS, [nss >= 3.10], [NSS_INSTALLED=yes] , [ AC_MSG_WARN([Cannot locate nss lib]) NSS_INSTALLED=no enables_nss=no ]) if test "x$NSS_INSTALLED" = "xyes" ; then AC_DEFINE(HAVE_NSS, 1, [define if NSS is enabled and available]) fi fi AC_SUBST(NSS_CFLAGS) AC_SUBST(NSS_LIBS) AM_CONDITIONAL([NSS_ENABLED], test x$NSS_INSTALLED = xyes) #check SQLite SQLITE_INSTALLED=no PKG_CHECK_MODULES(SQLITE, [sqlite3 >= 3.6], [SQLITE_INSTALLED=yes] , [ AC_MSG_WARN([Cannot locate SQLite newer than 3.6]) SQLITE_INSTALLED=no enables_sqlite=no ]) if test "x$SQLITE_INSTALLED" = "xyes" ; then AC_DEFINE(HAVE_SQLITE, 1, [define if SQLite is available]) # Check for function available since 3.8 SAVE_CFLAGS=$CFLAGS SAVE_LIBS=$LIBS CFLAGS="$CFLAGS $SQLITE_CFLAGS" LIBS="$LIBS $SQLITE_LIBS" AC_CHECK_FUNCS(sqlite3_errstr) CFLAGS=$SAVE_CFLAGS LIBS=$SAVE_LIBS fi AC_SUBST(SQLITE_CFLAGS) AC_SUBST(SQLITE_LIBS) AM_CONDITIONAL([SQLITE_ENABLED], test x$SQLITE_INSTALLED = xyes) # check cppunit if test "$enables_hed" = "yes"; then AC_ARG_ENABLE(cppunit, AC_HELP_STRING([--disable-cppunit], [disable cppunit-based UNIT testing of code]),[enables_cppunit=$enableval],[]) if test "$enables_cppunit" = "yes"; then PKG_CHECK_MODULES(CPPUNIT, [cppunit],[], [AC_PATH_PROG(CPPUNIT_CONFIG, cppunit-config, no) if test "x$CPPUNIT_CONFIG" = "xno"; then AC_MSG_WARN([cppunit-config not found - no UNIT testing will be performed]) CPPUNIT_CFLAGS= CPPUNIT_LIBS= enables_cppunit="no" else CPPUNIT_CFLAGS="`$CPPUNIT_CONFIG --cflags`" CPPUNIT_LIBS="`$CPPUNIT_CONFIG --libs`" fi]) if test "x$CPPUNIT_CONFIG" != "xno" || test "x$CPPUNIT_PKG_ERRORS" != "x" then TEST_DIR=test else enables_cppunit=no TEST_DIR= fi fi AC_SUBST(CPPUNIT_CFLAGS) AC_SUBST(CPPUNIT_LIBS) AC_SUBST(TEST_DIR) else enables_cppunit="no" fi # check ldns library if test "$enables_compute_client" = "yes"; then AC_ARG_ENABLE(ldns, AC_HELP_STRING([--disable-ldns], [disable ldns library usage (makes ARCHERY client unavailable) ]),[enables_ldns=$enableval],[]) if test "$enables_ldns" = "yes"; then PKG_CHECK_MODULES(LDNS, [ldns],[], [AC_PATH_PROG(LDNS_CONFIG, ldns-config, no) if test "x$LDNS_CONFIG" = "xno"; then AC_CHECK_HEADER([ldns/ldns.h], [AC_CHECK_LIB([ldns], [ldns_dname_new_frm_str], [ LDNS_CFLAGS="$LDNS_CFLAGS" LDNS_LIBS="$LDNS_LIBS -lldns" ], [enables_ldns="no"]) ],[enables_ldns="no"]) else LDNS_CFLAGS="`$LDNS_CONFIG --cflags`" LDNS_LIBS="`$LDNS_CONFIG --libs`" fi ]) if test "$enables_ldns" = "no"; then AC_MSG_WARN([ldns library was not found. 
Compute clients will be built without ARCHERY support.]) fi fi else enables_ldns="no" fi if test "x$enables_ldns" = "xyes" ; then AC_DEFINE(HAVE_LDNS, 1, [define if LDNS is enabled and available]) else LDNS_CFLAGS= LDNS_LIBS= fi AC_SUBST(LDNS_CFLAGS) AC_SUBST(LDNS_LIBS) AM_CONDITIONAL(LDNS_ENABLED, test "x$enables_ldns" = "xyes") ############################## # # Check xmlsec1 # ############################# MACOSX="" case "${host}" in *darwin*) MACOSX="yes" ;; esac if test "x$MACOSX" = "xyes"; then AC_DEFINE(_MACOSX, 1, [Define if compiling for MacOSX]) fi AM_CONDITIONAL([MACOSX], [ test "x$MACOSX" = "xyes"]) if test "$enables_hed" = "yes"; then XMLSEC_MIN_VERSION="1.2.4" XMLSEC_OPENSSL_MIN_VERSION="1.2.4" XMLSEC_CONFIG="${XMLSEC1_CONFIG:-xmlsec1-config}" XMLSEC_CFLAGS="" XMLSEC_LIBS="" XMLSEC_INSTALLED=no dnl Check if xmlsec1 is explicitly disabled, default is enable. AC_ARG_ENABLE(xmlsec1, AC_HELP_STRING([--disable-xmlsec1], [disable features which need xmlsec1 library]),[enables_xmlsec1=$enableval],[]) if test "x$enables_xmlsec1" = "xyes"; then AC_ARG_WITH(xmlsec1, [ --with-xmlsec1=(PATH) xmlsec1 location]) if test "x$with_xmlsec1" = "x" ; then PKG_CHECK_MODULES(XMLSEC, [xmlsec1 >= $XMLSEC_MIN_VERSION], [XMLSEC_INSTALLED=yes], [XMLSEC_INSTALLED=no]) if test "x$XMLSEC_INSTALLED" = "xyes" ; then PKG_CHECK_MODULES(XMLSEC_OPENSSL, [xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION], [XMLSEC_INSTALLED=yes],[XMLSEC_INSTALLED=no]) fi # Find number of backslashes in XMLSEC_CFLAGS n=$(echo $XMLSEC_CFLAGS|sed 's/.*-DXMLSEC_CRYPTO=\([[^ ]]*\).*/\1/'|tr -d '[[A-Za-z0-1\n"]]'| wc -c) # Fixes due to bugs in pkg-config and/or xmlsec1 # # 0: Indicates a bug in pkg-config which removes the escaping of the quotes # 2: Correct value with escaped quotes # 6: Old xmlsec1 version which used 3 back-slashes to escape quotes # See eg. https://bugzilla.redhat.com/show_bug.cgi?id=675334 # Make sure that the quotes are escaped with single backslash if test $n = 0 -o $n = 6; then AC_MSG_NOTICE([Working around bad combination of pkgconfig and xmlsec1 with $n back-slashes]) XMLSEC_CFLAGS=$(echo $XMLSEC_CFLAGS|sed 's/\(.*-DXMLSEC_CRYPTO=\)\\*"\([[^ \\"]]*\)\\*" \(.*\)/\1\\"\2\\" \3/') XMLSEC_OPENSSL_CFLAGS=$(echo $XMLSEC_OPENSSL_CFLAGS|sed 's/\(.*-DXMLSEC_CRYPTO=\)\\*"\([[^ \\"]]*\)\\*" \(.*\)/\1\\"\2\\" \3/') fi fi if test "x$XMLSEC_INSTALLED" = "xno" -a "x$MACOSX" != "xyes"; then AC_MSG_CHECKING(for xmlsec1 libraries >= $XMLSEC_MIN_VERSION) if test "x$with_xmlsec1" != "x" ; then XMLSEC_CONFIG=$with_xmlsec1/bin/$XMLSEC_CONFIG fi "$XMLSEC_CONFIG" --version 2>/dev/null 1>/dev/null if test "$?" != '0' ; then AC_MSG_WARN(Could not find xmlsec1 anywhere; The xml security related functionality will not be compiled) else vers=`$XMLSEC_CONFIG --version 2>/dev/null | awk -F. '{ printf "%d", ($1 * 1000 + $2) * 1000 + $3;}'` minvers=`echo $XMLSEC_MIN_VERSION | awk -F. 
'{ printf "%d", ($1 * 1000 + $2) * 1000 + $3;}'` if test "$vers" -ge "$minvers" ; then XMLSEC_LIBS="`$XMLSEC_CONFIG --libs`" XMLSEC_CFLAGS="`$XMLSEC_CONFIG --cflags`" #check the xmlsec1-openssl here if test "x$PKG_CONFIG_PATH" != "x"; then PKG_CONFIG_PATH="$with_xmlsec1/lib/pkgconfig:$PKG_CONFIG_PATH" else PKG_CONFIG_PATH="$with_xmlsec1/lib/pkgconfig" fi PKG_CHECK_MODULES(XMLSEC_OPENSSL, [xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION], [XMLSEC_INSTALLED=yes],[XMLSEC_INSTALLED=no]) else AC_MSG_WARN(You need at least xmlsec1 $XMLSEC_MIN_VERSION for this version of arc) fi fi elif test "x$XMLSEC_INSTALLED" = "xno" -a "x$MACOSX" = "xyes"; then #MACOSX has no "ldd" which is needed by xmlsec1-config, so here simply we use PKG_CHECK_MODULES if test "x$PKG_CONFIG_PATH" != "x"; then PKG_CONFIG_PATH="$with_xmlsec1/lib/pkgconfig:$PKG_CONFIG_PATH" else PKG_CONFIG_PATH="$with_xmlsec1/lib/pkgconfig" fi PKG_CHECK_MODULES(XMLSEC, [xmlsec1 >= $XMLSEC_MIN_VERSION], [XMLSEC_INSTALLED=yes], [XMLSEC_INSTALLED=no]) if test "x$XMLSEC_INSTALLED" = "xyes" ; then PKG_CHECK_MODULES(XMLSEC_OPENSSL, [xmlsec1-openssl >= $XMLSEC_OPENSSL_MIN_VERSION], [XMLSEC_INSTALLED=yes],[XMLSEC_INSTALLED=no]) fi fi AC_SUBST(XMLSEC_CFLAGS) AC_SUBST(XMLSEC_LIBS) AC_SUBST(XMLSEC_OPENSSL_CFLAGS) AC_SUBST(XMLSEC_OPENSSL_LIBS) #AC_SUBST(XMLSEC_CONFIG) #AC_SUBST(XMLSEC_MIN_VERSION) enables_xmlsec1="$XMLSEC_INSTALLED" fi else enables_xmlsec1="no" fi ######################### # # Check libmysqlclient # ######################### MYSQL_INSTALLED=no if test "$enables_hed" = "yes"; then MYSQL_CONFIG="mysql_config" MYSQL_CFLAGS="" MYSQL_LIBS="" dnl Check if libmysqlclient is explicitly enabled, default is disable. AC_ARG_ENABLE(mysql, AC_HELP_STRING([--enable-mysql], [enable use of the MySQL client library]),[enables_mysql="$enableval"],[]) # Ask user for path to libmysqlclient if test "x$enables_mysql" = "xyes"; then AC_ARG_WITH(mysql, [ --with-mysql=(PATH) prefix of MySQL installation. e.g. /usr/local or /usr]) AC_MSG_CHECKING(for mysql client library) if test "x$with_mysql" != "x" ; then MYSQL_CONFIG=$with_mysql/bin/$MYSQL_CONFIG fi if ! $MYSQL_CONFIG --version > /dev/null 2>&1 ; then AC_MSG_ERROR(Could not find mysql C library anywhere (see config.log for details).) 
fi MYSQL_LIBS="`$MYSQL_CONFIG --libs`" MYSQL_CFLAGS="`$MYSQL_CONFIG --cflags`" MYSQL_INSTALLED="yes" AC_SUBST(MYSQL_LIBS) AC_SUBST(MYSQL_CFLAGS) enables_mysql=$MYSQL_INSTALLED fi AC_MSG_NOTICE([MySQL client library enabled: $MYSQL_INSTALLED]) fi AM_CONDITIONAL([MYSQL_LIBRARY_ENABLED],[test "x$MYSQL_INSTALLED" = "xyes"]) # Check monitor AC_ARG_ENABLE(monitor, AC_HELP_STRING([--enable-monitor], [enable use of the monitor]),[enables_monitor="$enableval"],[]) if test "x$enables_monitor" = "xyes"; then AC_ARG_WITH(monitor, [ --with-monitor=(PATH) where to install the monitor, eg /var/www/monitor or /usr/share/arc/monitor]) AC_MSG_CHECKING(for monitor installation path) if test "x$with_monitor" != "x" ; then monitor_prefix=$with_monitor else monitor_prefix=${datadir}/arc/monitor fi AC_MSG_RESULT([$monitor_prefix]) AC_SUBST(monitor_prefix) fi # check zlib ZLIB_CFLAGS= ZLIB_LDFLAGS= ZLIB_LIBS= if test "$enables_hed" = "yes"; then SAVE_CPPFLAGS=$CPPFLAGS SAVE_LDFLAGS=$LDFLAGS AC_ARG_WITH(zlib, AC_HELP_STRING([--with-zlib=PATH], [where zlib is installed]), [ if test -d "$withval"; then ZLIB_CFLAGS="${CPPFLAGS} -I$withval/include" ZLIB_LDFLAGS="${LDFLAGS} -L$withval/lib" fi ] ) CPPFLAGS="$CPPFLAGS $ZLIB_CFLAGS" LDFLAGS="$LDFLAGS $ZLIB_LDFLAGS" AC_CHECK_HEADER([zlib.h],[ZLIB_CFLAGS="$ZLIB_CFLAGS"],AC_MSG_ERROR([unable to find zlib header files])) AC_CHECK_LIB([z],[deflateInit2_],[ZLIB_LIBS="$ZLIB_LDFLAGS -lz"],AC_MSG_ERROR([unable to link with zlib library])) CPPFLAGS=$SAVE_CPPFLAGS LDFLAGS=$SAVE_LDFLAGS fi AC_SUBST(ZLIB_CFLAGS) AC_SUBST(ZLIB_LIBS) # check ARGUS ARGUS_CFLAGS= ARGUS_LIBS= AC_ARG_ENABLE(argus, AC_HELP_STRING([--enable-argus], [enable use of Argus PEP V2 libraries]),[enables_argus="$enableval"],[]) if test "x$enables_argus" = "xyes"; then AC_ARG_WITH(argus, AC_HELP_STRING([--with-argus=PATH], [ARGUS PEP installation path]), [ if test "x$PKG_CONFIG_PATH" != "x"; then PKG_CONFIG_PATH="$withval/lib/pkgconfig:$PKG_CONFIG_PATH" else PKG_CONFIG_PATH="$withval/lib/pkgconfig" fi ] ) PKG_CHECK_MODULES(ARGUS, [libargus-pep >= 2.0.0], [], [ AC_MSG_NOTICE([Failed to find Argus PEP libraries with version >= 2]) enables_argus=no ]) fi AC_SUBST(ARGUS_CFLAGS) AC_SUBST(ARGUS_LIBS) AM_CONDITIONAL(ARGUS_ENABLED, test "x$enables_argus" = "xyes") ############################################### # # Check for Berkeley DB C++ # ############################################### DBCXX_LIBS="" DBCXX_CPPFLAGS= if test "$enables_hed" = "yes"; then # # Allow the user to specify db_cxx.h location (we will still check though) # dbcxx_include_paths= AC_ARG_WITH(dbcxx-include, [ --with-dbcxx-include=PATH Specify path to db_cxx.h], [ if test "x$withval" = "xyes" ; then AC_MSG_ERROR([--with-dbcxx-include requires PATH argument]) fi if test "x$withval" != "xno" ; then dbcxx_include_paths=$withval fi ] ) # # Allow the user to specify DB4 library location (we will still check though) # db4_library_path= AC_ARG_WITH(db4-library-path, [ --with-db4-library-path=PATH Specify path to DB4 library], [ if test "x$withval" = "xyes" ; then AC_MSG_ERROR([--with-db4-library-path requires PATH argument]) fi if test "x$withval" != "xno" ; then db4_library_path=$withval fi ] ) AC_LANG_SAVE AC_LANG_CPLUSPLUS # # If user did not specify location we start by searching at the standard locations # if test "x$dbcxx_include_paths" = "x" then AC_MSG_NOTICE([Looking for db_cxx.h in standard locations]) AC_CHECK_HEADERS(db_cxx.h,HAVE_DBCXX=yes,HAVE_DBCXX=no) # If the user did not provide a location we have some good suggestions 
dbcxx_include_paths="/usr/include/db4 /usr/include/db44 /usr/include/db43"
else
  HAVE_DBCXX=no
fi
#
# Now look for db_cxx.h in non-standard locations
#
if test "$HAVE_DBCXX" = no
then
  for dbcxx_dir in $dbcxx_include_paths
  do
    SAVE_CPPFLAGS=$CPPFLAGS
    DBCXX_CPPFLAGS=-I$dbcxx_dir
    CPPFLAGS="$CPPFLAGS $DBCXX_CPPFLAGS"
    # Disable Autoconf caching
    unset ac_cv_header_db_cxx_h
    AC_MSG_NOTICE([Looking for db_cxx.h in $dbcxx_dir])
    AC_CHECK_HEADERS(db_cxx.h,HAVE_DBCXX=yes,HAVE_DBCXX=no)
    CPPFLAGS=$SAVE_CPPFLAGS
    # If a db_cxx.h was found we break and keep the current value of DBCXX_CPPFLAGS
    if test "$HAVE_DBCXX" = yes
    then
      break
    fi
    DBCXX_CPPFLAGS=
  done
fi
AC_SUBST(DBCXX_CPPFLAGS)
if test "x$db4_library_path" != "x"
then
  db4_library_path="-L$db4_library_path"
fi
if test "$HAVE_DBCXX" = no
then
  DBCXX_LIBS=""
else
  SAVE_LDFLAGS=$LDFLAGS
  SAVE_CXXFLAGS=$CXXFLAGS
  # pthread needed for RH9
  LDFLAGS="$LDFLAGS -lpthread"
  LDFLAGS="$LDFLAGS $db4_library_path"
  for db_ver in "" -4.7 -4.3 -4.2
  do
    AC_CHECK_LIB(db_cxx$db_ver,main,DBCXX_LIBS="$db4_library_path -ldb_cxx$db_ver",DBCXX_LIBS="")
    if test "$DBCXX_LIBS" = ""
    then
      AC_MSG_WARN([BerkeleyDB library libdb_cxx$db_ver was not found!])
    else
      break
    fi
  done
  if test "$DBCXX_LIBS" = ""
  then
    AC_MSG_WARN([No BerkeleyDB library found!])
  fi
  LDFLAGS=$SAVE_LDFLAGS
  CXXFLAGS=$SAVE_CXXFLAGS
fi
AC_SUBST(DBCXX_LIBS)
if test ! "x$DBCXX_LIBS" = "x"
then
  AC_DEFINE(HAVE_DBCXX, 1, [define if Berkeley DB C++ binding is available])
  SAVE_CXXFLAGS=$CXXFLAGS
  CXXFLAGS="$CXXFLAGS $DBCXX_CPPFLAGS"
  AC_DBCXX_HAVE_DBDEADLOCKEXCEPTION
  CXXFLAGS=$SAVE_CXXFLAGS
fi
AC_LANG_RESTORE
fi
# DBJSTORE (storing jobs information in BDB)
AC_ARG_ENABLE(dbjstore,
    AC_HELP_STRING([--disable-dbjstore], [disable storing local jobs information in BDB]),
    [enables_dbjstore=$enableval],[])
if test "$enables_dbjstore" = "yes"; then
  if test "x$DBCXX_LIBS" = "x" ; then
    AC_MSG_NOTICE([For storing jobs in BDB C++ API is needed (dbcxx) - disabling])
    enables_dbjstore="no"
  fi
fi
AC_MSG_NOTICE([Storing jobs in BDB enabled: $enables_dbjstore])
AM_CONDITIONAL([DBJSTORE_ENABLED],[test "x$enables_dbjstore" = "xyes"])
if test "x$enables_dbjstore" = "xyes"; then
  AC_DEFINE(DBJSTORE_ENABLED, 1, [define to build job information storage in BDB])
fi
# SQLITEJSTORE (storing jobs information in SQLite)
AC_ARG_ENABLE(sqlitejstore,
    AC_HELP_STRING([--disable-sqlitejstore], [disable storing local jobs information in SQLite]),
    [enables_sqlitejstore=$enableval],[])
if test "$enables_sqlitejstore" = "yes"; then
  if test "x$SQLITE_INSTALLED" != "xyes" ; then
    AC_MSG_NOTICE([For storing jobs in SQLite install SQLite 3.6 or newer - disabling])
    enables_sqlitejstore="no"
  fi
fi
AC_MSG_NOTICE([Storing jobs in SQLite enabled: $enables_sqlitejstore])
AM_CONDITIONAL([SQLITEJSTORE_ENABLED],[test "x$enables_sqlitejstore" = "xyes"])
if test "x$enables_sqlitejstore" = "xyes"; then
  AC_DEFINE(SQLITEJSTORE_ENABLED, 1, [define to build job information storage in SQLite])
fi
# globus/gpt packages
if test "$enables_hed" = "yes"; then
PKG_CHECK_MODULES(GLOBUS_COMMON, [globus-common], [
    GLOBUS_COMMON_VERSION=`$PKG_CONFIG --modversion globus-common`], [
    GPT_PKG(globus_common) ])
AC_SUBST(GLOBUS_COMMON_CFLAGS)
AC_SUBST(GLOBUS_COMMON_LIBS)
PKG_CHECK_MODULES(GLOBUS_GSSAPI_GSI, [globus-gssapi-gsi], [
    GLOBUS_GSSAPI_GSI_VERSION=`$PKG_CONFIG --modversion globus-gssapi-gsi`], [
    GPT_PKG(globus_gssapi_gsi) ])
AC_SUBST(GLOBUS_GSSAPI_GSI_CFLAGS)
AC_SUBST(GLOBUS_GSSAPI_GSI_LIBS)
PKG_CHECK_MODULES(GLOBUS_GSS_ASSIST, [globus-gss-assist], [
GLOBUS_GSS_ASSIST_VERSION=`$PKG_CONFIG --modversion globus-gss-assist`], [ GPT_PKG(globus_gss_assist) ]) AC_SUBST(GLOBUS_GSS_ASSIST_CFLAGS) AC_SUBST(GLOBUS_GSS_ASSIST_LIBS) PKG_CHECK_MODULES(GLOBUS_GSI_CALLBACK, [globus-gsi-callback], [ GLOBUS_GSI_CALLBACK_VERSION=`$PKG_CONFIG --modversion globus-gsi-callback`], [ GPT_PKG(globus_gsi_callback) ]) AC_SUBST(GLOBUS_GSI_CALLBACK_CFLAGS) AC_SUBST(GLOBUS_GSI_CALLBACK_LIBS) PKG_CHECK_MODULES(GLOBUS_FTP_CLIENT, [globus-ftp-client], [ GLOBUS_FTP_CLIENT_VERSION=`$PKG_CONFIG --modversion globus-ftp-client`], [ GPT_PKG(globus_ftp_client) ]) AC_SUBST(GLOBUS_FTP_CLIENT_CFLAGS) AC_SUBST(GLOBUS_FTP_CLIENT_LIBS) PKG_CHECK_MODULES(GLOBUS_FTP_CONTROL, [globus-ftp-control], [ GLOBUS_FTP_CONTROL_VERSION=`$PKG_CONFIG --modversion globus-ftp-control`], [ GPT_PKG(globus_ftp_control) ]) AC_SUBST(GLOBUS_FTP_CONTROL_CFLAGS) AC_SUBST(GLOBUS_FTP_CONTROL_LIBS) PKG_CHECK_MODULES(GLOBUS_IO, [globus-io], [ GLOBUS_IO_VERSION=`$PKG_CONFIG --modversion globus-io`], [ GPT_PKG(globus_io) ]) AC_SUBST(GLOBUS_IO_CFLAGS) AC_SUBST(GLOBUS_IO_LIBS) PKG_CHECK_MODULES(GLOBUS_GSI_CERT_UTILS, [globus-gsi-cert-utils], [ GLOBUS_GSI_CERT_UTILS_VERSION=`$PKG_CONFIG --modversion globus-gsi-cert-utils`], [ GPT_PKG(globus_gsi_cert_utils) ]) AC_SUBST(GLOBUS_GSI_CERT_UTILS_CFLAGS) AC_SUBST(GLOBUS_GSI_CERT_UTILS_LIBS) PKG_CHECK_MODULES(GLOBUS_GSI_CREDENTIAL, [globus-gsi-credential], [ GLOBUS_GSI_CREDENTIAL_VERSION=`$PKG_CONFIG --modversion globus-gsi-credential`], [ GPT_PKG(globus_gsi_credential) ]) AC_SUBST(GLOBUS_GSI_CREDENTIAL_CFLAGS) AC_SUBST(GLOBUS_GSI_CREDENTIAL_LIBS) PKG_CHECK_MODULES(GLOBUS_OPENSSL_MODULE, [globus-openssl-module], [ GLOBUS_OPENSSL_MODULE_VERSION=`$PKG_CONFIG --modversion globus-openssl-module`], [ GPT_PKG(globus_openssl_module) ]) AC_SUBST(GLOBUS_OPENSSL_MODULE_CFLAGS) AC_SUBST(GLOBUS_OPENSSL_MODULE_LIBS) # Check for new globus thread model selection SAVE_CFLAGS=$CFLAGS SAVE_LIBS=$LIBS CFLAGS="$CFLAGS $GLOBUS_COMMON_CFLAGS" LIBS="$LIBS $GLOBUS_COMMON_LIBS" AC_CHECK_FUNCS(globus_thread_set_model) CFLAGS=$SAVE_CFLAGS LIBS=$SAVE_LIBS # Check for gridftp-v2 SAVE_CFLAGS=$CFLAGS SAVE_LIBS=$LIBS CFLAGS="$CFLAGS $GLOBUS_FTP_CLIENT_CFLAGS" LIBS="$LIBS $GLOBUS_FTP_CLIENT_LIBS" AC_CHECK_FUNCS(globus_ftp_client_handleattr_set_gridftp2) CFLAGS=$SAVE_CFLAGS LIBS=$SAVE_LIBS globus_openssl_detected= PKG_CHECK_MODULES(GLOBUS_OPENSSL, [globus-openssl], [ GLOBUS_OPENSSL_VERSION=`$PKG_CONFIG --modversion globus-openssl`], [ GPT_PKG(globus_openssl) ]) if test ! "x$GLOBUS_OPENSSL_LIBS" = "x" ; then globus_openssl_detected=`echo "$GLOBUS_OPENSSL_LIBS" | grep "lssl_$GPT_FLAVOR"` if test ! "x$globus_openssl_detected" = "x" ; then globus_openssl_detected="yes" fi fi if test "x$globus_openssl_detected" = "xyes" ; then AC_MSG_RESULT([ Globus own OpenSSL library detected. In order to avoid runtime conflicts following components will be disabled: GridFTP DMC, SRM DMC, GSI MCC. To enable these components use Globus compiled for system OpenSSL. 
])
GLOBUS_FTP_CLIENT_VERSION=
GLOBUS_FTP_CONTROL_VERSION=
GLOBUS_IO_VERSION=
GLOBUS_GSSAPI_GSI_VERSION=
fi
if test "x$GLOBUS_IO_VERSION" = "x"; then
  IO_VERSION_MAJOR=0
else
  IO_VERSION_MAJOR=`echo "$GLOBUS_IO_VERSION" | sed 's/^\([[^.]]*\).*/\1/'`;
fi
AC_DEFINE_UNQUOTED(GLOBUS_IO_VERSION,$IO_VERSION_MAJOR,[Globus IO version])
if test "x$GLOBUS_GSSAPI_GSI_VERSION" = "x"; then
  GLOBUS_GSSAPI_GSI_VERSION_MAJOR=0
  GLOBUS_GSSAPI_GSI_VERSION_MINOR=0
else
  GLOBUS_GSSAPI_GSI_VERSION_MAJOR=`echo "$GLOBUS_GSSAPI_GSI_VERSION" | sed 's/^\([[^.]]*\).*/\1/'`;
  GLOBUS_GSSAPI_GSI_VERSION_MINOR=`echo "$GLOBUS_GSSAPI_GSI_VERSION" | sed 's/^[[^.]]*\.\([[^.]]*\).*/\1/'`;
fi
if test "$GLOBUS_GSSAPI_GSI_VERSION_MAJOR" -lt "12"; then
  GLOBUS_GSSAPI_GSI_OLD_OPENSSL=1
elif test "$GLOBUS_GSSAPI_GSI_VERSION_MAJOR" -eq "12"; then
  if test "$GLOBUS_GSSAPI_GSI_VERSION_MINOR" -lt "2"; then
    GLOBUS_GSSAPI_GSI_OLD_OPENSSL=1
  else
    GLOBUS_GSSAPI_GSI_OLD_OPENSSL=0
  fi
else
  GLOBUS_GSSAPI_GSI_OLD_OPENSSL=0
fi
AC_DEFINE_UNQUOTED(GLOBUS_GSSAPI_GSI_VERSION,$GLOBUS_GSSAPI_GSI_VERSION_MAJOR,[Globus GSSAPI GSI version])
AC_DEFINE_UNQUOTED(GLOBUS_GSSAPI_GSI_OLD_OPENSSL,$GLOBUS_GSSAPI_GSI_OLD_OPENSSL,[Globus GSSAPI GSI is built for OpenSSL older than 1.1])
dnl
dnl DEFAULT_GLOBUS_LOCATION
dnl
AC_MSG_CHECKING(for DEFAULT_GLOBUS_LOCATION)
# GLOBUS_LOCATION is set by GPT macros
DEFAULT_GLOBUS_LOCATION="$GLOBUS_LOCATION"
AC_MSG_RESULT($DEFAULT_GLOBUS_LOCATION)
AC_SUBST(DEFAULT_GLOBUS_LOCATION)
#check lcas
DEFAULT_LCAS_LOCATION=/opt/glite
LCAS_LOCATION=
LCAS_CFLAGS=
LCAS_LIBS=
AC_ARG_WITH(lcas-location,
    [ --with-lcas-location= Specify the LCAS installation path. [[/opt/glite]]],
    [
      LCAS_LOCATION=$with_lcas_location
      if test ! -d $LCAS_LOCATION; then
        AC_MSG_WARN([LCAS_LOCATION ($LCAS_LOCATION) does not exist])
        LCAS_LOCATION=
      fi
    ],[
      if test "x$LCAS_LOCATION" = "x"; then
        LCAS_LOCATION=$DEFAULT_LCAS_LOCATION
      fi
      if test ! -d $LCAS_LOCATION; then
        LCAS_LOCATION=
      fi
    ]
)
if test "x$LCAS_LOCATION" != "x"; then
  LCAS_CFLAGS=$LCAS_LOCATION/include/glite/security/lcas
  if test ! -d $LCAS_CFLAGS; then
    LCAS_CFLAGS=$LCAS_LOCATION/include/lcas
    if test ! -d $LCAS_CFLAGS; then
      LCAS_CFLAGS=$LCAS_LOCATION/include
    fi
  fi
  LCAS_CFLAGS=-I$LCAS_CFLAGS
  SAVE_CPPFLAGS=$CPPFLAGS
  CPPFLAGS="$LCAS_CFLAGS $GLOBUS_GSSAPI_GSI_CFLAGS"
  AC_CHECK_HEADERS([lcas.h],
    LCAS_LDFLAGS=
    if test -d $LCAS_LOCATION/lib64; then
      LCAS_LDFLAGS="-L$LCAS_LOCATION/lib64 $GLOBUS_GSSAPI_GSI_LIBS"
    else
      LCAS_LDFLAGS="-L$LCAS_LOCATION/lib $GLOBUS_GSSAPI_GSI_LIBS"
    fi
    SAVE_LDFLAGS=$LDFLAGS
    LDFLAGS="$LDFLAGS $LCAS_LDFLAGS"
    AC_CHECK_LIB(lcas,lcas_init, LCAS_LIBS="$LCAS_LDFLAGS -llcas",LCAS_LOCATION="",)
    LDFLAGS=$SAVE_LDFLAGS
    ,
    LCAS_LOCATION=""
  )
  CPPFLAGS=$SAVE_CPPFLAGS
fi
if test "x$LCAS_LOCATION" != "x"; then
  AC_DEFINE(HAVE_LCAS, 1, [define if lcas is available])
  AC_SUBST(LCAS_LOCATION)
  AC_SUBST(LCAS_CFLAGS)
  AC_SUBST(LCAS_LIBS)
fi
#check lcmaps
DEFAULT_LCMAPS_LOCATION=/opt/glite
LCMAPS_LOCATION=
LCMAPS_CFLAGS=
LCMAPS_LIBS=
AC_ARG_WITH(lcmaps-location,
    [ --with-lcmaps-location= Specify the LCMAPS installation path. [[/opt/glite]]],
    [
      LCMAPS_LOCATION=$with_lcmaps_location
      if test ! -d $LCMAPS_LOCATION; then
        AC_MSG_WARN([LCMAPS_LOCATION ($LCMAPS_LOCATION) does not exist])
        LCMAPS_LOCATION=
      fi
    ],[
      if test "x$LCMAPS_LOCATION" = "x"; then
        LCMAPS_LOCATION=$DEFAULT_LCMAPS_LOCATION
      fi
      if test ! -d $LCMAPS_LOCATION; then
        LCMAPS_LOCATION=
      fi
    ]
)
if test "x$LCMAPS_LOCATION" != "x"; then
  LCMAPS_CFLAGS=$LCMAPS_LOCATION/include/glite/security/lcmaps
  if test ! -d $LCMAPS_CFLAGS; then
    LCMAPS_CFLAGS=$LCMAPS_LOCATION/include/lcmaps
    if test !
-d $LCMAPS_CFLAGS; then LCMAPS_CFLAGS=$LCMAPS_LOCATION/include fi fi LCMAPS_CFLAGS=-I$LCMAPS_CFLAGS SAVE_CPPFLAGS=$CPPFLAGS CPPFLAGS="$LCMAPS_CFLAGS $GLOBUS_GSSAPI_GSI_CFLAGS" AC_CHECK_HEADERS([lcmaps.h], LCMAPS_LDFLAGS= if test -d $LCMAPS_LOCATION/lib64; then LCMAPS_LDFLAGS="-L$LCMAPS_LOCATION/lib64 $GLOBUS_GSSAPI_GSI_LIBS" else LCMAPS_LDFLAGS="-L$LCMAPS_LOCATION/lib $GLOBUS_GSSAPI_GSI_LIBS" fi SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $LCMAPS_LDFLAGS" AC_CHECK_LIB(lcmaps,lcmaps_init, LCMAPS_LIBS="$LCMAPS_LDFLAGS -llcmaps",LCMAPS_LOCATION="",) LDFLAGS=$SAVE_LDFLAGS , LCMAPS_LOCATION="" ) CPPFLAGS=$SAVE_CPPFLAGS fi if test "x$LCMAPS_LOCATION" != "x"; then AC_DEFINE(HAVE_LCMAPS, 1, [define if lcmaps is available]) AC_SUBST(LCMAPS_LOCATION) AC_SUBST(LCMAPS_CFLAGS) AC_SUBST(LCMAPS_LIBS) fi # Check if mock DMC is enabled AC_ARG_ENABLE(mock-dmc, AC_HELP_STRING([--enable-mock-dmc], [enable mock DMC, default is disable]),[enables_mock_dmc="$enableval"],[]) # Check for GFAL2 AC_ARG_ENABLE(gfal, AC_HELP_STRING([--enable-gfal], [enable the GFAL support, default is disable]),[enables_gfal="$enableval"],[]) if test "x$enables_gfal" = "xyes"; then PKG_CHECK_MODULES(GFAL2, gfal_transfer, [], [enables_gfal="no"]) AC_SUBST(GFAL2_CFLAGS) AC_SUBST(GFAL2_LIBS) fi # Check for S3 AC_ARG_ENABLE(s3, AC_HELP_STRING([--enable-s3], [enable the S3 support, default is disable]),[enables_s3="$enableval"],[]) if test "x$enables_s3" = "xyes"; then AC_ARG_WITH(s3, [ --with-s3=(PATH) libs3 location]) if test ! "x$with_s3" = "x" ; then S3_LOCATION="$with_s3" S3_CPPFLAGS="-I$S3_LOCATION/include" if test -d $S3_LOCATION/lib64; then S3_LDFLAGS="-L$S3_LOCATION/lib64" else S3_LDFLAGS="-L$S3_LOCATION/lib" fi fi SAVE_CPPFLAGS=$CPPFLAGS CPPFLAGS="$CPPFLAGS $S3_CPPFLAGS" AC_CHECK_HEADER(libs3.h, [], [enables_s3="no"]) CPPFLAGS=$SAVE_CPPFLAGS SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $S3_LDFLAGS" AC_CHECK_LIB([s3], [S3_initialize], [S3_LIBS="$S3_LDFLAGS -ls3"], [enables_s3="no"]) LDFLAGS=$SAVE_LDFLAGS AC_SUBST(S3_CPPFLAGS) AC_SUBST(S3_LIBS) if test x$enables_s3 = xyes then if s3 help 2>&1 | grep -q -- '--timeout' ; then AC_DEFINE([HAVE_S3_TIMEOUT], 1, [Define if S3 API has timeouts]) fi fi fi # Check for xrootd (c++) AC_LANG_SAVE AC_LANG_CPLUSPLUS AC_ARG_ENABLE(xrootd, AC_HELP_STRING([--disable-xrootd], [disable the xrootd support, default is enable]),[enables_xrootd="$enableval"],[]) if test "x$enables_xrootd" = "xyes"; then XROOTD_CPPFLAGS="-I/usr/include/xrootd" AC_ARG_WITH(xrootd, [ --with-xrootd=(PATH) Xrootd location]) if test ! 
"x$with_xrootd" = "x" ; then XROOTD_LOCATION="$with_xrootd" XROOTD_CPPFLAGS="-I$XROOTD_LOCATION/include/xrootd" if test -d $XROOTD_LOCATION/lib64; then XROOTD_LDFLAGS="-L$XROOTD_LOCATION/lib64" else XROOTD_LDFLAGS="-L$XROOTD_LOCATION/lib" fi fi AC_MSG_CHECKING([for XROOTD headers]) SAVE_CPPFLAGS=$CPPFLAGS CPPFLAGS="$CPPFLAGS $XROOTD_CPPFLAGS" AC_TRY_COMPILE([#include ], [], [ AC_MSG_RESULT([$XROOTD_CPPFLAGS]) ], [ XROOTD_CPPFLAGS="-std=c++0x $XROOTD_CPPFLAGS" CPPFLAGS="$SAVE_CPPFLAGS $XROOTD_CPPFLAGS" AC_TRY_COMPILE([#include ], [], [ AC_MSG_RESULT([$XROOTD_CPPFLAGS]) ], [ AC_MSG_RESULT([no]) enables_xrootd="no" ]) ]) CPPFLAGS=$SAVE_CPPFLAGS SAVE_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $XROOTD_LDFLAGS" AC_CHECK_LIB([XrdPosix], [main], [XROOTD_LIBS="$XROOTD_LDFLAGS -lXrdPosix -lXrdCl"], [enables_xrootd="no"]) LDFLAGS=$SAVE_LDFLAGS fi AC_SUBST(XROOTD_CPPFLAGS) AC_SUBST(XROOTD_LIBS) fi AC_LANG_RESTORE # Setup conditionals AM_CONDITIONAL([GLOBUSUTILS_ENABLED], test -n "$GLOBUS_COMMON_VERSION") AM_CONDITIONAL([GRIDFTP_ENABLED], test -n "$GLOBUS_FTP_CLIENT_VERSION") AM_CONDITIONAL([MOCK_DMC_ENABLED], test x$enables_mock_dmc = xyes) AM_CONDITIONAL([GFAL_ENABLED], test x$enables_gfal = xyes) AM_CONDITIONAL([S3_DMC_ENABLED], test x$enables_s3 = xyes) AM_CONDITIONAL([XROOTD_ENABLED], test x$enables_xrootd = xyes) AM_CONDITIONAL([XMLSEC_ENABLED], test x$XMLSEC_INSTALLED = xyes) AM_CONDITIONAL([CPPUNIT_ENABLED], test x$enables_cppunit = xyes) enables_srm_dmc=no if test "$enables_hed" = "yes"; then enables_srm_dmc=yes fi AM_CONDITIONAL([SRM_DMC_ENABLED],[test "x$enables_srm_dmc" = "xyes"]) # Setup defines if test -n "$GLOBUS_COMMON_VERSION"; then AC_DEFINE(HAVE_GLOBUS, 1, [define if GLOBUS is available]) fi if test x"$XMLSEC_INSTALLED" = xyes; then AC_DEFINE(HAVE_XMLSEC, 1, [define if XMLSEC package is available]) fi # Setup messages for reporting enables_gridftp=no if test -n "$GLOBUS_FTP_CLIENT_VERSION" ; then enables_gridftp=yes; fi enables_dbcxx=no if test -n "$DBCXX_LIBS" ; then enables_dbcxx=yes; fi enables_sqlite=no if test "x$SQLITE_INSTALLED" = "xyes" ; then enables_sqlite=yes; fi # Check for LDAP if test "$enables_hed" = "yes"; then LDAP=no AC_ARG_ENABLE(ldap, AC_HELP_STRING([--disable-ldap], [disable the LDAP support - requires OpenLDAP]),[enables_ldap="$enableval"],[]) if test "x$enables_ldap" = "xyes"; then AC_CHECK_HEADER(ldap.h, [ LDAP=yes SAVE_LDFLAGS=$LDFLAGS LDFLAGS=-lpthread AC_CHECK_LIB([ldap_r], [ldap_first_message], [ AC_CHECK_LIB([ldap_r], [ldap_initialize], [ AC_DEFINE(HAVE_LDAP_INITIALIZE,[],[Define if you have ldap_initialize function]) ]) LDAP_LIBS=-lldap_r ], [ AC_CHECK_LIB([ldap], [ldap_first_message], [ AC_CHECK_LIB([ldap], [ldap_initialize], [ AC_DEFINE(HAVE_LDAP_INITIALIZE,[],[Define if you have ldap_initialize function]) ]) LDAP_LIBS=-lldap ], [ LDAP=no ]) ]) AC_CHECK_LIB([lber], [ber_init], [LDAP_LIBS="$LDAP_LIBS -llber"], []) AC_SUBST(LDAP_LIBS) LDFLAGS=$SAVE_LDFLAGS ], [ LDAP=no ]) enables_ldap="$LDAP" fi else enables_ldap="no" fi AM_CONDITIONAL([LDAP_ENABLED], test x$LDAP = xyes) if test "x$LDAP" = "xyes"; then AC_DEFINE(HAVE_LDAP,[],[Define if OpenLDAP is available]) fi # Check version of Test::More Perl module. min_perl_test_more_version_required="0.88" # Stable version of Test::More containing done_testing sub. PERL_TEST_DIR= perl_test_more_version_found=$(perl -MTest::More -e "print \"\$Test::More::VERSION\"") if test $(echo "$perl_test_more_version_found" | cut -d. -f1) -gt $(echo "$min_perl_test_more_version_required" | cut -d. 
-f1) || \ test $(echo "$perl_test_more_version_found" | cut -d. -f1) -eq $(echo "$min_perl_test_more_version_required" | cut -d. -f1) && \ test $(echo "$perl_test_more_version_found" | cut -d. -f2) -ge $(echo "$min_perl_test_more_version_required" | cut -d. -f2); then PERL_TEST_DIR="test" fi AC_SUBST(PERL_TEST_DIR) # Check for the Perl module Inline::Python - temporary during rewrite. PERL5LIB_INLINE_PYTHON= INLINE_PYTHON_FOUND=no AC_ARG_WITH(inline-python, AC_HELP_STRING([--with-inline-python=], [Location of the Perl module Inline::Python.]), [if test "$with_inline_python" = "yes" then PERL5LIB_INLINE_PYTHON= if ${PERL} -e "use Inline::Python; exit;" > /dev/null 2>&1 then INLINE_PYTHON_FOUND="yes" AC_MSG_NOTICE([Perl module Inline::Python found]) else AC_MSG_ERROR([Perl module Inline::Python not found]) fi elif test "$with_inline_python" = "no" then AC_MSG_NOTICE([Disabling PYTHON LRMS]) else PERL5LIB_INLINE_PYTHON="$with_inline_python" if test -d $PERL5LIB_INLINE_PYTHON; then PERL5LIB_INLINE_PYTHON="-I${PERL5LIB_INLINE_PYTHON}" if `${PERL} ${PERL5LIB_INLINE_PYTHON} -e "use Inline::Python; exit;" > /dev/null 2>&1` then INLINE_PYTHON_FOUND="yes" AC_MSG_NOTICE([Perl module Inline::Python found]) else AC_MSG_ERROR([--with-inline-python given, but test failed: PERL5LIB_INLINE_PYTHON=${PERL5LIB_INLINE_PYTHON}]) fi fi fi ],[ if `${PERL} -e "use Inline::Python; exit;" > /dev/null 2>&1` then INLINE_PYTHON_FOUND="yes" AC_MSG_NOTICE([Perl module Inline::Python found]) else AC_MSG_NOTICE([Perl module Inline::Python not found]) fi] ) AC_SUBST(PERL5LIB_INLINE_PYTHON) AM_CONDITIONAL([PYTHON_LRMS_ENABLED],[test "x${INLINE_PYTHON_FOUND}" = "xyes"]) # Check for the uuid lib UUID_LIBS="" if test "$enables_hed" = "yes"; then AC_CHECK_HEADER(uuid/uuid.h, [ AC_CHECK_FUNC([uuid_generate], [UUID_LIBS=], [ AC_CHECK_LIB([uuid], [uuid_generate], [UUID_LIBS=-luuid], [ AC_MSG_NOTICE([Can't find library containing uuid implementation]) ]) ]) ], [AC_MSG_NOTICE([Can't find uuid header])]) AC_SUBST(UUID_LIBS) LIBS="$LIBS $UUID_LIBS" fi # Check for dlopen DLOPEN_LIBS="" if test "$enables_hed" = "yes"; then AC_CHECK_FUNC([dlopen], [DLOPEN_LIBS=], [ AC_CHECK_LIB([dl], [dlopen], [DLOPEN_LIBS=-ldl], [ AC_MSG_NOTICE([Can't find library containing dlopen implementation]) ]) ]) AC_SUBST(DLOPEN_LIBS) fi # Check for clock_gettime AC_SEARCH_LIBS([clock_gettime], [rt]) # Define bash-completion dir PKG_CHECK_MODULES([BASH_COMPLETION], [bash-completion >= 2.0], [bashcompdir="`pkg-config --variable=completionsdir --define-variable=prefix=${prefix} bash-completion`"], [bashcompdir="${sysconfdir}/bash_completion.d"]) AC_SUBST([bashcompdir]) # check for fsusage if test "$enables_hed" = "yes"; then gl_FSUSAGE fi if test "$enables_hed" = "yes"; then # Checks for header files. AC_HEADER_DIRENT AC_HEADER_STDC AC_HEADER_SYS_WAIT AC_CHECK_HEADERS([arpa/inet.h fcntl.h float.h limits.h netdb.h netinet/in.h sasl.h sasl/sasl.h stdint.h stdlib.h string.h sys/file.h sys/socket.h sys/vfs.h unistd.h uuid/uuid.h getopt.h]) AC_CXX_HAVE_SSTREAM # Checks for typedefs, structures, and compiler characteristics. AC_HEADER_STDBOOL AC_C_CONST AC_TYPE_UID_T AC_C_INLINE AC_TYPE_MODE_T AC_TYPE_OFF_T AC_TYPE_PID_T AC_TYPE_SIZE_T AC_CHECK_MEMBERS([struct stat.st_blksize]) AC_HEADER_TIME AC_STRUCT_TM AC_CHECK_TYPES([ptrdiff_t]) # Checks for library functions. 
AC_FUNC_CHOWN AC_FUNC_CLOSEDIR_VOID AC_FUNC_ERROR_AT_LINE AC_FUNC_FORK AC_FUNC_LSTAT AC_FUNC_LSTAT_FOLLOWS_SLASHED_SYMLINK AC_FUNC_MEMCMP AC_FUNC_MKTIME AC_FUNC_MALLOC AC_FUNC_REALLOC AC_FUNC_SELECT_ARGTYPES AC_TYPE_SIGNAL AC_FUNC_STRERROR_R AC_FUNC_STAT AC_CHECK_FUNCS([acl dup2 floor ftruncate gethostname getdomainname getpid gmtime_r lchown localtime_r memchr memmove memset mkdir mkfifo regcomp rmdir select setenv socket strcasecmp strchr strcspn strdup strerror strncasecmp strstr strtol strtoul strtoull timegm tzset unsetenv getopt_long_only getgrouplist mkdtemp posix_fallocate readdir_r [mkstemp] mktemp]) AC_CHECK_LIB([resolv], [res_query], [LIBRESOLV=-lresolv], [LIBRESOLV=]) AC_CHECK_LIB([resolv], [__dn_skipname], [LIBRESOLV=-lresolv], [LIBRESOLV=]) AC_CHECK_LIB([nsl], [gethostbyname], [LIBRESOLV="$LIBRESOLV -lnsl"], []) AC_CHECK_LIB([nsl], [getdomainname]) AC_SUBST(LIBRESOLV) fi # check for platfom specific flags case " $LDFLAGS " in " -Wl,--no-undefined ") ;; " -Wl,-no-undefined ") ;; " -Wl,-z -Wl,defs ") ;; " -Wl,-z,defs ") ;; *) case "${host}" in *darwin*);; *) LDFLAGS="$LDFLAGS -Wl,--no-undefined" ;; esac ;; esac AC_PATH_PROGS(PDFLATEX, pdflatex) AC_PATH_PROGS(DOXYGEN, doxygen) AC_PATH_PROGS(DOT, dot) # Check if user asks to skip documentation build AC_ARG_ENABLE(doc, AC_HELP_STRING([--disable-doc], [disable building documentation (requires doxygen and pdflatex)]),[enables_doc=$enableval],[]) #if test "x$enables_doc" = "xyes"; then # There is no point disabling docs due to missing tools since the pdf # files are both in svn and in the dist tarball # if test "x$PDFLATEX" = "x"; then # enables_doc="no" # AC_MSG_NOTICE([WARNING: Missing pdflatex - documentation won't be built]) # elif test "x$DOXYGEN" = "x"; then # enables_doc="no" # AC_MSG_NOTICE([WARNING: Missing doxygen - documentation won't be built]) # elif test "x$DOT" = "x"; then # enables_doc="no" # AC_MSG_NOTICE([WARNING: Missing dot - documentation won't be built]) # fi #fi AC_MSG_NOTICE([Documentation enabled: $enables_doc]) AM_CONDITIONAL([DOC_ENABLED],[test "x$enables_doc" = "xyes"]) AM_CONDITIONAL([PYDOXYGEN],[test -f python/python/arc/index.xml -o "x$DOXYGEN" != "x"]) AM_CONDITIONAL([ALTPYDOXYGEN],[test -f python/altpython/arc/index.xml -o "x$DOXYGEN" != "x"]) # Check for explicitly and implicitely disabled services # A-Rex AC_ARG_ENABLE(a_rex_service, AC_HELP_STRING([--disable-a-rex-service], [disable building A-Rex service]), [enables_a_rex_service=$enableval],[]) if test "$enables_a_rex_service" = "yes"; then if test "x$SQLITE_INSTALLED" != "xyes" ; then AC_MSG_NOTICE([A-Rex can't be built without SQLite - disabling]) enables_a_rex_service="no" elif test "x$DBCXX_LIBS" = "x" ; then AC_MSG_NOTICE([A-Rex can't be built without C++ API for DB4.x - disabling]) enables_a_rex_service="no" fi fi AC_MSG_NOTICE([A-Rex service enabled: $enables_a_rex_service]) AM_CONDITIONAL([A_REX_SERVICE_ENABLED],[test "x$enables_a_rex_service" = "xyes"]) # Internal job plugin AC_ARG_ENABLE(internal, AC_HELP_STRING([--enable-internal], [enable building the internal job plugin]), [enables_internal=$enableval],[]) if test "$enables_internal" = "yes"; then if test "x$enables_a_rex_service" != "xyes" ; then AC_MSG_NOTICE([Internal job plugin can't be built without A-Rex - disabling]) enables_internal="no" fi fi AC_MSG_NOTICE([Internal plugin enabled: $enables_internal]) AM_CONDITIONAL([INTERNAL_ENABLED],[test "x$enables_internal" = "xyes"]) # Gridftpd AC_ARG_ENABLE(gridftpd_service, AC_HELP_STRING([--disable-gridftpd-service], [disable 
building Gridftpd service]), [enables_gridftpd_service=$enableval],[]) if test "$enables_gridftpd_service" = "yes"; then gridftpd_service_globus_pkgs="globus-common globus-io globus-gsi-credential globus-openssl-module globus-ftp-control" gridftpd_service_globus_pkgs_missing="" for pkg in $gridftpd_service_globus_pkgs do var=`echo '$'$pkg|tr '[\-a-z]' '[_A-Z]'|sed 's/$/_VERSION/'` if test -z "`eval echo $var`" then gridftpd_service_globus_pkgs_missing="$gridftpd_service_globus_pkgs_missing $pkg" fi done if test -n "$gridftpd_service_globus_pkgs_missing" ; then AC_MSG_NOTICE([GridFTP service can not be built (missing development packages for$gridftpd_service_globus_pkgs_missing) - disabling]) enables_gridftpd_service="no" fi #check for struct statfs AC_CHECK_FUNCS([fstatfs]) AC_CHECK_HEADERS([sys/param.h sys/statfs.h sys/mount.h sys/vfs.h])dnl AC_CHECK_MEMBERS([struct statfs.f_type],,, [$ac_includes_default #if HAVE_SYS_STATFS_H #include #endif #if HAVE_SYS_MOUNT_H #include #endif #if HAVE_SYS_VFS_H #include #endif]) fi AC_MSG_NOTICE([Gridftpd service enabled: $enables_gridftpd_service]) AM_CONDITIONAL([GRIDFTPD_SERVICE_ENABLED],[test "x$enables_gridftpd_service" = "xyes"]) # LDAP service AC_ARG_ENABLE(ldap_service, AC_HELP_STRING([--disable-ldap-service], [disable building LDAP Infosystem Service]), [enables_ldap_service=$enableval],[]) AC_MSG_NOTICE([LDAP Infosystem service enabled: $enables_ldap_service]) AM_CONDITIONAL([LDAP_SERVICE_ENABLED],[test "x$enables_ldap_service" = "xyes"]) # LDAP monitor AC_ARG_ENABLE(monitor, AC_HELP_STRING([--disable-monitor], [disable building LDAP Monitor]), [enables_monitor=$enableval],[]) AC_MSG_NOTICE([LDAP Monitor enabled: $enables_monitor]) AM_CONDITIONAL([MONITOR_ENABLED],[test "x$enables_monitor" = "xyes"]) # Cache service AC_ARG_ENABLE(candypond, AC_HELP_STRING([--disable-candypond], [disable building candypond]), [enables_candypond=$enableval],[]) if test "$enables_candypond" = "yes"; then if test ! 
"x$enables_a_rex_service" = "xyes" ; then enables_candypond="no" AC_MSG_NOTICE([CandyPond can't be built without A-REX - disabling]) fi fi AC_MSG_NOTICE([CandyPond enabled: $enables_candypond]) AM_CONDITIONAL([CANDYPOND_ENABLED],[test "x$enables_candypond" = "xyes"]) # DataDelivery service AC_ARG_ENABLE(datadelivery_service, AC_HELP_STRING([--disable-datadelivery-service], [disable building DataDelivery service]), [enables_datadelivery_service=$enableval],[]) AC_MSG_NOTICE([DataDelivery service enabled: $enables_datadelivery_service]) AM_CONDITIONAL([DATADELIVERY_SERVICE_ENABLED],[test "x$enables_datadelivery_service" = "xyes"]) # ACIX service AC_ARG_ENABLE(acix, AC_HELP_STRING([--disable-acix], [disable building ACIX service]), [enables_acix=$enableval],[]) AC_PATH_PROGS(TWISTD, twistd-${PYTHON_MAJOR} twistd${PYTHON_MAJOR} twistd) if test "x$TWISTD" = "x"; then AC_MSG_NOTICE([twistd not found - ACIX service disabled]) enables_acix="no" fi AC_MSG_NOTICE([ACIX enabled: $enables_acix]) AM_CONDITIONAL([ACIX_ENABLED],[test "x$enables_acix" = "xyes"]) # trial command (from python-twisted-core) is used for acix unittests AC_PATH_PROGS(TRIAL, trial-${PYTHON_MAJOR} trial${PYTHON_MAJOR} trial) if test "x$TRIAL" = "x"; then AC_MSG_NOTICE([trial not found - ACIX unit tests will be skipped]) fi # unit tests also require python >=2.6 AM_CONDITIONAL([ACIX_TESTS_ENABLED], [test "x$TRIAL" != "x" && test "x$PYTHON_VERSION" != "x2.4" && test "x$PYTHON_VERSION" != "x2.5"]) # Check for explicitly and implicitely disabled clients AC_ARG_ENABLE(compute_client, AC_HELP_STRING([--disable-compute-client], [disable building compute (job management) client tools]), [enables_compute_client=$enableval],[]) AC_MSG_NOTICE([Compute client tools enabled: $enables_compute_client]) AM_CONDITIONAL([COMPUTE_CLIENT_ENABLED],[test "x$enables_compute_client" = "xyes"]) AC_ARG_ENABLE(credentials_client, AC_HELP_STRING([--disable-credentials-client], [disable building client tools for handling X.509 credentials]), [enables_credentials_client=$enableval],[]) AC_MSG_NOTICE([Credentials client tools enabled: $enables_credentials_client]) AM_CONDITIONAL([CREDENTIALS_CLIENT_ENABLED],[test "x$enables_credentials_client" = "xyes"]) AC_ARG_ENABLE(data_client, AC_HELP_STRING([--disable-data-client], [disable building generic client tools for handling data]), [enables_data_client=$enableval],[]) AC_MSG_NOTICE([Data client tools enabled: $enables_data_client]) AM_CONDITIONAL([DATA_CLIENT_ENABLED],[test "x$enables_data_client" = "xyes"]) AC_ARG_ENABLE(emies_client, AC_HELP_STRING([--disable-emies-client], [disables building EMI ES-related client plugins.]), [enables_emies_client=$enableval],[]) AC_MSG_NOTICE([EMI ES plugin(s) enabled: $enables_emies_client]) AM_CONDITIONAL([EMIES_ENABLED],[test "x$enables_emies_client" = "xyes"]) AC_ARG_ENABLE(arcrest_client, AC_HELP_STRING([--disable-arcrest-client], [disables building ARC REST interface client plugins.]), [enables_arcrest_client=$enableval],[]) AC_MSG_NOTICE([ARC REST plugin(s) enabled: $enables_arcrest_client]) AM_CONDITIONAL([ARCREST_ENABLED],[test "x$enables_arcrest_client" = "xyes"]) # Check for consistency among disabled components if test "$enables_hed" = "no"; then if test "$enables_a_rex_service" = "yes" -o \ "$enables_candypond" = "yes" -o \ "$enables_datadelivery_service" = "yes" -o \ "$enables_compute_client" = "yes" -o \ "$enables_credentials_client" = "yes" -o \ "$enables_data_client" = "yes" -o \ ; then AC_MSG_ERROR(HED is needed for building any of the client or service 
tools. Please enable HED by using --enable-hed.) fi fi AM_CONDITIONAL([HED_ENABLED],[test "x$enables_hed" = "xyes"]) # A-Rex specific hack for backend scripts tmp_dir=/tmp gnu_time=/usr/bin/time case "${host}" in *darwin*) # hostname -f does not work on OS X nodename="hostname" ;; *) nodename="/bin/hostname -f" ;; esac arc_location=$prefix AC_SUBST(arc_location) AC_SUBST(tmp_dir) AC_SUBST(gnu_time) AC_SUBST(nodename) # Shell for the job control scripts posix_shell='/bin/sh' AC_SUBST(posix_shell) DATE=`date +%Y-%m-%d ${SOURCE_DATE_EPOCH:+-u -d @$SOURCE_DATE_EPOCH}` AC_SUBST(DATE) #DATER=`date -R` DATER=`date +'%a, %d %b %Y %H:%M:%S %z'` AC_SUBST(DATER) SPECDATE=`LANG=C date +"%a %b %d %Y"` AC_SUBST(SPECDATE) AC_CONFIG_FILES([Makefile include/arc/ArcVersion.h src/Makefile src/external/Makefile src/external/cJSON/Makefile src/hed/Makefile src/hed/libs/compute/Makefile src/hed/libs/compute/test/Makefile src/hed/libs/compute/examples/Makefile src/hed/libs/common/ArcVersion.h src/hed/libs/common/Makefile src/hed/libs/common/test/Makefile src/hed/libs/communication/Makefile src/hed/libs/credential/Makefile src/hed/libs/credential/test/Makefile src/hed/libs/credentialmod/Makefile src/hed/libs/crypto/Makefile src/hed/libs/cryptomod/Makefile src/hed/libs/data/Makefile src/hed/libs/data/cache-clean.1 src/hed/libs/data/cache-list.1 src/hed/libs/data/test/Makefile src/hed/libs/data/examples/Makefile src/hed/libs/Makefile src/hed/libs/loader/Makefile src/hed/libs/loader/schema/Makefile src/hed/libs/loader/test/Makefile src/hed/libs/message/Makefile src/hed/libs/message/test/Makefile src/hed/libs/security/Makefile src/hed/libs/security/ArcPDP/Makefile src/hed/libs/security/ArcPDP/attr/Makefile src/hed/libs/security/ArcPDP/policy/Makefile src/hed/libs/security/ArcPDP/alg/Makefile src/hed/libs/security/ArcPDP/fn/Makefile src/hed/libs/credentialstore/Makefile src/hed/libs/ws-addressing/Makefile src/hed/libs/ws-security/Makefile src/hed/libs/ws-security/test/Makefile src/hed/libs/infosys/Makefile src/hed/libs/infosys/schema/Makefile src/hed/libs/infosys/test/Makefile src/hed/libs/delegation/Makefile src/hed/libs/delegation/test/Makefile src/hed/libs/xmlsec/Makefile src/hed/libs/globusutils/Makefile src/hed/libs/otokens/Makefile src/hed/daemon/Makefile src/hed/daemon/scripts/Makefile src/hed/daemon/schema/Makefile src/hed/daemon/unix/Makefile src/hed/mcc/Makefile src/hed/mcc/soap/Makefile src/hed/mcc/tcp/Makefile src/hed/mcc/tcp/schema/Makefile src/hed/mcc/http/Makefile src/hed/mcc/http/schema/Makefile src/hed/mcc/tls/Makefile src/hed/mcc/tls/schema/Makefile src/hed/mcc/msgvalidator/Makefile src/hed/mcc/msgvalidator/schema/Makefile src/hed/acc/Makefile src/hed/acc/GRIDFTPJOB/Makefile src/hed/acc/ARCREST/Makefile src/hed/acc/EMIES/Makefile src/hed/acc/EMIES/arcemiestest.1 src/hed/acc/EMIES/schema/Makefile src/hed/acc/Broker/Makefile src/hed/acc/Broker/test/Makefile src/hed/acc/PythonBroker/Makefile src/hed/acc/JobDescriptionParser/Makefile src/hed/acc/JobDescriptionParser/test/Makefile src/hed/acc/ARCHERY/Makefile src/hed/acc/LDAP/Makefile src/hed/acc/TEST/Makefile src/hed/dmc/Makefile src/hed/dmc/file/Makefile src/hed/dmc/gridftp/Makefile src/hed/dmc/http/Makefile src/hed/dmc/ldap/Makefile src/hed/dmc/srm/Makefile src/hed/dmc/srm/srmclient/Makefile src/hed/dmc/gfal/Makefile src/hed/dmc/xrootd/Makefile src/hed/dmc/mock/Makefile src/hed/dmc/acix/Makefile src/hed/dmc/rucio/Makefile src/hed/dmc/s3/Makefile src/hed/profiles/general/general.xml src/hed/shc/Makefile src/hed/shc/arcpdp/Makefile 
src/hed/shc/arcpdp/schema/Makefile src/hed/shc/xacmlpdp/Makefile src/hed/shc/xacmlpdp/schema/Makefile src/hed/shc/delegationpdp/Makefile src/hed/shc/delegationpdp/schema/Makefile src/hed/shc/gaclpdp/Makefile src/hed/shc/pdpserviceinvoker/Makefile src/hed/shc/pdpserviceinvoker/schema/Makefile src/hed/shc/allowpdp/Makefile src/hed/shc/denypdp/Makefile src/hed/shc/simplelistpdp/Makefile src/hed/shc/simplelistpdp/schema/Makefile src/hed/shc/arcauthzsh/Makefile src/hed/shc/arcauthzsh/schema/Makefile src/hed/shc/usernametokensh/Makefile src/hed/shc/usernametokensh/schema/Makefile src/hed/shc/x509tokensh/Makefile src/hed/shc/x509tokensh/schema/Makefile src/hed/shc/samltokensh/Makefile src/hed/shc/samltokensh/schema/Makefile src/hed/shc/saml2sso_assertionconsumersh/Makefile src/hed/shc/delegationsh/Makefile src/hed/shc/delegationsh/schema/Makefile src/hed/shc/legacy/Makefile src/hed/shc/legacy/schema/Makefile src/hed/shc/otokens/Makefile src/hed/identitymap/Makefile src/hed/identitymap/schema/Makefile src/libs/Makefile src/libs/data-staging/Makefile src/libs/data-staging/test/Makefile src/libs/data-staging/examples/Makefile src/services/Makefile src/services/a-rex/Makefile src/services/a-rex/arc-arex src/services/a-rex/arc-arex.service src/services/a-rex/arc-arex-start src/services/a-rex/arc-arex-ws src/services/a-rex/arc-arex-ws.service src/services/a-rex/arc-arex-ws-start src/services/a-rex/a-rex-backtrace-collect src/services/a-rex/a-rex-backtrace-collect.8 src/services/a-rex/perferator src/services/a-rex/grid-manager/arc-blahp-logger.8 src/services/a-rex/grid-manager/gm-jobs.8 src/services/a-rex/grid-manager/gm-delegations-converter.8 src/services/a-rex/rest/Makefile src/services/a-rex/delegation/Makefile src/services/a-rex/grid-manager/Makefile src/services/a-rex/grid-manager/accounting/Makefile src/services/a-rex/grid-manager/conf/Makefile src/services/a-rex/grid-manager/files/Makefile src/services/a-rex/grid-manager/jobs/Makefile src/services/a-rex/grid-manager/jobplugin/Makefile src/services/a-rex/grid-manager/log/Makefile src/services/a-rex/grid-manager/mail/Makefile src/services/a-rex/grid-manager/misc/Makefile src/services/a-rex/grid-manager/run/Makefile src/services/a-rex/internaljobplugin/Makefile src/services/a-rex/grid-manager/arc-config-check.1 src/services/a-rex/infoproviders/Makefile src/services/a-rex/infoproviders/CEinfo.pl src/services/a-rex/infoproviders/ConfigCentral.pm src/services/a-rex/infoproviders/PerfData.pl src/services/a-rex/infoproviders/test/Makefile src/services/a-rex/lrms/Makefile src/services/a-rex/lrms/test/Makefile src/services/a-rex/lrms/arc/Makefile src/services/a-rex/lrms/arc/lrms/Makefile src/services/a-rex/lrms/arc/lrms/common/Makefile src/services/a-rex/lrms/lrms_common.sh src/services/a-rex/lrms/condor/Makefile src/services/a-rex/lrms/condor/scan-condor-job src/services/a-rex/lrms/condor/cancel-condor-job src/services/a-rex/lrms/condor/submit-condor-job src/services/a-rex/lrms/fork/Makefile src/services/a-rex/lrms/fork/scan-fork-job src/services/a-rex/lrms/fork/submit-fork-job src/services/a-rex/lrms/fork/cancel-fork-job src/services/a-rex/lrms/ll/Makefile src/services/a-rex/lrms/ll/submit-ll-job src/services/a-rex/lrms/ll/cancel-ll-job src/services/a-rex/lrms/ll/scan-ll-job src/services/a-rex/lrms/lsf/Makefile src/services/a-rex/lrms/lsf/submit-lsf-job src/services/a-rex/lrms/lsf/cancel-lsf-job src/services/a-rex/lrms/lsf/scan-lsf-job src/services/a-rex/lrms/pbs/Makefile src/services/a-rex/lrms/pbs/submit-pbs-job 
src/services/a-rex/lrms/pbs/cancel-pbs-job src/services/a-rex/lrms/pbs/scan-pbs-job src/services/a-rex/lrms/pbspro/Makefile src/services/a-rex/lrms/pbspro/submit-pbspro-job src/services/a-rex/lrms/pbspro/cancel-pbspro-job src/services/a-rex/lrms/pbspro/scan-pbspro-job src/services/a-rex/lrms/sge/Makefile src/services/a-rex/lrms/sge/submit-sge-job src/services/a-rex/lrms/sge/scan-sge-job src/services/a-rex/lrms/sge/cancel-sge-job src/services/a-rex/lrms/slurm/Makefile src/services/a-rex/lrms/slurm/submit-SLURM-job src/services/a-rex/lrms/slurm/scan-SLURM-job src/services/a-rex/lrms/slurm/cancel-SLURM-job src/services/a-rex/lrms/slurm/test/Makefile src/services/a-rex/lrms/slurm/test/scan/Makefile src/services/a-rex/lrms/slurm/test/submit/Makefile src/services/a-rex/lrms/boinc/Makefile src/services/a-rex/lrms/boinc/submit-boinc-job src/services/a-rex/lrms/boinc/scan-boinc-job src/services/a-rex/lrms/boinc/cancel-boinc-job src/services/a-rex/lrms/slurmpy/Makefile src/services/a-rex/lrms/slurmpy/submit-SLURMPY-job src/services/a-rex/lrms/slurmpy/scan-SLURMPY-job src/services/a-rex/lrms/slurmpy/cancel-SLURMPY-job src/services/a-rex/lrms/slurmpy/test/Makefile src/services/a-rex/lrms/slurmpy/test/submit/Makefile src/services/a-rex/lrms/slurmpy/test/scan/Makefile src/services/a-rex/rte/Makefile src/services/a-rex/rte/ENV/PROXY src/services/a-rex/rte/ENV/CANDYPOND src/services/a-rex/schema/Makefile src/services/acix/Makefile src/services/acix/scanner/Makefile src/services/acix/scanner/arc-acix-scanner src/services/acix/scanner/arc-acix-scanner-start src/services/acix/scanner/arc-acix-scanner.service src/services/acix/scanner/test/Makefile src/services/acix/core/Makefile src/services/acix/core/test/Makefile src/services/acix/indexserver/Makefile src/services/acix/indexserver/arc-acix-index src/services/acix/indexserver/arc-acix-index-start src/services/acix/indexserver/arc-acix-index.service src/services/acix/indexserver/test/Makefile src/services/candypond/Makefile src/services/data-staging/Makefile src/services/data-staging/arc-datadelivery-service src/services/data-staging/arc-datadelivery-service.service src/services/data-staging/arc-datadelivery-service-start src/services/gridftpd/Makefile src/services/gridftpd/arc-gridftpd src/services/gridftpd/arc-gridftpd.service src/services/gridftpd/arc-gridftpd-start src/services/gridftpd/gridftpd.8 src/services/gridftpd/auth/Makefile src/services/gridftpd/conf/Makefile src/services/gridftpd/misc/Makefile src/services/gridftpd/run/Makefile src/services/gridftpd/fileplugin/Makefile src/services/ldap-infosys/Makefile src/services/ldap-infosys/create-bdii-config src/services/ldap-infosys/create-slapd-config src/services/ldap-infosys/arc-infosys-ldap src/services/ldap-infosys/arc-infosys-ldap.service src/services/ldap-infosys/arc-infosys-ldap-slapd.service src/services/monitor/Makefile src/services/monitor/monitor src/services/monitor/README src/services/monitor/man/Makefile src/services/monitor/man/monitor.7 src/services/monitor/includes/Makefile src/services/monitor/mon-icons/Makefile src/services/monitor/lang/Makefile src/services/examples/Makefile src/services/examples/echo_python/Makefile src/services/wrappers/Makefile src/services/wrappers/python/Makefile src/services/wrappers/python/schema/Makefile src/clients/Makefile src/clients/data/Makefile src/clients/data/arccp.1 src/clients/data/arcls.1 src/clients/data/arcrm.1 src/clients/data/arcmkdir.1 src/clients/data/arcrename.1 src/clients/credentials/Makefile src/clients/credentials/arcproxy.1 
src/clients/compute/Makefile src/clients/compute/arcstat.1 src/clients/compute/arcinfo.1 src/clients/compute/arcsub.1 src/clients/compute/arcclean.1 src/clients/compute/arckill.1 src/clients/compute/arcget.1 src/clients/compute/arccat.1 src/clients/compute/arcresub.1 src/clients/compute/arcsync.1 src/clients/compute/arcrenew.1 src/clients/compute/arcresume.1 src/clients/compute/arctest.1 src/tests/Makefile src/tests/echo/Makefile src/tests/echo/perftest.1 src/tests/echo/echo_service.xml.example src/tests/echo/schema/Makefile src/tests/policy-delegation/Makefile src/tests/delegation/Makefile src/tests/translator/Makefile src/tests/xpath/Makefile src/tests/arcpolicy/Makefile src/tests/perf/Makefile src/tests/perf/arcperftest.1 src/tests/client/Makefile src/tests/lrms/Makefile src/utils/archery/Makefile src/utils/archery/archery-manage src/utils/python/Makefile src/utils/python/arccandypond src/utils/python/arcctl src/utils/python/arcctl.1 src/utils/python/jura-ng src/utils/python/arc/Makefile src/utils/python/arc/gen_paths_dist.sh src/utils/python/arc/utils/Makefile src/utils/python/arc/control/Makefile src/utils/hed/wsdl2hed.1 src/utils/hed/arcplugin.1 src/utils/hed/Makefile src/utils/gridmap/nordugridmap.cron src/utils/gridmap/nordugridmap.8 src/utils/gridmap/Makefile src/utils/Makefile src/wn/Makefile src/doc/Makefile src/doc/arc.conf.5 swig/Makefile python/Makefile python/Doxyfile.api python/python/Makefile python/python/arc/Makefile python/altpython/Makefile python/altpython/arc/Makefile python/test/Makefile python/test/python/Makefile python/test/altpython/Makefile python/examples/Makefile po/Makefile.in include/Makefile debian/Makefile debian/changelog.deb nordugrid-arc.spec src/hed/daemon/arched.8 src/hed/daemon/scripts/arched src/hed/daemon/scripts/arched.service src/hed/daemon/scripts/arched-start src/doxygen/Makefile ]) AC_CONFIG_FILES([src/utils/python/arcconfig-parser], [chmod +x src/utils/python/arcconfig-parser]) AC_OUTPUT AC_MSG_RESULT([ Unit testing: ${enables_cppunit} Python binding: ${enables_swig_python} ($PYTHON_VERSION) Alt.Python binding: ${enables_altpython} ($ALTPYTHON_VERSION) Available third-party features: GridFTP: ${enables_gridftp} GFAL: ${enables_gfal} S3: ${enables_s3} Xrootd: ${enables_xrootd} MYSQL CLIENT LIB: ${enables_mysql} LDAP: ${enables_ldap} xmlsec1: ${enables_xmlsec1} ARGUS: ${enables_argus} NSS: ${enables_nss} BDB C++: ${enables_dbcxx} SQLite: ${enables_sqlite} LDNS: ${enables_ldns} Enabled features: Local jobs info in BDB: ${enables_dbjstore} Local jobs info in SQLite: ${enables_sqlitejstore} Systemd Integration: ${enables_systemd} Python LRMS back-end dependency: Inline::Python Perl module: ${INLINE_PYTHON_FOUND} Included components: HED: ${enables_hed} A-REX service: ${enables_a_rex_service} Internal plugin: ${enables_internal} GRIDFTPD service: ${enables_gridftpd_service} LDAP Info service: ${enables_ldap_service} CANDYPOND service: ${enables_candypond} DATADELIVERY service: ${enables_datadelivery_service} ACIX service: ${enables_acix} COMPUTE clients: ${enables_compute_client} DATA clients: ${enables_data_client} CREDENTIAL clients: ${enables_credentials_client} EMI ES client (ACC): ${enables_emies_client} ARC REST client (ACC): ${enables_arcrest_client} SRM client (DMC): ${enables_srm_dmc} Documentation: ${enables_doc} Monitoring: LDAP Monitor ${enables_monitor} ]) nordugrid-arc-6.14.0/PaxHeaders.30264/AUTHORS0000644000000000000000000000013214152153376016414 xustar000000000000000030 mtime=1638455038.261643811 30 atime=1638455038.459646786 30 
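The AC_ARG_ENABLE and AC_ARG_WITH checks above map directly onto ./configure command-line switches. A minimal sketch of such an invocation, assuming a libs3 installation under a non-default prefix; the paths and the particular feature selection are illustrative placeholders, not recommendations:

# Sketch only: each switch corresponds to an AC_ARG_ENABLE/AC_ARG_WITH check
# in the configure script above; /usr and /opt/libs3 are hypothetical locations.
./configure --prefix=/usr \
            --enable-s3 --with-s3=/opt/libs3 \
            --enable-internal \
            --disable-doc
# The feature summary printed at the end of configure (GridFTP, S3, Xrootd,
# A-REX service, ...) reports which optional components were actually
# detected and enabled for this build.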
ctime=1638455095.729507292 nordugrid-arc-6.14.0/AUTHORS0000644000175000002070000000447514152153376016413 0ustar00mockbuildmock00000000000000Individual contributors to the source code ------------------------------------------ David Cameron Péter Dóbé Mattias Ellert Thomas FrÃ¥gÃ¥t Ali Gholami Michael Glodek Jørgen Beck Hansen Henrik Thostrup Jensen Daniel Johansson Johan Jönemo Dmytro Karpenko Tamás Kazinczy Marek KoÄan Aleksandr Konstantinov Balázs Kónya Hajo Nils Krabbenhöft Andrew Lahiff Juha Lento Peter Lundgaard Rosendahl Iván Márton Luca Mazzaferro Bjarte Mohn Steffen Möller Zsombor Nagy Aleksei Nazarov Jon Kerr Nilsen Markus Nordén Weizhong Qiang Gábor RÅ‘czei Florido Paganelli Andrii Salnikov Martin Savko Martin Skou Andersen Oxana Smirnova Ferenc Szalai Gábor Szigeti Christian Ulrik Søttrup Adrian Taga Salman Zubair Toor Olli Tourunen Petter Urkedal Wenjing Wu Anders Wäänänen Thomas Zangerl Organisations employing contributors ------------------------------------ University of Copenhagen (Denmark) NORDUnet - Nordic Infrastructure for Research and Education (Denmark) CSC - IT Center for Science Ltd (Finland) University of Lübeck (Germany) NIIFI - National Information Infrastructure Development Institute (Hungary) University of Oslo (Norway) NordForsk (Norway) Pavol Jozef Å afárik University in KoÅ¡ice (Slovakia) Linköping University (Sweden) Lund University (Sweden) Royal Institute of Technology (Sweden) Uppsala University (Sweden) Taras Shevchenko National University of Kyiv (Ukraine) nordugrid-arc-6.14.0/PaxHeaders.30264/LICENSE0000644000000000000000000000013214152153376016351 xustar000000000000000030 mtime=1638455038.261643811 30 atime=1638455038.459646786 30 ctime=1638455095.738507427 nordugrid-arc-6.14.0/LICENSE0000644000175000002070000002367614152153376016354 0ustar00mockbuildmock00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS nordugrid-arc-6.14.0/PaxHeaders.30264/py-compile0000644000000000000000000000013214152153437017343 xustar000000000000000030 mtime=1638455071.248139447 30 atime=1638455071.248139447 30 ctime=1638455100.283575719 nordugrid-arc-6.14.0/py-compile0000755000175000002070000001107614152153437017340 0ustar00mockbuildmock00000000000000#!/bin/sh # py-compile - Compile a Python program scriptversion=2011-06-08.12; # UTC # Copyright (C) 2000-2013 Free Software Foundation, Inc. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # This file is maintained in Automake, please report # bugs to or send patches to # . if [ -z "$PYTHON" ]; then PYTHON=python fi me=py-compile usage_error () { echo "$me: $*" >&2 echo "Try '$me --help' for more information." >&2 exit 1 } basedir= destdir= while test $# -ne 0; do case "$1" in --basedir) if test $# -lt 2; then usage_error "option '--basedir' requires an argument" else basedir=$2 fi shift ;; --destdir) if test $# -lt 2; then usage_error "option '--destdir' requires an argument" else destdir=$2 fi shift ;; -h|--help) cat <<\EOF Usage: py-compile [--help] [--version] [--basedir DIR] [--destdir DIR] FILES..." Byte compile some python scripts FILES. Use --destdir to specify any leading directory path to the FILES that you don't want to include in the byte compiled file. 
Specify --basedir for any additional path information you do want to be shown in the byte compiled file. Example: py-compile --destdir /tmp/pkg-root --basedir /usr/share/test test.py test2.py Report bugs to . EOF exit $? ;; -v|--version) echo "$me $scriptversion" exit $? ;; --) shift break ;; -*) usage_error "unrecognized option '$1'" ;; *) break ;; esac shift done files=$* if test -z "$files"; then usage_error "no files given" fi # if basedir was given, then it should be prepended to filenames before # byte compilation. if [ -z "$basedir" ]; then pathtrans="path = file" else pathtrans="path = os.path.join('$basedir', file)" fi # if destdir was given, then it needs to be prepended to the filename to # byte compile but not go into the compiled file. if [ -z "$destdir" ]; then filetrans="filepath = path" else filetrans="filepath = os.path.normpath('$destdir' + os.sep + path)" fi $PYTHON -c " import sys, os, py_compile, imp files = '''$files''' sys.stdout.write('Byte-compiling python modules...\n') for file in files.split(): $pathtrans $filetrans if not os.path.exists(filepath) or not (len(filepath) >= 3 and filepath[-3:] == '.py'): continue sys.stdout.write(file) sys.stdout.flush() if hasattr(imp, 'get_tag'): py_compile.compile(filepath, imp.cache_from_source(filepath), path) else: py_compile.compile(filepath, filepath + 'c', path) sys.stdout.write('\n')" || exit $? # this will fail for python < 1.5, but that doesn't matter ... $PYTHON -O -c " import sys, os, py_compile, imp # pypy does not use .pyo optimization if hasattr(sys, 'pypy_translation_info'): sys.exit(0) files = '''$files''' sys.stdout.write('Byte-compiling python modules (optimized versions) ...\n') for file in files.split(): $pathtrans $filetrans if not os.path.exists(filepath) or not (len(filepath) >= 3 and filepath[-3:] == '.py'): continue sys.stdout.write(file) sys.stdout.flush() if hasattr(imp, 'get_tag'): py_compile.compile(filepath, imp.cache_from_source(filepath, False), path) else: py_compile.compile(filepath, filepath + 'o', path) sys.stdout.write('\n')" 2>/dev/null || : # Local Variables: # mode: shell-script # sh-indentation: 2 # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-time-zone: "UTC" # time-stamp-end: "; # UTC" # End: nordugrid-arc-6.14.0/PaxHeaders.30264/NOTICE0000644000000000000000000000013214152153376016250 xustar000000000000000030 mtime=1638455038.262643825 30 atime=1638455038.459646786 30 ctime=1638455095.738507427 nordugrid-arc-6.14.0/NOTICE0000644000175000002070000000300014152153376016226 0ustar00mockbuildmock00000000000000Advanced Resource Connector (ARC) This product includes Advanced Resource Connector (ARC) software. The software is developed by the NorduGrid collaboration (http://www.nordugrid.org) with financial support from the European Commission and Nordic Research Councils. Unless stated otherwise, the Copyright is collectively owned by individual contributors and contributing organisations as listed in the AUTHORS file. The software is licensed under the Apache License, Version 2.0 (the "License"); you may not use files from this software distribution except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. Some hash function code in ACIX is provided by the General Purpose Hash Function Algorithms Library licensed under the Common Public License and written by Arash Partow. This code is in src/services/acix/core/hashes.py. The original software is available from http://www.partow.net/programming/hashfunctions/index.html Support for json parsing is provided by the cJSON library written by Dave Gamble and licensed under the MIT license. This code is in src/external/cJSON. The original software is available from http://cjson.sourceforge.net/ nordugrid-arc-6.14.0/PaxHeaders.30264/src0000644000000000000000000000013214152153475016056 xustar000000000000000030 mtime=1638455101.328591421 30 atime=1638455103.996631509 30 ctime=1638455101.328591421 nordugrid-arc-6.14.0/src/0000755000175000002070000000000014152153475016120 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376020167 xustar000000000000000030 mtime=1638455038.336644937 30 atime=1638455038.472646981 30 ctime=1638455095.757507713 nordugrid-arc-6.14.0/src/Makefile.am0000644000175000002070000000165514152153376020163 0ustar00mockbuildmock00000000000000if HED_ENABLED if DOC_ENABLED BUILD_SOURCES = external doc hed libs tests services clients utils wn doxygen else BUILD_SOURCES = external doc hed libs tests services clients utils wn endif else BUILD_SOURCES = endif SUBDIRS = $(BUILD_SOURCES) DIST_SUBDIRS = external doc hed libs tests services clients utils doxygen wn # This won't work in case of cross-compilation. Please # some autotools experts fix it. if HED_ENABLED install-exec-hook: if test "x$(build_triplet)" = "x$(host_triplet)"; then env LD_LIBRARY_PATH=$(DESTDIR)$(libdir):$(LD_LIBRARY_PATH) $(top_builddir)/src/utils/hed/arcplugin$(EXEEXT) -c $(DESTDIR)$(pkglibdir) -c $(DESTDIR)$(pkglibdir)/test -c $(DESTDIR)$(pkglibdir)/external; else echo "No .apd files since we are cross-compiling"; fi uninstall-local: test "x$(build_triplet)" = "x$(host_triplet)" && rm -f $(DESTDIR)$(pkglibdir)/*.apd $(DESTDIR)$(pkglibdir)/test/*.apd $(DESTDIR)$(pkglibdir)/external/*.apd endif nordugrid-arc-6.14.0/src/PaxHeaders.30264/doc0000644000000000000000000000013214152153467016624 xustar000000000000000030 mtime=1638455095.824508719 30 atime=1638455103.996631509 30 ctime=1638455095.824508719 nordugrid-arc-6.14.0/src/doc/0000755000175000002070000000000014152153467016666 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/doc/PaxHeaders.30264/arc.conf.5.in0000644000000000000000000000013214152153376021064 xustar000000000000000030 mtime=1638455038.341645013 30 atime=1638455038.474647011 30 ctime=1638455095.822508689 nordugrid-arc-6.14.0/src/doc/arc.conf.5.in0000644000175000002070000000702714152153376021057 0ustar00mockbuildmock00000000000000.TH arc.conf 5 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid ARC" .SH NAME arc.conf \- ARC services configuration .SH DESCRIPTION .PP ARC has two separate configuration files - one for client tools and another for services. This man page describes the services configuration file. For client configuration please see "ARC Clients User Manual" at http://www.nordugrid.org/documents/arc-ui.pdf .PP This man page IS NOT the ultimate source of information about \fBarc.conf\fR. The reference documentation containing all configuration option description is \fBarc.conf.reference\fR file that can be found in \fB@prefix@/@pkgdatasubdir@/doc\fR. 
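.PP
As a hedged illustration of the block syntax described in the BASIC STRUCTURE section below, a fragment of a services configuration could be written to
.I /etc/arc.conf
as follows. The blocks shown, \fI[arex]\fR with its \fBsessiondir\fR option, the \fI[arex/ws]\fR sub-block and a \fI[queue:short]\fR block, are the examples used elsewhere in this man page; the value shown is a placeholder and NOT a working configuration.
.nf
\f(CW
# Sketch only: block headers use the [keyword:identifier] syntax, option
# values are written without quotes, and the parent [arex] block must be
# present before its [arex/ws] sub-block. The path below is a placeholder.
cat > /etc/arc.conf <<'EOF'
[arex]
sessiondir=/var/spool/arc/sessiondir

[arex/ws]

[queue:short]
EOF
\fR
.fi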
.PP To get inline help about particular configuration option use \fBarcctl (1)\fR tool. .PP For example to get description of \fBsessiondir\fR option in \fB[arex]\fR block run .IP \f(CW# arcctl config describe arex sessiondir\fR .PP .SH BASIC STRUCTURE .PP A block configures an ARC service, a service interface, a utility or a subsystem. Enabling (turning on) a functionality, a service or an interface requires the presence of the appropriate configuration block. To disable a service or an interface, simply delete or comment out the related arc.conf block (you may need to rerun the corresponding startup script). .PP A block is identified by its block header. A block header may consist of keywords and optionally block identifiers. Keywords may be separated by "/" and used to label subblocks (e.g. \fI[arex/jura]\fR), while block identifiers are separated by ":" from keywords. .PP For example, in the \fI[queue:short]\fR block header \fIqueue\fR is a keyword while \fIshort\fR is an identifier, e.g. the name of the queue. Block headers must be UNIQUE. .PP A block starts with a unique \fI[keyword:identifier]\fR blockheader and ends where the next block starts, that is at the next \fI[blockheader]\fR directive. .PP A block may have sub-blocks e.g. the various interfaces of the AREX service are configured via sub-blocks (e.g. \fI[arex/ws]\fR). When a sub-block is enabled then the corresponding parent block MUST also appear in the arc.conf file. .PP Configuration blocks contain (config option, config value) pairs following the syntax: .I config_option=value element [optional value element] in single line. .PP Each of the configuration options have well-defined default that is specified in this reference file. The default can take either a pre-set value, a special substitution or the keyword \fIundefined\fR. Configuration options within an enabled block take their default values in case they are missing (or commented out). Configuration parameters with undefined defaults takes no values. Furthermore, configuration options within disabled blocks takes no values either. .PP Configuration blocks related to authorization are ORDER-DEPENDENT! The authorization blocks \fI[authgroup:name]\fR MUST be defined before used in the other blocks. Furthermore, the order of the authorization blocks itself may have influence over authorization decisions! .PP Note that quotes around the configuration value(s) must NOT be used any longer. .PP Note that the arc.conf is CASE-SENSITIVE! .SH FILES .I /etc/arc.conf, .I ${ARC_LOCATION}/etc/arc.conf, .SH REPORTING BUGS Report bugs to http://bugzilla.nordugrid.org/ .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. 
Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arcctl (1), .BR arc-config-check (1) nordugrid-arc-6.14.0/src/doc/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376020734 xustar000000000000000030 mtime=1638455038.341645013 30 atime=1638455038.474647011 30 ctime=1638455095.821508674 nordugrid-arc-6.14.0/src/doc/Makefile.am0000644000175000002070000000013614152153376020721 0ustar00mockbuildmock00000000000000man_MANS = arc.conf.5 EXTRA_DIST = arc.conf.reference arc.conf.DELETED arc.conf.DELETED-6.8.0 nordugrid-arc-6.14.0/src/doc/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153422020735 xustar000000000000000030 mtime=1638455058.037940959 30 atime=1638455091.850449007 30 ctime=1638455095.820508659 nordugrid-arc-6.14.0/src/doc/Makefile.in0000644000175000002070000005307714152153422020736 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/doc DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/arc.conf.5.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 
$(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = arc.conf.5 CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! 
-r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } man5dir = $(mandir)/man5 am__installdirs = "$(DESTDIR)$(man5dir)" NROFF = nroff MANS = $(man_MANS) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = 
@GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ 
XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ man_MANS = arc.conf.5 EXTRA_DIST = arc.conf.reference arc.conf.DELETED arc.conf.DELETED-6.8.0 all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/doc/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/doc/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): arc.conf.5: $(top_builddir)/config.status $(srcdir)/arc.conf.5.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man5: $(man_MANS) @$(NORMAL_INSTALL) @list1=''; \ list2='$(man_MANS)'; \ test -n "$(man5dir)" \ && test -n "`echo $$list1$$list2`" \ || exit 0; \ echo " $(MKDIR_P) '$(DESTDIR)$(man5dir)'"; \ $(MKDIR_P) "$(DESTDIR)$(man5dir)" || exit 1; \ { for i in $$list1; do echo "$$i"; done; \ if test -n "$$list2"; then \ for i in $$list2; do echo "$$i"; done \ | sed -n '/\.5[a-z]*$$/p'; \ fi; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^5][0-9a-z]*$$,5,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man5dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man5dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man5dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man5dir)" || exit $$?; }; \ done; } uninstall-man5: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man5dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.5[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^5][0-9a-z]*$$,5,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ dir='$(DESTDIR)$(man5dir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(MANS) installdirs: for dir in "$(DESTDIR)$(man5dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man5 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-man uninstall-man: uninstall-man5 .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-man5 install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags-am uninstall uninstall-am uninstall-man \ uninstall-man5 # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/doc/PaxHeaders.30264/arc.conf.DELETED0000644000000000000000000000013214152153376021361 xustar000000000000000030 mtime=1638455038.341645013 30 atime=1638455038.474647011 30 ctime=1638455095.823508704 nordugrid-arc-6.14.0/src/doc/arc.conf.DELETED0000644000175000002070000005140414152153376021352 0ustar00mockbuildmock00000000000000############################################################################### ## ## This is the arc.conf.DELETED file that contains all the configuration blocks ## and options that got DELETED during the ARC6 confuguration reengineering. ####################################################################################### ########################## DELETED CONFIG OPTIONS ################################### ### The [common] block ############################################## ## voms_trust_chain - Define the DN chain that the host services trust when the ## voms AC from peer voms proxy certificate is parsed and validated. ### The [authgroup:name] (previously [group]) blocks ###################################### ## name authgroup_name - This optional parameter specifies the name of the authgroup. ## This must be the same as the one in the [authgroup:name] block name. ## If this parameter not set then the name of the subblock is used instead, for example [authgroup:allowedusers] ## CHANGE186: DELETED. use the name from the block header instead! ## lcas library directory database - Call LCAS functions to check rule. ## CHANGE35: DELETED. The same can be done with the "plugin" parameter ## remote URL ... - Check user's credentials against remote service. Only ## CHANGE36: DELETED. Feature is not used for ages. Code simplification. ### The [userlist:name] (previously [vo]) blocks ################################ ## id blockid - specifies the unique configuration block id (this does not affect nordugridmap utility) ## CHANGE18: DELETED should be removed from code and config now that the userlist blocks have unique names ## vo vo_name - This optional parameter specifies the name of the userlist. ## This must be the same as the one in the [userlist:name] block name. ## CHANGE19: DELETED. use the name from the block header instead! ## require_issuerdn yes/no - YES would map only those DNs obtained from the urls... ## CHANGE23: DELETED. no valid use-case for this feature any longer. ### The [mapping] block ############################################## ## gridmap = path - The gridmap file location. gridmap-based mapping is not recommended any longer. ## CHANGE: MOVED to this block from [common] ## CHANGE: DELETED. Use map_with_file instead. ## unixmap = [unixname][:unixgroup] rule - A more sophisticated way to map ## Grid identity of client to local account. If client matches 'rule' ## it's assigned specified unix identity or one generated by rule. ## Mapping commands are processed sequentially and processing stops ## at first successful one (like in [authgroup] section). For possible rules ## read "ARC Computing Element. System Administrator guide" manual. All ## rules defined in [authgroup] section can be used. There are also additional ## rules which produce not only yes/no result but also give back user and ## group names to which mapping should happen. The way it works is quite ## complex so it is better to read full documentation. ## For safety reasons if sophisticated mapping is used it is better to ## finish mapping sequence with default mapping to nonexistent or safe ## account. ## CHANGE: DELETED. Use map_to_user instaed. 
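## For reference, a minimal sketch of the replacement rule in the new [mapping] block could look like the line below (the "authgroupA" name is just an illustrative placeholder; see the map_to_user description in arc.conf.reference for the exact semantics):
#map_to_user=authgroupA nobody:nobody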
## unixgroupmap = authgroup rule - (previously unixgroup) Mapping rule only for users belonging to ## specified authorization 'authgroup'. It is similar to an additional filter ## for unixmap command which filters out all users not belonging to specified ## authorization group. Only rules which generate unix user and group names ## may be used in this command. Please read "ARC Computing Element System ## Administrator Guide" for more information. ## CHANGE: DELETED. Use map_ options that provides the same 'rule' functionality instead. ## unixlistmap = userlist_name rule - (previously unixvo) Mapping rule only for users belonging to ## specified userlist defined via the [userlist:name] block. ## Only rules which generate unix identity name may be used in this command. ## Please read "ARC Computing Element. System Administrator Guide" for more ## information. This command is similar to 'unixgroupmap' described above and ## exists for convenience for setups which base mapping on userlists. ## CHANGE: DELETED. Define the authgroup with userlist and use map_ options instead. ### The [lrms] block ############################################## ## maui_bin_path = path - Sets the path of the maui commands like showbf. ## CHANGE: DELETED, not used any longer ### Desktop Bridge options: set these only in case of lrms=dgbridge ## dgbridge_stage_dir = path - Desktop Bridge www publish dir. ## CHANGE: DELETED ## dgbridge_stage_prepend = url - Desktop Bridge url prefix pointing to dgbridge_stage_dir. ## CHANGE: DELETED ### The [lrms/ssh] block ############################################## ## remote_runtimedir = path - Runtime environment directory on cluster frontend to ## be mounted (sshfs) on CE machine at directory specified by the 'runtimedir' ## attribute in the [arex] block. ## CHANGE: DELETED ### The [arex] block - previously [grid-manager] ##################### ## logsize size [number] - 'Size' specifies in bytes how big log file is ## CHANGE38: DELETED. This functionality is not used. ## logreopen yes|no - Specifies if log file must be closed after each record is added. ## CHANGE39: DELETED. not really used. hide it from the arc.conf template. Can stay as a "hidden feature". ## localcred timeout plugin_path - Every time an external executable ## CHANGE45: DELETED. remove from code, config. ## globus_tcp_port_range - Firewall configuration. ## CHANGE46: MOVED from this block to [arex/data-staging] ## globus_udp_port_range - Firewall configuration. ## CHANGE47: MOVED from this block to [arex/data-staging] ## x509_user_cert - Location of credentials for service. ## CHANGE48: DELETED. no need to separately set these for A-REX. The common block is enough. ## TODO: for consistency with other blocks maybe we should add the cert-related options back? ## x509_user_key - Location of credentials for service. ## CHANGE49: DELETED. no need to separately set these for A-REX. The common block is enough. ## x509_cert_dir - Location of trusted CA certificates ## CHANGE50: DELETED. no need to separately set these for A-REX. The common block is enough. ### The [arex/cache/cleaner] block ######################################### ## remotecachedir cache_path [link_path] - specifies caches which are under ## CHANGE54: DELETED, this feature is not in use any longer. remove parameter and clean code ### The [arex/data-staging] (previously [data-staging]) block ########### ## securetransfer = yes|no - if data connection allows to choose use ## secure|non-secure data transfer. Currently only works for gridftp. 
## TODO: DELETE ## CHANGE: DELETED ### The [arex/ws] block ################################# ## enable_arc_interface yes|no - turns on or off the ARC own WS interface ## CHANGE68: DELETED. All non-EMIES interfaces (bes and co.) should be removed from the code since EMI-ES will be the only WS interface we support. ## enable_emies_interface - enable the EMI Execution Service interface. ## CHANGE69: DELETED. The interfaces are enabled/disabled by the block, no need for such parameter. ### The [arex/ws/candypond] block (previously cacheservice) ############# ## enable_cache_service yes|no - Turn on or off the cache service interface. ## CHANGE78: DELETED. Service interface, functionality is enabled/disabled by subblocks. ### The [arex/ws/emies] block ################################ ## require_gridmapfile = yes/no - (previously allowunknown) Specifies whether to check user subject ## against grid-mapfile and reject users not listed in gridmap-file. ## allowedvalues: yes no ## default: no #require_gridmapfile=no ## CHANGE: DELETED ### The [arex/jura] block ################################### ## urbatchsize = number - JURA sends usage records not one-by-one, but in batches. ## CHANGE: DELETED from this block because it should be set either in APEL or SGAS subblocks ## jobreport publisher - name of the accounting records publisher. ## CHANGE96: DELETED. hardcode JURA as THE publisher in A-REX. ## jobreport credentials path [key_file [cert_file [ca_dir]]] ## CHANGE100: DELETED. make sure jura can use the standard credentials from common block or the optional x509 parameters above ## jobreport options [name:value, ...]- specifies additional parameters for the jobreporter. ## CHANGE101: DELETED. a new block structure was created to configure those jura options. ## jobreport=URL ... number specifies that A-REX has to report information about jobs being ## CHANGE102: DELETED. new blocks are introduced as a replacement. ### The [arex/ganglia] block - previously [gangliarc] ############################### ## ganglialocation path - path to ganglia gmetric executable ## CHANGE188: DELETED. documented first in 5.3.0. Use gmetric_bin instead. ## logfile = path - log file of the daemon. ## default: /var/log/arc/gangliarc.log #logfile=/tmp/gangliarc.log ## CHANGE: DELETED ## pidfile = pid - pid file of the daemon. ## default: /run/gangliarc.pid #pidfile=/tmp/gangliarc.pid ## CHANGE: DELETED ## python_bin_path = path - The path to python executable. ## default: /usr/bin/python #python_bin_path=/usr/local/bin/python ## CHANGE: DELETED ### The [gridftpd] block ####################################################### ## require_gridmapfile = yes/no - (previously allowunknown) Specifies whether to check user subject ## against grid-mapfile and reject users not listed in gridmap-file. ## allowedvalues: yes no ## default: no #require_gridmapfile=no ## CHANGE: DELETED ## daemon yes|no - Whether the is run in daemon mode. Default is yes. ## CHANGE119: DELETED. remove this config parameter from arc.conf. the functionality will remain via cli option ## logsize size [number] - 'Size' specifies in bytes how big log file is ## CHANGE120: DELETED. remove for simplification purposes. ## include - Include contents of another config file. ## CHANGE121: DELETED. no need for this feature any longer ## pluginpath - directory where the plugin libraries are installed, default is ## CHANGE124: DELETED. hide this parameter from arc.conf. gridftp service should work out-of-the-box on a standard linux installation. 
### The [gridftpd/jobs] block ############################## ## path virtdir - The path to the virtual gridftpd directory which is used during the ## CHANGE125: DELETED. remove this flexibility. the "/jobs" string must be hardcoded in the server. ## plugin name - specifies name of shared library to be loaded relative to "pluginpath". ## CHANGE126: DELETED. hide it from sysadmin. make the plugin loading/configuration automatic. if a gridftpd/jobs block is enabled, load the right plugin automatically. ## remotegmdirs controldir sessiondir - Specifies control ## and session directories to which jobs can be submitted but which are ## under the control of another A-REX. ## CHANGE129: DELETED. remove feature and config parameter. ## configfile service_configuration_path - If [gridftpd] and [arex] ## configuration parts are located in separate files this configuration ## CHANGE130: DELETED. ### The [gridftpd/filedir] block ######################################### ## plugin name - specifies name of shared library to be loaded relative to ## CHANGE131: DELETED. hide it from sysadmin. make the plugin loading/configuration automatic. if a gridftpd/filedir block is enabled, load the right plugin automatically. ### The [infosys] block ################################################ ## overwrite_config yes|no - determines if the infosys startup scripts ## should generate new low-level slapd configuration files. ## CHANGE135: DELETED. This functionality got lost during the years. startup scripts don't support this feature any longer ## oldconfsuffix .suffix - sets the suffix of the backup files of the low-level slapd config files. ## CHANGE136: DELETED. This functionality got lost during the years. startup scripts don't support this feature any longer ## debug - sets the debug level/verbosity of the startup script {0 or 1}. ## CHANGE137: DELETED. ## infosys_compat - Setting this variable will cause ARC to use the old ## infoproviders. NOTE: this only applies to ARC < 13.11. Old infoproviders ## CHANGE138: DELETED. not relevant for a very long time by now. ## cachetime affects old infoproviders, and forces the validity time of the record. ## CHANGE139: DELETED. not parsed any longer. ## giis_fifo - path to fifo used by EGIIS. default is /run/arc/giis-fifo ## CHANGE140: DELETED. hardcode it in startupscript! ## user = unix_user - the unix user running the infosys processes such as ## CHANGE: DELETED, moved to the [infosys/ldap] sub-block ### The [infosys/ldap] block ################################################ ## slapadd = path - Configure where the slapadd command is located. ## CHANGE: DELETED (not used in the code) ## giis_location - If giis_location is not set, ARC_LOCATION will be used instead. ## CHANGE142: DELETED. ## slapd_cron_checkpoint - LDAP checkpoint enable/disable ## This option was introduced to solve bug #2032, to reduce the number ## of log files produced by BDII. ## CHANGE143: DELETED. not relevant any longer. ## db_archive - path to slapd_db_archive binary ## Only used by the above LDAP checkpoint option ## CHANGE144: DELETED. not relevant any longer. ## db_checkpoint - path to slapd_db_checkpoint binary ## Only used by the above LDAP checkpoint option ## CHANGE145: DELETED. not relevant any longer. ## infosys_nordugrid enable - These three variables decide which schema should be used for ## CHANGE146: DELETED. subblocks are used to enable/disable schema-specific publishing. 
## slapd_pid_file path - Allows to change slapd pidfiles filename and location ## CHANGE148: DELETED ## cachettl = number - cachettl affects registration to egiis ## The value is reported back by the index server as Mds-Service-Ldap-cachettl ## Default is zero, i.e. no limits are set. ## CHANGE: DELETED ### The [infosys/glue2/ldap] schema sub-block ########################### ## infosys_glue2_ldap - Enables GLUE2 schema ## CHANGE: DELETED. use block instead. ### The [infosys/glue1] (previously [infosys/glue12]) schema block ########## ## infosys_glue12 - Enables glue1.2/1.3 schema ## CHANGE162: DELETED. use block instead ## provide_glue_site_info yes/no - This variable decides if the GlueSite should be published. ## CHANGE163: DELETED. enabled if [infosys/glue1/site-bdii] block exists, otherwise disabled. ### The [infosys/cluster] block ################################################### ## gm_mount_point - this is the same as the "path" from the [gridftpd/jobs] ## CHANGE150: DELETED. hardcode "jobs" in the infoproviders. ## gm_port - this is the same as the "port" from the [gridftpd] block. The ## CHANGE151: DELETED. hardcode 2811 in the infoproviders. ## benchmark = name value - This optional multivalued attribute can be used to ## specify benchmark results on the cluster level. Use this cluster attribute ## if only the NODES are homogeneous with respect to the benchmark performance. ## Otherwise the similar queue-level attribute should be used. Please try to ## use standard benchmark names, if possible. ## CHANGE: DELETED since ARC 6.4. Queue-level values should be used instead. ### The [queue:name] blocks ####################################### ## name string - Sets the name of the grid-enabled queue. It MUST match the name ## label from the queue block header. ## CHANGE187: DELETED. use the name in the block header instead! ## scheduling_policy = MAUI/FIFO - This optional parameter tells the scheduling policy of ## CHANGE DELETED ## cachetime - LDAP parameters of the queue+jobs+users.pl (old) infoprovider, use the defaults, ## CHANGE153: DELETED. ## sizelimit - affects registration to EGIIS ## CHANGE154: DELETED ### The [nordugridmap] block ########################################### ## generate_vomapfile - control is nordugridmap will generate ## vo-mapfile used by arc-ur-logger. Default is 'yes'. ## CHANGE26: DELETED, we don't support arc-ur-logger any longer. don't generate that file any longer either. ## vomapfile - path to vo-mapfile location. ## CHANGE27: DELETED, we don't support arc-ur-logger any longer. don't generate that file any longer either. ## issuer_processing - control the behavior of [userlist/name] block's require_issuerdn ## CHANGE28: DELETED ####################################################################################### ########################## DELETED COMPLETE BLOCKS ################################## ### [registration/emir] block ####################################### ## Services registration into EMIR block ## configures and enables the registration process of a ## services enabled in this configuration file into EMI ## indexing service (EMIR). ## Currently only implemented for A-REX. ## CHANGE165: DELETED. entire emir registration block is removed from arc.conf. check for code cleanup as well. ## emiurls url - List of URL separated by comma of EMIR services which are to accept ## CHANGE166: DELETED ## validity sec - Time in seconds for which registration records should stay valid. 
## CHANGE167: DELETED ## period sec - Time in seconds how othen registration record should be sent to the ## CHANGE168: DELETED ## disablereg_xbes yes/no - disablereg_xbes may be used to selectively disable registration of ## CHANGE169: DELETED ## ## ### end of the [registration/emir] block ############################ ### The [infosys/index/indexname] block ################################ ## WARNING: the ldap-based Info Index Service, together with the registration ## is OBSOLETE technology, therefore the new config format is not supporting GIIS. ## Use earlier ARC releases if you want to setup an EGIS server. ## CHANGE191: DELETED block, the entire block is deleted. ## name - The unique (within the hosting machine) name of the ## CHANGE192: DELETED ## allowregistration - Implements registration filtering within an Index Sevice ## CHANGE193: DELETED ## ### end of the [infosys/index/indexname] block ############## ### The [infosys/index/indexname/registration/registrationname] blocks #### ## WARNING: the ldap-based registration is OBSOLETE technology therefore ## the new config is not suppoprting seting up GIIS services. Use previous ARC releases for that. ## CHANGE194: DELETED. entire config block is deleted. ## targethostname - the hostname of the machine running the registration target ## CHANGE195: DELETED ## targetport - the port on which the target Index Service is running. ## CHANGE196: DELETED ## targetsuffix - the LDAP suffix of the target Index Service ## CHANGE197: DELETED ## regperiod - The registration period in seconds, the registration messages are ## CHANGE198: DELETED ## registranthostname - the hostname of the machine sending the registrations. ## CHANGE199: DELETED ## registrantport - the port of the slapd service hosting the ## CHANGE200: DELETED ## registrantsuffix - the LDAP suffix of the registrant Index Service. ## CHANGE201: DELETED ## timeout - The suggested timeout to be included in the registration. ## CHANGE202: DELETED ## ttl - The suggested TTL to be included in the registration. The default ## CHANGE203: DELETED ## ## ### end of the [infosys/index/indexname/registration/registrationname] block ######### ### The [infosys/cluster/registration/NAME] blocks ############ ## WARNING: the ldap-based registration is OBSOLETE technology ## ## CHANGE213: DELETED block, all the config options are OBSOLETED and DELETED in this block ## registrationlog = path - specifies the logfile for the registration processes ## initiated by your machine. ## CHANGE212: DELETED ## sizelimit = number - sizelimit affects registration to egiis ## The value is reported back by the index server as Mds-Service-Ldap-sizelimit ## CHANGE212: DELETED ## *targetgiis = name - the name of the index service to which the registration to be sent. ## CHANGE212: DELETED ## *targethostname = hostname - the hostname of the machine running the registration target ## CHANGE204: DELETED ## targetport = number - the port on which the target Index Service is running. ## CHANGE205: DELETED ## targetsuffix = string - the LDAP suffix of the target Index Service ## CHANGE206: DELETED ## regperiod = seconds - The registration period in seconds, the registration messages are ## CHANGE207: DELETED ## registranthostname = hostname - the hostname of the machine sending the registrations. ## CHANGE208: DELETED ## registrantport = port - the port of the slapd service hosting the ## registrant Index Service. 
The attribute inherits its value from the ## CHANGE209: DELETED ## registrantsuffix = string - the LDAP suffix of the registrant cluster resource ## It is automatically determined from the [infosys] block and the ## CHANGE210: DELETED ## ttl = seconds - The suggested TTL to be included in the registration. The default ## CHANGE211: DELETED ## ## ### end of the [infosys/cluster/registration/NAME] blocks ########### nordugrid-arc-6.14.0/src/doc/PaxHeaders.30264/arc.conf.reference0000644000000000000000000000013214152153376022251 xustar000000000000000030 mtime=1638455038.342645028 30 atime=1638455038.474647011 30 ctime=1638455095.823508704 nordugrid-arc-6.14.0/src/doc/arc.conf.reference0000644000175000002070000045242714152153376022254 0ustar00mockbuildmock00000000000000#################################################################### ## ## This is the arc.conf REFERENCE DOCUMENT defining the configuration blocks and ## configuration options for the ARC services. ## ## WARNING: this file will not work as a configuration template! ## NEVER USE THIS DOCUMENT AS A CONFIGURATION FILE! ## ## The arc.conf configuration file consists of the following blocks: ## ## [common] ## [authgroup:groupname] ## [mapping] ## [authtokens] ## [lrms] ## [lrms/ssh] ## [arex] ## [arex/cache] ## [arex/cache/cleaner] ## [arex/data-staging] ## [arex/ws] ## [arex/ws/jobs] ## [arex/ws/publicinfo] ## [arex/ws/cache] ## [arex/ws/candypond] ## [arex/ws/argus] ## [arex/jura] ## [arex/jura/sgas:targetname] ## [arex/jura/apel:targetname] ## [arex/jura/archiving] (removed in 6.8.0) ## [arex/ganglia] ## [gridftpd] ## [gridftpd/jobs] ## [gridftpd/filedir] ## [infosys] ## [infosys/ldap] ## [infosys/nordugrid] ## [infosys/glue2] ## [infosys/glue2/ldap] ## [infosys/glue1] ## [infosys/glue1/site-bdii] ## [infosys/cluster] ## [queue:name] ## [datadelivery-service] ## [acix-scanner] ## [acix-index] ## [userlist:name] ## [nordugridmap] ## [custom:blockname] ## ## A block configures an ARC service, a service interface, a utility or a subsystem. ## Enabling (turning on) a functionality, a service or an interface requires the presence of the ## appropriate configuration block. To disable a service or an interface, simply delete or ## comment out the related arc.conf block (you may need to rerun the corresponding startup script). ## ## The [common] block is mandatory even if not a single option is specified within. The presence of ## the block turns on the default values for the configuration options within the block. ## ## As an example, in order to set up a minimalistic ARC CE offering no external interfaces ## you need to configure at least the [common], [mapping], [arex], [lrms], ## [infosys] and [queue:name] blocks. ## ## As another example, an ARC-based data offloader would require the [common] and the ## [datadelivery-service] blocks. ## ## A block is identified by its block header. A block header may consist of ## keywords and optionally block identifiers. Keywords may be separated by "/" ## and used to label subblocks (e.g. [arex/jura]), while block identifiers ## are separated by ":" from keywords. For example, in the [queue:short] ## block header "queue" is a keyword while "short" is an identifier, e.g. the name of the queue. ## Block headers must be UNIQUE. ## ## A block starts with a unique [keyword:identifier] blockheader and ends where the next block ## starts, that is at the next [blockheader] directive. ## ## A block may have sub-blocks e.g. the various interfaces of the AREX service are configured via ## sub-blocks (e.g. 
[arex/ws]). When a sub-block is enabled then the corresponding parent block must ## also appear in the arc.conf file. ## ## Configuration blocks contain (config option, config value) pairs following ## the syntax in a single line: ## config_option=value element [optional value element] ## NOTE that quotes around the configuration value(s) must NOT be used any longer. ## NOTE that the arc.conf is CASE-SENSITIVE! ## ## Space handling syntax in arc.conf ## for configuration lines: ## (stripped space)option(stripped space)=(stripped space)value(saved space)(value)(stripped space) ## ## and for block headers: ## [keyword:(stripped space)space is NOT allowed within identifier(stripped space)] ## ## Detailed textual definition: ## a) All trailing and leading spaces on each configuration line are stripped and ignored. ## This applies both to block headers and block content. ## b) All spaces around the "=" sign in "option=value" kind of string (after 'a' is applied) ## are stripped and ignored. For example the line "hostname = myhost.info" is treated as ## identical to "hostname=myhost.info". ## c) In block headers of [keyword] kind (after 'a' is applied) no additional spaces are allowed ## around "keyword" and inside "keyword". ## d) In block headers of [keyword:identifier] kind (after 'a' is applied) no additional spaces ## are allowed around "keyword" and inside both "keyword" and "identifier". ## The spaces ARE allowed around the "identifier" part and stripped and ignored. ## ## Mandatory configuration options are indicated by an asterisk prefix to the ## option name e.g: "*mandatory_configoption". Mandatory options with undefined values ## will result in a service stop during the startup process. ## ## Each of the configuration options has a well-defined default that is specified in this reference ## file. The default can take either a pre-set value, a special substitution or the keyword ## "undefined". Configuration options within an enabled block take their default values in case ## they are missing (or commented out). Configuration parameters with "undefined" defaults take ## no values. Furthermore, configuration options within disabled blocks take no values either. ## ## Configuration blocks are ORDER-DEPENDENT. To be safe, please use the order as indicated in the list ## of blocks in this reference. This is especially important for configuration blocks related to ## authorization and mapping. The order dependency is also honoured within options inside a certain block. ## ## This means for instance that configuration blocks related to authorization MUST appear before being used in ## the blocks such as [mapping], [arex/ws/jobs] or [gridftp/jobs]. Order dependency within a block is ## for instance important when it comes to authorization decisions, as the first matching rule is used. ## For more details see the specific block reference. ## ## Below we give a detailed description of all the configuration options of the ## different configuration blocks. Every configuration option is described ## in a dedicated paragraph with the following reference syntax notation. ## This file is parsed at buildtime to assist the configuration default parsing and validation script ## and so it is important that it follows the agreed syntax: For each block or ## option please add explanatory text with two "##" followed by a space at the ## beginning of the line and then an example with a single "#" and no spaces at ## the beginning of the line. 
## ## example_config_option = value [optional values] - Here comes the explanation ## of the config option. Mandatory configuration options are indicated by an asterisk prefix to the ## option name e.g: "*mandatory_configoption" vs. "optional_configoption". ## The explanation can be followed by the special keywords in a separate line: ## - "multivalued" - used to indicate that the config option can be specified multiple times. ## This forms a set of values for the same configuration option irrespective of line order. ## - "sequenced" - used to indicate that the config option is a part of the sequence and its ## effect on configuration depends on the line order. A sequenced option can be specified ## several times in the configuration sequence independently. ## The absence of such keywords means the config option can only occur once in the arc.conf. ## By default the arc.conf config options are optional and single-valued. ## For some config options only a fixed set of values is allowed. These are ## listed in a separate line after the "allowedvalues" keyword. ## The default of every config option is explicitly given in the "default:" line. ## Default can be a pre-set value, a substitution or the "undefined" keyword. ## The last line of the paragraph is always a valid example preceded by a single "#" ## multivalued ## allowedvalues: 12 34 56 ## default: 34 #example_config_option=56 ########################################################################### ### The [common] block ############################################## ## Common configuration affecting all ARC components, usually related to networking or security ## or service behaviour. This block is mandatory. ## The common block options may be overridden by the specific sections of the components later. ## The [common] always appears at the beginning of the config file. The config options set within ## this block are available for all the other blocks and are thus shared by the different components of ARC. #[common] ## hostname = string - The FQDN of the frontend on which the ARC services are deployed. ## default: $EXEC{hostname -f} #hostname=myhost.org ## CHANGE: MODIFIED semantics in 6.0.0, not mandatory any longer. ## http_proxy = url - The http proxy server. ## This setting affects all client HTTP(s) requests that are initiated by ARC core services, ## including data staging, SAML communications, and pushing SGAS accounting records. ## This variable is similar to setting the ARC_HTTP_PROXY environment variable. ## default: undefined #http_proxy=proxy.mydomain.org:3128 ## CHANGE: MOVED in 6.0.0 from the old [grid-manager] block. ### X509 related parameters ## x509_host_key = path - (previously x509_user_key) Server credential location. ## Sets the full path to the host private key. ## These variables are similar to the GSI environment variable "X509_USER_KEY" ## If indicated, the variable can be set individually for each service/component in the ## corresponding block. ## default: /etc/grid-security/hostkey.pem #x509_host_key=/etc/grid-security/hostkey.pem ## CHANGE: RENAMED in 6.0.0. ## x509_host_cert = path - (previously x509_user_cert) Server credential location. Sets the full ## path to the host public certificate. ## These variables are similar to the GSI environment variable "X509_USER_CERT" ## If indicated, the variable can be set individually for each service/component in the ## corresponding block. ## default: /etc/grid-security/hostcert.pem #x509_host_cert=/etc/grid-security/hostcert.pem ## CHANGE: RENAMED in 6.0.0. 
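## As an illustration only, a minimal sketch of a [common] block combining the examples above could look like the following (the values are simply the example and default values quoted in the paragraphs above):
#[common]
#hostname=myhost.org
#x509_host_key=/etc/grid-security/hostkey.pem
#x509_host_cert=/etc/grid-security/hostcert.pem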
## x509_cert_dir = path - Location of trusted CA certificates. ## This variable is similar to the GSI environment variable "X509_CERT_DIR" ## If indicated, the variable can be set individually for each service/component in the ## corresponding block. ## default: /etc/grid-security/certificates #x509_cert_dir=/etc/grid-security/certificates ### VOMS related parameters ## x509_voms_dir = path - the path to the directory containing *.lsc files ## needed for verification of VOMS service signature in the proxy-certificate. ## default: /etc/grid-security/vomsdir #x509_voms_dir=/etc/grid-security/vomsdir ## voms_processing = keyword - Defines how to behave if errors in VOMS AC processing are detected. ## The following keywords are supported: ## "relaxed" ## use everything that passed validation. ## "standard" ## same as relaxed but fail if parsing errors took place and ## VOMS extension is marked as critical. This is the default. ## "strict" ## fail if any parsing error was discovered ## "noerrors" ## fail if any parsing or validation error happened. ## allowedvalues: relaxed standard strict noerrors ## default: standard #voms_processing=strict ## ## ### end of the [common] block ############################################## ### The [authgroup:groupname] (previously [group]) blocks ########################## ## These configuration blocks contain authorization rules. ## An [authgroup:groupname] block always defines a group of users where members of the group are ## those who satisfy the authorization rules. ## The rules within the block determine which users belong to the authgroup. ## Then, access control and identity mapping of ARC services are implemented ## via associating an authgroup with an interface, queue or a mapping rule ## using one of the "allowaccess", "denyaccess" or [mapping] block parameters. ## For more info please read "Security Framework of ARC" at ## http://www.nordugrid.org/documents/arc-security-documentation.pdf ## The authgroup should not be mistaken for a virtual organisation (VO). ## An authgroup may match a single VO if only a single check (rule) on VO membership is performed. ## ## IMPORTANT: Rules in an authgroup are processed in their order of appearance. ## The first matching rule decides the membership of the user to the authgroup ## being evaluated and the processing STOPS within that authgroup. This does not mean that ## the same user is not processed for the next authgroup: all [authgroup:groupname] blocks are ## evaluated, even if a user already has a match with one of the earlier groups. ## ## All the objects used in the rules MUST be defined before they may be used. For example, ## to create a group of authgroups you must first define the child groups. ## ## There are positively and negatively matching rules. ## If a rule is matched positively then the user tested is accepted ## into the respective group and further processing is stopped. Upon a ## negative match the user would be rejected for that group - processing ## stops too. The sign of a rule is determined by prepending the rule with ## "+" (for positive) or "-" (for negative) signs. "+" is the default and can ## be omitted. A rule may also be prepended with "!" to invert the result of the rule, ## which will let the rule match the complement of users. That complement ## operator ("!") may be combined with the operator for positive or negative ## matching. #[authgroup:prodtesters] ## CHANGE: RENAMED the block in 6.0.0. ## subject = certificate_subject - Rule to match a specific subject of the user's ## X.509 certificate. 
No masks, patterns and regular expressions are allowed. ## sequenced ## default: undefined #subject=/O=Grid/O=Big VO/CN=Main Boss #subject=/O=Grid/O=Big VO/CN=Deputy Boss ## file = path - Processes a list of DNs stored in an external file, one per line ## in grid-mapfile format (see map_with_file from the [mapping] block, unixname is ignored), ## and adds those to the authgroup. ## sequenced ## default: undefined #file=/etc/grid-security/local_users #file=/etc/grid-security/atlas_users ## CHANGE: MODIFIED semantics in 6.0.0. The external file should only contain DNs, no complex rules. No need to change the code. ## voms = vo group role capabilities - Match a VOMS attribute in the user's credential. ## Use "*" to match any value. ## sequenced ## default: undefined #voms=nordugrid Guests * * #voms=atlas students prodman * ## authgroup = group_name [group_name ...] - (previously group) Match a user already belonging to one ## of the specified authgroups. The authgroup referred to here must be defined earlier in ## the arc.conf configuration file. Multiple authgroup names may be specified for this rule. ## That allows creating a hierarchical structure of authorization groups like ## "all-atlas" are those which are "atlas-users" and "atlas-admins". ## sequenced ## default: undefined #authgroup=local_admins #authgroup=local_admins remote_users ## CHANGE: RENAMED in 6.0.0. ## userlist = ulist_name [ulist_name ...] - (previously vo) Match a user belonging to the ulist_name defined ## in an earlier [userlist:ulist_name] block. Multiple userlist names are allowed for this rule. ## sequenced ## default: undefined #userlist=biousers ## CHANGE: RENAMED in 6.0.0. ## plugin = timeout path [arg1 [arg2 [arg3...]]] - Run an external executable or ## a function from a shared library. The rule is matched if the plugin returns 0. ## Any other return code or a timeout is treated as the rule not being matched. ## In the arguments the following substitutions are supported: ## - "%D" - subject of certificate ## - "%P" - path to proxy ## ARC ships with an LCAS plugin that can be enabled with the following plugin configuration. ## For more information about configuring LCAS refer to the 'Using LCAS/LCMAPS' document. ## sequenced ## default: undefined #plugin=10 /usr/libexec/arc/arc-lcas %D %P liblcas.so /usr/lib64 /etc/lcas/lcas.db ## authtokens = subject issuer audience scope group - Match OIDC token claims. ## Use "*" to match any value. ## sequenced ## default: undefined #authtokens=e83eec5a-e2e3-43c6-bb67-df8f5ec3e8d0 https://wlcg.cloud.cnaf.infn.it/ * * * ## CHANGE: INTRODUCED in 6.6.0. ## all = yes|no - Matches any or no user identity. For the "yes" argument this rule ## always returns a positive match. For "no" it is always no match. ## sequenced ## default: undefined #all=yes ## CHANGE: MODIFIED options in 6.0.0. ## ## ### end of the [authgroup:name] blocks ############################### ### The [mapping] block ############################################## ## This block defines the grid-identity to local UNIX identity mapping rules ## used by various ARC components. ## ## Rules in the [mapping] block are processed IN A SEQUENCE in line order of the ## configuration file (from top to bottom). 
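## As a quick illustration of this top-to-bottom ordering (the individual mapping and policy rules are described in detail below; the "atlas" authgroup used here is a hypothetical example), the following sketch first relaxes the no-map policy, then tries a pool and finally falls back to a fixed account:
#[mapping]
#policy_on_nomap=continue
#map_to_pool=atlas /etc/grid-security/pool/atlas
#map_to_user=atlas nobody:nobody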
## ## There are two kinds of rules: ## - mapping rules that define how the members of a particular "authgroup" are mapped ## - policy rules that modify the processing of the mapping rule sequence ## ## The default policy for mapping rule processing is: ## - processing CONTINUES to the next rule if the identity of the user DOES NOT match the "authgroup" ## specified in the rule (can be redefined with the "policy_on_nogroup" option) ## - processing STOPS if the identity of the user matches the "authgroup" specified in the mapping rule. ## Depending on whether this mapping rule returns a valid UNIX identity, the processing can be ## redefined with the "policy_on_map" and "policy_on_nomap" options. ## ## The policy can be redefined at any point of the configuration sequence and affects ## all mapping rules defined after the policy rule. ## ## NOTE that if the mapping process STOPS and there is still no local UNIX identity ## identified, the user running A-REX will be used. ## NOTE that when a grid-identity is mapped to the "root" account, request processing fails implicitly. ## ## #[mapping] ## CHANGE: NEW block in 6.0.0. ## map_to_user = authgroup_name unixname[:unixgroup] - the users that belong to the ## specified authgroup are mapped to the "unixname" local UNIX account that may be ## optionally followed by a "unixgroup" UNIX group. ## In case of a non-existing "unixname" account the mapping rule is treated as a rule that ## did not return a mapped identity (nomap). ## sequenced ## default: undefined #map_to_user=authgroupA nobody:nobody ## map_to_pool = authgroup_name directory - a user that belongs to the specified ## authgroup is assigned one of the local UNIX accounts in the pool. Account names that ## are part of this pool are stored line-by-line in the "pool" file inside the "directory". ## The "directory" also contains information about used account names stored in other files. ## If there are no more available accounts in the defined pool for mapping then ## accounts not used for a configurable time period may be reassigned. ## The pool behaviour, including account reuse, is configurable with the optional ## "directory/config" file that has INI syntax (line-by-line "key=value"). ## Possible keys of the "config" file are: ## "timeout" ## Defines the timeout in days (default is "10") after which the UNIX ## account can be reassigned to another user if not used. The "0" value ## means no lease expiration. ## sequenced ## default: undefined #map_to_pool=atlas /etc/grid-security/pool/atlas ## map_with_file = authgroup_name file - for users that belong to the specified ## authgroup the DN of the certificate is matched against a list of DNs stored in ## the specified "file", one per line followed by a local UNIX account name. ## The DN must be quoted if it contains blank spaces. ## This rule can be used to implement the legacy grid-mapfile approach. ## sequenced ## default: undefined #map_with_file=authgroupB /etc/grid-security/grid-mapfile ## map_with_plugin = authgroup_name timeout plugin [arg1 [arg2 [...]]] - run the ## external "plugin" executable with the specified arguments to find the UNIX account ## name to which users that belong to the specified authgroup will be mapped. ## A rule matches if the exit code is "0" and there is a UNIX account name ## printed on stdout (optionally followed by a UNIX group name separated by a colon). ## The exit code 1 designates failed mapping. Any other code or a timeout means a fatal ## failure and will abort any further mapping processing. That will also cause ## rejection of the corresponding connection.
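## As an illustrative sketch only (not a plugin shipped with ARC; the subject prefix and
## account name below are assumptions), an executable honouring this protocol could be a
## small shell script that prints the account name on stdout:
##   #!/bin/sh
##   # $1 receives the certificate subject when the rule passes "%D" as the first argument
##   case "$1" in
##     "/O=Grid/O=Big VO/"*) echo "griduser:gridgroup"; exit 0 ;;  # mapped
##     *) exit 1 ;;                                                # no mapping (nomap)
##   esac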
## Plugin execution time is limited to "timeout" seconds. ## ## In the arguments the following substitutions are applied before the plugin is started: ## - "%D" - subject of the user's certificate, ## - "%P" - path to the credentials' proxy file. ## ## ARC ships with an LCMAPS plugin that can be enabled with the corresponding ## configuration. For more information about configuring LCMAPS refer to the ## 'Using LCAS/LCMAPS' document. ## sequenced ## default: undefined #map_with_plugin=authgroupC 30 /usr/libexec/arc/arc-lcmaps %D %P liblcmaps.so /usr/lib64 /etc/lcmaps/lcmaps.db arc ## policy_on_nomap = continue/stop - redefines the mapping rule sequence processing policy ## in case the identity of the user matches the "authgroup" specified in the mapping rule and the mapping ## rule DOES NOT return a valid UNIX identity. The default policy is to "stop" processing the further ## rules. ## For example, this policy will be triggered if the pool is depleted, the certificate subject is ## missing in the map file used for the defined authgroup, or the plugin execution failed. ## sequenced ## default: undefined ## allowedvalues: continue stop #policy_on_nomap=continue ## policy_on_map = continue/stop - redefines the mapping rule sequence processing policy ## in case the identity of the user matches the "authgroup" specified in the mapping rule and the mapping ## rule returns a valid UNIX identity. The default policy is to "stop" processing the further ## rules. ## This policy will be triggered if the rule successfully returns a result (account allocated in the pool, ## subject matched in the map file, plugin call was successful). ## sequenced ## default: undefined ## allowedvalues: continue stop #policy_on_map=stop ## policy_on_nogroup = continue/stop - redefines the mapping rule sequence processing policy ## in case the identity of the user DOES NOT match the "authgroup" specified in the mapping rule. ## The default policy is to "continue" processing the further rules. ## sequenced ## default: undefined ## allowedvalues: continue stop #policy_on_nogroup=stop ## ### end of the [mapping] block ############################################## ### The [authtokens] block ############################################## ## This block activates the processing of OIDC tokens as defined in the WLCG profile. ## ## #[authtokens] ## CHANGE: NEW block in 6.6.0. ## ### end of the [authtokens] block ############################################## ### The [lrms] block ############################################## ## This block specifies the characteristics of the Local Resource Management System (batch system) ## underneath the ARC CE. This block contains all the lrms-specific parameters and information. ## Configuration values in this block are available for the A-REX, backends, accounting and infosys ## ARC subsystems. ## ## ARC supports the most common LRMS flavours. #[lrms] ## CHANGE: NEW block in 6.0.0. Contains parameters previously set in [common], [infosys], [cluster], [queue] ## *lrms = lrmstype [defaultqueue] - Sets the type of the LRMS (queue system) and optionally the ## default queue name. ## ONLY ONE LRMS IS ALLOWED. MULTIPLE LRMS ENTRIES WILL TRIGGER UNEXPECTED BEHAVIOUR. ## ## For lrmstype, the following values can be chosen: ## - fork - simple forking of jobs to the same node as the server ## - sge - (Sun/Oracle) Grid Engine ## - condor - Condor ## - pbs - PBS (covers Torque and other old PBS flavours e.g.
OpenPBS, older PBSPro, etc) ## - pbspro - Altair PBS Professional ## - lsf - LSF ## - ll - LoadLeveler ## - slurm - SLURM ## - boinc - Boinc ## - slurmpy - new EXPERIMENTAL SLURM scripts (contains the ssh remote batch management as well) ## ## The optional "defaultqueue" parameter specifies the name of an existing LRMS queue ## in the cluster that will be used by A-REX as the default queue to submit grid jobs to in case ## the client does not specify a queue name during the job submission process. ## This queue name must match one of the [queue:queue_name] blocks. ## ## allowedvalues: fork sge condor pbs pbspro lsf ll slurm boinc slurmpy ## default: undefined ## mandatory #lrms=pbspro gridlong #lrms=slurm ## CHANGE: slurmpy NEW in 6.0.0. pbspro NEW in 6.1.0 ## lrmsconfig = text - An optional free text field to describe the configuration of your ## Local Resource Management System (batch system). The value is published in the infosys, ## and is not used otherwise. ## default: undefined #lrmsconfig=single job per processor ## CHANGE: MOVED in 6.0.0 from the [cluster] block. ## benchmark = string - Defines the default benchmark specification to store in ## the accounting AAR records (as it is) if per-job data is missing. ## It is advised to set it to cluster-wide defaults in case of reporting to APEL ## to avoid record diversity for failed jobs or buggy backends. ## default: HEPSPEC:1.0 #benchmark=HEPSPEC:12.26 ## CHANGE: NEW in 6.8.0 ## defaultmemory = number - The LRMS memory request of the job to be set by the LRMS backend ## scripts, if a user submits a job without specifying how much memory should be used. ## The order of precedence is: job description -> defaultmemory. ## This is the amount of memory (specified in MB) that a job will request. ## default: undefined #defaultmemory=512 ## CHANGE: MOVED in 6.0.0 from the [cluster] block. ## nodename = path - Redefine the command to obtain the hostname of the LRMS worker node. ## By default the value is defined at build time and depends on the OS. ## In most cases "/bin/hostname -f" will be used. ## NOTE that this way of getting the WN hostname will be used only in case the ## particular LRMS backend has no native LRMS-defined way. ## default: undefined #nodename=/bin/hostname -s ## CHANGE: MOVED in 6.0.0 from the [arex] block. ## gnu_time = path - Path to the GNU time command on the LRMS worker nodes. ## If the time command exists on the node, the jobscript will write additional diagnostic information. ## default: /usr/bin/time #gnu_time=/usr/bin/time ## CHANGE: MOVED in 6.0.0 from the [arex] block. ## movetool = command - Redefine the command used to move files during jobscript ## execution on the LRMS worker node (the command should be available on the WNs). ## This in particular applies to file movement from the sessiondir to the scratchdir ## in the shared sessiondir case. ## default: mv #movetool=rsync -av ## CHANGE: NEW in 6.8 ### PBS options: set these only in case of lrms=pbs ## pbs_bin_path = path - The path to the qstat, pbsnodes, qmgr etc PBS binaries; ## no need to set if PBS is not used. ## default: /usr/bin #pbs_bin_path=/usr/bin ## pbs_log_path = path - The path of the PBS server logfiles which are used by A-REX to determine ## whether a PBS job is completed. If not specified, A-REX will use qstat for that. ## default: /var/spool/pbs/server_logs #pbs_log_path=/var/spool/pbs/server_logs ## pbs_dedicated_node_string = string - (previously dedicated_node_string) The string which ## is used in the PBS node config to distinguish the grid nodes from the rest.
## Suppose only a subset of nodes are available for grid jobs, ## and these nodes have a common "node property" string, ## this case the string should be set to this value and only the ## nodes with the corresponding "pbs node property" are counted as grid enabled ## nodes. Setting the dedicated_node_string to the value of the "pbs node ## property" of the grid-enabled nodes will influence how the totalcpus, user ## freecpus is calculated. You don't need to set this attribute if your cluster ## is fully available for the grid and your cluster's PBS config does not use ## the "node property" method to assign certain nodes to grid queues. You ## shouldn't use this config option unless you make sure your PBS config makes ## use of the above described setup. ## default: undefined #pbs_dedicated_node_string=gridnode ## CHANGE: RENAMED and MOVED in 6.0.0 from [cluster]. ### Condor options: set these only in case of lrms=condor ## condor_bin_path = path - Path to Condor binaries. Must be set if Condor ## is used. ## default: /usr/bin #condor_bin_path=/opt/condor/bin ## condor_config = path - Full path to Condor config file. Must be set if Condor ## is used and the config file is not in its default location ## (/etc/condor/condor_config or ~/condor/condor_config). ## The full path to the file should be given. ## default: /etc/condor/condor_config #condor_config=/opt/condor/etc/condor_config ## condor_rank = ClassAd_float_expression - If you are not happy with the way Condor picks nodes ## when running jobs, you can define your own ranking algorithm by optionally ## setting the condor_rank attribute. condor_rank should be set to a ## ClassAd float expression that you could use in the Rank attribute ## in a Condor job description. ## default: undefined #condor_rank=(1-LoadAvg/2)*(1-LoadAvg/2)*Memory/1000*KFlops/1000000 ## condor_requirements = string - Specify additional constraints for Condor resources. ## The value of "condor_requirements" must be a valid constraints string ## which is recognized by a "condor_status -constraint ..." command. It can ## reference pre-defined ClassAd attributes (like Memory, Opsys, Arch, HasJava, ## etc) but also custom ClassAd attributes. To define a custom attribute on a ## condor node, just add two lines like the ones below in the "$(hostname).local" ## config file on the node: ## NORDUGRID_RESOURCE=TRUE ## STARTD_EXPRS = NORDUGRID_RESOURCE, $(STARTD_EXPRS) ## A job submitted to this resource is allowed to run on any node which satisfies ## the "condor_requirements" constraint. If "condor_requirements" is not set, ## jobs will be allowed to run on any of the nodes in the pool. When configuring ## multiple queues, you can differentiate them based on memory size or disk ## space, for example. ## default: undefined #condor_requirements=(OpSys == "linux" && NORDUGRID_RESOURCE && Memory >= 1000 && Memory < 2000) ### SGE options: set these only in case of lrms=sge ## sge_bin_path = path - Path to Sun Grid Engine (SGE) binaries, ## Default is search for qsub command in the shell PATH ## default: undefined #sge_bin_path=/opt/n1ge6/bin/lx24-x86 ## sge_root = path - Path to SGE installation directory. MUST be set if SGE is used. ## default: /gridware/sge #sge_root=/opt/n1ge6 ## sge_cell = name - The name of the SGE cell to use. 
This option is only necessary ## in case SGE is set up with a cell name different from 'default' ## default: default #sge_cell=default ## sge_qmaster_port = port - The SGE port options should be used in case SGE ## command line clients require SGE_QMASTER_PORT and SGE_EXECD_PORT environment ## variables to be set. Usually they are not necessary. ## default: undefined #sge_qmaster_port=536 ## sge_execd_port = port - The SGE port options should be used in case SGE ## command line clients requre SGE_QMASTER_PORT and SGE_EXECD_PORT environment ## variables to be set. Usually they are not necessary. ## default: undefined #sge_execd_port=537 ## sge_jobopts = string - Additional SGE options to be used when submitting jobs to SGE ## default: undefined #sge_jobopts=-P atlas -r yes ### SLURM options: set these only in case of lrms=slurm ## slurm_bin_path = path - Path to SLURM binaries, must be set if installed ## outside of normal PATH ## default: /usr/bin #slurm_bin_path=/usr/bin ## slurm_wakeupperiod = numsec - How long should infosys wait before querying SLURM ## for new data (seconds) ## default: 30 #slurm_wakeupperiod=15 ## slurm_use_sacct = yes/no - Indicates whether ARC should use sacct instead of scontrol ## to obtain information about finished jobs ## Not supported by slurmpy. ## allowedvalues: yes no ## default: yes #slurm_use_sacct=yes ## slurm_requirements = string - Use this option to specify extra SLURM-specific parameters. ## default: undefined #slurm_requirements=memory on node >> 200 ## CHANGE: NEW in 6.0.0. ## slurm_query_retries = number - Number of sacct/scontrol retries performed in scan-SLURM-job ## If slurm is overloaded the sacct/scontrol command call may fail. ## If retries > 1 sacct/scontrol is retried after some seconds for that(those) particular job(s). ## If all retry attempts fail, the next scan-SLURM-job institiation will pick up the job(s) from last time. ## default: 1 #slurm_query_retries=3 ### LSF options: set these only in case of lrms=lsf ## lsf_bin_path = path - The PATH to LSF bin folder ## default: /usr/bin #lsf_bin_path=/usr/local/lsf/bin/ ## lsf_profile_path = path - Path to the profile.lsf file. Infoprovider scripts ## will source profile.lsf to setup LSF utilites environment. ## default: /usr/share/lsf/conf/profile.lsf #lsf_profile_path=/usr/local/share/lsf/conf/profile.lsf ## lsf_architecture = string - CPU architecture to request when submitting jobs to LSF. ## Use only if you know what you are doing. ## default: undefined #lsf_architecture=PowerPC ## CHANGE: MOVED in 6.0.0 from [queue:name] block. ### LoadLeveler options: set these only in case of lrms=ll ## ll_bin_path = path - The PATH to the LoadLeveler bin folder ## default: /usr/bin #ll_bin_path=/opt/ibmll/LoadL/full/bin ## ll_consumable_resources = yes/no - Indicates whether the LoadLeveler setup is ## using Consumable Resources. 
## allowedvalues: yes no ## default: no #ll_consumable_resources=yes ### Boinc options: set these only in case of lrms=boinc ## boinc_db_host = hostname - Connection strings for the boinc database: host ## default: localhost #boinc_db_host=localhost ## boinc_db_port = port - Connection strings for the boinc database: port ## default: 3306 #boinc_db_port=3306 ## boinc_db_name = db_name - Connection strings for the boinc database: db_name ## default: undefined #boinc_db_name=myproject ## boinc_db_user = user - Connection strings for the boinc database: db_user ## default: undefined #boinc_db_user=boinc ## boinc_db_pass = pwd - Connection strings for the boinc database: pwd ## default: undefined #boinc_db_pass=password ## boinc_app_id - ID of the app handled by this CE. Setting this option ## makes database queries much faster in large projects with many apps. ## default: undefined #boinc_app_id=1 ## ## ### end of [lrms] block ##################################################### ### The [lrms/ssh] block ############################################## ## This sub-block configures the ssh environment for remote batch management. ## Currently ONLY the python slurm (slurmpy) EXPERIMENTAL batch module is able to utilize ## the remote ssh feature. ## Parameters within this block are relevant if the cluster frontend is remotely ## located wrt. the CE frontend (the machine running A-REX). If specified with the ## parameters below, the session, cache and runtime directories will be mounted ## from the cluster frontend on the CE frontend using sshfs. Job submission and ## management will be done using ssh (Paramiko). #[lrms/ssh] ## CHANGE: NEW sub-block in 6.0.0 to configure ssh support in slurmpy as a part of tech preview. ## *remote_host = hostname - Define the remote cluster frontend machine which contains ## the session, cache and runtime directories and where jobs should be submitted to. ## default: undefined ## mandatory #remote_host=myremotehost.org ## *private_key = path - Location of the private key which should be used to establish a ## connection to the machine specified by the 'remote_host' attribute. ## default: undefined ## mandatory #private_key=/etc/grid-security/hostkey-priv.pem ## *remote_user = user - User on the remote cluster which should be used for mounting ## directories, submitting and managing jobs, and gathering information about the ## cluster. ## default: undefined ## mandatory #remote_user=grid ## remote_sessiondir = path - Session directory on the cluster frontend to be ## mounted (sshfs) on the CE machine at the directory specified by the 'sessiondir' ## attribute in the [arex] block. ## default: undefined ## mandatory #remote_sessiondir=/scratch/grid ## remote_cachedir = path - Cache directory on the cluster frontend to be ## mounted (sshfs) on the CE machine at the directory specified by the 'cachedir' ## attribute in the [arex] block. ## default: undefined #remote_cachedir=/scratch/cache ## ssh_timeout = time - Timeout for establishing the ssh connection, in seconds. ## default: 10 #ssh_timeout=10 ## ## ### end of [lrms/ssh] block ##################################################### ### The [arex] block - previously [grid-manager] ##################### ## The [arex] block, together with its various subblocks, ## configures the A-REX service hosted in "arched". A-REX takes care of ## various middleware tasks on the frontend such as job creation and management, ## stagein/stageout, LRMS job submission, data caching, etc... #[arex] ## CHANGE: RENAMED block in 6.0.0.
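## As an illustrative sketch only (the values shown are the documented defaults, repeated
## here just to show how a minimal block is assembled; adjust paths to your own setup):
##   [arex]
##   controldir=/var/spool/arc/jobstatus
##   sessiondir=/var/spool/arc/sessiondir
##   loglevel=3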
## user = user[:group] - Switch to a non-root user/group after startup. ## Use with caution because of limited functionality when A-REX is not run as root. ## default: root #user=grid:grid ## norootpower = yes|no - If set to yes, all job management processes ## will switch to the mapped user's identity while accessing the session directory. ## This is useful if the session directory is on NFS with root squashing turned on. ## allowedvalues: yes no ## default: no #norootpower=yes ## delegationdb = db_name - specify which DB to use to store delegations. ## Currently supported db_names are bdb and sqlite. ## default: sqlite #delegationdb=sqlite ## CHANGE: MODIFIED in 6.0.0 with new default. ## watchdog = yes/no - Specifies if an additional watchdog process is spawned to restart ## the main process if it is stuck or dies. ## allowedvalues: yes no ## default: no #watchdog=no ## loglevel = level - (previously debug) Set the loglevel of the arched daemon hosting the A-REX service ## between 0 (FATAL) and 5 (DEBUG). Defaults to 3 (INFO). ## allowedvalues: 0 1 2 3 4 5 FATAL ERROR WARNING INFO VERBOSE DEBUG ## default: 3 #loglevel=3 ## CHANGE: RENAMED in 6.0.0. ## logfile = path - Specify the A-REX log file location. If using an external log ## rotation tool be careful to make sure it matches the path specified here. ## default: /var/log/arc/arex.log #logfile=/var/log/arc/arex.log ## CHANGE: MODIFIED default (renamed file) in 6.0.0. ## joblog = path - Specifies where to store the specialized log about started ## and finished jobs. If the path is empty the log is NOT written. ## Controlled by logrotate if the default name is kept. ## This log is not used by any other part of ARC so it can be safely disabled if you are ## not interested in storing the job log. ## default: /var/log/arc/arex-jobs.log #joblog= ## fixdirectories = yes/missing/no - Specifies whether during startup A-REX should ## create all directories needed for its operation and set suitable default ## permissions. If "no" is specified then A-REX does nothing to prepare its ## operational environment. In case of "missing" A-REX only creates and ## sets permissions for directories which are not present yet. For "yes" ## all directories are created and permissions for all used directories are ## set to default safe values. ## allowedvalues: yes missing no ## default: yes #fixdirectories=yes ## controldir = path - The directory of the A-REX's internal job metadata files. ## For heavily loaded computing elements you can consider locating the controldir on ## a dedicated partition optimized for small random reads and writes. ## The directory is not needed on the nodes. ## default: /var/spool/arc/jobstatus #controldir=/var/spool/arc/jobstatus ## sessiondir = path [drain] - the directory which holds the sessiondirs of the grid jobs. ## Multiple session directories may be specified. ## In this case jobs are spread evenly over the session directories. ## If "sessiondir=*" is set, the session directory will be spread over the ## "${HOME}/.jobs" directories of every locally mapped unix user. It is preferred ## to use common session directories. The path may be followed by "drain", in ## which case no new jobs will be assigned to that sessiondir, but current jobs ## will still be processed and accessible. ## multivalued ## default: /var/spool/arc/sessiondir #sessiondir=/scratch/arcsessions drain #sessiondir=* ## defaultttl = [ttl [ttr]] - The ttl parameter sets the time in seconds for how long a job session ## directory will survive after job execution has finished. If not specified ## the default is 1 week.
The ttr parameter sets how long information about a job will be kept ## after the session directory is deleted. If not specified, the ttr default is one month. ## default: 604800 2592000 #defaultttl=2592000 ## shared_filesystem = yes/no - Specifies if computing nodes can access folders mounted ## with protocols like NFS with the same pathnames as the frontend. ## Note that the default 'yes' assumes that the paths to the session directories ## are the same on both frontend and nodes. ## If these paths are not the same, then one should set the scratchdir option. ## The option changes the RUNTIME_NODE_SEES_FRONTEND variable in the submission scripts. ## allowedvalues: yes no ## default: yes #shared_filesystem=yes ## scratchdir = path - The path on the computing node to move the session directory to before ## execution. If defined, it should contain the path to the directory on the ## computing node which can be used to store a job's files during execution. ## Sets the environment variable RUNTIME_LOCAL_SCRATCH_DIR. If the variable is not set, ## then the session dir is not moved before execution. Don't set this parameter unless ## you want to move the sessiondir to the scratchdir on the node. ## default: undefined #scratchdir=/local/scratch/ ## shared_scratch = path - The path on the frontend where the scratchdir can be found. If ## defined, it should contain the path corresponding to that set in scratchdir as ## seen on the frontend machine. Sets the environment variable ## RUNTIME_FRONTEND_SEES_NODE. ## default: undefined #shared_scratch=/mnt/scratch ## tmpdir = path - A temporary directory used by A-REX. ## default: /tmp #tmpdir=/tmp ## runtimedir = path - The directory which holds the additional runtimeenvironment scripts, ## added by the system administrator. Several directories can be specified. ## To enable RTEs to be advertised in the information system and used during submission ## the arcctl tool should be used. ## multivalued ## default: undefined #runtimedir=/var/spool/arc/extraruntimes #runtimedir=/cvmfs/vo/arcruntime ## maxjobs = number1 number2 number3 number4 number5 - specifies the maximum allowed number of jobs. ## number1 - jobs which are not in FINISHED state (jobs tracked in RAM) ## number2 - jobs being run (SUBMITTING, INLRMS states) ## number3 - jobs being processed per DN ## number4 - jobs in the whole system ## number5 - LRMS scripts limit (jobs in SUBMITTING and CANCELING) ## A parameter set to -1 means no limit. ## default: -1 -1 -1 -1 -1 #maxjobs=10000 10 2000 -1 -1 ## CHANGE: MODIFIED in 6.0.0. Explicitly indicate "no limit" with -1. "Missing number" should not be allowed. ## maxrerun = number - Specifies how many times a job can be rerun if it failed in the LRMS. ## This is only an upper limit, the actual rerun value is set by the user in the job description (xRSL). ## default: 5 #maxrerun=5 ## statecallout = state options plugin_path [plugin_arguments] - (previously authplugin) ## Enables a callout feature of A-REX: every time a job goes to "state" A-REX ## will run the "plugin_path" executable. The following states are allowed: ## ACCEPTED, PREPARING, SUBMIT, FINISHING, FINISHED and DELETED. ## Options consist of "key=value" pairs separated by commas. Possible keys are: ## "timeout" ## defines the timeout in seconds to wait for plugin execution ("timeout=" can be omitted). ## "onsuccess", "onfailure", "ontimeout" ## define the action that A-REX should take on successful execution (exit code 0), ## failed execution (exit code is not 0) or execution timeout respectively.
## Possible actions are: ## - "pass" - continue executing the job, ## - "fail" - cancel the job, ## - "log" - write the failure to the log and continue executing the job. ## It is possible to use the following substitutions to construct the plugin command line: ## - "%R" - session root (value of sessiondir in the [arex] block) ## - "%C" - controldir path ## - "%U" - username of the mapped UNIX account ## - "%u" - numeric UID of the mapped UNIX account ## - "%g" - numeric GID of the mapped UNIX account ## - "%H" - home directory of the mapped UNIX account as specified in "/etc/passwd" ## - "%Q" - default queue (see the "lrms" configuration option in the [lrms] block) ## - "%L" - LRMS name (see the "lrms" configuration option in the [lrms] block) ## - "%W" - ARC installation path (corresponds to the "ARC_LOCATION" environment variable) ## - "%F" - path to the configuration file for this instance ## - "%I" - job ID (substituted at runtime) ## - "%S" - job state (substituted at runtime) ## Plugins included in the ARC distribution: ## - "arc-blahp-logger" - writes an accounting log for every finished job in BLAH format ## multivalued ## default: undefined #statecallout=FINISHED timeout=10,onfailure=pass /usr/libexec/arc/arc-blahp-logger -I %I -U %u -L %C/job.%I.local -P %C/job.%I.proxy ## CHANGE: RENAMED in 6.0.0 from authplugin. ## wakeupperiod = time - Specifies how often A-REX checks for newly arrived jobs, ## job state change requests, etc. That is the responsiveness of ## A-REX. "time" is the time period in seconds. The default is 3 minutes. ## Usually there is no need to change this parameter because important state changes ## also trigger out-of-schedule checks. ## NOTE that this parameter does not affect the responsiveness of the backend scripts - ## especially "scan-<LRMS>-job". That means that the upper estimate of the time for ## detecting that a job has finished executing is the sum of the responsiveness of the backend ## script + wakeupperiod. ## default: 180 #wakeupperiod=180 ## infoproviders_timelimit = seconds - (previously infoproviders_timeout) Sets the ## execution time limit of the infoprovider scripts started by the A-REX. ## Infoprovider scripts running longer than the specified timelimit are ## gracefully handled by the A-REX (the behaviour depends on ## the state of the system). ## Increase this value if you have many jobs in the controldir and ## infoproviders need more time to process. ## default: 10800 #infoproviders_timelimit=10800 ## CHANGED: RENAMED and MOVED in 6.0.0 to the [arex] block. ## pidfile = path - Specify the location of the file containing the PID of the daemon process. ## default: /run/arched-arex.pid #pidfile=/run/arched-arex.pid ## mail = email_address - Specifies the email address from which the notification mails are sent. ## default: $VAR{user}@$VAR{[common]hostname} #mail=grid.support@somewhere.org ## CHANGE: MODIFIED in 6.0.0. ## gnu_time = path - The GNU time command. ## CHANGE: MOVED in 6.0.0 to [lrms]. ## nodename = path - The command to obtain the hostname of the computing node. ## CHANGE: MOVED in 6.0.0 to [lrms]. ## helper = user executable arguments - By enabling this parameter A-REX will ## run an external "helper" program under the specified user account. The program will be ## kept running: every time the executable finishes it will be started again. ## As a limitation, currently only '.' is supported as the username, which corresponds ## to the user running A-REX. ## default: undefined #helper=. /usr/local/bin/myutility ## helperlog = path - Configuration option to specify the location of the log for helpers.
## default: /var/log/arc/job.helper.errors #helperlog=/var/log/arc/job.helper.errors ## CHANGE: NEW PARAMETER in 6.0.0. ## forcedefaultvoms = VOMS_FQAN - specify VOMS FQAN which user will be ## assigned if his/her credentials contain no VOMS attributes. ## To assign different values to different queues put this command ## into [queue] block. ## default: undefined #forcedefaultvoms=/vo/group/subgroup ## ## ### end of the [arex] block ################################### ### The [arex/cache] block ######################################### ## This subblock enables and configures the cache functionality of A-REX. ## A-REX can cache input files downloaded as part of the stage-in process of grid jobs ## so that subsequent jobs requiring the same file don’t have to download it again. ## The cached file will be symlinked (or copied) into the session directory of the job. ## To disable to cache functionality simply comment out the [arex/cache] config block. ## It is a good idea to have the cache on its own separate file system that is shared with the nodes. ## For more information about the cache functionality of A-REX consult the Data Cache ## technical description in the online documentation. #[arex/cache] ## CHANGE: NEW block in 6.0.0. ## *cachedir = cache_path [link_path] - Specifies a directory to store cached ## data. Multiple cache directories may be specified. Cached data will be distributed ## evenly over the caches. ## Optional "link_path" specifies the path at which the "cache_path" is accessible on ## computing nodes, if it is different from the path on the A-REX host. ## If "link_path" is set to "." files are not soft-linked, but copied to session ## directory. ## If a cache directory needs to be drained, then "link_path" should specify "drain", ## in which case no new files will be added to the cache and files in the cache ## will no longer be used. ## Setting "link_path" to "readonly" ensures that no new files are written to ## this cache, but existing files can still be used. ## Draining and read-only caches are not cleaned by the A-REX cache cleaner. ## A restart of A-REX is required when changing cache options. ## multivalued ## default: undefined #cachedir=/scratch/cache #cachedir=/shared/cache /frontend/jobcache #cachedir=/fs1/cache drain ## CHANGE: Added readonly option in 6.7 ## ## ### end of the [arex/cache] ############################################# ### The [arex/cache/cleaner] block ######################################### ## This subblock enables the cleaning functionality of the cache. If this block is not enabled ## then the cache will not be cleaned by A-REX. Either cachesize or cachelifetime should also be ## set to enable cleaning. #[arex/cache/cleaner] ## CHANGE: NEW block in 6.0.0. ## logfile = path - (previously cachelogfile) sets the filename where output of the cache-clean ## tool should be logged. Defaults to /var/log/arc/cache-clean.log. ## default: /var/log/arc/cache-cleaner.log #logfile=/tmp/cache-clean.log ## CHANGE: RENAMED in 6.0.0. ## loglevel = level - (previously cacheloglevel) specifies the level of logging by the cache-clean ## tool, between 0 (FATAL) and 5 (DEBUG). Defaults to 3 (INFO). ## allowedvalues: 0 1 2 3 4 5 FATAL ERROR WARNING INFO VERBOSE DEBUG ## default: 3 #loglevel=4 ## CHANGE: RENAMED in 6.0.0. ## cachesize = max min - Specifies high and low watermarks for space used ## by cache, as a percentage of the space on the file system on which ## the cache directory is located. 
When the max is exceeded, files will ## be deleted to bring the used space down to the min level. It is a ## good idea to have the cache on its own separate file system. ## default: 100 100 #cachesize=50 20 ## calculatesize = filesystem/cachedir - (previously cacheshared) specifies the way the space ## occupied by the cache will be calculated. If set to cachedir then cache-clean calculates ## the size of the cache instead of using filesystem used space. ## allowedvalues: filesystem cachedir ## default: filesystem #calculatesize=cachedir ## CHANGE: RENAMED in 6.0.0 . ## cachelifetime = time - Turns on time-based file cleaning. Files accessed less recently than ## the given time period will be deleted. Example values of this option are 1800, 90s, 24h, 30d. ## When no suffix is given the unit is seconds. ## default: undefined #cachelifetime=30d ## cachespacetool = path [options] - specifies an alternative tool to "df" that ## cache-clean should use to obtain space information on the cache file system. ## The output of this command must be "total_bytes used_bytes". The cache ## directory is passed as the last argument to this command. ## default: undefined #cachespacetool=/etc/getspace.sh ## cachecleantimeout = time - the timeout in seconds for running the cache-clean ## tool. If using a large cache or slow file system this value can be ## increased to allow the cleaning to complete. Defaults to 3600 (1 hour). ## default: 3600 #cachecleantimeout=10000 ## ## ### end of the [arex/cache/cleaner] ############################################# ### The [arex/data-staging] (previously [data-staging]) block ########### ## This subblock enables and configures the data staging capabilities of A-REX. ## A subsystem called DTR (Data Transfer Reloaded) is responsible for collecting input data ## for a job before submission to the LRMS, and for staging out data ## after the job has finished. Automagic data staging of A-REX is a very powerful feature, ## disabling this functionality (by commenting out the subblock) is not recommended. #[arex/data-staging] ## CHANGE: RENAMED block in 6.0.0. ## loglevel = number - (previously debug) Sets the log level for transfer logging in job.id.errors files, ## between 0 (FATAL) and 5 (DEBUG). Default is to use value set by loglevel option in ## [arex] section. ## allowedvalues: 0 1 2 3 4 5 FATAL ERROR WARNING INFO VERBOSE DEBUG ## default: $VAR{[arex]loglevel} #loglevel=4 ## CHANGE: RENAMED in 6.0.0. ## logfile = path - (previously central_logfile) A central file in which all data staging messages ## from every job will be collected and logged in addition to their job.id.errors files. ## If this option is not present or the path is empty the log file is not created. ## This file is not automatically controlled by logrotate unless you name it as ## /var/log/arc/datastaging.log. ## default: undefined #logfile=/var/log/arc/datastaging.log ## CHANGE: RENAMED in 6.0.0. ## statefile = path - (previously dtrlog) A file in which data staging state information ## (for monitoring and recovery purposes) is periodically dumped. ## default: $VAR{[arex]controldir}/dtr.state #statefile=/tmp/dtr.state ## CHANGE: RENAMED and MODIFIED in 6.0.0, new default value. ## usehostcert = yes/no - Whether the A-REX host certificate should be used for ## communication with remote hosts instead of the users' proxies. 
## allowedvalues: yes no ## default: no #usehostcert=yes ## maxtransfertries = number - the maximum number of times download and upload will ## be attempted per job (retries are only performed if an error is judged to be temporary) ## default: 10 #maxtransfertries=20 ## passivetransfer = yes/no - If yes, gridftp transfers are passive. Setting ## this option to yes can solve transfer problems caused by firewalls. ## allowedvalues: yes no ## default: yes #passivetransfer=yes ## CHANGE: NEW DEFAULT in 6.0.0. ## globus_tcp_port_range = port_range - In a firewalled environment ## the software which uses GSI needs to know what ports are available. ## This parameter is only needed if "passivetransfer=no" was set. ## These variables are similar to the Globus environment variables ## "GLOBUS_TCP_PORT_RANGE" and "GLOBUS_UDP_PORT_RANGE". ## default: 9000,9300 #globus_tcp_port_range=9000,12000 ## CHANGE: MOVED in 6.0.0. ## globus_udp_port_range = port_range - In a firewalled environment ## the software which uses GSI needs to know what ports are available. ## This parameter is only needed if "passivetransfer=no" was set. ## These variables are similar to the Globus environment variables ## "GLOBUS_TCP_PORT_RANGE" and "GLOBUS_UDP_PORT_RANGE". ## default: 9000,9300 #globus_udp_port_range=9000,12000 ## CHANGE: MOVED in 6.0.0. ## httpgetpartial = yes/no - If yes, HTTP GET transfers may transfer data in ## chunks/parts. If no, data is always transferred in one piece. ## allowedvalues: yes no ## default: no #httpgetpartial=no ## CHANGE: NEW default in 6.0.0. ## speedcontrol = min_speed min_time min_average_speed max_inactivity - specifies ## how slow a data transfer must be to trigger an error. The transfer is cancelled if ## the speed is below min_speed bytes per second for at least min_time seconds, ## or if the average rate is below min_average_speed bytes per second, or no data ## was transferred for longer than max_inactivity seconds. ## A value of zero turns the feature off. ## default: 0 300 0 300 #speedcontrol=0 300 100 300 #speedcontrol= ## CHANGE: MODIFIED in 6.0.0: missing parameter "speedcontrol=" should also turn the feature off, not just zero value. ## maxdelivery = number - Maximum number of concurrent file transfers, i.e. active ## transfers using network bandwidth. This is the total number for the whole ## system including any remote staging hosts. ## default: 10 #maxdelivery=40 ## maxprocessor = number - Maximum number of concurrent files in each of the DTR ## internal pre- and post-processing states, e.g. cache check or replica resolution. ## default: 10 #maxprocessor=20 ## maxemergency = number - Maximum "emergency" slots which can be assigned to transfer ## shares when all slots up to the limits configured by the above two options ## are used by other shares. This ensures shares cannot be blocked by others. ## default: 1 #maxemergency=5 ## maxprepared = number - Maximum number of files in a prepared state, i.e. pinned on a ## remote storage such as SRM for transfer. A good value is a small multiple of maxdelivery. ## default: 200 #maxprepared=250 ## sharepolicy = grouping - (previously sharetype) Defines the mechanism to be used for the ## grouping of the job transfers. DTR assigns the transfers to shares, so that those shares ## can be assigned different priorities. ## Possible values for "grouping" are dn, voms:vo, voms:role and voms:group: ## dn ## each job is assigned to a share based on the DN of the user submitting the job.
## voms:vo ## each job is assigned to a share based on the VO specified in the proxy. ## voms:role ## each job is assigned to a share based on the role specified in the first attribute ## found in the proxy. ## voms:group ## each job is assigned to a share based on the group specified in the first attribute ## found in the proxy. ## In case of the voms schemes, if the proxy is not a VOMS proxy, then a default share is used. ## If sharepolicy is not set then the client-defined priority is applied. ## default: undefined #sharepolicy=voms:role ## CHANGE: RENAMED in 6.0.0. ## sharepriority = share priority - (previously definedshare) Defines a share with a fixed priority, ## different from the default (50). Priority is an integer between 1 (lowest) and 100 (highest). ## multivalued ## default: undefined #sharepriority=myvo:students 20 #sharepriority=myvo:production 80 ## CHANGE: RENAMED in 6.0.0. ## copyurl = url_head local_path - Configures DTR to use copy instead of download ## for certain stage-in files. URLs starting with 'url_head' ## should be accessed in a different way (most probably unix open). The ## 'url_head' part of the URL will be replaced with 'local_path' and the ## file from the obtained path will be copied to the session directory. ## NOTE: 'local_path' can also be of URL type. ## multivalued ## default: undefined #copyurl=gsiftp://example.org:2811/data/ /data/ #copyurl=gsiftp://example2.org:2811/data/ /data/ ## linkurl = url_head local_path [node_path] - Identical to 'copyurl', configures DTR ## so that for certain URLs files won't be downloaded or copied (in case of copyurl), ## but a soft-link will be created. The 'local_path' ## specifies the way to access the file from the frontend, and is used ## to check permissions. The 'node_path' specifies how the file can be ## accessed from the computing nodes, and will be used for soft-link creation. ## If 'node_path' is missing, 'local_path' will be used. ## multivalued ## default: undefined #linkurl=gsiftp://somewhere.org/data /data #linkurl=gsiftp://example.org:2811/data/ /scratch/data/ ## use_remote_acix = URL - (previously acix_endpoint) If configured then the ## ARC Cache Index, available at the URL, will be queried for every input file ## specified in a job description and any replicas found in sites with accessible caches ## will be added to the replica list of the input file. ## The replicas will be tried in the order specified by the preferredpattern variable. ## default: undefined #use_remote_acix=https://cacheindex.ndgf.org:6443/data/index ## CHANGE: RENAMED in 6.0.0. ## preferredpattern = pattern - specifies a preferred pattern on which ## to sort multiple replicas of an input file. It consists of one or ## more patterns separated by a pipe character (|) listed in order of ## preference. Replicas will be ordered by the earliest match. If the ## dollar character ($) is used at the end of a pattern, the pattern ## will be matched to the end of the hostname of the replica. If an ## exclamation mark (!) is used at the beginning of a pattern, any replicas ## matching the pattern will be excluded from the sorted replicas. ## default: undefined #preferredpattern=srm://myhost.ac.uk|.uk$|ndgf.org$|badhost.org$ ## The following options are used to configure the multi-host data staging deployment scenario. ## In that setup a couple of additional data staging boxes are enabled to off-load data transfers. ## deliveryservice = URL - The URL of a remote data delivery service which can perform remote ## data staging.
## default: undefined #deliveryservice=https://myhost.org:443/datadeliveryservice ## localdelivery = yes/no - If any deliveryservice is defined, this option determines ## whether local data transfer is also performed. ## allowedvalues: yes no ## default: no #localdelivery=yes ## remotesizelimit = size - Lower limit on the file size (in bytes) of files that remote ## hosts should transfer. Can be used to increase performance by transferring ## small files using local processes. ## default: undefined #remotesizelimit=100000 ## ## ### end of the [arex/data-staging] block ############################ ### The [arex/ws] block ################################# ## A-REX exposes a set of Web Service interfaces that can be used to create and ## manage jobs, obtain information about the CE and the jobs, handle delegations, ## access cache information, and so on. Comment out this block if you don't want to ## provide WS-interfaces for various A-REX functionalities. #[arex/ws] ## CHANGE: NEW block in 6.0.0. Most of the parameters originate from the old [grid-manager] block ## wsurl = url - (previously arex_mount_point) Specifies the base URL under which ## the web service interfaces will be available. The URL argument must be a ## full URL consisting of protocol+host+port+path: e.g. https://<hostname>:<port>/<path> ## Make sure the chosen port is not blocked by a firewall or other security rules. ## default: https://$VAR{[common]hostname}:443/arex #wsurl=https://piff.hep.lu.se:443/arex ## CHANGE: RENAMED and MODIFIED in 6.0.0. ## logfile = path - (previously wslogfile) Specify the log file location for WS-interface operations. ## default: /var/log/arc/ws-interface.log #logfile=/var/log/arc/ws-interface.log ## CHANGE: RENAMED in 6.0.0. ## pidfile = path - Specify the location of the file containing the PID of the daemon process. ## default: /run/arched-arex-ws.pid #pidfile=/run/arched-arex-ws.pid ## CHANGE: INTRODUCED in 6.13.0. ## max_job_control_requests = number - The max number of simultaneously processed job management ## requests over the WS interface - like job submission, cancel, status check etc. ## default: 100 #max_job_control_requests=100 ## max_infosys_requests = number - The max number of simultaneously processed info ## requests over the WS interface. ## default: 1 #max_infosys_requests=1 ## max_data_transfer_requests = number - The max number of simultaneously processed data transfer ## requests over the WS interface - like data staging. ## default: 100 #max_data_transfer_requests=100 ## tlsciphers = ciphers_list - Override the OpenSSL ciphers list enabled on the server. ## default: HIGH:!eNULL:!aNULL #tlsciphers=HIGH:!eNULL:!aNULL ## CHANGE: INTRODUCED in 6.14.0. ## tlsprotocols = SSL/TLS protocols - Specify which protocols to enable. ## This is a space separated list of values - SSLv2 SSLv3 TLSv1.0 TLSv1.1 TLSv1.2 TLSv1.3 ## default: TLSv1.2 TLSv1.3 #tlsprotocols=TLSv1.2 TLSv1.3 ## CHANGE: INTRODUCED in 6.14.0. ## tlscurve = curve - Specify the SSL/TLS ECDH curve name (SN). ## default: secp521r1 #tlscurve=secp521r1 ## CHANGE: INTRODUCED in 6.14.0. ## ## ### end of the [arex/ws] block ############################## ### The [arex/ws/jobs] block ################################ ## A-REX offers a set of web service interfaces implemented via either REST or WS-SOAP. ## This block enables the job management, info query and delegation protocols through both the REST and ## EMIES interfaces.
## Consult http://svn.nordugrid.org/repos/nordugrid/doc/trunk/tech_doc/emi-es/EMI-ES-Specification_v1.16.pdf for the EMIES interface description ## and read http://www.nordugrid.org/documents/arc6/tech/rest/rest.html for the REST interface specification. #[arex/ws/jobs] ## CHANGE: NEW sub-block in 6.0.0. ## allownew = yes/no - The 'allownew' config parameter sets whether the Computing Element accepts ## submission of new jobs via the WS-interface. This parameter can be used to close down the CE. ## allowedvalues: yes no ## default: yes #allownew=yes ## CHANGE: NEW in 6.0.0. Implement support in the code for WS-interface. ## allownew_override = [authgroup ...] - (previously allowsubmit) Defines which authorization ## groups are allowed to submit new jobs via the WS-interfaces ## when the CE is closed with allownew=no. Note that it requires allownew=no to be set. ## multivalued ## default: undefined #allownew_override=biousers atlasusers #allownew_override=yourauthgroup ## CHANGE: RENAMED and MODIFIED behaviour in 6.0.0. Modified behaviour: possible to specify several groups on a single line! ## allowaccess = authgroup - (previously groupcfg) Defines that the specified authgroup members ## are authorized to access the ARC-CE via this interface. A related config option, ## "denyaccess" (see below), can be used to reject access. ## Multiple "allowaccess" and "denyaccess" authorization statements are allowed within a configuration block. ## These statements are processed sequentially in the order they are specified in the ## config block. The processing stops on the first "allowaccess" or "denyaccess" statement matching the authgroup membership. ## If there are no authorization statements specified, then no additional restrictions are applied ## for authorizing user access and the interface is open to everybody authenticated. ## default: undefined ## multivalued #allowaccess=biousers #allowaccess=atlasusers ## denyaccess = authgroup - Defines that the specified authgroup members ## are REJECTED, i.e. not authorized to access the ARC-CE via this interface. Note that a related config option, ## "allowaccess" (see above), can be used to grant access. ## Multiple "denyaccess" and "allowaccess" authorization statements are allowed within a configuration block. ## These statements are processed sequentially in the order they are specified in the ## config block. The processing stops on the first "allowaccess" or "denyaccess" statement matching the authgroup membership. ## If there are no authorization statements specified, then no additional restrictions are applied ## for authorizing user access and the interface is open to everybody authenticated. ## default: undefined ## multivalued #denyaccess=blacklisted-users ## maxjobdesc = size - specifies the maximal allowed size of a job description ## in bytes. The default value is 5MB. Use 0 to set unlimited size. ## default: 5242880 #maxjobdesc=0 ## CHANGE: NEW in 6.0.0. ## ## ### end of the [arex/ws/jobs] block ############################## ### The [arex/ws/publicinfo] block ################################ ## A-REX allows access to public information for non-authorized users. ## The presence of this block enables this feature. #[arex/ws/publicinfo] ## CHANGE: NEW block in 6.7.0. ## allowaccess = authgroup - Defines that the specified authgroup members are authorized to access ## public information. For more information see the similar configuration option in the [arex/ws/jobs] block. ## default: undefined ## multivalued #allowaccess=monitors ## CHANGE: NEW in 6.7.0.
## denyaccess = authgroup - Defines that the specified authgroup members are REJECTED, i.e. not authorized ## to access public information. For more information see the similar configuration option in the [arex/ws/jobs] block. ## default: undefined ## multivalued #denyaccess=badactors ## CHANGE: NEW in 6.7.0. ### end of the [arex/ws/publicinfo] block #################### ### The [arex/ws/cache] block ################################ ## The content of the A-REX cache can be accessed via a WS-interface. ## Configuring this block will allow reading cache files through a special URL. ## For example, if the remote file gsiftp://remotehost/file1 is stored in the cache ## and the WS interfaces (configured above) are available via a wsurl of https://hostname:443/arex/, ## then the cached copy of the file can be accessed via the following special URL: ## https://hostname:443/arex/cache/gsiftp://remotehost/file1 ## Comment out this block if you don't want to expose the cache content via the WS-interface. #[arex/ws/cache] ## CHANGE: NEW block in 6.0.0. ## cacheaccess = rule - This parameter defines the access control rules for the cache WS-interface, ## i.e. the rules for allowing access to files in the cache remotely through the A-REX web interface. ## If not set, then no one can access anything. The default is not set, which means complete denial. ## A rule has three parts: ## 1. Regular expression defining a URL pattern ## 2. Credential attribute to match against a client's credential ## 3. Regular expression defining a credential value to match against a client's ## credential ## A client is allowed to access the cached file if a URL pattern matches the ## cached file URL and the client's credential has the attribute and matches the ## value required for that pattern. Possible values for the credential attribute are ## dn, voms:vo, voms:role and voms:group. ## multivalued ## default: undefined #cacheaccess=gsiftp://host.org/private/data/.* voms:vo myvo:production #cacheaccess=gsiftp://host.org/private/data/bob/.* dn /O=Grid/O=NorduGrid/.* ## ## ### end of the [arex/ws/cache] block #################### ### The [arex/ws/candypond] block (previously cacheservice) ############# ## The CandyPond (Cache and deliver your pilot on-demand data) A-REX Web ## Service (previously called Cache Service) exposes various useful data-staging related operations ## for the pilot job model where input data for jobs is not known until the job ## is running on the worker node. This service is intended to be used by A-REX managed jobs. ## This service requires the [arex/data-staging] functionality. ## ## The CandyPond service is available via the wsurl/candypond URL ## (e.g. https://hostname:443/arex/candypond) ## #[arex/ws/candypond] ## CHANGE: NEW block in 6.0.0, and RENAMED service. ## ## ### end of the [arex/ws/candypond] block #################### ### The [arex/ws/argus] block ############################ ## The Web Service components of A-REX may directly use the Argus service ## (https://twiki.cern.ch/twiki/bin/view/EGEE/AuthorizationFramework) ## for requesting authorization decisions and performing client mapping ## to a local user account. ## This block turns on and configures the A-REX WS - Argus integration. ## When this block is enabled, A-REX will communicate with the Argus PEP or PDP service ## for every WS interface operation! ## Comment out this block if you don't intend to use any external Argus service with the A-REX WS interfaces. ## Using Argus with the gridftp interface is possible only via the LCMAPS callout.
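## As an illustrative sketch only (the endpoint URL below is an assumption, not a real
## service), a typical integration needs little more than the PEP endpoint while keeping
## the default "emi" profile and leaving Argus-based user mapping disabled:
##   [arex/ws/argus]
##   arguspep_endpoint=https://argus.example.org:8154/authz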
#[arex/ws/argus] ## CHANGE: NEW block in 6.0.0. ## *arguspep_endpoint = url - Specifies the URL of the Argus PEPD service to use for ## authorization and user mapping. ## It is worth mentioning that if the "requireClientCertAuthentication" item (default is false) ## of pepd.ini (the configuration of the Argus PEPD service) is set to 'true', then ## https should be used, otherwise http is appropriate. ## NOTE that Argus will be contacted for every WS interface operation requested! ## default: undefined #arguspep_endpoint=https://somehost.somedomain:8154/authz ## arguspep_profile = profile_name - Defines which communication profile to use while ## communicating with the Argus PEPD service. Possible values for profile_name are: ## direct - pass all authorization attributes (only for debugging) ## subject - pass only the subject name of the client ## emi - ARC native profile developed in the EMI project. This is the default option. ## allowedvalues: direct subject emi ## default: emi #arguspep_profile=emi ## CHANGE: MODIFIED parameter values in 6.0.0: removed CREAM. ## arguspep_usermap = yes/no - Specifies whether the response from the Argus service may define the mapping ## of the client to a local account. The default is 'no'. ## Note that Argus is contacted after all the other user mapping is performed. Hence it can ## overwrite all other decisions. ## allowedvalues: yes no ## default: no #arguspep_usermap=no ## arguspdp_endpoint = url - Specifies the URL of the Argus PDP service to use for ## authorization and user mapping. ## It is worth mentioning that if the "requireClientCertAuthentication" item (default is false) ## of pdp.ini (the configuration of the Argus PDP service) is set to 'true', then ## https should be used, otherwise http is appropriate. ## NOTE that Argus will be contacted for every WS interface operation requested! ## default: undefined #arguspdp_endpoint=https://somehost.somedomain:8152/authz ## arguspdp_profile = profile_name - Defines which communication profile to use while communicating ## with the Argus PDP service. Possible values for profile_name are: ## subject - pass only the subject name of the client ## emi - ARC native profile developed in the EMI project. This is the default option. ## allowedvalues: subject emi ## default: emi #arguspdp_profile=emi ## CHANGE: MODIFIED parameter values in 6.0.0: removed CREAM. ## arguspdp_acceptnotapplicable = yes/no - Specify if the "NotApplicable" decision returned by the Argus ## PDP service is treated as a reason to deny the request. The default is 'no', which treats ## "NotApplicable" as a reason to deny the request. ## allowedvalues: yes no ## default: no #arguspdp_acceptnotapplicable=no ## ## #### end of the [arex/ws/argus] block ##################### ### The [arex/jura] block ################################### ## A-REX is responsible for collecting accounting measurements from various ARC ## subsystems, including batch system backends and DTR data staging. ## ## Starting from the 6.4 release A-REX writes all accounting data into the local accounting ## database that can be queried with "arcctl accounting". ## ## JURA is the accounting record generating and reporting module of the ARC CE. ## A-REX periodically executes JURA to create usage records based on the ## accounting target configuration and using the accounting data. ## ## NOTE that in the ARC releases before 6.4 the accounting measurements were ## collected in the job log files that are only used for publishing records.
## This functionality was supported for backward compatibility in the 6.4-6.7 releases
## and was removed in ARC 6.8.0.
##
## Enable and configure this block if you want to send accounting records to
## accounting services.
##
## Note that a dedicated "accounting target" subblock is needed for every accounting
## destination. The target subblocks are either of a type "apel" or "sgas":
## "[arex/jura/apel:targetname]" or "[arex/jura/sgas:targetname]".
##
#[arex/jura]
## logfile = path - (previously jobreport_logfile) The name of the jura logfile.
## default: /var/log/arc/jura.log
#logfile=/var/log/arc/jura.log
## loglevel = number - Log level for the JURA accounting module.
## allowedvalues: 0 1 2 3 4 5 FATAL ERROR WARNING INFO VERBOSE DEBUG
## default: 3
#loglevel=3
## vomsless_vo = voname[#voissuer] - This parameter allows the sysadmin to manually assign
## VOs during publishing to jobs that were submitted with "VOMS-less grid proxies".
## "voname" is the VO name to be used in the generated records (the same as expected in voms-proxy)
## the optional "voissuer" (relevant to SGAS only) value is the VOMS server identity (certificate DN).
## default: undefined
#vomsless_vo=atlas
#vomsless_vo=atlas#/DC=ch/DC=cern/OU=computers/CN=lcg-voms.cern.ch
## vo_group = group - Adds an additional VO group attribute(s) to the usage records.
## multivalued
## default: undefined
#vo_group=/atlas/production
## urdelivery_frequency = seconds - (previously jobreport_period) Specifies the frequency of
## the regular execution of the JURA process by A-REX.
## The actual threshold of the records reporting frequency can be defined on a per-target basis.
## default: 3600
#urdelivery_frequency=3600
## CHANGE: RENAMED in 6.0.0.
## x509_host_key = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_host_key}
#x509_host_key=/etc/grid-security/hostkey.pem
## CHANGE: NEW in 6.0.0.
## x509_host_cert = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_host_cert}
#x509_host_cert=/etc/grid-security/hostcert.pem
## CHANGE: NEW in 6.0.0.
## x509_cert_dir = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_cert_dir}
#x509_cert_dir=/etc/grid-security/certificates
## CHANGE: NEW in 6.0.0.
### end of the [arex/jura] block ########################################
### The [arex/jura/sgas:targetname] blocks ##############################
## An SGAS sub-block of [arex/jura] enables and configures an SGAS accounting
## server as a target destination to which JURA will send properly formatted usage records.
## You need to define a separate block with a unique targetname for every SGAS target server.
##
## Note that the block name will be used by JURA to track the latest records sent to
## this target. Be aware that if you rename the block, the target will be handled as a new one.
## However, a "targeturl" change will not trigger new target handling.
#
#[arex/jura/sgas:neic_sgas]
## *targeturl = url - The service endpoint URL of SGAS server.
## default: undefined
#targeturl=https://grid.uio.no:8001/logger
## CHANGE: NEW in 6.0.0.
## localid_prefix = prefix_string - Sets a prefix value for the LocalJobID ur parameter
## for the SGAS usage records.
## default: undefined
#localid_prefix=some_text_for_SGAS
## CHANGE: NEW in 6.0.0.
## vofilter = vo - Configures a job record filtering mechanism based on the
## VO attribute of the jobs.
## Only the job records matching one of the VOs that you set here will be sent
## to the target accounting service.
## multivalued
## default: undefined
#vofilter=atlas
#vofilter=fgi.csc.fi
## CHANGE: NEW in 6.0.0.
## urbatchsize = number - JURA sends usage records not one-by-one, but in batches.
## This option sets the size of a batch. Zero value means unlimited batch size.
## default: 50
#urbatchsize=80
## urdelivery_frequency = seconds - (introduced in 6.4.0) Adds an optional minimal threshold
## of the interval between subsequent records publishing to this target.
## NOTE that the actual delivery interval is the value divisible by "urdelivery_frequency"
## defined in the [arex/jura] block that defines the entire JURA process invocation frequency.
## default: undefined
#urdelivery_frequency=3600
## CHANGE: NEW in 6.4.0.
##
### end of the [arex/jura/sgas:targetname] blocks ########################
### The [arex/jura/apel:targetname] blocks ###############################
## An APEL sub-block of [arex/jura] enables and configures an APEL accounting
## server as a target destination to which JURA will send properly formatted usage records.
## You need to define a separate block with a unique targetname for every APEL target server.
##
## Note that the block name will be used by JURA to track the latest records sent to
## this target. Be aware that if you rename the block, the target will be handled as a new one.
## However, a "targeturl" change will not trigger new target handling.
#[arex/jura/apel:egi_prod_apel]
## *targeturl = url - The service endpoint URL of the APEL accounting server.
## default: undefined
#targeturl=https://msg.argo.grnet.gr
## topic = topic_name - Sets the name of the APEL topic to which JURA will publish the
## accounting records.
## AMS destination topic for compute element is 'gLite-APEL'
## default: gLite-APEL
#topic=/queue/global.accounting.test.cpu.central
## *gocdb_name = name - Can be used to specify the GOCDB name of the resource.
## This value would be seen as Site attribute in the generated APEL records.
## default: undefined
#gocdb_name=GRID_UIO_NO
## apel_messages = type - (introduced in 6.4.0) Defines what kind of records JURA
## will send to APEL services during the regular publishing process.
## Possible cases are: per-job EMI CAR records ("urs") and APEL summary records ("summaries").
## APEL Sync messages are always generated.
## NOTE that on heavily loaded sites with 10k+ jobs weekly, "summaries" generation has
## a performance penalty in ARC < 6.8. It is advised to use "urs" in this case.
## allowedvalues: urs summaries
## default: summaries
#apel_messages=urs
## CHANGE: NEW in 6.4.0.
## vofilter = vo - Configures a job record filtering mechanism based on the
## VO attribute of the jobs. Only the job records matching one of the VOs that you
## set here will be sent to the target accounting service.
## multivalued
## default: undefined
#vofilter=atlas
#vofilter=fgi.csc.fi
## urbatchsize = number - JURA sends usage records not one-by-one, but in batches.
## This option sets the size of a batch. Zero value means unlimited batch size.
## 500 is recommended to avoid too large messages using AMS
## default: 500
#urbatchsize=500
## urdelivery_frequency = seconds - (introduced in 6.4.0) Adds an optional minimal
## threshold of the interval between subsequent records publishing to this target.
## NOTE that the actual delivery interval is the value divisible by "urdelivery_frequency"
## defined in the [arex/jura] block that defines the entire JURA process invocation frequency.
## The APEL recommended value is once per day for "summaries". Use smaller values for "urs".
## default: 86000
#urdelivery_frequency=14000
## CHANGE: NEW in 6.4.0.
##
### end of the [arex/jura/apel:targetname] blocks ############################
### The [arex/ganglia] block - previously [gangliarc] ###############################
##
## This block enables the monitoring of ARC-specific metrics.
## Earlier versions (ARC < 6.0) relied only on the standalone tool gangliarc;
## ganglia monitoring is now integrated into ARC, and gangliarc is obsolete.
## Note that AREX ganglia (as gangliarc did) depends on an existing ganglia installation,
## as it sends its metrics to a running gmond process.
#[arex/ganglia]
## CHANGE: RENAMED block in 6.0.0.
## gmetric_bin_path = path - (previously gmetric_exec) The path to the gmetric executable.
## default: /usr/bin/gmetric
#gmetric_bin_path=/usr/local/bin/gmetric
## CHANGE: MOVED and RENAMED in 6.0.0 from deleted [gangliarc] block.
## metrics = name_of_the_metrics - the metrics to be monitored.
## metrics takes a comma-separated list of one or more of the following metrics:
## - staging -- number of tasks in different data staging states - not yet implemented
## - cache -- free cache space
## - session -- free session directory space
## - heartbeat -- last modification time of A-REX heartbeat
## - processingjobs -- the number of jobs currently being processed by ARC (jobs
## between PREPARING and FINISHING states) - not yet implemented
## - failedjobs -- the number of failed jobs per last 100 finished
## - jobstates -- number of jobs in different A-REX stages
## - all -- all of the above metrics
## default: all
## allowedvalues: staging cache session heartbeat processingjobs failedjobs jobstates all
#metrics=all
## frequency = seconds - The period between each information gathering cycle, in seconds.
## default: 60
#frequency=300
## CHANGE: MODIFIED in 6.0.0. Default increased from 20s to one minute.
##
##
### end of the [arex/ganglia] block ##############
### The [gridftpd] block #######################################################
## This block enables and configures the gridftp server. The usage of gridftp is
## twofold in connection with ARC: 1) The server together with its custom jobplugin
## can be used as a job submission and management interface for an ARC CE.
## 2) The server with the filedirplugin can be used as a very simplistic storage element.
## This block configures the common server capabilities. To make the gridftp service functional,
## you need to enable at least one of the plugin subblocks as well.
#[gridftpd]
## user = user[:group] - Switch to a non-root user/group after startup
## WARNING: Make sure that the certificate files are owned by the user/group
## specified by this option.
## default: root:root
#user=grid
## loglevel = level - (previously debug) Set log level of the gridftpd daemon, between
## 0 (FATAL) and 5 (DEBUG). Default is 3 (INFO).
## allowedvalues: 0 1 2 3 4 5 FATAL ERROR WARNING INFO VERBOSE DEBUG
## default: 3
#loglevel=2
## CHANGE: RENAMED in 6.0.0.
## logfile = path - Set logfile location of the gridftp server.
## default: /var/log/arc/gridftpd.log
#logfile=/var/log/arc/gridftpd.log
## pidfile = path - Specify location of file containing PID of daemon process.
## default: /run/gridftpd.pid
#pidfile=/run/gridftpd.pid
## port = bindport - Port to listen on. For gridftp-based job submission it is strongly advised to use
## the default port 2811, because 3rd party clients assume the ARC CE is using that port.
## default: 2811
#port=2811
## allowencryption = yes/no - (previously encryption) Specifies whether data encryption should be allowed
## on client request. Encryption is very heavy, therefore the default is no.
## allowedvalues: yes no
## default: no
#allowencryption=no
## CHANGE: RENAMED in 6.0.0.
## allowactivedata = yes/no - if no, only passive data transfer is allowed.
## By default both passive and active data transfers are allowed.
## default: yes
#allowactivedata=yes
## maxconnections = number - The maximum number of connections accepted by a gridftpd server.
## default: 100
#maxconnections=200
## defaultbuffer = size - Defines size of every buffer for data
## reading/writing. The actual value may decrease if the
## cumulative size of all buffers exceeds the value specified by maxbuffer.
## default: 65536
#defaultbuffer=65536
## maxbuffer = size - Defines maximal amount of memory in bytes to be
## allocated for all data reading/writing buffers. Default is 640kB.
## The number of buffers is (max {3, min {41, 2P + 1}}), where P is the
## parallelism level requested by the client. Hence, even without parallel
## streams enabled, the number of buffers will be 3.
## default: 655360
#maxbuffer=655360
### Firewall specifics
## globus_tcp_port_range = port_range - In a firewalled environment
## the software which uses GSI needs to know what ports are available.
## If not set, a random port is selected.
## These variables are similar to the Globus environment variables:
## GLOBUS_TCP_PORT_RANGE and GLOBUS_UDP_PORT_RANGE.
## default: 9000,9300
#globus_tcp_port_range=9000,12000
## globus_udp_port_range = port_range - In a firewalled environment
## the software which uses GSI needs to know what ports are available.
## If not set, a random port is selected.
## These variables are similar to the Globus environment variables:
## GLOBUS_TCP_PORT_RANGE and GLOBUS_UDP_PORT_RANGE.
## default: 9000,9300
#globus_udp_port_range=9000,12000
## firewall = hostname - The hostname or IP address to use in response to the PASV command
## instead of the IP address of a network interface of the computer.
## default: undefined
#firewall=my.host.org
### X509 related parameters
## x509_host_key = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_host_key}
#x509_host_key=/etc/grid-security/hostkey.pem
## CHANGE: RENAMED in 6.0.0.
## x509_host_cert = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_host_cert}
#x509_host_cert=/etc/grid-security/hostcert.pem
## CHANGE: RENAMED in 6.0.0.
## x509_cert_dir = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_cert_dir}
#x509_cert_dir=/etc/grid-security/certificates
##
##
### end of the [gridftpd] block ###############################
### The [gridftpd/jobs] block ##############################
## The jobplugin of the gridftp server implements a custom
## job management and submission interface of the ARC CE.
## This subblock enables and configures that interface.
## Consult the Technical Reference within the ARC sysadmin guide
## for the interface specification.
## Comment out this subblock if you don't want a gridftp-based job interface.
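## For illustration, a sketch of a [gridftpd] server (documented above) running behind
## a firewall/NAT (hostname and port range are placeholders): the external address is
## advertised in PASV responses and the data connection ports are restricted:
#[gridftpd]
#port=2811
#globus_tcp_port_range=9000,9300
#firewall=gridftp.example.org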
#[gridftpd/jobs] ## allownew = yes/no - This parameter sets if the ARC CE accepts submission of new jobs ## via the gridftp interface. This parameter can be used to close down the ARC CE. ## allowedvalues: yes no ## default: yes #allownew=yes ## allownew_override = [authgroup ...] - (previously allowsubmit) Defines which authorization ## groups are allowed to submit new jobs via the gridftp interface ## when the CE is closed with allownew=no. Note that it requires the allownew=no to be set. ## multivalued ## default: undefined #allownew_override=biousers atlasusers #allownew_override=yourauthgroup ## CHANGE: RENAMED and MODIFIED in 6.0.0. Possible to specify several groups on a single line. ## allowaccess = authgroup - (previously groupcfg) Defines that the specified authgroup members ## are authorized to access the ARC-CE via this interface. A related config option the ## "denyaccess" (see below) can be used to reject access. ## Multiple "allowaccess" and "denyaccess" authorization statements are allowed within a configuration block. ## These statements are processed sequentially in the order they are specified in the ## config block. The processing stops on first "allowaccess" or "denyaccess" statement matching the authgroup membership. ## If there are no authorization statements specified, then no additional restrictions are applied ## for authorizing user access and the interface is open to everybody authenticated. ## default: undefined ## multivalued #allowaccess=biousers #allowaccess=atlasusers ## denyaccess = authgroup - Defines that the specified authgroup members ## are REJECTED, not authorized to access the ARC-CE via this interface. Note that a related config option the ## "allowaccess" (see above) can be used to grant access. ## Multiple "denyaccess" and "allowaccess" authorization statements are allowed within a configuration block. ## These statements are processed sequentially in the order they are specified in the ## config block. The processing stops on first "allowaccess" or "denyaccess" statement matching the authgroup membership. ## If there are no authorization statements specified, then no additional restrictions are applied ## for authorizing user access and the interface is open to everybody authenticated. ## default: undefined ## multivalued #denyaccess=blacklisted-users ## maxjobdesc = size - specifies maximal allowed size of job description ## in bytes. Default value is 5MB. Use 0 to set unlimited size. ## default: 5242880 #maxjobdesc=0 ## ## ### end of [gridftpd/jobs] block ##################################### ### The [gridftpd/filedir] block ######################################### ## The filedirplugin module of the gridftp server can be used to set up a ## simplistic grid storage element (SE). This subblock enables and configures ## such an SE by "exporting" a directory using the gridftpd's filedirplugin. ## Comment out this block if you don't need a SE. #[gridftpd/filedir] ## *path = virtdir - The name of the virtual directory served by the gridftp server. ## The exported storage area is accessible as "gsiftp://my_server/virtdir". ## Even "/" is a valid choice. ## default: undefined #path=/topdir ## *mount = path - The physical directory corresponding to the virtual one: ## gsiftp://my_server/virtdir will give access to this location. ## default: undefined #mount=/scratch/grid ## allowaccess = authgroup - (previously groupcfg) Defines that the specified authgroup members ## are authorized to access the gridftp file service. 
## A related config option, "denyaccess" (see below), can be used to reject access.
## Multiple "allowaccess" and "denyaccess" authorization statements are allowed within a configuration block.
## These statements are processed sequentially in the order they are specified in the
## config block. The processing stops on the first "allowaccess" or "denyaccess" statement matching the authgroup membership.
## If there are no authorization statements specified, then no additional restrictions are applied
## for authorizing user access and the interface is open to everybody authenticated.
## default: undefined
## multivalued
#allowaccess=biousers
#allowaccess=atlasusers
## denyaccess = authgroup - Defines that the specified authgroup members
## are REJECTED, not authorized to access the gridftp file service. Note that a related config option,
## "allowaccess" (see above), can be used to grant access.
## Multiple "denyaccess" and "allowaccess" authorization statements are allowed within a configuration block.
## These statements are processed sequentially in the order they are specified in the
## config block. The processing stops on the first "allowaccess" or "denyaccess" statement matching the authgroup membership.
## If there are no authorization statements specified, then no additional restrictions are applied
## for authorizing user access and the interface is open to everybody authenticated.
## default: undefined
## multivalued
#denyaccess=blacklisted-users
## dir = path options - Specifies access rules for accessing files in "path"
## (relative to virtual and real path) and all the files and directories below.
## Available permissions check "options" are:
## nouser
## do not use local file system rights, only use those specified in this line
## owner
## check only file owner access rights
## group
## check only group access rights
## other
## check only "others" access rights
## If none of the above are specified, the usual unix access rights are applied.
## Available permissions enforcement "options" are:
## read
## allow reading files
## delete
## allow deleting files
## append
## allow appending files (does not allow creation)
## overwrite
## allow overwriting already existing files (does not
## allow creation, file attributes are not changed)
## dirlist
## allow obtaining list of the files
## cd
## allow making this directory the current one
## create owner:group permissions_or:permissions_and
## allow creating new files. The file will be owned by "owner" and the owning group
## will be "group". If "*" is used, the user/group to which the connected user
## is mapped will be used. The permissions will be set to
## "permissions_or & permissions_and". (The second number is reserved for
## future usage.)
## mkdir owner:group permissions_or:permissions_and
## allow creating new directories.
## The example shows setting permissions on the mounted "/" directory and adjusting permissions
## on the "/section1" and "/section2" subdirectories.
## multivalued
## default: undefined
#dir=/ nouser read cd dirlist delete create *:* 664:664 mkdir *:* 775:775
#dir=/section1 nouser read mkdir *:* 700:700 cd dirlist
#dir=/section2 nouser read mkdir *:* 700:700 cd dirlist
##
##
### end of [gridftpd/filedir] block #####################################
### The [infosys] block ################################################
## This block enables and configures the core part of the information system.
## Enables the information collection to be used by other ARC components, including interfaces.
## Parameters in this block apply to all the infosys subsystems.
#[infosys]
## logfile = path - (previously providerlog) Specifies log file location for the information
## provider scripts.
## default: /var/log/arc/infoprovider.log
#logfile=/var/log/arc/infoprovider.log
## CHANGE: RENAMED in 6.0.0.
## loglevel = number - (previously provider_loglevel) The loglevel for the infoprovider scripts (0-5).
## Each value corresponds to the following verbosity levels:
## FATAL => 0, ERROR => 1 , WARNING => 2, INFO => 3, VERBOSE => 4, DEBUG => 5
## allowedvalues: 0 1 2 3 4 5 FATAL ERROR WARNING INFO VERBOSE DEBUG
## default: 3
#loglevel=3
## CHANGE: RENAMED in 6.0.0.
## validity_ttl = seconds - The published infosys records advertise their
## validity, e.g. how long the info should be considered up-to-date by the clients.
## Use this parameter to set the published validity value.
## NOTE that different schemas may render this information differently.
## default: 10800
#validity_ttl=10800
##
##
### end of [infosys] block #################################################
### The [infosys/ldap] block ################################################
## This infosys subblock enables and configures the ldap hosting service
## for the infosys functionality. Using an LDAP server with some schema is one way to
## publish information about your Computing Element.
## Comment out this block if you don't want to run an LDAP-based information system.
#[infosys/ldap]
## CHANGE: NEW block in 6.0.0.
## hostname = FQDN - the hostname of the machine running the slapd service;
## it will be used as the bind address for slapd. If not present, the value is taken from the [common] block.
## default: $VAR{[common]hostname}
#hostname=my.testbox
## slapd_hostnamebind = string - May be used to set the hostname part of the
## network interface to which the slapd process will bind. In most cases
## there is no need to set it, since the hostname parameter is already
## sufficient. The example below will bind the slapd
## process to all the network interfaces available on the server.
## default: undefined
#slapd_hostnamebind=*
## port = port_number - The port on which the slapd service runs. The default
## infosys port is assumed to be 2135 by many clients, therefore think twice before you change it,
## because 3rd party clients assume 2135 to be the ldap infosys port.
## default: 2135
#port=2135
## user = unix_user - overwrites the unix user running the slapd.
## By default the startup scripts search for well-known ldap-users like "ldap" or "openldap",
## then fall back to "root" if not found.
## default: undefined
#user=slapd
## CHANGE: MOVED in 6.0.0 from [infosys].
## slapd = path - explicitly define the path to the slapd command.
## By default the startup scripts search for the "slapd" binary in the system PATH.
## default: undefined
#slapd=/usr/sbin/slapd
## slapd_loglevel = number - Sets the native slapd loglevel (see man slapd).
## Slapd logs via syslog. The default is set to no-logging (0) and it is
## RECOMMENDED not to be changed in a production environment.
## A non-zero slapd_loglevel value causes a serious performance decrease.
## default: 0
#slapd_loglevel=0
## threads = number - The native slapd threads parameter, default is 32.
## default: 32
#threads=128
## timelimit = seconds - The native slapd timelimit parameter. Maximum number of seconds
## the slapd server will spend answering a search request.
## Default is 3600. You probably want a much lower value.
## default: 3600
#timelimit=1800
## idletimeout = seconds - The native slapd idletimeout parameter. Maximum number of
## seconds the slapd server will wait before forcibly closing idle client
## connections. Its value must be larger than the value of the "timelimit" option.
## If not set, it defaults to timelimit + 1.
## default: $EVAL{$VAR{timelimit} + 1}
#idletimeout=1801
## infosys_ldap_run_dir = path - The location where the NorduGrid/GLUE2 LDAP ldif file
## will be generated, and where the fifo to sync between infoproviders
## and BDII will be generated.
## default: /run/arc/infosys
#infosys_ldap_run_dir=/run/arc/infosys
## ldap_schema_dir = path - Allows explicitly specifying an additional path to the schema
## files. Note that this doesn't override the standard location, but adds
## the specified path to the standard locations /etc/ldap and /etc/openldap.
## Normally it is sufficient to use only the standard schema file locations,
## therefore there is no need to set this parameter.
## default: undefined
#ldap_schema_dir=/nfs/ldap/schema/
## The following options configure the third-party bdii ldap parameters.
## In 99% of cases there is no need to change anything; use the defaults.
## These variables are usually automatically set by ARC, and are here mostly for debug purposes
## and to tweak exotic BDII installations.
## bdii_debug_level = level - Set this parameter to DEBUG to check bdii errors in bdii-update.log.
## At the same time, do not enable slapd logs, thus avoiding performance issues.
## default: WARNING
#bdii_debug_level=ERROR
## bdii_provider_timeout = seconds - (previously provider_timeout in bdii block)
## This variable allows a system administrator to modify the behaviour of bdii-update.
## This is the time BDII waits for the bdii provider scripts generated by
## A-REX infosys to produce their output.
## default: 10800
#bdii_provider_timeout=10800
## CHANGE: RENAMED parameter in 6.0.0.
## BDII5 uses these variables. These might change depending on BDII version.
## ARC sets them by inspecting distributed bdii configuration files.
## DO NOT CHANGE UNLESS YOU KNOW WHAT YOU'RE DOING
## bdii_location = path - The installation directory for the BDII.
## default: /usr
#bdii_location=/usr
## bdii_run_dir = path - Contains BDII pid files and slapd pid files
## default: /run/arc/bdii
#bdii_run_dir=/run/arc/bdii
## bdii_log_dir = path - Contains infosys logs
## default: /var/log/arc/bdii
#bdii_log_dir=/var/log/arc/bdii
## bdii_tmp_dir = path - Contains provider scripts
## default: /var/tmp/arc/bdii
#bdii_tmp_dir=/var/tmp/arc/bdii
## bdii_var_dir = path - Contains slapd databases
## default: /var/lib/arc/bdii
#bdii_var_dir=/var/lib/arc/bdii
## bdii_update_pid_file = path - Allows changing the bdii-update
## pidfile name and location
## default: $VAR{bdii_run_dir}/bdii-update.pid
#bdii_update_pid_file=/run/arc/bdii/bdii-update.pid
## bdii_database = backend_type - Configure what ldap database backend should be used.
## default: hdb
#bdii_database=hdb
## bdii_conf = path - Location of the bdii config file generated by ARC.
## default: $VAR{[infosys/ldap]infosys_ldap_run_dir}/bdii.conf #bdii_conf=/run/arc/infosys/bdii.conf ## bdii_update_cmd = path - path to bdii-update script ## default: $VAR{bdii_location}/sbin/bdii-update #bdii_update_cmd=/usr/sbin/bdii-update ## bdii_db_config = path - path to slapd database configuration file ## default: /etc/bdii/DB_CONFIG #bdii_db_config=/etc/bdii/DB_CONFIG ## bdii_archive_size = number - Sets BDII_ARCHIVE_SIZE in bdii configuration file ## default: 0 #bdii_archive_size=0 ## bdii_breathe_time = number - Sets BDII_BREATHE_TIME in bdii configuration file ## default: 10 #bdii_breathe_time=10 ## bdii_delete_delay = number - Sets BDII_DELETE_DELAY in bdii configuration file ## default: 0 #bdii_delete_delay=0 ## bdii_read_timeout = number - Sets BDII_READ_TIMEOUT in bdii configuration file ## default: $EVAL{$VAR{bdii_provider_timeout} + $VAR{[arex]infoproviders_timelimit} + $VAR{[arex]wakeupperiod}} #bdii_read_timeout=300 ## ## ### end of the [infosys/ldap] sub-block ######################################## ## Infosys Schema sub-blocks: The following infosys sub-blocks enable ## information publishing according to various information schema. ## In order to publish information in a certain schema, the corresponding ## sub-block must be defined in addition to the schema-neutral [infosys/cluster] ## and [queue:name] blocks! ## Comment out a specific schema block if you don't want to publish a specific ## information schema representation. ## Currently available information model (schema) sub-blocks: ## [infosys/nordugrid] - The native ARC info representation of a cluster and its queues ## [infosys/glue2] - The GLUE2 information model, both LDAP and XML (the latter is for WS-interface) ## [infosys/glue2/ldap] - The LDAP rendering of the GLUE2 model ## [infosys/glue1] - The legacy GLUE1 model (only LDAP) ## [infosys/glue1/site-bdii] - The site BDII element of the GLUE1 legacy model ### The [infosys/nordugrid] schema sub-block ########################### ## Enables the publication of the NorduGrid information model in ## the LDAP-based infosys. See the NORDUGRID-TECH-4 for schema definition. ## The configuration block does not contain any parameter. The information tree ## is populated based on the contents of the schema-neutral [infosys/cluster] ## and [queue:name] blocks. #[infosys/nordugrid] ## CHANGE: NEW block in 6.0.0. ## ## ### end of the [infosys/nordugrid] schema block ######################## ### The [infosys/glue2] schema sub-block ########################### ## Enables the publication of the GLUE2 information model both in the LDAP and ## XML rendering. ## The information tree is populated based on the contents of the schema-neutral ## [infosys/cluster] and [queue:name] blocks and the GLUE2 specific schema sub-blocks. #[infosys/glue2] ## CHANGE: NEW block in 6.0.0. ## AdminDomain entity parameters: ## admindomain_name = string - The Name attribute for the admindomain. This will show ## in top-BDII to group the resources belonging to this cluster. ## To group a bunch of clusters under the same AdminDomain, just use the same name. ## If not specified, will default to UNDEFINEDVALUE. ## default: UNDEFINEDVALUE #admindomain_name=ARC-TESTDOMAIN ## admindomain_description = text - The free-form description of this domain. ## default: undefined #admindomain_description=ARC test Domain ## admindomain_www = url - The URL pointing at a site holding information about the AdminDomain. 
## default: undefined
#admindomain_www=http://www.nordugrid.org/
## admindomain_distributed = yes/no - Set this to yes if the domain is distributed,
## that is, if the resources belonging to the domain
## are considered geographically distributed.
## allowedvalues: yes no
## default: no
#admindomain_distributed=yes
## admindomain_owner = email - The contact email of a responsible person for the domain
## default: undefined
#admindomain_owner=admin@nordugrid.org
## admindomain_otherinfo = text - Free-form text that fills the OtherInfo GLUE2 field.
## No need to set; used only for future development.
## default: undefined
#admindomain_otherinfo=Test Other info
## ComputingService entity parameters:
## computingservice_qualitylevel = qlevel - (previously infosys_glue2_service_qualitylevel)
## Allows a sysadmin to define different GLUE2 QualityLevel values for A-REX.
## Refer to the GLUE2 documentation for the qualitylevel definitions.
## allowedvalues: production pre-production testing development
## default: production
#computingservice_qualitylevel=production
## CHANGE: RENAMED in 6.0.0.
##
##
### end of the [infosys/glue2] schema block ########################
### The [infosys/glue2/ldap] schema sub-block ###########################
## Enables the publication of the LDAP rendering of the GLUE2 information model.
## The information tree is populated based on the contents of the schema-neutral
## [infosys/cluster] and [queue:name] blocks and the GLUE2 specific schema sub-blocks.
#[infosys/glue2/ldap]
## CHANGE: NEW block in 6.0.0.
## showactivities = yes/no - (previously infosys_glue2_ldap_showactivities) Enables GLUE2
## ComputingActivities to appear in the LDAP rendering
## allowedvalues: yes no
## default: no
#showactivities=no
## CHANGE: RENAMED in 6.0.0.
##
##
### end of the [infosys/glue2/ldap] schema sub-block ###########################
### The [infosys/glue1] (previously [infosys/glue12]) schema block ##########
## This block enables the publication of the GLUE1 LDAP representation of a CE.
## The information tree is populated based on the contents of the schema-neutral
## [infosys/cluster] and [queue:name] blocks and the GLUE1 specific schema sub-blocks.
## This block holds information that is needed by glue1, in addition to the
## schema-neutral blocks.
#[infosys/glue1]
## CHANGE: RENAMED block in 6.0.0.
## resource_location = string - GLUE1 location attribute.
## IMPORTANT: no slashes or backslashes here!
## default: undefined
#resource_location=Kastrup, Denmark
## resource_latitude = latitude - GLUE1 latitude.
## default: undefined
#resource_latitude=55.75000
## resource_longitude = longitude - GLUE1 longitude.
## default: undefined
#resource_longitude=12.41670
## cpu_scaling_reference_si00 = number - GLUE1 CPU_scaling
## default: undefined
#cpu_scaling_reference_si00=2400
## processor_other_description = string - GLUE1 proc description
## The text entered under this option is copied "as is" into the glue1
## rendering of GlueHostProcessorOtherDescription.
## Cores is used to calculate the number of physical processors for
## GlueSubClusterPhysicalCPUs.
## The Benchmark value entered here is ignored by the rest
## of the ARC configuration. It will NOT be used for accounting.
## Note that in ARC > 6.3 the proper way of configuring a benchmark for both
## infosys and accounting is by adding it to the [queue:name] blocks.
## This generic string is kept here for backward compatibility,
## and most likely will not work as expected (or as used by CREAM).
## default: undefined #processor_other_description=Cores=3,Benchmark=9.8-HEP-SPEC06 ## glue_site_web = url - GLUE1 site web url ## default: undefined #glue_site_web=http://www.ndgf.org ## glue_site_unique_id = siteid - GLUE1 site id ## default: undefined #glue_site_unique_id=NDGF-T1 ## ## ### end of the [infosys/glue1] schema block ########################### ### The [infosys/glue1/site-bdii] (previously [infosys/site/sitename]) block ################ ## Enable this block ONLY if you want to publish a semi-fake GLUE1 site-bdii as part of ## the LDAP server. This block is used to configure ARC to generate a ## semi-fake site-bdii that can be registered in GOCDB. ## The default will generate an object that you can reference in GOCDB GIIS field as such: ## ldap://archost.domain:2135/mds-vo-name=,o=grid ## Example: ldap://archost.domain:2135/mds-vo-name=NDGF-T1,o=grid ## Since the original site-bdii listens on port 2170, you may want to port forward ## 2170 to 2135 in your firewall, but it is not a requirement. ## This fake site bdii is only good if you have a single ARC as a service in your organization. ## It cannot aggregate any information like the real site-bdii does. ## It just presents a partial rendering of a site-bdii that is good enough for top-bdii ## to pick up the information. ## GOCDB checks against this fake-site-bdii should be disabled as it is not fully compliant with ## the site-bdii specification. They will fail if enabled. #[infosys/glue1/site-bdii] ## CHANGE: RENAMED block in 6.0.0. ## *unique_id = id - The unique id used to identify this site, eg "NDGF-T1" ## default: undefined #unique_id=NDGF-T1 ## *url = url - This url is the insertion point in the arc Glue1 ldif tree ## for the semi-fake site-bdii ldap object. ## The content of the ldap object is mainly generated from the data in the ## [infosys/glue1] block. ## The url format is ldap://host.domain:ldapport/basedn ## Example: ldap://host.domain:2135/o=grid ## The generated site-bdii information will be attached under the basedn, ## in the example for o=grid: mds-vo-name=unique-id,o=grid ## In most cases you want to leave the default untouched, unless you know what you're doing. ## default: ldap://localhost:2135/o=grid #url=ldap://host.domain:2135/o=grid ## ## ### end of the [infosys/glue1/site-bdii] block ################################### ### The [infosys/cluster] block ################################################### ## Information schema-neutral blocks [infosys/cluster] and [queue:NAME] contain attributes ## that describe the computing cluster together with its queues. The parameters are ## available for every information model/schema representation. ## ## This block describes the cluster characteristics of a Computing Element. ## The information specified here is mostly used by the Infosys ARC component. #[infosys/cluster] ## alias = text - An arbitrary alias name of the cluster, optional. ## default: undefined #alias=Big Blue Cluster in Nowhere ## hostname = fqdn - Set the FQDN of the frontend. ## default: $VAR{[common]hostname} #hostname=myhost.org ## interactive_contactstring = url - the contact URL for interactive logins, set this ## if the cluster supports some sort of grid-enabled interactive login (gsi-ssh), ## multivalued ## default: undefined #interactive_contactstring=gsissh://frontend.cluster:2200 ## comment = text - Free text field for additional comments on the cluster in a single ## line, no newline character is allowed! 
## default: undefined #comment=This cluster is specially designed for XYZ applications: www.xyz.org ## cluster_location = formatted_string - The geographical location of the cluster, preferably ## specified as a postal code with a two letter country prefix ## default: undefined #cluster_location=DK-2100 ## cluster_owner = text - It can be used to indicate the owner of a resource, multiple ## entries can be used ## multivalued ## default: undefined #cluster_owner=World Grid Project #cluster_owner=University of NeverLand ## advertisedvo = vo_name - (previously authorizedvo) This attribute is used to advertise ## which VOs are authorized on the cluster. ## Add only one VO for each advertisedvo entry. Multiple VOs in the same line ## will cause errors. ## These entries will be shown in all GLUE2 AccessPolicy and MappingPolicy ## objects, that is, they will apply for all Endpoints(Interfaces) and all ## Shares(currently queues). You can override the advertisedvos per queue. ## The information is also published in the NorduGrid schema. ## NOTE that it is IMPORTANT to understand that this parameter is NOT enforcing any ## access control, it is just for information publishing! ## multivalued ## default: undefined #advertisedvo=atlas #advertisedvo=community.nordugrid.org ## CHANGE: RENAMED in 6.0.0. ## clustersupport = email - This is the support email address of the resource. ## multivalued ## default: undefined #clustersupport=arc.support@mysite.org #clustersupport=arc.support@myproject.org ## homogeneity = True/False - Determines whether the cluster consists of identical NODES with ## respect to cputype, memory, installed software (opsys). The frontend is NOT ## needed to be homogeneous with the nodes. In case of inhomogeneous nodes, try ## to arrange the nodes into homogeneous groups assigned to a queue and use ## queue-level attributes. ## False may trigger multiple GLUE2 ExecutionEnvironments to be published ## if applicable. ## allowedvalues: True False ## default: True #homogeneity=True ## architecture = string - Sets the hardware architecture of the NODES. The "architecture" ## is defined as the output of the "uname -m" (e.g. i686). Use this cluster ## attribute if only the NODES are homogeneous with respect to the architecture. ## Otherwise the queue-level attribute may be used for inhomogeneous nodes. If ## the frontend's architecture agrees to the nodes, the "adotf" (Automatically ## Determine On The Frontend) can be used to request automatic determination. ## default: adotf #architecture=adotf ## opsys = formatted_string - This multivalued attribute is meant to describe the operating system ## of the computing NODES. Set it to the opsys distribution of the NODES and not ## the frontend! opsys can also be used to describe the kernel or libc version ## in case those differ from the originally shipped ones. The distribution name ## should be given as distroname-version.number, where spaces are not allowed. ## Kernel version should come in the form kernelname-version.number. ## If the NODES are inhomogeneous with respect to this attribute do NOT set it on ## cluster level, arrange your nodes into homogeneous groups assigned to a queue ## and use queue-level attributes. ## If opsys=adotf, will result in Automatic Determination of the Operating System ## On The Frontend, which should only be used if the frontend has the same ## OS as the nodes. 
## The adotf discovered values will be used to fill GLUE2 OSName, OSVersion
## and OSFamily unless these values are explicitly defined for each queue.
## See the [queue:queuename] block for their usage.
## Note: Any custom value other than 'adotf' does NOT affect values in the GLUE2 schema.
## multivalued
## default: adotf
#opsys=Linux-2.6.18
#opsys=glibc-2.5.58
#opsys=CentOS-5.6
## nodecpu = formatted_string - This is the cputype of the homogeneous nodes. The string is
## constructed from the /proc/cpuinfo as the value of "model name" and "@" and
## value of "cpu MHz". Do NOT set this attribute on cluster level if the NODES
## are inhomogeneous with respect to cputype, instead arrange the nodes into
## homogeneous groups assigned to a queue and use queue-level attributes. Setting
## nodecpu=adotf will result in Automatic Determination On The Frontend,
## which should only be used if the frontend has the same cputype as the
## homogeneous nodes.
## default: adotf
#nodecpu=AMD Duron(tm) Processor @ 700 MHz
## nodememory = number - This is the amount of memory (specified in MB) on the node
## which can be guaranteed to be available for the application. Please note that
## in most cases it is less than the physical memory installed in the nodes.
## Do NOT set this attribute on cluster level if the NODES are inhomogeneous
## with respect to their memories, instead arrange the nodes into homogeneous
## groups assigned to a queue and use queue-level attributes.
## default: undefined
#nodememory=64000
## middleware = string - This multivalued attribute shows the installed grid software on
## the cluster. Nordugrid-ARC is automatically set; no need to specify it.
## multivalued
## default: undefined
#middleware=my software
## nodeaccess = inbound/outbound - Determines how the nodes can connect to the internet.
## Not setting anything means the nodes are sitting on a private isolated network.
## "outbound" access means the nodes can connect to the outside world while
## "inbound" access means the nodes can be connected from outside.
## inbound & outbound access together means the nodes are sitting on a fully open network.
## multivalued
## default: undefined
## allowedvalues: inbound outbound
#nodeaccess=inbound
#nodeaccess=outbound
## localse = url - This multivalued parameter tells the BROKER that certain URLs (and
## locations below that) should be considered "locally" available to the cluster.
## multivalued
## default: undefined
#localse=gsiftp://my.storage/data1/
#localse=gsiftp://my.storage/data2/
## cpudistribution = formatted_string - This is the CPU distribution over nodes
## given in the form "ncpu:m" where:
## "n" is the number of CPUs per machine
## "m" is the number of such machines
## Example: "1cpu:3,2cpu:4,4cpu:1" represents a cluster with
## 3 single CPU machines, 4 dual CPU machines and one machine with 4 CPUs.
## default: undefined
#cpudistribution=1cpu:3,2cpu:4,4cpu:1
##
##
## maxcputime = number - This is the maximum CPU time specified in seconds
## that the LRMS can allocate for the job. The default if not defined
## is that infoproviders get this value automatically from the LRMS.
## The purpose of this option is to tweak and override the discovered value,
## or publish this value in case the LRMS module does not support automatic
## detection.
## default: undefined
#maxcputime=300000
## mincputime = number - This is the minimum CPU time specified in seconds
## that the LRMS can allocate for the job.
## The default if not defined
## is that infoproviders get this value automatically from the LRMS.
## The purpose of this option is to tweak and override the discovered value,
## or publish this value in case the LRMS module does not support automatic
## detection.
## default: undefined
#mincputime=1200
## maxwalltime = number - This is the maximum Wall time specified in
## seconds that the LRMS can allocate for the job. The default
## if not defined is that infoproviders get this value automatically
## from the LRMS.
## The purpose of this option is to tweak and override the discovered value,
## or publish this value in case the LRMS module does not support automatic
## detection.
## default: undefined
#maxwalltime=600000
## minwalltime = number - This is the minimum Wall time specified in
## seconds that the LRMS can allocate for the job. The default
## if not defined is that infoproviders get this value automatically
## from the LRMS.
## The purpose of this option is to tweak and override the discovered value,
## or publish this value in case the LRMS module does not support automatic
## detection.
## default: undefined
#minwalltime=1800
### end of the [infosys/cluster] block #####################
### The [queue:name] blocks #######################################
## Each grid-enabled queue on the cluster should be represented and described
## by a separate queue block. The queue_name should be used as a label in the block name.
## In case of fork, or other LRMSes with no queue names, just use any unique string.
## A queue can represent a PBS/LSF/SGE/SLURM/LL queue, an SGE pool, a Condor
## pool or a single machine in case of the 'fork' type of LRMS.
## This block describes the queue characteristics.
#[queue:gridlong]
## homogeneity = True/False - determines whether the queue consists of identical NODES with
## respect to cputype, memory, installed software (opsys).
## In case of inhomogeneous nodes, try to arrange the nodes into homogeneous
## groups and assign them to a queue.
## Possible values: True, False; the default is True.
## allowedvalues: True False
## default: $VAR{[infosys/cluster]homogeneity}
#homogeneity=True
## comment = text - A free-form text field for additional comments on the queue in a single
## line, no newline character is allowed!
## default: undefined
#comment=This queue is nothing more than a condor pool
## pbs_queue_node = string - (previously queue_node_string) In PBS you can assign nodes
## to a queue (or a queue to nodes) by using the "node property" mark in the PBS config.
##
## Essentially, the "pbs_queue_node" value is used to construct the "nodes=" string in the
## PBS script, such as "nodes=count:pbs_queue_node" where "count" is taken from
## the job description (1 if not specified).
##
## This corresponds to setting the following parameter in PBS for this queue:
## resources_default.neednodes = cpu_topology[:pbs_queue_node]
##
## Setting "pbs_queue_node" changes how the queue-totalcpus and user freecpus are
## determined for this queue.
##
## You shouldn't use this option unless you are sure that your PBS configuration makes
## use of the above configuration. Read the NorduGrid PBS instructions for more information:
## http://www.nordugrid.org/documents/pbs-config.html
## default: undefined
#pbs_queue_node=gridlong_nodes
#pbs_queue_node=ppn=4:ib
## CHANGE: RENAMED in 6.0.0.
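## For illustration, a worked example of the pbs_queue_node option described above
## (the node property name is a placeholder): with the setting below, a job that
## requests 4 CPUs in its description is submitted with a "nodes=4:gridlong_nodes"
## request, assuming the PBS queue sets resources_default.neednodes accordingly:
#pbs_queue_node=gridlong_nodes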
## sge_jobopts = string - Per-queue override of additional SGE options to be used when ## submitting jobs to SGE to this queue ## default: undefined #sge_jobopts=-P atlas -r yes ## condor_requirements = string - It may be defined for each Condor queue. ## Use this option to determine which nodes belong to the current queue. ## The value of "condor_requirements" must be a valid constraints string ## which is recognized by a "condor_status -constraint ..." command. It can ## reference pre-defined ClassAd attributes (like Memory, Opsys, Arch, HasJava, ## etc) but also custom ClassAd attributes. To define a custom attribute on a ## condor node, just add two lines like the ones below in the "$(hostname).local" ## config file on the node: ## NORDUGRID_RESOURCE=TRUE ## STARTD_EXPRS = NORDUGRID_RESOURCE, $(STARTD_EXPRS) ## A job submitted to this queue is allowed to run on any node which satisfies ## the "condor_requirements" constraint. If "condor_requirements" is not set, ## jobs will be allowed to run on any of the nodes in the pool. When configuring ## multiple queues, you can differentiate them based on memory size or disk ## space, for example. ## default: $VAR{[lrms]condor_requirements} #condor_requirements=(OpSys == "linux" && NORDUGRID_RESOURCE && Memory >= 1000 && Memory < 2000) ## slurm_requirements = string - Use this option to specify extra SLURM-specific parameters. ## default: undefined #slurm_requirements=memory on node >> 200 ## CHANGE: NEW in 6.0.0. ## totalcpus = number - Manually sets the number of cpus assigned to the queue. No need to ## specify the parameter in case the queue_node_string method was used to assign ## nodes to the queue (this case it is dynamically calculated and the static ## value is overwritten) or when the queue have access to the entire cluster ## (this case the cluster level totalcpus is the relevant parameter). ## default: undefined #totalcpus=32 ## queue-level configuration parameters: nodecpu, nodememory, architecture, opsys ## should be set if they are homogeneous over the nodes assigned ## to the queue AND they are different from the cluster-level value. ## Their meanings are described in the [infosys/cluster] block. ## Usage: this queue collects nodes with "nodememory=512" while another queue has nodes ## with "nodememory=256" -> don't set the cluster attributes but use the queue-level ## attributes. When the frontend's architecture or cputype agrees with the queue ## nodes, the "adotf" (Automatically Determine On The Frontend) can be used to ## request automatic determination of architecture or nodecpu. ## For GLUE2, fine tune configuration of ExecutionEnvironments' OSName, OSVersion, OSFamily ## is allowed with dedicated options osname,osversion,osfamily. ## nodecpu = formatted_string - see description at [infosys/cluster] block ## default: $VAR{[infosys/cluster]nodecpu} #nodecpu=AMD Duron(tm) Processor @ 700 MHz ## nodememory = number - see description at [infosys/cluster] block ## default: $VAR{[infosys/cluster]nodememory} #nodememory=512 ## defaultmemory = number - The LRMS memory request of job to be set by the LRMS backend ## scripts, if a user submits a job without specifying how much memory should be used. ## The order of precedence is: job description -> [lrms-defaultmemory] -> [queue-defaultmemory]. ## This is the amount of memory (specified in MB) that a job will request. ## default: undefined #defaultmemory=512 ## CHANGE: NEW in 6.0.0. 
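## For illustration, a worked example of the defaultmemory option described above
## (values are placeholders): with the settings below, a job whose description does
## not request any memory is submitted to the LRMS with a 2048 MB memory request,
## while the published per-node guaranteed memory remains 16000 MB:
#nodememory=16000
#defaultmemory=2048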
## architecture = string - see description at [infosys/cluster] block
## default: $VAR{[infosys/cluster]architecture}
#architecture=adotf
## opsys = formatted_string - see description at [infosys/cluster] block
## If osname, osversion are present, the values in opsys are ignored.
## multivalued
## default: $VAR{[infosys/cluster]opsys}
#opsys=Linux-2.6.18
#opsys=glibc-2.5.58
## osname = string - Only for GLUE2
## overrides values defined in opsys for a single ExecutionEnvironment.
## Configuration of multiple ExecutionEnvironments for the same queue
## is not supported. Create a different queue for that.
## default: undefined
#osname=Ubuntu
## osversion = string - Only for GLUE2
## overrides values defined in opsys for a single ExecutionEnvironment.
## Configuration of multiple ExecutionEnvironments for the same queue
## is not supported. Create a different queue for that.
## default: undefined
#osversion=12.04
## osfamily = string - Only for GLUE2
## overrides values defined in opsys for a single ExecutionEnvironment.
## Configuration of multiple ExecutionEnvironments for the same queue
## is not supported. Create a different queue for that.
## default: undefined
#osfamily=linux
## benchmark = name value - Defines resource benchmark results for accounting and
## information publishing. The nodes in the same queue are assumed to be homogeneous
## with respect to the benchmark performance.
## NOTE that in ARC < 6.4 this parameter is only used for information publishing.
## In case multiple benchmarks are specified:
## - Accounting subsystem will use only the FIRST defined benchmark.
## - Infosys will publish all defined benchmark values.
##
## The values represent per-core CPU performance.
## Note that APEL accounting services accept only the "HEPSPEC" or "Si2k" benchmark types.
## multivalued
## default: HEPSPEC 1.0
#benchmark=HEPSPEC 12.26
#benchmark=Si2k 3065
## CHANGE: MODIFIED IN 6.4.0.
## allowaccess = authgroup - (previously groupcfg) Defines that the specified authgroup members
## are authorized to submit jobs to this queue of the ARC-CE once the user has already been granted access to the CE via one of the interfaces.
## A related config option, "denyaccess" (see below), can be used to deny submission to the queue.
## Multiple "allowaccess" and "denyaccess" authorization statements are allowed within a configuration block.
## These statements are processed sequentially in the order they are specified in the
## config block. The processing stops on the first "allowaccess" or "denyaccess" statement matching the authgroup membership.
## If there are no authorization statements specified, then the queue is accessible by everyone already authorized.
## default: undefined
## multivalued
#allowaccess=biousers
#allowaccess=atlasusers
## denyaccess = authgroup - Defines that the specified authgroup members
## are NOT allowed to submit jobs to this queue of the ARC-CE, even though the user is already granted access to the CE via one of the interfaces.
## A related config option, "allowaccess" (see above), can be used to grant job submission to the queue.
## Multiple "allowaccess" and "denyaccess" authorization statements are allowed within a configuration block.
## These statements are processed sequentially in the order they are specified in the
## config block. The processing stops on the first "allowaccess" or "denyaccess" statement matching the authgroup membership.
## If there are no authorization statements specified, then the queue is accessible by everyone already authorized.
## default: undefined
## multivalued
#denyaccess=blacklisted-for-the-queue
## advertisedvo = vo_name - (previously authorizedvo) This attribute is used to advertise
## which VOs are authorized on the [queue:name] of the cluster.
## Add only one VO for each advertisedvo entry. Multiple VOs in the same line
## will cause errors.
## These entries will be shown in the MappingPolicy objects, that is,
## they will apply for the Shares that correspond to the queue.
## The information is also published in the NorduGrid schema.
## NOTE that if you have also configured "advertisedvo" in the [infosys/cluster] block,
## the resulting advertised VOs per queue will override whatever is defined in the [infosys/cluster] block!
## NOTE that it is IMPORTANT to understand that this parameter is NOT enforcing any
## access control, it is just for information publishing!
## multivalued
## default: $VAR{[infosys/cluster]advertisedvo}
#advertisedvo=atlas
#advertisedvo=community.nordugrid.org
## CHANGE: RENAMED in 6.0.0.
## maxslotsperjob = number - This GLUE2 specific parameter configures the MaxSlotsPerJob value
## on a particular queue. This value is usually generated by LRMS infocollectors,
## but there are cases in which a system administrator might like to tweak it.
## Default is to publish what is returned by the LRMS, and if nothing is
## returned, NOT to publish the MaxSlotsPerJob attribute.
## If a system administrator sets the value here, that value will be
## published instead, regardless of what the LRMS returns.
## Each LRMS might have a different meaning for this value.
## default: undefined
#maxslotsperjob=5
## forcedefaultvoms = VOMS_FQAN - specify the VOMS FQAN which the user will be
## assigned if his/her credentials contain no VOMS attributes.
## default: $VAR{[arex]forcedefaultvoms}
#forcedefaultvoms=/vo/group/subgroup
## CHANGE: documented in [queue] as described in [arex].
##
##
## maxcputime = number - This value overrides the one defined in
## the [infosys/cluster] block. See description in that block.
## default: undefined
#maxcputime=300000
## mincputime = number - This value overrides the one defined in
## the [infosys/cluster] block. See description in that block.
## default: undefined
#mincputime=1200
## maxwalltime = number - This value overrides the one defined in
## the [infosys/cluster] block. See description in that block.
## default: undefined
#maxwalltime=600000
## minwalltime = number - This value overrides the one defined in
## the [infosys/cluster] block. See description in that block.
## default: undefined
#minwalltime=1800
### end of the [queue:name] blocks ########################
### The [datadelivery-service] block ###############################
## This block configures and enables the data delivery service. This service is intended to off-load
## data-staging from A-REX and is usually deployed on one or more separate machines.
##
## This service can also act as an independent data transfer service; in that case it would require
## an intelligent data manager that could replace A-REX's intelligence.
##
#[datadelivery-service]
## CHANGE: NEW block in 6.0.0.
## *transfer_dir = path - (previously allowed_dir) The directory (or directories) on the DDS host in which
## the service is allowed to read and write.
## When DDS is used as a remote transfer service assisting A-REX, then this is usually
## one or more cache and/or session directories shared as a common mount with A-REX.
## multivalued
## default: undefined
#transfer_dir=/shared/arc/cache
#transfer_dir=/shared/arc/session
## CHANGE: NEW in 6.0.0.
## hostname = FQDN - The hostname of the machine on which DDS service runs. ## default: $EXEC{hostname -f} #hostname=localhost ## CHANGE: NEW in 6.0.0. ## port = port - Port on which service listens ## default: 443 #port=8443 ## CHANGE: NEW in 6.0.0. ## pidfile = path - pid file of the daemon ## default: /run/arched-datadelivery-service.pid #pidfile=/run/arched-datadelivery-service.pid ## CHANGE: NEW in 6.0.0. ## logfile = path - log file of the daemon ## default: /var/log/arc/datadelivery-service.log #logfile=/tmp/delivery.log ## CHANGE: NEW in 6.0.0. ## loglevel = level - set loglevel of the data delivery service between 0 ## (FATAL) and 5 (DEBUG). Defaults to 3 (INFO). ## allowedvalues: 0 1 2 3 4 5 ## default: 3 #loglevel=4 ## CHANGE: NEW in 6.0.0. ## user = username - Overwrites the user under which the service runs. The default is the user ## starting the service. DDS is very limited if not run as root. ## default: undefined #user=ddsuser ## CHANGE: NEW in 6.0.0. ## secure = yes/no - Set to "no" if the service should run without a host certificate. In this case ## the corresponding deliveryservice option in the [arex/data-staging] A-REX configuration block ## should use http rather than https URLs. ## allowedvalues: yes no ## default: yes #secure=no ## CHANGE: NEW in 6.0.0. ## *allowed_ip = ip - IP address authorized to access service. Normally this is the ## A-REX host IP. By default the delivery service listens on all available ## interfaces, so if both IPv4 and IPv6 are enabled on this and the A-REX host, ## remember to add both A-REX host IPs here. ## multivalued ## default: undefined #allowed_ip=192.0.2.1 #allowed_ip=2001:db8:85a3::8a2e:370:7334 ## CHANGE: NEW in 6.0.0. ## allowed_dn = DN - DN authorized to access service. This option restricts access ## to specified DNs (of the users who submit jobs to A-REX). It is only effective if secure=yes. ## multivalued ## default: undefined #allowed_dn=/O=Grid/O=Big VO/CN=Main Boss ## CHANGE: NEW in 6.0.0. ### X509 related parameters ## x509_host_key = path - Optional parameter to overwrite [common] block values. ## default: $VAR{[common]x509_host_key} #x509_host_key=/etc/grid-security/hostkey.pem ## CHANGE: NEW in this block in 6.0.0. ## x509_host_cert = path - Optional parameter to overwrite [common] block values. ## default: $VAR{[common]x509_host_cert} #x509_host_cert=/etc/grid-security/hostcert.pem ## CHANGE: NEW in this block in 6.0.0. ## x509_cert_dir = path - Optional parameter to overwrite [common] block values. ## default: $VAR{[common]x509_cert_dir} #x509_cert_dir=/etc/grid-security/certificates ## CHANGE: NEW in this block in 6.0.0. ## ## ### end of the [datadelivery-service] block ############## ### The [acix-scanner] (previously [acix/cacheserver]) block ########## ## The ARC Cache Index (ACIX) is a distributed system that maintains ## a catalog of locations of cached files stored in various A-REX caches. ## ACIX consists of two components, the Cache Scanner (on the CEs), and the Index Server. ## ## This config block enables and configures the cache scanner component of ACIX. ## The scanning component of ACIX is a separate service that runs alongside A-REX/DDS and all it needs ## from A-REX/DDS service is the location of the cache. ## ## The acix-scanner periodically scans the cache directories and composes a Bloom ## filter of A-REX cache content which can be pulled via its public interface. ## In the current deployment the ACIX index services are the main consumers of the collected information. 
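## Minimal illustrative setup (hostnames are placeholders): on the A-REX host this block can
## often be enabled with no further options, since cachedir defaults to the value from the
## [arex/cache] block, and a central index server then pulls the filter from this scanner
## via its cachescanner option in the [acix-index] block, e.g.:
## cachescanner=https://ce.example.org:5443/data/cache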
#[acix-scanner]
## CHANGE: RENAMED block, RENAMED component in 6.0.0 (cache scanner, instead of cacheserver)
## cachedir = cache_path - Specifies the cache directory to be scanned in case it is not set in
## the [arex/cache] block (e.g. the scanner is running on a different machine than A-REX)
## multivalued
## default: $VAR{[arex/cache]cachedir}
#cachedir=/scratch/cache
#cachedir=/fs1/cache drain
## logfile = path - Log file location for the acix-scanner.
## default: /var/log/arc/arc-acix-scanner.log
#logfile=/tmp/arc-acix-scanner.log
## CHANGE: MODIFIED default for the logfile.
## hostname = string - Hostname on which the acix-scanner listens
## default: $EXEC{hostname -f}
#hostname=myhost.org
## port = port - Port on which the acix-scanner service listens
## default: 5443
#port=6000
## cachedump = yes/no - Whether to make a dump of the list of files in the cache at
## $TMP/ARC-ACIX/timestamp each time the acix-scanner runs.
## default: no
## allowedvalues: yes no
#cachedump=yes
### X509 related parameters
## x509_host_key = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_host_key}
#x509_host_key=/etc/grid-security/hostkey.pem
## CHANGE: NEW in this block from 6.0.0.
## x509_host_cert = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_host_cert}
#x509_host_cert=/etc/grid-security/hostcert.pem
## CHANGE: NEW in this block from 6.0.0.
## x509_cert_dir = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_cert_dir}
#x509_cert_dir=/etc/grid-security/certificates
## CHANGE: NEW in this block from 6.0.0.
##
##
### end of the [acix-scanner] block ############################
### The [acix-index] (previously [acix/indexserver]) block #################################
## The ARC Cache Index (ACIX) is a distributed system that maintains
## a catalog of locations of cached files stored in various A-REX caches.
## ACIX consists of two components, the Cache Scanner (on the CEs), and the Index Server.
## This config block enables and configures the index server component of ACIX.
##
## The index server component of ACIX collects cache content filters generated by a set of
## acix-scanners and maintains an aggregated view of distributed cache contents.
##
## The acix-index server is deployed separately and can be queried for the location of cached files.
## The service endpoint is https://hostname:6443/data/index and a query is
## performed by giving the URLs to check as comma-separated values to the "url" option, e.g.:
## "index_service_endpoint?url=http://www.nordugrid.org:80/data/echo.sh,http://my.host/data1"
#[acix-index]
## *cachescanner = url - (previously cacheserver) ACIX cache scanners from which to pull information
## multivalued
## default: undefined
#cachescanner=https://some.host:5443/data/cache
#cachescanner=https://another.host:5443/data/cache
## CHANGE: RENAMED in 6.0.0.
### X509 related parameters
## x509_host_key = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_host_key}
#x509_host_key=/etc/grid-security/hostkey.pem
## CHANGE: NEW in this block from 6.0.0.
## x509_host_cert = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_host_cert}
#x509_host_cert=/etc/grid-security/hostcert.pem
## CHANGE: NEW in this block from 6.0.0.
## x509_cert_dir = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_cert_dir}
#x509_cert_dir=/etc/grid-security/certificates
## CHANGE: NEW in this block from 6.0.0.
##
##
### end of the [acix-index] block ############################
### The [userlist:name] (previously [vo]) blocks ################################
## The [userlist:name] blocks are used to define userlists and configure how those are
## generated by the nordugridmap utility, including the optional user mapping information.
## The userlist is identified by the generated file that is stored in the "outfile".
## Please note that the behaviour of the nordugridmap external utility can be further modified by
## the optional [nordugridmap] block (see next block).
## Note that the [userlist:name] block by itself does not affect authorization. In order to define
## auth rules userlists can be referenced within the [authgroup] blocks by the userlist=name
## parameter.
## Also, the generated "outfile" can be used via the 'file' auth rule of the [authgroup] block.
## The order of this block in arc.conf may influence the authorization decision. This block must
## appear before the [arex] block.
#[userlist:biousers]
## CHANGE: RENAMED block in 6.0.0: the blockname now better reflects the purpose of the config block. This block is not defining any VO!
## outfile = path - (previously file) The full path of the GENERATED file that contains the
## userlist (with optional mapping info).
## If the same file is specified as output for different [userlist:name] blocks,
## then nordugridmap will automatically merge entries following the order of the blocks.
## default: /etc/grid-security/grid-mapfile
#outfile=/etc/grid-security/lists/atlas-users
## CHANGE: RENAMED in 6.0.0.
## *source = url - the URL of the VO database which is used to generate the userlist.
## nordugridmap will use this URL to automatically generate and keep
## up to date the userlist (mapfile) specified by the 'outfile' attribute.
##
## url is a multivalued attribute, several sources can be specified
## and all the users from those sources will be merged into the same file.
## The source URLs are processed in the given order.
##
## Currently supported URL types are:
## "http(s)://"
##     URL to plain text file. File should contain a list of DNs.
## "voms(s)://"
##     URL to VOMS-Admin interface
## "file://"
##     local file (stand-alone or dynamically generated by
##     "nordugridmap"). File should contain a list of DNs with
##     optional mapped unixid: "user DN" [mapped user ID]
##     The result of the optional mapped unixid processing depends
##     on the "mapuser_processing" option settings.
## "userlist://"
##     reference to another [userlist:name] configuration block
## "nordugrid"
##     add NorduGrid VO members
##
## You can use either "userlist://" or "file://" entries to specify dependencies
## between [userlist:name] blocks, but using "userlist://" is the recommended way.
##
## For each separate source URL it is possible to override some parameter values.
## You can use the following syntax to do this:
## "source=URL < parameter1=value1 parameter2=value2"
## You can override the following parameters:
## "mapped_unixid"
##     for http(s), voms(s), ldap and file URLs
## "cache_enable"
##     for http(s), voms(s), ldap and file URLs
## "voms_method"
##     for voms(s) URLs
## "mapuser_processing"
##     for file URLs with "mapped_unixid=''" overridden
##     (controls the "mapped_unixid" overriding behaviour for the URL)
## multivalued
## default: undefined
#source=vomss://voms.ndgf.org:8443/voms/nordugrid.org
#source=vomss://lcg-voms.cern.ch:8443/voms/atlas?/atlas/Role=VO-Admin < mapped_unixid=atlasadmin
#source=vomss://kuiken.nikhef.nl:8443/voms/gin.ggf.org < voms_method=get
#source=http://www.nordugrid.org/developers.dn
#source=file:///etc/grid-security/priviliged_users.dn
#source=userlist://biousers
#source=nordugrid
## CHANGE: MODIFIED options in 6.0.0: the edg-mkgridmap source option is removed (obsolete technology); the vo source option is renamed to userlist.
## mapped_unixid = unixid - The local UNIXID which is optionally used in the generated
## outfile by the nordugridmap utility.
##
## If any of the sources have already provided mapping information (file://
## or userlist://), the behavior depends on 'mapuser_processing' from the [nordugridmap] block:
## "mapuser_processing = overwrite"
##     ignore the already provided mapping and
##     apply "mapped_unixid" to all sources
## "mapuser_processing = keep"
##     apply mapped_unixid only to sources that
##     do not already have mapping information
##
## If the "mapped_unixid" config parameter is not specified or has an empty value,
## then the behavior depends on the value of "allow_empty_unixid" from the
## [nordugridmap] block:
## "allow_empty_unixid = yes"
##     an empty value will be used for "mapped_unixid", which means that
##     nordugridmap will generate only the list of DNs without mapping
##     (consider using "mapuser_processing = overwrite" along with this
##     option for sources that do not provide previously defined mapping
##     information)
## "allow_empty_unixid = no"
##     nordugridmap will skip users without mapping information
##     (if no mapping information is provided by the sources)
## default: nobody
#mapped_unixid=
#mapped_unixid=gridtest
## CHANGE: MODIFIED in 6.0.0. The empty value should work the same way as a missing parameter. Set the default to "nobody".
## voms_fqan_map = fqan unixid - The local UNIXID which is used to map voms(s)
## sources with the specific FQAN given.
##
## Several voms_fqan_map entries can be specified for a [userlist:name] block.
## For each voms(s) source in the [userlist:name] block and every voms_fqan_map record
## a separate source record will be automatically generated with mapped_unixid
## overwritten to the specified one.
##
## Sources are generated in the given voms_fqan_map order. The original voms(s) source
## URLs are processed LAST.
##
## This allows simplifying the configuration, especially in redundancy cases when
## several VOMS servers are used for the same VO.
## multivalued
## default: undefined
#voms_fqan_map=/atlas/Role=VO-Admin atlasadmin
#voms_fqan_map=/atlas/Role=production atlasprod
## filter = ACL string - An ACL filter for the nordugridmap utility. Multiple
## allow/deny statements are possible. The fetched DNs are filtered against
## the specified rules before they are added to the generated outfile.
##
## "*" can be used as a wildcard. You may run nordugridmap with the "--test"
## command line option to see how the filters you specified work.
##
## If at least one allow filter is specified, an implicit deny is used at the end
## of the ACL. If only deny filters are present, an implicit allow is used at the end.
## multivalued
## default: undefined
#filter=deny *infn*
#filter=allow *NorduGrid*
##
##
### end of the [userlist:name] block ######################################
### The [nordugridmap] block ###########################################
## This optional block is used to fine-tune the behavior of the
## nordugridmap utility - an ARC tool used to generate grid-mapfiles.
## Normal setups don't need to configure this block.
## Please refer to the [userlist:name] blocks above for information on how
## to specify sources for userlist generation. This block sets up
## general source-independent parameters.
#[nordugridmap]
### X509 related parameters
## x509_host_key = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_host_key}
#x509_host_key=/etc/grid-security/hostkey.pem
## CHANGE: RENAMED in 6.0.0.
## x509_host_cert = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_host_cert}
#x509_host_cert=/etc/grid-security/hostcert.pem
## CHANGE: RENAMED in 6.0.0.
## x509_cert_dir = path - Optional parameter to overwrite [common] block values.
## default: $VAR{[common]x509_cert_dir}
#x509_cert_dir=/etc/grid-security/certificates
## gridmap_owner = username - The owner of the generated mapfiles.
## default: root
#gridmap_owner=root
## gridmap_group = groupname - The group of the generated gridmapfiles.
## default: root
#gridmap_group=root
## gridmap_permissions = filemode - The permissions of the generated gridmapfiles.
## default: 0600
#gridmap_permissions=0600
## log_to_file = yes/no - controls whether the logging output of nordugridmap
## will be saved to a file. If the value is 'no', nordugridmap will write all
## information to STDERR.
## allowedvalues: yes no
## default: yes
#log_to_file=no
## logfile = path - specifies the nordugridmap log file location when log_to_file is set to yes.
## default: /var/log/arc/nordugridmap.log
#logfile=/var/log/arc/nordugridmap.log
## cache_enable = yes/no - Controls whether caching of external sources will be used.
## allowedvalues: yes no
## default: yes
#cache_enable=yes
## cachedir = path - Specifies the path where cached sources will be stored.
## default: /var/spool/arc/gridmapcache/
#cachedir=/var/spool/arc/gridmapcache/
## cachetime = seconds - Controls how long (in seconds) the cached information
## remains valid. Default is 259200 (3 days).
## default: 259200
#cachetime=259200
## mapuser_processing = overwrite/keep - Controls the behavior of the [userlist:name] block's mapped_unixid
## parameter usage.
## Please see the 'mapped_unixid' description in the [userlist:name] block for details.
## allowedvalues: keep overwrite
## default: keep
#mapuser_processing=keep
## allow_empty_unixid = yes/no - Controls whether an empty (or unspecified)
## 'mapped_unixid' [userlist:name] block option is allowed to be used.
## Please see the 'mapped_unixid' description for details.
## allowedvalues: yes no
## default: yes
#allow_empty_unixid=no
## CHANGE: MODIFIED in 6.0.0.
## voms_method = soap/get - Controls how to get information from VOMS(S) sources.
## Valid values are:
##     soap - call the SOAP method directly using SOAP::Lite
##     get  - use the old implementation that manually parses the XML response
## allowedvalues: soap get
## default: soap
#voms_method=soap
## loglevel = level - (previously debug) Controls the verbosity of nordugridmap output.
## Valid values are:
##     0 - FATAL   - only critical fatal errors are shown
##     1 - ERROR   - errors, including non-critical ones, are shown
##     2 - WARNING (default) - configuration errors that can be ignored
##     3 - INFO    - processing information
##     4 - VERBOSE - a bit more processing information
##     5 - DEBUG   - a lot of processing information
##
## When a test run is requested (the --test command line option of
## nordugridmap) the loglevel is automatically set to 5 (DEBUG).
## allowedvalues: 0 1 2 3 4 5
## default: 2
#loglevel=4
## CHANGE: RENAMED in 6.0.0.
## fetch_timeout = seconds - Controls how long (in seconds) nordugridmap will
## wait for external source retrieval.
## default: 15
#fetch_timeout=15
##
##
### end of the [nordugridmap] block ##################################
### The [custom:name] block ###################################################
## This optional block is for those who wish to include non-ARC configuration
## in arc.conf. Custom blocks will be ignored by ARC components including the
## configuration validator. Any non-ARC configuration which is not in a
## custom block will be flagged as an error by the validator and A-REX will not
## start.
#[custom:mytool]
##
### end of the [custom] block #################################################
nordugrid-arc-6.14.0/src/doc/arc.conf.DELETED-6.8.0
### The [arex/jura] block ###################################
#[arex/jura]
## urdelivery_keepfailed = days - (deprecated in 6.4.0) Specifies for how many days
## JURA will try to send a record to the destination accounting service before it
## gives up. Records not successfully sent after this number of days has expired
## will be deleted from the controldir/logs directory.
## The deleted records are nevertheless archived if archiving was turned on.
## Starting from 6.4 all records are stored in the accounting database without
## expiration time, independently of the publishing process.
## default: 30
#urdelivery_keepfailed=30
## CHANGE: DEPRECATED in 6.4.0
##
### end of the [arex/jura] block ########################################
### The [arex/jura/sgas:targetname] blocks ##############################
#[arex/jura/sgas:neic_sgas]
## legacy_fallback = yes/no - (introduced in 6.4.0) If set to "yes" then accounting record
## publishing to this target will be handled by the previous version of the code (6.0-6.3)
## that relies on A-REX job log files. Use this option only if you experience problems
## with the new Jura.
## default: no
#legacy_fallback=no
## CHANGE: NEW in 6.4.0.
##
### end of the [arex/jura/sgas:targetname] blocks ########################
### The [arex/jura/apel:targetname] blocks ###############################
#[arex/jura/apel:egi_prod_apel]
## use_ssl = yes/no - (deprecated in 6.4.0) Turns on/off ssl for the SSM
## communication with the APEL server. Use only with ARC version < 6.4.0.
## In ARC 6.4+ the "targeturl" protocol defines the usage of SSL.
## allowedvalues: yes no
## default: no
#use_ssl=yes
## CHANGE: DEPRECATED in 6.4.0.
## benchmark_type = type - (deprecated in 6.4.0) Type of benchmark (Si2k, HEPSPEC)
## to be reported in every UR.
## Starting from 6.4 there is no need to specify the benchmark values here, as those will be
## taken from the [queue] block "benchmark" option.
## default: undefined
#benchmark_type=HEPSPEC
## CHANGE: DEPRECATED in 6.4.0.
## benchmark_value = number - (deprecated in 6.4.0) The value of the benchmark
## to be reported in every UR.
## Starting from 6.4 there is no need to specify the benchmark values here, as those will be
## taken from the [queue] block "benchmark" option.
## default: undefined
#benchmark_value=2.4
## CHANGE: DEPRECATED in 6.4.0.
## benchmark_description = string - (deprecated in 6.4.0) Additional description
## for the benchmark to be reported in every UR.
## default: undefined
#benchmark_description=some description for benchmark
## CHANGE: DEPRECATED in 6.4.0.
## legacy_fallback = yes/no - (introduced in 6.4.0) If set to "yes" then accounting record
## publishing to this target will be handled by the previous version of the code (6.0-6.3)
## that relies on A-REX job log files. Use this option only if you experience problems
## with the new Jura.
## default: no
#legacy_fallback=no
## CHANGE: NEW in 6.4.0.
##
### end of the [arex/jura/apel:targetname] blocks ############################
### The [arex/jura/archiving] block ##############################
## This block enables the archiving functionality of the pre-6.4 jura implementation.
##
## NOTE that starting from ARC 6.4.0, with the next generation accounting subsystem
## introduction, this block only takes action if the "legacy_fallback" option
## is set to "yes" for a target. In 6.4 all accounting data is stored in the database,
## eliminating the need for any additional record archiving.
##
## When enabled, the usage records generated by JURA are stored
## in the specified "archivedir" directory on disk.
##
## Stored records are then processed by the accounting archive manager,
## which processes the records, stores data into the SQLite accounting archive database,
## creates the new archive structure and maintains the accounting database aging.
##
## By default the archiving is turned off.
#[arex/jura/archiving]
## CHANGE: DEPRECATED in 6.4.0.
## archivedir = path - Sets the directory path for the jura archived usage records.
## Usage records generated by the jura reporter are stored in this directory with
## the following file naming convention used:
## "usagerecord..".
## Here Type is one of the supported record formats such as OGFUR or CAR.
## default: /var/spool/arc/jura/archive
#archivedir=/var/spool/arc/jura/archive
## logfile = path - The name of the accounting database manager logfile.
## default: /var/log/arc/jura-archive-manager.log
#logfile=/var/log/arc/jura-archive-manager.log
## archivettl = days - The records time to live (ttl) parameter
## sets the number of days to keep the usage record files in the accounting database.
## If not specified, the files are kept forever.
## default: undefined
#archivettl=365
##
### end of the [arex/jura/archiving] block ########################
nordugrid-arc-6.14.0/src/Makefile.in
# Makefile.in generated by automake 1.13.4 from Makefile.am.
# @configure_input@
# Copyright (C) 1994-2013 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ 
install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = 
@ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = 
@LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename 
= @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ @DOC_ENABLED_FALSE@@HED_ENABLED_TRUE@BUILD_SOURCES = external doc hed libs tests services clients utils wn @DOC_ENABLED_TRUE@@HED_ENABLED_TRUE@BUILD_SOURCES = external doc hed libs tests services clients utils wn doxygen @HED_ENABLED_FALSE@BUILD_SOURCES = SUBDIRS = $(BUILD_SOURCES) DIST_SUBDIRS = external doc hed libs tests services clients utils doxygen wn all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. 
$(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! -f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
@HED_ENABLED_FALSE@uninstall-local: @HED_ENABLED_FALSE@install-exec-hook: clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) install-exec-hook install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-local .MAKE: $(am__recursive_targets) install-am install-exec-am \ install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-exec-hook \ install-html install-html-am install-info install-info-am \ install-man install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am uninstall-local # This won't work in case of cross-compilation. Please # some autotools experts fix it. @HED_ENABLED_TRUE@install-exec-hook: @HED_ENABLED_TRUE@ if test "x$(build_triplet)" = "x$(host_triplet)"; then env LD_LIBRARY_PATH=$(DESTDIR)$(libdir):$(LD_LIBRARY_PATH) $(top_builddir)/src/utils/hed/arcplugin$(EXEEXT) -c $(DESTDIR)$(pkglibdir) -c $(DESTDIR)$(pkglibdir)/test -c $(DESTDIR)$(pkglibdir)/external; else echo "No .apd files since we are cross-compiling"; fi @HED_ENABLED_TRUE@uninstall-local: @HED_ENABLED_TRUE@ test "x$(build_triplet)" = "x$(host_triplet)" && rm -f $(DESTDIR)$(pkglibdir)/*.apd $(DESTDIR)$(pkglibdir)/test/*.apd $(DESTDIR)$(pkglibdir)/external/*.apd # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/PaxHeaders.30264/doxygen0000644000000000000000000000013214152153475017533 xustar000000000000000030 mtime=1638455101.327591405 30 atime=1638455103.996631509 30 ctime=1638455101.327591405 nordugrid-arc-6.14.0/src/doxygen/0000755000175000002070000000000014152153475017575 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/doxygen/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376021644 xustar000000000000000030 mtime=1638455038.342645028 30 atime=1638455038.475647026 30 ctime=1638455101.321591315 nordugrid-arc-6.14.0/src/doxygen/Makefile.am0000644000175000002070000000660614152153376021641 0ustar00mockbuildmock00000000000000SWIG_DEPENDS = \ $(top_srcdir)/swig/common.i \ $(top_srcdir)/swig/credential.i \ $(top_srcdir)/swig/data.i \ $(top_srcdir)/swig/compute.i INPUT = $(top_srcdir)/src/hed/libs/common \ $(top_srcdir)/src/hed/libs/credential \ $(top_srcdir)/src/hed/libs/data \ $(top_srcdir)/src/libs/data-staging \ $(top_srcdir)/src/hed/libs/compute CPP_EXAMPLES = $(top_srcdir)/src/hed/libs/common/examples \ $(top_srcdir)/src/hed/libs/credential/examples \ $(top_srcdir)/src/hed/libs/data/examples \ $(top_srcdir)/src/libs/data-staging/examples \ $(top_srcdir)/src/hed/libs/compute/examples PYTHON_EXAMPLES = $(top_srcdir)/python/examples SPECIALISATION_MAPPINGS = JobState JobDescription SPECIALISATION_MAPPINGS_JobState = \ $(top_srcdir)/src/hed/acc/GRIDFTPJOB/JobStateGRIDFTPJOB.cpp \ $(top_srcdir)/src/hed/acc/EMIES/JobStateEMIES.cpp SPECIALISATION_MAPPINGS_JobDescription = \ $(top_srcdir)/src/hed/acc/JobDescriptionParser/XRSLParser.cpp \ $(top_srcdir)/src/hed/acc/JobDescriptionParser/ADLParser.cpp .SECONDEXPANSION: $(addsuffix _Mapping.dox, $(SPECIALISATION_MAPPINGS)): %_Mapping.dox: $(srcdir)/create-mapping-documentation.py $(top_srcdir)/src/hed/libs/compute/%.h $$(SPECIALISATION_MAPPINGS_%) $(PYTHON) $^ $*_Mapping.dox Doxyfile.SDK.build: $(top_srcdir)/src/doxygen/Doxyfile.SDK cp $(srcdir)/Doxyfile.SDK Doxyfile.SDK.build sed "s/@TOP_SRCDIR@/$(subst /,\/,$(top_srcdir))/g" Doxyfile.SDK.build > Doxyfile.SDK.build.tmp mv Doxyfile.SDK.build.tmp Doxyfile.SDK.build sed "s/@INPUT@/$(subst /,\/,$(INPUT) $(addsuffix _Mapping.dox, $(SPECIALISATION_MAPPINGS)))/g" Doxyfile.SDK.build > Doxyfile.SDK.build.tmp mv Doxyfile.SDK.build.tmp Doxyfile.SDK.build sed "s/@EXAMPLES@/$(subst /,\/,$(CPP_EXAMPLES) $(PYTHON_EXAMPLES))/g" Doxyfile.SDK.build > Doxyfile.SDK.build.tmp mv Doxyfile.SDK.build.tmp Doxyfile.SDK.build sed "s/Doxyfile.SDK.layout.xml/Doxyfile.SDK.build.layout.xml/g" Doxyfile.SDK.build > Doxyfile.SDK.build.tmp mv Doxyfile.SDK.build.tmp Doxyfile.SDK.build for mapping in $(SPECIALISATION_MAPPINGS); do \ sed "s/^FILTER_PATTERNS[[:space:]]*=/& *\/$${mapping}.h=$(subst /,\/,$(srcdir))\/adapt-and-filter-mapping-attributes.sed/g" Doxyfile.SDK.build > Doxyfile.SDK.build.tmp;\ mv Doxyfile.SDK.build.tmp Doxyfile.SDK.build;\ done Doxyfile.SDK.build.layout.xml: $(top_srcdir)/src/doxygen/Doxyfile.SDK.layout.xml cp $(srcdir)/Doxyfile.SDK.layout.xml Doxyfile.SDK.build.layout.xml SDKDEPENDENCIES = Doxyfile.SDK.build Doxyfile.SDK.build.layout.xml \ $(srcdir)/add-bindings-deviations-to-dox.py \ $(srcdir)/images/arcsdk.png \ $(srcdir)/adapt-and-filter-mapping-attributes.sed \ $(SWIG_DEPENDS) \ $(wildcard $(addsuffix /*.h, $(INPUT))) \ $(wildcard $(addsuffix /*.cpp, $(CPP_EXAMPLES))) \ $(wildcard $(addsuffix /*.py, $(PYTHON_EXAMPLES))) \ $(addsuffix _Mapping.dox, $(SPECIALISATION_MAPPINGS)) SDK: $(SDKDEPENDENCIES) doxygen -v | awk -F . 
'{ exit !($$1 >= 2 || $$1 == 1 && $$2 >= 8) }' || (echo "doxygen version 1.8.0 or greater required (version $$(doxygen -v) found)" && exit 1) doxygen Doxyfile.SDK.build # Postprocessing: Add deviations from SDK API for language bindings (Python). for file in $(SWIG_DEPENDS); do $(PYTHON) $(srcdir)/add-bindings-deviations-to-dox.py $${file} SDK/html; done EXTRA_DIST = Doxyfile.SDK Doxyfile.SDK.layout.xml images/arcsdk.png \ add-bindings-deviations-to-dox.py \ adapt-and-filter-mapping-attributes.sed \ create-mapping-documentation.py CLEANFILES = SDK
nordugrid-arc-6.14.0/src/doxygen/images/
nordugrid-arc-6.14.0/src/doxygen/images/arcsdk.png
[binary PNG image data omitted]
UäT¦©H†áŸ©œn;•–ƒT˜ ¨´nˆ&=f<éÑ㉠Kêâ €öÁÖê¢|g«ß²” árWØÚå&ôèQš+íìû²‘Œé‘X-.J¶µ ÖJ„Åèh­s`oï»›A0¥;Z°µ¹Ñ…¨™³"‰Úƒ˜+lø¶‰s2ê¸û‡˜´L^Ƕ·kèlqÑÞä$ò\Æçîº^“r Úñ"i{o1îûò/ygÇt:ƒÏd?%u7Îþ†˜ ëa8:íí»Må6Vÿo™ß_WK€½ÃÏ+ßlä]’€F§Rnà ¾mÂëñ‘3?S’þèöž¿àh-Vμ(ÔZ•2Ž x³Eé§|<=[zë!ô.ĤUj–Íâü >h®”߬Y‘AoÎ{jorâõøÐT¤N 'qL()‚{Žßüµ{‡I%‘”æWûÌÁuf Ö4)ÍUöÁ½®cTìêþ7„Ÿö:’±Z\8;åëRô‚ƒ½_4²öï¼wßAšÊå÷4}ZàM¡Õâ ¸7•÷+æCiq·0•Zb̹KSáwfåz `k•­A¥Œ/Ø¿ºŸOþü[<ý …ãó9q|Îý¿é58pW¿‡}ãB'N<©Ï/œYN{€ŸÊE¿Î º®g€  Q3qi,×™)Þ,÷MJLš]¨§u`?0×7+ÿ_ó|E@ÚÆƒëÍý ê‹»kØz·p¶HûÖ:Ž£7±âü úPµœÀ‡r3ߥÃì¤`­|Ó9vaŒ_€Ð­µ@Nu“Ö¿÷<-/U2"~ðiPk ýkU#ô\pgjíð$jҢѩ”±ý5yym N¥ûP|vjm`0£å²F÷ú{ôpÀúƒëš{ògF¢F­•È]ÃîOëÙ÷¥XN¸ $ùsfku|Æ„ÁqW¾‰§ñ»íãswàÌ¿ý¬wÖµ¶¶rÿý÷ó /àõzýÖEDDÐØØô9££åŒƒ*•JùÿÉtá…²qãFîºë.üñ“þü•_½­¥Ÿ h—ÇÉk[ä7Ë^G%u_{>üðC~ö³Ÿl¯ÑhX±b/¿ü2zý©O#ßÿhµZL&Óq¶FºaÕ¾ÞóC«W§#!'TéƒÛ•¾T«?Úâ0€ŸŠ7uæÀ9J:±Ô؉Jî½V±±´“½G3zD%†}ÿåSÅçC °¬—ÒßÛÙ÷GÊÙéaë•ò ’ÅøƒaC£S­¥Ã좦°÷n ƒá=ÚbävùäìHýh8ZòËŒãvsê¯Q“#H"§9MÖ“46¬_erÄ¡¦ ƒÒ£sÄ$óÿÎ;ðä¹DÂãô|ø`ÍUv6¿^MÊ„ð]¯œ6e;äÃçóÏ0×¥|w+Žú0µrýÖ½>çžÞϺӞŽ9GnÐäa ×o¡;®Â‡·kÓÞúþOƒÚ¯Ü¼Ÿ­¥Ÿ2?ûŠ ë—-[†^¯g×®]TUUñÏþ½^ÏË/¿|"Åí—Å‹STTDhh¨_Ë…pv:í‚¥ÊÆÆ®ÄæÝœ‚ÓÞýƒ¡>š½dÒ…qÔ[ Ò’5KŽh»º$9;½ÏÓ›š‚vå&ö‚ÿÈ qlw¥­ÅÍû¿+Þl H}¸ÿ›&4z­uvŽìiÇçõ¡RK,¾=0E§ÇåãÛçÊý–…˜´Ì»iä|ÙÜ/ïÞ{QS"ÒRþC÷ ÇøìÀ¦ßšÂv¼WK‡Yî’ÐÙ*¿y—ÄäRØ÷y‡·øçhŸuMRÀäjÂÉ—9ÃDþêFJ:ÙóYy—žœ&ì®TÆö67Õí¤L>7©’5+ÒoÞ’3Éœë“ùè÷‡ðy}üëO%¤O‹ :ÕˆÛé¥þ°•øìPæÞ˜‰H­•Xðï£øä‘bœ6[߬öËôÐÙêz=žvE"UûÛ•lQWü!‡ðŠÍå6¾øïR¼n%Û-Œ??çÑîi]×çðX3")ÛÑʸs£Ñå–_ÍÑ– Ñ‚pâ¼mûñ¹Û½¿§y›_€ðöÛo+ÁÁÏþsžþy4ù6Áív+ã ‚©¨¨àþûïàÉ'Ÿ$=]Nƒk³ÙøÓŸþÄš5kp:̘1ƒ|ädù3ÛÐÐÀ³Ï>ËþýûihhÀáp““ÃM7ÝÄ¥—^pœ?þ˜ÒÒRî¸ã,X0è×?XMÕX:ë½qÃÎ^„W^y…””<çw6là“O>é5@ÈÏÏç±Çà/ù ñññ¬]»–_|ßSéhm^]]O?ý4»ví¢®®Ž¨¨(&L˜À­·ÞÊìÙ³),,äü#ýë_‰‹‹ã›o¾áý÷ß§´´”¦¦&"""˜“»ÌDEEñôÓO+ÁÈÝ]{ŸÅb±°jÕ*î»ï>ÒÓÓ±ÛíÌž=›üü|e»~øwÞy‡üü|RSS))) è2´{÷nÞ}÷]^zé%~úÓŸú­+,,¤°°€‹.ºhH„Šæ‚Ûß|üýÕj5 ò^ss3‡#èvuuuÊyüñlj§¤¤DYöÖ[o!IûöíãÜsÏ¥µÕ?kÙ¦M›ˆgöìÙ466*ûýéO"..Ž·ß~›•+Wúí³aÃÞ~ûmŠ‹‹‰Š œ‡EN[€»¸ï™KÕQ)e¦®A’äøždž1ýÊDT=zŒ]Cò„ð^ÓæE22ýÊDLIú€æx³ktݘ:¬â³C˜½"™ö&'.›QMD‚žèT)ÃÆ/Œš<%#€!LÝëë>]…V¯¢îk³“Ð(-qÙ!Œ™í×mãœK¥¥Ö®ôMÓ™¨'uR8†0ÿ^H”¦Ï ¸ŽÍ¿/œZƒ<[ñÆ•UÙÓF{£Sžð(IÂo\@Â5,½+ƒïþïíMNÊwµ'A1öÜhR'†S¶£K­ƒN‹‹(-‘=&žŒNí¾~öÌÎ4{E’Rûßy4ÓPæ¬H"“zo‘3DhHÎ %ql( Ù!soLÁ\iC’䊑ìÙ&â²B”,s _Ç mÂ’XÚ›œý‹"ôÎÛ^xBûûŽÙÿÀLœ8ñ¤ ~òÉ'ÉÏÏ'66–÷Þ{¨¨(n¼ñF8ÀÿøG^zé%¿íßyçt:÷Þ{/‡âé§Ÿ®ºê*nºé&¦OŸ~ÂeŒš–Þ“+ôG][9^Ÿ•xoÐÐÐ@]];vìàƒ>`ܸq'<á¿ø­­­$''óç?ÿ™%K–0sæL*+[ƒ™2e /¾ø"ß~û-<ðf³™×^{»ï¾û„Ê% _§-@èOó~\fHÐZùcÅg‡äd{n߃¢&/ï{F×” á~Ýôajò.é÷ŠÔIá~ùÙG2I%‘=ÇDöœ¾1õgÀw—H-3Ž3C¯pz„ÅèXþ«,Úê˜+ít49ч© 1i‰ËA4àzi¿ÁÌI¹a\ûçqÔ[±Ô8PI%ZîÚ.{¶‰Ø ùƱgMtoÆŸKZ^DРß®áÒ£pM}Ü Ÿ)B£µÊ„ƒÁD¥˜‘ø 1~·²fšÈšÙ÷ñ¦]Ñ÷÷1sf$™3»[ Ç,èßÀÔã]«…þ“BŽ?wOŸûý÷on–ÇéEFžœyºÒeÎ;—––ZZZ˜3gà‡~Ø~êÔ©Œ3†}ûöñÐCQRR°Í˜1c¸âŠàÝsN—˜ÐëZc2Æ ¦M›æ÷X¥Rð l‡ÃÁ¶mÛ¸õÖ[¹ñÆy0rEFF2kÖ,fÍšÅóÏ?OMMMÐ÷G9†Õ eA†ˆ}Ÿã>LÉL½üN&Œ^û¨Ñ©‚ñžB£µj-Šˆ×õÚj¨ÒH$çù­)ö IDATŠAðÂÈ¥Š˜|bûGú•EUU•ÒÇÿDr×¥®îK]jjjzÝoôh9°w:8Î^F•´èñ'´zLnŸëõz=IIIÌ;—;ùóç÷ëy{›¾©© GnÉ3fÌÀ DNN555´·~ü‹0ü‰AAÎ@ªÈ<”ÜăÚªßã &°aà øúë¯YºtéqŸÃíî}>¢¸¸8š››™3gwÝu—ߺ¾º0©TÝu»‚„¶¶Þ'$;]’MÙhÕz\žàãŽ'=zB¯ëJJJÈÊ ž ¾§®óÞ³µ§¼¼œììÀ9i’’’Ðét8N>ÿüsnºé&T*U@Ûþ öþ#x—Aá $é¢Ð¤ß:¸}CÒФ^ã·ì׿þ5ƒœqìòË/çø_}õß~û-Ï<ó ¿úÕ¯y>„.«V­Âj L,pî¹çòÀ䦦&¦M›ÆìÙ³1Ì;w@åíJ»ùÕW_±}ûvÊÊÊ8räÈ€žãdQIj.šøÓãoD„1†ss®îu}_c zž÷wß}«ÕJvv6jµÜ]éïÿ;JW1¥¼*?þñ•ý233™4iåååƒz ÂÙA‚ ‚p†Ò{)$s€{Iè'? 
jÿZüÌÌLžyæt:v»‡~˜åË—sÁp÷Ýwóì³Ïb6›7nœ2‹ï<ÀçŸô(?ü0‰‰‰¸Ýnî¼óNÆŽKVVW\qEÐ1}¹á†¹†}Μ9deeñ / ðuŸ<Ë&ü;1Ÿiø¦ÙªÜ1cÆ(™¤|ðA>ýôSbbbX±b|ð&LàÞ{ï Ø÷¿ÿû¿¹á†$‰#GŽpðàA%K•h‚]ŒAáL¥6¢Ÿúw?ÜŒÏ|–c?’ í˜ûPE¯Á¿í¶Û8çœsxòÉ')(( ´´”¨¨(ÒÒÒ¸øâ‹Ñjµ„„„°zõjüq ˜2e jµš… &ýILLT2mÞ¼™ââb “&MRf파Tö3å ʲ®þ?ûÙϰÙl¼ùæ›9r„ØØXRSSÞNJRqë¼GøËÚ;0[{OÑEBbÙ„39eaÀº¤¤$åõöÕ‚`4Y½z5=ö˜rÞ^}õUÒÓÓùæ›o¨¨¨ 22’ÄÄDòòò”óÁ›o¾ÉK/½DEEaaaäææâv»•Ö“ɤ”£«%iìØ±,\¸¼¼<¥S¦LÁëõ’›Û÷X áÌ&}ýõj߬YýKæpwòè·×átÛNq±•¤æÞóÿI¤!xÖ’·v=ÊÞšu§µLg«K'ÜÎ9™WõkÛ¦&3V«É“Olð`—Ý»wc"2²÷Á=}vàol*ûð¤[èÛ¢ì,Ï ÞÍàˆ¥ç7ßyšKtvŠ2&pïùoö{ûmÛv°lÙ…§°DCÃç´à,¸OÍǽn£ ÍF7ùYTQ3NcÉF6»ËÊû»žfÓáÞ¯»Q!‰üÛ܇ÈMœ}Kæï믿fûöíLœ8§ÓÉ?þñeR¼;wdO„µ è5!,s _½†Óc?Ue:ëiTZÎɺª×à`Ñèë9Òrð„fsŽ/Õ4–Y£.êbôÛ9YWQÔ¸ƒÆŽþå¶'!<ù™?êu}ZT.SS–°·f-^ßà ÇgÔ†sḟ u1†I…>ï¼i·à1oÁÛºoû$C2ªÈ)¨"óÐ$^jÃPuD1hC¹iöƒÌ˾œ¢ºï©h. ²¹ˆpC4é1ãIÏ´´%´'>¯Ä‰øì³Ïxî¹ç–ßyç"8‚P B§ÇN]{Ù`'}‘ >, ƒæø>êÚÊIAè[˜ÞDtHÒñ7ìa¨[ºÔ·Wàpwž”2þŒÚ0âÂú—¾ÝÑŒ¥³þ—è줒T$Ed£V ¬§ìHmA„¾¬[·Ž?þ˜òòr\.\rÉ%,_¾|¨‹& Sƒƒ SH3‰¾gCMB")âøéЄ³OBxúPAÂõÑ„ëÅÄ`‚ ­E‹±hÑ¢¡.†pC×AAAPˆAAA…AAAPˆAAA…AAAP 8‹QcG%Û*þÅ‘–‚SQHŽÈfvú¥$GduQ„3Œ×çáû#_²¿n£HszŠ4aLJZÀÌQ"I¢ŽEAy ¼µëQjÚJNEY„£ŽX )¬ÿžßž÷çøÎn»«×ðQþ3C]ŒïPãô#S’uQAá¤PõW‹­A§I«½š¶ÃC] á s nËPá¬QP/ε ‚02 (@ðá=Uå‚ðùÄTÕÂÀø|â;zºˆ¯§ ‚0R‰´‚ ‚ ‚ (D€ ‚ ‚ ‚B‚ ‚0Âøsg.g¨‹rÖq9x½â¼ g6‘"GAF§Çͺ’BJÌ Tµ6cw»PK*Â#eŠaaV. aC]ÌÇçóÑÜXCgG v›Ç €Þ‚ÁJTL†°!.¥ ŒAá Wniâ½}Û1wv(Ë´j5Öv­íä×V²$g" 2Ç¢’¤“z|·Ó‰ÇíFrRŸw¸³Û¬ÔVã°Î;ã°wâ°wÒÖÒDt\ ± £Nòyï‹ÛíÆjµyÚŽy¦©­­E¯×=ÔEvF|£­oVóù%ÚÔ<ÔEáGö´ñí_Ëùê©Ò¡.Š œ±öÔT°rÇÚvtjM¯’$±æðÞÞ³uÐÇÚðÞ›üöüY¬~õïʲß_²ˆÿ˜Ëîo¿</çŒam·Pqx_Ðà 'ŸÏ‡¹¡ŠÊÒ}¦?³ÛíX­Ö“R¶K.¹„ÐÐP{챓ò|#Í3Ïx°ˆöFÿÒèƒ_€`oïþ¾šÁ¨)x=>¾ùK9å;[ýöMÈ eùe¡“ûWþD .»íåŽ÷jñz|x=Ý79{þÕÀ•ŒAgTŸà‰„á¡Ía£ªÕ¢Œ5¯ÏÇ¡Æ:fŽÊÐ~£ÆŽçê_?ˆÎ` Xçè´R¸u#*µšô “1„úWšy½Ê˰Ô×â´Û‰•Nòè1¸N*‹ h®­Æç󓔨܉h´Z¿ýÛÌM4UVÐjn$,2ŠQ¹qÚ:éloC­Ö—–®lÛÒP‡ÝjEg0”2 ×x<^¯—Ö–¦ão„Ïç¥ÕÒH\bš²ì©§žÂb±°dÉ$Iâºë®ã“O>áí·ßö œN'EEETVVâñx7nœrëóù(..æàÁƒ¤§§7‹ÅB~~>MMM„‡‡3qâD’’’ü¶q:ìÙ³‡ŠŠ <iiiLŸ>½^ï·]cc#‡¦®®“ÉÄôéÓýÖ———c0È•¯cÇŽ¥¢¢»Ý®´˜t)))ÁårM|||ÐrÿðÔ””`0ÈÉÉaüøñ~ëÛÛÛÙ½{7‹…)S¦‘‘á·¾¢Bn=7nœòÿ¸¸8¿mŽ9ÂÁƒÈÉÉ¡ººšÎÎN¿ò666b6›IOOÇh4âõz9t艉‰˜L& )**Âjµ’””ÄäÉ“‰õ;–ÕjåСCTUU¡Ñh˜4i’ß9)(( °°ÔÔTòòòÎý©2d‚Ïkÿ~„Ö:j­Ħq9¼hôý¯åË_Ý@D‚ži—'àõøP©% #6]¾ …Çêð¸¼|÷B¶V7º5ãÅP¹§ K­oÕÈÛgø_g_—LmQGö´Éµ¤éFÒ§FR¼¹™öF'»?­gÒ²¸•_†«¢ÍJp“fdô\öiyýÏ‚R¶³•öF'*µÄÄ¥±ÄÅ\a#~t¨ßv:£š±çʃĢRä‘}_4(ÁAZ^†p Å›-Ô[ÙöN ê_•>5‚¨kÍ8;=¨µS.Œ§­ÁAÙŽVZëo²0áÿ‹³ œ©;Útiê1 ¹¿>þËŸÙúÉûä̘Í=¯¼ã·î?ýAù¿F§ãºß>Ä9W_ÀÁí›yñ¿~­½MÙF¥VóÌÖý¬{û|ø¿ò{®ÄÌl~ñ×Wˆ%ßìþÏ-×P²ç¿m®ú¯ûÑCxûÑß!IO®ÝAxt Ïüôê+Ê8÷Ú›¸þGü:ûâ°[Oh*uû1ç}þüù~ÇÇ'Ÿ|Bhh÷uòÍ7ßä¶Ûnó£@]]f³™+Vðí·ßö» ï¾û.+V¬h)ºîºëxî¹ç”›Ø—_~™;î¸Ão›ììl>ûì3rssXºt)ß|óß6?ü0K—.UÿèG?Rþo³Ùxøá‡Y¹r%™™™”–ÊcК››3f ^¯—•+WòãÿØï9m6]tëÖ­ó[¾yófæÍ›À;ï¼Ãí·ßNKK‹²þ–[ná¹çž#,LXÝn¹Rø7Þà7ÞPÊ{ÕUW)ûÜtÓMÊÿxôÑGy饗ÈÉÉQ‚€Ûo¿>ø€×^{[n¹…}ûö1uêT6nÜH\\\@ðÎ+¯¼Â5×\À#<£>ê×láÂ…¬[·Žææfn½õV>ýôS¿sÿæ›o2{ölNµ! 
êŠ:h,“ö,úY£çE êyTG»èCÔD§ˆË žA!sF¤Ò*P¹¯¶ù Yú¤L göµI¬üy>n§—C›š‰Íð¯uÈ»4ž¼KâYùó|œ6Éã˜yu"1é¾y¶—ÝK[£“èT‚p¦ËÿJ¾F‡rÅCƒkjUõˆ•#ô¤O {¶)`;}˜šy7ûß®— dL‹dÙ=™ÂÔìû²‘âÍÍ,¸5Õ¯2 uR8—Æ!©$vZÏ ³¯KBRIüóÎX-.Ì•¶A½AŽš¬íèص¨'Ë „¾Ã#HËHéÞ¸Þzô2&M!uìx:,ÍJp3}jN‡Vo ul.£§Í$6e–ú:ïúžº²>ûÛÿòïz€†#eD'%“”=†ÖƲ&O#>=“Uú^‡›Ö1粫0×TQ_!o?í‚å'õ5Bà þ€÷·îðàA^{í5*++yï½÷P©Tüæ7¿QÖ×ÔÔ`µZÑh4œ{î¹x½^¥füW¿ú•Œ;–°°0vîÜÙwìv%8X¶l¤¢¢‚U«Va·Ûùøã˜4ióæÍ#;;›ºº:Ö¯_OII ÷ÝwŸ²MWýÔÔT&MšD}}=sçÎõ;Þ˜1c0åJW•JÅŠ+X¹r%eee’››Ë·ß~‹×ëE«Õrùå—”ùÕW_U‚ƒsÏ=—qãÆ‘ŸŸ¯´Vìܹ“o¼¯×Kxx8éééìß¿Ÿ×_ÐÐPžþy¿ç‹‹‹#99ƒ­žF­h†‹/¾˜—^z‰ââbšššˆ‰‰aýúõ€ ÜrË-lÚ´ €¨¨(æÎ‹Ïç#//qãÆáv»Y³f ‹…Ûo¿Ë.» ½^OEEN§“°°0æÍ›‡Õjå¼óÎà¿øŸ~ú)&“‰‹/¾˜;vpèÐ!V¬XAaa¡Ò"sª Y€`©éîV4jòàó2O¹8ž5« ±¬“ˆøìι%µ×@A9~µ<(RRI$Œ‘?*DÂèª :”õ$¹†³þ°§MîbÚÝÒàèpßOÎ$>h©•¿]݇#s¦‰]ŸÔÓZç`ãkU|ÿ^-S.Ž'ïÒúÊöçuûh«—¯IãºkÑ’Æ…±ïËF<.m NLIM­]cˆ¼n§­A"*Õ€ÕâÂÑ.’#‡ÇçE«|  T‚uͯdîåWÓÑbáwžƒÃÖɦÞaÅýþýèÿã…7ÐètÊãܹ È»@yüésOñåKÏQž¿'à ¯û7–Þz›ß²±³æQ¸u#»×|Ŝˮ¢`Ë¢¢É™qòkZOt4¯×‹ÏçóKyzøðaž|òIåqFFF@Ë€ÉdbÍš5Êãööv¥üÎ;ïäÙgŸE’$&L˜@AAA¿Êóå—_âõzùíoËSO=Å'Ÿ|˜0a ,`óæÍʶ?þ8<ðßÿ}Àóüä'?ᡇRoß¾]ùÿªU«ÈËËSŸwÞyÄÆÆÒÔÔć~È<ÀêÕ«•uQQ•Ær¥ULL ¯¿þz@ס'žx¯×KJJ {÷î%::š»ï¾›gŸ}–^xGyÄ/éÍ7ßÌSO=¥<îy¾^}õU,èþL.Y²ƒÁ€ÝngË–-dffÒÔ$·°w6ÈŸ»åË—£>Ú²·{÷n¿ó1gÎÌf3¥¥¥J @^^žòúJKKYµJNðÁpÞyçÑÚÚJjj*ååå¬]»–‹.º(àLCÖFÛ£NgkàHý®Ŷ¶Þoº³ç˜øÑsÈžmB¥–h(éäÓÇ+Ï)=ŒÇí%Ô‡ÊožÏëÃÞÞýüGÇ#t­F¥õ¿³Qkz<>É\Ahtò—§³5ø÷O­•×÷üþKkPqÕ£c™um¡QZVß¿[ËžÏêGþîô+r°®5¨ŽßÙÒ}­0ôòUk/k]ןø‚ #H|XZµzÐ '7?~× o˜)ŠÌÉrW‹úòã§0®>tî¾{ÏŸÍΙÀúUò ¯µµå8{Êf^x)…[7â°u²ÃZòÎ[Šê¨ÞŒ¡ÇߨzcHÀ|yyy¼úê«<òÈ#äääP^^ÎÌ™3›‰¨¤¤ÏÑÙ²/¾øâAϳ V«¹õÖ[•Ç{÷îäÆ×\s )))„……)7Ôf³yPÇé¢Ñh¸úê«øè£ðù||ñÅÊòc]~ùå¨ÕjÌf3£GæÊ+¯ô»ï*óùçŸOLL ’$)ÏåõzÉÏÏtyCCCY¶lß}÷k×ÊŸ±„„ŠŠŠ¨©©aãÆJW¥öövîºë.²³³ çÒK/Užïxçoß¾}Êÿï¾ûnòòòX¸p¡’Ùª«[Ö©4dBLZw­ûþ¯›°­yw;åÁ†!&­Ò} *¿ ¯;ø»½ÍM\fKîÌøÿíÝyPÔgšðoßtsô4 r Aˆ,FÍ¥²¸jœ¨‰ºšÊZ©¤Ö-™2™ŒëNvk¶¦²:&¦ö5›¬Æ“5QÊ”kÖ(x‹ cXu7™%vióûi1:ÑÙʬô ªDæþMêlfî\èdï-–½ !A¥”ÊF ¨CF¾:ˆÃö(/ÀóªµÇŒ?¾± WËA(•à AŸ0¬{e¿¸"©N»—Ž”áV%3ã=gIÉF˜X2ºŽ…’Î×jµØ°a¶mÛÆ¦ µ··³³ÓOÒ—O09ú£Ñ¿P×ápÀjµ¢¨¨ß}÷D"–-[†éÓ§èÚuÅZ·n&5hÿþýÐëõì}’““ƒòòräççÃívãСCÈÍÍeS}àã?†^¯‡T*ÅâÅ‹0+»wïFGGŠ‹‹}V$éî~\«cµZa·Ûa·Û‘˜˜ˆôôtŸú”ñ´OØÈ8¦ÓÐÝË]¸y¢µ' ©„0w8°jGdJ¦â &Ôž2¢©º\¾ÿ;Í¡ßÕÁëBUBXûÍ.†©™Ôí nUpïJ7öÿC x<f¯ˆFú/HÍ@ý9~þ¡ ·Ê °[˜(<$B€ŒTÈHžm³—Gã‡ßßAÁ‰ƒÛëªÀåð"í…佋”¼pÜ8ÖG¯ßl¹¹Zè×­èö)#.hED¬<íÌX˜š .b3? üÏ»µ‹B æaù¿¦aίb û ]z;þôë[à 8l·¢9¯D?¥Ÿ!W˜X‚p±–´Ûäˆ ŸÝc -Ðt™ÍÕ¤ þ@yÿÖ ôv3ÍÞù½PÇ'àòÑÃøü½ÍC¾Ÿ$T†üe¯¢ü«ÿÆÁ?þœv¢’6'/ðÉ#Àã –†Ž¸!Dî›>ãt:}dïܹþôП––‡¯×‹ ¤¤\îÈæ~û§¦¦âÚµklZÏÁƒ‘ÇX0þéß_ºt)D"nܸúúz,X°Ë—/ÇæÍ›±{÷nÌuÒG»‰÷zÛ·oGii), <8èú¤¥¥±¯KKK±iÓ¦AŽA‚[ðÖ4\ŒlEm….‡]z;¸<z NÈ”B䯅ÃêÆý«Ýì 8@x´ºÚÑc`𾔿¨Ôü¿m…ÃêfÛ‹By«˜ÀfvÁíð<^à0ÅÄs~Å´Ò‹Œ#m,.~Û —ÃÎVf?¯—)Ž^Rš„ӟ݇Åä„Çí…PÊüU¤ŒÏƒ !“ ‡ÃÁœ¸$œ¿wgØ ‘J(¤Ož ߶䟯Å!¡øà‡ŠA¯yîз¸yþ4®? Ç ¾Pˆ¢W×zN˜êqqèÿy¸Wsm3¶àõ8ùͰ=êòè¾££MASýÕam”òp%BeǃÄÄDäää`Ú´ihnnFEósØ­F.—cãÆøì³Ïðõ×_£ªª .„N§ò˜æÏŸüøã˜.Jyyy>רºu+Š‹‹}R_‰Gdd$ŒF#Þ|óM=zF£Ÿ~ú)[\ZZŠW^yf³ü:&õ·k×.œ8q3gΗËeƒ'™Œ™hÚ²e ÊÊÊp÷î]$''###ƒ­×X³f_+Ó¿¤V«¡ÑhÐÒÒ‚Í›7ãÔ©SèééÁÎ;‘””¹\Ž%K– ¬¬ v»%%%Ðjµ˜5k[¾jÕ*özõõõسgÏJÏPÌ›7¹¹¹¨¬¬ÄÛo¿Ï?ÿÙÙÙhkk‡Ãr 1A øB.òׯâ¯^E÷C;_€¨Xß÷¢úúzèt:¿ú˜˜ìٳǯgþ@vî܉ÎÎN|ÿý÷¨««cÛpU_ŠÀ¤:íß¿\.qqqx÷Ýwñá‡â§Ÿ~òkeˆH$ÂŽ;°iÓ&˜ÍfìÛ·S¸Û—†³lÙ2$%%¡±±Ï?ÿLX,6dee>x®\¹…"aaCëôµïòvÜ|x~LîM÷ Í|¬yþ·Á¦ÊÊËxé¥ÅÁƘòh4éK}¾øâ ¬_¿³ñXMM ´Z-æÎËîЧ¡¡—.]‚ÉdBxx8T*²²²ØU€‹/Âjµ"!!Á¯³PßùUUUèêêBrr2 Ù1555!++‹ úêb4qþüyy¼^lV ìÖÞG…Ì!ü÷j!ÀåË—Q[[ €iñzèÐ! &˜©ùŽA!„<£¸d"1d¢ñÝiu"Xõ›°píð¸\PÆM€tš§…ÃáB"•A"¿ö±c¥¸¸.\€§¾z0›‹544@"‘ 111(c ƒ£B!“‡Ëö¾ P*•C*€/b±™™™A»? 
lXë9œ@;žB‚Šfaž&Ú•™BÈÔ4¬!L¢FThÂ8 …ô')  K | !ý¤«ï™MÆNºúÉíø!„ÉlX)FpðÚìm8· ͦ[ðÒ Ú˜ã€ƒy òþ|nðr)Éä4K»·×[ÏÂî²{8S’„Š1…ÈŽ]ì¡B!ãbØ5Q¡ X6óÇc,„Qâqùx!q^HßM‚!„2uQO)B!„B‹B!„B‹B!„B‹B!„B‹B!„B‹ëõR«RBÆŠËå‚@0víiù|>\.ט]gm&H!q½^€‚BÆFWW7 Ř]/22ÝÝÝcv=Bže</ØÃ „ ½þa°ÇAȤçñx`2uŽi€ R© ×·Áã¡ žÑÒéZ¡Ñh‚= B™ð¸YYY¸{·f³9Øc!dÒòz½¸~ý&23gŒé ¥P(DZÚtÔÔܤ•>BF¡³³ :]+ÒÓ3‚=B™ð8^¯×k±XPYY ™,J¥2Y(åi2‡&Stº¤¤¤"))i\îSWW‡¦¦»Ðj5‡P8vu„LU^¯ÝÝftt`µÚ1oÞ nordugrid-arc-6.14.0/src/doxygen/PaxHeaders.30264/Makefile.in0000644000000000000000000000013014152153422021643 xustar000000000000000030 mtime=1638455058.084941665 30 atime=1638455092.119453049 28 ctime=1638455101.3205913 nordugrid-arc-6.14.0/src/doxygen/Makefile.in0000644000175000002070000005332514152153422021642 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/doxygen DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 
$(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ 
DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = 
@PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ SWIG_DEPENDS = \ $(top_srcdir)/swig/common.i \ $(top_srcdir)/swig/credential.i \ $(top_srcdir)/swig/data.i \ $(top_srcdir)/swig/compute.i INPUT = $(top_srcdir)/src/hed/libs/common \ $(top_srcdir)/src/hed/libs/credential \ 
$(top_srcdir)/src/hed/libs/data \ $(top_srcdir)/src/libs/data-staging \ $(top_srcdir)/src/hed/libs/compute CPP_EXAMPLES = $(top_srcdir)/src/hed/libs/common/examples \ $(top_srcdir)/src/hed/libs/credential/examples \ $(top_srcdir)/src/hed/libs/data/examples \ $(top_srcdir)/src/libs/data-staging/examples \ $(top_srcdir)/src/hed/libs/compute/examples PYTHON_EXAMPLES = $(top_srcdir)/python/examples SPECIALISATION_MAPPINGS = JobState JobDescription SPECIALISATION_MAPPINGS_JobState = \ $(top_srcdir)/src/hed/acc/GRIDFTPJOB/JobStateGRIDFTPJOB.cpp \ $(top_srcdir)/src/hed/acc/EMIES/JobStateEMIES.cpp SPECIALISATION_MAPPINGS_JobDescription = \ $(top_srcdir)/src/hed/acc/JobDescriptionParser/XRSLParser.cpp \ $(top_srcdir)/src/hed/acc/JobDescriptionParser/ADLParser.cpp SDKDEPENDENCIES = Doxyfile.SDK.build Doxyfile.SDK.build.layout.xml \ $(srcdir)/add-bindings-deviations-to-dox.py \ $(srcdir)/images/arcsdk.png \ $(srcdir)/adapt-and-filter-mapping-attributes.sed \ $(SWIG_DEPENDS) \ $(wildcard $(addsuffix /*.h, $(INPUT))) \ $(wildcard $(addsuffix /*.cpp, $(CPP_EXAMPLES))) \ $(wildcard $(addsuffix /*.py, $(PYTHON_EXAMPLES))) \ $(addsuffix _Mapping.dox, $(SPECIALISATION_MAPPINGS)) EXTRA_DIST = Doxyfile.SDK Doxyfile.SDK.layout.xml images/arcsdk.png \ add-bindings-deviations-to-dox.py \ adapt-and-filter-mapping-attributes.sed \ create-mapping-documentation.py CLEANFILES = SDK all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/doxygen/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/doxygen/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES) distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags-am uninstall uninstall-am .SECONDEXPANSION: $(addsuffix _Mapping.dox, $(SPECIALISATION_MAPPINGS)): %_Mapping.dox: $(srcdir)/create-mapping-documentation.py $(top_srcdir)/src/hed/libs/compute/%.h $$(SPECIALISATION_MAPPINGS_%) $(PYTHON) $^ $*_Mapping.dox Doxyfile.SDK.build: $(top_srcdir)/src/doxygen/Doxyfile.SDK cp $(srcdir)/Doxyfile.SDK Doxyfile.SDK.build sed "s/@TOP_SRCDIR@/$(subst /,\/,$(top_srcdir))/g" Doxyfile.SDK.build > Doxyfile.SDK.build.tmp mv Doxyfile.SDK.build.tmp Doxyfile.SDK.build sed "s/@INPUT@/$(subst /,\/,$(INPUT) $(addsuffix _Mapping.dox, $(SPECIALISATION_MAPPINGS)))/g" Doxyfile.SDK.build > Doxyfile.SDK.build.tmp mv Doxyfile.SDK.build.tmp Doxyfile.SDK.build sed 
"s/@EXAMPLES@/$(subst /,\/,$(CPP_EXAMPLES) $(PYTHON_EXAMPLES))/g" Doxyfile.SDK.build > Doxyfile.SDK.build.tmp mv Doxyfile.SDK.build.tmp Doxyfile.SDK.build sed "s/Doxyfile.SDK.layout.xml/Doxyfile.SDK.build.layout.xml/g" Doxyfile.SDK.build > Doxyfile.SDK.build.tmp mv Doxyfile.SDK.build.tmp Doxyfile.SDK.build for mapping in $(SPECIALISATION_MAPPINGS); do \ sed "s/^FILTER_PATTERNS[[:space:]]*=/& *\/$${mapping}.h=$(subst /,\/,$(srcdir))\/adapt-and-filter-mapping-attributes.sed/g" Doxyfile.SDK.build > Doxyfile.SDK.build.tmp;\ mv Doxyfile.SDK.build.tmp Doxyfile.SDK.build;\ done Doxyfile.SDK.build.layout.xml: $(top_srcdir)/src/doxygen/Doxyfile.SDK.layout.xml cp $(srcdir)/Doxyfile.SDK.layout.xml Doxyfile.SDK.build.layout.xml SDK: $(SDKDEPENDENCIES) doxygen -v | awk -F . '{ exit !($$1 >= 2 || $$1 == 1 && $$2 >= 8) }' || (echo "doxygen version 1.8.0 or greater required (version $$(doxygen -v) found)" && exit 1) doxygen Doxyfile.SDK.build # Postprocessing: Add deviations from SDK API for language bindings (Python). for file in $(SWIG_DEPENDS); do $(PYTHON) $(srcdir)/add-bindings-deviations-to-dox.py $${file} SDK/html; done # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/doxygen/PaxHeaders.30264/adapt-and-filter-mapping-attributes.sed0000644000000000000000000000013114152153376027235 xustar000000000000000030 mtime=1638455038.342645028 30 atime=1638455038.475647026 29 ctime=1638455101.32659139 nordugrid-arc-6.14.0/src/doxygen/adapt-and-filter-mapping-attributes.sed0000755000175000002070000000225314152153376027230 0ustar00mockbuildmock00000000000000#!/bin/sed -f # Copy mapdef ID to buffer /\\mapdef / { # Copy current line to buffer h # Remove every thing but mapdef ID. s/.*\\mapdef \([^[:space:]]\+\)[[:space:]]\+.*/\1/ # Swap buffer with pattern space. x } # Remove \mapdef attribute plus associated description. End at first empty line, # line with asterisks (*) or line with asterisks followed by slash (/) modulo # spaces. /\\mapdef /,/^[[:space:]]*\**\/\?[[:space:]]*$/ { /^[[:space:]]*\**\/\?[[:space:]]*$/ ! d } # Replace mapdefattr command with link to attribute mapping. /\\mapdefattr/ { # Append buffer (prefixed with new line) to pattern space. This should be the # mapdef ID copied above. Thus the assumption is that the mapdef command must # come before the mapdefattr command. G # Replace \mapdefattr line with a link pointing to mapping of specific # attribute. # mapdefattr name mapdef ID s/\\mapdefattr[[:space:]]\+\([^[:space:]]\+\)[[:space:]]\+[^[:space:]]\+\n\(.*\)$/\2.html#attr_\1/ s/[^[:space:]]\+$/Attribute mapping specific to this field\/value.<\/a>/ # :: should be transformed to _ in URLs. s/::/_/g } nordugrid-arc-6.14.0/src/doxygen/PaxHeaders.30264/add-bindings-deviations-to-dox.py0000644000000000000000000000013214152153376026060 xustar000000000000000030 mtime=1638455038.342645028 30 atime=1638455038.475647026 30 ctime=1638455101.325591375 nordugrid-arc-6.14.0/src/doxygen/add-bindings-deviations-to-dox.py0000644000175000002070000001654014152153376026053 0ustar00mockbuildmock00000000000000#!/usr/bin/env python # ''' Script for parsing Swig interface files (.i) and extracting renames (%rename) and ignores (%s), and adding that information to the doxygen generated HTML. Usage: add-bindings-deviations-to-dox.py E.g.: add-bindings-deviations-to-dox.py swig-interface.i dox/html Limitations: * Unable to handle #else or #elif statements. * Unable to handle templates. 
''' from __future__ import print_function import sys, re from os.path import isfile # Location of swig file filename = sys.argv[1] # Location of generated doxygen HTML documentation sdkDocumentationLocation = sys.argv[2] # Use list to deal with scoping of #if and #ifdef statements. inIfdef = [] # Use dictionary below to group %rename and %ignore statements per HTML file. expressionsFound = {} f = open(filename, "r") for line in f: line = line.strip() regMatch = re.match('\A#if(n?def)?\s+(\w+)', line) if regMatch: inIfdef.append(regMatch.group(2)) #print " #ifdef %s" % inIfdef continue regMatch = re.search('\A#endif', line) if regMatch: #print " #endif // %s" % inIfdef inIfdef.pop() continue regMatch = re.match('%ignore\s+([^;]+)', line) if regMatch: ignoredName = regMatch.group(1) #print "Expression ignored: %s" % ignoredName regMatch = re.match('\A(Arc|ArcCredential|AuthN|DataStaging)::([^:<]+)(<([^:>]+(::[^:>]+)+)>)?::(.*)', ignoredName) if regMatch: namespaceName, className, _, templateParameters, _, methodName = regMatch.groups() if templateParameters: #print "Found template: %s::%s<%s>::%s" % (namespaceName, className, templateParameters, methodName) print("Error: Unable to handle template signatures %s" % ignoredName) continue #print " Ignoring method '%s' in class '%s' in Arc namespace." % (methodName, className) sdkFNOfIgnoredInstance = sdkDocumentationLocation + '/class' + namespaceName + '_1_1' + className + '.html' if sdkFNOfIgnoredInstance not in expressionsFound: expressionsFound[sdkFNOfIgnoredInstance] = [] ignoreScope = ["Python"] if "SWIGPYTHON" in inIfdef else ["Python"] expressionsFound[sdkFNOfIgnoredInstance].append({"text" : "Method is unavailable", "scope" : ignoreScope, "name" : methodName}) continue print("Error: Couldn't parse ignore signature %s" % ignoredName) continue regMatch = re.match('%rename\(([^)]+)\)\s+([^;]+)', line) if regMatch: #print "Expression '%s' renamed to '%s'" % (regMatch.group(2), regMatch.group(1)) toName, renameFullName = regMatch.groups() regMatch = re.match('\A(Arc|ArcCredential|AuthN|DataStaging)::([^:<]+)(<([^:>]+(::[^:>]+)+)>)?::(.*)', renameFullName) if regMatch: namespaceName, className, _, templateParameters, _, methodName = regMatch.groups() if templateParameters: #print "Found template: %s::%s<%s>::%s" % (namespaceName, className, templateParameters, methodName) print("Error: Unable to handle template signatures %s" % renameFullName) continue #print " Ignoring method '%s' in class '%s' in Arc namespace." 
% (methodName, className) sdkFNOfRenamedInstance = sdkDocumentationLocation + '/class' + namespaceName + '_1_1' + className + '.html' if sdkFNOfRenamedInstance not in expressionsFound: expressionsFound[sdkFNOfRenamedInstance] = [] renameScope = ["Python"] if "SWIGPYTHON" in inIfdef else ["Python"] expressionsFound[sdkFNOfRenamedInstance].append({"text" : "Renamed to " + toName + "", "scope" : renameScope, "name" : methodName}) continue print("Error: Couldn't parse rename signature %s" % renameFullName) continue f.close() #print expressionsFound for filename, v in expressionsFound.items(): if not isfile(filename): print("Error: No such file %s" % filename) continue doxHTMLFile = open(filename, "r") doxHTMLFileLines = doxHTMLFile.readlines() doxHTMLFile.close() doxHTMLFile = open(filename, "w") i = 0 while i < len(doxHTMLFileLines): doxHTMLFile.write(doxHTMLFileLines[i]) regMatch = re.match('\s+(.+)', doxHTMLFileLines[i]) if not regMatch: i += 1 continue doxMethodName = regMatch.group(1).strip() #print doxMethodName for entry in v: regMatch = re.match("(operator\(\)|[^(]+)" "(\(([^(]*)\))?" "\s*(const)?", entry["name"]) if regMatch: methodName, _, methodParameters, isConst = regMatch.groups() #print "Method name: '%s'; Parameters: '%s'; isConst: %s" % (methodName, methodParameters, str(bool(isConst))) #print "'%s\Z', %s" % (methodName.strip(), doxMethodName) doxMethodName = doxMethodName.replace(">", ">") if doxMethodName.endswith(methodName.strip()): #print "Method '%s' found in file '%s' as '%s'" % (methodName, filename, doxMethodName) isInsideMemdocDiv = False methodParameters = methodParameters.split(",") if methodParameters else [] while True: i += 1 regMatch = re.match('\s+(.+)', doxHTMLFileLines[i]) if regMatch: doxParam = regMatch.group(1).replace(" ", "").replace(" &", "\s*&").strip() doxParam = re.sub(']*>', '', doxParam) # Remove anchor tags if len(methodParameters) == 0: if doxParam != "void": # Doesn't match that in HTML document doxHTMLFile.write(doxHTMLFileLines[i]) break elif re.match(doxParam, methodParameters[0]): methodParameters.pop(0) elif isInsideMemdocDiv and re.match('', doxHTMLFileLines[i]): if len(methodParameters) > 0: # Doesn't match that in HTML document doxHTMLFile.write(doxHTMLFileLines[i]) break for scope in entry["scope"]: doxHTMLFile.write('
' + scope + ' interface deviation
' + entry["text"] + ' in ' + scope + ' interface
') v.remove(entry) doxHTMLFile.write(doxHTMLFileLines[i]) break elif re.search('
', doxHTMLFileLines[i]): isInsideMemdocDiv = True doxHTMLFile.write(doxHTMLFileLines[i]) break else: print("Error: Unable to parse method signature %s" % entry["name"]) i += 1 doxHTMLFile.close() if v: print("Error: The following methods was not found in the HTML file '%s':" % filename) for entry in v: print(" %s" % entry["name"]) print("??? => Is there a API description in the corresponding header file for these?") nordugrid-arc-6.14.0/src/doxygen/PaxHeaders.30264/Doxyfile.SDK0000644000000000000000000000013114152153376021735 xustar000000000000000030 mtime=1638455038.342645028 30 atime=1638455038.474647011 29 ctime=1638455101.32259133 nordugrid-arc-6.14.0/src/doxygen/Doxyfile.SDK0000644000175000002070000023414014152153376021727 0ustar00mockbuildmock00000000000000# Doxyfile 1.8.3.1 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. # # All text after a hash (#) is considered a comment and will be ignored. # The format is: # TAG = value [value, ...] # For lists items can also be appended using: # TAG += value [value, ...] # Values that contain spaces should be placed between quotes (" "). #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- # This tag specifies the encoding used for all characters in the config file # that follow. The default is UTF-8 which is also the encoding used for all # text before the first occurrence of this tag. Doxygen uses libiconv (or the # iconv built into libc) for the transcoding. See # http://www.gnu.org/software/libiconv for the list of possible encodings. DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or sequence of words) that should # identify the project. Note that if you do not use Doxywizard you need # to put quotes around the project name if it contains spaces. PROJECT_NAME = "ARC SDK" # The PROJECT_NUMBER tag can be used to enter a project or revision number. # This could be handy for archiving the generated documentation or # if some version control system is used. PROJECT_NUMBER = # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer # a quick idea about the purpose of the project. Keep the description short. PROJECT_BRIEF = # With the PROJECT_LOGO tag one can specify an logo or icon that is # included in the documentation. The maximum height of the logo should not # exceed 55 pixels and the maximum width should not exceed 200 pixels. # Doxygen will copy the logo to the output directory. PROJECT_LOGO = # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) # base path where the generated documentation will be put. # If a relative path is entered, it will be relative to the location # where doxygen was started. If left blank the current directory will be used. OUTPUT_DIRECTORY = SDK # If the CREATE_SUBDIRS tag is set to YES, then doxygen will create # 4096 sub-directories (in 2 levels) under the output directory of each output # format and will distribute the generated files over these directories. # Enabling this option can be useful when feeding doxygen a huge amount of # source files, where putting all generated files in the same directory would # otherwise cause performance problems for the file system. 
CREATE_SUBDIRS = NO # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. # The default language is English, other supported languages are: # Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Tradditional, # Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, # Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English # messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, # Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak, # Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. OUTPUT_LANGUAGE = English # If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will # include brief member descriptions after the members that are listed in # the file and class documentation (similar to JavaDoc). # Set to NO to disable this. BRIEF_MEMBER_DESC = YES # If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend # the brief description of a member or function before the detailed description. # Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. REPEAT_BRIEF = YES # This tag implements a quasi-intelligent brief description abbreviator # that is used to form the text in various listings. Each string # in this list, if found as the leading text of the brief description, will be # stripped from the text and the result after processing the whole list, is # used as the annotated text. Otherwise, the brief description is used as-is. # If left blank, the following values are used ("$name" is automatically # replaced with the name of the entity): "The $name class" "The $name widget" # "The $name file" "is" "provides" "specifies" "contains" # "represents" "a" "an" "the" ABBREVIATE_BRIEF = # If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then # Doxygen will generate a detailed section even if there is only a brief # description. ALWAYS_DETAILED_SEC = YES # If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all # inherited members of a class in the documentation of that class as if those # members were ordinary class members. Constructors, destructors and assignment # operators of the base classes will not be shown. INLINE_INHERITED_MEMB = NO # If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full # path before files name in the file list and in the header files. If set # to NO the shortest path that makes the file name unique will be used. FULL_PATH_NAMES = NO # If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag # can be used to strip a user-defined part of the path. Stripping is # only done if one of the specified strings matches the left-hand part of # the path. The tag can be used to show relative paths in the file list. # If left blank the directory from which doxygen is run is used as the # path to strip. Note that you specify absolute paths here, but also # relative paths, which will be relative from the directory where doxygen is # started. STRIP_FROM_PATH = # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of # the path mentioned in the documentation of a class, which tells # the reader which header file to include in order to use a class. # If left blank only the name of the header file containing the class # definition is used. 
Otherwise one should specify the include paths that # are normally passed to the compiler using the -I flag. STRIP_FROM_INC_PATH = # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter # (but less readable) file names. This can be useful if your file system # doesn't support long names like on DOS, Mac, or CD-ROM. SHORT_NAMES = NO # If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen # will interpret the first line (until the first dot) of a JavaDoc-style # comment as the brief description. If set to NO, the JavaDoc # comments will behave just like regular Qt-style comments # (thus requiring an explicit @brief command for a brief description.) JAVADOC_AUTOBRIEF = NO # If the QT_AUTOBRIEF tag is set to YES then Doxygen will # interpret the first line (until the first dot) of a Qt-style # comment as the brief description. If set to NO, the comments # will behave just like regular Qt-style comments (thus requiring # an explicit \brief command for a brief description.) QT_AUTOBRIEF = NO # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen # treat a multi-line C++ special comment block (i.e. a block of //! or /// # comments) as a brief description. This used to be the default behaviour. # The new default is to treat a multi-line C++ comment block as a detailed # description. Set this tag to YES if you prefer the old behaviour instead. MULTILINE_CPP_IS_BRIEF = NO # If the INHERIT_DOCS tag is set to YES (the default) then an undocumented # member inherits the documentation from any documented member that it # re-implements. INHERIT_DOCS = YES # If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce # a new page for each member. If set to NO, the documentation of a member will # be part of the file/class/namespace that contains it. SEPARATE_MEMBER_PAGES = NO # The TAB_SIZE tag can be used to set the number of spaces in a tab. # Doxygen uses this value to replace tabs by spaces in code fragments. TAB_SIZE = 8 # This tag can be used to specify a number of aliases that acts # as commands in the documentation. An alias has the form "name=value". # For example adding "sideeffect=\par Side Effects:\n" will allow you to # put the command \sideeffect (or @sideeffect) in the documentation, which # will result in a user-defined paragraph with heading "Side Effects:". # You can put \n's in the value part of an alias to insert newlines. ALIASES = # This tag can be used to specify a number of word-keyword mappings (TCL only). # A mapping has the form "name=value". For example adding # "class=itcl::class" will allow you to use the command class in the # itcl::class meaning. TCL_SUBST = # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C # sources only. Doxygen will then generate output that is more tailored for C. # For instance, some of the names that are used will be different. The list # of all members will be omitted, etc. OPTIMIZE_OUTPUT_FOR_C = YES # Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java # sources only. Doxygen will then generate output that is more tailored for # Java. For instance, namespaces will be presented as packages, qualified # scopes will look different, etc. OPTIMIZE_OUTPUT_JAVA = NO # Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran # sources only. Doxygen will then generate output that is more tailored for # Fortran. OPTIMIZE_FOR_FORTRAN = NO # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL # sources. 
Doxygen will then generate output that is tailored for # VHDL. OPTIMIZE_OUTPUT_VHDL = NO # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, # and language is one of the parsers supported by doxygen: IDL, Java, # Javascript, CSharp, C, C++, D, PHP, Objective-C, Python, Fortran, VHDL, C, # C++. For instance to make doxygen treat .inc files as Fortran files (default # is PHP), and .f files as C (default is Fortran), use: inc=Fortran f=C. Note # that for custom extensions you also need to set FILE_PATTERNS otherwise the # files are not read by doxygen. EXTENSION_MAPPING = # If MARKDOWN_SUPPORT is enabled (the default) then doxygen pre-processes all # comments according to the Markdown format, which allows for more readable # documentation. See http://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you # can mix doxygen, HTML, and XML commands with Markdown formatting. # Disable only in case of backward compatibilities issues. MARKDOWN_SUPPORT = YES # When enabled doxygen tries to link words that correspond to documented classes, # or namespaces to their corresponding documentation. Such a link can be # prevented in individual cases by by putting a % sign in front of the word or # globally by setting AUTOLINK_SUPPORT to NO. AUTOLINK_SUPPORT = YES # If you use STL classes (i.e. std::string, std::vector, etc.) but do not want # to include (a tag file for) the STL sources as input, then you should # set this tag to YES in order to let doxygen match functions declarations and # definitions whose arguments contain STL classes (e.g. func(std::string); v.s. # func(std::string) {}). This also makes the inheritance and collaboration # diagrams that involve STL classes more complete and accurate. BUILTIN_STL_SUPPORT = NO # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. # Doxygen will parse them like normal C++ but will assume all classes use public # instead of private inheritance when no explicit protection keyword is present. SIP_SUPPORT = NO # For Microsoft's IDL there are propget and propput attributes to indicate # getter and setter methods for a property. Setting this option to YES (the # default) will make doxygen replace the get and set methods by a property in # the documentation. This will only work if the methods are indeed getting or # setting a simple type. If this is not the case, or you want to show the # methods anyway, you should set this option to NO. IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC # tag is set to YES, then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. DISTRIBUTE_GROUP_DOC = NO # Set the SUBGROUPING tag to YES (the default) to allow class member groups of # the same type (for instance a group of public functions) to be put as a # subgroup of that type (e.g. under the Public Functions section). Set it to # NO to prevent subgrouping. 
Alternatively, this can be done per class using # the \nosubgrouping command. SUBGROUPING = YES # When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and # unions are shown inside the group in which they are included (e.g. using # @ingroup) instead of on a separate page (for HTML and Man pages) or # section (for LaTeX and RTF). INLINE_GROUPED_CLASSES = NO # When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and # unions with only public data fields will be shown inline in the documentation # of the scope in which they are defined (i.e. file, namespace, or group # documentation), provided this scope is documented. If set to NO (the default), # structs, classes, and unions are shown on a separate page (for HTML and Man # pages) or section (for LaTeX and RTF). INLINE_SIMPLE_STRUCTS = NO # When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum # is documented as struct, union, or enum with the name of the typedef. So # typedef struct TypeS {} TypeT, will appear in the documentation as a struct # with name TypeT. When disabled the typedef will appear as a member of a file, # namespace, or class. And the struct will be named TypeS. This can typically # be useful for C code in case the coding convention dictates that all compound # types are typedef'ed and only the typedef is referenced, never the tag name. TYPEDEF_HIDES_STRUCT = NO # The SYMBOL_CACHE_SIZE determines the size of the internal cache use to # determine which symbols to keep in memory and which to flush to disk. # When the cache is full, less often used symbols will be written to disk. # For small to medium size projects (<1000 input files) the default value is # probably good enough. For larger projects a too small cache size can cause # doxygen to be busy swapping symbols to and from disk most of the time # causing a significant performance penalty. # If the system has enough physical memory increasing the cache will improve the # performance by keeping more symbols in memory. Note that the value works on # a logarithmic scale so increasing the size by one will roughly double the # memory usage. The cache size is given by this formula: # 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, # corresponding to a cache size of 2^16 = 65536 symbols. SYMBOL_CACHE_SIZE = 0 # Similar to the SYMBOL_CACHE_SIZE the size of the symbol lookup cache can be # set using LOOKUP_CACHE_SIZE. This cache is used to resolve symbols given # their name and scope. Since this can be an expensive process and often the # same symbol appear multiple times in the code, doxygen keeps a cache of # pre-resolved symbols. If the cache is too small doxygen will become slower. # If the cache is too large, memory is wasted. The cache size is given by this # formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range is 0..9, the default is 0, # corresponding to a cache size of 2^16 = 65536 symbols. LOOKUP_CACHE_SIZE = 0 #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- # If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in # documentation are documented, even if no documentation was available. 
# Private class members and static file members will be hidden unless # the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES EXTRACT_ALL = NO # If the EXTRACT_PRIVATE tag is set to YES all private members of a class # will be included in the documentation. EXTRACT_PRIVATE = NO # If the EXTRACT_PACKAGE tag is set to YES all members with package or internal # scope will be included in the documentation. EXTRACT_PACKAGE = NO # If the EXTRACT_STATIC tag is set to YES all static members of a file # will be included in the documentation. EXTRACT_STATIC = NO # If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) # defined locally in source files will be included in the documentation. # If set to NO only classes defined in header files are included. EXTRACT_LOCAL_CLASSES = YES # This flag is only useful for Objective-C code. When set to YES local # methods, which are defined in the implementation section but not in # the interface are included in the documentation. # If set to NO (the default) only methods in the interface are included. EXTRACT_LOCAL_METHODS = NO # If this flag is set to YES, the members of anonymous namespaces will be # extracted and appear in the documentation as a namespace called # 'anonymous_namespace{file}', where file will be replaced with the base # name of the file that contains the anonymous namespace. By default # anonymous namespaces are hidden. EXTRACT_ANON_NSPACES = NO # If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all # undocumented members of documented classes, files or namespaces. # If set to NO (the default) these members will be included in the # various overviews, but no documentation section is generated. # This option has no effect if EXTRACT_ALL is enabled. HIDE_UNDOC_MEMBERS = NO # If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. # If set to NO (the default) these classes will be included in the various # overviews. This option has no effect if EXTRACT_ALL is enabled. HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all # friend (class|struct|union) declarations. # If set to NO (the default) these declarations will be included in the # documentation. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any # documentation blocks found inside the body of a function. # If set to NO (the default) these blocks will be appended to the # function's detailed documentation block. HIDE_IN_BODY_DOCS = NO # The INTERNAL_DOCS tag determines if documentation # that is typed after a \internal command is included. If the tag is set # to NO (the default) then the documentation will be excluded. # Set it to YES to include the internal documentation. INTERNAL_DOCS = NO # If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate # file names in lower-case letters. If set to YES upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows # and Mac users are advised to set this option to NO. CASE_SENSE_NAMES = YES # If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen # will show members with their full class and namespace scopes in the # documentation. If set to YES the scope will be hidden. 
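# As a purely illustrative sketch of the effect of HIDE_SCOPE_NAMES (the
# member name below is an assumption, not taken from the ARC sources): with
# the tag left at NO, a method is listed with its full scope, e.g.
#   Arc::Logger::setThreshold(LogLevel level)
# whereas setting it to YES would list only
#   setThreshold(LogLevel level)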
HIDE_SCOPE_NAMES = NO # If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen # will put a list of the files that are included by a file in the documentation # of that file. SHOW_INCLUDE_FILES = YES # If the FORCE_LOCAL_INCLUDES tag is set to YES then Doxygen # will list include files with double quotes in the documentation # rather than with sharp brackets. FORCE_LOCAL_INCLUDES = NO # If the INLINE_INFO tag is set to YES (the default) then a tag [inline] # is inserted in the documentation for inline members. INLINE_INFO = YES # If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen # will sort the (detailed) documentation of file and class members # alphabetically by member name. If set to NO the members will appear in # declaration order. SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the # brief documentation of file, namespace and class members alphabetically # by member name. If set to NO (the default) the members will appear in # declaration order. SORT_BRIEF_DOCS = NO # If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen # will sort the (brief and detailed) documentation of class members so that # constructors and destructors are listed first. If set to NO (the default) # the constructors will appear in the respective orders defined by # SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. # This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO # and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO. SORT_MEMBERS_CTORS_1ST = NO # If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the # hierarchy of group names into alphabetical order. If set to NO (the default) # the group names will appear in their defined order. SORT_GROUP_NAMES = NO # If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be # sorted by fully-qualified names, including namespaces. If set to # NO (the default), the class list will be sorted only by class name, # not including the namespace part. # Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. # Note: This option applies only to the class list, not to the # alphabetical list. SORT_BY_SCOPE_NAME = NO # If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to # do proper type resolution of all parameters of a function it will reject a # match between the prototype and the implementation of a member function even # if there is only one candidate or it is obvious which candidate to choose # by doing a simple string match. By disabling STRICT_PROTO_MATCHING doxygen # will still accept a match between prototype and implementation in such cases. STRICT_PROTO_MATCHING = NO # The GENERATE_TODOLIST tag can be used to enable (YES) or # disable (NO) the todo list. This list is created by putting \todo # commands in the documentation. GENERATE_TODOLIST = YES # The GENERATE_TESTLIST tag can be used to enable (YES) or # disable (NO) the test list. This list is created by putting \test # commands in the documentation. GENERATE_TESTLIST = YES # The GENERATE_BUGLIST tag can be used to enable (YES) or # disable (NO) the bug list. This list is created by putting \bug # commands in the documentation. GENERATE_BUGLIST = YES # The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or # disable (NO) the deprecated list. This list is created by putting # \deprecated commands in the documentation. 
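# As an illustration of how these lists are populated (hypothetical text, not
# taken from the ARC headers): writing a line such as
#   \deprecated Superseded by the new job management interface.
# inside a member's documentation comment adds that member to the generated
# Deprecated List page, in the same way \todo, \test and \bug feed the lists
# enabled above.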
GENERATE_DEPRECATEDLIST= YES # The ENABLED_SECTIONS tag can be used to enable conditional # documentation sections, marked by \if section-label ... \endif # and \cond section-label ... \endcond blocks. ENABLED_SECTIONS = # The MAX_INITIALIZER_LINES tag determines the maximum number of lines # the initial value of a variable or macro consists of for it to appear in # the documentation. If the initializer consists of more lines than specified # here it will be hidden. Use a value of 0 to hide initializers completely. # The appearance of the initializer of individual variables and macros in the # documentation can be controlled using \showinitializer or \hideinitializer # command in the documentation regardless of this setting. MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated # at the bottom of the documentation of classes and structs. If set to YES the # list will mention the files that were used to generate the documentation. SHOW_USED_FILES = YES # Set the SHOW_FILES tag to NO to disable the generation of the Files page. # This will remove the Files entry from the Quick Index and from the # Folder Tree View (if specified). The default is YES. SHOW_FILES = YES # Set the SHOW_NAMESPACES tag to NO to disable the generation of the # Namespaces page. # This will remove the Namespaces entry from the Quick Index # and from the Folder Tree View (if specified). The default is YES. SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via # popen()) the command , where is the value of # the FILE_VERSION_FILTER tag, and is the name of an input file # provided by doxygen. Whatever the program writes to standard output # is used as the file version. See the manual for examples. FILE_VERSION_FILTER = # The LAYOUT_FILE tag can be used to specify a layout file which will be parsed # by doxygen. The layout file controls the global structure of the generated # output files in an output format independent way. To create the layout file # that represents doxygen's defaults, run doxygen with the -l option. # You can optionally specify a file name after the option, if omitted # DoxygenLayout.xml will be used as the name of the layout file. LAYOUT_FILE = Doxyfile.SDK.layout.xml # The CITE_BIB_FILES tag can be used to specify one or more bib files # containing the references data. This must be a list of .bib files. The # .bib extension is automatically appended if omitted. Using this command # requires the bibtex tool to be installed. See also # http://en.wikipedia.org/wiki/BibTeX for more info. For LaTeX the style # of the bibliography can be controlled using LATEX_BIB_STYLE. To use this # feature you need bibtex and perl available in the search path. Do not use # file names with spaces, bibtex cannot handle them. CITE_BIB_FILES = #--------------------------------------------------------------------------- # configuration options related to warning and progress messages #--------------------------------------------------------------------------- # The QUIET tag can be used to turn on/off the messages that are generated # by doxygen. Possible values are YES and NO. If left blank NO is used. QUIET = NO # The WARNINGS tag can be used to turn on/off the warning messages that are # generated by doxygen. Possible values are YES and NO. If left blank # NO is used. 
WARNINGS = YES # If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings # for undocumented members. If EXTRACT_ALL is set to YES then this flag will # automatically be disabled. WARN_IF_UNDOCUMENTED = YES # If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for # potential errors in the documentation, such as not documenting some # parameters in a documented function, or documenting parameters that # don't exist or using markup commands wrongly. WARN_IF_DOC_ERROR = YES # The WARN_NO_PARAMDOC option can be enabled to get warnings for # functions that are documented, but have no documentation for their parameters # or return value. If set to NO (the default) doxygen will only warn about # wrong or incomplete parameter documentation, but not about the absence of # documentation. WARN_NO_PARAMDOC = NO # The WARN_FORMAT tag determines the format of the warning messages that # doxygen can produce. The string should contain the $file, $line, and $text # tags, which will be replaced by the file and line number from which the # warning originated and the warning text. Optionally the format may contain # $version, which will be replaced by the version of the file (if it could # be obtained via FILE_VERSION_FILTER) WARN_FORMAT = "$file:$line: $text" # The WARN_LOGFILE tag can be used to specify a file to which warning # and error messages should be written. If left blank the output is written # to stderr. WARN_LOGFILE = #--------------------------------------------------------------------------- # configuration options related to the input files #--------------------------------------------------------------------------- # The INPUT tag can be used to specify the files and/or directories that contain # documented source files. You may enter file names like "myfile.cpp" or # directories like "/usr/src/myproject". Separate the files or directories # with spaces. INPUT = @INPUT@ # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is # also the default input encoding. Doxygen uses libiconv (or the iconv built # into libc) for the transcoding. See http://www.gnu.org/software/libiconv for # the list of possible encodings. INPUT_ENCODING = UTF-8 # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp # and *.h) to filter out the source-files in the directories. If left # blank the following patterns are tested: # *.c *.cc *.cxx *.cpp *.c++ *.d *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh # *.hxx *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.dox *.py # *.f90 *.f *.for *.vhd *.vhdl FILE_PATTERNS = *.h *.dox # The RECURSIVE tag can be used to turn specify whether or not subdirectories # should be searched for input files as well. Possible values are YES and NO. # If left blank NO is used. RECURSIVE = NO # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a # subdirectory from a directory tree whose root is specified with the INPUT tag. # Note that relative paths are relative to the directory from which doxygen is # run. EXCLUDE = # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded # from the input. 
EXCLUDE_SYMLINKS = YES # If the value of the INPUT tag contains directories, you can use the # EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude # certain files from those directories. Note that the wildcards are matched # against the file with absolute path, so to exclude all test directories # for example use the pattern */test/* EXCLUDE_PATTERNS = */test* # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. Examples: ANamespace, AClass, # AClass::ANamespace, ANamespace::*Test EXCLUDE_SYMBOLS = # The EXAMPLE_PATH tag can be used to specify one or more files or # directories that contain example code fragments that are included (see # the \include command). EXAMPLE_PATH = @EXAMPLES@ # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp # and *.h) to filter out the source-files in the directories. If left # blank all files are included. EXAMPLE_PATTERNS = # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude # commands irrespective of the value of the RECURSIVE tag. # Possible values are YES and NO. If left blank NO is used. EXAMPLE_RECURSIVE = NO # The IMAGE_PATH tag can be used to specify one or more files or # directories that contain images that are included in the documentation (see # the \image command). IMAGE_PATH = @TOP_SRCDIR@/src/doxygen/images/ # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program # by executing (via popen()) the command <filter> <input-file>, where # <filter> is the value of the INPUT_FILTER tag, and <input-file> is the name of an # input file. Doxygen will then use the output that the filter program writes # to standard output. # If FILTER_PATTERNS is specified, this tag will be # ignored. INPUT_FILTER = # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern # basis. # Doxygen will compare the file name with each pattern and apply the # filter if there is a match. # The filters are a list of the form: # pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further # info on how filters are used. If FILTER_PATTERNS is empty or if # none of the patterns match the file name, INPUT_FILTER is applied. FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using # INPUT_FILTER) will be used to filter the input files when producing source # files to browse (i.e. when SOURCE_BROWSER is set to YES). FILTER_SOURCE_FILES = NO # The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file # pattern. A pattern will override the setting for FILTER_PATTERN (if any) # and it is also possible to disable source filtering for a specific pattern # using *.ext= (so without naming a filter). This option only has effect when # FILTER_SOURCE_FILES is enabled. FILTER_SOURCE_PATTERNS = # If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that # is part of the input, its contents will be placed on the main page (index.html). # This can be useful if you have a project on for instance GitHub and want to reuse # the introduction page also for the doxygen output.
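# A minimal sketch of how this tag could be used (the file name is
# hypothetical; this configuration leaves the tag empty below): a Markdown
# introduction that is also listed in INPUT and matched by FILE_PATTERNS can
# be promoted to the generated main page with
#   USE_MDFILE_AS_MAINPAGE = README.md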
USE_MDFILE_AS_MAINPAGE = #--------------------------------------------------------------------------- # configuration options related to source browsing #--------------------------------------------------------------------------- # If the SOURCE_BROWSER tag is set to YES then a list of source files will # be generated. Documented entities will be cross-referenced with these sources. # Note: To get rid of all source code in the generated output, make sure also # VERBATIM_HEADERS is set to NO. SOURCE_BROWSER = NO # Setting the INLINE_SOURCES tag to YES will include the body # of functions and classes directly in the documentation. INLINE_SOURCES = NO # Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct # doxygen to hide any special comment blocks from generated source code # fragments. Normal C, C++ and Fortran comments will always remain visible. STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES # then for each documented function all documented # functions referencing it will be listed. REFERENCED_BY_RELATION = NO # If the REFERENCES_RELATION tag is set to YES # then for each documented function all documented entities # called/used by that function will be listed. REFERENCES_RELATION = NO # If the REFERENCES_LINK_SOURCE tag is set to YES (the default) # and SOURCE_BROWSER tag is set to YES, then the hyperlinks from # functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will # link to the source code. # Otherwise they will link to the documentation. REFERENCES_LINK_SOURCE = YES # If the USE_HTAGS tag is set to YES then the references to source code # will point to the HTML generated by the htags(1) tool instead of doxygen # built-in source browser. The htags tool is part of GNU's global source # tagging system (see http://www.gnu.org/software/global/global.html). You # will need version 4.8.6 or higher. USE_HTAGS = NO # If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen # will generate a verbatim copy of the header file for each class for # which an include is specified. Set to NO to disable this. VERBATIM_HEADERS = YES #--------------------------------------------------------------------------- # configuration options related to the alphabetical class index #--------------------------------------------------------------------------- # If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index # of all compounds will be generated. Enable this if the project # contains a lot of classes, structs, unions or interfaces. ALPHABETICAL_INDEX = YES # If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then # the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns # in which this list will be split (can be a number in the range [1..20]) COLS_IN_ALPHA_INDEX = 5 # In case all classes in a project start with a common prefix, all # classes will be put under the same header in the alphabetical index. # The IGNORE_PREFIX tag can be used to specify one or more prefixes that # should be ignored while generating the index headers. IGNORE_PREFIX = #--------------------------------------------------------------------------- # configuration options related to the HTML output #--------------------------------------------------------------------------- # If the GENERATE_HTML tag is set to YES (the default) Doxygen will # generate HTML output. GENERATE_HTML = YES # The HTML_OUTPUT tag is used to specify where the HTML docs will be put. 
# If a relative path is entered the value of OUTPUT_DIRECTORY will be # put in front of it. If left blank `html' will be used as the default path. HTML_OUTPUT = html # The HTML_FILE_EXTENSION tag can be used to specify the file extension for # each generated HTML page (for example: .htm,.php,.asp). If it is left blank # doxygen will generate files with .html extension. HTML_FILE_EXTENSION = .html # The HTML_HEADER tag can be used to specify a personal HTML header for # each generated HTML page. If it is left blank doxygen will generate a # standard header. Note that when using a custom header you are responsible # for the proper inclusion of any scripts and style sheets that doxygen # needs, which is dependent on the configuration options used. # It is advised to generate a default header using "doxygen -w html # header.html footer.html stylesheet.css YourConfigFile" and then modify # that header. Note that the header is subject to change so you typically # have to redo this when upgrading to a newer version of doxygen or when # changing the value of configuration settings such as GENERATE_TREEVIEW! HTML_HEADER = # The HTML_FOOTER tag can be used to specify a personal HTML footer for # each generated HTML page. If it is left blank doxygen will generate a # standard footer. HTML_FOOTER = # The HTML_STYLESHEET tag can be used to specify a user-defined cascading # style sheet that is used by each HTML page. It can be used to # fine-tune the look of the HTML output. If left blank doxygen will # generate a default style sheet. Note that it is recommended to use # HTML_EXTRA_STYLESHEET instead of this one, as it is more robust and this # tag will in the future become obsolete. HTML_STYLESHEET = # The HTML_EXTRA_STYLESHEET tag can be used to specify an additional # user-defined cascading style sheet that is included after the standard # style sheets created by doxygen. Using this option one can overrule # certain style aspects. This is preferred over using HTML_STYLESHEET # since it does not replace the standard style sheet and is therefor more # robust against future updates. Doxygen will copy the style sheet file to # the output directory. HTML_EXTRA_STYLESHEET = # The HTML_EXTRA_FILES tag can be used to specify one or more extra images or # other source files which should be copied to the HTML output directory. Note # that these files will be copied to the base HTML output directory. Use the # $relpath$ marker in the HTML_HEADER and/or HTML_FOOTER files to load these # files. In the HTML_STYLESHEET file, use the file name only. Also note that # the files will be copied as-is; there are no commands or markers available. HTML_EXTRA_FILES = # The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. # Doxygen will adjust the colors in the style sheet and background images # according to this color. Hue is specified as an angle on a colorwheel, # see http://en.wikipedia.org/wiki/Hue for more information. # For instance the value 0 represents red, 60 is yellow, 120 is green, # 180 is cyan, 240 is blue, 300 purple, and 360 is red again. # The allowed range is 0 to 359. HTML_COLORSTYLE_HUE = 220 # The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of # the colors in the HTML output. For a value of 0 the output will use # grayscales only. A value of 255 will produce the most vivid colors. HTML_COLORSTYLE_SAT = 100 # The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to # the luminance component of the colors in the HTML output. 
Values below # 100 gradually make the output lighter, whereas values above 100 make # the output darker. The value divided by 100 is the actual gamma applied, # so 80 represents a gamma of 0.8, The value 220 represents a gamma of 2.2, # and 100 does not change the gamma. HTML_COLORSTYLE_GAMMA = 80 # If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML # page will contain the date and time when the page was generated. Setting # this to NO can help when comparing the output of multiple runs. HTML_TIMESTAMP = YES # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. HTML_DYNAMIC_SECTIONS = NO # With HTML_INDEX_NUM_ENTRIES one can control the preferred number of # entries shown in the various tree structured indices initially; the user # can expand and collapse entries dynamically later on. Doxygen will expand # the tree to such a level that at most the specified number of entries are # visible (unless a fully collapsed tree already exceeds this amount). # So setting the number of entries 1 will produce a full collapsed tree by # default. 0 is a special value representing an infinite number of entries # and will result in a full expanded tree by default. HTML_INDEX_NUM_ENTRIES = 100 # If the GENERATE_DOCSET tag is set to YES, additional index files # will be generated that can be used as input for Apple's Xcode 3 # integrated development environment, introduced with OSX 10.5 (Leopard). # To create a documentation set, doxygen will generate a Makefile in the # HTML output directory. Running make will produce the docset in that # directory and running "make install" will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find # it at startup. # See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html # for more information. GENERATE_DOCSET = NO # When GENERATE_DOCSET tag is set to YES, this tag determines the name of the # feed. A documentation feed provides an umbrella under which multiple # documentation sets from a single provider (such as a company or product suite) # can be grouped. DOCSET_FEEDNAME = "Doxygen generated docs" # When GENERATE_DOCSET tag is set to YES, this tag specifies a string that # should uniquely identify the documentation set bundle. This should be a # reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen # will append .docset to the name. DOCSET_BUNDLE_ID = org.doxygen.Project # When GENERATE_PUBLISHER_ID tag specifies a string that should uniquely # identify the documentation publisher. This should be a reverse domain-name # style string, e.g. com.mycompany.MyDocSet.documentation. DOCSET_PUBLISHER_ID = org.doxygen.Publisher # The GENERATE_PUBLISHER_NAME tag identifies the documentation publisher. DOCSET_PUBLISHER_NAME = Publisher # If the GENERATE_HTMLHELP tag is set to YES, additional index files # will be generated that can be used as input for tools like the # Microsoft HTML help workshop to generate a compiled HTML help file (.chm) # of the generated HTML documentation. GENERATE_HTMLHELP = NO # If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can # be used to specify the file name of the resulting .chm file. You # can add a path in front of the file if the result should not be # written to the html output directory. 
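# For illustration only (HTML Help generation is disabled in this
# configuration, and the file name is an assumption): the compiled help file
# could be written outside the html output directory with something like
#   CHM_FILE = ../nordugrid-arc-sdk.chm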
CHM_FILE = # If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can # be used to specify the location (absolute path including file name) of # the HTML help compiler (hhc.exe). If non-empty doxygen will try to run # the HTML help compiler on the generated index.hhp. HHC_LOCATION = # If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag # controls if a separate .chi index file is generated (YES) or that # it should be included in the master .chm file (NO). GENERATE_CHI = NO # If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING # is used to encode HtmlHelp index (hhk), content (hhc) and project file # content. CHM_INDEX_ENCODING = # If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag # controls whether a binary table of contents is generated (YES) or a # normal table of contents (NO) in the .chm file. BINARY_TOC = NO # The TOC_EXPAND flag can be set to YES to add extra items for group members # to the contents of the HTML help documentation and to the tree view. TOC_EXPAND = NO # If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and # QHP_VIRTUAL_FOLDER are set, an additional index file will be generated # that can be used as input for Qt's qhelpgenerator to generate a # Qt Compressed Help (.qch) of the generated HTML documentation. GENERATE_QHP = NO # If the QHG_LOCATION tag is specified, the QCH_FILE tag can # be used to specify the file name of the resulting .qch file. # The path specified is relative to the HTML output folder. QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating # Qt Help Project output. For more information please see # http://doc.trolltech.com/qthelpproject.html#namespace QHP_NAMESPACE = org.doxygen.Project # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating # Qt Help Project output. For more information please see # http://doc.trolltech.com/qthelpproject.html#virtual-folders QHP_VIRTUAL_FOLDER = doc # If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to # add. For more information please see # http://doc.trolltech.com/qthelpproject.html#custom-filters QHP_CUST_FILTER_NAME = # The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see # # Qt Help Project / Custom Filters. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's # filter section matches. # # Qt Help Project / Filter Attributes. QHP_SECT_FILTER_ATTRS = # If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can # be used to specify the location of Qt's qhelpgenerator. # If non-empty doxygen will try to run qhelpgenerator on the generated # .qhp file. QHG_LOCATION = # If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files # will be generated, which together with the HTML files, form an Eclipse help # plugin. To install this plugin and make it available under the help contents # menu in Eclipse, the contents of the directory containing the HTML and XML # files needs to be copied into the plugins directory of eclipse. The name of # the directory within the plugins directory should be the same as # the ECLIPSE_DOC_ID value. After copying Eclipse needs to be restarted before # the help appears. GENERATE_ECLIPSEHELP = NO # A unique identifier for the eclipse help plugin. When installing the plugin # the directory name containing the HTML and XML files should also have # this name. 
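# If Eclipse help were enabled, a project-specific identifier would normally
# replace the doxygen default kept below; a hypothetical value could be
#   ECLIPSE_DOC_ID = org.nordugrid.arc.sdk
# (identifier shown for illustration only).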
ECLIPSE_DOC_ID = org.doxygen.Project # The DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) # at top of each HTML page. The value NO (the default) enables the index and # the value YES disables it. Since the tabs have the same information as the # navigation tree you can set this option to YES if you already set # GENERATE_TREEVIEW to YES. DISABLE_INDEX = NO # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. # If the tag value is set to YES, a side panel will be generated # containing a tree-like index structure (just like the one that # is generated for HTML Help). For this to work a browser that supports # JavaScript, DHTML, CSS and frames is required (i.e. any modern browser). # Windows users are probably better off using the HTML help feature. # Since the tree basically has the same information as the tab index you # could consider setting DISABLE_INDEX to YES when enabling this option. GENERATE_TREEVIEW = NO # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values # (range [0,1..20]) that doxygen will group on one line in the generated HTML # documentation. Note that a value of 0 will completely suppress the enum # values from appearing in the overview section. ENUM_VALUES_PER_LINE = 4 # If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be # used to set the initial width (in pixels) of the frame in which the tree # is shown. TREEVIEW_WIDTH = 250 # When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open # links to external symbols imported via tag files in a separate window. EXT_LINKS_IN_WINDOW = NO # Use this tag to change the font size of LaTeX formulas included # as images in the HTML documentation. The default is 10. Note that # when you change the font size after a successful doxygen run you need # to manually remove any form_*.png images from the HTML output directory # to force them to be regenerated. FORMULA_FONTSIZE = 10 # Use the FORMULA_TRANSPARENT tag to determine whether or not the images # generated for formulas are transparent PNGs. Transparent PNGs are # not supported properly for IE 6.0, but are supported on all modern browsers. # Note that when changing this option you need to delete any form_*.png files # in the HTML output before the changes take effect. FORMULA_TRANSPARENT = YES # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax # (see http://www.mathjax.org) which uses client-side JavaScript for the # rendering instead of using prerendered bitmaps. Use this if you do not # have LaTeX installed or if you want the formulas to look prettier in the HTML # output. When enabled you may also need to install MathJax separately and # configure the path to it using the MATHJAX_RELPATH option. USE_MATHJAX = NO # When MathJax is enabled you can set the default output format to be used for # the MathJax output. Supported types are HTML-CSS, NativeMML (i.e. MathML) and # SVG. The default value is HTML-CSS, which is slower, but has the best # compatibility. MATHJAX_FORMAT = HTML-CSS # When MathJax is enabled you need to specify the location relative to the # HTML output directory using the MATHJAX_RELPATH option. The destination # directory should contain the MathJax.js script. For instance, if the mathjax # directory is located at the same level as the HTML output directory, then # MATHJAX_RELPATH should be ../mathjax.
The default value points to # the MathJax Content Delivery Network so you can quickly see the result without # installing MathJax. # However, it is strongly recommended to install a local # copy of MathJax from http://www.mathjax.org before deployment. MATHJAX_RELPATH = http://www.mathjax.org/mathjax # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax extension # names that should be enabled during MathJax rendering. MATHJAX_EXTENSIONS = # When the SEARCHENGINE tag is enabled doxygen will generate a search box # for the HTML output. The underlying search engine uses javascript # and DHTML and should work on any modern browser. Note that when using # HTML help (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets # (GENERATE_DOCSET) there is already a search function so this one should # typically be disabled. For large projects the javascript based search engine # can be slow; in that case enabling SERVER_BASED_SEARCH may provide a better solution. SEARCHENGINE = NO # When the SERVER_BASED_SEARCH tag is enabled the search engine will be # implemented using a web server instead of a web client using Javascript. # There are two flavours of web server based search depending on the # EXTERNAL_SEARCH setting. When disabled, doxygen will generate a PHP script for # searching and an index file used by the script. When EXTERNAL_SEARCH is # enabled the indexing and searching needs to be provided by external tools. # See the manual for details. SERVER_BASED_SEARCH = NO # When EXTERNAL_SEARCH is enabled doxygen will no longer generate the PHP # script for searching. Instead the search results are written to an XML file # which needs to be processed by an external indexer. Doxygen will invoke an # external search engine pointed to by the SEARCHENGINE_URL option to obtain # the search results. Doxygen ships with an example indexer (doxyindexer) and # search engine (doxysearch.cgi) which are based on the open source search engine # library Xapian. See the manual for configuration details. EXTERNAL_SEARCH = NO # The SEARCHENGINE_URL should point to a search engine hosted by a web server # which will return the search results when EXTERNAL_SEARCH is enabled. # Doxygen ships with an example search engine (doxysearch) which is based on # the open source search engine library Xapian. See the manual for configuration # details. SEARCHENGINE_URL = # When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed # search data is written to a file for indexing by an external tool. With the # SEARCHDATA_FILE tag the name of this file can be specified. SEARCHDATA_FILE = searchdata.xml # When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the # EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is # useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple # projects and redirect the results back to the right project. EXTERNAL_SEARCH_ID = # The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen # projects other than the one defined by this configuration file, but that are # all added to the same external search index. Each project needs to have a # unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id # of a project to a relative location where the documentation can be found. # The format is: EXTRA_SEARCH_MAPPINGS = id1=loc1 id2=loc2 ...
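# A sketch of the mapping syntax only (external search is disabled above, and
# the ids and paths are hypothetical): two projects indexed together could be
# mapped as
#   EXTRA_SEARCH_MAPPINGS = arcsdk=../sdk/html arcclients=../clients/html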
EXTRA_SEARCH_MAPPINGS = #--------------------------------------------------------------------------- # configuration options related to the LaTeX output #--------------------------------------------------------------------------- # If the GENERATE_LATEX tag is set to YES (the default) Doxygen will # generate Latex output. GENERATE_LATEX = NO # The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. # If a relative path is entered the value of OUTPUT_DIRECTORY will be # put in front of it. If left blank `latex' will be used as the default path. LATEX_OUTPUT = latex # The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be # invoked. If left blank `latex' will be used as the default command name. # Note that when enabling USE_PDFLATEX this option is only used for # generating bitmaps for formulas in the HTML output, but not in the # Makefile that is written to the output directory. LATEX_CMD_NAME = latex # The MAKEINDEX_CMD_NAME tag can be used to specify the command name to # generate index for LaTeX. If left blank `makeindex' will be used as the # default command name. MAKEINDEX_CMD_NAME = makeindex # If the COMPACT_LATEX tag is set to YES Doxygen generates more compact # LaTeX documents. This may be useful for small projects and may help to # save some trees in general. COMPACT_LATEX = NO # The PAPER_TYPE tag can be used to set the paper type that is used # by the printer. Possible values are: a4, letter, legal and # executive. If left blank a4wide will be used. PAPER_TYPE = a4wide # The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX # packages that should be included in the LaTeX output. EXTRA_PACKAGES = # The LATEX_HEADER tag can be used to specify a personal LaTeX header for # the generated latex document. The header should contain everything until # the first chapter. If it is left blank doxygen will generate a # standard header. Notice: only use this tag if you know what you are doing! LATEX_HEADER = # The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for # the generated latex document. The footer should contain everything after # the last chapter. If it is left blank doxygen will generate a # standard footer. Notice: only use this tag if you know what you are doing! LATEX_FOOTER = # If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated # is prepared for conversion to pdf (using ps2pdf). The pdf file will # contain links (just like the HTML output) instead of page references # This makes the output suitable for online browsing using a pdf viewer. PDF_HYPERLINKS = YES # If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of # plain latex in the generated Makefile. Set this option to YES to get a # higher quality PDF documentation. USE_PDFLATEX = YES # If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. # command to the generated LaTeX files. This will instruct LaTeX to keep # running if errors occur, instead of asking the user for help. # This option is also used when generating formulas in HTML. LATEX_BATCHMODE = NO # If LATEX_HIDE_INDICES is set to YES then doxygen will not # include the index chapters (such as File Index, Compound Index, etc.) # in the output. LATEX_HIDE_INDICES = NO # If LATEX_SOURCE_CODE is set to YES then doxygen will include # source code with syntax highlighting in the LaTeX output. # Note that which sources are shown also depends on other settings # such as SOURCE_BROWSER. 
LATEX_SOURCE_CODE = NO # The LATEX_BIB_STYLE tag can be used to specify the style to use for the # bibliography, e.g. plainnat, or ieeetr. The default style is "plain". See # http://en.wikipedia.org/wiki/BibTeX for more info. LATEX_BIB_STYLE = plain #--------------------------------------------------------------------------- # configuration options related to the RTF output #--------------------------------------------------------------------------- # If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output # The RTF output is optimized for Word 97 and may not look very pretty with # other RTF readers or editors. GENERATE_RTF = NO # The RTF_OUTPUT tag is used to specify where the RTF docs will be put. # If a relative path is entered the value of OUTPUT_DIRECTORY will be # put in front of it. If left blank `rtf' will be used as the default path. RTF_OUTPUT = rtf # If the COMPACT_RTF tag is set to YES Doxygen generates more compact # RTF documents. This may be useful for small projects and may help to # save some trees in general. COMPACT_RTF = NO # If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated # will contain hyperlink fields. The RTF file will # contain links (just like the HTML output) instead of page references. # This makes the output suitable for online browsing using WORD or other # programs which support those fields. # Note: wordpad (write) and others do not support links. RTF_HYPERLINKS = NO # Load style sheet definitions from file. Syntax is similar to doxygen's # config file, i.e. a series of assignments. You only have to provide # replacements, missing definitions are set to their default value. RTF_STYLESHEET_FILE = # Set optional variables used in the generation of an rtf document. # Syntax is similar to doxygen's config file. RTF_EXTENSIONS_FILE = #--------------------------------------------------------------------------- # configuration options related to the man page output #--------------------------------------------------------------------------- # If the GENERATE_MAN tag is set to YES (the default) Doxygen will # generate man pages GENERATE_MAN = NO # The MAN_OUTPUT tag is used to specify where the man pages will be put. # If a relative path is entered the value of OUTPUT_DIRECTORY will be # put in front of it. If left blank `man' will be used as the default path. MAN_OUTPUT = man # The MAN_EXTENSION tag determines the extension that is added to # the generated man pages (default is the subroutine's section .3) MAN_EXTENSION = .3 # If the MAN_LINKS tag is set to YES and Doxygen generates man output, # then it will generate one additional man file for each entity # documented in the real man page(s). These additional files # only source the real man page, but without them the man command # would be unable to find the correct page. The default is NO. MAN_LINKS = NO #--------------------------------------------------------------------------- # configuration options related to the XML output #--------------------------------------------------------------------------- # If the GENERATE_XML tag is set to YES Doxygen will # generate an XML file that captures the structure of # the code including all documentation. GENERATE_XML = NO # The XML_OUTPUT tag is used to specify where the XML pages will be put. # If a relative path is entered the value of OUTPUT_DIRECTORY will be # put in front of it. If left blank `xml' will be used as the default path. 
XML_OUTPUT = xml # The XML_SCHEMA tag can be used to specify an XML schema, # which can be used by a validating XML parser to check the # syntax of the XML files. XML_SCHEMA = # The XML_DTD tag can be used to specify an XML DTD, # which can be used by a validating XML parser to check the # syntax of the XML files. XML_DTD = # If the XML_PROGRAMLISTING tag is set to YES Doxygen will # dump the program listings (including syntax highlighting # and cross-referencing information) to the XML output. Note that # enabling this will significantly increase the size of the XML output. XML_PROGRAMLISTING = YES #--------------------------------------------------------------------------- # configuration options for the AutoGen Definitions output #--------------------------------------------------------------------------- # If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will # generate an AutoGen Definitions (see autogen.sf.net) file # that captures the structure of the code including all # documentation. Note that this feature is still experimental # and incomplete at the moment. GENERATE_AUTOGEN_DEF = NO #--------------------------------------------------------------------------- # configuration options related to the Perl module output #--------------------------------------------------------------------------- # If the GENERATE_PERLMOD tag is set to YES Doxygen will # generate a Perl module file that captures the structure of # the code including all documentation. Note that this # feature is still experimental and incomplete at the # moment. GENERATE_PERLMOD = NO # If the PERLMOD_LATEX tag is set to YES Doxygen will generate # the necessary Makefile rules, Perl scripts and LaTeX code to be able # to generate PDF and DVI output from the Perl module output. PERLMOD_LATEX = NO # If the PERLMOD_PRETTY tag is set to YES the Perl module output will be # nicely formatted so it can be parsed by a human reader. # This is useful # if you want to understand what is going on. # On the other hand, if this # tag is set to NO the size of the Perl module output will be much smaller # and Perl will parse it just the same. PERLMOD_PRETTY = YES # The names of the make variables in the generated doxyrules.make file # are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. # This is useful so different doxyrules.make files included by the same # Makefile don't overwrite each other's variables. PERLMOD_MAKEVAR_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the preprocessor #--------------------------------------------------------------------------- # If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will # evaluate all C-preprocessor directives found in the sources and include # files. ENABLE_PREPROCESSING = YES # If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro # names in the source code. If set to NO (the default) only conditional # compilation will be performed. Macro expansion can be done in a controlled # way by setting EXPAND_ONLY_PREDEF to YES. MACRO_EXPANSION = NO # If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES # then the macro expansion is limited to the macros specified with the # PREDEFINED and EXPAND_AS_DEFINED tags. EXPAND_ONLY_PREDEF = NO # If the SEARCH_INCLUDES tag is set to YES (the default) the includes files # pointed to by INCLUDE_PATH will be searched when a #include is found. 
SEARCH_INCLUDES = YES # The INCLUDE_PATH tag can be used to specify one or more directories that # contain include files that are not input files but should be processed by # the preprocessor. INCLUDE_PATH = # You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard # patterns (like *.h and *.hpp) to filter out the header-files in the # directories. If left blank, the patterns specified with FILE_PATTERNS will # be used. INCLUDE_FILE_PATTERNS = # The PREDEFINED tag can be used to specify one or more macro names that # are defined before the preprocessor is started (similar to the -D option of # gcc). The argument of the tag is a list of macros of the form: name # or name=definition (no spaces). If the definition and the = are # omitted =1 is assumed. To prevent a macro definition from being # undefined via #undef or recursively expanded use the := operator # instead of the = operator. PREDEFINED = # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then # this tag can be used to specify a list of macro names that should be expanded. # The macro definition that is found in the sources will be used. # Use the PREDEFINED tag if you want to use a different macro definition that # overrules the definition found in the source code. EXPAND_AS_DEFINED = # If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then # doxygen's preprocessor will remove all references to function-like macros # that are alone on a line, have an all uppercase name, and do not end with a # semicolon, because these will confuse the parser if not removed. SKIP_FUNCTION_MACROS = YES #--------------------------------------------------------------------------- # Configuration::additions related to external references #--------------------------------------------------------------------------- # The TAGFILES option can be used to specify one or more tagfiles. For each # tag file the location of the external documentation should be added. The # format of a tag file without this location is as follows: # # TAGFILES = file1 file2 ... # Adding location for the tag files is done as follows: # # TAGFILES = file1=loc1 "file2 = loc2" ... # where "loc1" and "loc2" can be relative or absolute paths # or URLs. Note that each tag file must have a unique name (where the name does # NOT include the path). If a tag file is not located in the directory in which # doxygen is run, you must also specify the path to the tagfile here. TAGFILES = # When a file name is specified after GENERATE_TAGFILE, doxygen will create # a tag file that is based on the input files it reads. GENERATE_TAGFILE = # If the ALLEXTERNALS tag is set to YES all external classes will be listed # in the class index. If set to NO only the inherited external classes # will be listed. ALLEXTERNALS = NO # If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed # in the modules index. If set to NO, only the current project's groups will # be listed. EXTERNAL_GROUPS = YES # The PERL_PATH should be the absolute path and name of the perl script # interpreter (i.e. the result of `which perl'). PERL_PATH = /usr/bin/perl #--------------------------------------------------------------------------- # Configuration options related to the dot tool #--------------------------------------------------------------------------- # If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will # generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base # or super classes. 
Setting the tag to NO turns the diagrams off. Note that # this option also works with HAVE_DOT disabled, but it is recommended to # install and use dot, since it yields more powerful graphs. CLASS_DIAGRAMS = YES # You can define message sequence charts within doxygen comments using the \msc # command. Doxygen will then run the mscgen tool (see # http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the # documentation. The MSCGEN_PATH tag allows you to specify the directory where # the mscgen tool resides. If left empty the tool is assumed to be found in the # default search path. MSCGEN_PATH = # If set to YES, the inheritance and collaboration graphs will hide # inheritance and usage relations if the target is undocumented # or is not a class. HIDE_UNDOC_RELATIONS = YES # If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is # available from the path. This tool is part of Graphviz, a graph visualization # toolkit from AT&T and Lucent Bell Labs. The other options in this section # have no effect if this option is set to NO (the default) HAVE_DOT = NO # The DOT_NUM_THREADS specifies the number of dot invocations doxygen is # allowed to run in parallel. When set to 0 (the default) doxygen will # base this on the number of processors available in the system. You can set it # explicitly to a value larger than 0 to get control over the balance # between CPU load and processing speed. DOT_NUM_THREADS = 0 # By default doxygen will use the Helvetica font for all dot files that # doxygen generates. When you want a differently looking font you can specify # the font name using DOT_FONTNAME. You need to make sure dot is able to find # the font, which can be done by putting it in a standard location or by setting # the DOTFONTPATH environment variable or by setting DOT_FONTPATH to the # directory containing the font. DOT_FONTNAME = Helvetica # The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. # The default size is 10pt. DOT_FONTSIZE = 10 # By default doxygen will tell dot to use the Helvetica font. # If you specify a different font using DOT_FONTNAME you can use DOT_FONTPATH to # set the path where dot can find it. DOT_FONTPATH = # If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen # will generate a graph for each documented class showing the direct and # indirect inheritance relations. Setting this tag to YES will force the # CLASS_DIAGRAMS tag to NO. CLASS_GRAPH = YES # If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen # will generate a graph for each documented class showing the direct and # indirect implementation dependencies (inheritance, containment, and # class references variables) of the class with other documented classes. COLLABORATION_GRAPH = YES # If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen # will generate a graph for groups, showing the direct groups dependencies GROUP_GRAPHS = YES # If the UML_LOOK tag is set to YES doxygen will generate inheritance and # collaboration diagrams in a style similar to the OMG's Unified Modeling # Language. UML_LOOK = NO # If the UML_LOOK tag is enabled, the fields and methods are shown inside # the class node. If there are many fields or methods and many nodes the # graph may become too big to be useful. The UML_LIMIT_NUM_FIELDS # threshold limits the number of items for each type to make the size more # managable. Set this to 0 for no limit. Note that the threshold may be # exceeded by 50% before the limit is enforced. 
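# To illustrate the threshold described above (based on that description, not
# verified against a doxygen run): with the limit of 10 set below, a class
# with up to 15 members of a given kind is still shown in full (10 plus the
# 50% margin), while a class with 16 or more has that list truncated to 10.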
UML_LIMIT_NUM_FIELDS = 10 # If set to YES, the inheritance and collaboration graphs will show the # relations between templates and their instances. TEMPLATE_RELATIONS = NO # If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT # tags are set to YES then doxygen will generate a graph for each documented # file showing the direct and indirect include dependencies of the file with # other documented files. INCLUDE_GRAPH = YES # If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and # HAVE_DOT tags are set to YES then doxygen will generate a graph for each # documented header file showing the documented files that directly or # indirectly include this file. INCLUDED_BY_GRAPH = YES # If the CALL_GRAPH and HAVE_DOT options are set to YES then # doxygen will generate a call dependency graph for every global function # or class method. Note that enabling this option will significantly increase # the time of a run. So in most cases it will be better to enable call graphs # for selected functions only using the \callgraph command. CALL_GRAPH = NO # If the CALLER_GRAPH and HAVE_DOT tags are set to YES then # doxygen will generate a caller dependency graph for every global function # or class method. Note that enabling this option will significantly increase # the time of a run. So in most cases it will be better to enable caller # graphs for selected functions only using the \callergraph command. CALLER_GRAPH = NO # If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen # will generate a graphical hierarchy of all classes instead of a textual one. GRAPHICAL_HIERARCHY = YES # If the DIRECTORY_GRAPH and HAVE_DOT tags are set to YES # then doxygen will show the dependencies a directory has on other directories # in a graphical way. The dependency relations are determined by the #include # relations between the files in the directories. DIRECTORY_GRAPH = YES # The DOT_IMAGE_FORMAT tag can be used to set the image format of the images # generated by dot. Possible values are svg, png, jpg, or gif. # If left blank png will be used. If you choose svg you need to set # HTML_FILE_EXTENSION to xhtml in order to make the SVG files # visible in IE 9+ (other browsers do not have this requirement). DOT_IMAGE_FORMAT = png # If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to # enable generation of interactive SVG images that allow zooming and panning. # Note that this requires a modern browser other than Internet Explorer. # Tested and working are Firefox, Chrome, Safari, and Opera. For IE 9+ you # need to set HTML_FILE_EXTENSION to xhtml in order to make the SVG files # visible. Older versions of IE do not have SVG support. INTERACTIVE_SVG = NO # The tag DOT_PATH can be used to specify the path where the dot tool can be # found. If left blank, it is assumed the dot tool can be found in the path. DOT_PATH = # The DOTFILE_DIRS tag can be used to specify one or more directories that # contain dot files that are included in the documentation (see the # \dotfile command). DOTFILE_DIRS = # The MSCFILE_DIRS tag can be used to specify one or more directories that # contain msc files that are included in the documentation (see the # \mscfile command). MSCFILE_DIRS = # The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of # nodes that will be shown in the graph. If the number of nodes in a graph # becomes larger than this value, doxygen will truncate the graph, which is # visualized by representing a node as a red box. 
Note that doxygen if the # number of direct children of the root node in a graph is already larger than # DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note # that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. DOT_GRAPH_MAX_NODES = 50 # The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the # graphs generated by dot. A depth value of 3 means that only nodes reachable # from the root by following a path via at most 3 edges will be shown. Nodes # that lay further from the root node will be omitted. Note that setting this # option to 1 or 2 may greatly reduce the computation time needed for large # code bases. Also note that the size of a graph can be further restricted by # DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. MAX_DOT_GRAPH_DEPTH = 0 # Set the DOT_TRANSPARENT tag to YES to generate images with a transparent # background. This is disabled by default, because dot on Windows does not # seem to support this out of the box. Warning: Depending on the platform used, # enabling this option may lead to badly anti-aliased labels on the edges of # a graph (i.e. they become hard to read). DOT_TRANSPARENT = NO # Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output # files in one run (i.e. multiple -o and -T options on the command line). This # makes dot run faster, but since only newer versions of dot (>1.8.10) # support this, this feature is disabled by default. DOT_MULTI_TARGETS = YES # If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will # generate a legend page explaining the meaning of the various boxes and # arrows in the dot generated graphs. GENERATE_LEGEND = YES # If the DOT_CLEANUP tag is set to YES (the default) Doxygen will # remove the intermediate dot files that are used to generate # the various graphs. DOT_CLEANUP = YES nordugrid-arc-6.14.0/src/doxygen/PaxHeaders.30264/create-mapping-documentation.py0000644000000000000000000000013214152153376025725 xustar000000000000000030 mtime=1638455038.342645028 30 atime=1638455038.475647026 30 ctime=1638455101.327591405 nordugrid-arc-6.14.0/src/doxygen/create-mapping-documentation.py0000644000175000002070000002261514152153376025720 0ustar00mockbuildmock00000000000000#!/usr/bin/env python # TODO: Document how to use. # TODO: Add list of the plugins which provides the mappings. # TODO: Deal with multiple values. # TODO: Deal with fixed values. # TODO: Deal with conditional values. # TODO: Deal with units # TODO: Deal with expressions # TODO: Deal with attributes in specialisation not mapped to library # TODO: Deal with attributes in library which is not mapped to specialisation # # # Usable commands and syntax: # Use in library files: # \mapdef \n # \mapdefattr # Use in specialisation files: # \mapname \n # \mapattr {->|<-} [""] # \mapnote from __future__ import print_function import sys, re # File to write documentation to outfilename = sys.argv[-1] sourcefilename = sys.argv[1] # Find files which contains documentation on mappings, i.e. specifies the \mapfile attribute. 
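# Sketch of the expected invocation and input format (the script itself does not
# document these; the file and attribute names below are hypothetical examples,
# only the argument order and the command keywords follow the argv handling and
# regular expressions used in this script):
#
#   create-mapping-documentation.py LibraryHeader.h PluginA.cpp PluginB.cpp mapping.dox
#
# Library file (first argument), defining the mapping page and its attributes:
#   /// \mapdef attrmapping Mapping to libarccompute attributes
#   ///         Free-text description of the mapping page.
#   /// \mapdefattr JobDescription::Identification::JobName Arc
#
# Specialisation files (middle arguments), describing one plugin each:
#   /// \mapname PluginA Plugin A
#   ///          Free-text description of the plugin.
#   /// \mapattr Name -> JobDescription::Identification::JobName "only if set"
#   /// \mapnote Values are taken verbatim from the job description.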
mapfiles = sys.argv[2:-1] mapdef = {"id" : "", "name" : "", "description" : "", "attributes" : [], "attributeprefixes" : []} inMapdef = False justAfterMapdef = False # Go through library file sourcefile = open(sourcefilename, "r") i = 0 for line in sourcefile: i += 1 line = line.strip().lstrip("*").lstrip() if line[0:3] == "///": line = line.lstrip("/").lstrip() if justAfterMapdef: if line == "" or line == "/": justAfterMapdef = False continue mapdef["description"] += line + " " continue elif line[0:12] == "\mapdefattr ": regMatch = re.match("([^\s]+)\s+([^\s]+)", line[12:].lstrip()) if not regMatch: print("ERROR: Wrong format of the \mapdefattr attribute in '%s' file on line %d" % (sourcefilename, i)) sys.exit(1) mapdef["attributes"].append(regMatch.group(1)) mapdef["attributeprefixes"].append(regMatch.group(2)) elif line[0:8] == "\mapdef ": regMatch = re.match("(\w+)\s+(.+)", line[8:].lstrip()) if not regMatch: print("ERROR: Wrong format of the \mapdef attribute in '%s' file on line %d" % (sourcefilename, i)) sys.exit(1) mapdef["id"] = regMatch.group(1) mapdef["name"] = regMatch.group(2) inMapdef = True justAfterMapdef = True continue sourcefile.close() # Go through specialisation files mappings = [] for filename in mapfiles: m = {"id" : "", "name" : "", "description" : [], "notes" : [], "attributes" : {}} for attr in mapdef["attributes"]: m["attributes"][attr] = {} m["attributes"][attr]["in"] = [] m["attributes"][attr]["out"] = [] m["attributes"][attr]["in-note"] = [] m["attributes"][attr]["out-note"] = [] f = open(filename, "r") justAfterMapName = False i = 0 for line in f: i += 1 line = line.strip() if line[0:3] != "///": justAfterMapName = False continue line = line[3:].lstrip() if line[0:9] == "\mapname ": regMatch = re.match("(\w+)\s+(.+)", line[9:].lstrip()) if not regMatch: print("ERROR: Wrong format of the \mapname command in '%s' file on line %d" % (filename, i)) sys.exit(1) m["id"] = regMatch.group(1) m["name"] = regMatch.group(2) justAfterMapName = True elif line[0:9] == "\mapnote ": justAfterMapdef = False m["notes"].append(line[9:].lstrip()) elif line[0:9] == "\mapattr ": justAfterMapdef = False # -> [""] regMatch = re.match("(.+)\s+->\s+([^\s]+)(?:\s+\"([^\"]+)\")?", line[9:]) if regMatch: if regMatch.group(2) not in m["attributes"]: print("ERROR: The '%s' attribute present in file '%s' on line %d is not defined in file '%s'" % (regMatch.group(2), filename, i, sourcefilename)) sys.exit(1) m["attributes"][regMatch.group(2)]["in"].append(regMatch.group(1)) if regMatch.group(3): m["attributes"][regMatch.group(2)]["in-note"].append(regMatch.group(3)) continue regMatch = re.match("(.+)\s+<-\s+([^\s]+)(?:\s+\"([^\"]+)\")?", line[9:]) if regMatch: if regMatch.group(2) not in m["attributes"]: print("ERROR: The '%s' attribute present in file '%s' on line %d is not defined in file '%s'" % (regMatch.group(2), filename, i, sourcefilename)) sys.exit(1) m["attributes"][regMatch.group(2)]["out"].append(regMatch.group(1)) if regMatch.group(3): m["attributes"][regMatch.group(2)]["out-note"].append(regMatch.group(3)) continue elif justAfterMapName: m["description"].append(line) mappings.append(m) f.close() # Write mapping to doxygen formatted file. outfile = open(outfilename, "w") outfile.write("/** \n") outfile.write("\\page {id} {name}\n{description}\n".format(**mapdef)) outfile.write("\\tableofcontents\n") # Create mapping per lib. 
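# At this point the parsed data has roughly the following shape (the keys are
# the ones filled in by the loops above; the attribute and plugin names are the
# same hypothetical examples as in the sketch near the top of this script):
#
#   mapdef = {"id": "attrmapping",
#             "name": "Mapping to libarccompute attributes",
#             "description": "Free-text description of the mapping page. ",
#             "attributes": ["JobDescription::Identification::JobName"],
#             "attributeprefixes": ["Arc"]}
#
#   mappings = [{"id": "PluginA",
#                "name": "Plugin A",
#                "description": ["Free-text description of the plugin."],
#                "notes": ["Values are taken verbatim from the job description."],
#                "attributes": {"JobDescription::Identification::JobName":
#                                   {"in": ["Name"], "in-note": ["only if set"],
#                                    "out": [], "out-note": []}}}]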
attribute outfile.write("\\section attr Grouped by libarccompute attributes\n") for i in range(len(mapdef["attributes"])): outfile.write("\n\\subsection attr_{formatted_attr} {attr}\n".format(formatted_attr = re.sub('::', "_", mapdef["attributes"][i]), attr = mapdef["attributes"][i])) outfile.write("\\ref {prefix}::{attr} \"Attribute description\"\n\n".format(attr = mapdef["attributes"][i], prefix = mapdef["attributeprefixes"][i])) has_input = has_output = False attributes_to_write_to_table = "" for m in mappings: has_input = has_input or m["attributes"][mapdef["attributes"][i]]["in"] has_output = has_output or m["attributes"][mapdef["attributes"][i]]["out"] notes = [] for m in mappings: attr = m["attributes"][mapdef["attributes"][i]] if attr["in"] or attr["out"]: attributes_to_write_to_table += "| %s |" % (m["name"]) if has_input: attributes_to_write_to_table += " %s" % (",
".join(attr["in"])) if attr["in-note"]: attributes_to_write_to_table += "[%s]" % ("][".join(str(x) for x in range(len(notes)+1, len(notes)+1+len(attr["in-note"])))) notes += attr["in-note"] attributes_to_write_to_table += " |" if has_output else "" if has_output: attributes_to_write_to_table += " %s" % (",
".join(attr["out"])) if attr["out-note"]: attributes_to_write_to_table += "[%s]" % ("][".join(str(x) for x in range(len(notes)+1, len(notes)+1+len(attr["out-note"])))) notes += attr["out-note"] attributes_to_write_to_table += " |\n" if attributes_to_write_to_table and (has_input or has_output): table_header = "| Specialisation" table_header += " | Input" if has_input else "" table_header += " | Output" if has_output else "" outfile.write(table_header + " |\n") outfile.write(re.sub(r'[ \w]', '-', table_header) + " |\n") outfile.write(attributes_to_write_to_table) if notes: outfile.write("Notes:
  1. %s
" % ("
  • ".join(notes))) else: outfile.write("No specialisations maps attributes to this field/value.\n") # Create mapping per specialisation outfile.write("\\section specialisation Grouped by plugin\n") for m in mappings: outfile.write("\n\\subsection specialisation_{id} {name}\n".format(**m)) if m["description"]: outfile.write(" ".join(m["description"]) + "\n") if len(m["notes"]) > 0: outfile.write('
    \n
    Note
    \n') for note in m["notes"]: outfile.write('
    ' + note + '
    \n') outfile.write('
    \n') has_input = has_output = False for attr, m_attrs in m["attributes"].items(): has_input = has_input or bool(m_attrs["in"]) has_output = has_output or bool(m_attrs["out"]) table_header = "| Input " if has_input else "" table_header += "| Lib. attr. |" table_header += " Output |" if has_output else "" outfile.write(table_header + "\n") outfile.write(re.sub(r'[. \w]', '-', table_header) + "\n") notes = [] for attr, m_attrs in m["attributes"].items(): if not m_attrs["in"] and not m_attrs["out"]: continue line = "" if has_input: line += "| %s" % (", ".join(m_attrs["in"])) if m_attrs["in-note"]: line += "[%s]" % ("][".join(str(x) for x in range(len(notes)+1, len(notes)+1+len(m_attrs["in-note"])))) notes += m_attrs["in-note"] line += " " line += "| \\ref Arc::" + attr + ' "' + attr + '" |' if has_output: line += " %s" % (", ".join(m_attrs["out"])) if m_attrs["out-note"]: line += "[%s]" % ("][".join(str(x) for x in range(len(notes)+1, len(notes)+1+len(m_attrs["out-note"])))) notes += m_attrs["out-note"] line += " |" outfile.write(line + '\n') if notes: outfile.write("Notes:
    1. %s
    " % ("
  • ".join(notes))) outfile.write("**/\n") nordugrid-arc-6.14.0/src/PaxHeaders.30264/libs0000644000000000000000000000013214152153472017004 xustar000000000000000030 mtime=1638455098.819553721 30 atime=1638455103.996631509 30 ctime=1638455098.819553721 nordugrid-arc-6.14.0/src/libs/0000755000175000002070000000000014152153472017046 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/libs/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376021120 xustar000000000000000030 mtime=1638455038.402645929 30 atime=1638455038.498647372 30 ctime=1638455098.816553676 nordugrid-arc-6.14.0/src/libs/Makefile.am0000644000175000002070000000006314152153376021104 0ustar00mockbuildmock00000000000000SUBDIRS = data-staging DIST_SUBDIRS = data-staging nordugrid-arc-6.14.0/src/libs/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153431021121 xustar000000000000000030 mtime=1638455065.943059736 30 atime=1638455089.475413321 30 ctime=1638455098.815553661 nordugrid-arc-6.14.0/src/libs/Makefile.in0000644000175000002070000006104314152153431021112 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/libs DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive 
clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = 
@AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = 
@OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = 
@prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ SUBDIRS = data-staging DIST_SUBDIRS = data-staging all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/libs/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/libs/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/libs/PaxHeaders.30264/data-staging0000644000000000000000000000013214152153472021347 xustar000000000000000030 mtime=1638455098.885554713 30 atime=1638455103.996631509 30 ctime=1638455098.885554713 nordugrid-arc-6.14.0/src/libs/data-staging/0000755000175000002070000000000014152153472021411 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376023463 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.839554021 nordugrid-arc-6.14.0/src/libs/data-staging/Makefile.am0000644000175000002070000000271614152153376023456 0ustar00mockbuildmock00000000000000DIST_SUBDIRS = test examples SUBDIRS = . 
$(TEST_DIR) examples lib_LTLIBRARIES = libarcdatastaging.la libarcdatastaging_ladir = $(pkgincludedir)/data-staging libarcdatastaging_la_HEADERS = DataDelivery.h DataDeliveryComm.h \ DataDeliveryLocalComm.h DataDeliveryRemoteComm.h DTR.h DTRList.h \ DTRStatus.h Processor.h Scheduler.h TransferShares.h libarcdatastaging_la_SOURCES = DataDelivery.cpp DataDeliveryComm.cpp \ DataDeliveryLocalComm.cpp DataDeliveryRemoteComm.cpp DTR.cpp DTRList.cpp \ DTRStatus.cpp Processor.cpp Scheduler.cpp TransferShares.cpp libarcdatastaging_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) libarcdatastaging_la_LIBADD = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(GLIBMM_LIBS) libarcdatastaging_la_LDFLAGS = -version-info 3:0:0 pgmpkglibdir = $(pkglibdir) pgmpkglib_PROGRAMS = DataStagingDelivery DataStagingDelivery_SOURCES = DataStagingDelivery.cpp DataStagingDelivery_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) DataStagingDelivery_LDADD = \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLIBMM_LIBS) nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DTRList.h0000644000000000000000000000013214152153376023065 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.844554097 nordugrid-arc-6.14.0/src/libs/data-staging/DTRList.h0000644000175000002070000001172014152153376023053 0ustar00mockbuildmock00000000000000#ifndef DTRLIST_H_ #define DTRLIST_H_ #include #include "DTR.h" namespace DataStaging { /// Global list of all active DTRs in the system. /** * This class contains several methods for filtering the list by owner, state * etc. * \ingroup datastaging * \headerfile DTRList.h arc/data-staging/DTRList.h */ class DTRList { private: /// Internal list of DTRs std::list DTRs; /// Lock to protect list during modification Arc::SimpleCondition Lock; /// Internal set of sources that are currently being cached. /** * The source is mapped to the highest priority among all the DTRs with * that source. */ std::map CachingSources; /// Lock to protect caching sources set during modification Arc::SimpleCondition CachingLock; public: /// Put a new DTR into the list. bool add_dtr(DTR_ptr DTRToAdd); /// Remove a DTR from the list. bool delete_dtr(DTR_ptr DTRToDelete); /// Filter the queue to select DTRs owned by a specified process. /** * @param OwnerToFilter The owner to filter on * @param FilteredList This list is filled with filtered DTRs */ bool filter_dtrs_by_owner(StagingProcesses OwnerToFilter, std::list& FilteredList); /// Returns the number of DTRs owned by a particular process int number_of_dtrs_by_owner(StagingProcesses OwnerToFilter); /// Filter the queue to select DTRs with particular status. /** * If we have only one common queue for all DTRs, this method is * necessary to make virtual queues for the DTRs about to go into the * pre-, post-processor or delivery stages. * @param StatusToFilter DTR status to filter on * @param FilteredList This list is filled with filtered DTRs */ bool filter_dtrs_by_status(DTRStatus::DTRStatusType StatusToFilter, std::list& FilteredList); /// Filter the queue to select DTRs with particular statuses. 
/** * @param StatusesToFilter Vector of DTR statuses to filter on * @param FilteredList This list is filled with filtered DTRs */ bool filter_dtrs_by_statuses(const std::vector& StatusesToFilter, std::list& FilteredList); /// Filter the queue to select DTRs with particular statuses. /** * @param StatusesToFilter Vector of DTR statuses to filter on * @param FilteredList This map is filled with filtered DTRs, * one list per state. */ bool filter_dtrs_by_statuses(const std::vector& StatusesToFilter, std::map >& FilteredList); /// Select DTRs that are about to go to the specified process. /** * This selection is actually a virtual queue for pre-, post-processor * and delivery. * @param NextReceiver The process to filter on * @param FilteredList This list is filled with filtered DTRs */ bool filter_dtrs_by_next_receiver(StagingProcesses NextReceiver, std::list& FilteredList); /// Select DTRs that have just arrived from pre-, post-processor, delivery or generator. /** * These DTRs need some reaction from the scheduler. This selection is * actually a virtual queue of DTRs that need to be processed. * @param FilteredList This list is filled with filtered DTRs */ bool filter_pending_dtrs(std::list& FilteredList); /// Get the list of DTRs corresponding to the given job ID. /** * @param jobid Job id to filter on * @param FilteredList This list is filled with filtered DTRs */ bool filter_dtrs_by_job(const std::string& jobid, std::list& FilteredList); /// Check for requested changes in priority in filename /** * @param filename File which is checked for priority changes */ void check_priority_changes(const std::string& filename); /// Update the caching set, add a DTR (only if it is CACHEABLE). void caching_started(DTR_ptr request); /// Update the caching set, removing a DTR. void caching_finished(DTR_ptr request); /// Returns true if the DTR's source is currently in the caching set. bool is_being_cached(DTR_ptr DTRToCheck); /// Returns true if there are no DTRs in the list bool empty(); /// Get the list of all job IDs std::list all_jobs(); /// Return the size of the DTR list unsigned int size(); /// Dump state of all current DTRs to a destination, eg file, database, url... /** * Currently only file is supported. * @param path Path to the file in which to dump state. */ void dumpState(const std::string& path); }; } // namespace DataStaging #endif /*DTRLIST_H_*/ nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/Processor.h0000644000000000000000000000013214152153376023557 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.846554127 nordugrid-arc-6.14.0/src/libs/data-staging/Processor.h0000644000175000002070000000744114152153376023552 0ustar00mockbuildmock00000000000000#ifndef PROCESSOR_H_ #define PROCESSOR_H_ #include #include "DTR.h" namespace DataStaging { /// The Processor performs pre- and post-transfer operations. /** * The Processor takes care of everything that should happen before * and after a transfer takes place. Calling receiveDTR() spawns a * thread to perform the required operation depending on the DTR state. 
* \ingroup datastaging * \headerfile Processor.h arc/data-staging/Processor.h */ class Processor: public DTRCallback { private: /// Private copy constructor because Processor should not be copied Processor(const Processor&); /// Private assignment operator because Processor should not be copied Processor& operator=(const Processor&); /// Class used to pass information to spawned thread class ThreadArgument { public: Processor* proc; DTR_ptr dtr; ThreadArgument(Processor* proc_, DTR_ptr dtr_):proc(proc_),dtr(dtr_) { }; }; /// Class used to pass information to spawned thread (for bulk operations) class BulkThreadArgument { public: Processor* proc; std::list dtrs; BulkThreadArgument(Processor* proc_, const std::list& dtrs_):proc(proc_),dtrs(dtrs_) { }; }; /// Counter of active threads Arc::SimpleCounter thread_count; /// List of DTRs to be processed in bulk. Filled between receiveDTR /// receiving a DTR with bulk_start on and receiving one with bulk_end on. /// It is up to the caller to make sure that all the requests are suitable /// for bulk handling. The list is cleared after the DTR with bulk_end set. std::list bulk_list; /// Our hostname static std::string hostname; /* Thread methods which deal with each state */ /// Check the cache to see if the file already exists static void DTRCheckCache(void* arg); /// Resolve replicas of source and destination static void DTRResolve(void* arg); /// Bulk resolve replicas of source and destination static void DTRBulkResolve(void* arg); /// Check if source exists static void DTRQueryReplica(void* arg); /// Bulk check if source exists static void DTRBulkQueryReplica(void* arg); /// Remove destination file before creating a new version static void DTRPreClean(void *arg); /// Call external services to prepare physical files for reading/writing static void DTRStagePrepare(void* arg); /// Release requests made during DTRStagePrepare static void DTRReleaseRequest(void* arg); /// Register destination file in catalog static void DTRRegisterReplica(void* arg); /// Link cached file to final destination static void DTRProcessCache(void* arg); public: /// Constructor Processor(); /// Destructor waits for all active threads to stop. ~Processor() { stop(); }; /// Start Processor. /** * This method actually does nothing. It is here only to make all classes * of data staging to look alike. But it is better to call it before * starting to use object because it may do something in the future. */ void start(void); /// Stop Processor. /** * This method sends waits for all started threads to end and exits. Since * threads are short-lived it is better to wait rather than interrupt them. */ void stop(void); /// Send a DTR to the Processor. /** * The DTR is sent to the Processor through this method when some * long-latency processing is to be performed, eg contacting a * remote service. The Processor spawns a thread to do the processing, * and then returns. The thread pushes the DTR back to the scheduler when * it is finished. */ virtual void receiveDTR(DTR_ptr dtr); }; } // namespace DataStaging #endif /* PROCESSOR_H_ */ nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153432023465 xustar000000000000000030 mtime=1638455066.035061119 30 atime=1638455089.487413501 30 ctime=1638455098.838554007 nordugrid-arc-6.14.0/src/libs/data-staging/Makefile.in0000644000175000002070000015114114152153432023455 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. 
# @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ pgmpkglib_PROGRAMS = DataStagingDelivery$(EXEEXT) subdir = src/libs/data-staging DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp $(libarcdatastaging_la_HEADERS) README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ 
esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(pgmpkglibdir)" \ "$(DESTDIR)$(libarcdatastaging_ladir)" LTLIBRARIES = $(lib_LTLIBRARIES) am__DEPENDENCIES_1 = libarcdatastaging_la_DEPENDENCIES = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(am__DEPENDENCIES_1) am_libarcdatastaging_la_OBJECTS = \ libarcdatastaging_la-DataDelivery.lo \ libarcdatastaging_la-DataDeliveryComm.lo \ libarcdatastaging_la-DataDeliveryLocalComm.lo \ libarcdatastaging_la-DataDeliveryRemoteComm.lo \ libarcdatastaging_la-DTR.lo libarcdatastaging_la-DTRList.lo \ libarcdatastaging_la-DTRStatus.lo \ libarcdatastaging_la-Processor.lo \ libarcdatastaging_la-Scheduler.lo \ libarcdatastaging_la-TransferShares.lo libarcdatastaging_la_OBJECTS = $(am_libarcdatastaging_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libarcdatastaging_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) \ $(libarcdatastaging_la_LDFLAGS) $(LDFLAGS) -o $@ PROGRAMS = $(pgmpkglib_PROGRAMS) am_DataStagingDelivery_OBJECTS = \ DataStagingDelivery-DataStagingDelivery.$(OBJEXT) DataStagingDelivery_OBJECTS = $(am_DataStagingDelivery_OBJECTS) DataStagingDelivery_DEPENDENCIES = \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(am__DEPENDENCIES_1) DataStagingDelivery_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(DataStagingDelivery_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) \ $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) 
$(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = SOURCES = $(libarcdatastaging_la_SOURCES) \ $(DataStagingDelivery_SOURCES) DIST_SOURCES = $(libarcdatastaging_la_SOURCES) \ $(DataStagingDelivery_SOURCES) RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac HEADERS = $(libarcdatastaging_la_HEADERS) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = 
@EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = 
@PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ DIST_SUBDIRS = test examples SUBDIRS = . 
$(TEST_DIR) examples lib_LTLIBRARIES = libarcdatastaging.la libarcdatastaging_ladir = $(pkgincludedir)/data-staging libarcdatastaging_la_HEADERS = DataDelivery.h DataDeliveryComm.h \ DataDeliveryLocalComm.h DataDeliveryRemoteComm.h DTR.h DTRList.h \ DTRStatus.h Processor.h Scheduler.h TransferShares.h libarcdatastaging_la_SOURCES = DataDelivery.cpp DataDeliveryComm.cpp \ DataDeliveryLocalComm.cpp DataDeliveryRemoteComm.cpp DTR.cpp DTRList.cpp \ DTRStatus.cpp Processor.cpp Scheduler.cpp TransferShares.cpp libarcdatastaging_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) libarcdatastaging_la_LIBADD = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(GLIBMM_LIBS) libarcdatastaging_la_LDFLAGS = -version-info 3:0:0 pgmpkglibdir = $(pkglibdir) DataStagingDelivery_SOURCES = DataStagingDelivery.cpp DataStagingDelivery_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) DataStagingDelivery_LDADD = \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLIBMM_LIBS) all: all-recursive .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/libs/data-staging/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/libs/data-staging/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): install-libLTLIBRARIES: $(lib_LTLIBRARIES) @$(NORMAL_INSTALL) @list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ list2="$$list2 $$p"; \ else :; fi; \ done; \ test -z "$$list2" || { \ echo " $(MKDIR_P) '$(DESTDIR)$(libdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(libdir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(libdir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(libdir)"; \ } uninstall-libLTLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \ for p in $$list; do \ $(am__strip_dir) \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$f'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$f"; \ done clean-libLTLIBRARIES: -test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES) @list='$(lib_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libarcdatastaging.la: $(libarcdatastaging_la_OBJECTS) $(libarcdatastaging_la_DEPENDENCIES) $(EXTRA_libarcdatastaging_la_DEPENDENCIES) $(AM_V_CXXLD)$(libarcdatastaging_la_LINK) -rpath $(libdir) $(libarcdatastaging_la_OBJECTS) $(libarcdatastaging_la_LIBADD) $(LIBS) install-pgmpkglibPROGRAMS: $(pgmpkglib_PROGRAMS) @$(NORMAL_INSTALL) @list='$(pgmpkglib_PROGRAMS)'; test -n "$(pgmpkglibdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pgmpkglibdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pgmpkglibdir)" || exit 1; \ fi; \ for p in $$list; do echo "$$p $$p"; done | \ sed 's/$(EXEEXT)$$//' | \ while read p p1; do if test -f $$p \ || test -f $$p1 \ ; then echo "$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n;h' \ -e 's|.*|.|' \ -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) files[d] = files[d] " " $$1; \ else { print "f", $$3 "/" $$4, $$1; } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(pgmpkglibdir)$$dir'"; \ $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(pgmpkglibdir)$$dir" || exit $$?; \ } \ ; done uninstall-pgmpkglibPROGRAMS: @$(NORMAL_UNINSTALL) @list='$(pgmpkglib_PROGRAMS)'; test -n "$(pgmpkglibdir)" || list=; \ 
files=`for p in $$list; do echo "$$p"; done | \ sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ -e 's/$$/$(EXEEXT)/' \ `; \ test -n "$$list" || exit 0; \ echo " ( cd '$(DESTDIR)$(pgmpkglibdir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(pgmpkglibdir)" && rm -f $$files clean-pgmpkglibPROGRAMS: @list='$(pgmpkglib_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list DataStagingDelivery$(EXEEXT): $(DataStagingDelivery_OBJECTS) $(DataStagingDelivery_DEPENDENCIES) $(EXTRA_DataStagingDelivery_DEPENDENCIES) @rm -f DataStagingDelivery$(EXEEXT) $(AM_V_CXXLD)$(DataStagingDelivery_LINK) $(DataStagingDelivery_OBJECTS) $(DataStagingDelivery_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/DataStagingDelivery-DataStagingDelivery.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarcdatastaging_la-DTR.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarcdatastaging_la-DTRList.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarcdatastaging_la-DTRStatus.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarcdatastaging_la-DataDelivery.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarcdatastaging_la-DataDeliveryComm.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarcdatastaging_la-DataDeliveryLocalComm.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarcdatastaging_la-DataDeliveryRemoteComm.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarcdatastaging_la-Processor.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarcdatastaging_la-Scheduler.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarcdatastaging_la-TransferShares.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libarcdatastaging_la-DataDelivery.lo: DataDelivery.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) 
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -MT libarcdatastaging_la-DataDelivery.lo -MD -MP -MF $(DEPDIR)/libarcdatastaging_la-DataDelivery.Tpo -c -o libarcdatastaging_la-DataDelivery.lo `test -f 'DataDelivery.cpp' || echo '$(srcdir)/'`DataDelivery.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarcdatastaging_la-DataDelivery.Tpo $(DEPDIR)/libarcdatastaging_la-DataDelivery.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DataDelivery.cpp' object='libarcdatastaging_la-DataDelivery.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -c -o libarcdatastaging_la-DataDelivery.lo `test -f 'DataDelivery.cpp' || echo '$(srcdir)/'`DataDelivery.cpp libarcdatastaging_la-DataDeliveryComm.lo: DataDeliveryComm.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -MT libarcdatastaging_la-DataDeliveryComm.lo -MD -MP -MF $(DEPDIR)/libarcdatastaging_la-DataDeliveryComm.Tpo -c -o libarcdatastaging_la-DataDeliveryComm.lo `test -f 'DataDeliveryComm.cpp' || echo '$(srcdir)/'`DataDeliveryComm.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarcdatastaging_la-DataDeliveryComm.Tpo $(DEPDIR)/libarcdatastaging_la-DataDeliveryComm.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DataDeliveryComm.cpp' object='libarcdatastaging_la-DataDeliveryComm.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -c -o libarcdatastaging_la-DataDeliveryComm.lo `test -f 'DataDeliveryComm.cpp' || echo '$(srcdir)/'`DataDeliveryComm.cpp libarcdatastaging_la-DataDeliveryLocalComm.lo: DataDeliveryLocalComm.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -MT libarcdatastaging_la-DataDeliveryLocalComm.lo -MD -MP -MF $(DEPDIR)/libarcdatastaging_la-DataDeliveryLocalComm.Tpo -c -o libarcdatastaging_la-DataDeliveryLocalComm.lo `test -f 'DataDeliveryLocalComm.cpp' || echo '$(srcdir)/'`DataDeliveryLocalComm.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarcdatastaging_la-DataDeliveryLocalComm.Tpo $(DEPDIR)/libarcdatastaging_la-DataDeliveryLocalComm.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DataDeliveryLocalComm.cpp' object='libarcdatastaging_la-DataDeliveryLocalComm.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) 
$(CXXFLAGS) -c -o libarcdatastaging_la-DataDeliveryLocalComm.lo `test -f 'DataDeliveryLocalComm.cpp' || echo '$(srcdir)/'`DataDeliveryLocalComm.cpp libarcdatastaging_la-DataDeliveryRemoteComm.lo: DataDeliveryRemoteComm.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -MT libarcdatastaging_la-DataDeliveryRemoteComm.lo -MD -MP -MF $(DEPDIR)/libarcdatastaging_la-DataDeliveryRemoteComm.Tpo -c -o libarcdatastaging_la-DataDeliveryRemoteComm.lo `test -f 'DataDeliveryRemoteComm.cpp' || echo '$(srcdir)/'`DataDeliveryRemoteComm.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarcdatastaging_la-DataDeliveryRemoteComm.Tpo $(DEPDIR)/libarcdatastaging_la-DataDeliveryRemoteComm.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DataDeliveryRemoteComm.cpp' object='libarcdatastaging_la-DataDeliveryRemoteComm.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -c -o libarcdatastaging_la-DataDeliveryRemoteComm.lo `test -f 'DataDeliveryRemoteComm.cpp' || echo '$(srcdir)/'`DataDeliveryRemoteComm.cpp libarcdatastaging_la-DTR.lo: DTR.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -MT libarcdatastaging_la-DTR.lo -MD -MP -MF $(DEPDIR)/libarcdatastaging_la-DTR.Tpo -c -o libarcdatastaging_la-DTR.lo `test -f 'DTR.cpp' || echo '$(srcdir)/'`DTR.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarcdatastaging_la-DTR.Tpo $(DEPDIR)/libarcdatastaging_la-DTR.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DTR.cpp' object='libarcdatastaging_la-DTR.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -c -o libarcdatastaging_la-DTR.lo `test -f 'DTR.cpp' || echo '$(srcdir)/'`DTR.cpp libarcdatastaging_la-DTRList.lo: DTRList.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -MT libarcdatastaging_la-DTRList.lo -MD -MP -MF $(DEPDIR)/libarcdatastaging_la-DTRList.Tpo -c -o libarcdatastaging_la-DTRList.lo `test -f 'DTRList.cpp' || echo '$(srcdir)/'`DTRList.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarcdatastaging_la-DTRList.Tpo $(DEPDIR)/libarcdatastaging_la-DTRList.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DTRList.cpp' object='libarcdatastaging_la-DTRList.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) 
$(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -c -o libarcdatastaging_la-DTRList.lo `test -f 'DTRList.cpp' || echo '$(srcdir)/'`DTRList.cpp libarcdatastaging_la-DTRStatus.lo: DTRStatus.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -MT libarcdatastaging_la-DTRStatus.lo -MD -MP -MF $(DEPDIR)/libarcdatastaging_la-DTRStatus.Tpo -c -o libarcdatastaging_la-DTRStatus.lo `test -f 'DTRStatus.cpp' || echo '$(srcdir)/'`DTRStatus.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarcdatastaging_la-DTRStatus.Tpo $(DEPDIR)/libarcdatastaging_la-DTRStatus.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DTRStatus.cpp' object='libarcdatastaging_la-DTRStatus.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -c -o libarcdatastaging_la-DTRStatus.lo `test -f 'DTRStatus.cpp' || echo '$(srcdir)/'`DTRStatus.cpp libarcdatastaging_la-Processor.lo: Processor.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -MT libarcdatastaging_la-Processor.lo -MD -MP -MF $(DEPDIR)/libarcdatastaging_la-Processor.Tpo -c -o libarcdatastaging_la-Processor.lo `test -f 'Processor.cpp' || echo '$(srcdir)/'`Processor.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarcdatastaging_la-Processor.Tpo $(DEPDIR)/libarcdatastaging_la-Processor.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='Processor.cpp' object='libarcdatastaging_la-Processor.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -c -o libarcdatastaging_la-Processor.lo `test -f 'Processor.cpp' || echo '$(srcdir)/'`Processor.cpp libarcdatastaging_la-Scheduler.lo: Scheduler.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -MT libarcdatastaging_la-Scheduler.lo -MD -MP -MF $(DEPDIR)/libarcdatastaging_la-Scheduler.Tpo -c -o libarcdatastaging_la-Scheduler.lo `test -f 'Scheduler.cpp' || echo '$(srcdir)/'`Scheduler.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarcdatastaging_la-Scheduler.Tpo $(DEPDIR)/libarcdatastaging_la-Scheduler.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='Scheduler.cpp' object='libarcdatastaging_la-Scheduler.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX 
$(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -c -o libarcdatastaging_la-Scheduler.lo `test -f 'Scheduler.cpp' || echo '$(srcdir)/'`Scheduler.cpp libarcdatastaging_la-TransferShares.lo: TransferShares.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -MT libarcdatastaging_la-TransferShares.lo -MD -MP -MF $(DEPDIR)/libarcdatastaging_la-TransferShares.Tpo -c -o libarcdatastaging_la-TransferShares.lo `test -f 'TransferShares.cpp' || echo '$(srcdir)/'`TransferShares.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarcdatastaging_la-TransferShares.Tpo $(DEPDIR)/libarcdatastaging_la-TransferShares.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='TransferShares.cpp' object='libarcdatastaging_la-TransferShares.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarcdatastaging_la_CXXFLAGS) $(CXXFLAGS) -c -o libarcdatastaging_la-TransferShares.lo `test -f 'TransferShares.cpp' || echo '$(srcdir)/'`TransferShares.cpp DataStagingDelivery-DataStagingDelivery.o: DataStagingDelivery.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DataStagingDelivery_CXXFLAGS) $(CXXFLAGS) -MT DataStagingDelivery-DataStagingDelivery.o -MD -MP -MF $(DEPDIR)/DataStagingDelivery-DataStagingDelivery.Tpo -c -o DataStagingDelivery-DataStagingDelivery.o `test -f 'DataStagingDelivery.cpp' || echo '$(srcdir)/'`DataStagingDelivery.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/DataStagingDelivery-DataStagingDelivery.Tpo $(DEPDIR)/DataStagingDelivery-DataStagingDelivery.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DataStagingDelivery.cpp' object='DataStagingDelivery-DataStagingDelivery.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DataStagingDelivery_CXXFLAGS) $(CXXFLAGS) -c -o DataStagingDelivery-DataStagingDelivery.o `test -f 'DataStagingDelivery.cpp' || echo '$(srcdir)/'`DataStagingDelivery.cpp DataStagingDelivery-DataStagingDelivery.obj: DataStagingDelivery.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DataStagingDelivery_CXXFLAGS) $(CXXFLAGS) -MT DataStagingDelivery-DataStagingDelivery.obj -MD -MP -MF $(DEPDIR)/DataStagingDelivery-DataStagingDelivery.Tpo -c -o DataStagingDelivery-DataStagingDelivery.obj `if test -f 'DataStagingDelivery.cpp'; then $(CYGPATH_W) 'DataStagingDelivery.cpp'; else $(CYGPATH_W) '$(srcdir)/DataStagingDelivery.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/DataStagingDelivery-DataStagingDelivery.Tpo $(DEPDIR)/DataStagingDelivery-DataStagingDelivery.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DataStagingDelivery.cpp' object='DataStagingDelivery-DataStagingDelivery.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ 
DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DataStagingDelivery_CXXFLAGS) $(CXXFLAGS) -c -o DataStagingDelivery-DataStagingDelivery.obj `if test -f 'DataStagingDelivery.cpp'; then $(CYGPATH_W) 'DataStagingDelivery.cpp'; else $(CYGPATH_W) '$(srcdir)/DataStagingDelivery.cpp'; fi` mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-libarcdatastaging_laHEADERS: $(libarcdatastaging_la_HEADERS) @$(NORMAL_INSTALL) @list='$(libarcdatastaging_la_HEADERS)'; test -n "$(libarcdatastaging_ladir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(libarcdatastaging_ladir)'"; \ $(MKDIR_P) "$(DESTDIR)$(libarcdatastaging_ladir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(libarcdatastaging_ladir)'"; \ $(INSTALL_HEADER) $$files "$(DESTDIR)$(libarcdatastaging_ladir)" || exit $$?; \ done uninstall-libarcdatastaging_laHEADERS: @$(NORMAL_UNINSTALL) @list='$(libarcdatastaging_la_HEADERS)'; test -n "$(libarcdatastaging_ladir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(libarcdatastaging_ladir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(LTLIBRARIES) $(PROGRAMS) $(HEADERS) installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(pgmpkglibdir)" "$(DESTDIR)$(libarcdatastaging_ladir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libLTLIBRARIES clean-libtool \ clean-pgmpkglibPROGRAMS mostlyclean-am distclean: distclean-recursive -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-libarcdatastaging_laHEADERS \ install-pgmpkglibPROGRAMS install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-libLTLIBRARIES install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-libLTLIBRARIES \ uninstall-libarcdatastaging_laHEADERS \ uninstall-pgmpkglibPROGRAMS .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libLTLIBRARIES \ clean-libtool clean-pgmpkglibPROGRAMS cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-libLTLIBRARIES \ install-libarcdatastaging_laHEADERS install-man install-pdf \ install-pdf-am install-pgmpkglibPROGRAMS install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am uninstall-libLTLIBRARIES \ uninstall-libarcdatastaging_laHEADERS \ uninstall-pgmpkglibPROGRAMS # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DataDeliveryLocalComm.h0000644000000000000000000000013214152153376025744 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.842554066 nordugrid-arc-6.14.0/src/libs/data-staging/DataDeliveryLocalComm.h0000644000175000002070000000260114152153376025730 0ustar00mockbuildmock00000000000000#ifndef DATADELIVERYLOCALCOMM_H_ #define DATADELIVERYLOCALCOMM_H_ #include #include "DataDeliveryComm.h" namespace DataStaging { /// This class starts, monitors and controls a local Delivery process. 
/** * \ingroup datastaging * \headerfile DataDeliveryLocalComm.h arc/data-staging/DataDeliveryLocalComm.h */ class DataDeliveryLocalComm : public DataDeliveryComm { public: /// Starts child process DataDeliveryLocalComm(DTR_ptr dtr, const TransferParameters& params); /// This stops the child process virtual ~DataDeliveryLocalComm(); /// Read from stdout of child to get status virtual void PullStatus(); /// Returns "/" since local Delivery can access everywhere static bool CheckComm(DTR_ptr dtr, std::vector<std::string>& allowed_dirs, std::string& load_avg); /// Returns true if child process exists virtual operator bool() const { return (child_ != NULL); }; /// Returns true if child process does not exist virtual bool operator!() const { return (child_ == NULL); }; private: /// Child process Arc::Run* child_; /// Stdin of child, used to pass credentials std::string stdin_; /// Temporary credentials location std::string tmp_proxy_; /// Time last communication was received from child Arc::Time last_comm; }; } // namespace DataStaging #endif /* DATADELIVERYLOCALCOMM_H_ */ nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DTRStatus.cpp0000644000000000000000000000013214152153376023770 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.855554262 nordugrid-arc-6.14.0/src/libs/data-staging/DTRStatus.cpp0000644000175000002070000000500414152153376023754 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include <config.h> #endif #include "DTRStatus.h" namespace DataStaging { // to do states static const DTRStatus::DTRStatusType to_process_states[] = { DTRStatus::CHECK_CACHE, DTRStatus::RESOLVE, DTRStatus::QUERY_REPLICA, DTRStatus::PRE_CLEAN, DTRStatus::STAGE_PREPARE, DTRStatus::TRANSFER, DTRStatus::RELEASE_REQUEST, DTRStatus::REGISTER_REPLICA, DTRStatus::PROCESS_CACHE }; // doing states static const DTRStatus::DTRStatusType processing_states[] = { DTRStatus::CHECKING_CACHE, DTRStatus::RESOLVING, DTRStatus::QUERYING_REPLICA, DTRStatus::PRE_CLEANING, DTRStatus::STAGING_PREPARING, DTRStatus::TRANSFERRING, DTRStatus::RELEASING_REQUEST, DTRStatus::REGISTERING_REPLICA, DTRStatus::PROCESSING_CACHE }; static const DTRStatus::DTRStatusType staged_states[] = { DTRStatus::STAGING_PREPARING, DTRStatus::STAGING_PREPARING_WAIT, DTRStatus::STAGED_PREPARED, DTRStatus::TRANSFER, DTRStatus::TRANSFERRING, DTRStatus::TRANSFERRING_CANCEL, }; const std::vector<DTRStatus::DTRStatusType> DTRStatus::ToProcessStates(to_process_states, to_process_states + sizeof to_process_states / sizeof to_process_states[0]); const std::vector<DTRStatus::DTRStatusType> DTRStatus::ProcessingStates(processing_states, processing_states + sizeof processing_states / sizeof processing_states[0]); const std::vector<DTRStatus::DTRStatusType> DTRStatus::StagedStates(staged_states, staged_states + sizeof staged_states / sizeof staged_states[0]); static const std::string status_string[DTRStatus::NULL_STATE + 1] = { "NEW", "CHECK_CACHE", "CHECKING_CACHE", "CACHE_WAIT", "CACHE_CHECKED", "RESOLVE", "RESOLVING", "RESOLVED", "QUERY_REPLICA", "QUERYING_REPLICA", "REPLICA_QUERIED", "PRE_CLEAN", "PRE_CLEANING", "PRE_CLEANED", "STAGE_PREPARE", "STAGING_PREPARING", "STAGING_PREPARING_WAIT", "STAGED_PREPARED", "TRANSFER", "TRANSFERRING", "TRANSFERRING_CANCEL", "TRANSFERRED", "RELEASE_REQUEST", "RELEASING_REQUEST", "REQUEST_RELEASED", "REGISTER_REPLICA", "REGISTERING_REPLICA", "REPLICA_REGISTERED", "PROCESS_CACHE", "PROCESSING_CACHE", "CACHE_PROCESSED", "DONE", "CANCELLED", "CANCELLED_FINISHED", "ERROR", "NULL_STATE" }; std::string DTRStatus::str() const { return status_string[status]; } } //
namespace DataStaging nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DataDeliveryComm.h0000644000000000000000000000013214152153376024771 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.841554052 nordugrid-arc-6.14.0/src/libs/data-staging/DataDeliveryComm.h0000644000175000002070000001431214152153376024757 0ustar00mockbuildmock00000000000000#ifndef DATA_DELIVERY_COMM_H_ #define DATA_DELIVERY_COMM_H_ #include "DTR.h" namespace DataStaging { class DataDeliveryCommHandler; /// This class provides an abstract interface for the Delivery layer. /** * Different implementations provide different ways of providing Delivery * functionality. DataDeliveryLocalComm launches a local process to perform * the transfer and DataDeliveryRemoteComm contacts a remote service which * performs the transfer. The implementation is chosen depending on what is * set in the DTR, which the Scheduler should set based on various factors. * * CreateInstance() should be used to get a pointer to the instantiated * object. This also starts the transfer. Deleting this object stops the * transfer and cleans up any used resources. A singleton instance of * DataDeliveryCommHandler regularly polls all active transfers using * PullStatus() and fills the Status object with current information, * which can be obtained through GetStatus(). * \ingroup datastaging * \headerfile DataDeliveryComm.h arc/data-staging/DataDeliveryComm.h */ class DataDeliveryComm { friend class DataDeliveryCommHandler; public: /// Communication status with transfer enum CommStatusType { CommInit, ///< Initializing/starting transfer, rest of information not valid CommNoError, ///< Communication going on smoothly CommTimeout, ///< Communication experienced timeout CommClosed, ///< Communication channel was closed CommExited, ///< Transfer exited. Mostly same as CommClosed but exit detected before pipe closed CommFailed ///< Transfer failed. If we have CommFailed and no error code ///< reported that normally means segfault or external kill. 
}; #pragma pack(4) /// Plain C struct to pass information from executing process back to main thread /** \ingroup datastaging */ struct Status { CommStatusType commstatus; ///< Communication state (filled by main thread) time_t timestamp; ///< Time when information was generated (filled externally) DTRStatus::DTRStatusType status; ///< Generic status DTRErrorStatus::DTRErrorStatusType error; ///< Error type DTRErrorStatus::DTRErrorLocation error_location; ///< Where error happened char error_desc[1024]; ///< Error description unsigned int streams; ///< Number of transfer streams active unsigned long long int transferred;///< Number of bytes transferred unsigned long long int offset; ///< Last position to which file has no missing pieces unsigned long long int size; ///< File size as obtained by protocol unsigned int speed; ///< Current transfer speed in bytes/sec during last ~minute char checksum[128]; ///< Calculated checksum unsigned long long int transfer_time; ///< Time in ns to complete transfer (0 if not completed) }; #pragma pack() protected: /// Current status of transfer Status status_; /// Latest status of transfer is read into this buffer Status status_buf_; /// Reading position of Status buffer unsigned int status_pos_; /// Lock to protect access to status Glib::Mutex lock_; /// Pointer to singleton handler of all DataDeliveryComm objects DataDeliveryCommHandler* handler_; /// Transfer limits TransferParameters transfer_params; /// Time transfer was started Arc::Time start_; /// Logger object. Pointer to DTR's Logger. DTRLogger logger_; /// Check for new state and fill state accordingly. /** * This method is periodically called by the comm handler to obtain status * info. It detects communication and delivery failures and delivery * termination. */ virtual void PullStatus() = 0; /// Start transfer with parameters taken from DTR and supplied transfer limits. /** * Constructor should not be used directly, CreateInstance() should be used * instead. */ DataDeliveryComm(DTR_ptr dtr, const TransferParameters& params); public: /// Factory method to get DataDeliveryComm instance. static DataDeliveryComm* CreateInstance(DTR_ptr dtr, const TransferParameters& params); /// Destroy object. This stops any ongoing transfer and cleans up resources. virtual ~DataDeliveryComm() {}; /// Obtain status of transfer Status GetStatus() const; /// Check the delivery method is available. Calls CheckComm of the appropriate subclass. 
/** * \param dtr DTR from which credentials are used * \param allowed_dirs filled with list of dirs that this comm is allowed * to read/write * \param load_avg filled with the load average reported by the service * \return true if selected delivery method is available */ static bool CheckComm(DTR_ptr dtr, std::vector<std::string>& allowed_dirs, std::string& load_avg); /// Get explanation of error std::string GetError() const { return status_.error_desc; }; /// Returns true if transfer is currently active virtual operator bool() const = 0; /// Returns true if transfer is currently not active virtual bool operator!() const = 0; }; /// Singleton class handling all active DataDeliveryComm objects /** * \ingroup datastaging * \headerfile DataDeliveryComm.h arc/data-staging/DataDeliveryComm.h */ class DataDeliveryCommHandler { private: Glib::Mutex lock_; static void func(void* arg); std::list<DataDeliveryComm*> items_; static DataDeliveryCommHandler* comm_handler; /// Constructor is private - getInstance() should be used instead DataDeliveryCommHandler(); DataDeliveryCommHandler(const DataDeliveryCommHandler&); DataDeliveryCommHandler& operator=(const DataDeliveryCommHandler&); public: ~DataDeliveryCommHandler() {}; /// Add a new DataDeliveryComm instance to the handler void Add(DataDeliveryComm* item); /// Remove a DataDeliveryComm instance from the handler void Remove(DataDeliveryComm* item); /// Get the singleton instance of the handler static DataDeliveryCommHandler* getInstance(); }; } // namespace DataStaging #endif // DATA_DELIVERY_COMM_H_ nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DataDelivery.h0000644000000000000000000000013214152153376024155 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.840554037 nordugrid-arc-6.14.0/src/libs/data-staging/DataDelivery.h0000644000175000002070000000636014152153376024147 0ustar00mockbuildmock00000000000000#ifndef DATA_DELIVERY_H_ #define DATA_DELIVERY_H_ #include #include #include #include "DTR.h" #include "DTRList.h" #include "DTRStatus.h" namespace DataStaging { /// DataDelivery transfers data between specified physical locations. /** * start() must be called to start the delivery thread for processing DTRs * and stop() should be called to stop it (this waits for all data transfers * to exit). stop() is also called in the destructor. * * All meta-operations for a DTR such as resolving replicas must be done * before sending to DataDelivery. Calling receiveDTR() starts a new process * which performs data transfer as specified in DTR. * \ingroup datastaging * \headerfile DataDelivery.h arc/data-staging/DataDelivery.h */ class DataDelivery: public DTRCallback { private: /// lock for DTRs list Arc::SimpleCondition dtr_list_lock; /// Wrapper class around delivery process handler class delivery_pair_t; /// DTRs which delivery process has in its queue std::list<delivery_pair_t*> dtr_list; /// Transfer limits TransferParameters transfer_params; /// Logger object static Arc::Logger logger; /// Flag describing delivery state. Used to decide whether to keep running main loop ProcessState delivery_state; /// Condition to signal end of running Arc::SimpleCondition run_signal; /// Condition on which main thread waits, so it can wake up immediately /// when a new transfer arrives Arc::SimpleCondition cond; /// Thread to start new Delivery process static void start_delivery(void* arg); /// Thread to stop Delivery process static void stop_delivery(void* arg); /// Delete delivery_pair_t object.
Starts a new thread which calls stop_delivery() bool delete_delivery_pair(delivery_pair_t* dp); /// Static version of main_thread, used when thread is created static void main_thread(void* arg); /// Main thread, which runs until stopped void main_thread(void); /// Copy constructor is private because DataDelivery should not be copied DataDelivery(const DataDelivery&); /// Assignment constructor is private because DataDelivery should not be copied DataDelivery& operator=(const DataDelivery&); public: /// Constructor. DataDelivery(); /// Destructor calls stop() and waits for cancelled processes to exit. ~DataDelivery() { stop(); }; /// Pass a DTR to Delivery. /** * This method is called by the scheduler to pass a DTR to the delivery. * The DataDelivery starts the data transfer either using a local process * or by sending a request to a remote delivery service, and then returns. * DataDelivery's own thread then monitors the transfer. */ virtual void receiveDTR(DTR_ptr request); /// Stop the transfer corresponding to the given DTR. bool cancelDTR(DTR_ptr request); /// Start the Delivery thread, which runs until stop() is called. bool start(); /// Tell the delivery to stop all transfers and threads and exit. bool stop(); /// Set transfer limits. void SetTransferParameters(const TransferParameters& params); }; } // namespace DataStaging #endif /*DATA_DELIVERY_H_*/ nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/test0000644000000000000000000000013214152153472022326 xustar000000000000000030 mtime=1638455098.883554683 30 atime=1638455103.996631509 30 ctime=1638455098.883554683 nordugrid-arc-6.14.0/src/libs/data-staging/test/0000755000175000002070000000000014152153472022370 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/libs/data-staging/test/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376024442 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.499647387 30 ctime=1638455098.880554637 nordugrid-arc-6.14.0/src/libs/data-staging/test/Makefile.am0000644000175000002070000000264214152153376024433 0ustar00mockbuildmock00000000000000# Tests require mock DMC which can be enabled via configure --enable-mock-dmc if MOCK_DMC_ENABLED TESTS = DTRTest ProcessorTest DeliveryTest else TESTS = endif check_PROGRAMS = $(TESTS) TESTS_ENVIRONMENT = env ARC_PLUGIN_PATH=$(top_builddir)/src/hed/dmc/mock/.libs:$(top_builddir)/src/hed/dmc/file/.libs DTRTest_SOURCES = $(top_srcdir)/src/Test.cpp DTRTest.cpp DTRTest_CXXFLAGS = -I$(top_srcdir)/include \ $(CPPUNIT_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) DTRTest_LDADD = ../libarcdatastaging.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(CPPUNIT_LIBS) $(GLIBMM_LIBS) ProcessorTest_SOURCES = $(top_srcdir)/src/Test.cpp ProcessorTest.cpp ProcessorTest_CXXFLAGS = -I$(top_srcdir)/include \ $(CPPUNIT_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) ProcessorTest_LDADD = ../libarcdatastaging.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(CPPUNIT_LIBS) $(GLIBMM_LIBS) DeliveryTest_SOURCES = $(top_srcdir)/src/Test.cpp DeliveryTest.cpp DeliveryTest_CXXFLAGS = -I$(top_srcdir)/include \ $(CPPUNIT_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) DeliveryTest_LDADD = ../libarcdatastaging.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(CPPUNIT_LIBS) $(GLIBMM_LIBS) 
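The test Makefile.am above builds each unit test from the shared Test.cpp driver plus one test source and links it against libarcdatastaging.la, libarccommon.la, libarccredential.la and CppUnit; the tests only run when the mock DMC is enabled at configure time. As a rough illustration of the API exercised by such tests, the sketch below (not part of the ARC sources) drives the DataDelivery interface declared in DataDelivery.h for one already-prepared DTR. The include path follows the \headerfile documentation above; the assumptions that TransferParameters is default-constructible and that the DTR_ptr argument arrives fully prepared (replicas resolved, credentials set) are illustrative only, since neither the TransferParameters definition nor the DTR constructor appears in this excerpt.

// Minimal usage sketch (not part of the ARC sources).
// Assumptions: the header installs as <arc/data-staging/DataDelivery.h>
// (per the \headerfile docs above) and TransferParameters is
// default-constructible; 'request' is a fully prepared DTR whose
// construction is omitted because the DTR constructor is not shown here.
#include <arc/data-staging/DataDelivery.h>

void run_single_transfer(DataStaging::DTR_ptr request) {
  DataStaging::DataDelivery delivery;       // the Delivery layer instance
  DataStaging::TransferParameters params;   // transfer limits (defaults assumed)
  delivery.SetTransferParameters(params);   // apply limits before starting
  delivery.start();                         // start the internal delivery thread
  delivery.receiveDTR(request);             // hand the DTR over; DataDelivery picks a local
                                            // process or remote service based on the DTR
  // ... a real caller (the Scheduler or a unit test) would wait for the DTR
  // to reach a final state before shutting down ...
  delivery.stop();                          // stop all transfers and threads
}

Note that DataDeliveryComm::CreateInstance() is not called directly in code like this: DataDelivery creates and monitors the appropriate DataDeliveryComm object itself, and the singleton DataDeliveryCommHandler polls active transfers via PullStatus(), as described in the DataDeliveryComm.h documentation above.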
nordugrid-arc-6.14.0/src/libs/data-staging/test/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153432024444 xustar000000000000000030 mtime=1638455066.185063372 30 atime=1638455089.501413712 30 ctime=1638455098.879554623 nordugrid-arc-6.14.0/src/libs/data-staging/test/Makefile.in0000644000175000002070000012601314152153432024434 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ @MOCK_DMC_ENABLED_TRUE@TESTS = DTRTest$(EXEEXT) ProcessorTest$(EXEEXT) \ @MOCK_DMC_ENABLED_TRUE@ DeliveryTest$(EXEEXT) check_PROGRAMS = $(am__EXEEXT_1) subdir = src/libs/data-staging/test DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 
$(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = @MOCK_DMC_ENABLED_TRUE@am__EXEEXT_1 = DTRTest$(EXEEXT) \ @MOCK_DMC_ENABLED_TRUE@ ProcessorTest$(EXEEXT) \ @MOCK_DMC_ENABLED_TRUE@ DeliveryTest$(EXEEXT) am_DTRTest_OBJECTS = DTRTest-Test.$(OBJEXT) DTRTest-DTRTest.$(OBJEXT) DTRTest_OBJECTS = $(am_DTRTest_OBJECTS) am__DEPENDENCIES_1 = DTRTest_DEPENDENCIES = ../libarcdatastaging.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = DTRTest_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(DTRTest_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_DeliveryTest_OBJECTS = DeliveryTest-Test.$(OBJEXT) \ DeliveryTest-DeliveryTest.$(OBJEXT) DeliveryTest_OBJECTS = $(am_DeliveryTest_OBJECTS) DeliveryTest_DEPENDENCIES = ../libarcdatastaging.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) DeliveryTest_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(DeliveryTest_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_ProcessorTest_OBJECTS = ProcessorTest-Test.$(OBJEXT) \ ProcessorTest-ProcessorTest.$(OBJEXT) ProcessorTest_OBJECTS = $(am_ProcessorTest_OBJECTS) ProcessorTest_DEPENDENCIES = ../libarcdatastaging.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) ProcessorTest_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(ProcessorTest_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) \ -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = SOURCES = $(DTRTest_SOURCES) $(DeliveryTest_SOURCES) \ $(ProcessorTest_SOURCES) DIST_SOURCES = $(DTRTest_SOURCES) $(DeliveryTest_SOURCES) \ $(ProcessorTest_SOURCES) 
am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags am__tty_colors_dummy = \ mgn= red= grn= lgn= blu= brg= std=; \ am__color_tests=no am__tty_colors = { \ $(am__tty_colors_dummy); \ if test "X$(AM_COLOR_TESTS)" = Xno; then \ am__color_tests=no; \ elif test "X$(AM_COLOR_TESTS)" = Xalways; then \ am__color_tests=yes; \ elif test "X$$TERM" != Xdumb && { test -t 1; } 2>/dev/null; then \ am__color_tests=yes; \ fi; \ if test $$am__color_tests = yes; then \ red=''; \ grn=''; \ lgn=''; \ blu=''; \ mgn=''; \ brg=''; \ std=''; \ fi; \ } DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = 
@CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ 
PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ 
top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ TESTS_ENVIRONMENT = env ARC_PLUGIN_PATH=$(top_builddir)/src/hed/dmc/mock/.libs:$(top_builddir)/src/hed/dmc/file/.libs DTRTest_SOURCES = $(top_srcdir)/src/Test.cpp DTRTest.cpp DTRTest_CXXFLAGS = -I$(top_srcdir)/include \ $(CPPUNIT_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) DTRTest_LDADD = ../libarcdatastaging.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(CPPUNIT_LIBS) $(GLIBMM_LIBS) ProcessorTest_SOURCES = $(top_srcdir)/src/Test.cpp ProcessorTest.cpp ProcessorTest_CXXFLAGS = -I$(top_srcdir)/include \ $(CPPUNIT_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) ProcessorTest_LDADD = ../libarcdatastaging.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(CPPUNIT_LIBS) $(GLIBMM_LIBS) DeliveryTest_SOURCES = $(top_srcdir)/src/Test.cpp DeliveryTest.cpp DeliveryTest_CXXFLAGS = -I$(top_srcdir)/include \ $(CPPUNIT_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) DeliveryTest_LDADD = ../libarcdatastaging.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(CPPUNIT_LIBS) $(GLIBMM_LIBS) all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/libs/data-staging/test/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/libs/data-staging/test/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-checkPROGRAMS: @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list DTRTest$(EXEEXT): $(DTRTest_OBJECTS) $(DTRTest_DEPENDENCIES) $(EXTRA_DTRTest_DEPENDENCIES) @rm -f DTRTest$(EXEEXT) $(AM_V_CXXLD)$(DTRTest_LINK) $(DTRTest_OBJECTS) $(DTRTest_LDADD) $(LIBS) DeliveryTest$(EXEEXT): $(DeliveryTest_OBJECTS) $(DeliveryTest_DEPENDENCIES) $(EXTRA_DeliveryTest_DEPENDENCIES) @rm -f DeliveryTest$(EXEEXT) $(AM_V_CXXLD)$(DeliveryTest_LINK) $(DeliveryTest_OBJECTS) $(DeliveryTest_LDADD) $(LIBS) ProcessorTest$(EXEEXT): $(ProcessorTest_OBJECTS) $(ProcessorTest_DEPENDENCIES) $(EXTRA_ProcessorTest_DEPENDENCIES) @rm -f ProcessorTest$(EXEEXT) $(AM_V_CXXLD)$(ProcessorTest_LINK) $(ProcessorTest_OBJECTS) $(ProcessorTest_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/DTRTest-DTRTest.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/DTRTest-Test.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/DeliveryTest-DeliveryTest.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/DeliveryTest-Test.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ProcessorTest-ProcessorTest.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ProcessorTest-Test.Po@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< DTRTest-Test.o: $(top_srcdir)/src/Test.cpp 
@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DTRTest_CXXFLAGS) $(CXXFLAGS) -MT DTRTest-Test.o -MD -MP -MF $(DEPDIR)/DTRTest-Test.Tpo -c -o DTRTest-Test.o `test -f '$(top_srcdir)/src/Test.cpp' || echo '$(srcdir)/'`$(top_srcdir)/src/Test.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/DTRTest-Test.Tpo $(DEPDIR)/DTRTest-Test.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$(top_srcdir)/src/Test.cpp' object='DTRTest-Test.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DTRTest_CXXFLAGS) $(CXXFLAGS) -c -o DTRTest-Test.o `test -f '$(top_srcdir)/src/Test.cpp' || echo '$(srcdir)/'`$(top_srcdir)/src/Test.cpp DTRTest-Test.obj: $(top_srcdir)/src/Test.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DTRTest_CXXFLAGS) $(CXXFLAGS) -MT DTRTest-Test.obj -MD -MP -MF $(DEPDIR)/DTRTest-Test.Tpo -c -o DTRTest-Test.obj `if test -f '$(top_srcdir)/src/Test.cpp'; then $(CYGPATH_W) '$(top_srcdir)/src/Test.cpp'; else $(CYGPATH_W) '$(srcdir)/$(top_srcdir)/src/Test.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/DTRTest-Test.Tpo $(DEPDIR)/DTRTest-Test.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$(top_srcdir)/src/Test.cpp' object='DTRTest-Test.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DTRTest_CXXFLAGS) $(CXXFLAGS) -c -o DTRTest-Test.obj `if test -f '$(top_srcdir)/src/Test.cpp'; then $(CYGPATH_W) '$(top_srcdir)/src/Test.cpp'; else $(CYGPATH_W) '$(srcdir)/$(top_srcdir)/src/Test.cpp'; fi` DTRTest-DTRTest.o: DTRTest.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DTRTest_CXXFLAGS) $(CXXFLAGS) -MT DTRTest-DTRTest.o -MD -MP -MF $(DEPDIR)/DTRTest-DTRTest.Tpo -c -o DTRTest-DTRTest.o `test -f 'DTRTest.cpp' || echo '$(srcdir)/'`DTRTest.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/DTRTest-DTRTest.Tpo $(DEPDIR)/DTRTest-DTRTest.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DTRTest.cpp' object='DTRTest-DTRTest.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DTRTest_CXXFLAGS) $(CXXFLAGS) -c -o DTRTest-DTRTest.o `test -f 'DTRTest.cpp' || echo '$(srcdir)/'`DTRTest.cpp DTRTest-DTRTest.obj: DTRTest.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DTRTest_CXXFLAGS) $(CXXFLAGS) -MT DTRTest-DTRTest.obj -MD -MP -MF $(DEPDIR)/DTRTest-DTRTest.Tpo -c -o DTRTest-DTRTest.obj `if test -f 'DTRTest.cpp'; then $(CYGPATH_W) 'DTRTest.cpp'; else $(CYGPATH_W) '$(srcdir)/DTRTest.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/DTRTest-DTRTest.Tpo $(DEPDIR)/DTRTest-DTRTest.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DTRTest.cpp' object='DTRTest-DTRTest.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ 
$(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DTRTest_CXXFLAGS) $(CXXFLAGS) -c -o DTRTest-DTRTest.obj `if test -f 'DTRTest.cpp'; then $(CYGPATH_W) 'DTRTest.cpp'; else $(CYGPATH_W) '$(srcdir)/DTRTest.cpp'; fi` DeliveryTest-Test.o: $(top_srcdir)/src/Test.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DeliveryTest_CXXFLAGS) $(CXXFLAGS) -MT DeliveryTest-Test.o -MD -MP -MF $(DEPDIR)/DeliveryTest-Test.Tpo -c -o DeliveryTest-Test.o `test -f '$(top_srcdir)/src/Test.cpp' || echo '$(srcdir)/'`$(top_srcdir)/src/Test.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/DeliveryTest-Test.Tpo $(DEPDIR)/DeliveryTest-Test.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$(top_srcdir)/src/Test.cpp' object='DeliveryTest-Test.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DeliveryTest_CXXFLAGS) $(CXXFLAGS) -c -o DeliveryTest-Test.o `test -f '$(top_srcdir)/src/Test.cpp' || echo '$(srcdir)/'`$(top_srcdir)/src/Test.cpp DeliveryTest-Test.obj: $(top_srcdir)/src/Test.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DeliveryTest_CXXFLAGS) $(CXXFLAGS) -MT DeliveryTest-Test.obj -MD -MP -MF $(DEPDIR)/DeliveryTest-Test.Tpo -c -o DeliveryTest-Test.obj `if test -f '$(top_srcdir)/src/Test.cpp'; then $(CYGPATH_W) '$(top_srcdir)/src/Test.cpp'; else $(CYGPATH_W) '$(srcdir)/$(top_srcdir)/src/Test.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/DeliveryTest-Test.Tpo $(DEPDIR)/DeliveryTest-Test.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$(top_srcdir)/src/Test.cpp' object='DeliveryTest-Test.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DeliveryTest_CXXFLAGS) $(CXXFLAGS) -c -o DeliveryTest-Test.obj `if test -f '$(top_srcdir)/src/Test.cpp'; then $(CYGPATH_W) '$(top_srcdir)/src/Test.cpp'; else $(CYGPATH_W) '$(srcdir)/$(top_srcdir)/src/Test.cpp'; fi` DeliveryTest-DeliveryTest.o: DeliveryTest.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DeliveryTest_CXXFLAGS) $(CXXFLAGS) -MT DeliveryTest-DeliveryTest.o -MD -MP -MF $(DEPDIR)/DeliveryTest-DeliveryTest.Tpo -c -o DeliveryTest-DeliveryTest.o `test -f 'DeliveryTest.cpp' || echo '$(srcdir)/'`DeliveryTest.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/DeliveryTest-DeliveryTest.Tpo $(DEPDIR)/DeliveryTest-DeliveryTest.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DeliveryTest.cpp' object='DeliveryTest-DeliveryTest.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DeliveryTest_CXXFLAGS) $(CXXFLAGS) -c -o DeliveryTest-DeliveryTest.o `test -f 'DeliveryTest.cpp' || echo '$(srcdir)/'`DeliveryTest.cpp DeliveryTest-DeliveryTest.obj: DeliveryTest.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DeliveryTest_CXXFLAGS) $(CXXFLAGS) -MT 
DeliveryTest-DeliveryTest.obj -MD -MP -MF $(DEPDIR)/DeliveryTest-DeliveryTest.Tpo -c -o DeliveryTest-DeliveryTest.obj `if test -f 'DeliveryTest.cpp'; then $(CYGPATH_W) 'DeliveryTest.cpp'; else $(CYGPATH_W) '$(srcdir)/DeliveryTest.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/DeliveryTest-DeliveryTest.Tpo $(DEPDIR)/DeliveryTest-DeliveryTest.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DeliveryTest.cpp' object='DeliveryTest-DeliveryTest.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(DeliveryTest_CXXFLAGS) $(CXXFLAGS) -c -o DeliveryTest-DeliveryTest.obj `if test -f 'DeliveryTest.cpp'; then $(CYGPATH_W) 'DeliveryTest.cpp'; else $(CYGPATH_W) '$(srcdir)/DeliveryTest.cpp'; fi` ProcessorTest-Test.o: $(top_srcdir)/src/Test.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ProcessorTest_CXXFLAGS) $(CXXFLAGS) -MT ProcessorTest-Test.o -MD -MP -MF $(DEPDIR)/ProcessorTest-Test.Tpo -c -o ProcessorTest-Test.o `test -f '$(top_srcdir)/src/Test.cpp' || echo '$(srcdir)/'`$(top_srcdir)/src/Test.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/ProcessorTest-Test.Tpo $(DEPDIR)/ProcessorTest-Test.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$(top_srcdir)/src/Test.cpp' object='ProcessorTest-Test.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ProcessorTest_CXXFLAGS) $(CXXFLAGS) -c -o ProcessorTest-Test.o `test -f '$(top_srcdir)/src/Test.cpp' || echo '$(srcdir)/'`$(top_srcdir)/src/Test.cpp ProcessorTest-Test.obj: $(top_srcdir)/src/Test.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ProcessorTest_CXXFLAGS) $(CXXFLAGS) -MT ProcessorTest-Test.obj -MD -MP -MF $(DEPDIR)/ProcessorTest-Test.Tpo -c -o ProcessorTest-Test.obj `if test -f '$(top_srcdir)/src/Test.cpp'; then $(CYGPATH_W) '$(top_srcdir)/src/Test.cpp'; else $(CYGPATH_W) '$(srcdir)/$(top_srcdir)/src/Test.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/ProcessorTest-Test.Tpo $(DEPDIR)/ProcessorTest-Test.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$(top_srcdir)/src/Test.cpp' object='ProcessorTest-Test.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ProcessorTest_CXXFLAGS) $(CXXFLAGS) -c -o ProcessorTest-Test.obj `if test -f '$(top_srcdir)/src/Test.cpp'; then $(CYGPATH_W) '$(top_srcdir)/src/Test.cpp'; else $(CYGPATH_W) '$(srcdir)/$(top_srcdir)/src/Test.cpp'; fi` ProcessorTest-ProcessorTest.o: ProcessorTest.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ProcessorTest_CXXFLAGS) $(CXXFLAGS) -MT ProcessorTest-ProcessorTest.o -MD -MP -MF $(DEPDIR)/ProcessorTest-ProcessorTest.Tpo -c -o ProcessorTest-ProcessorTest.o `test -f 'ProcessorTest.cpp' || echo '$(srcdir)/'`ProcessorTest.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/ProcessorTest-ProcessorTest.Tpo $(DEPDIR)/ProcessorTest-ProcessorTest.Po 
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='ProcessorTest.cpp' object='ProcessorTest-ProcessorTest.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ProcessorTest_CXXFLAGS) $(CXXFLAGS) -c -o ProcessorTest-ProcessorTest.o `test -f 'ProcessorTest.cpp' || echo '$(srcdir)/'`ProcessorTest.cpp ProcessorTest-ProcessorTest.obj: ProcessorTest.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ProcessorTest_CXXFLAGS) $(CXXFLAGS) -MT ProcessorTest-ProcessorTest.obj -MD -MP -MF $(DEPDIR)/ProcessorTest-ProcessorTest.Tpo -c -o ProcessorTest-ProcessorTest.obj `if test -f 'ProcessorTest.cpp'; then $(CYGPATH_W) 'ProcessorTest.cpp'; else $(CYGPATH_W) '$(srcdir)/ProcessorTest.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/ProcessorTest-ProcessorTest.Tpo $(DEPDIR)/ProcessorTest-ProcessorTest.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='ProcessorTest.cpp' object='ProcessorTest-ProcessorTest.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(ProcessorTest_CXXFLAGS) $(CXXFLAGS) -c -o ProcessorTest-ProcessorTest.obj `if test -f 'ProcessorTest.cpp'; then $(CYGPATH_W) 'ProcessorTest.cpp'; else $(CYGPATH_W) '$(srcdir)/ProcessorTest.cpp'; fi` mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags check-TESTS: $(TESTS) @failed=0; all=0; xfail=0; xpass=0; skip=0; \ srcdir=$(srcdir); export srcdir; \ list=' $(TESTS) '; \ $(am__tty_colors); \ if test -n "$$list"; then \ for tst in $$list; do \ if test -f ./$$tst; then dir=./; \ elif test -f $$tst; then dir=; \ else dir="$(srcdir)/"; fi; \ if $(TESTS_ENVIRONMENT) $${dir}$$tst $(AM_TESTS_FD_REDIRECT); then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *[\ \ ]$$tst[\ \ ]*) \ xpass=`expr $$xpass + 1`; \ failed=`expr $$failed + 1`; \ col=$$red; res=XPASS; \ ;; \ *) \ col=$$grn; res=PASS; \ ;; \ esac; \ elif test $$? 
-ne 77; then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *[\ \ ]$$tst[\ \ ]*) \ xfail=`expr $$xfail + 1`; \ col=$$lgn; res=XFAIL; \ ;; \ *) \ failed=`expr $$failed + 1`; \ col=$$red; res=FAIL; \ ;; \ esac; \ else \ skip=`expr $$skip + 1`; \ col=$$blu; res=SKIP; \ fi; \ echo "$${col}$$res$${std}: $$tst"; \ done; \ if test "$$all" -eq 1; then \ tests="test"; \ All=""; \ else \ tests="tests"; \ All="All "; \ fi; \ if test "$$failed" -eq 0; then \ if test "$$xfail" -eq 0; then \ banner="$$All$$all $$tests passed"; \ else \ if test "$$xfail" -eq 1; then failures=failure; else failures=failures; fi; \ banner="$$All$$all $$tests behaved as expected ($$xfail expected $$failures)"; \ fi; \ else \ if test "$$xpass" -eq 0; then \ banner="$$failed of $$all $$tests failed"; \ else \ if test "$$xpass" -eq 1; then passes=pass; else passes=passes; fi; \ banner="$$failed of $$all $$tests did not behave as expected ($$xpass unexpected $$passes)"; \ fi; \ fi; \ dashes="$$banner"; \ skipped=""; \ if test "$$skip" -ne 0; then \ if test "$$skip" -eq 1; then \ skipped="($$skip test was not run)"; \ else \ skipped="($$skip tests were not run)"; \ fi; \ test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$skipped"; \ fi; \ report=""; \ if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \ report="Please report to $(PACKAGE_BUGREPORT)"; \ test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$report"; \ fi; \ dashes=`echo "$$dashes" | sed s/./=/g`; \ if test "$$failed" -eq 0; then \ col="$$grn"; \ else \ col="$$red"; \ fi; \ echo "$${col}$$dashes$${std}"; \ echo "$${col}$$banner$${std}"; \ test -z "$$skipped" || echo "$${col}$$skipped$${std}"; \ test -z "$$report" || echo "$${col}$$report$${std}"; \ echo "$${col}$$dashes$${std}"; \ test "$$failed" -eq 0; \ else :; fi distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS) $(MAKE) $(AM_MAKEFLAGS) check-TESTS check: check-am all-am: Makefile installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-checkPROGRAMS clean-generic clean-libtool \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: check-am install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-TESTS check-am clean \ clean-checkPROGRAMS clean-generic clean-libtool cscopelist-am \ ctags ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/libs/data-staging/test/PaxHeaders.30264/DeliveryTest.cpp0000644000000000000000000000013214152153376025535 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.499647387 30 ctime=1638455098.882554668 nordugrid-arc-6.14.0/src/libs/data-staging/test/DeliveryTest.cpp0000644000175000002070000001172414152153376025527 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include "../DTRStatus.h" #include "../DTR.h" #include "../DataDelivery.h" using namespace DataStaging; class DeliveryTest : public CppUnit::TestFixture { CPPUNIT_TEST_SUITE(DeliveryTest); CPPUNIT_TEST(TestDeliverySimple); CPPUNIT_TEST(TestDeliveryFailure); CPPUNIT_TEST(TestDeliveryUnsupported); CPPUNIT_TEST_SUITE_END(); public: void TestDeliverySimple(); void TestDeliveryFailure(); void TestDeliveryUnsupported(); void setUp(); void tearDown(); private: std::list logs; char const * log_name; Arc::UserConfig cfg; }; void DeliveryTest::setUp() { // Hack to make sure DataStagingDelivery executable in the parent dir is used // A fake ARC location is used and a symlink is created in the libexec subdir // to the DataStagingDelivery in the parent dir. TODO: maybe put a test flag // in DTR code which tells it to use this local executable. Arc::DirCreate(std::string("../tmp/")+std::string(PKGLIBSUBDIR), S_IRWXU, true); Arc::ArcLocation::Init("../tmp/x/x"); Arc::FileLink("../../../DataStagingDelivery", std::string("../tmp/")+std::string(PKGLIBSUBDIR)+std::string("/DataStagingDelivery"), true); logs.clear(); const std::list& destinations = Arc::Logger::getRootLogger().getDestinations(); for(std::list::const_iterator dest = destinations.begin(); dest != destinations.end(); ++dest) { logs.push_back(*dest); } log_name = "DataStagingTest"; } void DeliveryTest::tearDown() { Arc::DirDelete("../tmp"); } void DeliveryTest::TestDeliverySimple() { std::string source("mock://mocksrc/1"); std::string destination("mock://mockdest/1"); std::string jobid("1234"); DataStaging::DTR_ptr dtr(new DataStaging::DTR(source,destination,cfg,jobid,Arc::User().get_uid(),logs,log_name)); CPPUNIT_ASSERT(*dtr); // Pass DTR to Delivery DataStaging::DataDelivery delivery; delivery.start(); delivery.receiveDTR(dtr); DataStaging::DTRStatus status = dtr->get_status(); // Wait for result. It must be either ERROR or TRANSFERRED at end. // During transfer state may be NULL or TRANSFERRING for(int cnt=0;;++cnt) { status = dtr->get_status(); if(status == DataStaging::DTRStatus::ERROR) { break; } else if(status == DataStaging::DTRStatus::TRANSFERRED) { break; } else if(status == DataStaging::DTRStatus::TRANSFERRING) { } else if(status == DataStaging::DTRStatus::NULL_STATE) { } else { break; } CPPUNIT_ASSERT(cnt < 300); // 30s limit on transfer time Glib::usleep(100000); } CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::TRANSFERRED, status.GetStatus()); CPPUNIT_ASSERT_EQUAL_MESSAGE(dtr->get_error_status().GetDesc(), DataStaging::DTRErrorStatus::NONE_ERROR, dtr->get_error_status().GetErrorStatus()); } void DeliveryTest::TestDeliveryFailure() { std::string source("fail://mocksrc/1"); std::string destination("fail://mockdest/1"); std::string jobid("1234"); DataStaging::DTR_ptr dtr(new DataStaging::DTR(source,destination,cfg,jobid,Arc::User().get_uid(),logs,log_name)); CPPUNIT_ASSERT(*dtr); // Pass DTR to Delivery DataStaging::DataDelivery delivery; delivery.start(); delivery.receiveDTR(dtr); DataStaging::DTRStatus status = dtr->get_status(); // Wait for result. 
It must be either ERROR or TRANSFERRED at end. // During transfer state may be NULL or TRANSFERRING for(int cnt=0;;++cnt) { status = dtr->get_status(); if(status == DataStaging::DTRStatus::ERROR) { break; } else if(status == DataStaging::DTRStatus::TRANSFERRED) { break; } else if(status == DataStaging::DTRStatus::TRANSFERRING) { } else if(status == DataStaging::DTRStatus::NULL_STATE) { } else { break; } CPPUNIT_ASSERT(cnt < 200); // 20s limit on transfer time Glib::usleep(100000); } CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::TRANSFERRED, status.GetStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::TEMPORARY_REMOTE_ERROR, dtr->get_error_status().GetErrorStatus()); } void DeliveryTest::TestDeliveryUnsupported() { std::string source("proto://host/file"); std::string destination("mock://mockdest/1"); std::string jobid("1234"); DataStaging::DTR_ptr dtr(new DataStaging::DTR(source,destination,cfg,jobid,Arc::User().get_uid(),logs,log_name)); CPPUNIT_ASSERT(!(*dtr)); // Pass DTR to Delivery DataStaging::DataDelivery delivery; delivery.start(); delivery.receiveDTR(dtr); // DTR should be checked by delivery and immediately set to TRANSFERRED // with error status set to LOGIC error CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::TRANSFERRED, dtr->get_status().GetStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::INTERNAL_LOGIC_ERROR, dtr->get_error_status().GetErrorStatus()); } CPPUNIT_TEST_SUITE_REGISTRATION(DeliveryTest); nordugrid-arc-6.14.0/src/libs/data-staging/test/PaxHeaders.30264/ProcessorTest.cpp0000644000000000000000000000013214152153376025731 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.499647387 30 ctime=1638455098.883554683 nordugrid-arc-6.14.0/src/libs/data-staging/test/ProcessorTest.cpp0000644000175000002070000004464614152153376025734 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include "../DTRStatus.h" #include "../Processor.h" using namespace DataStaging; class ProcessorTest : public CppUnit::TestFixture { CPPUNIT_TEST_SUITE(ProcessorTest); CPPUNIT_TEST(TestPreClean); CPPUNIT_TEST(TestCacheCheck); CPPUNIT_TEST(TestResolve); CPPUNIT_TEST(TestQueryReplica); CPPUNIT_TEST(TestReplicaRegister); CPPUNIT_TEST(TestCacheProcess); CPPUNIT_TEST_SUITE_END(); public: void TestPreClean(); void TestCacheCheck(); void TestResolve(); void TestQueryReplica(); void TestReplicaRegister(); void TestCacheProcess(); void setUp(); void tearDown(); private: std::list logs; char const * log_name; Arc::UserConfig cfg; std::string tmpdir; }; void ProcessorTest::setUp() { logs.clear(); const std::list& destinations = Arc::Logger::getRootLogger().getDestinations(); for(std::list::const_iterator dest = destinations.begin(); dest != destinations.end(); ++dest) { logs.push_back(*dest); } log_name = "DataStagingTest"; } void ProcessorTest::tearDown() { if (!tmpdir.empty()) Arc::DirDelete(tmpdir); } void ProcessorTest::TestPreClean() { // Note: mock doesn't really delete, but reports success std::string jobid("123456789"); std::string source("mock://mocksrc/1"); std::string destination("mock://mockdest/1"); DataStaging::DTR_ptr dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); dtr->set_status(DataStaging::DTRStatus::PRE_CLEAN); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); DataStaging::Processor processor; processor.start(); processor.receiveDTR(dtr); // sleep while thread deletes while 
(dtr->get_status().GetStatus() != DataStaging::DTRStatus::PRE_CLEANED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::NONE_ERROR, dtr->get_error_status().GetErrorStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::PRE_CLEANED, dtr->get_status().GetStatus()); // use a non-existent file destination = "fail://badhost/file1"; dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); dtr->set_status(DataStaging::DTRStatus::PRE_CLEAN); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); processor.receiveDTR(dtr); // sleep while thread deletes while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::PRE_CLEANED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::TEMPORARY_REMOTE_ERROR, dtr->get_error_status().GetErrorStatus()); // PRE_CLEANED is the correct status even after an error CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::PRE_CLEANED, dtr->get_status().GetStatus()); } void ProcessorTest::TestCacheCheck() { // create tmp cache dir for test CPPUNIT_ASSERT(Arc::TmpDirCreate(tmpdir)); std::string session(tmpdir); session += "/session"; std::string cache_dir(tmpdir); cache_dir += "/cache"; DataStaging::DTRCacheParameters cache_param; cache_param.cache_dirs.push_back(cache_dir); // use non-cacheable input and check it cannot be not cached std::string jobid("123456789"); std::string source("mock://mocksrc;cache=no/1"); std::string destination(std::string(session+"/file1")); DataStaging::DTR_ptr dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); CPPUNIT_ASSERT(dtr->get_cache_state() == DataStaging::NON_CACHEABLE); dtr->set_cache_parameters(cache_param); // use cacheable input - set invariant since mock does not set a modification // time and so cache file will appear outdated source = "mock://mocksrc;cache=invariant/1"; dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); CPPUNIT_ASSERT(dtr->get_cache_state() == DataStaging::CACHEABLE); dtr->set_cache_parameters(cache_param); dtr->set_status(DataStaging::DTRStatus::CHECK_CACHE); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); std::string cache_file(cache_dir + "/data/58/32ec5285b5990e13fd6628af93ea2b751dac7b"); DataStaging::Processor processor; processor.start(); processor.receiveDTR(dtr); // sleep while thread checks cache while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::CACHE_CHECKED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::CACHE_CHECKED, dtr->get_status().GetStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::CACHEABLE, dtr->get_cache_state()); CPPUNIT_ASSERT_EQUAL(cache_file, dtr->get_cache_file()); // locked file std::string lock_file(cache_file + ".lock"); int fd = ::open(lock_file.c_str(), O_WRONLY|O_CREAT, S_IRUSR|S_IWUSR); CPPUNIT_ASSERT(fd); char lock_contents[] = "1@localhost"; CPPUNIT_ASSERT(write(fd, lock_contents, sizeof(lock_contents)) > 0); CPPUNIT_ASSERT_EQUAL(0, close(fd)); dtr->set_status(DataStaging::DTRStatus::CHECK_CACHE); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); processor.receiveDTR(dtr); // sleep while thread checks cache while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::CACHE_WAIT) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::CACHE_WAIT, dtr->get_status().GetStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::CACHE_LOCKED, 
dtr->get_cache_state()); CPPUNIT_ASSERT_EQUAL(0, remove(lock_file.c_str())); // write cache file fd = ::open(cache_file.c_str(), O_WRONLY|O_CREAT, S_IRUSR|S_IWUSR); CPPUNIT_ASSERT(fd); char cache_file_contents[] = "abcde"; CPPUNIT_ASSERT(write(fd, cache_file_contents, sizeof(cache_file_contents)) > 0); CPPUNIT_ASSERT_EQUAL(0, close(fd)); // check again, should return already present dtr->set_status(DataStaging::DTRStatus::CHECK_CACHE); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); processor.receiveDTR(dtr); // sleep while thread checks cache while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::CACHE_CHECKED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::CACHE_CHECKED, dtr->get_status().GetStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::CACHE_ALREADY_PRESENT, dtr->get_cache_state()); CPPUNIT_ASSERT_EQUAL(0, remove(cache_file.c_str())); // test files using guids are handled properly source = "mock://mocksrc/1:guid=4a2b61aa-1e57-4d32-9f23-873a9c9b9aed"; dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); CPPUNIT_ASSERT(dtr->get_cache_state() == DataStaging::CACHEABLE); dtr->set_cache_parameters(cache_param); dtr->set_status(DataStaging::DTRStatus::CHECK_CACHE); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); cache_file = cache_dir + "/data/ba/bb0555ddfccde73069558aacfe512ea42c8c79"; processor.receiveDTR(dtr); // sleep while thread checks cache while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::CACHE_CHECKED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::CACHE_CHECKED, dtr->get_status().GetStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::CACHEABLE, dtr->get_cache_state()); CPPUNIT_ASSERT_EQUAL(cache_file, dtr->get_cache_file()); } void ProcessorTest::TestResolve() { // Note: using mock in resolve doesn't really test resolving since mock is // not a DataPointIndex DataStaging::Processor processor; processor.start(); std::string jobid("123456789"); // resolve a good source std::string source("mock://mocksrc/1"); std::string destination("mock://mockdest/1"); DataStaging::DTR_ptr dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); dtr->set_status(DataStaging::DTRStatus::RESOLVE); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); processor.receiveDTR(dtr); // sleep while thread resolves while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::RESOLVED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::NONE_ERROR, dtr->get_error_status().GetErrorStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::RESOLVED, dtr->get_status().GetStatus()); // check that it found replicas CPPUNIT_ASSERT(dtr->get_source()->HaveLocations()); /* This part can be uncommented if a mock index DataPoint exists // pre-register a good destination source = "mock://mocksrc/1"; destination = "mockindex://mock://mockdest/1@mockindexdest/1"; dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); dtr->set_status(DataStaging::DTRStatus::RESOLVE); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); processor.receiveDTR(dtr); // sleep while thread resolves while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::RESOLVED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::NONE_ERROR, dtr->get_error_status().GetErrorStatus()); 
CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::RESOLVED, dtr->get_status().GetStatus()); // check that it added the destination replica CPPUNIT_ASSERT(dtr->get_destination()->HaveLocations()); CPPUNIT_ASSERT_EQUAL(std::string("mock://mockdest/1"), dtr->get_destination()->CurrentLocation().str()); std::list files; CPPUNIT_ASSERT(dtr->get_destination()->List(files)); CPPUNIT_ASSERT_EQUAL(1, (int)files.size()); CPPUNIT_ASSERT_EQUAL(std::string("mockindex://mockindexdest/1"), files.front().GetName()); // test replication source = "mockindex://mockdestindex/ABCDE"; destination = "mockindex://mock://mockdest/ABCDE@mockindexdest/ABCDE"; dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); dtr->set_status(DataStaging::DTRStatus::RESOLVE); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); dtr->set_replication(true); // usually set automatically by scheduler processor.receiveDTR(dtr); // sleep while thread resolves while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::RESOLVED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::NONE_ERROR, dtr->get_error_status().GetErrorStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::RESOLVED, dtr->get_status().GetStatus()); // check that it found replicas CPPUNIT_ASSERT(dtr->get_source()->HaveLocations()); // check that it added the destination replica CPPUNIT_ASSERT(dtr->get_destination()->HaveLocations()); CPPUNIT_ASSERT_EQUAL(std::string("mock://mockdest/ABCDE"), dtr->get_destination()->CurrentLocation().str()); // copy to an existing LFN from a different LFN source = "mock://mocksrc/2"; destination = "mockindex://mock://mockdest/2@mockindexdest/ABCDE"; dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); dtr->set_status(DataStaging::DTRStatus::RESOLVE); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); processor.receiveDTR(dtr); // sleep while thread resolves while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::RESOLVED) Glib::usleep(100); // will fail since force_registration is not set CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::PERMANENT_REMOTE_ERROR, dtr->get_error_status().GetErrorStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::RESOLVED, dtr->get_status().GetStatus()); // set force registration and try again dtr->set_force_registration(true); dtr->reset_error_status(); dtr->set_status(DataStaging::DTRStatus::RESOLVE); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); processor.receiveDTR(dtr); // sleep while thread resolves while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::RESOLVED) Glib::usleep(100); // should be successful now CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::NONE_ERROR, dtr->get_error_status().GetErrorStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::RESOLVED, dtr->get_status().GetStatus()); // check that it added the destination replica CPPUNIT_ASSERT(dtr->get_destination()->HaveLocations()); CPPUNIT_ASSERT_EQUAL(std::string("mock://mockdest/2"), dtr->get_destination()->CurrentLocation().str()); */ } void ProcessorTest::TestQueryReplica() { // query a valid file std::string jobid("123456789"); std::string source("mock://mocksrc/1"); std::string destination("mock://mockdest/1"); DataStaging::DTR_ptr dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); 
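  // Hand the DTR to a running Processor: set the QUERY_REPLICA state, push it to
  // the pre-processor queue, call receiveDTR(), then busy-wait on get_status()
  // until the processor thread reports REPLICA_QUERIED.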
DataStaging::Processor processor; processor.start(); dtr->set_status(DataStaging::DTRStatus::QUERY_REPLICA); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); processor.receiveDTR(dtr); // sleep while replica is queried while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::REPLICA_QUERIED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::NONE_ERROR, dtr->get_error_status().GetErrorStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::REPLICA_QUERIED, dtr->get_status().GetStatus()); // invalid file source = "fail://mocksrc/1"; dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); dtr->set_status(DataStaging::DTRStatus::QUERY_REPLICA); DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR); processor.receiveDTR(dtr); // sleep while replica is queried while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::REPLICA_QUERIED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::TEMPORARY_REMOTE_ERROR, dtr->get_error_status().GetErrorStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::REPLICA_QUERIED, dtr->get_status().GetStatus()); } void ProcessorTest::TestReplicaRegister() { /* Needs mock index DMC DataStaging::Processor processor; processor.start(); std::string jobid("123456789"); // register a file std::string source("mock://mocksrc/1"); std::string destination("mockindex://mock://mockdest/1@mockindexdest/1"); DataStaging::DTR_ptr dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); CPPUNIT_ASSERT(*dtr); // have to resolve first CPPUNIT_ASSERT(dtr->get_destination()->Resolve(false).Passed()); CPPUNIT_ASSERT(dtr->get_destination()->HaveLocations()); CPPUNIT_ASSERT_EQUAL(std::string("mock://mockdest/1"), dtr->get_destination()->CurrentLocation().str()); // pre-register CPPUNIT_ASSERT(dtr->get_destination()->PreRegister(false, false).Passed()); // post-register dtr->set_status(DataStaging::DTRStatus::REGISTER_REPLICA); DataStaging::DTR::push(dtr, DataStaging::POST_PROCESSOR); processor.receiveDTR(dtr); // sleep while thread resgisters while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::REPLICA_REGISTERED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::NONE_ERROR, dtr->get_error_status().GetErrorStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::REPLICA_REGISTERED, dtr->get_status().GetStatus()); // check registration is ok Arc::FileInfo file; Arc::DataPoint::DataPointInfoType verb = (Arc::DataPoint::DataPointInfoType)(Arc::DataPoint::INFO_TYPE_CONTENT | Arc::DataPoint::INFO_TYPE_STRUCT); CPPUNIT_ASSERT(dtr->get_destination()->Stat(file, verb).Passed()); std::list replicas = file.GetURLs(); CPPUNIT_ASSERT_EQUAL(1, (int)replicas.size()); CPPUNIT_ASSERT_EQUAL(std::string("mock://mockdest/1"), replicas.front().str()); // clean up CPPUNIT_ASSERT(dtr->get_destination()->Unregister(true).Passed()); */ } void ProcessorTest::TestCacheProcess() { CPPUNIT_ASSERT(Arc::TmpDirCreate(tmpdir)); std::string session(tmpdir); session += "/session"; std::string cache_dir(tmpdir); cache_dir += "/cache"; DataStaging::DTRCacheParameters cache_param; cache_param.cache_dirs.push_back(cache_dir); std::string jobid("123456789"); std::string source("mock://mocksrc/1"); std::string destination(std::string(session+"/file1")); DataStaging::DTR_ptr dtr = new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name); CPPUNIT_ASSERT(dtr); 
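  // TestCacheProcess: run PROCESS_CACHE first with no cache file present (which
  // should fail with CACHE_ERROR), then create the cache file and check that the
  // per-job link and the session-directory link are created on the second pass.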
CPPUNIT_ASSERT(*dtr); CPPUNIT_ASSERT(dtr->get_cache_state() == DataStaging::CACHEABLE); dtr->set_cache_parameters(cache_param); // process with no cache file present std::string cache_file(cache_dir + "/data/58/32ec5285b5990e13fd6628af93ea2b751dac7b"); remove(cache_file.c_str()); DataStaging::Processor processor; processor.start(); dtr->set_status(DataStaging::DTRStatus::PROCESS_CACHE); DataStaging::DTR::push(dtr, DataStaging::POST_PROCESSOR); processor.receiveDTR(dtr); // sleep while cache is processed while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::CACHE_PROCESSED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::CACHE_ERROR, dtr->get_error_status().GetErrorStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::CACHE_PROCESSED, dtr->get_status().GetStatus()); // create cache file and try again CPPUNIT_ASSERT(Arc::DirCreate(std::string(cache_dir+"/data/58"), 0700, true)); int fd = ::open(cache_file.c_str(), O_WRONLY|O_CREAT, S_IRUSR|S_IWUSR); CPPUNIT_ASSERT(fd); char cache_file_contents[] = "abcde"; CPPUNIT_ASSERT_EQUAL_MESSAGE(Arc::StrError(errno), (int)sizeof(cache_file_contents), (int)write(fd, cache_file_contents, sizeof(cache_file_contents))); CPPUNIT_ASSERT_EQUAL(0, close(fd)); dtr->reset_error_status(); dtr->set_status(DataStaging::DTRStatus::PROCESS_CACHE); DataStaging::DTR::push(dtr, DataStaging::POST_PROCESSOR); processor.receiveDTR(dtr); // sleep while cache is processed while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::CACHE_PROCESSED) Glib::usleep(100); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRErrorStatus::NONE_ERROR, dtr->get_error_status().GetErrorStatus()); CPPUNIT_ASSERT_EQUAL(DataStaging::DTRStatus::CACHE_PROCESSED, dtr->get_status().GetStatus()); // check correct links exist struct stat st; CPPUNIT_ASSERT_EQUAL(0, stat(std::string(cache_dir + "/joblinks/123456789/file1").c_str(), &st)); CPPUNIT_ASSERT_EQUAL(0, stat(std::string(session + "/file1").c_str(), &st)); } CPPUNIT_TEST_SUITE_REGISTRATION(ProcessorTest); nordugrid-arc-6.14.0/src/libs/data-staging/test/PaxHeaders.30264/DTRTest.cpp0000644000000000000000000000013214152153376024403 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.499647387 30 ctime=1638455098.881554653 nordugrid-arc-6.14.0/src/libs/data-staging/test/DTRTest.cpp0000644000175000002070000000602614152153376024374 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include "../DTR.h" using namespace DataStaging; class DTRTest : public CppUnit::TestFixture { CPPUNIT_TEST_SUITE(DTRTest); CPPUNIT_TEST(TestDTRConstructor); CPPUNIT_TEST(TestDTREndpoints); CPPUNIT_TEST_SUITE_END(); public: void TestDTRConstructor(); void TestDTREndpoints(); void setUp(); void tearDown(); private: std::list logs; char const * log_name; Arc::UserConfig cfg; }; void DTRTest::setUp() { logs.clear(); const std::list& destinations = Arc::Logger::getRootLogger().getDestinations(); for(std::list::const_iterator dest = destinations.begin(); dest != destinations.end(); ++dest) { logs.push_back(*dest); } log_name = "DataStagingTest"; } void DTRTest::tearDown() { } void DTRTest::TestDTRConstructor() { std::string jobid("123456789"); std::string source("mock://mocksrc/1"); std::string destination("mock://mockdest/1"); DataStaging::DTR_ptr dtr(new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name)); CPPUNIT_ASSERT(*dtr); CPPUNIT_ASSERT(!dtr->get_id().empty()); // Copy constructor DataStaging::DTR_ptr dtr2(dtr); CPPUNIT_ASSERT(*dtr2); 
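  // Note: DTR_ptr appears to be a reference-counted smart pointer, so dtr2(dtr)
  // copies the handle rather than the DTR object itself. Both handles then refer
  // to the same DTR, which would explain why the comparisons below (including
  // get_source()) succeed despite the question raised in the original comment.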
CPPUNIT_ASSERT_EQUAL(dtr->get_id(), dtr2->get_id()); // a new DataHandle object is created for the new DTR so they should // not be equal. Why does this test pass???? CPPUNIT_ASSERT_EQUAL(dtr->get_source(), dtr2->get_source()); CPPUNIT_ASSERT_EQUAL(dtr->get_owner(), dtr2->get_owner()); CPPUNIT_ASSERT_EQUAL(dtr->get_status().GetStatus(), dtr2->get_status().GetStatus()); // check that creating and destroying a copy doesn't affect the original { DataStaging::DTR_ptr dtr3(dtr); CPPUNIT_ASSERT(*dtr3); } CPPUNIT_ASSERT_EQUAL(std::string("mock://mocksrc/1"), dtr->get_source()->str()); // make a bad DTR source = "myprocotol://blabla/file1"; DataStaging::DTR_ptr dtr4(new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name)); CPPUNIT_ASSERT(!(*dtr4)); // bad DTR copying to itself DataStaging::DTR_ptr dtr5(new DataStaging::DTR(source, source, cfg, jobid, Arc::User().get_uid(), logs, log_name)); CPPUNIT_ASSERT(!(*dtr5)); } void DTRTest::TestDTREndpoints() { std::string jobid("123456789"); std::string source("mock://mocksrc/1"); std::string destination("mock://mockdest/1"); DataStaging::DTR_ptr dtr(new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name)); CPPUNIT_ASSERT(*dtr); CPPUNIT_ASSERT_EQUAL(std::string("mock://mocksrc/1"), dtr->get_source()->str()); CPPUNIT_ASSERT_EQUAL(std::string("mock://mockdest/1"), dtr->get_destination()->str()); // create a bad url source = "mock:/file1"; DataStaging::DTR_ptr dtrbad(new DataStaging::DTR(source, destination, cfg, jobid, Arc::User().get_uid(), logs, log_name)); CPPUNIT_ASSERT(!dtrbad->get_source()->GetURL()); // TODO DTR validity } CPPUNIT_TEST_SUITE_REGISTRATION(DTRTest); nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DTR.cpp0000644000000000000000000000013214152153376022564 xustar000000000000000030 mtime=1638455038.402645929 30 atime=1638455038.498647372 30 ctime=1638455098.853554232 nordugrid-arc-6.14.0/src/libs/data-staging/DTR.cpp0000644000175000002070000003474314152153376022564 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include "Processor.h" #include "DataDelivery.h" #include "Scheduler.h" #include "DTR.h" namespace DataStaging { static const char* const owner_name[] = { "GENERATOR", "SCHEDULER", "PRE-PROCESSOR", "DELIVERY", "POST-PROCESSOR" }; static const char* get_owner_name(StagingProcesses proc) { if(((int)proc) < 0) return ""; if(((int)proc) >= sizeof(owner_name)/sizeof(const char*)) return ""; return owner_name[proc]; } const Arc::URL DTR::LOCAL_DELIVERY("file:/local"); Arc::LogLevel DTR::LOG_LEVEL(Arc::WARNING); DTR::DTR(const std::string& source, const std::string& destination, const Arc::UserConfig& usercfg, const std::string& jobid, const uid_t& uid, const std::list& logs, const std::string& logname) : DTR_ID(""), source_url(source), destination_url(destination), cfg(usercfg), source_endpoint(source_url, cfg), destination_endpoint(destination_url, cfg), source_url_str(source_url.str()), destination_url_str(destination_url.str()), use_acix(false), user(uid), parent_job_id(jobid), priority(50), transfershare("_default"), sub_share(""), tries_left(1), initial_tries(1), replication(false), force_registration(false), status(DTRStatus::NEW,"Created by the generator"), bytes_transferred(0), transfer_time(0), created(time(NULL)), cancel_request(false), bulk_start(false), bulk_end(false), source_supports_bulk(false), mandatory(true), delivery_endpoint(LOCAL_DELIVERY), 
use_host_cert_for_remote_delivery(false), current_owner(GENERATOR), log_destinations(logs), perf_record(perf_log) { logger = new Arc::Logger(Arc::Logger::getRootLogger(), logname.c_str()); logger->addDestinations(get_log_destinations()); // check that endpoints can be handled if (!source_endpoint || !(*source_endpoint)) { logger->msg(Arc::ERROR, "Could not handle endpoint %s", source); return; } if (!destination_endpoint || !(*destination_endpoint)) { logger->msg(Arc::ERROR, "Could not handle endpoint %s", destination); return; } // Some validation checks if (source_url == destination_url) { // It is possible to replicate inside an index service // The physical replicas will be checked in RESOLVING if (source_endpoint->IsIndex() && destination_endpoint->IsIndex()) { replication = true; } else { logger->msg(Arc::ERROR, "Source is the same as destination"); set_error_status(DTRErrorStatus::SELF_REPLICATION_ERROR, DTRErrorStatus::NO_ERROR_LOCATION, "Cannot replicate a file to itself"); return; } } // set insecure by default. Real value will come from configuration source_endpoint->SetSecure(false); destination_endpoint->SetSecure(false); // check for bulk support - call bulk methods with empty list std::list datapoints; if (source_endpoint->IsIndex()) { if (source_endpoint->Resolve(true, datapoints) == Arc::DataStatus::Success) source_supports_bulk = true; } else { std::list files; if (source_endpoint->Stat(files, datapoints) == Arc::DataStatus::Success) source_supports_bulk = true; } cache_state = (source_endpoint->Cache() && destination_endpoint->Local()) ? CACHEABLE : NON_CACHEABLE; if (source_url.Option("failureallowed") == "yes" || destination_url.Option("failureallowed") == "yes") { mandatory = false; } /* Think how to populate transfer parameters */ mark_modification(); set_timeout(60); // setting ID last means all the previous steps have to pass for the DTR to be valid DTR_ID = Arc::UUID(); // Prefix all log messages for this DTR with the short ID for (std::list::iterator dest = log_destinations.begin(); dest != log_destinations.end(); ++dest) { (*dest)->setPrefix("DTR " + get_short_id() + ": "); } } std::list DTR::get_log_destinations() const { std::list log_dest; for (std::list::const_iterator dest = log_destinations.begin(); dest != log_destinations.end(); ++dest) { log_dest.push_back(dest->Ptr()); } return log_dest; } void DTR::registerCallback(DTRCallback* cb, StagingProcesses owner) { lock.lock(); proc_callback[owner].push_back(cb); lock.unlock(); } void DTR::reset() { // remove resolved locations if (source_endpoint->IsIndex()) { source_endpoint->ClearLocations(); } // clear any transfer locations source_endpoint->ClearTransferLocations(); // reset retry count to 1 source_endpoint->SetTries(1); if (destination_endpoint->IsIndex()) { destination_endpoint->ClearLocations(); } destination_endpoint->ClearTransferLocations(); destination_endpoint->SetTries(1); // empty cache and map info cache_file.clear(); mapped_source.clear(); bytes_transferred = 0; transfer_time = 0; reset_error_status(); } void DTR::set_id(const std::string& id) { // sanity check - regular expressions would be useful here if (id.length() != DTR_ID.length()) { logger->msg(Arc::WARNING, "Invalid ID: %s", id); } else { DTR_ID = id; // Change logging prefix to new ID for (std::list::iterator dest = log_destinations.begin(); dest != log_destinations.end(); ++dest) { (*dest)->setPrefix("DTR " + get_short_id() + ": "); } } } std::string DTR::get_short_id() const { if(DTR_ID.length() < 8) return DTR_ID; std::string 
short_id(DTR_ID.substr(0,4)+"..."+DTR_ID.substr(DTR_ID.length()-4)); return short_id; } void DTR::set_priority(int pri) { // limit priority between 1 and 100 if (pri <= 0) pri = 1; if (pri > 100) pri = 100; priority = pri; mark_modification(); } void DTR::set_tries_left(unsigned int tries) { initial_tries = tries; tries_left = initial_tries; } void DTR::decrease_tries_left() { if (tries_left > 0) tries_left--; } void DTR::set_status(DTRStatus stat) { logger->msg(Arc::VERBOSE, "%s->%s", status.str(), stat.str()); lock.lock(); status = stat; lock.unlock(); mark_modification(); } DTRStatus DTR::get_status() { lock.lock(); DTRStatus s = status; lock.unlock(); return s; } void DTR::set_error_status(DTRErrorStatus::DTRErrorStatusType error_stat, DTRErrorStatus::DTRErrorLocation error_loc, const std::string& desc) { lock.lock(); error_status = DTRErrorStatus(error_stat, status.GetStatus(), error_loc, desc); lock.unlock(); mark_modification(); } void DTR::reset_error_status() { lock.lock(); error_status = DTRErrorStatus(); lock.unlock(); mark_modification(); } DTRErrorStatus DTR::get_error_status() { lock.lock(); DTRErrorStatus s = error_status; lock.unlock(); return s; } void DTR::set_bytes_transferred(unsigned long long int bytes) { bytes_transferred = bytes; } void DTR::set_transfer_time(unsigned long long int t) { transfer_time = t; } void DTR::set_cache_file(const std::string& filename) { cache_file = filename; mark_modification(); } void DTR::set_cache_state(CacheState state) { cache_state = state; mark_modification(); } void DTR::set_cancel_request() { cancel_request = true; // set process time to now so it is picked up straight away set_process_time(0); mark_modification(); } void DTR::set_process_time(const Arc::Period& process_time) { Arc::Time t; t = t + process_time; next_process_time.SetTime(t.GetTime(), t.GetTimeNanoseconds()); } bool DTR::bulk_possible() { if (status == DTRStatus::RESOLVE && source_supports_bulk) return true; if (status == DTRStatus::QUERY_REPLICA) { std::list files; std::list datapoints; if (source_endpoint->CurrentLocationHandle()->Stat(files, datapoints) == Arc::DataStatus::Success) return true; } return false; } std::list DTR::get_callbacks(const std::map >& proc_callback, StagingProcesses owner) { std::list l; lock.lock(); std::map >::const_iterator c = proc_callback.find(owner); if(c == proc_callback.end()) { lock.unlock(); return l; } l = c->second; lock.unlock(); return l; } void DTR::push(DTR_ptr dtr, StagingProcesses new_owner) { /* This function contains necessary operations * to pass the pointer to this DTR to another * process and make sure that the process accepted it */ dtr->lock.lock(); dtr->current_owner = new_owner; dtr->lock.unlock(); std::list callbacks = dtr->get_callbacks(dtr->proc_callback,dtr->current_owner); if (callbacks.empty()) dtr->logger->msg(Arc::INFO, "No callback for %s defined", get_owner_name(dtr->current_owner)); for (std::list::iterator callback = callbacks.begin(); callback != callbacks.end(); ++callback) { switch(dtr->current_owner) { case GENERATOR: case SCHEDULER: case PRE_PROCESSOR: case DELIVERY: case POST_PROCESSOR: { // call registered callback if (*callback) (*callback)->receiveDTR(dtr); else dtr->logger->msg(Arc::WARNING, "NULL callback for %s", get_owner_name(dtr->current_owner)); } break; default: // impossible dtr->logger->msg(Arc::INFO, "Request to push to unknown owner - %u", (unsigned int)dtr->current_owner); break; } } dtr->mark_modification(); } bool DTR::suspend() { /* This function will contain necessary 
operations * to stop the transfer in the DTR */ mark_modification(); return true; } bool DTR::is_destined_for_pre_processor() const { return (status == DTRStatus::PRE_CLEAN || status == DTRStatus::CHECK_CACHE || status == DTRStatus::RESOLVE || status == DTRStatus::QUERY_REPLICA || status == DTRStatus::STAGE_PREPARE); } bool DTR::is_destined_for_post_processor() const { return (status == DTRStatus::RELEASE_REQUEST || status == DTRStatus::REGISTER_REPLICA || status == DTRStatus::PROCESS_CACHE); } bool DTR::is_destined_for_delivery() const { return (status == DTRStatus::TRANSFER); } bool DTR::came_from_pre_processor() const { return (status == DTRStatus::PRE_CLEANED || status == DTRStatus::CACHE_WAIT || status == DTRStatus::CACHE_CHECKED || status == DTRStatus::RESOLVED || status == DTRStatus::REPLICA_QUERIED || status == DTRStatus::STAGING_PREPARING_WAIT || status == DTRStatus::STAGED_PREPARED); } bool DTR::came_from_post_processor() const { return (status == DTRStatus::REQUEST_RELEASED || status == DTRStatus::REPLICA_REGISTERED || status == DTRStatus::CACHE_PROCESSED); } bool DTR::came_from_delivery() const { return (status == DTRStatus::TRANSFERRED); } bool DTR::came_from_generator() const { return (status == DTRStatus::NEW); } bool DTR::is_in_final_state() const { return (status == DTRStatus::DONE || status == DTRStatus::CANCELLED || status == DTRStatus::ERROR); } void DTR::set_transfer_share(const std::string& share_name) { lock.lock(); transfershare = share_name; if (!sub_share.empty()) transfershare += "-" + sub_share; lock.unlock(); } DTRCacheParameters::DTRCacheParameters(std::vector caches, std::vector drain_caches, std::vector readonly_caches): cache_dirs(caches), drain_cache_dirs(drain_caches), readonly_cache_dirs(readonly_caches) { } DTRCredentialInfo::DTRCredentialInfo(const std::string& DN, const Arc::Time& expirytime, const std::list vomsfqans): DN(DN), expirytime(expirytime), vomsfqans(vomsfqans) { } std::string DTRCredentialInfo::extractVOMSVO() const { if (vomsfqans.empty()) return ""; std::vector parts; Arc::tokenize(*(vomsfqans.begin()), parts, "/"); return parts.at(0); } std::string DTRCredentialInfo::extractVOMSGroup() const { if (vomsfqans.empty()) return ""; std::string vomsvo; for (std::list::const_iterator i = vomsfqans.begin(); i != vomsfqans.end(); ++i) { std::vector parts; Arc::tokenize(*i, parts, "/"); if (vomsvo.empty()) vomsvo = parts.at(0); if (parts.size() > 1 && parts.at(1).find("Role=") != 0) { return std::string(vomsvo+":"+parts.at(1)); } } return std::string(vomsvo + ":null"); } std::string DTRCredentialInfo::extractVOMSRole() const { if (vomsfqans.empty()) return ""; std::string vomsvo; for (std::list::const_iterator i = vomsfqans.begin(); i != vomsfqans.end(); ++i) { std::vector parts; Arc::tokenize(*i, parts, "/"); if (vomsvo.empty()) vomsvo = parts.at(0); if (parts.size() > 1 && parts.at(1).find("Role=") == 0) { return std::string(parts.at(0)+":"+parts.at(1).substr(5)); } } return std::string(vomsvo + ":null"); } DTR_ptr createDTRPtr(const std::string& source, const std::string& destination, const Arc::UserConfig& usercfg, const std::string& jobid, const uid_t& uid, const std::list& logs, const std::string& logname) { return DTR_ptr(new DTR(source, destination, usercfg, jobid, uid, logs, logname)); } DTRLogger createDTRLogger(Arc::Logger& parent, const std::string& subdomain) { return DTRLogger(new Arc::Logger(parent, subdomain)); } } // namespace DataStaging 
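The constructor, push() and receiveDTR() calls implemented above are exercised by the unit tests earlier in this archive. The following is a minimal usage sketch only, not part of the original sources: it assumes the log-destination list element type is DTRLogDestination (template arguments and include targets have been stripped from this dump, so header paths are guessed from the installed layout), and it reuses the mock:// endpoints from the tests, which require the test's mock plugin to be loadable.

#include <list>
#include <glibmm.h>
#include <arc/User.h>
#include <arc/UserConfig.h>
#include <arc/data-staging/DTR.h>        // assumed installed header locations
#include <arc/data-staging/Processor.h>

// Drive a single DTR through the pre-processor RESOLVE step, mirroring
// ProcessorTest::TestResolve above.
void resolve_one(const Arc::UserConfig& cfg,
                 const std::list<DataStaging::DTRLogDestination>& logs) {
  DataStaging::DTR_ptr dtr = DataStaging::createDTRPtr(
      "mock://mocksrc/1", "mock://mockdest/1",   // source and destination URLs
      cfg, "123456789",                          // user configuration and job id
      Arc::User().get_uid(), logs, "DataStagingExample");
  if (!(*dtr)) return;                           // endpoints could not be handled

  DataStaging::Processor processor;
  processor.start();
  dtr->set_status(DataStaging::DTRStatus::RESOLVE);
  DataStaging::DTR::push(dtr, DataStaging::PRE_PROCESSOR);
  processor.receiveDTR(dtr);                     // work is done in a processor thread

  // Poll until the processor thread has finished, as the unit tests do.
  while (dtr->get_status().GetStatus() != DataStaging::DTRStatus::RESOLVED)
    Glib::usleep(100);
}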
nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DTRList.cpp0000644000000000000000000000013214152153376023420 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.854554247 nordugrid-arc-6.14.0/src/libs/data-staging/DTRList.cpp0000644000175000002070000002050114152153376023403 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include "DTRList.h" namespace DataStaging { bool DTRList::add_dtr(DTR_ptr DTRToAdd) { Lock.lock(); DTRs.push_back(DTRToAdd); Lock.unlock(); // Added successfully return true; } bool DTRList::delete_dtr(DTR_ptr DTRToDelete) { Lock.lock(); DTRs.remove(DTRToDelete); Lock.unlock(); // Deleted successfully return true; } bool DTRList::filter_dtrs_by_owner(StagingProcesses OwnerToFilter, std::list& FilteredList){ std::list::iterator it; Lock.lock(); for(it = DTRs.begin();it != DTRs.end(); ++it) if((*it)->get_owner() == OwnerToFilter) FilteredList.push_back(*it); Lock.unlock(); // Filtered successfully return true; } int DTRList::number_of_dtrs_by_owner(StagingProcesses OwnerToFilter){ std::list::iterator it; int counter = 0; Lock.lock(); for(it = DTRs.begin();it != DTRs.end(); ++it) if((*it)->get_owner() == OwnerToFilter) counter++; Lock.unlock(); // Filtered successfully return counter; } bool DTRList::filter_dtrs_by_status(DTRStatus::DTRStatusType StatusToFilter, std::list& FilteredList){ std::vector StatusesToFilter(1, StatusToFilter); return filter_dtrs_by_statuses(StatusesToFilter, FilteredList); } bool DTRList::filter_dtrs_by_statuses(const std::vector& StatusesToFilter, std::list& FilteredList){ std::list::iterator it; Lock.lock(); for(it = DTRs.begin();it != DTRs.end(); ++it) { for (std::vector::const_iterator i = StatusesToFilter.begin(); i != StatusesToFilter.end(); ++i) { if((*it)->get_status().GetStatus() == *i) { FilteredList.push_back(*it); break; } } } Lock.unlock(); // Filtered successfully return true; } bool DTRList::filter_dtrs_by_statuses(const std::vector& StatusesToFilter, std::map >& FilteredList) { std::list::iterator it; Lock.lock(); for(it = DTRs.begin();it != DTRs.end(); ++it) { for (std::vector::const_iterator i = StatusesToFilter.begin(); i != StatusesToFilter.end(); ++i) { if((*it)->get_status().GetStatus() == *i) { FilteredList[*i].push_back(*it); break; } } } Lock.unlock(); // Filtered successfully return true; } bool DTRList::filter_dtrs_by_next_receiver(StagingProcesses NextReceiver, std::list& FilteredList) { std::list::iterator it; switch(NextReceiver){ case PRE_PROCESSOR: { Lock.lock(); for(it = DTRs.begin();it != DTRs.end(); ++it) if((*it)->is_destined_for_pre_processor()) FilteredList.push_back(*it); Lock.unlock(); return true; } case POST_PROCESSOR: { Lock.lock(); for(it = DTRs.begin();it != DTRs.end(); ++it) if((*it)->is_destined_for_post_processor()) FilteredList.push_back(*it); Lock.unlock(); return true; } case DELIVERY: { Lock.lock(); for(it = DTRs.begin();it != DTRs.end(); ++it) if((*it)->is_destined_for_delivery()) FilteredList.push_back(*it); Lock.unlock(); return true; } default: // A strange receiver requested return false; } } bool DTRList::filter_pending_dtrs(std::list& FilteredList){ std::list::iterator it; Arc::Time now; Lock.lock(); for(it = DTRs.begin();it != DTRs.end(); ++it){ if( ((*it)->came_from_pre_processor() || (*it)->came_from_post_processor() || (*it)->came_from_delivery() || (*it)->came_from_generator()) && ((*it)->get_process_time() <= now) ) FilteredList.push_back(*it); } Lock.unlock(); // Filtered 
successfully return true; } bool DTRList::filter_dtrs_by_job(const std::string& jobid, std::list& FilteredList) { std::list::iterator it; Lock.lock(); for(it = DTRs.begin();it != DTRs.end(); ++it) if((*it)->get_parent_job_id() == jobid) FilteredList.push_back(*it); Lock.unlock(); // Filtered successfully return true; } void DTRList::check_priority_changes(const std::string& filename) { // Check for file with requested changes std::list prio_info; if (!Arc::FileRead(filename, prio_info)) return; Arc::FileCopy(filename, std::string(filename + ".read")); Arc::FileDelete(filename); std::map new_prio; for (std::list::const_iterator i = prio_info.begin(); i != prio_info.end(); ++i) { std::list tokens; Arc::tokenize(*i, tokens); unsigned int prio; if (tokens.size() == 2 && Arc::stringto(tokens.back(), prio)) { new_prio[tokens.front()] = prio; } } std::list::iterator it; Lock.lock(); for(it = DTRs.begin();it != DTRs.end(); ++it) { if(new_prio.find((*it)->get_id()) != new_prio.end()) { (*it)->set_priority(new_prio[(*it)->get_id()]); } } Lock.unlock(); } void DTRList::caching_started(DTR_ptr request) { CachingLock.lock(); CachingSources[request->get_source_str()] = request->get_priority(); CachingLock.unlock(); } void DTRList::caching_finished(DTR_ptr request) { CachingLock.lock(); CachingSources.erase(request->get_source_str()); CachingLock.unlock(); } bool DTRList::is_being_cached(DTR_ptr DTRToCheck) { CachingLock.lock(); std::map::iterator i = CachingSources.find(DTRToCheck->get_source_str()); bool caching = (i != CachingSources.end()); // If already caching, find the DTR and increase its priority if necessary if (caching && i->second < DTRToCheck->get_priority()) { Lock.lock(); for(std::list::iterator it = DTRs.begin();it != DTRs.end(); ++it) { if ((*it)->get_source_str() == DTRToCheck->get_source_str() && ((*it)->get_status() != DTRStatus::CACHE_WAIT && (*it)->get_status() != DTRStatus::CHECK_CACHE)) { (*it)->get_logger()->msg(Arc::INFO, "Boosting priority from %i to %i due to incoming higher priority DTR", (*it)->get_priority(), DTRToCheck->get_priority()); (*it)->set_priority(DTRToCheck->get_priority()); CachingSources[DTRToCheck->get_source_str()] = DTRToCheck->get_priority(); } } Lock.unlock(); } CachingLock.unlock(); return caching; } bool DTRList::empty() { Lock.lock(); bool empty = DTRs.empty(); Lock.unlock(); return empty; } unsigned int DTRList::size() { Lock.lock(); unsigned int size = DTRs.size(); Lock.unlock(); return size; } std::list DTRList::all_jobs() { std::list alljobs; std::list::iterator it; Lock.lock(); for(it = DTRs.begin();it != DTRs.end(); ++it) { std::list::iterator i = alljobs.begin(); for (; i != alljobs.end(); ++i) { if (*i == (*it)->get_parent_job_id()) break; } if (i == alljobs.end()) alljobs.push_back((*it)->get_parent_job_id()); } Lock.unlock(); return alljobs; } void DTRList::dumpState(const std::string& path) { // only files supported for now - simply overwrite path std::string data; Lock.lock(); for(std::list::iterator it = DTRs.begin();it != DTRs.end(); ++it) { data += (*it)->get_id() + " " + (*it)->get_status().str() + " " + Arc::tostring((*it)->get_priority()) + " " + (*it)->get_transfer_share(); // add destination for recovery after crash if ((*it)->get_status() == DTRStatus::TRANSFERRING || (*it)->get_status() == DTRStatus::TRANSFER) { data += " " + (*it)->get_destination()->CurrentLocation().fullstr(); data += " " + (*it)->get_delivery_endpoint().Host(); } data += "\n"; } Lock.unlock(); Arc::FileCreate(path, data); } } // namespace DataStaging 
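check_priority_changes() above expects a plain-text file containing one DTR ID and one integer priority per line (the file is copied to "<filename>.read" and deleted after parsing), while dumpState() writes one space-separated line per DTR: ID, status, priority and transfer share, plus the destination URL and delivery host while the DTR is transferring. Illustrative only, with an invented DTR ID:

# priority override file consumed by check_priority_changes()
5a1cbb6e-32a1-4bcd-9f01-aabbccddeeff 80

# shape of one dumpState() line (bracketed fields only while transferring)
<DTR id> <status> <priority> <transfer share> [<destination URL> <delivery host>]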
nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/TransferShares.h0000644000000000000000000000013214152153376024532 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.848554157 nordugrid-arc-6.14.0/src/libs/data-staging/TransferShares.h0000644000175000002070000001171014152153376024517 0ustar00mockbuildmock00000000000000#ifndef TRANSFERSHARES_H_ #define TRANSFERSHARES_H_ #include #include "DTR.h" namespace DataStaging { /// TransferSharesConf describes the configuration of TransferShares. /** * It allows reference shares to be defined with certain priorities. An * instance of this class is used when creating a TransferShares object. * \ingroup datastaging * \headerfile TransferShares.h arc/data-staging/TransferShares.h */ class TransferSharesConf { public: /// The criterion for assigning a share to a DTR enum ShareType { /// Shares are defined per DN of the user's proxy USER, /// Shares are defined per VOMS VO of the user's proxy VO, /// Shares are defined per VOMS group of the user's proxy GROUP, /// Shares are defined per VOMS role of the user's proxy ROLE, /// No share criterion - all DTRs will be assigned to a single share NONE }; private: /// ReferenceShares are special shares defined in the configuration with /// specific priorities. The "_default" share must always be defined. std::map ReferenceShares; /// Configured share type ShareType shareType; public: /// Construct a new TransferSharesConf with given share type and reference shares TransferSharesConf(const std::string& type, const std::map& ref_shares); /// Construct a new TransferSharesConf with no defined shares or policy TransferSharesConf(); /// Set the share type void set_share_type(const std::string& type); /// Add a reference share void set_reference_share(const std::string& RefShare, int Priority); /// Set reference shares void set_reference_shares(const std::map& shares); /// Returns true if the given share is a reference share bool is_configured(const std::string& ShareToCheck); /// Get the priority of this share int get_basic_priority(const std::string& ShareToCheck); /// Return human-readable configuration of shares std::string conf() const; /// Get the name of the share the DTR should be assigned to and the proxy type std::string extract_share_info(DTR_ptr DTRToExtract); }; /// TransferShares is used to implement fair-sharing and priorities. /** * TransferShares defines the algorithm used to prioritise and share * transfers among different users or groups. Configuration information on * the share type and reference shares is held in a TransferSharesConf * instance. The Scheduler uses TransferShares to determine which DTRs in the * queue for each process go first. The calculation is based on the * configuration and the currently active shares (the DTRs already in the * process). can_start() is the method called by the Scheduler to * determine whether a particular share has an available slot in the process. 
* \ingroup datastaging * \headerfile TransferShares.h arc/data-staging/TransferShares.h */ class TransferShares { private: /// Configuration of share type and reference shares TransferSharesConf conf; /// Shares which are active, ie running or in the queue, and number of DTRs std::map ActiveShares; /// How many transfer slots each active share can grab std::map ActiveSharesSlots; public: /// Create a new TransferShares with default configuration TransferShares() {}; /// Create a new TransferShares with given configuration TransferShares(const TransferSharesConf& shares_conf); /// Empty destructor ~TransferShares(){}; /// Set a new configuration, if a new reference share gets added for example void set_shares_conf(const TransferSharesConf& share_conf); /// Calculate how many slots to assign to each active share. /** * This method is called each time the Scheduler loops to calculate the * number of slots to assign to each share, based on the current number * of active shares and the shares' relative priorities. */ void calculate_shares(int TotalNumberOfSlots); /// Increase by one the active count for the given share. Called when a new DTR enters the queue. void increase_transfer_share(const std::string& ShareToIncrease); /// Decrease by one the active count for the given share. Called when a completed DTR leaves the queue. void decrease_transfer_share(const std::string& ShareToDecrease); /// Decrease by one the number of slots available to the given share. /** * Called when there is a slot already used by this share to reduce the * number available. */ void decrease_number_of_slots(const std::string& ShareToDecrease); /// Returns true if there is a slot available for the given share bool can_start(const std::string& ShareToStart); /// Returns the map of active shares std::map active_shares() const; }; // class TransferShares } // namespace DataStaging #endif /* TRANSFERSHARES_H_ */ nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/Processor.cpp0000644000000000000000000000013214152153376024112 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.856554277 nordugrid-arc-6.14.0/src/libs/data-staging/Processor.cpp0000644000175000002070000011726014152153376024106 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include "DTRStatus.h" #include "Processor.h" namespace DataStaging { std::string Processor::hostname; /** Set up logging. Should be called at the start of each thread method. */ void setUpLogger(DTR_ptr request) { // Move DTR destinations from DTR logger to Root logger to catch all messages. // disconnect this thread's root logger Arc::Logger::getRootLogger().setThreadContext(); request->get_logger()->setThreadContext(); Arc::Logger::getRootLogger().removeDestinations(); Arc::Logger::getRootLogger().addDestinations(request->get_logger()->getDestinations()); request->get_logger()->removeDestinations(); } Processor::Processor() { // Get hostname, needed to exclude ACIX replicas on localhost char hostn[256]; if (gethostname(hostn, sizeof(hostn)) == 0){ hostname = hostn; } } /* Thread methods for each state of the DTR */ void Processor::DTRCheckCache(void* arg) { ThreadArgument* targ = (ThreadArgument*)arg; DTR_ptr request = targ->dtr; delete targ; setUpLogger(request); // IMPORTANT: This method creates a lock on the cached file for // this DTR. 
It must be released at some point using ProcessCache // Create cache using configuration Arc::FileCache cache(request->get_cache_parameters().cache_dirs, request->get_cache_parameters().drain_cache_dirs, request->get_cache_parameters().readonly_cache_dirs, request->get_parent_job_id(), request->get_local_user().get_uid(), request->get_local_user().get_gid()); if (!cache) { request->get_logger()->msg(Arc::ERROR, "Error creating cache"); request->set_cache_state(CACHE_SKIP); request->set_error_status(DTRErrorStatus::CACHE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Failed to create cache"); request->set_status(DTRStatus::CACHE_CHECKED); DTR::push(request, SCHEDULER); return; } // DN is used for checking cache permissions std::string dn = request->get_credential_info().getDN(); Arc::Time exp_time = request->get_credential_info().getExpiryTime(); std::string canonic_url(request->get_source()->GetURL().plainstr()); std::string cacheoption(request->get_source()->GetURL().Option("cache")); // add guid if present // TODO handle guids better in URL class so we don't need to care here if (!request->get_source()->GetURL().MetaDataOption("guid").empty()) canonic_url += ":guid=" + request->get_source()->GetURL().MetaDataOption("guid"); bool is_in_cache = false; bool is_locked = false; // check for forced re-download option bool renew = (cacheoption == "renew"); if (renew) request->get_logger()->msg(Arc::VERBOSE, "Forcing re-download of file %s", canonic_url); for (;;) { if (!cache.Start(canonic_url, is_in_cache, is_locked, renew)) { if (is_locked) { request->get_logger()->msg(Arc::WARNING, "Cached file is locked - should retry"); request->set_cache_state(CACHE_LOCKED); request->set_status(DTRStatus::CACHE_WAIT); // set a flat wait time with some randomness, fine-grained to minimise lock clashes // this may change in future eg be taken from configuration or increase over time time_t cache_wait_time = 10; time_t randomness = (rand() % cache_wait_time) - (cache_wait_time/2); cache_wait_time += randomness; // add random number of milliseconds uint32_t nano_randomness = (rand() % 1000) * 1000000; Arc::Period cache_wait_period(cache_wait_time, nano_randomness); request->get_logger()->msg(Arc::INFO, "Will wait around %is", cache_wait_time); request->set_process_time(cache_wait_period); DTR::push(request, SCHEDULER); return; } request->get_logger()->msg(Arc::ERROR, "Failed to initiate cache"); request->set_cache_state(CACHE_SKIP); request->set_error_status(DTRErrorStatus::CACHE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Failed to initiate cache"); break; } request->set_cache_file(cache.File(canonic_url)); if (is_in_cache) { // Whether cache file is outdated bool outdated = (cacheoption != "invariant"); // Check source if requested if (cacheoption == "check") { request->get_logger()->msg(Arc::INFO, "Force-checking source of cache file %s", cache.File(canonic_url)); Arc::DataStatus cres = request->get_source()->Check(true); if (!cres.Passed()) { request->get_logger()->msg(Arc::ERROR, "Source check requested but failed: %s", std::string(cres)); // Try again skipping cache, maybe this is not worth it request->set_cache_state(CACHE_SKIP); request->set_error_status(DTRErrorStatus::CACHE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Failed to check source for " + canonic_url + ": " + std::string(cres)); break; } } else { // just need to check permissions request->get_logger()->msg(Arc::INFO, "File %s is cached (%s) - checking permissions", canonic_url, cache.File(canonic_url)); // check the list of cached DNs if 
(cache.CheckDN(canonic_url, dn)) { outdated = false; // If DN is cached then don't check creation date } else { Arc::DataStatus cres = request->get_source()->Check(cacheoption != "invariant"); if (!cres.Passed()) { request->get_logger()->msg(Arc::ERROR, "Permission checking failed, will try downloading without using cache"); request->set_cache_state(CACHE_SKIP); request->set_error_status(DTRErrorStatus::CACHE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Failed to check cache permissions for " + canonic_url + ": " + std::string(cres)); break; } cache.AddDN(canonic_url, dn, exp_time); } } request->get_logger()->msg(Arc::INFO, "Permission checking passed"); // check if file is fresh enough if (request->get_source()->CheckModified() && cache.CheckCreated(canonic_url)) { Arc::Time sourcetime = request->get_source()->GetModified(); Arc::Time cachetime = cache.GetCreated(canonic_url); request->get_logger()->msg(Arc::VERBOSE, "Source modification date: %s", sourcetime.str()); request->get_logger()->msg(Arc::VERBOSE, "Cache creation date: %s", cachetime.str()); if (sourcetime <= cachetime) outdated = false; } if (outdated) { request->get_logger()->msg(Arc::INFO, "Cached file is outdated, will re-download"); renew = true; continue; } // cached file is present and valid request->get_logger()->msg(Arc::VERBOSE, "Cached copy is still valid"); request->set_cache_state(CACHE_ALREADY_PRESENT); } else { // file is not there but we are ready to download it request->get_logger()->msg(Arc::VERBOSE, "Will download to cache file %s", request->get_cache_file()); request->set_cache_state(CACHEABLE); } break; } request->set_status(DTRStatus::CACHE_CHECKED); DTR::push(request, SCHEDULER); } void Processor::DTRResolve(void* arg) { // call request->source.Resolve() to get replicas // call request->destination.Resolve() to check supplied replicas // call request->destination.PreRegister() to lock destination LFN ThreadArgument* targ = (ThreadArgument*)arg; DTR_ptr request = targ->dtr; delete targ; setUpLogger(request); // check for source replicas if (request->get_source()->IsIndex()) { request->get_logger()->msg(Arc::VERBOSE, "Looking up source replicas"); Arc::DataStatus res = request->get_source()->Resolve(true); if (!res.Passed() || !request->get_source()->HaveLocations() || !request->get_source()->LocationValid()) { request->get_logger()->msg(Arc::ERROR, std::string(res)); request->set_error_status(res.Retryable() ? 
DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "Could not resolve any source replicas for " + request->get_source()->str() + ": " + std::string(res)); request->set_status(DTRStatus::RESOLVED); DTR::push(request, SCHEDULER); return; } } // If using ACIX, remove sources on our own host if (request->get_use_acix()) { int tries = request->get_source()->GetTries(); while (request->get_source()->LocationValid()) { if (request->get_source()->CurrentLocation().Host() == Processor::hostname) { request->get_logger()->msg(Arc::INFO, "Skipping replica on local host %s", request->get_source()->CurrentLocation().str()); request->get_source()->RemoveLocation(); } else { request->get_source()->NextLocation(); } } // Check that there are still replicas to use if (!request->get_source()->HaveLocations()) { request->get_logger()->msg(Arc::ERROR, "No locations left for %s", request->get_source()->str()); request->set_error_status(DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "Could not resolve any source replicas for " + request->get_source()->str()); request->set_status(DTRStatus::RESOLVED); DTR::push(request, SCHEDULER); return; } // reset retries request->get_source()->SetTries(tries); } // If overwrite is requested, the resolving and pre-registering of the // destination will be done in the pre-clean stage after deleting. if (!request->is_replication() && request->get_destination()->GetURL().Option("overwrite") == "yes") { request->set_status(DTRStatus::RESOLVED); DTR::push(request, SCHEDULER); return; } // Check replicas supplied for destination if (request->get_destination()->IsIndex()) { request->get_logger()->msg(Arc::VERBOSE, "Resolving destination replicas"); Arc::DataStatus res = request->get_destination()->Resolve(false); if (!res.Passed() || !request->get_destination()->HaveLocations() || !request->get_destination()->LocationValid()) { request->get_logger()->msg(Arc::ERROR, std::string(res)); request->set_error_status(res.Retryable() ? DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Could not resolve any destination replicas for " + request->get_destination()->str() + ": " + std::string(res)); request->set_status(DTRStatus::RESOLVED); DTR::push(request, SCHEDULER); return; } } // check that replication is possible if (request->is_replication()) { // we do not want to replicate to same physical file request->get_destination()->RemoveLocations(*(request->get_source())); if (!request->get_destination()->HaveLocations()) { request->get_logger()->msg(Arc::ERROR, "No locations for destination different from source found"); request->set_error_status(DTRErrorStatus::SELF_REPLICATION_ERROR, DTRErrorStatus::NO_ERROR_LOCATION, "No locations for destination different from source found for " + request->get_destination()->str()); request->set_status(DTRStatus::RESOLVED); DTR::push(request, SCHEDULER); return; } } // pre-register destination if (request->get_destination()->IsIndex()) { request->get_logger()->msg(Arc::VERBOSE, "Pre-registering destination in index service"); Arc::DataStatus res = request->get_destination()->PreRegister(request->is_replication(), request->is_force_registration()); if (!res.Passed()) { request->get_logger()->msg(Arc::ERROR, std::string(res)); request->set_error_status(res.Retryable() ? 
DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Could not pre-register destination " + request->get_destination()->str() + ": " + std::string(res)); } } // finished with resolving - send back to scheduler request->set_status(DTRStatus::RESOLVED); DTR::push(request, SCHEDULER); } void Processor::DTRBulkResolve(void* arg) { // call request->source.BulkResolve() to get replicas // NOTE only source resolution can be done in bulk BulkThreadArgument* targ = (BulkThreadArgument*)arg; std::list requests = targ->dtrs; delete targ; if (requests.empty()) return; std::list sources; for (std::list::iterator i = requests.begin(); i != requests.end(); ++i) { setUpLogger(*i); (*i)->get_logger()->msg(Arc::VERBOSE, "Resolving source replicas in bulk"); sources.push_back(&(*((*i)->get_source()))); // nasty... } // check for source replicas Arc::DataStatus res = requests.front()->get_source()->Resolve(true, sources); for (std::list::iterator i = requests.begin(); i != requests.end(); ++i) { DTR_ptr request = *i; if (!res.Passed()) { request->get_logger()->msg(Arc::ERROR, std::string(res)); request->set_error_status(res.Retryable() ? DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "Could not resolve any source replicas for " + request->get_source()->str() + ": " + std::string(res)); } else if (!request->get_source()->HaveLocations() || !request->get_source()->LocationValid()) { request->get_logger()->msg(Arc::ERROR, "No replicas found for %s", request->get_source()->str()); request->set_error_status(DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "No replicas found for " + request->get_source()->str()); } // If using ACIX, remove sources on our own host if (request->get_use_acix()) { int tries = request->get_source()->GetTries(); while (request->get_source()->LocationValid()) { if (request->get_source()->CurrentLocation().Host() == Processor::hostname) { request->get_logger()->msg(Arc::INFO, "Skipping replica on local host %s", request->get_source()->CurrentLocation().str()); request->get_source()->RemoveLocation(); } else { request->get_source()->NextLocation(); } } // Check that there are still replicas to use if (!request->get_source()->HaveLocations()) { request->get_logger()->msg(Arc::ERROR, "No locations left for %s", request->get_source()->str()); request->set_error_status(DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "Could not resolve any source replicas for " + request->get_source()->str()); } // reset retries request->get_source()->SetTries(tries); } request->set_status(DTRStatus::RESOLVED); DTR::push(request, SCHEDULER); } } void Processor::DTRQueryReplica(void* arg) { // check source is ok and obtain metadata ThreadArgument* targ = (ThreadArgument*)arg; DTR_ptr request = targ->dtr; delete targ; setUpLogger(request); Arc::DataStatus res; request->get_logger()->msg(Arc::INFO, "Checking %s", request->get_source()->CurrentLocation().str()); if (request->get_source()->IsIndex()) { res = request->get_source()->CompareLocationMetadata(); } else { Arc::FileInfo file; res = request->get_source()->Stat(file, Arc::DataPoint::INFO_TYPE_CONTENT); } if (res == Arc::DataStatus::InconsistentMetadataError) { request->get_logger()->msg(Arc::ERROR, "Metadata of replica and index service differ"); request->set_error_status(DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "Metadata of replica and index service differ for " 
+ request->get_source()->CurrentLocation().str() + " and " + request->get_source()->str()); } else if (!res.Passed()) { request->get_logger()->msg(Arc::ERROR, "Failed checking source replica %s: %s", request->get_source()->CurrentLocation().str(), std::string(res) ); request->set_error_status(res.Retryable() ? DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "Failed checking source replica " + request->get_source()->CurrentLocation().str() + ": " + std::string(res)); } else { // assign metadata to destination request->get_destination()->SetMeta(*request->get_source()); } // finished querying - send back to scheduler request->set_status(DTRStatus::REPLICA_QUERIED); DTR::push(request, SCHEDULER); } void Processor::DTRBulkQueryReplica(void* arg) { BulkThreadArgument* targ = (BulkThreadArgument*)arg; std::list requests = targ->dtrs; delete targ; if (requests.empty()) return; std::list sources; for (std::list::iterator i = requests.begin(); i != requests.end(); ++i) { setUpLogger(*i); (*i)->get_logger()->msg(Arc::VERBOSE, "Querying source replicas in bulk"); sources.push_back((*i)->get_source()->CurrentLocationHandle()); } // Query source std::list files; Arc::DataStatus res = sources.front()->Stat(files, sources, Arc::DataPoint::INFO_TYPE_CONTENT); std::list::const_iterator file = files.begin(); for (std::list::iterator i = requests.begin(); i != requests.end(); ++i, ++file) { DTR_ptr request = *i; if (!res.Passed() || files.size() != requests.size()) { request->get_logger()->msg(Arc::ERROR, "Failed checking source replica: %s", std::string(res)); request->set_error_status(res.Retryable() ? DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "Failed checking source replica " + request->get_source()->CurrentLocation().str() + ": " + std::string(res)); } else if (!*file) { request->get_logger()->msg(Arc::ERROR, "Failed checking source replica"); request->set_error_status(DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "Failed checking source replica " + request->get_source()->CurrentLocation().str()); } else if (request->get_source()->IsIndex() && !request->get_source()->CompareMeta(*(request->get_source()->CurrentLocationHandle()))) { request->get_logger()->msg(Arc::ERROR, "Metadata of replica and index service differ"); request->set_error_status(DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "Metadata of replica and index service differ for " + request->get_source()->CurrentLocation().str() + " and " + request->get_source()->str()); } else { // assign metadata to destination request->get_destination()->SetMeta(*request->get_source()); } request->set_status(DTRStatus::REPLICA_QUERIED); DTR::push(request, SCHEDULER); } } void Processor::DTRPreClean(void *arg) { // for physical files call Remove() // for index services delete entry and all existing replicas // only if the entry already exists ThreadArgument* targ = (ThreadArgument*)arg; DTR_ptr request = targ->dtr; delete targ; setUpLogger(request); Arc::DataStatus res = Arc::DataStatus::Success; if (!request->get_destination()->IsIndex()) { request->get_logger()->msg(Arc::INFO, "Removing %s", request->get_destination()->CurrentLocation().str()); res = request->get_destination()->Remove(); } else { // get existing locations Arc::DataHandle dest(request->get_destination()->GetURL(), request->get_destination()->GetUserConfig()); request->get_logger()->msg(Arc::VERBOSE, "Finding 
existing destination replicas"); res = dest->Resolve(true); if (!res.Passed()) { request->get_logger()->msg(Arc::ERROR, std::string(res)); } else { if (dest->HaveLocations()) { while (dest->LocationValid()) { request->get_logger()->msg(Arc::INFO, "Removing %s", dest->CurrentLocation().str()); res = dest->Remove(); if (!res.Passed()) { // if we fail to delete one replica then bail out request->get_logger()->msg(Arc::ERROR, "Failed to delete replica %s: %s", dest->CurrentLocation().str(), std::string(res)); break; } // unregister this replica from the index // not critical if this fails as will be removed in the next step dest->Unregister(false); // next replica dest->RemoveLocation(); } } if (!dest->HaveLocations()) { // all replicas were deleted successfully, now unregister the LFN request->get_logger()->msg(Arc::INFO, "Unregistering %s", dest->str()); res = dest->Unregister(true); } } // if deletion was successful resolve destination and pre-register if (!dest->HaveLocations()) { request->get_logger()->msg(Arc::VERBOSE, "Resolving destination replicas"); res = request->get_destination()->Resolve(false); if (!res.Passed()) { request->get_logger()->msg(Arc::ERROR, std::string(res)); } else { request->get_logger()->msg(Arc::VERBOSE, "Pre-registering destination"); res = request->get_destination()->PreRegister(false, request->is_force_registration()); } } } if (!res.Passed()) { request->get_logger()->msg(Arc::ERROR, "Failed to pre-clean destination: %s", std::string(res)); request->set_error_status(res.Retryable() ? DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Failed to pre-clean destination " + request->get_destination()->str() + ": " + std::string(res)); } request->set_status(DTRStatus::PRE_CLEANED); DTR::push(request, SCHEDULER); } void Processor::DTRStagePrepare(void* arg) { // Only valid for stageable (SRM-like) protocols. // Call request->source.PrepareReading() to get TURL for reading or query status of request // and/or request->destination.PrepareWriting() to get TURL for writing or query status of request ThreadArgument* targ = (ThreadArgument*)arg; DTR_ptr request = targ->dtr; delete targ; setUpLogger(request); // first source - if stageable and not already staged yet if (request->get_source()->IsStageable() && request->get_source()->TransferLocations().empty()) { // give default wait time for cases where no wait time is given by the remote service unsigned int source_wait_time = 10; request->get_logger()->msg(Arc::VERBOSE, "Preparing to stage source"); Arc::DataStatus res = request->get_source()->PrepareReading(0, source_wait_time); if (!res.Passed()) { request->get_logger()->msg(Arc::ERROR, std::string(res)); request->set_error_status(res.Retryable() ? 
DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "Failed to prepare source " + request->get_source()->CurrentLocation().str() + ": " + std::string(res)); } else if (res == Arc::DataStatus::ReadPrepareWait) { // if timeout then don't wait - scheduler will deal with it immediately if (Arc::Time() < request->get_timeout()) { if (source_wait_time > 60) source_wait_time = 60; request->set_process_time(source_wait_time); request->get_logger()->msg(Arc::VERBOSE, "Source is not ready, will wait %u seconds", source_wait_time); } request->set_status(DTRStatus::STAGING_PREPARING_WAIT); } else { if (request->get_source()->TransferLocations().empty()) { request->get_logger()->msg(Arc::ERROR, "No physical files found for source"); request->set_error_status(DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_SOURCE, "No physical files found for source " + request->get_source()->CurrentLocation().str()); } else { // TODO order physical files according to eg preferred pattern } } } if (request->error()) { request->set_status(DTRStatus::STAGED_PREPARED); DTR::push(request, SCHEDULER); return; } // now destination - if stageable and not already staged yet if (request->get_destination()->IsStageable() && request->get_destination()->TransferLocations().empty()) { // give default wait time for cases where no wait time is given by the remote service unsigned int dest_wait_time = 10; request->get_logger()->msg(Arc::VERBOSE, "Preparing to stage destination"); Arc::DataStatus res = request->get_destination()->PrepareWriting(0, dest_wait_time); if (!res.Passed()) { request->get_logger()->msg(Arc::ERROR, std::string(res)); request->set_error_status(res.Retryable() ? DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Failed to prepare destination " + request->get_destination()->CurrentLocation().str() + ": " + std::string(res)); } else if (res == Arc::DataStatus::WritePrepareWait) { // if timeout then don't wait - scheduler will deal with it immediately if (Arc::Time() < request->get_timeout()) { if (dest_wait_time > 60) dest_wait_time = 60; request->set_process_time(dest_wait_time); request->get_logger()->msg(Arc::VERBOSE, "Destination is not ready, will wait %u seconds", dest_wait_time); } request->set_status(DTRStatus::STAGING_PREPARING_WAIT); } else { if (request->get_destination()->TransferLocations().empty()) { request->get_logger()->msg(Arc::ERROR, "No physical files found for destination"); request->set_error_status(DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "No physical files found for destination " + request->get_destination()->CurrentLocation().str()); } else { // TODO choose best physical file } } } // set to staged prepared if we don't have to wait for source or destination if (request->get_status() != DTRStatus::STAGING_PREPARING_WAIT) request->set_status(DTRStatus::STAGED_PREPARED); DTR::push(request, SCHEDULER); } void Processor::DTRReleaseRequest(void* arg) { // only valid for stageable (SRM-like) protocols. 
call request->source.FinishReading() and/or // request->destination.FinishWriting() to release or abort requests ThreadArgument* targ = (ThreadArgument*)arg; DTR_ptr request = targ->dtr; delete targ; setUpLogger(request); Arc::DataStatus res; if (request->get_source()->IsStageable()) { request->get_logger()->msg(Arc::VERBOSE, "Releasing source"); res = request->get_source()->FinishReading(request->error() || request->cancel_requested()); if (!res.Passed()) { // an error here is not critical to the transfer request->get_logger()->msg(Arc::WARNING, "There was a problem during post-transfer source handling: %s", std::string(res)); } } if (request->get_destination()->IsStageable()) { request->get_logger()->msg(Arc::VERBOSE, "Releasing destination"); res = request->get_destination()->FinishWriting(request->error() || request->cancel_requested()); if (!res.Passed()) { if (request->error()) { request->get_logger()->msg(Arc::WARNING, "There was a problem during post-transfer destination handling after error: %s", std::string(res)); } else { request->get_logger()->msg(Arc::ERROR, "Error with post-transfer destination handling: %s", std::string(res)); request->set_error_status(res.Retryable() ? DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Error with post-transfer destination handling of " + request->get_destination()->CurrentLocation().str() + ": " + std::string(res)); } } } request->set_status(DTRStatus::REQUEST_RELEASED); DTR::push(request, SCHEDULER); } void Processor::DTRRegisterReplica(void* arg) { // call request->destination.Register() to add new replica and metadata for normal workflow // call request->destination.PreUnregister() to delete LFN placed during // RESOLVE stage for error workflow ThreadArgument* targ = (ThreadArgument*)arg; DTR_ptr request = targ->dtr; delete targ; setUpLogger(request); // TODO: If the copy completed before request was cancelled, unregistering // here will lead to dark data. Need to check for successful copy if (request->error() || request->cancel_requested()) { request->get_logger()->msg(Arc::VERBOSE, "Removing pre-registered destination in index service"); Arc::DataStatus res = request->get_destination()->PreUnregister(request->is_replication()); if (!res.Passed()) { request->get_logger()->msg(Arc::ERROR, "Failed to unregister pre-registered destination %s: %s." " You may need to unregister it manually", request->get_destination()->str(), std::string(res)); } } else { request->get_logger()->msg(Arc::VERBOSE, "Registering destination replica"); Arc::DataStatus res = request->get_destination()->PostRegister(request->is_replication()); if (!res.Passed()) { request->get_logger()->msg(Arc::ERROR, "Failed to register destination replica: %s", std::string(res)); if (!request->get_destination()->PreUnregister(request->is_replication()).Passed()) { request->get_logger()->msg(Arc::ERROR, "Failed to unregister pre-registered destination %s." " You may need to unregister it manually", request->get_destination()->str()); } request->set_error_status(res.Retryable() ? 
DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DTRErrorStatus::PERMANENT_REMOTE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Could not post-register destination " + request->get_destination()->str() + ": " + std::string(res)); } } // finished with registration - send back to scheduler request->set_status(DTRStatus::REPLICA_REGISTERED); DTR::push(request, SCHEDULER); } void Processor::DTRProcessCache(void* arg) { // link or copy cached file to session dir, or release locks in case // of error or deciding not to use cache (for example because of a mapped link) ThreadArgument* targ = (ThreadArgument*)arg; DTR_ptr request = targ->dtr; delete targ; setUpLogger(request); Arc::FileCache cache(request->get_cache_parameters().cache_dirs, request->get_cache_parameters().drain_cache_dirs, request->get_cache_parameters().readonly_cache_dirs, request->get_parent_job_id(), request->get_local_user().get_uid(), request->get_local_user().get_gid()); if (!cache) { request->get_logger()->msg(Arc::ERROR, "Error creating cache. Stale locks may remain."); request->set_error_status(DTRErrorStatus::CACHE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Failed to create cache for " + request->get_source()->str()); request->set_status(DTRStatus::CACHE_PROCESSED); DTR::push(request, SCHEDULER); return; } std::string canonic_url(request->get_source()->GetURL().plainstr()); // add guid if present if (!request->get_source()->GetURL().MetaDataOption("guid").empty()) canonic_url += ":guid=" + request->get_source()->GetURL().MetaDataOption("guid"); // don't link if error, cancellation or cache not being used if (request->error() || request->cancel_requested() || request->get_cache_state() == CACHE_NOT_USED) { // release locks if they were acquired if (request->get_cache_state() == CACHEABLE || request->get_cache_state() == CACHE_NOT_USED) { if (request->error() || request->cancel_requested()) { cache.StopAndDelete(canonic_url); } else { cache.Stop(canonic_url); } } request->set_status(DTRStatus::CACHE_PROCESSED); DTR::push(request, SCHEDULER); return; } // check options for whether to copy or link bool executable = (request->get_source()->GetURL().Option("exec") == "yes") ? true : false; bool cache_copy = (request->get_source()->GetURL().Option("cache") == "copy") ? true : false; request->get_logger()->msg(Arc::INFO, "Linking/copying cached file to %s", request->get_destination()->CurrentLocation().Path()); bool was_downloaded = (request->get_cache_state() == CACHE_DOWNLOADED) ? 
true : false; if (was_downloaded) { // Add DN to cached permissions std::string dn = request->get_credential_info().getDN(); Arc::Time exp_time = request->get_credential_info().getExpiryTime(); cache.AddDN(canonic_url, dn, exp_time); } bool try_again = false; if (!cache.Link(request->get_destination()->CurrentLocation().Path(), canonic_url, cache_copy, executable, was_downloaded, try_again)) { if (try_again) { // set cache status to CACHE_LOCKED, so that the Scheduler will try again request->set_cache_state(CACHE_LOCKED); request->get_logger()->msg(Arc::WARNING, "Failed linking cache file to %s", request->get_destination()->CurrentLocation().Path()); } else { request->get_logger()->msg(Arc::ERROR, "Error linking cache file to %s.", request->get_destination()->CurrentLocation().Path()); } request->set_error_status(DTRErrorStatus::CACHE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Failed to link/copy cache file to session dir"); } if (was_downloaded) cache.Stop(canonic_url); request->set_status(DTRStatus::CACHE_PROCESSED); DTR::push(request, SCHEDULER); } /* main process method called from DTR::push() */ void Processor::receiveDTR(DTR_ptr request) { BulkThreadArgument* bulk_arg = NULL; ThreadArgument* arg = NULL; // first deal with bulk if (request->get_bulk_end()) { // end of bulk request->get_logger()->msg(Arc::VERBOSE, "Adding to bulk request"); request->set_bulk_end(false); bulk_list.push_back(request); bulk_arg = new BulkThreadArgument(this, bulk_list); bulk_list.clear(); } else if (request->get_bulk_start() || !bulk_list.empty()) { // filling bulk list request->get_logger()->msg(Arc::VERBOSE, "Adding to bulk request"); bulk_list.push_back(request); if (request->get_bulk_start()) request->set_bulk_start(false); } else { // non-bulk request arg = new ThreadArgument(this, request); } // switch through the expected DTR states switch (request->get_status().GetStatus()) { // pre-processor states case DTRStatus::CHECK_CACHE: { request->set_status(DTRStatus::CHECKING_CACHE); Arc::CreateThreadFunction(&DTRCheckCache, (void*)arg, &thread_count); }; break; case DTRStatus::RESOLVE: { request->set_status(DTRStatus::RESOLVING); if (bulk_arg) Arc::CreateThreadFunction(&DTRBulkResolve, (void*)bulk_arg, &thread_count); else if (arg) Arc::CreateThreadFunction(&DTRResolve, (void*)arg, &thread_count); }; break; case DTRStatus::QUERY_REPLICA: { request->set_status(DTRStatus::QUERYING_REPLICA); if (bulk_arg) Arc::CreateThreadFunction(&DTRBulkQueryReplica, (void*)bulk_arg, &thread_count); else if (arg) Arc::CreateThreadFunction(&DTRQueryReplica, (void*)arg, &thread_count); }; break; case DTRStatus::PRE_CLEAN: { request->set_status(DTRStatus::PRE_CLEANING); Arc::CreateThreadFunction(&DTRPreClean, (void*)arg, &thread_count); }; break; case DTRStatus::STAGE_PREPARE: { request->set_status(DTRStatus::STAGING_PREPARING); Arc::CreateThreadFunction(&DTRStagePrepare, (void*)arg, &thread_count); }; break; // post-processor states case DTRStatus::RELEASE_REQUEST: { request->set_status(DTRStatus::RELEASING_REQUEST); Arc::CreateThreadFunction(&DTRReleaseRequest, (void*)arg, &thread_count); }; break; case DTRStatus::REGISTER_REPLICA: { request->set_status(DTRStatus::REGISTERING_REPLICA); Arc::CreateThreadFunction(&DTRRegisterReplica, (void*)arg, &thread_count); }; break; case DTRStatus::PROCESS_CACHE: { request->set_status(DTRStatus::PROCESSING_CACHE); Arc::CreateThreadFunction(&DTRProcessCache, (void*)arg, &thread_count); }; break; default: { // unexpected state - report error 
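// Even for an unexpected state the DTR is pushed back to the scheduler below,
// so the request is not lost, and any unused thread argument objects are freed.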
request->set_error_status(DTRErrorStatus::INTERNAL_LOGIC_ERROR, DTRErrorStatus::ERROR_UNKNOWN, "Received a DTR in an unexpected state ("+request->get_status().str()+") in processor"); DTR::push(request, SCHEDULER); if (arg) delete arg; if (bulk_arg) delete bulk_arg; }; break; } } void Processor::start(void) { } void Processor::stop(void) { // threads are short lived so wait for them to complete rather than interrupting thread_count.wait(60*1000); } } // namespace DataStaging nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DTRStatus.h0000644000000000000000000000013214152153376023435 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.845554112 nordugrid-arc-6.14.0/src/libs/data-staging/DTRStatus.h0000644000175000002070000002164614152153376023433 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifndef __ARC_DTRSTATUS_H__ #define __ARC_DTRSTATUS_H__ #include #include namespace DataStaging { /// Class representing the status of a DTR. /** * \ingroup datastaging * \headerfile DTRStatus.h arc/data-staging/DTRStatus.h */ class DTRStatus { public: /// Possible state values enum DTRStatusType { // ORDER IS IMPORTANT!! /// Just created NEW, /// Check the cache for the file may be already there CHECK_CACHE, /// Checking the cache CHECKING_CACHE, /// Cache file is locked, waiting for its release CACHE_WAIT, /// Cache check completed CACHE_CHECKED, /// Resolve a meta-protocol RESOLVE, /// Resolving replicas RESOLVING, /// Replica resolution completed RESOLVED, /// Query a replica QUERY_REPLICA, /// Replica is being queried QUERYING_REPLICA, /// Replica was queried REPLICA_QUERIED, /// The destination should be deleted PRE_CLEAN, /// Deleting the destination PRE_CLEANING, /// The destination file has been deleted PRE_CLEANED, /// Prepare or stage the source and/or destination STAGE_PREPARE, /// Making a staging or preparing request STAGING_PREPARING, /// Wait for the status of the staging/preparing request STAGING_PREPARING_WAIT, /// Staging/preparing request completed STAGED_PREPARED, /// Transfer ready and can be started TRANSFER, /// Transfer is going TRANSFERRING, /// Transfer is on-going but scheduled for cancellation TRANSFERRING_CANCEL, /// Transfer completed TRANSFERRED, /// Transfer finished, release requests on the storage RELEASE_REQUEST, /// Releasing staging/preparing request RELEASING_REQUEST, /// Release of staging/preparing request completed REQUEST_RELEASED, /// Register a new replica of the destination REGISTER_REPLICA, /// Registering a replica in an index service REGISTERING_REPLICA, /// Replica registration completed REPLICA_REGISTERED, /// Destination is cacheable, process cache PROCESS_CACHE, /// Releasing locks and copying/linking cache files to the session dir PROCESSING_CACHE, /// Cache processing completed CACHE_PROCESSED, /// Everything completed successfully DONE, /// Cancellation request fulfilled successfully CANCELLED, /// Cancellation request fulfilled but DTR also completed transfer successfully CANCELLED_FINISHED, /// Error occured ERROR, /// "Stateless" DTR NULL_STATE }; /// Make new DTRStatus with given status and optional description. 
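/// A minimal usage sketch (illustration only, not taken from the ARC sources),
/// relying only on the constructor, comparison and assignment operators below:
/// \code
/// DataStaging::DTRStatus status(DataStaging::DTRStatus::CHECK_CACHE);
/// if (status == DataStaging::DTRStatus::CHECK_CACHE)
///   status = DataStaging::DTRStatus::CHECKING_CACHE;
/// std::string name = status.str();
/// \endcode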
DTRStatus(const DTRStatusType& status, std::string desc="") : status(status), desc(desc) {} /// Make new DTRStatus with default NEW status DTRStatus() : status(NEW), desc ("") {} /// Returns true if this status is the same as the given DTRStatusType bool operator==(const DTRStatusType& s) const { return status == s; } /// Returns true if this status is the same as the given DTRStatus bool operator==(const DTRStatus& s) const { return status == s.status; } /// Returns true if this status is not the same as the given DTRStatusType bool operator!=(const DTRStatusType& s) const { return status != s; } /// Returns true if this status is not the same as the given DTRStatus bool operator!=(const DTRStatus& s) const { return status != s.status; } /// Make a new DTRStatus with the same status as the given DTRStatusType DTRStatus& operator=(const DTRStatusType& s) { status = s; return *this; } /// Returns a string representation of the current state std::string str() const; /// Set the detailed description of the current state void SetDesc(const std::string& d) { desc = d; } /// Get the detailed description of the current state std::string GetDesc() const { return desc; } /// Get the DTRStatusType of the current state DTRStatusType GetStatus() const { return status; } // The actions in the following two vectors must match /// Vector of states with a to be processed action, eg CHECK_CACHE static const std::vector ToProcessStates; /// Vector of states with a processing action, eg CHECKING_CACHE static const std::vector ProcessingStates; /// Vector of states where a DTR is staged - used to limit the number of staged files static const std::vector StagedStates; private: /// status code DTRStatusType status; /// description set by the owner process std::string desc; }; // DTRStatus /// A class to represent error states reported by various components. 
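/// For illustration only (not taken from the ARC sources), an error seen at the
/// source while resolving replicas could be recorded with the constructor
/// declared further down as:
/// \code
/// DataStaging::DTRErrorStatus err(DataStaging::DTRErrorStatus::TEMPORARY_REMOTE_ERROR,
///                                 DataStaging::DTRStatus::RESOLVING,
///                                 DataStaging::DTRErrorStatus::ERROR_SOURCE,
///                                 "Failed to resolve replicas");
/// \endcode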
/** * \ingroup datastaging * \headerfile DTRStatus.h arc/data-staging/DTRStatus.h */ class DTRErrorStatus { public: /// A list of error types enum DTRErrorStatusType { /// No error NONE_ERROR, /// Internal error in Data Staging logic INTERNAL_LOGIC_ERROR, /// Internal processing error, like losing contact with external process INTERNAL_PROCESS_ERROR, /// Attempt to replicate a file to itself SELF_REPLICATION_ERROR, /// Permanent error with cache CACHE_ERROR, /// Temporary error with remote service TEMPORARY_REMOTE_ERROR, /// Permanent error with remote service PERMANENT_REMOTE_ERROR, /// Error with local file LOCAL_FILE_ERROR, /// Transfer rate was too slow TRANSFER_SPEED_ERROR, /// Waited for too long to become staging STAGING_TIMEOUT_ERROR }; /// Describes where the error occurred enum DTRErrorLocation { /// No error NO_ERROR_LOCATION, /// Error with source ERROR_SOURCE, /// Error with destination ERROR_DESTINATION, /// Error during transfer not directly related to source or destination ERROR_TRANSFER, /// Error occurred in an unknown location ERROR_UNKNOWN }; /// Create a new DTRErrorStatus with given error states /** * @param status Type of error * @param error_state DTR state in which the error occurred * @param location Location of error (at source, destination or during transfer) * @param desc Text description of error */ DTRErrorStatus(DTRErrorStatusType status, DTRStatus::DTRStatusType error_state, DTRErrorLocation location, const std::string& desc = ""): error_status(status), last_error_state(error_state), error_location(location), desc(desc) {}; /// Create a new DTRErrorStatus with default none/null error states DTRErrorStatus() : error_status(NONE_ERROR), last_error_state(DTRStatus::NULL_STATE), error_location(NO_ERROR_LOCATION), desc("") {}; /// Returns the error type DTRErrorStatusType GetErrorStatus() const { return error_status; } /// Returns the state in which the error occurred DTRStatus::DTRStatusType GetLastErrorState() const { return last_error_state.GetStatus(); } /// Returns the location at which the error occurred DTRErrorLocation GetErrorLocation() const { return error_location; } /// Returns the error description std::string GetDesc() const { return desc; } /// Returns true if this error status is the same as the given DTRErrorStatusType bool operator==(const DTRErrorStatusType& s) const { return error_status == s; } /// Returns true if this error status is the same as the given DTRErrorStatus bool operator==(const DTRErrorStatus& s) const { return error_status == s.error_status; } /// Returns true if this error status is not the same as the given DTRErrorStatusType bool operator!=(const DTRErrorStatusType& s) const { return error_status != s; } /// Returns true if this error status is not the same as the given DTRErrorStatus bool operator!=(const DTRErrorStatus& s) const { return error_status != s.error_status; } /// Make a new DTRErrorStatus with the same error status as the given DTRErrorStatusType DTRErrorStatus& operator=(const DTRErrorStatusType& s) { error_status = s; return *this; } private: /// error state DTRErrorStatusType error_status; /// state that error occurred in DTRStatus last_error_state; /// place where the error occurred DTRErrorLocation error_location; /// description of error std::string desc; }; } // namespace DataStaging #endif /*__ARC_DTRSTATUS_H_*/ nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DataDeliveryComm.cpp0000644000000000000000000000013214152153376025324 xustar000000000000000030 mtime=1638455038.403645944 30 
atime=1638455038.499647387 30 ctime=1638455098.851554202 nordugrid-arc-6.14.0/src/libs/data-staging/DataDeliveryComm.cpp0000644000175000002070000000571014152153376025314 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include "DataDeliveryComm.h" #include "DataDeliveryRemoteComm.h" #include "DataDeliveryLocalComm.h" namespace DataStaging { DataDeliveryComm* DataDeliveryComm::CreateInstance(DTR_ptr dtr, const TransferParameters& params) { if (!dtr->get_delivery_endpoint() || dtr->get_delivery_endpoint() == DTR::LOCAL_DELIVERY) return new DataDeliveryLocalComm(dtr, params); return new DataDeliveryRemoteComm(dtr, params); } DataDeliveryComm::DataDeliveryComm(DTR_ptr dtr, const TransferParameters& params) : status_pos_(0),transfer_params(params),logger_(dtr->get_logger()) { handler_= DataDeliveryCommHandler::getInstance(); } DataDeliveryComm::Status DataDeliveryComm::GetStatus(void) const { Glib::Mutex::Lock lock(*(const_cast(&lock_))); DataDeliveryComm::Status tmp = status_; return tmp; } bool DataDeliveryComm::CheckComm(DTR_ptr dtr, std::vector& allowed_dirs, std::string& load_avg) { if (!dtr->get_delivery_endpoint() || dtr->get_delivery_endpoint() == DTR::LOCAL_DELIVERY) return DataDeliveryLocalComm::CheckComm(dtr, allowed_dirs, load_avg); return DataDeliveryRemoteComm::CheckComm(dtr, allowed_dirs, load_avg); } DataDeliveryCommHandler::DataDeliveryCommHandler(void) { Glib::Mutex::Lock lock(lock_); Arc::CreateThreadFunction(&func,this); } void DataDeliveryCommHandler::Add(DataDeliveryComm* item) { Glib::Mutex::Lock lock(lock_); items_.push_back(item); } void DataDeliveryCommHandler::Remove(DataDeliveryComm* item) { Glib::Mutex::Lock lock(lock_); for(std::list::iterator i = items_.begin(); i!=items_.end();) { if(*i == item) { i=items_.erase(i); } else { ++i; } } } DataDeliveryCommHandler* DataDeliveryCommHandler::comm_handler = NULL; DataDeliveryCommHandler* DataDeliveryCommHandler::getInstance() { if(comm_handler) return comm_handler; return (comm_handler = new DataDeliveryCommHandler); } // This is a dedicated thread which periodically checks for // new state reported by comm instances and modifies states accordingly void DataDeliveryCommHandler::func(void* arg) { if(!arg) return; // disconnect from root logger since messages are logged to per-DTR Logger Arc::Logger::getRootLogger().setThreadContext(); Arc::Logger::getRootLogger().removeDestinations(); // We do not need extremely low latency, so this // thread simply polls for data 2 times per second. DataDeliveryCommHandler& it = *(DataDeliveryCommHandler*)arg; for(;;) { { Glib::Mutex::Lock lock(it.lock_); for(std::list::iterator i = it.items_.begin(); i != it.items_.end();++i) { DataDeliveryComm* comm = *i; if(comm) comm->PullStatus(); } } Glib::usleep(500000); } } } // namespace DataStaging nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DataDeliveryRemoteComm.h0000644000000000000000000000013214152153376026145 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.842554066 nordugrid-arc-6.14.0/src/libs/data-staging/DataDeliveryRemoteComm.h0000644000175000002070000000472314152153376026140 0ustar00mockbuildmock00000000000000#ifndef DATADELIVERYREMOTECOMM_H_ #define DATADELIVERYREMOTECOMM_H_ #include #include #include #include "DataDeliveryComm.h" namespace DataStaging { /// This class contacts a remote service to make a Delivery request. 
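/// Instances are normally obtained through DataDeliveryComm::CreateInstance(),
/// which returns this class whenever the DTR carries a delivery endpoint other
/// than DTR::LOCAL_DELIVERY.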
/** * \ingroup datastaging * \headerfile DataDeliveryRemoteComm.h arc/data-staging/DataDeliveryRemoteComm.h */ class DataDeliveryRemoteComm : public DataDeliveryComm { public: /// Send the transfer request to the remote service. DataDeliveryRemoteComm(DTR_ptr dtr, const TransferParameters& params); /// If transfer is still ongoing, sends a cancellation message to the service. virtual ~DataDeliveryRemoteComm(); /// Read status from service virtual void PullStatus(); /// Pings service to find allowed dirs static bool CheckComm(DTR_ptr dtr, std::vector& allowed_dirs, std::string& load_avg); /// Returns true if service is still processing request virtual operator bool() const { return valid; }; /// Returns true if service is not processing request or down virtual bool operator!() const { return !valid; }; private: /// Connection to service Arc::ClientSOAP* client; /// Full DTR ID std::string dtr_full_id; /// Retries allowed after failing to query transfer status, so that a /// transfer is not lost due to temporary communication problem. If a /// transfer fails to start it is handled by the normal DTR retries. int query_retries; /// MCC configuration for connecting to service Arc::MCCConfig cfg; /// Endpoint of remote delivery service Arc::URL endpoint; /// Connection timeout int timeout; /// Flag to say whether transfer is running and service is still up bool valid; /// Logger object (main log, not DTR's log) static Arc::Logger logger; /// Cancel a DTR, by sending a cancel request to the service void CancelDTR(); /// Fill Status object with data in node. If empty fields are initialised /// to default values. void FillStatus(const Arc::XMLNode& node = Arc::XMLNode()); /// Set up delegation so the credentials can be used by the service bool SetupDelegation(Arc::XMLNode& op, const Arc::UserConfig& usercfg); /// Handle a fault during query of service. 
Attempts to reconnect void HandleQueryFault(const std::string& err=""); }; } // namespace DataStaging #endif /* DATADELIVERYREMOTECOMM_H_ */ nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DataStagingDelivery.cpp0000644000000000000000000000013214152153376026025 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.858554307 nordugrid-arc-6.14.0/src/libs/data-staging/DataStagingDelivery.cpp0000644000175000002070000005221214152153376026014 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include "DataDeliveryComm.h" using namespace Arc; static Arc::Logger logger(Arc::Logger::getRootLogger(), "DataDelivery"); static bool delivery_shutdown = false; static Arc::Time start_time; static void sig_shutdown(int) { if(delivery_shutdown) _exit(0); delivery_shutdown = true; } static void ReportStatus(DataStaging::DTRStatus::DTRStatusType st, DataStaging::DTRErrorStatus::DTRErrorStatusType err, DataStaging::DTRErrorStatus::DTRErrorLocation err_loc, const std::string& err_desc, unsigned long long int transferred, unsigned long long int size, Arc::Time transfer_start_time, const std::string& checksum = "") { static DataStaging::DataDeliveryComm::Status status; static unsigned int status_pos = 0; static bool status_changed = true; unsigned long long int transfer_time = 0; if (transfer_start_time != Arc::Time(0)) { Arc::Period p = Arc::Time() - transfer_start_time; transfer_time = p.GetPeriod() * 1000000000 + p.GetPeriodNanoseconds(); } // Filling status.commstatus = DataStaging::DataDeliveryComm::CommNoError; status.timestamp = ::time(NULL); status.status = st; status.error = err; status.error_location = err_loc; strncpy(status.error_desc,err_desc.c_str(),sizeof(status.error_desc)); status.streams = 0; status.transferred = transferred; status.size = size; status.transfer_time = transfer_time; status.offset = 0; status.speed = 0; strncpy(status.checksum, checksum.c_str(), sizeof(status.checksum)); if(status_pos == 0) { status_changed=true; }; if(status_changed) { for(;;) { ssize_t l = ::write(STDOUT_FILENO,((char*)&status)+status_pos,sizeof(status)-status_pos); if(l == -1) { // error, parent exited? break; } else if(l == 0) { // will happen if stdout is non-blocking break; } else { status_pos+=l; }; if(status_pos >= sizeof(status)) { status_pos=0; status_changed=false; break; }; }; }; } static unsigned long long int transfer_bytes = 0; static void ReportOngoingStatus(unsigned long long int bytes) { transfer_bytes = bytes; // Send report on stdout ReportStatus(DataStaging::DTRStatus::TRANSFERRING, DataStaging::DTRErrorStatus::NONE_ERROR, DataStaging::DTRErrorStatus::NO_ERROR_LOCATION, "", bytes, 0, 0); // Log progress in log time_t t = Arc::Period(Arc::Time() - start_time).GetPeriod(); logger.msg(INFO, "%5u s: %10.1f kB %8.1f kB/s", (unsigned int)t, ((double)bytes) / 1024, (t == 0) ? 
0 : ((double)bytes) / 1024 / t); } static unsigned long long int GetFileSize(const DataPoint& source, const DataPoint& dest) { if(source.CheckSize()) return source.GetSize(); if(dest.CheckSize()) return dest.GetSize(); return 0; } int main(int argc,char* argv[]) { // log to stderr Arc::Logger::getRootLogger().setThreshold(Arc::VERBOSE); //TODO: configurable Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::EmptyFormat); Arc::Logger::getRootLogger().addDestination(logcerr); // Collecting parameters // --surl: source URL // --durl: destination URL // --sopt: any URL option, credential - path to file storing credentials // --dopt: any URL option, credential - path to file storing credentials // --topt: minspeed, minspeedtime, minavgspeed, maxinacttime, avgtime // --size: total size of data to be transferred // --cstype: checksum type to calculate // --csvalue: checksum value of source file to validate against // surl, durl, cstype and csvalue may be given only once // sopt, dopt, topt may be given multiple times // type of credentials is detected automatically, so far only // X.509 proxies or key+certificate are accepted std::string source_str; std::string dest_str; std::list source_opts; std::list dest_opts; std::list transfer_opts; std::string size; std::string checksum_type; std::string checksum_value; std::string source_cred_path; std::string dest_cred_path; std::string source_ca_path; std::string dest_ca_path; OptionParser opt; opt.AddOption(0,"surl","","source URL",source_str); opt.AddOption(0,"durl","","destination URL",dest_str); opt.AddOption(0,"sopt","","source options",source_opts); opt.AddOption(0,"dopt","","destination options",dest_opts); opt.AddOption(0,"topt","","transfer options",transfer_opts); opt.AddOption(0,"size","","total size",size); opt.AddOption(0,"cstype","","checksum type",checksum_type); opt.AddOption(0,"csvalue","","checksum value",checksum_value); if(opt.Parse(argc,argv).size() != 0) { logger.msg(ERROR, "Unexpected arguments"); return -1; }; if(source_str.empty()) { logger.msg(ERROR, "Source URL missing"); return -1; }; if(dest_str.empty()) { logger.msg(ERROR, "Destination URL missing"); return -1; }; URL source_url(source_str); if(!source_url) { logger.msg(ERROR, "Source URL not valid: %s", source_str); return -1; }; URL dest_url(dest_str); if(!dest_url) { logger.msg(ERROR, "Destination URL not valid: %s", dest_str); return -1; }; for(std::list::iterator o = source_opts.begin(); o != source_opts.end();++o) { std::string::size_type p = o->find('='); if(p == std::string::npos) { source_url.AddOption(*o); } else { std::string name = o->substr(0,p); if(name == "credential") { source_cred_path = o->substr(p+1); } else if(name == "ca") { source_ca_path = o->substr(p+1); } else { source_url.AddOption(*o); }; }; }; for(std::list::iterator o = dest_opts.begin(); o != dest_opts.end();++o) { std::string::size_type p = o->find('='); if(p == std::string::npos) { dest_url.AddOption(*o); } else { std::string name = o->substr(0,p); if(name == "credential") { dest_cred_path = o->substr(p+1); } else if(name == "ca") { dest_ca_path = o->substr(p+1); } else { dest_url.AddOption(*o); }; }; }; DataBuffer buffer; buffer.speed.verbose(true); unsigned long long int minspeed = 0; time_t minspeedtime = 0; for(std::list::iterator o = transfer_opts.begin(); o != transfer_opts.end();++o) { std::string::size_type p = o->find('='); if(p != std::string::npos) { std::string name = o->substr(0,p); unsigned long long int value; if(stringto(o->substr(p+1),value)) { if(name == "minspeed") { 
minspeed=value; } else if(name == "minspeedtime") { minspeedtime=value; } else if(name == "minavgspeed") { buffer.speed.set_min_average_speed(value); } else if(name == "maxinacttime") { buffer.speed.set_max_inactivity_time(value); } else if(name == "avgtime") { buffer.speed.set_base(value); } else { logger.msg(ERROR, "Unknown transfer option: %s", name); _exit(-1); } }; }; } buffer.speed.set_min_speed(minspeed,minspeedtime); // Checksum objects must be destroyed after DataHandles CheckSumAny crc; CheckSumAny crc_source; CheckSumAny crc_dest; // Read credential from stdin if available std::string proxy_cred; std::getline(std::cin, proxy_cred, '\0'); initializeCredentialsType source_cred(initializeCredentialsType::SkipCredentials); UserConfig source_cfg(source_cred); if(!source_cred_path.empty()) source_cfg.ProxyPath(source_cred_path); else if (!proxy_cred.empty()) source_cfg.CredentialString(proxy_cred); if(!source_ca_path.empty()) source_cfg.CACertificatesDirectory(source_ca_path); //source_cfg.UtilsDirPath(...); - probably not needed DataHandle source(source_url, source_cfg); if(!source) { logger.msg(ERROR, "Source URL not supported: %s", source_url.str()); _exit(-1); //return -1; }; if (source->RequiresCredentialsInFile() && source_cred_path.empty()) { logger.msg(ERROR, "No credentials supplied"); _exit(-1); } source->SetSecure(false); source->Passive(true); initializeCredentialsType dest_cred(initializeCredentialsType::SkipCredentials); UserConfig dest_cfg(dest_cred); if(!dest_cred_path.empty()) dest_cfg.ProxyPath(dest_cred_path); else if (!proxy_cred.empty()) dest_cfg.CredentialString(proxy_cred); if(!dest_ca_path.empty()) dest_cfg.CACertificatesDirectory(dest_ca_path); //dest_cfg.UtilsDirPath(...); - probably not needed DataHandle dest(dest_url,dest_cfg); if(!dest) { logger.msg(ERROR, "Destination URL not supported: %s", dest_url.str()); _exit(-1); //return -1; }; if (dest->RequiresCredentialsInFile() && dest_cred_path.empty()) { logger.msg(ERROR, "No credentials supplied"); _exit(-1); } dest->SetSecure(false); dest->Passive(true); // set X509* for 3rd party tools which need it (eg GFAL) if (!source_cfg.ProxyPath().empty()) { SetEnv("X509_USER_PROXY", source_cfg.ProxyPath()); if (!source_cfg.CACertificatesDirectory().empty()) SetEnv("X509_CERT_DIR", source_cfg.CACertificatesDirectory()); // those tools also use hostcert by default if the user is root... 
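// so point X509_USER_CERT/X509_USER_KEY at the proxy as well when running as root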
if (getuid() == 0) { SetEnv("X509_USER_CERT", source_cfg.ProxyPath()); SetEnv("X509_USER_KEY", source_cfg.ProxyPath()); } } // set signal handlers signal(SIGTERM, sig_shutdown); signal(SIGINT, sig_shutdown); // Filling initial report buffer ReportStatus(DataStaging::DTRStatus::NULL_STATE, DataStaging::DTRErrorStatus::NONE_ERROR, DataStaging::DTRErrorStatus::NO_ERROR_LOCATION, "",0,0,0,""); // if checksum type is supplied, use that type, otherwise use default for the // destination (if checksum is supported by the destination protocol) std::string crc_type(""); if (!checksum_type.empty()) { crc_type = checksum_type; if (!checksum_value.empty()) source->SetCheckSum(checksum_type+':'+checksum_value); } else if (dest->AcceptsMeta() || dest->ProvidesMeta()) { crc_type = dest->DefaultCheckSum(); } if (!crc_type.empty()) { crc = crc_type.c_str(); crc_source = crc_type.c_str(); crc_dest = crc_type.c_str(); if (crc.Type() != CheckSumAny::none) logger.msg(INFO, "Will calculate %s checksum", crc_type); source->AddCheckSumObject(&crc_source); dest->AddCheckSumObject(&crc_dest); } buffer.set(&crc); if (!size.empty()) { unsigned long long int total_size; if (stringto(size, total_size)) { dest->SetSize(total_size); } else { logger.msg(WARNING, "Cannot use supplied --size option"); } } bool reported = false; bool eof_reached = false; // checksum validation against supplied value std::string calc_csum; // These will stay positive if corresponding transfer type is not used DataStatus source_st; DataStatus dest_st; DataStatus transfer_st; // Check if datapoint handles transfer by itself bool try_another_transfer = true; if (try_another_transfer) { if (source->SupportsTransfer()) { logger.msg(INFO, "Using internal transfer method of %s", source->str()); transfer_st = source->Transfer(dest->GetURL(), true, ReportOngoingStatus); if (transfer_st.Passed()) { try_another_transfer = false; eof_reached = true; // so that full copy is reported back to scheduler buffer.speed.verbose(false); unsigned long long bytes = GetFileSize(*source, *dest); if(bytes < transfer_bytes) bytes = transfer_bytes; buffer.speed.transfer(bytes); } else { if (transfer_st != DataStatus::UnimplementedError) { if (dest->Local()) dest->Remove(); // to allow retries try_another_transfer = false; } else { logger.msg(INFO, "Internal transfer method is not supported for %s", source->str()); } } } } if (try_another_transfer) { if (dest->SupportsTransfer()) { logger.msg(INFO, "Using internal transfer method of %s", dest->str()); transfer_st = dest->Transfer(source->GetURL(), false, ReportOngoingStatus); if (transfer_st.Passed()) { try_another_transfer = false; eof_reached = true; // so that full copy is reported back to scheduler buffer.speed.verbose(false); unsigned long long bytes = GetFileSize(*source, *dest); if(bytes < transfer_bytes) bytes = transfer_bytes; buffer.speed.transfer(bytes); } else { if (transfer_st != DataStatus::UnimplementedError) { try_another_transfer = false; } else { logger.msg(INFO, "Internal transfer method is not supported for %s", dest->str()); } } } } if (try_another_transfer) { // Initiating transfer source_st = source->StartReading(buffer); if(!source_st) { ReportStatus(DataStaging::DTRStatus::TRANSFERRED, (source_url.Protocol()!="file") ? (source_st.Retryable() ? 
DataStaging::DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DataStaging::DTRErrorStatus::PERMANENT_REMOTE_ERROR) : DataStaging::DTRErrorStatus::LOCAL_FILE_ERROR, DataStaging::DTRErrorStatus::ERROR_SOURCE, std::string("Failed reading from source: ")+source->CurrentLocation().str()+ " : "+std::string(source_st), 0,0,0); _exit(-1); //return -1; }; dest_st = dest->StartWriting(buffer); if(!dest_st) { ReportStatus(DataStaging::DTRStatus::TRANSFERRED, (dest_url.Protocol() != "file") ? (dest_st.Retryable() ? DataStaging::DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DataStaging::DTRErrorStatus::PERMANENT_REMOTE_ERROR) : DataStaging::DTRErrorStatus::LOCAL_FILE_ERROR, DataStaging::DTRErrorStatus::ERROR_DESTINATION, std::string("Failed writing to destination: ")+dest->CurrentLocation().str()+ " : "+std::string(dest_st), 0,0,0); _exit(-1); //return -1; } // While transfer is running in another threads // here we periodically report status to parent for(;!buffer.error() && !delivery_shutdown;) { if(buffer.eof_read() && buffer.eof_write()) { eof_reached = true; break; }; ReportStatus(DataStaging::DTRStatus::TRANSFERRING, DataStaging::DTRErrorStatus::NONE_ERROR, DataStaging::DTRErrorStatus::NO_ERROR_LOCATION, "", buffer.speed.transferred_size(), GetFileSize(*source,*dest),0); buffer.wait_any(); }; dest_st = dest->StopWriting(); source_st = source->StopReading(); } if (delivery_shutdown) { ReportStatus(DataStaging::DTRStatus::TRANSFERRED, DataStaging::DTRErrorStatus::INTERNAL_PROCESS_ERROR, DataStaging::DTRErrorStatus::ERROR_TRANSFER, "DataStagingProcess process killed", buffer.speed.transferred_size(), GetFileSize(*source,*dest),0); dest->StopWriting(); _exit(-1); } ReportStatus(DataStaging::DTRStatus::TRANSFERRING, DataStaging::DTRErrorStatus::NONE_ERROR, DataStaging::DTRErrorStatus::NO_ERROR_LOCATION, "", buffer.speed.transferred_size(), GetFileSize(*source,*dest),0); // These will return false if buffer was not used bool source_failed = buffer.error_read(); bool dest_failed = buffer.error_write(); // Error at source or destination if(source_failed || !source_st) { std::string err("Failed reading from source: "+source->CurrentLocation().str()); // If error reported in read callback, use that instead if (source->GetFailureReason() != DataStatus::UnknownError) source_st = source->GetFailureReason(); if (!source_st) err += " : " + std::string(source_st); ReportStatus(DataStaging::DTRStatus::TRANSFERRED, (source_url.Protocol() != "file") ? (((!source_st && source_st.Retryable()) || buffer.speed.transferred_size() > 0) ? DataStaging::DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DataStaging::DTRErrorStatus::PERMANENT_REMOTE_ERROR) : DataStaging::DTRErrorStatus::LOCAL_FILE_ERROR, DataStaging::DTRErrorStatus::ERROR_SOURCE, err, buffer.speed.transferred_size(), GetFileSize(*source,*dest), start_time); reported = true; }; if(dest_failed || !dest_st) { std::string err("Failed writing to destination: "+dest->CurrentLocation().str()); // If error reported in write callback, use that instead if (dest->GetFailureReason() != DataStatus::UnknownError) dest_st = dest->GetFailureReason(); if (!dest_st) err += " : " + std::string(dest_st); ReportStatus(DataStaging::DTRStatus::TRANSFERRED, (dest_url.Protocol() != "file") ? (((!dest_st && dest_st.Retryable()) || buffer.speed.transferred_size() > 0) ? 
DataStaging::DTRErrorStatus::TEMPORARY_REMOTE_ERROR : DataStaging::DTRErrorStatus::PERMANENT_REMOTE_ERROR) : DataStaging::DTRErrorStatus::LOCAL_FILE_ERROR, DataStaging::DTRErrorStatus::ERROR_DESTINATION, err, buffer.speed.transferred_size(), GetFileSize(*source,*dest), start_time); reported = true; }; if (!transfer_st) { // Usually it's not possible to know at which end the transfer failed ReportStatus(DataStaging::DTRStatus::TRANSFERRED, DataStaging::DTRErrorStatus::PERMANENT_REMOTE_ERROR, DataStaging::DTRErrorStatus::ERROR_UNKNOWN, transfer_st.GetDesc(), 0, GetFileSize(*source,*dest), start_time); reported = true; } // Transfer error, usually timeout if(!eof_reached) { if((!dest_failed) && (!source_failed)) { ReportStatus(DataStaging::DTRStatus::TRANSFERRED, DataStaging::DTRErrorStatus::TRANSFER_SPEED_ERROR, DataStaging::DTRErrorStatus::ERROR_UNKNOWN, "Transfer timed out", buffer.speed.transferred_size(), GetFileSize(*source,*dest), start_time); reported = true; }; }; if (crc && buffer.checksum_valid()) { char buf[100]; crc.print(buf,100); calc_csum = buf; } else if(crc_source) { char buf[100]; crc_source.print(buf,100); calc_csum = buf; } else if(crc_dest) { char buf[100]; crc_dest.print(buf,100); calc_csum = buf; } if (!reported && !calc_csum.empty() && crc.Type() != CheckSumAny::none) { // compare calculated to any checksum given as an option if (source->CheckCheckSum()) { // Check the checksum types match. Some buggy GridFTP servers return a // different checksum type than requested so also check that the checksum // length matches before comparing. if (calc_csum.substr(0, calc_csum.find(":")) != checksum_type || calc_csum.substr(calc_csum.find(":")+1).length() != checksum_value.length()) { logger.msg(INFO, "Checksum type of source and calculated checksum differ, cannot compare"); } else if (calc_csum.substr(calc_csum.find(":")+1) != Arc::lower(checksum_value)) { logger.msg(ERROR, "Checksum mismatch between calculated checksum %s and source checksum %s", calc_csum, source->GetCheckSum()); ReportStatus(DataStaging::DTRStatus::TRANSFERRED, DataStaging::DTRErrorStatus::TRANSFER_SPEED_ERROR, DataStaging::DTRErrorStatus::ERROR_UNKNOWN, "Checksum mismatch", 0,0,start_time); reported = true; eof_reached = false; // TODO general error flag is better than this // Delete destination if (!dest->Remove().Passed()) { logger.msg(WARNING, "Failed cleaning up destination %s", dest->GetURL().str()); } } else logger.msg(INFO, "Calculated transfer checksum %s matches source checksum", calc_csum); } } else { logger.msg(VERBOSE, "Checksum not computed"); } if(!reported) { ReportStatus(DataStaging::DTRStatus::TRANSFERRED, DataStaging::DTRErrorStatus::NONE_ERROR, DataStaging::DTRErrorStatus::NO_ERROR_LOCATION, "", buffer.speed.transferred_size(), GetFileSize(*source,*dest), start_time, calc_csum); }; _exit(eof_reached?0:1); //return eof_reached?0:1; } nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DataDeliveryLocalComm.cpp0000644000000000000000000000013214152153376026277 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.851554202 nordugrid-arc-6.14.0/src/libs/data-staging/DataDeliveryLocalComm.cpp0000644000175000002070000002360014152153376026265 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include "DataDeliveryLocalComm.h" namespace DataStaging { // Check if needed and create copy of proxy with suitable ownership static std::string prepare_proxy(const std::string& proxy_path, int 
child_uid, int child_gid) { if (proxy_path.empty()) return ""; // No credentials int my_uid = (int)::getuid(); if (my_uid != 0) return ""; // Can't switch user id if (child_uid == 0) return ""; // Not switching if (child_uid == my_uid) return ""; // Not switching // Check ownership of credentials. struct ::stat st; if(!Arc::FileStat(proxy_path,&st,true)) return ""; // Can't stat - won't read if(st.st_uid == child_uid) return ""; // Owned by child // Ownership may prevent reading of file. std::string proxy_content; if(!Arc::FileRead(proxy_path, proxy_content)) return ""; // Creating temporary file // Probably not most effective solution. But makes sure // access permissions are set properly. std::string proxy_new_path; if(!Arc::TmpFileCreate(proxy_new_path, proxy_content, child_uid, child_gid, S_IRUSR|S_IWUSR)) { if (!proxy_new_path.empty()) Arc::FileDelete(proxy_new_path); return ""; } return proxy_new_path; } DataDeliveryLocalComm::DataDeliveryLocalComm(DTR_ptr dtr, const TransferParameters& params) : DataDeliveryComm(dtr, params),child_(NULL),last_comm(Arc::Time()) { // Initial empty status memset(&status_,0,sizeof(status_)); status_.commstatus = CommInit; status_pos_ = 0; if(!dtr->get_source()) { logger_->msg(Arc::ERROR, "No source defined"); return; } if(!dtr->get_destination()) { logger_->msg(Arc::ERROR, "No destination defined"); return; } { Glib::Mutex::Lock lock(lock_); // Generate options for child std::list args; std::string execpath = Arc::ArcLocation::GetLibDir()+G_DIR_SEPARATOR_S+"DataStagingDelivery"; args.push_back(execpath); // check for alternative source or destination eg cache, mapped URL, TURL std::string surl; if (!dtr->get_mapped_source().empty()) { surl = dtr->get_mapped_source(); } else if (!dtr->get_source()->TransferLocations().empty()) { surl = dtr->get_source()->TransferLocations()[0].fullstr(); } else { logger_->msg(Arc::ERROR, "No locations defined for %s", dtr->get_source()->str()); return; } if (dtr->get_destination()->TransferLocations().empty()) { logger_->msg(Arc::ERROR, "No locations defined for %s", dtr->get_destination()->str()); return; } std::string durl = dtr->get_destination()->TransferLocations()[0].fullstr(); bool caching = false; if ((dtr->get_cache_state() == CACHEABLE) && !dtr->get_cache_file().empty()) { durl = dtr->get_cache_file(); caching = true; } int child_uid = 0; int child_gid = 0; if(!caching) { child_uid = dtr->get_local_user().get_uid(); child_gid = dtr->get_local_user().get_gid(); } args.push_back("--surl"); args.push_back(surl); args.push_back("--durl"); args.push_back(durl); // Check if credentials are needed for source/dest Arc::DataHandle surl_h(surl, dtr->get_usercfg()); Arc::DataHandle durl_h(durl, dtr->get_usercfg()); if (!dtr->get_usercfg().CredentialString().empty() && surl_h && !surl_h->RequiresCredentialsInFile() && durl_h && !durl_h->RequiresCredentialsInFile()) { // If file-based credentials are not required then send through stdin stdin_ = dtr->get_usercfg().CredentialString(); } else { // If child is going to be run under different user ID // we must ensure it will be able to read credentials. 
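// prepare_proxy() above returns a temporary copy owned by the child uid/gid
// only when running as root and switching users; otherwise it returns an
// empty string and the original proxy path is passed through unchanged below.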
tmp_proxy_ = prepare_proxy(dtr->get_usercfg().ProxyPath(), child_uid, child_gid); if (!tmp_proxy_.empty()) { args.push_back("--sopt"); args.push_back("credential="+tmp_proxy_); args.push_back("--dopt"); args.push_back("credential="+tmp_proxy_); } else if(!dtr->get_usercfg().ProxyPath().empty()) { args.push_back("--sopt"); args.push_back("credential="+dtr->get_usercfg().ProxyPath()); args.push_back("--dopt"); args.push_back("credential="+dtr->get_usercfg().ProxyPath()); } } if (!dtr->get_usercfg().CACertificatesDirectory().empty()) { args.push_back("--sopt"); args.push_back("ca="+dtr->get_usercfg().CACertificatesDirectory()); args.push_back("--dopt"); args.push_back("ca="+dtr->get_usercfg().CACertificatesDirectory()); } args.push_back("--topt"); args.push_back("minspeed="+Arc::tostring(transfer_params.min_current_bandwidth)); args.push_back("--topt"); args.push_back("minspeedtime="+Arc::tostring(transfer_params.averaging_time)); args.push_back("--topt"); args.push_back("minavgspeed="+Arc::tostring(transfer_params.min_average_bandwidth)); args.push_back("--topt"); args.push_back("maxinacttime="+Arc::tostring(transfer_params.max_inactivity_time)); if (dtr->get_source()->CheckSize()) { args.push_back("--size"); args.push_back(Arc::tostring(dtr->get_source()->GetSize())); } if (dtr->get_source()->CheckCheckSum()) { std::string csum(dtr->get_source()->GetCheckSum()); std::string::size_type pos(csum.find(':')); if (pos == std::string::npos || pos == csum.length()-1) { logger_->msg(Arc::WARNING, "Bad checksum format %s", csum); } else { args.push_back("--cstype"); args.push_back(csum.substr(0, pos)); args.push_back("--csvalue"); args.push_back(csum.substr(pos+1)); } } else if (!dtr->get_destination()->GetURL().MetaDataOption("checksumtype").empty()) { args.push_back("--cstype"); args.push_back(dtr->get_destination()->GetURL().MetaDataOption("checksumtype")); if (!dtr->get_destination()->GetURL().MetaDataOption("checksumvalue").empty()) { args.push_back("--csvalue"); args.push_back(dtr->get_destination()->GetURL().MetaDataOption("checksumvalue")); } } else if (!dtr->get_destination()->GetURL().Option("checksum").empty()) { args.push_back("--cstype"); args.push_back(dtr->get_destination()->GetURL().Option("checksum")); } else if (dtr->get_destination()->AcceptsMeta() || dtr->get_destination()->ProvidesMeta()) { args.push_back("--cstype"); args.push_back(dtr->get_destination()->DefaultCheckSum()); } child_ = new Arc::Run(args); // Set up pipes child_->KeepStdout(false); child_->KeepStderr(false); child_->KeepStdin(false); child_->AssignUserId(child_uid); child_->AssignGroupId(child_gid); child_->AssignStdin(stdin_); // Start child std::string cmd; for(std::list::iterator arg = args.begin();arg!=args.end();++arg) { cmd += *arg; cmd += " "; } logger_->msg(Arc::DEBUG, "Running command: %s", cmd); if(!child_->Start()) { delete child_; child_=NULL; logger_->msg(Arc::ERROR, "Failed to run command: %s", cmd); return; } } handler_->Add(this); } DataDeliveryLocalComm::~DataDeliveryLocalComm(void) { { Glib::Mutex::Lock lock(lock_); if(child_) { child_->Kill(10); // Give it a chance delete child_; child_=NULL; // And then kill for sure } } if(!tmp_proxy_.empty()) Arc::FileDelete(tmp_proxy_); if(handler_) handler_->Remove(this); } void DataDeliveryLocalComm::PullStatus(void) { Glib::Mutex::Lock lock(lock_); if(!child_) return; for(;;) { if(status_pos_ < sizeof(status_buf_)) { int l; // TODO: direct redirect for(;;) { char buf[1024+1]; l = child_->ReadStderr(0,buf,sizeof(buf)-1); if(l <= 0) break; buf[l] = 0; 
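// Split the child's stderr output on newlines and forward each line to the
// per-DTR log.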
char* start = buf; for(;*start;) { char* end = strchr(start,'\n'); if(end) *end = 0; logger_->msg(Arc::INFO, "DataDelivery: %s", start); if(!end) break; start = end + 1; } } l = child_->ReadStdout(0,((char*)&status_buf_)+status_pos_,sizeof(status_buf_)-status_pos_); if(l == -1) { // child error or closed comm if(child_->Running()) { status_.commstatus = CommClosed; } else { status_.commstatus = CommExited; if(child_->Result() != 0) { logger_->msg(Arc::ERROR, "DataStagingDelivery exited with code %i", child_->Result()); status_.commstatus = CommFailed; } } delete child_; child_=NULL; return; } if(l == 0) break; status_pos_+=l; last_comm = Arc::Time(); } if(status_pos_ >= sizeof(status_buf_)) { status_buf_.error_desc[sizeof(status_buf_.error_desc)-1] = 0; status_=status_buf_; status_pos_-=sizeof(status_buf_); } } // check for stuck child process (no report through comm channel) Arc::Period t = Arc::Time() - last_comm; if (transfer_params.max_inactivity_time > 0 && t >= transfer_params.max_inactivity_time*2) { logger_->msg(Arc::ERROR, "Transfer killed after %i seconds without communication", t.GetPeriod()); child_->Kill(1); delete child_; child_ = NULL; } } bool DataDeliveryLocalComm::CheckComm(DTR_ptr dtr, std::vector& allowed_dirs, std::string& load_avg) { allowed_dirs.push_back("/"); double avg[3]; if (getloadavg(avg, 3) != 3) { load_avg = "-1"; } else { load_avg = Arc::tostring(avg[1]); } return true; } } // namespace DataStaging nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/examples0000644000000000000000000000013214152153472023165 xustar000000000000000030 mtime=1638455098.909555073 30 atime=1638455103.996631509 30 ctime=1638455098.909555073 nordugrid-arc-6.14.0/src/libs/data-staging/examples/0000755000175000002070000000000014152153472023227 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/libs/data-staging/examples/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376025301 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.905555013 nordugrid-arc-6.14.0/src/libs/data-staging/examples/Makefile.am0000644000175000002070000000104714152153376025270 0ustar00mockbuildmock00000000000000check_PROGRAMS = generator generator_SOURCES = generator-main.cpp Generator.h Generator.cpp generator_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) generator_LDADD = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ ../libarcdatastaging.la $(GLIBMM_LIBS) exampledir = $(pkgdatadir)/examples/sdk example_DATA = $(generator_SOURCES) EXTRA_DIST = $(generator_SOURCES) nordugrid-arc-6.14.0/src/libs/data-staging/examples/PaxHeaders.30264/Generator.h0000644000000000000000000000013214152153376025344 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.908555058 nordugrid-arc-6.14.0/src/libs/data-staging/examples/Generator.h0000644000175000002070000000253014152153376025331 0ustar00mockbuildmock00000000000000#ifndef GENERATOR_H_ #define GENERATOR_H_ #include #include #include // This Generator basic implementation shows how a Generator can // be written. It has one method, run(), which creates a single DTR // and submits it to the Scheduler. 
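// A rough usage sketch (illustration only; the accompanying generator-main.cpp
// may drive it differently, and the counter accessor shown here is assumed):
//
//   Generator generator;
//   generator.start();
//   generator.run("gsiftp://example.org/file1", "/tmp/file1");
//   while (generator.counter.get() > 0) sleep(1);  // wait until receiveDTR() has run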
class Generator: public DataStaging::DTRCallback { private: // Condition to wait on until DTR has finished static Arc::SimpleCondition cond; // DTR Scheduler DataStaging::Scheduler scheduler; // Logger object static Arc::Logger logger; // Root LogDestinations to be used in receiveDTR std::list root_destinations; public: // Counter for main to know how many DTRs are in the system Arc::SimpleCounter counter; // Create a new Generator. start() must be called to start DTR threads. Generator(); // Stop Generator and DTR threads ~Generator(); // Implementation of callback from DTRCallback - the callback method used // when DTR processing is complete to pass the DTR back to the generator. // It decrements counter. virtual void receiveDTR(DataStaging::DTR_ptr dtr); // Start Generator and DTR threads void start(); // Submit a DTR with given source and destination. Increments counter. void run(const std::string& source, const std::string& destination); }; #endif /* GENERATOR_H_ */ nordugrid-arc-6.14.0/src/libs/data-staging/examples/PaxHeaders.30264/Makefile.in0000644000000000000000000000013114152153432025302 xustar000000000000000029 mtime=1638455066.10506217 30 atime=1638455089.514413907 30 ctime=1638455098.904554998 nordugrid-arc-6.14.0/src/libs/data-staging/examples/Makefile.in0000644000175000002070000007717514152153432025311 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ check_PROGRAMS = generator$(EXEEXT) subdir = src/libs/data-staging/examples DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = am_generator_OBJECTS = generator-generator-main.$(OBJEXT) \ generator-Generator.$(OBJEXT) generator_OBJECTS = $(am_generator_OBJECTS) am__DEPENDENCIES_1 = generator_DEPENDENCIES = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ ../libarcdatastaging.la $(am__DEPENDENCIES_1) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = generator_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(generator_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; 
am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(generator_SOURCES) DIST_SOURCES = $(generator_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(exampledir)" DATA = $(example_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. 
This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ 
GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ 
TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ generator_SOURCES = generator-main.cpp Generator.h Generator.cpp generator_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) generator_LDADD = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ ../libarcdatastaging.la $(GLIBMM_LIBS) exampledir = $(pkgdatadir)/examples/sdk example_DATA = $(generator_SOURCES) EXTRA_DIST = $(generator_SOURCES) all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign 
src/libs/data-staging/examples/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/libs/data-staging/examples/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-checkPROGRAMS: @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list generator$(EXEEXT): $(generator_OBJECTS) $(generator_DEPENDENCIES) $(EXTRA_generator_DEPENDENCIES) @rm -f generator$(EXEEXT) $(AM_V_CXXLD)$(generator_LINK) $(generator_OBJECTS) $(generator_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/generator-Generator.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/generator-generator-main.Po@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< generator-generator-main.o: generator-main.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(generator_CXXFLAGS) $(CXXFLAGS) -MT generator-generator-main.o -MD -MP -MF $(DEPDIR)/generator-generator-main.Tpo -c -o generator-generator-main.o `test -f 'generator-main.cpp' || echo '$(srcdir)/'`generator-main.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/generator-generator-main.Tpo $(DEPDIR)/generator-generator-main.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='generator-main.cpp' 
object='generator-generator-main.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(generator_CXXFLAGS) $(CXXFLAGS) -c -o generator-generator-main.o `test -f 'generator-main.cpp' || echo '$(srcdir)/'`generator-main.cpp generator-generator-main.obj: generator-main.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(generator_CXXFLAGS) $(CXXFLAGS) -MT generator-generator-main.obj -MD -MP -MF $(DEPDIR)/generator-generator-main.Tpo -c -o generator-generator-main.obj `if test -f 'generator-main.cpp'; then $(CYGPATH_W) 'generator-main.cpp'; else $(CYGPATH_W) '$(srcdir)/generator-main.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/generator-generator-main.Tpo $(DEPDIR)/generator-generator-main.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='generator-main.cpp' object='generator-generator-main.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(generator_CXXFLAGS) $(CXXFLAGS) -c -o generator-generator-main.obj `if test -f 'generator-main.cpp'; then $(CYGPATH_W) 'generator-main.cpp'; else $(CYGPATH_W) '$(srcdir)/generator-main.cpp'; fi` generator-Generator.o: Generator.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(generator_CXXFLAGS) $(CXXFLAGS) -MT generator-Generator.o -MD -MP -MF $(DEPDIR)/generator-Generator.Tpo -c -o generator-Generator.o `test -f 'Generator.cpp' || echo '$(srcdir)/'`Generator.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/generator-Generator.Tpo $(DEPDIR)/generator-Generator.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='Generator.cpp' object='generator-Generator.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(generator_CXXFLAGS) $(CXXFLAGS) -c -o generator-Generator.o `test -f 'Generator.cpp' || echo '$(srcdir)/'`Generator.cpp generator-Generator.obj: Generator.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(generator_CXXFLAGS) $(CXXFLAGS) -MT generator-Generator.obj -MD -MP -MF $(DEPDIR)/generator-Generator.Tpo -c -o generator-Generator.obj `if test -f 'Generator.cpp'; then $(CYGPATH_W) 'Generator.cpp'; else $(CYGPATH_W) '$(srcdir)/Generator.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/generator-Generator.Tpo $(DEPDIR)/generator-Generator.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='Generator.cpp' object='generator-Generator.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(generator_CXXFLAGS) $(CXXFLAGS) -c -o generator-Generator.obj `if test -f 'Generator.cpp'; then $(CYGPATH_W) 'Generator.cpp'; else $(CYGPATH_W) '$(srcdir)/Generator.cpp'; fi` mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-exampleDATA: 
$(example_DATA) @$(NORMAL_INSTALL) @list='$(example_DATA)'; test -n "$(exampledir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(exampledir)'"; \ $(MKDIR_P) "$(DESTDIR)$(exampledir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(exampledir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(exampledir)" || exit $$?; \ done uninstall-exampleDATA: @$(NORMAL_UNINSTALL) @list='$(example_DATA)'; test -n "$(exampledir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(exampledir)'; $(am__uninstall_files_from_dir) ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS) check: check-am all-am: Makefile $(DATA) installdirs: for dir in "$(DESTDIR)$(exampledir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-checkPROGRAMS clean-generic clean-libtool \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-exampleDATA install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-exampleDATA .MAKE: check-am install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \ clean-checkPROGRAMS clean-generic clean-libtool cscopelist-am \ ctags ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exampleDATA \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-man install-pdf \ install-pdf-am install-ps install-ps-am install-strip \ installcheck installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am uninstall-exampleDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/libs/data-staging/examples/PaxHeaders.30264/Generator.cpp0000644000000000000000000000013214152153376025677 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.909555073 nordugrid-arc-6.14.0/src/libs/data-staging/examples/Generator.cpp0000644000175000002070000000430314152153376025664 0ustar00mockbuildmock00000000000000#include #include #include "Generator.h" Arc::Logger Generator::logger(Arc::Logger::getRootLogger(), "Generator"); Arc::SimpleCondition Generator::cond; Generator::Generator() { // Set up logging root_destinations = Arc::Logger::getRootLogger().getDestinations(); DataStaging::DTR::LOG_LEVEL = Arc::Logger::getRootLogger().getThreshold(); } Generator::~Generator() { logger.msg(Arc::INFO, "Shutting down scheduler"); scheduler.stop(); logger.msg(Arc::INFO, "Scheduler stopped, exiting"); } void Generator::receiveDTR(DataStaging::DTR_ptr dtr) { // root logger is disabled in Scheduler thread so need to add it here Arc::Logger::getRootLogger().addDestinations(root_destinations); logger.msg(Arc::INFO, "Received DTR %s back from scheduler in state %s", dtr->get_id(), dtr->get_status().str()); Arc::Logger::getRootLogger().removeDestinations(); counter.dec(); } void Generator::start() { // Starting scheduler with default configuration logger.msg(Arc::INFO, "Generator started"); logger.msg(Arc::INFO, "Starting DTR threads"); scheduler.SetDumpLocation("/tmp/dtr.log"); scheduler.start(); } void Generator::run(const std::string& source, const std::string& destination) { std::string job_id = Arc::UUID(); Arc::initializeCredentialsType cred_type(Arc::initializeCredentialsType::TryCredentials); Arc::UserConfig cfg(cred_type); // check credentials if (!Arc::Credential::IsCredentialsValid(cfg)) { logger.msg(Arc::ERROR, "No valid credentials found, exiting"); return; } cfg.UtilsDirPath(Arc::UserConfig::ARCUSERDIRECTORY()); std::list logs; logs.push_back(new Arc::LogStream(std::cout)); DataStaging::DTR_ptr dtr(new DataStaging::DTR(source, destination, cfg, job_id, Arc::User().get_uid(), logs, "DataStaging")); if (!(*dtr)) { logger.msg(Arc::ERROR, "Problem creating dtr (source %s, destination %s)", source, destination); return; } // register callback with DTR dtr->registerCallback(this,DataStaging::GENERATOR); dtr->registerCallback(&scheduler,DataStaging::SCHEDULER); dtr->set_tries_left(5); DataStaging::DTR::push(dtr, DataStaging::SCHEDULER); counter.inc(); } nordugrid-arc-6.14.0/src/libs/data-staging/examples/PaxHeaders.30264/generator-main.cpp0000644000000000000000000000013214152153376026661 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.907555043 nordugrid-arc-6.14.0/src/libs/data-staging/examples/generator-main.cpp0000644000175000002070000000376714152153376026663 0ustar00mockbuildmock00000000000000/* // To compile this example requires that nordugrid-arc-devel be installed. 
It // also requires including headers of external libraries used by ARC core code: // // g++ -o generator `pkg-config --cflags glibmm-2.4` -I/usr/include/libxml2 \ // -larcdatastaging Generator.cpp Generator.h generator-main.cpp // // If ARC is installed in a non-standard location, the options // -L ARC_LOCATION/lib and -I ARC_LOCATION/include should also be used */ #ifdef HAVE_CONFIG_H #include #endif #include #include #include "Generator.h" static Arc::SimpleCounter counter; static bool run = true; static void do_shutdown(int) { run = false; } static void usage() { std::cout << "Usage: generator [num mock transfers]" << std::endl; std::cout << " generator source destination" << std::endl; std::cout << "To use mock transfers ARC must be built with configure --enable-mock-dmc" << std::endl; std::cout << "The default number of mock transfers is 10" << std::endl; } int main(int argc, char** argv) { signal(SIGTTOU,SIG_IGN); signal(SIGTTIN,SIG_IGN); signal(SIGINT, do_shutdown); // Log to stderr Arc::LogStream logcerr(std::cerr); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::INFO); Generator generator; int num = 10; if (argc == 1 || argc == 2) { // run mock a number of times if (argc == 2 && (std::string(argv[1]) == "-h" || !Arc::stringto(argv[1], num))) { usage(); return 1; } generator.start(); for (int i = 0; i < num; ++i) { std::string source = "mock://mocksrc/mock." + Arc::tostring(i); std::string destination = "mock://mockdest/mock." + Arc::tostring(i); generator.run(source, destination); } } else if (argc == 3) { // run with given source and destination generator.start(); generator.run(argv[1], argv[2]); } else { usage(); return 1; } while (generator.counter.get() > 0 && run) { sleep(1); } return 0; } nordugrid-arc-6.14.0/src/libs/data-staging/examples/PaxHeaders.30264/README0000644000000000000000000000013214152153376024125 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.906555028 nordugrid-arc-6.14.0/src/libs/data-staging/examples/README0000644000175000002070000000006014152153376024106 0ustar00mockbuildmock00000000000000Examples of how to use the data staging library.nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DataDeliveryRemoteComm.cpp0000644000000000000000000000013214152153376026500 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.852554217 nordugrid-arc-6.14.0/src/libs/data-staging/DataDeliveryRemoteComm.cpp0000644000175000002070000004341714152153376026476 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include "DataDeliveryRemoteComm.h" namespace DataStaging { Arc::Logger DataDeliveryRemoteComm::logger(Arc::Logger::getRootLogger(), "DataStaging.DataDeliveryRemoteComm"); DataDeliveryRemoteComm::DataDeliveryRemoteComm(DTR_ptr dtr, const TransferParameters& params) : DataDeliveryComm(dtr, params), client(NULL), dtr_full_id(dtr->get_id()), query_retries(20), endpoint(dtr->get_delivery_endpoint()), timeout(dtr->get_usercfg().Timeout()), valid(false) { { Glib::Mutex::Lock lock(lock_); // Initial empty status memset(&status_,0,sizeof(status_)); FillStatus(); } if(!dtr->get_source()) return; if(!dtr->get_destination()) return; // check for alternative source or destination eg cache, mapped URL, TURL std::string surl; if (!dtr->get_mapped_source().empty()) { surl = dtr->get_mapped_source(); } else if (!dtr->get_source()->TransferLocations().empty()) { surl = 
dtr->get_source()->TransferLocations()[0].fullstr(); } else { logger_->msg(Arc::ERROR, "No locations defined for %s", dtr->get_source()->str()); return; } if (dtr->get_destination()->TransferLocations().empty()) { logger_->msg(Arc::ERROR, "No locations defined for %s", dtr->get_destination()->str()); return; } std::string durl = dtr->get_destination()->TransferLocations()[0].fullstr(); bool caching = false; if ((dtr->get_cache_state() == CACHEABLE) && !dtr->get_cache_file().empty()) { durl = dtr->get_cache_file(); caching = true; } if (dtr->host_cert_for_remote_delivery()) { Arc::initializeCredentialsType cred_type(Arc::initializeCredentialsType::TryCredentials); Arc::UserConfig host_cfg(cred_type); host_cfg.ProxyPath(""); // to force using cert/key files instead of non-existent proxy host_cfg.ApplyToConfig(cfg); } else { dtr->get_usercfg().ApplyToConfig(cfg); } // connect to service and make a new transfer request logger_->msg(Arc::VERBOSE, "Connecting to Delivery service at %s", endpoint.str()); client = new Arc::ClientSOAP(cfg, endpoint, timeout); Arc::NS ns; Arc::PayloadSOAP request(ns); Arc::XMLNode dtrnode = request.NewChild("DataDeliveryStart").NewChild("DTR"); dtrnode.NewChild("ID") = dtr_full_id; dtrnode.NewChild("Source") = surl; dtrnode.NewChild("Destination") = durl; if (dtr->get_source()->CheckSize()) dtrnode.NewChild("Size") = Arc::tostring(dtr->get_source()->GetSize()); if (dtr->get_source()->CheckCheckSum()) dtrnode.NewChild("CheckSum") = dtr->get_source()->GetCheckSum(); dtrnode.NewChild("Uid") = Arc::tostring(dtr->get_local_user().get_uid()); dtrnode.NewChild("Gid") = Arc::tostring(dtr->get_local_user().get_gid()); // transfer parameters dtrnode.NewChild("MinAverageSpeed") = Arc::tostring(params.min_average_bandwidth); dtrnode.NewChild("AverageTime") = Arc::tostring(params.averaging_time); dtrnode.NewChild("MinCurrentSpeed") = Arc::tostring(params.min_current_bandwidth); dtrnode.NewChild("MaxInactivityTime") = Arc::tostring(params.max_inactivity_time); // caching if (caching) dtrnode.NewChild("Caching") = "true"; else dtrnode.NewChild("Caching") = "false"; // delegate credentials Arc::XMLNode op = request.Child(0); if (!SetupDelegation(op, dtr->get_usercfg())) { logger_->msg(Arc::ERROR, "Failed to set up credential delegation with %s", endpoint.str()); return; } std::string xml; request.GetXML(xml, true); logger_->msg(Arc::DEBUG, "Request:\n%s", xml); Arc::PayloadSOAP *response = NULL; Arc::MCC_Status status = client->process(&request, &response); if (!status) { logger_->msg(Arc::ERROR, "Could not connect to service %s: %s", endpoint.str(), (std::string)status); if (response) delete response; return; } if (!response) { logger_->msg(Arc::ERROR, "No SOAP response from Delivery service %s", endpoint.str()); return; } response->GetXML(xml, true); logger_->msg(Arc::DEBUG, "Response:\n%s", xml); if (response->IsFault()) { Arc::SOAPFault& fault = *response->Fault(); std::string err("SOAP fault: %s", fault.Code()); for (int n = 0;;++n) { if (fault.Reason(n).empty()) break; err += ": " + fault.Reason(n); } logger_->msg(Arc::ERROR, "Failed to start transfer request: %s", err); delete response; return; } Arc::XMLNode resultnode = (*response)["DataDeliveryStartResponse"]["DataDeliveryStartResult"]["Result"][0]; if (!resultnode || !resultnode["ResultCode"]) { logger_->msg(Arc::ERROR, "Bad format in XML response from service at %s: %s", endpoint.str(), xml); delete response; return; } std::string resultcode = (std::string)(resultnode["ResultCode"]); if (resultcode != "OK") { 
logger_->msg(Arc::ERROR, "Could not make new transfer request: %s: %s", resultcode, (std::string)(resultnode[0]["ErrorDescription"])); delete response; return; } logger_->msg(Arc::INFO, "Started remote Delivery at %s", endpoint.str()); delete response; valid = true; handler_->Add(this); } DataDeliveryRemoteComm::~DataDeliveryRemoteComm() { // If transfer is still going, send cancellation request to service if (valid) CancelDTR(); if (handler_) handler_->Remove(this); Glib::Mutex::Lock lock(lock_); delete client; } void DataDeliveryRemoteComm::CancelDTR() { Glib::Mutex::Lock lock(lock_); if (!client) return; Arc::NS ns; Arc::PayloadSOAP request(ns); Arc::XMLNode dtrnode = request.NewChild("DataDeliveryCancel").NewChild("DTR"); dtrnode.NewChild("ID") = dtr_full_id; std::string xml; request.GetXML(xml, true); logger_->msg(Arc::DEBUG, "Request:\n%s", xml); Arc::PayloadSOAP *response = NULL; Arc::MCC_Status status = client->process(&request, &response); if (!status) { logger_->msg(Arc::ERROR, "Failed to send cancel request: %s", (std::string)status); if (response) delete response; return; } if (!response) { logger_->msg(Arc::ERROR, "Failed to cancel: No SOAP response"); return; } response->GetXML(xml, true); logger_->msg(Arc::DEBUG, "Response:\n%s", xml); if (response->IsFault()) { Arc::SOAPFault& fault = *response->Fault(); std::string err("SOAP fault: %s", fault.Code()); for (int n = 0;;++n) { if (fault.Reason(n).empty()) break; err += ": " + fault.Reason(n); } logger_->msg(Arc::ERROR, "Failed to cancel transfer request: %s", err); delete response; return; } Arc::XMLNode resultnode = (*response)["DataDeliveryCancelResponse"]["DataDeliveryCancelResult"]["Result"][0]; if (!resultnode || !resultnode["ResultCode"]) { logger_->msg(Arc::ERROR, "Bad format in XML response: %s", xml); delete response; return; } if ((std::string)resultnode["ResultCode"] != "OK") { Arc::XMLNode errnode = resultnode["ErrorDescription"]; logger_->msg(Arc::ERROR, "Failed to cancel: %s", (std::string)errnode); } delete response; } void DataDeliveryRemoteComm::PullStatus() { // send query request to service and fill status_ Glib::Mutex::Lock lock(lock_); if (!client) return; // check time since last query - check every second for the first 20s and // after every 5s // TODO be more intelligent, using transfer rate and file size if (Arc::Time() - start_ < 20 && Arc::Time() - Arc::Time(status_.timestamp) < 1) return; if (Arc::Time() - start_ > 20 && Arc::Time() - Arc::Time(status_.timestamp) < 5) return; Arc::NS ns; Arc::PayloadSOAP request(ns); Arc::XMLNode dtrnode = request.NewChild("DataDeliveryQuery").NewChild("DTR"); dtrnode.NewChild("ID") = dtr_full_id; std::string xml; request.GetXML(xml, true); logger_->msg(Arc::DEBUG, "Request:\n%s", xml); Arc::PayloadSOAP *response = NULL; Arc::MCC_Status status = client->process(&request, &response); if (!status) { logger_->msg(Arc::ERROR, "%s", (std::string)status); status_.commstatus = CommFailed; if (response) delete response; valid = false; return; } if (!response) { if (--query_retries > 0) { HandleQueryFault("No SOAP response from delivery service"); return; } logger_->msg(Arc::ERROR, "No SOAP response from delivery service"); status_.commstatus = CommFailed; valid = false; return; } response->GetXML(xml, true); logger_->msg(Arc::DEBUG, "Response:\n%s", xml); if (response->IsFault()) { Arc::SOAPFault& fault = *response->Fault(); std::string err("SOAP fault: %s", fault.Code()); for (int n = 0;;++n) { if (fault.Reason(n).empty()) break; err += ": " + fault.Reason(n); } delete 
response; if (--query_retries > 0) { HandleQueryFault("Failed to query state: " + err); return; } logger_->msg(Arc::ERROR, "Failed to query state: %s", err); status_.commstatus = CommFailed; strncpy(status_.error_desc, "SOAP error in connection with delivery service", sizeof(status_.error_desc)); valid = false; return; } Arc::XMLNode resultnode = (*response)["DataDeliveryQueryResponse"]["DataDeliveryQueryResult"]["Result"][0]; if (!resultnode || !resultnode["ResultCode"]) { logger_->msg(Arc::ERROR, "Bad format in XML response: %s", xml); delete response; status_.commstatus = CommFailed; valid = false; return; } // Fill status fields with results from service FillStatus(resultnode[0]); delete response; } bool DataDeliveryRemoteComm::CheckComm(DTR_ptr dtr, std::vector& allowed_dirs, std::string& load_avg) { // call Ping Arc::MCCConfig cfg; if (dtr->host_cert_for_remote_delivery()) { Arc::initializeCredentialsType cred_type(Arc::initializeCredentialsType::TryCredentials); Arc::UserConfig host_cfg(cred_type); host_cfg.ProxyPath(""); // to force using cert/key files instead of non-existent proxy host_cfg.ApplyToConfig(cfg); } else { dtr->get_usercfg().ApplyToConfig(cfg); } dtr->get_logger()->msg(Arc::VERBOSE, "Connecting to Delivery service at %s", dtr->get_delivery_endpoint().str()); Arc::ClientSOAP client(cfg, dtr->get_delivery_endpoint(), dtr->get_usercfg().Timeout()); Arc::NS ns; Arc::PayloadSOAP request(ns); Arc::XMLNode ping = request.NewChild("DataDeliveryPing"); std::string xml; request.GetXML(xml, true); dtr->get_logger()->msg(Arc::DEBUG, "Request:\n%s", xml); Arc::PayloadSOAP *response = NULL; Arc::MCC_Status status = client.process(&request, &response); if (!status) { dtr->get_logger()->msg(Arc::ERROR, "Could not connect to service %s: %s", dtr->get_delivery_endpoint().str(), (std::string)status); if (response) delete response; return false; } if (!response) { dtr->get_logger()->msg(Arc::ERROR, "No SOAP response from Delivery service %s", dtr->get_delivery_endpoint().str()); return false; } response->GetXML(xml, true); dtr->get_logger()->msg(Arc::DEBUG, "Response:\n%s", xml); if (response->IsFault()) { Arc::SOAPFault& fault = *response->Fault(); std::string err("SOAP fault: %s", fault.Code()); for (int n = 0;;++n) { if (fault.Reason(n).empty()) break; err += ": " + fault.Reason(n); } dtr->get_logger()->msg(Arc::ERROR, "SOAP fault from delivery service at %s: %s", dtr->get_delivery_endpoint().str(), err); delete response; return false; } Arc::XMLNode resultnode = (*response)["DataDeliveryPingResponse"]["DataDeliveryPingResult"]["Result"][0]; if (!resultnode || !resultnode["ResultCode"]) { dtr->get_logger()->msg(Arc::ERROR, "Bad format in XML response from delivery service at %s: %s", dtr->get_delivery_endpoint().str(), xml); delete response; return false; } std::string resultcode = (std::string)(resultnode["ResultCode"]); if (resultcode != "OK") { dtr->get_logger()->msg(Arc::ERROR, "Error pinging delivery service at %s: %s: %s", dtr->get_delivery_endpoint().str(), resultcode, (std::string)(resultnode[0]["ErrorDescription"])); delete response; return false; } for (Arc::XMLNode dir = resultnode["AllowedDir"]; dir; ++dir) { allowed_dirs.push_back((std::string)dir); dtr->get_logger()->msg(Arc::DEBUG, "Dir %s allowed at service %s", (std::string)dir, dtr->get_delivery_endpoint().str()); } if (resultnode["LoadAvg"]) { load_avg = (std::string)(resultnode["LoadAvg"]); } else { load_avg = "-1"; } delete response; return true; } void DataDeliveryRemoteComm::FillStatus(const Arc::XMLNode& node) 
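// With an empty node this resets status_ to its initial values. Otherwise it
// maps the service's ResultCode (TRANSFERRED, TRANSFER_ERROR, SERVICE_ERROR,
// or anything else meaning the transfer is still running) onto
// status_.commstatus/status, and copies ErrorStatus, ErrorLocation,
// ErrorDescription, BytesTransferred, TransferTime and CheckSum into status_.
// On a terminal state the tail of the service's Log element is written to the
// local log and the connection is marked invalid.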
{ if (!node) { // initial state std::string empty(""); status_.commstatus = DataDeliveryComm::CommInit; status_.timestamp = ::time(NULL); status_.status = DTRStatus::NULL_STATE; status_.error = DTRErrorStatus::NONE_ERROR; status_.error_location = DTRErrorStatus::NO_ERROR_LOCATION; strncpy(status_.error_desc, empty.c_str(), sizeof(status_.error_desc)); status_.streams = 0; status_.transferred = 0; status_.size = 0; status_.transfer_time = 0; status_.offset = 0; status_.speed = 0; strncpy(status_.checksum, empty.c_str(), sizeof(status_.checksum)); return; } Arc::XMLNode datanode = node["ResultCode"]; if (std::string(datanode) == "TRANSFERRED") { status_.commstatus = CommExited; status_.status = DTRStatus::TRANSFERRED; } else if (std::string(datanode) == "TRANSFER_ERROR") { status_.commstatus = CommFailed; status_.status = DTRStatus::TRANSFERRED; } else if (std::string(datanode) == "SERVICE_ERROR") { status_.commstatus = CommFailed; status_.status = DTRStatus::TRANSFERRED; } else { status_.commstatus = CommNoError; status_.status = DTRStatus::TRANSFERRING; } status_.timestamp = time(NULL); datanode = node["ErrorStatus"]; if (datanode) { int error_status; Arc::stringto(std::string(datanode), error_status); status_.error = (DTRErrorStatus::DTRErrorStatusType)error_status; } datanode = node["ErrorLocation"]; if (datanode) { int error_location; Arc::stringto(std::string(datanode), error_location); status_.error_location = (DTRErrorStatus::DTRErrorLocation)error_location; } datanode = node["ErrorDescription"]; if (datanode) { strncpy(status_.error_desc, ((std::string)datanode).c_str(), sizeof(status_.error_desc)); } datanode = node["BytesTransferred"]; if (datanode) { unsigned long long int bytes; Arc::stringto(std::string(datanode), bytes); status_.transferred = bytes; } datanode = node["TransferTime"]; if (datanode) { unsigned long long int t; Arc::stringto(std::string(datanode), t); status_.transfer_time = t; } // TODO size, offset, speed (currently not used) datanode = node["CheckSum"]; if (datanode) { strncpy(status_.checksum, ((std::string)datanode).c_str(), sizeof(status_.checksum)); } // if terminal state, write log if (status_.commstatus != CommNoError) { // log message is limited to 2048 chars so just print last few lines std::string log = (std::string)node["Log"]; if (!log.empty()) { if (log.size() > 2000) log = log.substr(log.find('\n', log.size()-2000)); logger_->msg(Arc::INFO, "DataDelivery log tail:\n%s", log); } valid = false; } } bool DataDeliveryRemoteComm::SetupDelegation(Arc::XMLNode& op, const Arc::UserConfig& usercfg) { const std::string& cert = (!usercfg.ProxyPath().empty() ? usercfg.ProxyPath() : usercfg.CertificatePath()); const std::string& key = (!usercfg.ProxyPath().empty() ? 
usercfg.ProxyPath() : usercfg.KeyPath()); const std::string& credentials = usercfg.CredentialString(); if (credentials.empty() && (key.empty() || cert.empty())) { logger_->msg(Arc::VERBOSE, "Failed locating credentials"); return false; } if(!client->Load()) { logger_->msg(Arc::VERBOSE, "Failed to initiate client connection"); return false; } Arc::MCC* entry = client->GetEntry(); if(!entry) { logger_->msg(Arc::VERBOSE, "Client connection has no entry point"); return false; } Arc::DelegationProviderSOAP * deleg = NULL; // Use in-memory credentials if set in UserConfig if (!credentials.empty()) deleg = new Arc::DelegationProviderSOAP(credentials); else deleg = new Arc::DelegationProviderSOAP(cert, key); logger_->msg(Arc::VERBOSE, "Initiating delegation procedure"); if (!deleg->DelegateCredentialsInit(*entry, &(client->GetContext()))) { logger_->msg(Arc::VERBOSE, "Failed to initiate delegation credentials"); delete deleg; return false; } deleg->DelegatedToken(op); delete deleg; return true; } void DataDeliveryRemoteComm::HandleQueryFault(const std::string& err) { // Just return without changing status logger_->msg(Arc::WARNING, err); status_.timestamp = time(NULL); // A reconnect may be needed after losing connection delete client; client = new Arc::ClientSOAP(cfg, endpoint, timeout); } } // namespace DataStaging nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DTR.h0000644000000000000000000000013214152153376022231 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.843554082 nordugrid-arc-6.14.0/src/libs/data-staging/DTR.h0000644000175000002070000006500614152153376022225 0ustar00mockbuildmock00000000000000// Summary page of data staging for doxygen namespace DataStaging { /** * \defgroup datastaging ARC data staging (libarcdatastaging) * * ARC data staging components form a complete data transfer management system. * Whereas \ref data is a library for data access, enabling several types of * operation on data files on the Grid using a variety of access protocols, * \ref datastaging is a framework for managed data transfer to and from the * Grid. The data staging system is designed to run as a persistent process, to * execute data transfers on demand. Data transfers are defined and fed into * the system, and then notification is given when they complete. No knowledge * is required of the internal workings of the Grid, a user only needs to * specify URLs representing the source and destination of the transfer. * * The system is highly configurable and features an intelligent priority, * fair-share and error handling mechanism, as well as the ability to spread * data transfer across multiple hosts using ARC's DataDelivery service. It is * used by ARC's Computing Element (A-REX) for pre- and post- job data transfer * of input and output files. Note that this system is primarily for data * transfer to and from local files and that third-party transfer is not * supported. It is designed for the case of pulling or pushing data between * the Grid and a local file system, rather than a service for transfer between * two Grid storage elements. It is possible to transfer data between two * remote endpoints, but all data flows through the client. * * Simple examples of how to use libarcdatastaging are shown for several * languages in the \ref dtrgenerator "DTR examples page". In all the examples * a Generator class receives as input a source and destination, and creates * a DTR which describes the data transfer. 
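 * A condensed sketch of that flow, based on the example Generator shipped
 * under src/libs/data-staging/examples (setup of the UserConfig cfg, the
 * job_id string and the logs list is omitted, as is error handling):
 * \code
 * DataStaging::Scheduler scheduler;
 * scheduler.start();
 * DataStaging::DTR_ptr dtr(new DataStaging::DTR(source, destination, cfg,
 *                                               job_id, Arc::User().get_uid(),
 *                                               logs, "DataStaging"));
 * dtr->registerCallback(&generator, DataStaging::GENERATOR); // a DTRCallback
 * dtr->registerCallback(&scheduler, DataStaging::SCHEDULER);
 * DataStaging::DTR::push(dtr, DataStaging::SCHEDULER);
 * // when the transfer finishes the Scheduler hands the DTR back through
 * // generator.receiveDTR(dtr)
 * \endcode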
The DTR is then passed to the Scheduler * and the Generator defines a receiveDTR() method for the Scheduler to call * to notify that the transfer has finished. The examples all allow using the * Generator as a basic copy tool from the command line to copy a single file. * * For more information see http://wiki.nordugrid.org/index.php/Data_Staging */ } // namespace DataStaging #ifndef DTR_H_ #define DTR_H_ #include #include #include #include #include #include #include #include #include #include "DTRStatus.h" /// DataStaging contains all components for data transfer scheduling and execution. namespace DataStaging { class DTR; /// Provides automatic memory management of DTRs and thread-safe destruction. /** \ingroup datastaging */ typedef Arc::ThreadedPointer<DTR> DTR_ptr; /// The DTR's Logger object can be used outside the DTR object with DTRLogger. /** \ingroup datastaging */ typedef Arc::ThreadedPointer<Arc::Logger> DTRLogger; typedef Arc::ThreadedPointer<Arc::LogDestination> DTRLogDestination; /// Components of the data staging framework /** \ingroup datastaging */ enum StagingProcesses { GENERATOR, ///< Creator of new DTRs and receiver of completed DTRs SCHEDULER, ///< Controls queues and moves DTRs between other components when necessary PRE_PROCESSOR, ///< Performs all pre-transfer operations DELIVERY, ///< Performs physical transfer POST_PROCESSOR ///< Performs all post-transfer operations }; /// Internal state of StagingProcesses /** \ingroup datastaging */ enum ProcessState { INITIATED, ///< Process is ready to start RUNNING, ///< Process is running TO_STOP, ///< Process has been instructed to stop STOPPED ///< Process has stopped }; /// Represents limits and properties of a DTR transfer. These generally apply to all DTRs. /** * \ingroup datastaging * \headerfile DTR.h arc/data-staging/DTR.h */ class TransferParameters { public: /// Minimum average bandwidth in bytes/sec. /** * If the average bandwidth used over the whole transfer drops below this * level the transfer will be killed. */ unsigned long long int min_average_bandwidth; /// Maximum inactivity time in sec. /** * If transfer stops for longer than this time it will be killed. */ unsigned int max_inactivity_time; /// Minimum current bandwidth in bytes/sec. /** * If bandwidth averaged over the previous averaging_time seconds is less * than min_current_bandwidth the transfer will be killed (allows transfers * which slow down to be killed quicker). */ unsigned long long int min_current_bandwidth; /// The time in seconds over which to average the calculation of min_current_bandwidth. unsigned int averaging_time; /// Constructor. Initialises all values to zero. TransferParameters() : min_average_bandwidth(0), max_inactivity_time(0), min_current_bandwidth(0), averaging_time(0) {}; }; /// The configured cache directories /** * \ingroup datastaging * \headerfile DTR.h arc/data-staging/DTR.h */ class DTRCacheParameters { public: /// List of (cache dir [link dir]) std::vector<std::string> cache_dirs; /// List of draining caches std::vector<std::string> drain_cache_dirs; /// List of read-only caches std::vector<std::string> readonly_cache_dirs; /// Constructor with empty lists initialised DTRCacheParameters(void) {}; /// Constructor with supplied cache lists DTRCacheParameters(std::vector<std::string> caches, std::vector<std::string> drain_caches, std::vector<std::string> readonly_caches); }; /// Class for storing credential information /** * To avoid handling credentials directly this class is used to hold * information in simple string/time attributes. It should be filled before * the DTR is started.
* \ingroup datastaging * \headerfile DTR.h arc/data-staging/DTR.h */ class DTRCredentialInfo { public: /// Default constructor DTRCredentialInfo() {}; /// Constructor with supplied credential info DTRCredentialInfo(const std::string& DN, const Arc::Time& expirytime, const std::list vomsfqans); /// Get the DN std::string getDN() const { return DN; }; /// Get the expiry time Arc::Time getExpiryTime() const { return expirytime; }; /// Get the VOMS VO std::string extractVOMSVO() const; /// Get the VOMS Group (first in the supplied list of fqans) std::string extractVOMSGroup() const; /// Get the VOMS Role (first in the supplied list of fqans) std::string extractVOMSRole() const; private: std::string DN; Arc::Time expirytime; std::list vomsfqans; }; /// Represents possible cache states of this DTR /** \ingroup datastaging */ enum CacheState { CACHEABLE, ///< Source should be cached NON_CACHEABLE, ///< Source should not be cached CACHE_ALREADY_PRESENT, ///< Source is available in cache from before CACHE_DOWNLOADED, ///< Source has just been downloaded and put in cache CACHE_LOCKED, ///< Cache file is locked CACHE_SKIP, ///< Source is cacheable but due to some problem should not be cached CACHE_NOT_USED ///< Cache was started but was not used }; /// The base class from which all callback-enabled classes should be derived. /** * This class is a container for a callback method which is called when a * DTR is to be passed to a component. Several components in data staging * (eg Scheduler, Generator) are subclasses of DTRCallback, which allows * them to receive DTRs through the callback system. * \ingroup datastaging * \headerfile DTR.h arc/data-staging/DTR.h */ class DTRCallback { public: /// Empty virtual destructor virtual ~DTRCallback() {}; /// Defines the callback method called when a DTR is pushed to this object. /** * The automatic memory management of DTR_ptr ensures that the DTR object * is only deleted when the last copy is deleted. */ virtual void receiveDTR(DTR_ptr dtr) = 0; // TODO //virtual void suspendDTR(DTR& dtr) = 0; //virtual void cancelDTR(DTR& dtr) = 0; }; /// Data Transfer Request. /** * DTR stands for Data Transfer Request and a DTR describes a data transfer * between two endpoints, a source and a destination. There are several * parameters and options relating to the transfer contained in a DTR. * The normal workflow is for a Generator to create a DTR and send it to the * Scheduler for processing using DTR::push(SCHEDULER). If the Generator is a * subclass of DTRCallback, when the Scheduler has finished with the DTR * the DTRCallback::receiveDTR() callback method is called. * * DTRs should always be used through the Arc::ThreadedPointer DTR_ptr. This * ensures proper memory management when passing DTRs among various threads. * To enforce this policy the copy constructor and assignment operator are * private. * * A lock protects member variables that are likely to be accessed and * modified by multiple threads. * \ingroup datastaging * \headerfile DTR.h arc/data-staging/DTR.h */ class DTR { private: /// Identifier std::string DTR_ID; /// UserConfig and URL objects. Needed as DataHandle keeps a reference to them. Arc::URL source_url; Arc::URL destination_url; Arc::UserConfig cfg; /// Source file Arc::DataHandle source_endpoint; /// Destination file Arc::DataHandle destination_endpoint; /// Source file as a string std::string source_url_str; /// Destination file as a string std::string destination_url_str; /// Endpoint of cached file. 
/* Kept as string so we don't need to duplicate DataHandle properties * of destination. Delivery should check if this is set and if so use * it as destination. */ std::string cache_file; /// Cache configuration DTRCacheParameters cache_parameters; /// Cache state for this DTR CacheState cache_state; /// Whether ACIX is used as a source bool use_acix; /// Local user information Arc::User user; /// Credential information DTRCredentialInfo credentials; /// Job that requested the transfer. Could be used as a generic way of grouping DTRs. std::string parent_job_id; /// A flattened number set by the scheduler int priority; /// Transfer share this DTR belongs to std::string transfershare; /// This string can be used to form sub-sets of transfer shares. /** It is appended to transfershare. It can be used by the Generator * for example to split uploads and downloads into separate shares or * make shares for different endpoints. */ std::string sub_share; /// Number of attempts left to complete this DTR unsigned int tries_left; /// Initial number of attempts unsigned int initial_tries; /// A flag to say whether the DTR is replicating inside the same LFN of an index service bool replication; /// A flag to say whether to forcibly register the destination in an index service. /** Even if the source is not the same file, the destination will be * registered to an existing LFN. It should be set to true in * the case where an output file is uploaded to several locations but * with the same index service LFN */ bool force_registration; /// The file that the current source is mapped to. /** Delivery should check if this is set and if so use this as source. */ std::string mapped_source; /// Status of the DTR DTRStatus status; /// Error status of the DTR DTRErrorStatus error_status; /// Number of bytes transferred so far unsigned long long int bytes_transferred; // TODO and/or offset? /// Time taken in ns to complete transfer (0 if incomplete) unsigned long long int transfer_time; /** Timing variables **/ /// When should we finish the current action Arc::Time timeout; /// Creation time Arc::Time created; /// Modification time Arc::Time last_modified; /// Wait until this time before doing more processing Arc::Time next_process_time; /// True if some process requested cancellation bool cancel_request; /// Bulk start flag bool bulk_start; /// Bulk end flag bool bulk_end; /// Whether bulk operations are supported for the source bool source_supports_bulk; /// Flag to say whether success of the DTR is mandatory bool mandatory; /// Endpoint of delivery service this DTR is scheduled for. /** By default it is LOCAL_DELIVERY so local Delivery is used. */ Arc::URL delivery_endpoint; /// List of problematic endpoints - those which the DTR definitely cannot use std::vector problematic_delivery_endpoints; /// Whether to use host instead of user credentials for contacting remote delivery services. bool use_host_cert_for_remote_delivery; /// The process in charge of this DTR right now StagingProcesses current_owner; /// Logger object. /** Creation and deletion of this object should be managed * in the Generator and a pointer passed in the DTR constructor. */ DTRLogger logger; /// Log Destinations. /** This list is kept here so that the Logger can be connected and * disconnected in threads which have their own root logger * to avoid duplicate messages */ std::list log_destinations; /// Flag to say whether to delete LogDestinations. 
/** Set to true when a DTR thread is stuck or lost so it doesn't crash when * waking up after DTR has finished */ //bool delete_log_destinations; /// Performance metric logger Arc::JobPerfLog perf_log; /// Performance record used for recording transfer time Arc::JobPerfRecord perf_record; /// List of callback methods called when DTR moves between processes std::map > proc_callback; /// Lock to avoid collisions while changing DTR properties Arc::SimpleCondition lock; /** Possible fields (types, names and so on are subject to change) ** /// DTRs that are grouped must have the same number here int affiliation; /// History of recent statuses DTRStatus::DTRStatusType *history_of_statuses; **/ /* Methods */ /// Change modification time void mark_modification () { last_modified.SetTime(time(NULL)); }; /// Get the list of callbacks for this owner. Protected by lock. std::list get_callbacks(const std::map >& proc_callback, StagingProcesses owner); /// Private and not implemented because DTR_ptr should always be used. DTR& operator=(const DTR& dtr); DTR(const DTR& dtr); DTR(); public: /// URL that is used to denote local Delivery should be used static const Arc::URL LOCAL_DELIVERY; /// Log level for all DTR activity static Arc::LogLevel LOG_LEVEL; /// Normal constructor. /** Construct a new DTR. * @param source Endpoint from which to read data * @param destination Endpoint to which to write data * @param usercfg Provides some user configuration information * @param jobid ID of the job associated with this data transfer * @param uid UID to use when accessing local file system if source * or destination is a local file. If this is different to the current * uid then the current uid must have sufficient privileges to change uid. * @param logs List of ThreadedPointers to Logger Destinations to be * receive DTR processing messages. * @param logname Subdomain name to use for internal DTR logger. */ DTR(const std::string& source, const std::string& destination, const Arc::UserConfig& usercfg, const std::string& jobid, const uid_t& uid, std::list const& logs, const std::string& logname = std::string("DTR")); /// Empty destructor ~DTR() {}; /// Is DTR valid? operator bool() const { return (!DTR_ID.empty()); } /// Is DTR not valid? bool operator!() const { return (DTR_ID.empty()); } /// Register callback objects to be used during DTR processing. /** * Objects deriving from DTRCallback can be registered with this method. * The callback method of these objects will then be called when the DTR * is passed to the specified owner. Protected by lock. */ void registerCallback(DTRCallback* cb, StagingProcesses owner); /// Reset information held on this DTR, such as resolved replicas, error state etc. /** * Useful when a failed DTR is to be retried. */ void reset(); /// Set the ID of this DTR. Useful when passing DTR between processes. void set_id(const std::string& id); /// Get the ID of this DTR std::string get_id() const { return DTR_ID; }; /// Get an abbreviated version of the DTR ID - useful to reduce logging verbosity std::string get_short_id() const; /// Get source handle. Return by reference since DataHandle cannot be copied Arc::DataHandle& get_source() { return source_endpoint; }; /// Get destination handle. 
Return by reference since DataHandle cannot be copied Arc::DataHandle& get_destination() { return destination_endpoint; }; /// Get source as a string std::string get_source_str() const { return source_url_str; }; /// Get destination as a string std::string get_destination_str() const { return destination_url_str; }; /// Get the UserConfig object associated with this DTR const Arc::UserConfig& get_usercfg() const { return cfg; }; /// Set the timeout for processing this DTR void set_timeout(time_t value) { timeout.SetTime(Arc::Time().GetTime() + value); }; /// Get the timeout for processing this DTR Arc::Time get_timeout() const { return timeout; }; /// Set the next processing time to current time + given time void set_process_time(const Arc::Period& process_time); /// Get the next processing time for the DTR Arc::Time get_process_time() const { return next_process_time; }; /// Get the creation time Arc::Time get_creation_time() const { return created; }; /// Get the modification time Arc::Time get_modification_time() const { return last_modified; }; /// Get the parent job ID std::string get_parent_job_id() const { return parent_job_id; }; /// Set the priority void set_priority(int pri); /// Get the priority int get_priority() const { return priority; }; /// Set credential info void set_credential_info(const DTRCredentialInfo& cred) { credentials = cred; }; /// Get credential info const DTRCredentialInfo& get_credential_info() const { return credentials; }; /// Set the transfer share. sub_share is automatically added to transfershare. void set_transfer_share(const std::string& share_name); /// Get the transfer share. sub_share is automatically added to transfershare. std::string get_transfer_share() const { return transfershare; }; /// Set sub-share void set_sub_share(const std::string& share) { sub_share = share; }; /// Get sub-share std::string get_sub_share() const { return sub_share; }; /// Set the number of attempts remaining void set_tries_left(unsigned int tries); /// Get the number of attempts remaining unsigned int get_tries_left() const { return tries_left; }; /// Get the initial number of attempts (set by set_tries_left()) unsigned int get_initial_tries() const { return initial_tries; } /// Decrease attempt number void decrease_tries_left(); /// Set the status. Protected by lock. void set_status(DTRStatus stat); /// Get the status. Protected by lock. DTRStatus get_status(); /// Set the error status. /** * The DTRErrorStatus last error state field is set to the current status * of the DTR. Protected by lock. */ void set_error_status(DTRErrorStatus::DTRErrorStatusType error_stat, DTRErrorStatus::DTRErrorLocation error_loc, const std::string& desc=""); /// Set the error status back to NONE_ERROR and clear other fields void reset_error_status(); /// Get the error status. 
DTRErrorStatus get_error_status(); /// Set bytes transferred (should be set by whatever is controlling the transfer) void set_bytes_transferred(unsigned long long int bytes); /// Get current number of bytes transferred unsigned long long int get_bytes_transferred() const { return bytes_transferred; }; /// Set transfer time (should be set by whatever is controlling the transfer) void set_transfer_time(unsigned long long int t); /// Get transfer time unsigned long long int get_transfer_time() const { return transfer_time; }; /// Set the DTR to be cancelled void set_cancel_request(); /// Returns true if cancellation has been requested bool cancel_requested() const { return cancel_request; }; /// Set delivery endpoint void set_delivery_endpoint(const Arc::URL& endpoint) { delivery_endpoint = endpoint; }; /// Returns delivery endpoint const Arc::URL& get_delivery_endpoint() const { return delivery_endpoint; }; /// Add problematic endpoint. /** * Should only be those endpoints where there is a problem with the service * itself and not the transfer. */ void add_problematic_delivery_service(const Arc::URL& endpoint) { problematic_delivery_endpoints.push_back(endpoint); }; /// Get all problematic endpoints const std::vector& get_problematic_delivery_services() const { return problematic_delivery_endpoints; }; /// Set the flag for using host certificate for contacting remote delivery services void host_cert_for_remote_delivery(bool host) { use_host_cert_for_remote_delivery = host; }; /// Get the flag for using host certificate for contacting remote delivery services bool host_cert_for_remote_delivery() const { return use_host_cert_for_remote_delivery; }; /// Set cache filename void set_cache_file(const std::string& filename); /// Get cache filename std::string get_cache_file() const { return cache_file; }; /// Set cache parameters void set_cache_parameters(const DTRCacheParameters& param) { cache_parameters = param; }; /// Get cache parameters const DTRCacheParameters& get_cache_parameters() const { return cache_parameters; }; /// Set the cache state void set_cache_state(CacheState state); /// Get the cache state CacheState get_cache_state() const { return cache_state; }; /// Set whether ACIX is a source void set_use_acix(bool acix) { use_acix = acix; }; /// Get whether ACIX is a source bool get_use_acix() const { return use_acix; }; /// Set the mapped file void set_mapped_source(const std::string& file = "") { mapped_source = file; }; /// Get the mapped file std::string get_mapped_source() const { return mapped_source; }; /// Find the DTR owner StagingProcesses get_owner() const { return current_owner; }; /// Get the local user information Arc::User get_local_user() const { return user; }; /// Set replication flag void set_replication(bool rep) { replication = rep; }; /// Get replication flag bool is_replication() const { return replication; }; /// Set force replication flag void set_force_registration(bool force) { force_registration = force; }; /// Get force replication flag bool is_force_registration() const { return force_registration; }; /// Set bulk start flag void set_bulk_start(bool value) { bulk_start = value; }; /// Get bulk start flag bool get_bulk_start() const { return bulk_start; }; /// Set bulk end flag void set_bulk_end(bool value) { bulk_end = value; }; /// Get bulk start flag bool get_bulk_end() const { return bulk_end; }; /// Whether bulk operation is possible according to current state and src/dest bool bulk_possible(); /// Whether DTR success is mandatory bool is_mandatory() 
const { return mandatory; }; /// Get Logger object, so that processes can log to this DTR's log const DTRLogger& get_logger() const { return logger; }; /// Get log destination sassigned to this instance. std::list get_log_destinations() const; /// Pass the DTR from one process to another. Protected by lock. static void push(DTR_ptr dtr, StagingProcesses new_owner); /// Suspend the DTR which is in doing transfer in the delivery process bool suspend(); /// Did an error happen? bool error() const { return (error_status != DTRErrorStatus::NONE_ERROR); } /// Returns true if this DTR is about to go into the pre-processor bool is_destined_for_pre_processor() const; /// Returns true if this DTR is about to go into the post-processor bool is_destined_for_post_processor() const; /// Returns true if this DTR is about to go into delivery bool is_destined_for_delivery() const; /// Returns true if this DTR just came from the pre-processor bool came_from_pre_processor() const; /// Returns true if this DTR just came from the post-processor bool came_from_post_processor() const; /// Returns true if this DTR just came from delivery bool came_from_delivery() const; /// Returns true if this DTR just came from the generator bool came_from_generator() const; /// Returns true if this DTR is in a final state (finished, failed or cancelled) bool is_in_final_state() const; /// Get the performance log Arc::JobPerfLog& get_job_perf_log() { return perf_log; }; /// Get the performance log record Arc::JobPerfRecord& get_job_perf_record() { return perf_record; }; }; /// Helper method to create smart pointer, only for swig bindings DTR_ptr createDTRPtr(const std::string& source, const std::string& destination, const Arc::UserConfig& usercfg, const std::string& jobid, const uid_t& uid, std::list const& logs, const std::string& logname = std::string("DTR")); /// Helper method to create smart pointer, only for swig bindings DTRLogger createDTRLogger(Arc::Logger& parent, const std::string& subdomain); } // namespace DataStaging #endif /*DTR_H_*/ nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/DataDelivery.cpp0000644000000000000000000000013214152153376024510 xustar000000000000000030 mtime=1638455038.403645944 30 atime=1638455038.499647387 30 ctime=1638455098.850554187 nordugrid-arc-6.14.0/src/libs/data-staging/DataDelivery.cpp0000644000175000002070000003134214152153376024500 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include "DataDeliveryComm.h" #include "DataDelivery.h" namespace DataStaging { Arc::Logger DataDelivery::logger(Arc::Logger::getRootLogger(), "DataStaging.DataDelivery"); /// Wrapper class around DataDeliveryComm class DataDelivery::delivery_pair_t { public: DTR_ptr dtr; TransferParameters params; DataDeliveryComm* comm; bool cancelled; Arc::SimpleCounter thread_count; delivery_pair_t(DTR_ptr request, const TransferParameters& params); ~delivery_pair_t(); void start(); }; DataDelivery::delivery_pair_t::delivery_pair_t(DTR_ptr request, const TransferParameters& params) :dtr(request),params(params),comm(NULL),cancelled(false) {} DataDelivery::delivery_pair_t::~delivery_pair_t() { if (comm) delete comm; } void DataDelivery::delivery_pair_t::start() { comm = DataDeliveryComm::CreateInstance(dtr, params); } DataDelivery::DataDelivery(): delivery_state(INITIATED) { } bool DataDelivery::start() { if(delivery_state == RUNNING || delivery_state == TO_STOP) return false; delivery_state = RUNNING; Arc::CreateThreadFunction(&main_thread,this); return true; } void 
DataDelivery::receiveDTR(DTR_ptr dtr) { if(!(*dtr)) { logger.msg(Arc::ERROR, "Received invalid DTR"); dtr->set_error_status(DTRErrorStatus::INTERNAL_LOGIC_ERROR, DTRErrorStatus::ERROR_UNKNOWN, "Invalid DTR"); dtr->set_status(DTRStatus::TRANSFERRED); DTR::push(dtr, SCHEDULER); return; } dtr->get_logger()->msg(Arc::INFO, "Delivery received new DTR %s with source: %s, destination: %s", dtr->get_id(), dtr->get_source()->CurrentLocation().str(), dtr->get_destination()->CurrentLocation().str()); dtr->set_status(DTRStatus::TRANSFERRING); delivery_pair_t* d = new delivery_pair_t(dtr, transfer_params); dtr_list_lock.lock(); dtr_list.push_back(d); dtr_list_lock.unlock(); cond.signal(); return; } bool DataDelivery::cancelDTR(DTR_ptr request) { if(!request) { logger.msg(Arc::ERROR, "Received no DTR"); return false; } if(!(*request)) { logger.msg(Arc::ERROR, "Received invalid DTR"); request->set_status(DTRStatus::ERROR); return false; } dtr_list_lock.lock(); for (std::list::iterator i = dtr_list.begin(); i != dtr_list.end(); ++i) { delivery_pair_t* ip = *i; if (ip->dtr->get_id() == request->get_id()) { request->get_logger()->msg(Arc::INFO, "Cancelling DTR %s with source: %s, destination: %s", request->get_id(), request->get_source()->str(), request->get_destination()->str()); ip->cancelled = true; ip->dtr->set_status(DTRStatus::TRANSFERRING_CANCEL); dtr_list_lock.unlock(); cond.signal(); return true; } } // DTR is not in the active transfer list, probably because it just finished dtr_list_lock.unlock(); request->get_logger()->msg(Arc::WARNING, "DTR %s requested cancel but no active transfer", request->get_id()); // if request is already TRANSFERRED, no need to push to Scheduler again if (request->get_status() != DTRStatus::TRANSFERRED) { request->set_status(DTRStatus::TRANSFERRED); DTR::push(request, SCHEDULER); } return true; } bool DataDelivery::stop() { if(delivery_state != RUNNING) return false; delivery_state = TO_STOP; cond.signal(); run_signal.wait(); delivery_state = STOPPED; return true; } void DataDelivery::SetTransferParameters(const TransferParameters& params) { transfer_params = params; } void DataDelivery::start_delivery(void* arg) { delivery_pair_t* dp = (delivery_pair_t*)arg; dp->start(); } void DataDelivery::stop_delivery(void* arg) { delivery_pair_t* dp = (delivery_pair_t*)arg; delete dp->comm; dp->comm = NULL; // In case transfer finished before getting cancel signal, delete destination if (dp->cancelled || dp->dtr->error()) dp->dtr->get_destination()->Remove(); } bool DataDelivery::delete_delivery_pair(delivery_pair_t* dp) { bool res = Arc::CreateThreadFunction(&stop_delivery, dp, &dp->thread_count); if (res) { res = dp->thread_count.wait(300*1000); } if (res) delete dp; return res; } // Delete DTR destination, called after losing contact with delivery process static void delete_dtr_destination(DTR_ptr dtr) { Arc::URL dest(dtr->get_destination()->CurrentLocation()); // Check for TURL if (!dtr->get_destination()->TransferLocations().empty()) { dest = dtr->get_destination()->TransferLocations().front(); } // Check for cache file if ((dtr->get_cache_state() == CACHEABLE) && !dtr->get_cache_file().empty()) { dest = dtr->get_cache_file(); } dtr->get_logger()->msg(Arc::VERBOSE, "Cleaning up after failure: deleting %s", dest.str()); Arc::DataHandle h(dest, dtr->get_usercfg()); if (h) h->Remove(); } void DataDelivery::main_thread (void* arg) { DataDelivery* it = (DataDelivery*)arg; it->main_thread(); } void DataDelivery::main_thread (void) { // disconnect from root logger so // messages are 
logged to per-DTR Logger Arc::Logger::getRootLogger().setThreadContext(); Arc::Logger::getRootLogger().removeDestinations(); Arc::Logger::getRootLogger().setThreshold(DTR::LOG_LEVEL); while(delivery_state != TO_STOP){ dtr_list_lock.lock(); std::list::iterator d = dtr_list.begin(); dtr_list_lock.unlock(); for(;;) { dtr_list_lock.lock(); if(d == dtr_list.end()) { dtr_list_lock.unlock(); break; } dtr_list_lock.unlock(); delivery_pair_t* dp = *d; // first check for cancellation if (dp->cancelled) { dtr_list_lock.lock(); d = dtr_list.erase(d); dtr_list_lock.unlock(); // deleting delivery_pair_t kills the spawned process // Do this before passing back to Scheduler to avoid race condition // of DTR being deleted before Comm object has finished with it. // With ThreadedPointer this may not be a problem any more. DTR_ptr tmp = dp->dtr; if (!delete_delivery_pair(dp)) { tmp->get_logger()->msg(Arc::ERROR, "Failed to delete delivery object or deletion timed out"); } tmp->get_job_perf_record().End("SchedulerTransferTime_"+tmp->get_delivery_endpoint().Host()); tmp->set_status(DTRStatus::TRANSFERRED); DTR::push(tmp, SCHEDULER); continue; } // check for new transfer if (!dp->comm) { dp->dtr->get_job_perf_record().Start(dp->dtr->get_short_id()); // Connecting to a remote delivery service can hang in rare cases, // so launch a separate thread with a timeout bool res = Arc::CreateThreadFunction(&start_delivery, dp, &dp->thread_count); if (res) { res = dp->thread_count.wait(300*1000); } if (!res) { // error or timeout - in this case do not delete dp since if the // thread timed out it may wake up at some point. Better to have a // small memory leak than seg fault. dtr_list_lock.lock(); d = dtr_list.erase(d); dtr_list_lock.unlock(); DTR_ptr tmp = dp->dtr; tmp->set_error_status(DTRErrorStatus::INTERNAL_PROCESS_ERROR, DTRErrorStatus::NO_ERROR_LOCATION, "Failed to start thread to start delivery or thread timed out"); tmp->get_job_perf_record().End("SchedulerTransferTime_"+tmp->get_delivery_endpoint().Host()); tmp->set_status(DTRStatus::TRANSFERRED); DTR::push(tmp, SCHEDULER); } else { dtr_list_lock.lock(); ++d; dtr_list_lock.unlock(); } continue; } // ongoing transfer - get status DataDeliveryComm::Status status; status = dp->comm->GetStatus(); dp->dtr->set_bytes_transferred(status.transferred); if((status.commstatus == DataDeliveryComm::CommExited) || (status.commstatus == DataDeliveryComm::CommClosed) || (status.commstatus == DataDeliveryComm::CommFailed)) { // Transfer finished - either successfully or with error dtr_list_lock.lock(); d = dtr_list.erase(d); dtr_list_lock.unlock(); if ((status.commstatus == DataDeliveryComm::CommFailed) || (status.error != DTRErrorStatus::NONE_ERROR)) { if (status.error == DTRErrorStatus::NONE_ERROR) { // Lost track of process - delete destination so it can be tried again delete_dtr_destination(dp->dtr); status.error = DTRErrorStatus::INTERNAL_PROCESS_ERROR; } dp->dtr->set_error_status(status.error,status.error_location, status.error_desc[0]?status.error_desc:dp->comm->GetError().c_str()); } else if (status.checksum) { dp->dtr->get_destination()->SetCheckSum(status.checksum); } dp->dtr->get_logger()->msg(Arc::INFO, "Transfer finished: %llu bytes transferred %s", status.transferred, (status.checksum[0] ? 
": checksum "+std::string(status.checksum) : " ")); timespec dummy; dp->dtr->get_job_perf_log().Log("DeliveryTransferTime_"+dp->dtr->get_delivery_endpoint().Host(), dp->dtr->get_short_id()+"\t"+Arc::tostring(status.transfer_time), dummy, dummy); dp->dtr->set_transfer_time(status.transfer_time); DTR_ptr tmp = dp->dtr; if (!delete_delivery_pair(dp)) { tmp->get_logger()->msg(Arc::ERROR, "Failed to delete delivery object or deletion timed out"); } tmp->get_job_perf_record().End("SchedulerTransferTime_"+tmp->get_delivery_endpoint().Host()); tmp->set_status(DTRStatus::TRANSFERRED); DTR::push(tmp, SCHEDULER); continue; } if(!(*(dp->comm))) { // Error happened - either delivery process is stuck or could not start dtr_list_lock.lock(); d = dtr_list.erase(d); dtr_list_lock.unlock(); std::string comm_err = dp->comm->GetError(); if (status.commstatus == DataDeliveryComm::CommInit) { if (comm_err.empty()) comm_err = "Failed to start delivery process"; if (dp->dtr->get_delivery_endpoint() == DTR::LOCAL_DELIVERY) { // Serious problem, so mark permanent error dp->dtr->set_error_status(DTRErrorStatus::INTERNAL_LOGIC_ERROR, DTRErrorStatus::ERROR_TRANSFER, comm_err); } else { // Failing to start on remote service should be retried dp->dtr->add_problematic_delivery_service(dp->dtr->get_delivery_endpoint()); dp->dtr->set_error_status(DTRErrorStatus::INTERNAL_PROCESS_ERROR, DTRErrorStatus::ERROR_TRANSFER, comm_err); } } else { if (comm_err.empty()) comm_err = "Connection with delivery process lost"; // delete destination so it can be tried again delete_dtr_destination(dp->dtr); dp->dtr->set_error_status(DTRErrorStatus::INTERNAL_PROCESS_ERROR, DTRErrorStatus::ERROR_TRANSFER, comm_err); } DTR_ptr tmp = dp->dtr; if (!delete_delivery_pair(dp)) { tmp->get_logger()->msg(Arc::ERROR, "Failed to delete delivery object or deletion timed out"); } tmp->get_job_perf_record().End("SchedulerTransferTime_"+tmp->get_delivery_endpoint().Host()); tmp->set_status(DTRStatus::TRANSFERRED); DTR::push(tmp, SCHEDULER); continue; } dtr_list_lock.lock(); ++d; dtr_list_lock.unlock(); } // Go through main loop every half a second or when new transfer arrives cond.wait(100); } // Kill any transfers still running dtr_list_lock.lock(); for (std::list::iterator d = dtr_list.begin(); d != dtr_list.end();) { DTR_ptr tmp = (*d)->dtr; if (!delete_delivery_pair(*d)) { tmp->get_logger()->msg(Arc::ERROR, "Failed to delete delivery object or deletion timed out"); } d = dtr_list.erase(d); } dtr_list_lock.unlock(); logger.msg(Arc::INFO, "Data delivery loop exited"); run_signal.signal(); } } // namespace DataStaging nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/Scheduler.h0000644000000000000000000000013214152153376023516 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.847554142 nordugrid-arc-6.14.0/src/libs/data-staging/Scheduler.h0000644000175000002070000002716314152153376023514 0ustar00mockbuildmock00000000000000#ifndef SCHEDULER_H_ #define SCHEDULER_H_ #include #include #include #include #include #include "DTR.h" #include "DTRList.h" #include "Processor.h" #include "DataDelivery.h" #include "TransferShares.h" namespace DataStaging { /// The Scheduler is the control centre of the data staging framework. /** * The Scheduler manages a global list of DTRs and schedules when they should * go into the next state or be sent to other processes. The DTR priority is * used to decide each DTR's position in a queue. 
* \ingroup datastaging * \headerfile Scheduler.h arc/data-staging/Scheduler.h */ class Scheduler: public DTRCallback { private: /// All the DTRs the scheduler is aware of. /** The DTR comes to this list once received from the generator * and leaves the list only when pushed back to the generator. */ DTRList DtrList; /// A list of jobs that have been requested to be cancelled. /** External threads add items to this list, and the Scheduler * processes it during the main loop. */ std::list cancelled_jobs; /// A list of DTRs to process std::list events; /// Map of transfer shares to staged DTRs. Filled each event processing loop std::map > staged_queue; /// A lock for the cancelled jobs list Arc::SimpleCondition cancelled_jobs_lock; /// Configuration of transfer shares TransferSharesConf transferSharesConf; /// URLMap containing information on any local mappings defined in the configuration Arc::URLMap url_map; /// Preferred pattern to match replicas defined in configuration std::string preferred_pattern; /// Lock to protect multi-threaded access to start() and stop() Arc::SimpleCondition state_lock; /// Lock for events list Arc::SimpleCondition event_lock; /// Condition to signal end of running Arc::SimpleCondition run_signal; /// Condition to signal end of dump thread Arc::SimpleCondition dump_signal; /// Limit on number of DTRs in pre-processor unsigned int PreProcessorSlots; /// Limit on number of DTRs in delivery unsigned int DeliverySlots; /// Limit on number of DTRs in post-processor unsigned int PostProcessorSlots; /// Limit on number of emergency DTRs in each state unsigned int EmergencySlots; /// Limit on number of staged-prepared files, per share unsigned int StagedPreparedSlots; /// Where to dump DTR state. Currently only a path to a file is supported. std::string dumplocation; /// Performance metrics logger Arc::JobPerfLog job_perf_log; /// Endpoints of delivery services from configuration std::vector configured_delivery_services; /// Map of delivery services and directories they can access, filled after /// querying all services when the first DTR is processed std::map > usable_delivery_services; /// Timestamp of last check of delivery services Arc::Time delivery_last_checked; /// File size limit (in bytes) under which local transfer is used unsigned long long int remote_size_limit; /// Counter of transfers per delivery service std::map delivery_hosts; /// Logger object static Arc::Logger logger; /// Root logger destinations, to use when logging non-DTR specific messages std::list root_destinations; /// Flag describing scheduler state. Used to decide whether to keep running main loop. 
ProcessState scheduler_state; /// Processor object Processor processor; /// Delivery object DataDelivery delivery; /// Static instance of Scheduler static Scheduler* scheduler_instance; /// Lock for multiple threads getting static Scheduler instance static Glib::Mutex instance_lock; /// Copy constructor is private because Scheduler should not be copied Scheduler(const Scheduler&); // should not happen /// Assignment operator is private because Scheduler should not be copied Scheduler& operator=(const Scheduler&); // should not happen /* Functions to process every state of the DTR during normal workflow */ /// Process a DTR in the NEW state void ProcessDTRNEW(DTR_ptr request); /// Process a DTR in the CACHE_WAIT state void ProcessDTRCACHE_WAIT(DTR_ptr request); /// Process a DTR in the CACHE_CHECKED state void ProcessDTRCACHE_CHECKED(DTR_ptr request); /// Process a DTR in the RESOLVED state void ProcessDTRRESOLVED(DTR_ptr request); /// Process a DTR in the REPLICA_QUERIED state void ProcessDTRREPLICA_QUERIED(DTR_ptr request); /// Process a DTR in the PRE_CLEANED state void ProcessDTRPRE_CLEANED(DTR_ptr request); /// Process a DTR in the STAGING_PREPARING_WAIT state void ProcessDTRSTAGING_PREPARING_WAIT(DTR_ptr request); /// Process a DTR in the STAGED_PREPARED state void ProcessDTRSTAGED_PREPARED(DTR_ptr request); /// Process a DTR in the TRANSFERRED state void ProcessDTRTRANSFERRED(DTR_ptr request); /// Process a DTR in the REQUEST_RELEASED state void ProcessDTRREQUEST_RELEASED(DTR_ptr request); /// Process a DTR in the REPLICA_REGISTERED state void ProcessDTRREPLICA_REGISTERED(DTR_ptr request); /// Process a DTR in the CACHE_PROCESSED state void ProcessDTRCACHE_PROCESSED(DTR_ptr request); /// Process a DTR in a final state /* This is a special function to deal with states after which * the DTR is returned to the generator, i.e. DONE, ERROR, CANCELLED */ void ProcessDTRFINAL_STATE(DTR_ptr request); /// Log a message to the root logger. This sends the message to the log /// destinations attached to the root logger at the point the Scheduler /// was started. void log_to_root_logger(Arc::LogLevel level, const std::string& message); /// Call the appropriate Process method depending on the DTR state void map_state_and_process(DTR_ptr request); /// Maps the DTR to the appropriate state when it is cancelled. /** This is a separate function, since cancellation request * can arrive at any time, breaking the normal workflow. */ void map_cancel_state(DTR_ptr request); /// Map a DTR stuck in a processing state to new state from which it can /// recover and retry. void map_stuck_state(DTR_ptr request); /// Choose a delivery service for the DTR, based on the file system paths /// each service can access. These paths are determined by calling all the /// configured services when the first DTR is received. void choose_delivery_service(DTR_ptr request); /// Go through all DTRs waiting to go into a processing state and decide /// whether to push them into that state, depending on shares and limits. void revise_queues(); /// Add a new event for the Scheduler to process. Used in receiveDTR(). void add_event(DTR_ptr event); /// Process the pool of DTRs which have arrived from other processes void process_events(void); /// Move to the next replica in the DTR. /** Utility function which should be called in the case of error * if the next replica should be tried. It takes care of sending * the DTR to the appropriate state, depending on whether or not * there are more replicas to try. 
*/ void next_replica(DTR_ptr request); /// Handle a DTR whose source is mapped to another URL. /** If a file is mapped, this method should be called to deal * with the mapping. It sets the mapped_file attribute of * request to mapped_url. Returns true if the processing was * successful. */ bool handle_mapped_source(DTR_ptr request, Arc::URL& mapped_url); /// Thread method for dumping state static void dump_thread(void* arg); /// Static version of main_thread, used when thread is created static void main_thread(void* arg); /// Main thread, which runs until stopped void main_thread(void); public: /// Get static instance of Scheduler, to use one DTR instance with multiple generators. /** * Configuration of Scheduler by Set* methods can only be done before * start() is called, so undetermined behaviour can result from multiple * threads simultaneously calling Set* then start(). It is safer to make * sure that all threads use the same configuration (calling start() twice * is harmless). It is also better to make sure that threads call stop() in * a roughly coordinated way, i.e. all generators stop at the same time. */ static Scheduler* getInstance(); /// Constructor, to be used when only one Generator uses this Scheduler. Scheduler(); /// Destructor calls stop(), which cancels all DTRs and waits for them to complete ~Scheduler() { stop(); }; /* The following Set/Add methods are only effective when called before start() */ /// Set number of slots for processor and delivery stages void SetSlots(int pre_processor = 0, int post_processor = 0, int delivery = 0, int emergency = 0, int staged_prepared = 0); /// Add URL mapping entry. See Arc::URLMap. void AddURLMapping(const Arc::URL& template_url, const Arc::URL& replacement_url, const Arc::URL& access_url = Arc::URL()); /// Replace all URL mapping entries void SetURLMapping(const Arc::URLMap& mapping = Arc::URLMap()); /// Set the preferred pattern for ordering replicas. /** * This pattern will be used in the case of an index service URL with * multiple physical replicas and allows sorting of those replicas in order * of preference. It consists of one or more patterns separated by a pipe * character (|) listed in order of preference. If the dollar character ($) * is used at the end of a pattern, the pattern will be matched to the end * of the hostname of the replica. Example: "srm://myhost.org|.uk$|.ch$" */ void SetPreferredPattern(const std::string& pattern); /// Set TransferShares configuration void SetTransferSharesConf(const TransferSharesConf& share_conf); /// Set transfer limits void SetTransferParameters(const TransferParameters& params); /// Set the list of delivery services. DTR::LOCAL_DELIVERY means local Delivery. void SetDeliveryServices(const std::vector& endpoints); /// Set the remote transfer size limit void SetRemoteSizeLimit(unsigned long long int limit); /// Set location for periodic dump of DTR state (only file paths currently supported) void SetDumpLocation(const std::string& location); /// Set JobPerfLog object for performance metrics logging void SetJobPerfLog(const Arc::JobPerfLog& perf_log); /// Start scheduling activity. /** * This method must be called after all configuration parameters are set * properly. Scheduler can be stopped either by calling stop() method or * by destroying its instance. */ bool start(void); /// Callback method implemented from DTRCallback. 
/** * This method is called by the generator when it wants to pass a DTR * to the scheduler and when other processes send a DTR back to the * scheduler after processing. */ virtual void receiveDTR(DTR_ptr dtr); /// Tell the Scheduler to cancel all the DTRs in the given job description bool cancelDTRs(const std::string& jobid); /// Tell the Scheduler to shut down all threads and exit. /** * All active DTRs are cancelled and this method waits until they finish * (all DTRs go to CANCELLED state) */ bool stop(); }; } // namespace DataStaging #endif /*SCHEDULER_H_*/ nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/TransferShares.cpp0000644000000000000000000000013214152153376025065 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.858554307 nordugrid-arc-6.14.0/src/libs/data-staging/TransferShares.cpp0000644000175000002070000001322114152153376025051 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include "TransferShares.h" #include #include namespace DataStaging { TransferSharesConf::TransferSharesConf(const std::string& type, const std::map& ref_shares) { set_share_type(type); set_reference_shares(ref_shares); } TransferSharesConf::TransferSharesConf() : shareType(NONE) { ReferenceShares["_default"] = 50; } void TransferSharesConf::set_share_type(const std::string& type) { if (Arc::lower(type) == "dn") shareType = USER; else if (Arc::lower(type) == "voms:vo") shareType = VO; else if (Arc::lower(type) == "voms:role") shareType = ROLE; else if (Arc::lower(type) == "voms:group") shareType = GROUP; else shareType = NONE; } bool TransferSharesConf::is_configured(const std::string& ShareToCheck) { return (ReferenceShares.find(ShareToCheck) != ReferenceShares.end()); } int TransferSharesConf::get_basic_priority(const std::string& ShareToCheck) { if (!is_configured(ShareToCheck)) return ReferenceShares["_default"]; return ReferenceShares[ShareToCheck]; } void TransferSharesConf::set_reference_share(const std::string& RefShare, int Priority) { ReferenceShares[RefShare] = Priority; } void TransferSharesConf::set_reference_shares(const std::map& shares) { ReferenceShares = shares; // there should always be a _default share defined if (ReferenceShares.find("_default") == ReferenceShares.end()) ReferenceShares["_default"] = 50; } std::string TransferSharesConf::conf() const { std::string conf; conf += " Share type: "; switch (shareType){ case USER: conf += "DN"; break; case VO: conf += "VOMS VO"; break; case GROUP: conf += "VOMS group"; break; case ROLE: conf += "VOMS role"; break; case NONE: conf += "None"; break; default: // Something really strange conf += "unknown"; break; } if (!ReferenceShares.empty()) { for (std::map::const_iterator i = ReferenceShares.begin(); i != ReferenceShares.end(); ++i) { conf += "\n Reference share " + i->first + ", priority " + Arc::tostring(i->second); } } return conf; } std::string TransferSharesConf::extract_share_info(DTR_ptr DTRToExtract) { DTRCredentialInfo cred = DTRToExtract->get_credential_info(); switch (shareType){ case USER: return cred.getDN(); case VO: return cred.extractVOMSVO(); case GROUP: return cred.extractVOMSGroup(); case ROLE: return cred.extractVOMSRole(); case NONE: return "_default"; default: // Something really strange return ""; } } TransferShares::TransferShares(const TransferSharesConf& shares_conf) : conf(shares_conf) { ActiveShares.clear(); ActiveSharesSlots.clear(); } void TransferShares::set_shares_conf(const TransferSharesConf& shares_conf) { conf = 
shares_conf; } void TransferShares::calculate_shares(int TotalNumberOfSlots) { ActiveSharesSlots.clear(); // clear active shares with 0 count // and compute the summarized priority of other active shares std::map::iterator i; int SummarizedPriority = 0; int TotalQueued = 0; for (i = ActiveShares.begin(); i != ActiveShares.end(); ){ if (i->second == 0) { ActiveShares.erase(i++); } else { SummarizedPriority += conf.get_basic_priority(i->first); TotalQueued += i->second; ++i; } } int slots_used = 0; // first calculate shares based on the share priority for (i = ActiveShares.begin(); i != ActiveShares.end(); i++){ // Number of slots for this share is its priority divided by total // priorities of all active shares multiplied by the total number of slots int slots = int(::floor(float(conf.get_basic_priority(i->first)) / float(SummarizedPriority) * float(TotalNumberOfSlots))); if (slots > i->second) { // Don't assign more slots than the share needs ActiveSharesSlots[i->first] = i->second; } else if (slots == 0) { // Some shares can receive 0 slots. // It can happen when there are lots of shares active // or one share has enormously big priority. // There should be no 0 in the number of slots, so every // share has at least theoretical possibility to start ActiveSharesSlots[i->first] = 1; } else { ActiveSharesSlots[i->first] = slots; } slots_used += ActiveSharesSlots[i->first]; } // now assign unused slots among shares with more DTRs than slots while (slots_used < TotalQueued && slots_used < TotalNumberOfSlots) { // TODO share slots using priorities for (i = ActiveShares.begin(); i != ActiveShares.end(); i++){ if (ActiveSharesSlots[i->first] < ActiveShares[i->first]) { ActiveSharesSlots[i->first]++; slots_used++; if (slots_used >= TotalQueued || slots_used >= TotalNumberOfSlots) break; } } } } void TransferShares::increase_transfer_share(const std::string& ShareToIncrease) { ActiveShares[ShareToIncrease]++; } void TransferShares::decrease_transfer_share(const std::string& ShareToDecrease) { ActiveShares[ShareToDecrease]--; } void TransferShares::decrease_number_of_slots(const std::string& ShareToDecrease) { ActiveSharesSlots[ShareToDecrease]--; } bool TransferShares::can_start(const std::string& ShareToStart) { return (ActiveSharesSlots[ShareToStart] > 0); } std::map TransferShares::active_shares() const { return ActiveShares; } } nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/Scheduler.cpp0000644000000000000000000000013214152153376024051 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.857554292 nordugrid-arc-6.14.0/src/libs/data-staging/Scheduler.cpp0000644000175000002070000017363014152153376024050 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include "Scheduler.h" #include "DataDeliveryRemoteComm.h" namespace DataStaging { Arc::Logger Scheduler::logger(Arc::Logger::getRootLogger(), "DataStaging.Scheduler"); Scheduler* Scheduler::scheduler_instance = NULL; Glib::Mutex Scheduler::instance_lock; Scheduler* Scheduler::getInstance() { Glib::Mutex::Lock lock(instance_lock); if (!scheduler_instance) { scheduler_instance = new Scheduler(); } return scheduler_instance; } Scheduler::Scheduler(): remote_size_limit(0), scheduler_state(INITIATED) { // Conservative defaults PreProcessorSlots = 20; DeliverySlots = 10; PostProcessorSlots = 20; EmergencySlots = 2; StagedPreparedSlots = 200; } void Scheduler::SetSlots(int pre_processor, int post_processor, int delivery, int 
emergency, int staged_prepared) { if (scheduler_state == INITIATED) { if(pre_processor > 0) PreProcessorSlots = pre_processor; if(post_processor > 0) PostProcessorSlots = post_processor; if(delivery > 0) DeliverySlots = delivery; if(emergency > 0) EmergencySlots = emergency; if(staged_prepared > 0) StagedPreparedSlots = staged_prepared; } } void Scheduler::AddURLMapping(const Arc::URL& template_url, const Arc::URL& replacement_url, const Arc::URL& access_url) { if (scheduler_state == INITIATED) url_map.add(template_url,replacement_url,access_url); // else should log warning, but logger is disconnected } void Scheduler::SetURLMapping(const Arc::URLMap& mapping) { if (scheduler_state == INITIATED) url_map = mapping; } void Scheduler::SetPreferredPattern(const std::string& pattern) { if (scheduler_state == INITIATED) preferred_pattern = pattern; } void Scheduler::SetTransferSharesConf(const TransferSharesConf& share_conf) { if (scheduler_state == INITIATED) transferSharesConf = share_conf; } void Scheduler::SetTransferParameters(const TransferParameters& params) { delivery.SetTransferParameters(params); } void Scheduler::SetDeliveryServices(const std::vector& endpoints) { if (scheduler_state == INITIATED) configured_delivery_services = endpoints; } void Scheduler::SetRemoteSizeLimit(unsigned long long int limit) { if (scheduler_state == INITIATED) remote_size_limit = limit; } void Scheduler::SetDumpLocation(const std::string& location) { dumplocation = location; } void Scheduler::SetJobPerfLog(const Arc::JobPerfLog& perf_log) { job_perf_log = perf_log; } bool Scheduler::start(void) { state_lock.lock(); if(scheduler_state == RUNNING || scheduler_state == TO_STOP) { state_lock.unlock(); return false; } scheduler_state = RUNNING; state_lock.unlock(); processor.start(); delivery.start(); // if no delivery services set, then use local if (configured_delivery_services.empty()) { std::vector services; services.push_back(DTR::LOCAL_DELIVERY); configured_delivery_services = services; } Arc::CreateThreadFunction(&main_thread, this); return true; } void Scheduler::log_to_root_logger(Arc::LogLevel level, const std::string& message) { Arc::Logger::getRootLogger().addDestinations(root_destinations); logger.msg(level, "%s", message); Arc::Logger::getRootLogger().removeDestinations(); } /* Function to sort the list of the pointers to DTRs * according to the priorities the DTRs have. * DTRs with higher priority go first to the beginning, * with lower -- to the end */ bool dtr_sort_predicate(DTR_ptr dtr1, DTR_ptr dtr2) { return dtr1->get_priority() > dtr2->get_priority(); } void Scheduler::next_replica(DTR_ptr request) { if (!request->error()) { // bad logic request->set_error_status(DTRErrorStatus::INTERNAL_LOGIC_ERROR, DTRErrorStatus::ERROR_UNKNOWN, "Bad logic: next_replica called when there is no error"); // TODO: how to deal with these internal errors? 
return; } // Logic of whether to go for next source or destination bool source_error(false); if (request->get_error_status().GetErrorLocation() == DTRErrorStatus::ERROR_SOURCE) source_error = true; else if (request->get_error_status().GetErrorLocation() == DTRErrorStatus::ERROR_DESTINATION) source_error = false; else if (request->get_source()->IsIndex() && !request->get_destination()->IsIndex()) source_error = true; else if (!request->get_source()->IsIndex() && request->get_destination()->IsIndex()) source_error = false; else if (!request->get_source()->LastLocation() && request->get_destination()->LastLocation()) source_error = true; else if (request->get_source()->LastLocation() && !request->get_destination()->LastLocation()) source_error = false; else // Unknown error location, and either both are index services with remaining // replicas or neither are index services. Choose source in this case. source_error = true; bool replica_exists; if (source_error) { // reset mapped file request->set_mapped_source(); replica_exists = request->get_source()->NextLocation(); } else { replica_exists = request->get_destination()->NextLocation(); } if (replica_exists) { // Use next replica // Clear the error flag to resume normal workflow request->reset_error_status(); request->get_logger()->msg(Arc::INFO, "Using next %s replica", source_error ? istring("source") : istring("destination")); // Perhaps not necessary to query replica again if the error was in the destination // but the error could have been caused by a source problem during transfer request->set_status(DTRStatus::QUERY_REPLICA); } else { // No replicas - move to appropriate state for the post-processor to do cleanup request->get_logger()->msg(Arc::ERROR, "No more %s replicas", source_error ? istring("source") : istring("destination")); if (request->get_destination()->IsIndex()) { request->get_logger()->msg(Arc::VERBOSE, "Will clean up pre-registered destination"); request->set_status(DTRStatus::REGISTER_REPLICA); } else if (!(request->get_cache_parameters().cache_dirs.empty() && request->get_cache_parameters().readonly_cache_dirs.empty()) && (request->get_cache_state() == CACHE_ALREADY_PRESENT || request->get_cache_state() == CACHEABLE)) { request->get_logger()->msg(Arc::VERBOSE, "Will release cache locks"); request->set_status(DTRStatus::PROCESS_CACHE); } else { // nothing to clean up - set to end state request->get_logger()->msg(Arc::VERBOSE, "Moving to end of data staging"); request->set_status(DTRStatus::CACHE_PROCESSED); } } } bool Scheduler::handle_mapped_source(DTR_ptr request, Arc::URL& mapped_url) { // The DTR source is mapped to another place so set the mapped location in request. // If mapped_url is set delivery will use it as source request->get_logger()->msg(Arc::INFO, "Source is mapped to %s", mapped_url.str()); if (!request->get_source()->ReadOnly() && mapped_url.Protocol() == "link") { // read-write access means user can potentially modify source, so copy instead request->get_logger()->msg(Arc::WARNING, "Cannot link to source which can be modified, will copy instead"); mapped_url.ChangeProtocol("file"); } if (mapped_url.Protocol() == "link") { // If the map is a link then do the link here and set to TRANSFERRED. Local file // copies should still have to wait in the queue. For links we should also // turn off caching, remembering that we still need to release any cache // locks later if necessary. if (!request->get_destination()->Local()) { request->get_logger()->msg(Arc::ERROR, "Cannot link to a remote destination. 
Will not use mapped URL"); } else { request->get_logger()->msg(Arc::INFO, "Linking mapped file"); // Access session dir under mapped user if (!Arc::FileLink(mapped_url.Path(), request->get_destination()->CurrentLocation().Path(), request->get_local_user().get_uid(), request->get_local_user().get_gid(), true)) { request->get_logger()->msg(Arc::ERROR, "Failed to create link: %s. Will not use mapped URL", Arc::StrError(errno)); } else { // successful link, so turn off caching, set to TRANSFERRED and return request->set_mapped_source(mapped_url.str()); if (request->get_cache_state() == CACHEABLE) request->set_cache_state(CACHE_NOT_USED); request->set_status(DTRStatus::TRANSFERRED); return true; } } } else { // Ready to copy mapped file // Assume that mapped urls are not index services or stageable // TODO: handle case when mapped url is index request->set_mapped_source(mapped_url.str()); request->set_status(DTRStatus::STAGED_PREPARED); return true; } return false; } void Scheduler::ProcessDTRNEW(DTR_ptr request){ request->get_logger()->msg(Arc::INFO, "Scheduler received new DTR %s with source: %s," " destination: %s, assigned to transfer share %s with priority %d", request->get_id(), request->get_source()->str(), request->get_destination()->str(), request->get_transfer_share(), request->get_priority()); // Normal workflow is CHECK_CACHE if (request->get_cache_state() == NON_CACHEABLE || (request->get_cache_parameters().cache_dirs.empty() && request->get_cache_parameters().readonly_cache_dirs.empty())) { request->get_logger()->msg(Arc::VERBOSE, "File is not cacheable, was requested not to be cached or no cache available, skipping cache check"); request->set_status(DTRStatus::CACHE_CHECKED); } else { // Cache checking should have quite a long timeout as it may // take a long time to download a big file or there is a long delivery queue request->set_timeout(86400); request->get_logger()->msg(Arc::VERBOSE, "File is cacheable, will check cache"); if (DtrList.is_being_cached(request)) { Arc::Period cache_wait_period(10); request->get_logger()->msg(Arc::VERBOSE, "File is currently being cached, will wait %is", cache_wait_period.GetPeriod()); request->set_process_time(cache_wait_period); request->set_status(DTRStatus::CACHE_WAIT); } else { request->set_status(DTRStatus::CHECK_CACHE); } } } void Scheduler::ProcessDTRCACHE_WAIT(DTR_ptr request){ // The waiting time should be calculated within DTRList so // by the time we are here we know to query the cache again // If we timed out on it send to CACHE_PROCESSED where it // may be retried without caching if(request->get_timeout() < time(NULL)) { request->set_error_status(DTRErrorStatus::CACHE_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Timed out while waiting for cache for " + request->get_source()->str()); request->get_logger()->msg(Arc::ERROR, "Timed out while waiting for cache lock"); request->set_status(DTRStatus::CACHE_PROCESSED); } else if (DtrList.is_being_cached(request)) { // If the source is already being cached the priority of that DTR // will be raised by is_being_cached() if this DTR's priority is higher Arc::Period cache_wait_period(10); request->get_logger()->msg(Arc::VERBOSE, "File is currently being cached, will wait %is", cache_wait_period.GetPeriod()); request->set_process_time(cache_wait_period); } else { // Try to check cache again request->get_logger()->msg(Arc::VERBOSE, "Checking cache again"); request->set_status(DTRStatus::CHECK_CACHE); } } void Scheduler::ProcessDTRCACHE_CHECKED(DTR_ptr request){ // There's no need to check 
for a cache error here // If an error has occurred -- we just proceed with the normal // workflow as if it was not cached at all. // But we should clear the error flag if it was set by the pre-processor // setting timeout back to 1 hour, was set to 1 day in ProcessDTRNEW(). request->set_timeout(3600); request->reset_error_status(); if (request->get_cache_state() == CACHEABLE) DtrList.caching_started(request); if(request->get_cache_state() == CACHE_ALREADY_PRESENT){ // File is in place already. After the post-processor // the DTR is DONE. request->get_logger()->msg(Arc::VERBOSE, "Destination file is in cache"); request->set_status(DTRStatus::PROCESS_CACHE); } else if (request->get_source()->IsIndex() || request->get_destination()->IsIndex()) { // The normal workflow -- RESOLVE request->get_logger()->msg(Arc::VERBOSE, "Source and/or destination is index service, will resolve replicas"); request->set_status(DTRStatus::RESOLVE); } else { request->get_logger()->msg(Arc::VERBOSE, "Neither source nor destination are index services, will skip resolving replicas"); request->set_status(DTRStatus::RESOLVED); } } void Scheduler::ProcessDTRRESOLVED(DTR_ptr request){ if(request->error()){ // It's impossible to download anything, since no replica location is resolved // if cacheable, move to PROCESS_CACHE, the post-processor will do the cleanup if (request->get_cache_state() == CACHEABLE && !(request->get_cache_parameters().cache_dirs.empty() && request->get_cache_parameters().readonly_cache_dirs.empty())) { request->get_logger()->msg(Arc::ERROR, "Problem with index service, will release cache lock"); request->set_status(DTRStatus::PROCESS_CACHE); // else go to end state } else { request->get_logger()->msg(Arc::ERROR, "Problem with index service, will proceed to end of data staging"); request->set_status(DTRStatus::CACHE_PROCESSED); } } else { // Normal workflow is QUERY_REPLICA // Should we always do this? 
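// (Illustrative note: SortLocations() below reorders the resolved replicas according to the administrator-configured preferred_pattern (for example a local domain suffix) and the url_map, so that preferred or locally mapped replicas are queried first; the exact matching rules are those of the source endpoint's SortLocations() implementation.)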
// logic to choose best replica - sort according to configured preference request->get_source()->SortLocations(preferred_pattern, url_map); // Access latency is not known until replica is queried request->get_logger()->msg(Arc::VERBOSE, "Checking source file is present"); request->set_status(DTRStatus::QUERY_REPLICA); } } void Scheduler::ProcessDTRREPLICA_QUERIED(DTR_ptr request){ if(request->error()){ // go to next replica or exit with error request->get_logger()->msg(Arc::ERROR, "Error with source file, moving to next replica"); next_replica(request); return; } if (request->get_source()->CheckSize()) { // Log performance metric with size of DTR timespec dummy; job_perf_log.Log("DTRSize", request->get_short_id()+"\t"+Arc::tostring(request->get_source()->GetSize()), dummy, dummy); } // Check if the replica is mapped if (url_map) { Arc::URL mapped_url(request->get_source()->CurrentLocation()); if (url_map.map(mapped_url)) { if (handle_mapped_source(request, mapped_url)) return; } } if (request->get_mapped_source().empty() && request->get_source()->GetAccessLatency() == Arc::DataPoint::ACCESS_LATENCY_LARGE) { // If the current source location is long latency, try the next replica // TODO add this replica to the end of location list, so that if there // are problems with other replicas, we eventually come back to this one request->get_logger()->msg(Arc::INFO, "Replica %s has long latency, trying next replica", request->get_source()->CurrentLocation().str()); if (request->get_source()->LastLocation()) { request->get_logger()->msg(Arc::INFO, "No more replicas, will use %s", request->get_source()->CurrentLocation().str()); } else { request->get_source()->NextLocation(); request->get_logger()->msg(Arc::VERBOSE, "Checking replica %s", request->get_source()->CurrentLocation().str()); request->set_status(DTRStatus::QUERY_REPLICA); return; } } // Normal workflow is PRE_CLEAN state // Delete destination if requested in URL options and not replication if (!request->is_replication() && (request->get_destination()->GetURL().Option("overwrite") == "yes" || request->get_destination()->CurrentLocation().Option("overwrite") == "yes")) { request->get_logger()->msg(Arc::VERBOSE, "Overwrite requested - will pre-clean destination"); request->set_status(DTRStatus::PRE_CLEAN); } else { request->get_logger()->msg(Arc::VERBOSE, "No overwrite requested or allowed, skipping pre-cleaning"); request->set_status(DTRStatus::PRE_CLEANED); } } void Scheduler::ProcessDTRPRE_CLEANED(DTR_ptr request){ // If an error occurred in pre-cleaning, try to copy anyway if (request->error()) request->get_logger()->msg(Arc::INFO, "Pre-clean failed, will still try to copy"); request->reset_error_status(); if (request->get_source()->IsStageable() || request->get_destination()->IsStageable()) { // Normal workflow is STAGE_PREPARE // Need to set the timeout to prevent from waiting for too long request->set_timeout(3600); // processor will take care of staging source or destination or both request->get_logger()->msg(Arc::VERBOSE, "Source or destination requires staging"); request->set_status(DTRStatus::STAGE_PREPARE); } else { request->get_logger()->msg(Arc::VERBOSE, "No need to stage source or destination, skipping staging"); request->set_status(DTRStatus::STAGED_PREPARED); } } void Scheduler::ProcessDTRSTAGING_PREPARING_WAIT(DTR_ptr request){ // The waiting time should be calculated within DTRList so // by the time we are here we know to query the request again // If there's timeout -- it's error case if(request->get_timeout() < 
time(NULL)){ // With a special error status we signal to the post-processor // that after releasing request this DTR should go into // QUERY_REPLICA again if necessary // Here we can't tell at which end the timeout was, so make an educated guess if (request->get_source()->IsStageable() && !request->get_destination()->IsStageable()) request->set_error_status(DTRErrorStatus::STAGING_TIMEOUT_ERROR, DTRErrorStatus::ERROR_SOURCE, "Stage request for source file timed out"); else if (!request->get_source()->IsStageable() && request->get_destination()->IsStageable()) request->set_error_status(DTRErrorStatus::STAGING_TIMEOUT_ERROR, DTRErrorStatus::ERROR_DESTINATION, "Stage request for destination file timed out"); else // both endpoints are stageable - don't know the error location request->set_error_status(DTRErrorStatus::STAGING_TIMEOUT_ERROR, DTRErrorStatus::ERROR_UNKNOWN, "Stage request for source or destination file timed out"); // Let the post-processor do the job request->get_logger()->msg(Arc::ERROR, "Staging request timed out, will release request"); request->set_status(DTRStatus::RELEASE_REQUEST); } else { // Normal workflow is STAGE_PREPARE again request->get_logger()->msg(Arc::VERBOSE, "Querying status of staging request"); request->set_status(DTRStatus::STAGE_PREPARE); } } void Scheduler::ProcessDTRSTAGED_PREPARED(DTR_ptr request){ if(request->error()){ // We have to try another replica if the source failed to stage // but first we have to release any requests request->get_logger()->msg(Arc::VERBOSE, "Releasing requests"); request->set_status(DTRStatus::RELEASE_REQUEST); return; } if (url_map && request->get_mapped_source().empty() && request->get_source()->IsStageable()) { // check if any TURLs are mapped std::vector<Arc::URL> turls = request->get_source()->TransferLocations(); for (std::vector<Arc::URL>::iterator i = turls.begin(); i != turls.end(); ++i) { Arc::URL mapped_url(i->fullstr()); if (url_map.map(mapped_url)) { if (handle_mapped_source(request, mapped_url)) return; } } } // After normal workflow the DTR is ready for delivery request->get_logger()->msg(Arc::VERBOSE, "DTR is ready for transfer, moving to delivery queue"); // set long timeout for waiting for transfer slot // (setting timeouts for active transfers is done in Delivery) request->set_timeout(7200); request->set_status(DTRStatus::TRANSFER); } void Scheduler::ProcessDTRTRANSFERRED(DTR_ptr request){ // We don't check if an error has happened - if it has the post-processor // will take needed steps in RELEASE_REQUEST in any case. The error flag // will work now as a sign to return the DTR to QUERY_REPLICA again. 
// Delivery will clean up destination physical file on error if (request->error()) request->get_logger()->msg(Arc::ERROR, "Transfer failed: %s", request->get_error_status().GetDesc()); // Resuming normal workflow after the DTR has finished transferring // The next state is RELEASE_REQUEST // if cacheable and no cancellation or error, mark the DTR as CACHE_DOWNLOADED // Might be better to do this in delivery instead if (!request->cancel_requested() && !request->error() && request->get_cache_state() == CACHEABLE) request->set_cache_state(CACHE_DOWNLOADED); if (request->get_source()->IsStageable() || request->get_destination()->IsStageable()) { request->get_logger()->msg(Arc::VERBOSE, "Releasing request(s) made during staging"); request->set_status(DTRStatus::RELEASE_REQUEST); } else { request->get_logger()->msg(Arc::VERBOSE, "Neither source nor destination were staged, skipping releasing requests"); request->set_status(DTRStatus::REQUEST_RELEASED); } } void Scheduler::ProcessDTRREQUEST_RELEASED(DTR_ptr request){ // if the post-processor had troubles releasing the request, continue // normal workflow and the DTR will be cleaned up. If the error // originates from before (like Transfer errors, staging errors) // and is not from destination, we need to query another replica if (request->error() && request->get_error_status().GetLastErrorState() != DTRStatus::RELEASING_REQUEST) { request->get_logger()->msg(Arc::ERROR, "Trying next replica"); next_replica(request); } else if (request->get_destination()->IsIndex()) { // Normal workflow is REGISTER_REPLICA request->get_logger()->msg(Arc::VERBOSE, "Will %s in destination index service", ((request->error() || request->cancel_requested()) ? istring("unregister") : istring("register"))); request->set_status(DTRStatus::REGISTER_REPLICA); } else { request->get_logger()->msg(Arc::VERBOSE, "Destination is not index service, skipping replica registration"); request->set_status(DTRStatus::REPLICA_REGISTERED); } } void Scheduler::ProcessDTRREPLICA_REGISTERED(DTR_ptr request){ // If there was a problem registering the destination file, // using a different source replica won't help, so pass to final step // (remote destinations can't be cached). The post-processor should have // taken care of deleting the physical file. If the error originates from // before, follow normal workflow and processor will clean up if(request->error() && request->get_error_status().GetLastErrorState() == DTRStatus::REGISTERING_REPLICA) { request->get_logger()->msg(Arc::ERROR, "Error registering replica, moving to end of data staging"); request->set_status(DTRStatus::CACHE_PROCESSED); } else if (!(request->get_cache_parameters().cache_dirs.empty() && request->get_cache_parameters().readonly_cache_dirs.empty()) && (request->get_cache_state() == CACHE_ALREADY_PRESENT || request->get_cache_state() == CACHE_DOWNLOADED || request->get_cache_state() == CACHEABLE || request->get_cache_state() == CACHE_NOT_USED)) { // Normal workflow is PROCESS_CACHE request->get_logger()->msg(Arc::VERBOSE, "Will process cache"); request->set_status(DTRStatus::PROCESS_CACHE); } else { // not a cacheable file request->get_logger()->msg(Arc::VERBOSE, "File is not cacheable, skipping cache processing"); request->set_status(DTRStatus::CACHE_PROCESSED); } } void Scheduler::ProcessDTRCACHE_PROCESSED(DTR_ptr request){ // Final stage within scheduler. 
Retries are initiated from here if necessary, // otherwise report success or failure to generator // First remove from caching list DtrList.caching_finished(request); if (request->cancel_requested()) { // Cancellation steps finished request->get_logger()->msg(Arc::VERBOSE, "Cancellation complete"); request->set_status(DTRStatus::CANCELLED); } else if(request->error()) { // If the error occurred in cache processing we send back // to REPLICA_QUERIED to try the same replica again without cache, // or to CACHE_CHECKED if the file was already in cache, or to NEW // to try again if there was a locking problem during link. If there // was a cache timeout we also go back to CACHE_CHECKED. If in // another place we are finished and report error to generator if (request->get_error_status().GetLastErrorState() == DTRStatus::PROCESSING_CACHE) { if (request->get_cache_state() == CACHE_LOCKED) { // set a flat wait time of 10s Arc::Period cache_wait_period(10); request->get_logger()->msg(Arc::INFO, "Will wait 10s"); request->set_process_time(cache_wait_period); request->set_cache_state(CACHEABLE); request->set_status(DTRStatus::NEW); } else { request->get_logger()->msg(Arc::ERROR, "Error in cache processing, will retry without caching"); if (request->get_cache_state() == CACHE_ALREADY_PRESENT) request->set_status(DTRStatus::CACHE_CHECKED); else request->set_status(DTRStatus::REPLICA_QUERIED); request->set_cache_state(CACHE_SKIP); } request->reset_error_status(); return; } else if (request->get_error_status().GetLastErrorState() == DTRStatus::CACHE_WAIT) { request->get_logger()->msg(Arc::ERROR, "Will retry without caching"); request->set_cache_state(CACHE_SKIP); request->reset_error_status(); request->set_status(DTRStatus::CACHE_CHECKED); return; } else { request->decrease_tries_left(); // Here we decide to retry based on whether the error is // temporary or not and the configured retry strategy if (request->get_error_status().GetErrorStatus() == DTRErrorStatus::TEMPORARY_REMOTE_ERROR || request->get_error_status().GetErrorStatus() == DTRErrorStatus::TRANSFER_SPEED_ERROR || request->get_error_status().GetErrorStatus() == DTRErrorStatus::INTERNAL_PROCESS_ERROR) { if (request->get_tries_left() > 0) { // Check if credentials are ok if (request->get_source()->RequiresCredentials() || request->get_destination()->RequiresCredentials()) { Arc::Time exp_time = request->get_credential_info().getExpiryTime(); if (exp_time < Arc::Time()) { request->get_logger()->msg(Arc::WARNING, "Proxy has expired"); // Append this information to the error string DTRErrorStatus status = request->get_error_status(); request->set_error_status(status.GetErrorStatus(), status.GetErrorLocation(), status.GetDesc()+" (Proxy expired)"); request->set_status(DTRStatus::ERROR); return; } } // exponential back off - 10s, 40s, 90s, ... 
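// (Worked example of the back-off expression below, assuming get_initial_tries() == 4: decrease_tries_left() has already run, so after the first failure tries_left == 3 and the factor is 4-3 = 1, giving 10*1*1 = 10s; after the second failure 4-2 = 2 gives 40s; after the third 4-1 = 3 gives 90s; after the fourth failure no tries are left and the DTR takes the "Out of retries" branch instead.)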
request->set_process_time(10*(request->get_initial_tries()-request->get_tries_left())* (request->get_initial_tries()-request->get_tries_left())); request->get_logger()->msg(Arc::INFO, "%i retries left, will wait until %s before next attempt", request->get_tries_left(), request->get_process_time().str()); // set state depending on where the error occurred if (request->get_error_status().GetLastErrorState() == DTRStatus::REGISTERING_REPLICA) { request->set_status(DTRStatus::REGISTER_REPLICA); } else if (request->get_error_status().GetLastErrorState() == DTRStatus::RELEASING_REQUEST) { request->set_status(DTRStatus::RELEASE_REQUEST); } else { // If error happened before or during transfer set back to NEW // Reset DTR information set during this transfer request->reset(); request->set_status(DTRStatus::NEW); } return; } else request->get_logger()->msg(Arc::ERROR, "Out of retries"); } request->get_logger()->msg(Arc::ERROR, "Permanent failure"); request->set_status(DTRStatus::ERROR); } } else { // Normal workflow is completed for this DTR successfully request->get_logger()->msg(Arc::INFO, "Finished successfully"); request->set_status(DTRStatus::DONE); } } void Scheduler::ProcessDTRFINAL_STATE(DTR_ptr request){ // This is the only place where the DTR is returned to the generator // and deleted from the global list // Return to the generator request->get_logger()->msg(Arc::INFO, "Returning to generator"); DTR::push(request, GENERATOR); // Delete from the global list DtrList.delete_dtr(request); } void Scheduler::map_state_and_process(DTR_ptr request){ // For cancelled DTRs set the appropriate post-processor state if(request->cancel_requested()) map_cancel_state(request); // Loop until the DTR is sent somewhere for some action to be done // This is more efficient because many DTRs will skip some states and // we don't want to have to wait for the full list to be processed before // advancing to the next state Arc::Time now; while((request->came_from_pre_processor() || request->came_from_delivery() || request->came_from_post_processor() || request->came_from_generator()) && request->get_process_time() <= now) { switch (request->get_status().GetStatus()) { case DTRStatus::NEW: ProcessDTRNEW(request); continue; case DTRStatus::CACHE_WAIT: ProcessDTRCACHE_WAIT(request); continue; case DTRStatus::CACHE_CHECKED: ProcessDTRCACHE_CHECKED(request); continue; case DTRStatus::RESOLVED: ProcessDTRRESOLVED(request); continue; case DTRStatus::REPLICA_QUERIED: ProcessDTRREPLICA_QUERIED(request); continue; case DTRStatus::PRE_CLEANED: ProcessDTRPRE_CLEANED(request); continue; case DTRStatus::STAGING_PREPARING_WAIT: ProcessDTRSTAGING_PREPARING_WAIT(request); continue; case DTRStatus::STAGED_PREPARED: ProcessDTRSTAGED_PREPARED(request); continue; case DTRStatus::TRANSFERRED: ProcessDTRTRANSFERRED(request); continue; case DTRStatus::REQUEST_RELEASED: ProcessDTRREQUEST_RELEASED(request); continue; case DTRStatus::REPLICA_REGISTERED: ProcessDTRREPLICA_REGISTERED(request); continue; case DTRStatus::CACHE_PROCESSED: ProcessDTRCACHE_PROCESSED(request); continue; default: break; //DoNothing } } } void Scheduler::map_cancel_state(DTR_ptr request){ switch (request->get_status().GetStatus()) { case DTRStatus::NEW: case DTRStatus::CHECK_CACHE: case DTRStatus::CACHE_WAIT: { // Nothing has yet been done to require cleanup or additional // activities. Return to the generator via CACHE_PROCESSED. 
request->set_status(DTRStatus::CACHE_PROCESSED); } break; case DTRStatus::CACHE_CHECKED: case DTRStatus::RESOLVE: { // The cache may have been started, so set to // REPLICA_REGISTERED to allow post-processor to clean up cache request->set_status(DTRStatus::REPLICA_REGISTERED); } break; case DTRStatus::RESOLVED: case DTRStatus::QUERY_REPLICA: case DTRStatus::REPLICA_QUERIED: case DTRStatus::PRE_CLEAN: case DTRStatus::PRE_CLEANED: case DTRStatus::STAGE_PREPARE: { // At this stage we may have registered a file in an // index service so set to REQUEST_RELEASED to allow // the post-processor to clean it up request->set_status(DTRStatus::REQUEST_RELEASED); } break; case DTRStatus::STAGING_PREPARING_WAIT: case DTRStatus::STAGED_PREPARED: case DTRStatus::TRANSFER: { // At this stage, in addition to cache work, we // may already have pending requests. // The post-processor should take care of them too request->set_status(DTRStatus::TRANSFERRED); } break; case DTRStatus::TRANSFERRED: case DTRStatus::RELEASE_REQUEST: case DTRStatus::REQUEST_RELEASED: case DTRStatus::REGISTER_REPLICA: case DTRStatus::REPLICA_REGISTERED: case DTRStatus::PROCESS_CACHE: case DTRStatus::CACHE_PROCESSED: { // post-processing states // If the request was cancelled during the transfer, the delivery // should have cleaned up the destination file. If it was cancelled after the // transfer we have to decide whether to clean up or not. /* delete_destination_file() */ // No other action required here, just let the normal workflow // resume and the post-processor will take care of clean up } break; default: break; //Do Nothing } } void Scheduler::map_stuck_state(DTR_ptr request) { switch (request->get_status().GetStatus()) { case DTRStatus::CHECKING_CACHE: { // The cache may have been started, so set to // REPLICA_REGISTERED to allow post-processor to clean up cache request->set_status(DTRStatus::REPLICA_REGISTERED); } break; case DTRStatus::RESOLVING: case DTRStatus::QUERYING_REPLICA: case DTRStatus::PRE_CLEANING: { // At this stage we may have registered a file in an // index service so set to REQUEST_RELEASED to allow // the post-processor to clean it up request->set_status(DTRStatus::REQUEST_RELEASED); } break; case DTRStatus::STAGING_PREPARING: { // At this stage, in addition to cache work, we // may already have pending requests. 
// The post-processor should take care of them too request->set_status(DTRStatus::TRANSFERRED); } break; // For post-processor states simply move on to next state case DTRStatus::RELEASING_REQUEST: { request->set_status(DTRStatus::REQUEST_RELEASED); } break; case DTRStatus::REGISTERING_REPLICA: { request->set_status(DTRStatus::REPLICA_REGISTERED); } break; case DTRStatus::PROCESSING_CACHE: { request->set_status(DTRStatus::CACHE_PROCESSED); } break; default: break; // Unexpected state - do nothing } } void Scheduler::add_event(DTR_ptr event) { event_lock.lock(); events.push_back(event); event_lock.unlock(); } void Scheduler::choose_delivery_service(DTR_ptr request) { if (configured_delivery_services.empty()) return; // Only local is configured if (configured_delivery_services.size() == 1 && configured_delivery_services.front() == DTR::LOCAL_DELIVERY) return; // Check for size limit under which local should be used if (remote_size_limit > 0 && request->get_source()->CheckSize() && request->get_source()->GetSize() < remote_size_limit) { request->get_logger()->msg(Arc::INFO, "File is smaller than %llu bytes, will use local delivery", remote_size_limit); request->set_delivery_endpoint(DTR::LOCAL_DELIVERY); return; } // Remember current endpoint Arc::URL delivery_endpoint(request->get_delivery_endpoint()); // Check delivery services when the first DTR is processed, and every 5 // minutes after that. The ones that work are the only ones that will be // used until the next check. // This method assumes that the DTR has permission on all services, // which may not be true if DN filtering is used on those services. if (usable_delivery_services.empty() || Arc::Time() - delivery_last_checked > 300) { delivery_last_checked = Arc::Time(); usable_delivery_services.clear(); for (std::vector<Arc::URL>::iterator service = configured_delivery_services.begin(); service != configured_delivery_services.end(); ++service) { request->set_delivery_endpoint(*service); std::vector<std::string> allowed_dirs; std::string load_avg; if (!DataDeliveryComm::CheckComm(request, allowed_dirs, load_avg)) { log_to_root_logger(Arc::WARNING, "Error with delivery service at " + request->get_delivery_endpoint().str() + " - This service will not be used"); } else { usable_delivery_services[*service] = allowed_dirs; // This is not a timing measurement so use dummy timestamps timespec dummy; job_perf_log.Log("DTR_load_" + service->Host(), load_avg, dummy, dummy); } } request->set_delivery_endpoint(delivery_endpoint); if (usable_delivery_services.empty()) { log_to_root_logger(Arc::ERROR, "No usable delivery services found, will use local delivery"); return; } } // Make a list of the delivery services that this DTR can use std::vector<Arc::URL> possible_delivery_services; bool can_use_local = false; for (std::map<Arc::URL, std::vector<std::string> >::iterator service = usable_delivery_services.begin(); service != usable_delivery_services.end(); ++service) { if (service->first == DTR::LOCAL_DELIVERY) can_use_local = true; for (std::vector<std::string>::iterator dir = service->second.begin(); dir != service->second.end(); ++dir) { if (request->get_destination()->Local()) { // check for caching std::string dest = request->get_destination()->TransferLocations()[0].Path(); if ((request->get_cache_state() == CACHEABLE) && !request->get_cache_file().empty()) dest = request->get_cache_file(); if (dest.find(*dir) == 0) { request->get_logger()->msg(Arc::DEBUG, "Delivery service at %s can copy to %s", service->first.str(), *dir); possible_delivery_services.push_back(service->first); break; } } } else if 
(request->get_source()->Local()) { if (request->get_source()->TransferLocations()[0].Path().find(*dir) == 0) { request->get_logger()->msg(Arc::DEBUG, "Delivery service at %s can copy from %s", service->first.str(), *dir); possible_delivery_services.push_back(service->first); break; } } else { // copy between two remote endpoints so any service is ok possible_delivery_services.push_back(service->first); break; } } } if (possible_delivery_services.empty()) { request->get_logger()->msg(Arc::WARNING, "Could not find any useable delivery service," " forcing local transfer"); request->set_delivery_endpoint(DTR::LOCAL_DELIVERY); return; } // only local if (possible_delivery_services.size() == 1 && can_use_local) { request->set_delivery_endpoint(DTR::LOCAL_DELIVERY); return; } // Exclude services which are already full, i.e. running more transfers than their share of DeliverySlots for (std::vector<Arc::URL>::iterator possible = possible_delivery_services.begin(); possible != possible_delivery_services.end();) { if (delivery_hosts[possible->Host()] > (int)(DeliverySlots/configured_delivery_services.size())) { request->get_logger()->msg(Arc::DEBUG, "Not using delivery service at %s because it is full", possible->str()); possible = possible_delivery_services.erase(possible); } else { ++possible; } } // If none left then we should not use local but wait if (possible_delivery_services.empty()) { request->set_delivery_endpoint(Arc::URL()); return; } // First try, use any service if (request->get_tries_left() == request->get_initial_tries()) { delivery_endpoint = possible_delivery_services.at(rand() % possible_delivery_services.size()); request->set_delivery_endpoint(delivery_endpoint); return; } // Retry, try not to use a previous problematic service. If all are // problematic then default to local (even if not configured) for (std::vector<Arc::URL>::iterator possible = possible_delivery_services.begin(); possible != possible_delivery_services.end();) { std::vector<Arc::URL>::const_iterator problem = request->get_problematic_delivery_services().begin(); while (problem != request->get_problematic_delivery_services().end()) { if (*possible == *problem) { request->get_logger()->msg(Arc::VERBOSE, "Not using delivery service %s due to previous failure", problem->str()); possible = possible_delivery_services.erase(possible); break; } ++problem; } if (problem == request->get_problematic_delivery_services().end()) ++possible; } if (possible_delivery_services.empty()) { // force local if (!can_use_local) request->get_logger()->msg(Arc::WARNING, "No remote delivery services " "are useable, forcing local delivery"); request->set_delivery_endpoint(DTR::LOCAL_DELIVERY); } else { // Find a random service different from the previous one, looping a // limited number of times in case all delivery services are the same url Arc::URL ep(possible_delivery_services.at(rand() % possible_delivery_services.size())); for (unsigned int i = 0; ep == delivery_endpoint && i < possible_delivery_services.size() * 10; ++i) { ep = possible_delivery_services.at(rand() % possible_delivery_services.size()); } request->set_delivery_endpoint(ep); } } void Scheduler::process_events(void){ Arc::Time now; event_lock.lock(); for (std::list<DTR_ptr>::iterator event = events.begin(); event != events.end();) { DTR_ptr tmp = *event; event_lock.unlock(); if (tmp->get_process_time() <= now) { map_state_and_process(tmp); // If final state, the DTR is returned to the generator and deleted if (tmp->is_in_final_state()) { ProcessDTRFINAL_STATE(tmp); event_lock.lock(); event = events.erase(event); continue; } // If the event was 
sent on to a queue, erase it from the list if (tmp->is_destined_for_pre_processor() || tmp->is_destined_for_delivery() || tmp->is_destined_for_post_processor()) { event_lock.lock(); event = events.erase(event); continue; } } event_lock.lock(); ++event; } event_lock.unlock(); } void Scheduler::revise_queues() { // The DTRs ready to go into a processing state std::map<DTRStatus::DTRStatusType, std::list<DTR_ptr> > DTRQueueStates; DtrList.filter_dtrs_by_statuses(DTRStatus::ToProcessStates, DTRQueueStates); // The active DTRs currently in processing states std::map<DTRStatus::DTRStatusType, std::list<DTR_ptr> > DTRRunningStates; DtrList.filter_dtrs_by_statuses(DTRStatus::ProcessingStates, DTRRunningStates); // Get the number of current transfers for each delivery service for // enforcing limits per server delivery_hosts.clear(); for (std::list<DTR_ptr>::const_iterator i = DTRRunningStates[DTRStatus::TRANSFERRING].begin(); i != DTRRunningStates[DTRStatus::TRANSFERRING].end(); i++) { delivery_hosts[(*i)->get_delivery_endpoint().Host()]++; } // Check for any requested changes in priority DtrList.check_priority_changes(std::string(dumplocation + ".prio")); // Get all the DTRs in a staged state staged_queue.clear(); std::list<DTR_ptr> staged_queue_list; DtrList.filter_dtrs_by_statuses(DTRStatus::StagedStates, staged_queue_list); // filter out stageable DTRs per transfer share, putting the highest // priority at the front for (std::list<DTR_ptr>::iterator i = staged_queue_list.begin(); i != staged_queue_list.end(); ++i) { if ((*i)->get_source()->IsStageable() || (*i)->get_destination()->IsStageable()) { std::list<DTR_ptr>& queue = staged_queue[(*i)->get_transfer_share()]; if (!queue.empty() && (*i)->get_priority() > queue.front()->get_priority()) { queue.push_front(*i); } else { queue.push_back(*i); } } } Arc::Time now; // Go through "to process" states, work out shares and push DTRs for (unsigned int i = 0; i < DTRStatus::ToProcessStates.size(); ++i) { std::list<DTR_ptr> DTRQueue = DTRQueueStates[DTRStatus::ToProcessStates.at(i)]; std::list<DTR_ptr> ActiveDTRs = DTRRunningStates[DTRStatus::ProcessingStates.at(i)]; if (DTRQueue.empty() && ActiveDTRs.empty()) continue; // Map of job id to set of DTRs, used for grouping bulk requests std::map<std::string, std::set<DTR_ptr> > bulk_requests; // Transfer shares for this queue TransferShares transferShares(transferSharesConf); // Sort the DTR queue according to the priorities the DTRs have. // Highest priority will be at the beginning of the list. DTRQueue.sort(dtr_sort_predicate); int highest_priority = 0; // First go over the queue and check for cancellation and timeout for (std::list<DTR_ptr>::iterator dtr = DTRQueue.begin(); dtr != DTRQueue.end();) { DTR_ptr tmp = *dtr; if (dtr == DTRQueue.begin()) highest_priority = tmp->get_priority(); // There's no check for cancellation requests for the post-processor. // Most DTRs with cancellation requests will go to the post-processor // for cleanups, hold releases, etc., so the cancellation requests // don't break normal workflow in the post-processor (as opposed // to any other process), but instead act just as a sign that the // post-processor should do additional cleanup activities. if (tmp->is_destined_for_pre_processor() || tmp->is_destined_for_delivery()) { // The cancellation requests break the normal workflow. A cancelled // request will either go back to generator or be put into a // post-processor state for clean up. if (tmp->cancel_requested()) { map_cancel_state(tmp); add_event(tmp); dtr = DTRQueue.erase(dtr); continue; } } // To avoid the situation where DTRs get blocked due to higher // priority DTRs, DTRs that have passed their timeout should have their // priority boosted. 
But this should only happen if there are higher // priority DTRs, since there could be a large queue of low priority DTRs // which, after having their priority boosted, would then block new // high priority requests. // The simple solution here is to increase priority by 1 every 5 minutes. // There is plenty of scope for more intelligent solutions. // TODO reset priority back to original value once past this stage. if (tmp->get_timeout() < now && tmp->get_priority() < highest_priority) { tmp->set_priority(tmp->get_priority() + 1); tmp->set_timeout(300); } // STAGE_PREPARE is a special case where we have to apply a limit to // avoid preparing too many files and then pins expire while in the // transfer queue. In future it may be better to limit per remote host. // For now count DTRs staging and transferring in this share and apply // limit. In order not to block the highest priority DTRs here we allow // them to bypass the limit. if (DTRStatus::ToProcessStates.at(i) == DTRStatus::STAGE_PREPARE) { if (staged_queue[tmp->get_transfer_share()].size() < StagedPreparedSlots || staged_queue[tmp->get_transfer_share()].front()->get_priority() < tmp->get_priority() ) { // Reset timeout tmp->set_timeout(3600); // add to the staging queue and sort to put highest priority first staged_queue[tmp->get_transfer_share()].push_front(tmp); staged_queue[tmp->get_transfer_share()].sort(dtr_sort_predicate); } else { // Past limit - this DTR cannot be processed this time so erase from queue dtr = DTRQueue.erase(dtr); continue; } } // check if bulk operation is possible for this DTR. To keep it simple // there is only one bulk request per job per revise_queues loop if (tmp->bulk_possible()) { std::string jobid(tmp->get_parent_job_id()); if (bulk_requests.find(jobid) == bulk_requests.end()) { std::set<DTR_ptr> bulk_list; bulk_list.insert(tmp); bulk_requests[jobid] = bulk_list; } else { DTR_ptr first_bulk = *bulk_requests[jobid].begin(); // Only source bulk operations supported at the moment and limit to 100 if (bulk_requests[jobid].size() < 100 && first_bulk->get_source()->GetURL().Protocol() == tmp->get_source()->GetURL().Protocol() && first_bulk->get_source()->GetURL().Host() == tmp->get_source()->GetURL().Host() && first_bulk->get_source()->CurrentLocation().Protocol() == tmp->get_source()->CurrentLocation().Protocol() && first_bulk->get_source()->CurrentLocation().Host() == tmp->get_source()->CurrentLocation().Host() && // This is because we cannot have a mix of LFNs and GUIDs when querying a catalog like LFC first_bulk->get_source()->GetURL().MetaDataOption("guid").length() == tmp->get_source()->GetURL().MetaDataOption("guid").length()) { bulk_requests[jobid].insert(tmp); } } } transferShares.increase_transfer_share(tmp->get_transfer_share()); ++dtr; } // Go over the active DTRs and add to transfer share for (std::list<DTR_ptr>::iterator dtr = ActiveDTRs.begin(); dtr != ActiveDTRs.end();) { DTR_ptr tmp = *dtr; if (tmp->get_status() == DTRStatus::TRANSFERRING) { // If the DTR is in Delivery, check for cancellation. The pre- and // post-processor DTRs don't get cancelled here but are allowed to // continue processing. if (tmp->cancel_requested()) { tmp->get_logger()->msg(Arc::INFO, "Cancelling active transfer"); delivery.cancelDTR(tmp); dtr = ActiveDTRs.erase(dtr); continue; } } else if (tmp->get_modification_time() + 3600 < now) { // Stuck in processing thread for more than one hour - assume a hang // and try to recover and retry. It is potentially dangerous if a // stuck thread wakes up. 
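// (Illustrative note: map_stuck_state(), defined earlier in this file, maps the stuck processing state onto the corresponding "...ED" state so that the post-processor can still perform any cleanup; the INTERNAL_PROCESS_ERROR set below is then subject to the usual retry logic in ProcessDTRCACHE_PROCESSED.)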
tmp->get_logger()->msg(Arc::WARNING, "Processing thread timed out. Restarting DTR"); tmp->set_error_status(DTRErrorStatus::INTERNAL_PROCESS_ERROR, DTRErrorStatus::NO_ERROR_LOCATION, "Processor thread timed out"); map_stuck_state(tmp); add_event(tmp); ++dtr; continue; } transferShares.increase_transfer_share((*dtr)->get_transfer_share()); ++dtr; } // If the queue is empty we can go straight to the next state if (DTRQueue.empty()) continue; // Slot limit for this state unsigned int slot_limit = DeliverySlots; if (DTRQueue.front()->is_destined_for_pre_processor()) slot_limit = PreProcessorSlots; else if (DTRQueue.front()->is_destined_for_post_processor()) slot_limit = PostProcessorSlots; // Calculate the slots available for each active share transferShares.calculate_shares(slot_limit); // Shares which have at least one DTR active and running. // Shares can only use emergency slots if they are not in this list. std::set<std::string> active_shares; unsigned int running = ActiveDTRs.size(); // Go over the active DTRs again and decrease slots in corresponding shares for (std::list<DTR_ptr>::iterator dtr = ActiveDTRs.begin(); dtr != ActiveDTRs.end(); ++dtr) { transferShares.decrease_number_of_slots((*dtr)->get_transfer_share()); active_shares.insert((*dtr)->get_transfer_share()); } // Now at the beginning of the queue we have DTRs that should be // launched first. Launch them, but with respect to the transfer shares. for (std::list<DTR_ptr>::iterator dtr = DTRQueue.begin(); dtr != DTRQueue.end(); ++dtr) { DTR_ptr tmp = *dtr; // Check if there are any shares left in the queue which might need // an emergency share - if not we are done if (running >= slot_limit && transferShares.active_shares().size() == active_shares.size()) break; // Check if this DTR is still in a queue state (was not sent already // in a bulk operation) if (tmp->get_status() != DTRStatus::ToProcessStates.at(i)) continue; // Are there slots left for this share? 
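// (Illustrative note, assuming for example slot_limit == 10: if 10 DTRs in this state are already running, can_start is forced to false below for any share that already has an active DTR, so only shares with no running DTR may still start one by taking an emergency slot.)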
bool can_start = transferShares.can_start(tmp->get_transfer_share()); // Check if it is possible to use an emergency share if (running >= slot_limit && active_shares.find(tmp->get_transfer_share()) != active_shares.end()) { can_start = false; } if (can_start) { transferShares.decrease_number_of_slots(tmp->get_transfer_share()); // Send to processor/delivery if (tmp->is_destined_for_pre_processor()) { // Check for bulk if (tmp->bulk_possible()) { std::set<DTR_ptr> bulk_set(bulk_requests[tmp->get_parent_job_id()]); if (bulk_set.size() > 1 && bulk_set.find(tmp) != bulk_set.end()) { tmp->get_logger()->msg(Arc::INFO, "Will use bulk request"); unsigned int dtr_no = 0; for (std::set<DTR_ptr>::iterator i = bulk_set.begin(); i != bulk_set.end(); ++i) { if (dtr_no == 0) (*i)->set_bulk_start(true); if (dtr_no == bulk_set.size() - 1) (*i)->set_bulk_end(true); DTR::push(*i, PRE_PROCESSOR); ++dtr_no; } } else { DTR::push(tmp, PRE_PROCESSOR); } } else { DTR::push(tmp, PRE_PROCESSOR); } } else if (tmp->is_destined_for_post_processor()) DTR::push(tmp, POST_PROCESSOR); else if (tmp->is_destined_for_delivery()) { choose_delivery_service(tmp); if (!tmp->get_delivery_endpoint()) { // With a large queue waiting for delivery and different dirs per // delivery service this could slow things down as it could go // through every DTR in the queue tmp->get_logger()->msg(Arc::DEBUG, "No delivery endpoints available, will try later"); continue; } DTR::push(tmp, DELIVERY); delivery_hosts[tmp->get_delivery_endpoint().Host()]++; } ++running; active_shares.insert(tmp->get_transfer_share()); } // Hard limit with all emergency slots used if (running == slot_limit + EmergencySlots) break; } } } void Scheduler::receiveDTR(DTR_ptr request){ if (!request) { logger.msg(Arc::ERROR, "Scheduler received NULL DTR"); return; } if (request->get_status() != DTRStatus::NEW) { add_event(request); return; } // New DTR - first check it is valid if (!(*request)) { logger.msg(Arc::ERROR, "Scheduler received invalid DTR"); request->set_status(DTRStatus::ERROR); DTR::push(request, GENERATOR); return; } request->registerCallback(&processor,PRE_PROCESSOR); request->registerCallback(&processor,POST_PROCESSOR); request->registerCallback(&delivery,DELIVERY); /* Shares part */ // First, get the transfer share this DTR should belong to std::string DtrTransferShare = transferSharesConf.extract_share_info(request); // If no share information could be obtained, use default share if (DtrTransferShare.empty()) DtrTransferShare = "_default"; // If this share is a reference share, we have to add the sub-share // to the reference list bool in_reference = transferSharesConf.is_configured(DtrTransferShare); int priority = transferSharesConf.get_basic_priority(DtrTransferShare); request->set_transfer_share(DtrTransferShare); DtrTransferShare = request->get_transfer_share(); // Now the sub-share is added to DtrTransferShare, add it to reference // shares if appropriate and update each TransferShare if (in_reference && !transferSharesConf.is_configured(DtrTransferShare)) { transferSharesConf.set_reference_share(DtrTransferShare, priority); } // Compute the priority this DTR receives - this is the priority of the // share adjusted by the priority of the parent job request->set_priority(int(transferSharesConf.get_basic_priority(DtrTransferShare) * request->get_priority() * 0.01)); /* Shares part ends */ DtrList.add_dtr(request); add_event(request); } bool Scheduler::cancelDTRs(const std::string& jobid) { cancelled_jobs_lock.lock(); cancelled_jobs.push_back(jobid); 
cancelled_jobs_lock.unlock(); return true; } void Scheduler::dump_thread(void* arg) { Scheduler* sched = (Scheduler*)arg; while (sched->scheduler_state == RUNNING && !sched->dumplocation.empty()) { // every second, dump state sched->DtrList.dumpState(sched->dumplocation); // Performance metric - total number of DTRs in the system timespec dummy; sched->job_perf_log.Log("DTR_total", Arc::tostring(sched->DtrList.size()), dummy, dummy); if (sched->dump_signal.wait(1000)) break; // notified by signal() } } bool Scheduler::stop() { state_lock.lock(); if(scheduler_state != RUNNING) { state_lock.unlock(); return false; } // cancel all jobs std::list<std::string> alljobs = DtrList.all_jobs(); cancelled_jobs_lock.lock(); for (std::list<std::string>::iterator job = alljobs.begin(); job != alljobs.end(); ++job) cancelled_jobs.push_back(*job); cancelled_jobs_lock.unlock(); // signal main loop to stop and wait for completion of all DTRs scheduler_state = TO_STOP; run_signal.wait(); scheduler_state = STOPPED; state_lock.unlock(); return true; } void Scheduler::main_thread (void* arg) { Scheduler* it = (Scheduler*)arg; it->main_thread(); } void Scheduler::main_thread (void) { logger.msg(Arc::INFO, "Scheduler starting up"); logger.msg(Arc::INFO, "Scheduler configuration:"); logger.msg(Arc::INFO, " Pre-processor slots: %u", PreProcessorSlots); logger.msg(Arc::INFO, " Delivery slots: %u", DeliverySlots); logger.msg(Arc::INFO, " Post-processor slots: %u", PostProcessorSlots); logger.msg(Arc::INFO, " Emergency slots: %u", EmergencySlots); logger.msg(Arc::INFO, " Prepared slots: %u", StagedPreparedSlots); logger.msg(Arc::INFO, " Shares configuration:\n%s", transferSharesConf.conf()); for (std::vector<Arc::URL>::iterator i = configured_delivery_services.begin(); i != configured_delivery_services.end(); ++i) { if (*i == DTR::LOCAL_DELIVERY) logger.msg(Arc::INFO, " Delivery service: LOCAL"); else logger.msg(Arc::INFO, " Delivery service: %s", i->str()); } // Start thread dumping DTR state if (!Arc::CreateThreadFunction(&dump_thread, this)) logger.msg(Arc::ERROR, "Failed to create DTR dump thread"); // Disconnect from root logger so that messages are logged to per-DTR Logger Arc::Logger::getRootLogger().setThreadContext(); root_destinations = Arc::Logger::getRootLogger().getDestinations(); Arc::Logger::getRootLogger().removeDestinations(); Arc::Logger::getRootLogger().setThreshold(DTR::LOG_LEVEL); while(scheduler_state != TO_STOP || !DtrList.empty()) { // first check for cancelled jobs cancelled_jobs_lock.lock(); std::list<std::string>::iterator jobid = cancelled_jobs.begin(); for (;jobid != cancelled_jobs.end();) { std::list<DTR_ptr> requests; DtrList.filter_dtrs_by_job(*jobid, requests); for (std::list<DTR_ptr>::iterator dtr = requests.begin(); dtr != requests.end(); ++dtr) { (*dtr)->set_cancel_request(); (*dtr)->get_logger()->msg(Arc::INFO, "DTR %s cancelled", (*dtr)->get_id()); } jobid = cancelled_jobs.erase(jobid); } cancelled_jobs_lock.unlock(); // Dealing with pending events, i.e. 
DTRs from another processes process_events(); // Revise all the internal queues and take actions revise_queues(); Glib::usleep(50000); } // make sure final state is dumped before exit dump_signal.signal(); if (!dumplocation.empty()) DtrList.dumpState(dumplocation); log_to_root_logger(Arc::INFO, "Scheduler loop exited"); run_signal.signal(); } } // namespace DataStaging nordugrid-arc-6.14.0/src/libs/data-staging/PaxHeaders.30264/README0000644000000000000000000000013214152153376022307 xustar000000000000000030 mtime=1638455038.404645959 30 atime=1638455038.499647387 30 ctime=1638455098.849554172 nordugrid-arc-6.14.0/src/libs/data-staging/README0000644000175000002070000000015414152153376022274 0ustar00mockbuildmock00000000000000ARC data staging implementation. This code provides an advanced mechanism for data transfer and scheduling. nordugrid-arc-6.14.0/src/libs/PaxHeaders.30264/README0000644000000000000000000000013214152153376017744 xustar000000000000000030 mtime=1638455038.402645929 30 atime=1638455038.498647372 30 ctime=1638455098.817553691 nordugrid-arc-6.14.0/src/libs/README0000644000175000002070000000010514152153376017725 0ustar00mockbuildmock00000000000000ARC libraries. Libraries related to HED can be found in src/hed/libs.nordugrid-arc-6.14.0/src/PaxHeaders.30264/services0000644000000000000000000000013214152153474017700 xustar000000000000000030 mtime=1638455100.816583727 30 atime=1638455103.996631509 30 ctime=1638455100.816583727 nordugrid-arc-6.14.0/src/services/0000755000175000002070000000000014152153474017742 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/PaxHeaders.30264/acix0000644000000000000000000000013214152153474020624 xustar000000000000000030 mtime=1638455100.851584253 30 atime=1638455103.996631509 30 ctime=1638455100.851584253 nordugrid-arc-6.14.0/src/services/acix/0000755000175000002070000000000014152153474020666 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/acix/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376022735 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.835584013 nordugrid-arc-6.14.0/src/services/acix/Makefile.am0000644000175000002070000000160314152153376022723 0ustar00mockbuildmock00000000000000 if PYTHON_ENABLED pythondir = $(PYTHON_SITE_LIB)/acix nobase_python_PYTHON = __init__.py \ core/__init__.py \ core/bitvector.py \ core/bloomfilter.py \ core/cacheclient.py \ core/hashes.py \ core/indexclient.py \ core/ssl.py \ scanner/__init__.py \ scanner/cache.py \ scanner/cacheresource.py \ scanner/cachesetup.py \ scanner/pscan.py \ indexserver/__init__.py \ indexserver/index.py \ indexserver/indexresource.py \ indexserver/indexsetup.py SUBDIRS = core scanner indexserver endif DIST_SUBDIRS = core scanner indexserver nordugrid-arc-6.14.0/src/services/acix/PaxHeaders.30264/scanner0000644000000000000000000000013014152153474022253 xustar000000000000000029 mtime=1638455100.91258517 30 atime=1638455103.996631509 29 ctime=1638455100.91258517 nordugrid-arc-6.14.0/src/services/acix/scanner/0000755000175000002070000000000014152153474022317 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376024367 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.907585095 nordugrid-arc-6.14.0/src/services/acix/scanner/Makefile.am0000644000175000002070000000053614152153376024360 0ustar00mockbuildmock00000000000000if 
SYSV_SCRIPTS_ENABLED CACHESERVER_SCRIPT = arc-acix-scanner else CACHESERVER_SCRIPT = endif initd_SCRIPTS = $(CACHESERVER_SCRIPT) if SYSTEMD_UNITS_ENABLED CACHESERVER_UNIT = arc-acix-scanner.service else CACHESERVER_UNIT = endif units_DATA = $(CACHESERVER_UNIT) pkgdata_SCRIPTS = arc-acix-scanner-start SUBDIRS = $(TEST_DIR) dist_SUBDIRS = test nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/arc-acix-scanner-start.in0000644000000000000000000000013214152153376027134 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.909585125 nordugrid-arc-6.14.0/src/services/acix/scanner/arc-acix-scanner-start.in0000644000175000002070000000377314152153376027133 0ustar00mockbuildmock00000000000000#!/bin/sh PIDFILE=/run/arc-acix-scanner.pid DEFAULT_LOGFILE=/var/log/arc/arc-acix-scanner.log prog=@TWISTD@ RUN=yes send_systemd_notify() { # return if no systemd-notify found type systemd-notify >/dev/null 2>&1 || return systemd-notify "$@" } log_failure_msg() { send_systemd_notify --status "Error: $@" echo $@ } # sysconfig files if [ -r /etc/sysconfig/nordugrid ]; then . /etc/sysconfig/nordugrid elif [ -r /etc/default/nordugrid ]; then . /etc/default/nordugrid fi if [ -r /etc/sysconfig/arc-acix-scanner ]; then . /etc/sysconfig/arc-acix-scanner elif [ -r /etc/default/arc-acix-scanner ]; then . /etc/default/arc-acix-scanner fi if [ "$RUN" != "yes" ] ; then echo "arc-acix-scanner service is disabled, please adjust the configuration to your" echo "needs and then set RUN to 'yes' in /etc/default/arc-acix-scanner to enable it." exit 0 fi # ARC_CONFIG if [ "x$ARC_CONFIG" = "x" ] && [ -r /etc/arc.conf ]; then ARC_CONFIG=/etc/arc.conf fi if [ ! -r "$ARC_CONFIG" ]; then echo "ARC configuration not found (usually /etc/arc.conf)" exit 1 fi # Check if service is defined in configuration libexecdir="${ARC_LOCATION:-@prefix@}/@pkglibexecsubdir@/" $libexecdir/arcconfig-parser --config $ARC_CONFIG --block acix-scanner > /dev/null 2>&1 if [ $? -eq 1 ]; then log_failure_msg "Block [acix-scanner] not defined in configuration" exit 1 fi # Load configuration into env vars eval "$( $libexecdir/arcconfig-parser --config $ARC_CONFIG --block acix-scanner --export bash )" LOGFILE=${CONFIG_logfile:-$DEFAULT_LOGFILE} LOGD=`dirname $LOGFILE` LOGN=`basename $LOGFILE` if [ ! -d $LOGD ]; then mkdir -p $LOGD fi APPSTART=" from acix import scanner; from twisted.python import log; from twisted.python.logfile import LogFile; application = scanner.createApplication(); log.startLogging(LogFile('$LOGN', '$LOGD', rotateLength=1000000, maxRotatedFiles=25)) " TACFILE=`mktemp` || exit 1 echo $APPSTART > $TACFILE exec $prog --pidfile $PIDFILE -y $TACFILE -l $LOGFILE nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/Makefile.in0000644000000000000000000000013114152153434024372 xustar000000000000000030 mtime=1638455068.884103927 30 atime=1638455090.620430526 29 ctime=1638455100.90658508 nordugrid-arc-6.14.0/src/services/acix/scanner/Makefile.in0000644000175000002070000007563514152153434024400 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/acix/scanner DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/arc-acix-scanner.in \ $(srcdir)/arc-acix-scanner-start.in \ $(srcdir)/arc-acix-scanner.service.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = arc-acix-scanner arc-acix-scanner-start \ arc-acix-scanner.service CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do 
echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(initddir)" "$(DESTDIR)$(pkgdatadir)" \ "$(DESTDIR)$(unitsdir)" SCRIPTS = $(initd_SCRIPTS) $(pkgdata_SCRIPTS) AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac DATA = $(units_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ 
EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ 
PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ @SYSV_SCRIPTS_ENABLED_FALSE@CACHESERVER_SCRIPT = @SYSV_SCRIPTS_ENABLED_TRUE@CACHESERVER_SCRIPT = arc-acix-scanner initd_SCRIPTS = $(CACHESERVER_SCRIPT) @SYSTEMD_UNITS_ENABLED_FALSE@CACHESERVER_UNIT = @SYSTEMD_UNITS_ENABLED_TRUE@CACHESERVER_UNIT = arc-acix-scanner.service units_DATA = $(CACHESERVER_UNIT) pkgdata_SCRIPTS = arc-acix-scanner-start SUBDIRS 
= $(TEST_DIR) dist_SUBDIRS = test all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/acix/scanner/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/acix/scanner/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): arc-acix-scanner: $(top_builddir)/config.status $(srcdir)/arc-acix-scanner.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-acix-scanner-start: $(top_builddir)/config.status $(srcdir)/arc-acix-scanner-start.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-acix-scanner.service: $(top_builddir)/config.status $(srcdir)/arc-acix-scanner.service.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-initdSCRIPTS: $(initd_SCRIPTS) @$(NORMAL_INSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(initddir)'"; \ $(MKDIR_P) "$(DESTDIR)$(initddir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(initddir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(initddir)$$dir" || exit $$?; \ } \ ; done uninstall-initdSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(initddir)'; $(am__uninstall_files_from_dir) install-pkgdataSCRIPTS: $(pkgdata_SCRIPTS) @$(NORMAL_INSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgdatadir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgdatadir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ 
$(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(pkgdatadir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(pkgdatadir)$$dir" || exit $$?; \ } \ ; done uninstall-pkgdataSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(pkgdatadir)'; $(am__uninstall_files_from_dir) mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-unitsDATA: $(units_DATA) @$(NORMAL_INSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(unitsdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(unitsdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(unitsdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(unitsdir)" || exit $$?; \ done uninstall-unitsDATA: @$(NORMAL_UNINSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(unitsdir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(SCRIPTS) $(DATA) installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(initddir)" "$(DESTDIR)$(pkgdatadir)" "$(DESTDIR)$(unitsdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-initdSCRIPTS install-pkgdataSCRIPTS \ install-unitsDATA install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-initdSCRIPTS uninstall-pkgdataSCRIPTS \ uninstall-unitsDATA .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am \ install-initdSCRIPTS install-man install-pdf install-pdf-am \ install-pkgdataSCRIPTS install-ps install-ps-am install-strip \ install-unitsDATA installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-am uninstall uninstall-am \ uninstall-initdSCRIPTS uninstall-pkgdataSCRIPTS \ uninstall-unitsDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/pscan.py0000644000000000000000000000013214152153376024011 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.846584178 nordugrid-arc-6.14.0/src/services/acix/scanner/pscan.py0000644000175000002070000001217314152153376024002 0ustar00mockbuildmock00000000000000""" Scans the ARC cache directory, but in another process in order to avoid blocking the twisted reactor. This is done in a not-so-nice way, where we create a python in a temporary file and execute that program. 
""" from __future__ import print_function import os import sys import tempfile import time from twisted.python import log from twisted.internet import reactor, defer, protocol from twisted.protocols import basic from arc.utils import config ARC_CONF = '/etc/arc.conf' DATA_CACHE_SUBDIR = 'data' SCAN_PROGRAM_DUMP = '''#generated by arc cacheindex from __future__ import print_function import os import shelve import tempfile import time f = None t = time.time() dump_file = '%s' if dump_file: f = tempfile.NamedTemporaryFile('wt', delete=False) m = shelve.open(tempfile.gettempdir() + '/ARC-ACIX/.db') for dirpath, dirnames, filenames in os.walk('%s'): for filename in filenames: if filename.endswith('.meta') and os.path.exists(os.path.join(dirpath, filename[:-5])): url = dirpath.rsplit('/')[-1] + filename.split('.')[0] print(url + "\\r\\n", end=' ') if dump_file and time.time() < t + 300: # Don't spend more than 5 mins looking up URLs try: murl = m[url] except KeyError: # first line of meta is url murl = '' with open(os.path.join(dirpath, filename), 'rt') as mf: murl = mf.readline().strip() m[url] = murl f.write(murl+'\\n') if dump_file: f.close() m.close() os.rename(f.name, dump_file) ''' class URLReceiver(basic.LineReceiver): def __init__(self, filter): self.filter = filter def lineReceived(self, line): self.filter(line.strip()) class ScanProtocol(protocol.ProcessProtocol): def __init__(self, filter, d): self.url_receiver = URLReceiver(filter) self.d = d def outReceived(self, data): self.url_receiver.dataReceived(data) def errReceived(self, data): log.msg("Error data received from scanning program. Oh noes: %s" % data) def processEnded(self, reason): if reason.value.exitCode == 0: self.d.callback(None) return # everything is just peachy log.err(reason) self.d.callback(reason) def getARCCacheDirs(): config.parse_arc_conf(os.environ['ARC_CONFIG'] if 'ARC_CONFIG' in os.environ else ARC_CONF) cache_dirs = config.get_value('cachedir', 'arex/cache', force_list=True) # First value is cachedir, second value can be 'drain' cache_dirs = [c.split()[0] for c in cache_dirs] if cache_dirs else [] return cache_dirs class CacheScanner(object): def __init__(self, cache_dir=[], cache_dump=False): if not cache_dir: cache_dir = getARCCacheDirs() self.cache_dir = cache_dir self.cache_dump = cache_dump def dir(self): return self.cache_dir def scan(self, filter): defs = [] dump_file = '' if self.cache_dump: dump_file = '%s/ARC-ACIX/%s' % (tempfile.gettempdir(), str(int(time.time()))) try: os.mkdir('%s/ARC-ACIX' % tempfile.gettempdir()) except: pass for cd in self.cache_dir: program = SCAN_PROGRAM_DUMP % (dump_file, cd) tf = tempfile.NamedTemporaryFile('wt') tf.write(program) # ensure file content is in kernel before spawning process tf.flush() d = defer.Deferred() pp = ScanProtocol(filter, d) pt = reactor.spawnProcess(pp, sys.executable, args=[sys.executable, tf.name]) def err(failure): log.err(failure) return failure def passthru(result, _): return result d.addErrback(err) # The semantics of the temporary file is that it will automatically # get deleted once it gets garbage collected. This means that if we # don't use the tf variable or set the delete flag to False, the # file will get deleted before we start using it. Unfortuantely # Python 2.5 and earlier does not support the delete flag, so # instead we keep the variable for the temporary file in use, # dealying its deletion until the filter has been generated, hence # the bogus passthru. 
d.addBoth(passthru, tf) defs.append(d) return defer.DeferredList(defs) @defer.inlineCallbacks def main(): import sys, time #cache_dirs = sys.argv[2:] #print "Cache dirs", cache_dirs class Count(object): def __init__(self): self.count = 0 def gotHash(self, hash): print(hash) self.count += 1 c = Count() t0 = time.time() #yield CacheScanner(cache_dirs).scan(c.gotHash) yield CacheScanner().scan(c.gotHash) td = time.time() - t0 print("Scan time:", td) print("Objects scanned: ", c.count) reactor.stop() if __name__ == '__main__': reactor.callWhenRunning(main) reactor.run() nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/test0000644000000000000000000000013014152153474023232 xustar000000000000000029 mtime=1638455100.93058544 30 atime=1638455103.996631509 29 ctime=1638455100.93058544 nordugrid-arc-6.14.0/src/services/acix/scanner/test/0000755000175000002070000000000014152153474023276 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/acix/scanner/test/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376025345 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 29 ctime=1638455100.92858541 nordugrid-arc-6.14.0/src/services/acix/scanner/test/Makefile.am0000644000175000002070000000041314152153376025331 0ustar00mockbuildmock00000000000000TESTSCRIPTS = test_cacheresource.py test_scan.py if ACIX_TESTS_ENABLED TESTS_ENVIRONMENT = PYTHONPATH=$(top_srcdir)/src/services:${top_srcdir}/src/utils/python $(TRIAL) TESTS = $(TESTSCRIPTS) else TESTS = endif check_SCRIPTS = $(TESTS) EXTRA_DIST = $(TESTSCRIPTS) nordugrid-arc-6.14.0/src/services/acix/scanner/test/PaxHeaders.30264/test_cacheresource.py0000644000000000000000000000013214152153376027536 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.929585425 nordugrid-arc-6.14.0/src/services/acix/scanner/test/test_cacheresource.py0000644000175000002070000000333214152153376027524 0ustar00mockbuildmock00000000000000from twisted.trial import unittest from twisted.internet import reactor, defer from twisted.web import resource, server from acix.core import bloomfilter, cacheclient from acix.scanner import cache, cacheresource TEST_URLS1 = [ 'srm://srm.ndgf.org/biogrid/db/uniprot/UniProt12.6/uniprot_sprot.fasta.gz', 'gsiftp://grid.tsl.uu.se:2811/storage/sam/testfile'] class TestScanner(object): def __init__(self, urls): self.urls = urls def dir(self): return "testscanner (no dir)" def scan(self, filter): for url in self.urls: filter(url) d = defer.Deferred() d.callback(None) return d class CacheResourceTest(unittest.TestCase): port = 4080 @defer.inlineCallbacks def setUp(self): scanner = TestScanner(TEST_URLS1) self.cs = cache.Cache(scanner, 10000, 60, '') cr = cacheresource.CacheResource(self.cs) siteroot = resource.Resource() siteroot.putChild(b'cache', cr) site = server.Site(siteroot) yield self.cs.startService() self.iport = reactor.listenTCP(self.port, site) self.cache_url = 'http://localhost:%i/cache' % (self.port) @defer.inlineCallbacks def tearDown(self): yield self.cs.stopService() yield self.iport.stopListening() @defer.inlineCallbacks def testCacheRetrieval(self): hashes, cache_time, cache, cache_url = yield cacheclient.retrieveCache(self.cache_url) size = len(cache) * 8 bf = bloomfilter.BloomFilter(size, bits=cache, hashes=hashes) for url in TEST_URLS1: self.assertTrue(url in bf) self.assertFalse("gahh" in bf) self.assertFalse("whuu" in bf) 
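The retrieval test above rebuilds the serialized filter via acix.core.bloomfilter and then checks URL membership. For readers unfamiliar with the data structure, the following minimal, self-contained sketch illustrates only the general bloom-filter idea the test relies on (each key sets k bit positions; lookups can give false positives but never false negatives). It is a conceptual illustration, not the acix.core.bloomfilter implementation, and deliberately does not mimic its serialization format or hash set.

import hashlib

class TinyBloom(object):
    """Conceptual bloom filter: k salted hashes set/check k bit positions."""

    def __init__(self, n_bits, salts=('a', 'b', 'c')):
        self.n_bits = n_bits
        self.salts = salts
        self.bits = bytearray((n_bits + 7) // 8)

    def _positions(self, key):
        # One bit position per salt/hash function.
        for salt in self.salts:
            digest = hashlib.sha1((salt + key).encode()).hexdigest()
            yield int(digest, 16) % self.n_bits

    def add(self, key):
        for pos in self._positions(key):
            self.bits[pos // 8] |= 1 << (pos % 8)

    def __contains__(self, key):
        # Present only if every position is set; absence of any bit proves
        # the key was never added.
        return all(self.bits[pos // 8] & (1 << (pos % 8))
                   for pos in self._positions(key))

# Example:
#   bf = TinyBloom(8 * 1024)
#   bf.add('gsiftp://grid.tsl.uu.se:2811/storage/sam/testfile')
#   'gsiftp://grid.tsl.uu.se:2811/storage/sam/testfile' in bf   -> True
#   'gahh' in bf   -> almost certainly False (false positives are possible)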
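The child program that pscan.py writes to a temporary file derives cache keys from the ARC cache layout: an entry counts only when both the data file and its '.meta' companion exist, and the key is the two-character subdirectory name concatenated with the filename stem (the full 40-character hash), which is what test_scan.py later in this directory asserts. The sketch below restates that key derivation as a standalone function, independent of Twisted and of the generated child program; the path in the usage comment is hypothetical.

import os

def iter_cache_keys(cache_dir):
    """Yield cache hashes for complete entries, mirroring the walk done by
    the generated scanner program in pscan.py: a key is emitted only when a
    '.meta' file and its corresponding data file are both present, and the
    key is <subdir basename> + <filename without suffix>."""
    for dirpath, dirnames, filenames in os.walk(cache_dir):
        for filename in filenames:
            if not filename.endswith('.meta'):
                continue
            if not os.path.exists(os.path.join(dirpath, filename[:-5])):
                continue  # orphaned .meta without a data file; skip it
            yield os.path.basename(dirpath) + filename.split('.')[0]

# Hypothetical usage: for a cache rooted at /tmp/acix-test-cache/cache this
# yields keys such as '6b27f066ef9e22d2e3e40c668cae72e9e163fafd'.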
nordugrid-arc-6.14.0/src/services/acix/scanner/test/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153434025352 xustar000000000000000030 mtime=1638455068.933104663 30 atime=1638455090.667431232 30 ctime=1638455100.927585395 nordugrid-arc-6.14.0/src/services/acix/scanner/test/Makefile.in0000644000175000002070000005404514152153434025347 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ @ACIX_TESTS_ENABLED_TRUE@TESTS = $(TESTSCRIPTS) subdir = src/services/acix/scanner/test DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION 
$(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) am__tty_colors_dummy = \ mgn= red= grn= lgn= blu= brg= std=; \ am__color_tests=no am__tty_colors = { \ $(am__tty_colors_dummy); \ if test "X$(AM_COLOR_TESTS)" = Xno; then \ am__color_tests=no; \ elif test "X$(AM_COLOR_TESTS)" = Xalways; then \ am__color_tests=yes; \ elif test "X$$TERM" != Xdumb && { test -t 1; } 2>/dev/null; then \ am__color_tests=yes; \ fi; \ if test $$am__color_tests = yes; then \ red=''; \ grn=''; \ lgn=''; \ blu=''; \ mgn=''; \ brg=''; \ std=''; \ fi; \ } DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ 
DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ 
PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ TESTSCRIPTS = 
test_cacheresource.py test_scan.py @ACIX_TESTS_ENABLED_TRUE@TESTS_ENVIRONMENT = PYTHONPATH=$(top_srcdir)/src/services:${top_srcdir}/src/utils/python $(TRIAL) check_SCRIPTS = $(TESTS) EXTRA_DIST = $(TESTSCRIPTS) all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/acix/scanner/test/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/acix/scanner/test/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs tags TAGS: ctags CTAGS: cscope cscopelist: check-TESTS: $(TESTS) @failed=0; all=0; xfail=0; xpass=0; skip=0; \ srcdir=$(srcdir); export srcdir; \ list=' $(TESTS) '; \ $(am__tty_colors); \ if test -n "$$list"; then \ for tst in $$list; do \ if test -f ./$$tst; then dir=./; \ elif test -f $$tst; then dir=; \ else dir="$(srcdir)/"; fi; \ if $(TESTS_ENVIRONMENT) $${dir}$$tst $(AM_TESTS_FD_REDIRECT); then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *[\ \ ]$$tst[\ \ ]*) \ xpass=`expr $$xpass + 1`; \ failed=`expr $$failed + 1`; \ col=$$red; res=XPASS; \ ;; \ *) \ col=$$grn; res=PASS; \ ;; \ esac; \ elif test $$? 
-ne 77; then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *[\ \ ]$$tst[\ \ ]*) \ xfail=`expr $$xfail + 1`; \ col=$$lgn; res=XFAIL; \ ;; \ *) \ failed=`expr $$failed + 1`; \ col=$$red; res=FAIL; \ ;; \ esac; \ else \ skip=`expr $$skip + 1`; \ col=$$blu; res=SKIP; \ fi; \ echo "$${col}$$res$${std}: $$tst"; \ done; \ if test "$$all" -eq 1; then \ tests="test"; \ All=""; \ else \ tests="tests"; \ All="All "; \ fi; \ if test "$$failed" -eq 0; then \ if test "$$xfail" -eq 0; then \ banner="$$All$$all $$tests passed"; \ else \ if test "$$xfail" -eq 1; then failures=failure; else failures=failures; fi; \ banner="$$All$$all $$tests behaved as expected ($$xfail expected $$failures)"; \ fi; \ else \ if test "$$xpass" -eq 0; then \ banner="$$failed of $$all $$tests failed"; \ else \ if test "$$xpass" -eq 1; then passes=pass; else passes=passes; fi; \ banner="$$failed of $$all $$tests did not behave as expected ($$xpass unexpected $$passes)"; \ fi; \ fi; \ dashes="$$banner"; \ skipped=""; \ if test "$$skip" -ne 0; then \ if test "$$skip" -eq 1; then \ skipped="($$skip test was not run)"; \ else \ skipped="($$skip tests were not run)"; \ fi; \ test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$skipped"; \ fi; \ report=""; \ if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \ report="Please report to $(PACKAGE_BUGREPORT)"; \ test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$report"; \ fi; \ dashes=`echo "$$dashes" | sed s/./=/g`; \ if test "$$failed" -eq 0; then \ col="$$grn"; \ else \ col="$$red"; \ fi; \ echo "$${col}$$dashes$${std}"; \ echo "$${col}$$banner$${std}"; \ test -z "$$skipped" || echo "$${col}$$skipped$${std}"; \ test -z "$$report" || echo "$${col}$$report$${std}"; \ echo "$${col}$$dashes$${std}"; \ test "$$failed" -eq 0; \ else :; fi distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am $(MAKE) $(AM_MAKEFLAGS) $(check_SCRIPTS) $(MAKE) $(AM_MAKEFLAGS) check-TESTS check: check-am all-am: Makefile installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: check-am install-am install-strip .PHONY: all all-am check check-TESTS check-am clean clean-generic \ clean-libtool cscopelist-am ctags-am distclean \ distclean-generic distclean-libtool distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags-am uninstall \ uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/acix/scanner/test/PaxHeaders.30264/test_scan.py0000644000000000000000000000013114152153376025646 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 29 ctime=1638455100.93058544 nordugrid-arc-6.14.0/src/services/acix/scanner/test/test_scan.py0000644000175000002070000000566614152153376025651 0ustar00mockbuildmock00000000000000import os import shutil import tempfile from twisted.trial import unittest from twisted.internet import defer from acix.scanner import pscan class ScanTestCase(unittest.TestCase): def setUp(self): # fill caches self.tmpdir = tempfile.mkdtemp(prefix='/tmp/acix-test-cache') os.mkdir(self.tmpdir+'/cache') os.mkdir(self.tmpdir+'/cache/data') os.mkdir(self.tmpdir+'/cache/data/6b') f = open(self.tmpdir+'/cache/data/6b/27f066ef9e22d2e3e40c668cae72e9e163fafd.meta', 'w') f.write('http://localhost/file1') f.close() f = open(self.tmpdir+'/cache/data/6b/27f066ef9e22d2e3e40c668cae72e9e163fafd', 'w') f.write('1234') f.close() os.mkdir(self.tmpdir+'/cache/data/a5') f = open(self.tmpdir+'/cache/data/a5/7c87cedbb464eb765a9fa8b8d506686cf0d0ee.meta', 'w') f.write('http://localhost/file2') f.close() f = open(self.tmpdir+'/cache/data/a5/7c87cedbb464eb765a9fa8b8d506686cf0d0ee', 'w') f.write('1234') f.close() self.tmpdir2 = tempfile.mkdtemp(prefix='/tmp/acix-test-cache2') os.mkdir(self.tmpdir2+'/cache') os.mkdir(self.tmpdir2+'/cache/data') os.mkdir(self.tmpdir2+'/cache/data/9f') f = open(self.tmpdir2+'/cache/data/9f/4f96f6aada65ef3dafce1af2e36ba8428aeb03.meta', 'w') f.write('http://localhost/file3') f.close() f = open(self.tmpdir2+'/cache/data/9f/4f96f6aada65ef3dafce1af2e36ba8428aeb03', 'w') f.write('1234') f.close() os.mkdir(self.tmpdir2+'/cache/data/dc') f = open(self.tmpdir2+'/cache/data/dc/294265ad76c92fe388f4f3c452734b10064ac2.meta', 'w') f.write('http://localhost/file4') f.close() f = open(self.tmpdir2+'/cache/data/dc/294265ad76c92fe388f4f3c452734b10064ac2', 'w') f.write('1234') f.close() def tearDown(self): shutil.rmtree(self.tmpdir) shutil.rmtree(self.tmpdir2) @defer.inlineCallbacks def testScan(self): scanner = pscan.CacheScanner([self.tmpdir+'/cache']) l = [] yield scanner.scan(lambda url : l.append(url.decode())) self.failUnlessIn('6b27f066ef9e22d2e3e40c668cae72e9e163fafd', l) self.failUnlessIn('a57c87cedbb464eb765a9fa8b8d506686cf0d0ee', l) self.failIfIn('abc', l) self.failIfIn('some_other_thing', l) @defer.inlineCallbacks def testScanMultipleDirs(self): scanner = pscan.CacheScanner([self.tmpdir+'/cache', self.tmpdir2+'/cache']) l = [] yield scanner.scan(lambda url : l.append(url.decode())) # from the first dir self.failUnlessIn('a57c87cedbb464eb765a9fa8b8d506686cf0d0ee', l) self.failUnlessIn('6b27f066ef9e22d2e3e40c668cae72e9e163fafd', l) # from the second dir self.failUnlessIn('9f4f96f6aada65ef3dafce1af2e36ba8428aeb03', l) self.failUnlessIn('dc294265ad76c92fe388f4f3c452734b10064ac2', l) nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/__init__.py0000644000000000000000000000013214152153376024444 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.843584133 nordugrid-arc-6.14.0/src/services/acix/scanner/__init__.py0000644000175000002070000000013414152153376024427 0ustar00mockbuildmock00000000000000from acix.scanner import cachesetup createApplication = cachesetup.createCacheApplication nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/cacheresource.py0000644000000000000000000000013214152153376025520 xustar000000000000000030 
mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.845584163 nordugrid-arc-6.14.0/src/services/acix/scanner/cacheresource.py0000644000175000002070000000305414152153376025507 0ustar00mockbuildmock00000000000000""" resource to fetch a bloom filter from """ import socket from twisted.python import log from twisted.web import resource class CacheResource(resource.Resource): isLeaf = True def __init__(self, cache_service): resource.Resource.__init__(self) self.cache_service = cache_service def render_GET(self, request): try: clienthost = request.getClientAddress().host # twisted >= 18.4 except: clienthost = request.getClientIP() # twisted < 18.4 def getClient(host): try: name, names, addresses = socket.gethostbyaddr(host) except socket.error: return host names.insert(0, name) for name in names: if '.' in name: return name return names[0] client = getClient(clienthost) + "/" + clienthost log.msg("GET request on cache from %s" % client) gen_time, hashes, cache, cache_url = self.cache_service.getCache() request.setHeader(b'Content-type', b'application/vnd.org.ndgf.acix.bloomfilter') if not cache: log.msg("Cache content has not been built yet") cache = b'' gen_time = 0 request.setHeader(b'Content-length', str(len(cache)).encode()) request.setHeader(b'x-hashes', ','.join(hashes).encode()) request.setHeader(b'x-cache-time', str(gen_time).encode()) request.setHeader(b'x-cache-url', str(cache_url).encode()) return cache nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/cache.py0000644000000000000000000000013214152153376023750 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.844584148 nordugrid-arc-6.14.0/src/services/acix/scanner/cache.py0000644000175000002070000000615414152153376023743 0ustar00mockbuildmock00000000000000""" resource to fetch a bloom filter from """ import time from twisted.python import log from twisted.internet import task from twisted.application import service from acix.core import bloomfilter CAPACITY_CHUNK = 10000 # 10k entries is the least we bother with WATERMARK_LOW = 10000 # 10k entries -> 35k memory, no reason to go lower class _Counter(object): def __init__(self): self.n = 0 def up(self): self.n += 1 class Cache(service.Service): def __init__(self, scanner, capacity, refresh_interval, cache_url): self.scanner = scanner self.capacity = capacity self.refresh_interval = refresh_interval self.cache_url = cache_url self.cache_task = task.LoopingCall(self.renewCache) self.cache = None self.generation_time = None self.hashes = [] def startService(self): log.msg("-" * 60) log.msg("Starting cache service") log.msg(" Directory : %s" % self.scanner.dir()) log.msg(" Capacity : %s" % self.capacity) log.msg(" Refresh interval : %i" % self.refresh_interval) log.msg("-" * 60) self.cache_task.start(self.refresh_interval) def stopService(self): self.cache_task.stop() def renewCache(self): n_bits = bloomfilter.calculateSize(capacity=self.capacity) log.msg("Renewing cache. Filter capacity %i, size: %i bits" % (self.capacity, n_bits)) filter = bloomfilter.BloomFilter(n_bits) file_counter = _Counter() def addEntry(key): file_counter.up() filter.add(key) t0 = time.time() d = self.scanner.scan(addEntry) d.addCallback(self._scanDone, filter, t0, file_counter) return d def _scanDone(self, _, filter, t0, file_counter): td = time.time() - t0 self.cache = filter.serialize() self.generation_time = time.time() self.hashes = filter.get_hashes() log.msg("Cache updated. Time taken: %f seconds. 
Entries: %i" % (round(td, 2), file_counter.n)) if file_counter.n == 0: log.msg("No file entries registered. Possible misconfiguration.") return self.checkCapacity(file_counter.n) def checkCapacity(self, n_files): if n_files > self.capacity: log.msg("Filter capacity exceeded. Capacity: %i. Files: %i" % (self.capacity, n_files)) self.capacity = (round(n_files / float(CAPACITY_CHUNK)) + 1) * CAPACITY_CHUNK log.msg("Filter capacity expanded to %i (will take effect on next cache run)" % self.capacity) return if self.capacity / float(n_files) > 3.0 and self.capacity > WATERMARK_LOW: # filter under 1/3 full log.msg("Filter capacity underutilized. Capacity: %i. Files: %i" % (self.capacity, n_files)) self.capacity = max(self.capacity - CAPACITY_CHUNK, WATERMARK_LOW) log.msg("Filter capacity reduced to %i (will take effect on next cache run)" % self.capacity) def getCache(self): return self.generation_time, self.hashes, self.cache, self.cache_url nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/arc-acix-scanner.service.in0000644000000000000000000000013214152153376027440 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.909585125 nordugrid-arc-6.14.0/src/services/acix/scanner/arc-acix-scanner.service.in0000644000175000002070000000035114152153376027424 0ustar00mockbuildmock00000000000000[Unit] Description=ARC cache index - scanner server After=network.target local_fs.target [Service] Type=forking PIDFile=/run/arc-acix-scanner.pid ExecStart=/usr/share/arc/arc-acix-scanner-start [Install] WantedBy=multi-user.target nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/arc-acix-scanner.in0000644000000000000000000000013114152153376026000 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 29 ctime=1638455100.90858511 nordugrid-arc-6.14.0/src/services/acix/scanner/arc-acix-scanner.in0000644000175000002070000001066214152153376025773 0ustar00mockbuildmock00000000000000#!/bin/sh # ARC cache index - scanner server # # chkconfig: 2345 75 25 # description: The ARC cache server collects cache information which \ # can be pulled by a cache index. ### BEGIN INIT INFO # Provides: arc-acix-scanner # Required-Start: $network $local_fs # Required-Stop: $network $local_fs # Should-Start: $time # Should-Stop: $time # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 # Short-Description: ARC cache index - scanner server # Description: The ARC cache server collects cache information which # can be pulled by a cache index. ### END INIT INFO # source function library if [ -f /etc/init.d/functions ]; then . /etc/init.d/functions log_success_msg() { echo -n "$@" success "$@" echo } log_warning_msg() { echo -n "$@" warning "$@" echo } log_failure_msg() { echo -n "$@" failure "$@" echo } elif [ -f /lib/lsb/init-functions ]; then . /lib/lsb/init-functions else echo "Error: Cannot source neither init.d nor lsb functions" exit 1 fi PIDFILE=/run/arc-acix-scanner.pid prog=@TWISTD@ # sysconfig files if [ -r /etc/sysconfig/nordugrid ]; then . /etc/sysconfig/nordugrid elif [ -r /etc/default/nordugrid ]; then . /etc/default/nordugrid fi if [ -r /etc/sysconfig/arc-acix-scanner ]; then . /etc/sysconfig/arc-acix-scanner elif [ -r /etc/default/arc-acix-scanner ]; then . /etc/default/arc-acix-scanner fi # ARC_LOCATION ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ ! 
nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/arc-acix-scanner.service.in0000644000000000000000000000013214152153376027440 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.909585125 nordugrid-arc-6.14.0/src/services/acix/scanner/arc-acix-scanner.service.in0000644000175000002070000000035114152153376027424 0ustar00mockbuildmock00000000000000
[Unit]
Description=ARC cache index - scanner server
After=network.target local_fs.target

[Service]
Type=forking
PIDFile=/run/arc-acix-scanner.pid
ExecStart=/usr/share/arc/arc-acix-scanner-start

[Install]
WantedBy=multi-user.target
nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/arc-acix-scanner.in0000644000000000000000000000013114152153376026000 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 29 ctime=1638455100.90858511 nordugrid-arc-6.14.0/src/services/acix/scanner/arc-acix-scanner.in0000644000175000002070000001066214152153376025773 0ustar00mockbuildmock00000000000000
#!/bin/sh
# ARC cache index - scanner server
#
# chkconfig: 2345 75 25
# description: The ARC cache server collects cache information which \
#              can be pulled by a cache index.
### BEGIN INIT INFO
# Provides:          arc-acix-scanner
# Required-Start:    $network $local_fs
# Required-Stop:     $network $local_fs
# Should-Start:      $time
# Should-Stop:       $time
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: ARC cache index - scanner server
# Description:       The ARC cache server collects cache information which
#                    can be pulled by a cache index.
### END INIT INFO

# source function library
if [ -f /etc/init.d/functions ]; then
    . /etc/init.d/functions
    log_success_msg() {
        echo -n "$@"
        success "$@"
        echo
    }
    log_warning_msg() {
        echo -n "$@"
        warning "$@"
        echo
    }
    log_failure_msg() {
        echo -n "$@"
        failure "$@"
        echo
    }
elif [ -f /lib/lsb/init-functions ]; then
    . /lib/lsb/init-functions
else
    echo "Error: Cannot source neither init.d nor lsb functions"
    exit 1
fi

PIDFILE=/run/arc-acix-scanner.pid
prog=@TWISTD@

# sysconfig files
if [ -r /etc/sysconfig/nordugrid ]; then
    . /etc/sysconfig/nordugrid
elif [ -r /etc/default/nordugrid ]; then
    . /etc/default/nordugrid
fi
if [ -r /etc/sysconfig/arc-acix-scanner ]; then
    . /etc/sysconfig/arc-acix-scanner
elif [ -r /etc/default/arc-acix-scanner ]; then
    . /etc/default/arc-acix-scanner
fi

# ARC_LOCATION
ARC_LOCATION=${ARC_LOCATION:-@prefix@}
if [ ! -d "$ARC_LOCATION" ]; then
    log_failure_msg "ARC_LOCATION ($ARC_LOCATION) not found"
    exit 1
fi

if [ `id -u` = 0 ] ; then
    # Debian does not have /run/lock/subsys
    if [ -d /run/lock/subsys ]; then
        LOCKFILE=/run/lock/subsys/arc-acix-scanner
    else
        LOCKFILE=/run/lock/arc-acix-scanner
    fi
else
    LOCKFILE=$HOME/arc-acix-scanner.lock
fi

do_start() {
    echo -n "Starting ARC cache scanner..."

    # Check if we are already running
    if [ -f "$PIDFILE" ]; then
        read pid < "$PIDFILE"
        if [ "x$pid" != "x" ]; then
            ps -p "$pid" -o comm 2>/dev/null | grep "^$prog$" 1>/dev/null 2>/dev/null
            if [ $? -eq 0 ] ; then
                log_success_msg "already running (pid $pid)"
                return 0
            fi
        fi
        rm -f "$PIDFILE" "$LOCKFILE"
    fi

    ${ARC_LOCATION}/@pkgdatasubdir@/arc-acix-scanner-start
    RETVAL=$?

    if [ $RETVAL -eq 0 ]; then
        touch $LOCKFILE
        log_success_msg
    else
        log_failure_msg
    fi
    return $RETVAL
}

do_stop() {
    echo -n "Stopping ARC cache scanner..."
    if [ -f "$PIDFILE" ]; then
        read pid < "$PIDFILE"
        if [ ! -z "$pid" ] ; then
            kill "$pid"
            RETVAL=$?
            if [ $RETVAL -eq 0 ]; then
                log_success_msg
            else
                log_failure_msg
            fi

            timeout=2; # for stopping nicely
            while ( ps -p "$pid" -o comm 2>/dev/null | grep "^$prog$" 1>/dev/null 2>/dev/null ) && [ $timeout -ge 1 ] ; do
                sleep 1
                timeout=$(($timeout - 1))
            done

            [ $timeout -lt 1 ] && kill -9 "$pid" 1>/dev/null 2>&1
            rm -f "$PIDFILE" "$LOCKFILE"
        else
            RETVAL=1
            log_failure_msg "$prog shutdown - pidfile is empty"
        fi
    else
        RETVAL=0
        log_success_msg "$prog shutdown - already stopped"
    fi
    return $RETVAL
}

do_status() {
    if [ -f "$PIDFILE" ]; then
        read pid < "$PIDFILE"
        if [ "$pid" != "" ]; then
            if ps -p "$pid" > /dev/null; then
                echo "$1 (pid $pid) is running..."
                return 0
            fi
            echo "$1 stopped but pid file exists"
            return 1
        fi
    fi
    if [ -f $LOCKFILE ]; then
        echo "$1 stopped but lockfile exists"
        return 2
    fi
    echo "$1 is stopped"
    return 3
}

do_restart() {
    do_stop
    do_start
}

case "$1" in
    start)
        do_start
        ;;
    stop)
        do_stop
        ;;
    restart|reload|force-reload)
        do_restart
        ;;
    condrestart|try-restart)
        [ -f $LOCKFILE ] && do_restart || :
        ;;
    status)
        do_status $prog
        ;;
    *)
        echo "Usage: $0 {start|stop|restart|status|reload|condrestart|try-restart}"
        exit 1
        ;;
esac

exit 0
nordugrid-arc-6.14.0/src/services/acix/scanner/PaxHeaders.30264/cachesetup.py0000644000000000000000000000013214152153376025031 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.846584178 nordugrid-arc-6.14.0/src/services/acix/scanner/cachesetup.py0000644000175000002070000000625114152153376025022 0ustar00mockbuildmock00000000000000
import os
import socket

from twisted.application import internet, service
from twisted.web import resource, server

from acix.core import ssl
from acix.scanner import pscan, cache, cacheresource

from arc.utils import config


# -- constants
SSL_DEFAULT = True
CACHE_INTERFACE = ''
CACHE_TCP_PORT = 5080
CACHE_SSL_PORT = 5443
DEFAULT_CAPACITY = 30000  # number of files in cache
DEFAULT_CACHE_REFRESH_INTERVAL = 600  # seconds between updating cache
DEFAULT_HOST_KEY = '/etc/grid-security/hostkey.pem'
DEFAULT_HOST_CERT = '/etc/grid-security/hostcert.pem'
DEFAULT_CERTIFICATES = '/etc/grid-security/certificates'
DEFAULT_WS_INTERFACE = 'https://hostname:443/arex'  # in case not defined in arc.conf
ARC_CONF = '/etc/arc.conf'


def getCacheConf():
    '''Return a tuple of (cache_url, cache_dump, cache_host, cache_port,
    x509_host_key, x509_host_cert, x509_cert_dir)'''

    config.parse_arc_conf(os.environ['ARC_CONFIG'] if 'ARC_CONFIG' in os.environ else ARC_CONF)

    # Use cache access URL if [arex/ws/cache] is present
    cache_url = ''
    if config.check_blocks('arex/ws/cache'):
        arex_url = config.get_value('wsurl', 'arex/ws')
        if not arex_url:
            # Use default endpoint, but first we need hostname
            hostname = config.get_value('hostname', 'common') or socket.gethostname()
            arex_url = DEFAULT_WS_INTERFACE.replace('hostname', hostname)
        cache_url = '%s/cache' % arex_url

    cache_dump = config.get_value('cachedump', 'acix-scanner') == 'yes'
    cache_host = config.get_value('hostname', 'acix-scanner') or CACHE_INTERFACE
    cache_port = int(config.get_value('port', 'acix-scanner') or CACHE_SSL_PORT)

    x509_host_key = config.get_value('x509_host_key', ['acix-scanner', 'common']) or DEFAULT_HOST_KEY
    x509_host_cert = config.get_value('x509_host_cert', ['acix-scanner', 'common']) or DEFAULT_HOST_CERT
    x509_cert_dir = config.get_value('x509_cert_dir', ['acix-scanner', 'common']) or DEFAULT_CERTIFICATES

    return (cache_url, cache_dump, cache_host, cache_port, x509_host_key, x509_host_cert, x509_cert_dir)


def createCacheApplication(use_ssl=SSL_DEFAULT, port=None, cache_dir=None,
                           capacity=DEFAULT_CAPACITY, refresh_interval=DEFAULT_CACHE_REFRESH_INTERVAL):

    (cache_url, cache_dump, cache_host, cache_port,
     x509_host_key, x509_host_cert, x509_cert_dir) = getCacheConf()

    scanner = pscan.CacheScanner(cache_dir, cache_dump)
    cs = cache.Cache(scanner, capacity, refresh_interval, cache_url)
    cr = cacheresource.CacheResource(cs)

    siteroot = resource.Resource()
    dataroot = resource.Resource()

    dataroot.putChild(b'cache', cr)
    siteroot.putChild(b'data', dataroot)

    site = server.Site(siteroot)

    # setup application
    application = service.Application("acix-scanner")
    cs.setServiceParent(application)

    if use_ssl:
        cf = ssl.ContextFactory(key_path=x509_host_key, cert_path=x509_host_cert, ca_dir=x509_cert_dir)
        internet.SSLServer(port or cache_port, site, cf, interface=cache_host).setServiceParent(application)
    else:
        internet.TCPServer(port or CACHE_TCP_PORT, site, interface=cache_host).setServiceParent(application)

    return application
nordugrid-arc-6.14.0/src/services/acix/PaxHeaders.30264/Makefile.in0000644000000000000000000000013114152153434022741 xustar000000000000000029 mtime=1638455068.63010011 30 atime=1638455090.608430345 30 ctime=1638455100.834583998 nordugrid-arc-6.14.0/src/services/acix/Makefile.in0000644000175000002070000007304514152153434022740 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?)
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/acix DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(am__nobase_python_PYTHON_DIST) $(top_srcdir)/py-compile \ README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 
2>&1;; \ esac am__nobase_python_PYTHON_DIST = __init__.py core/__init__.py \ core/bitvector.py core/bloomfilter.py core/cacheclient.py \ core/hashes.py core/indexclient.py core/ssl.py \ scanner/__init__.py scanner/cache.py scanner/cacheresource.py \ scanner/cachesetup.py scanner/pscan.py indexserver/__init__.py \ indexserver/index.py indexserver/indexresource.py \ indexserver/indexsetup.py am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__py_compile = PYTHON=$(PYTHON) $(SHELL) $(py_compile) am__installdirs = "$(DESTDIR)$(pythondir)" am__pep3147_tweak = \ sed -e 's|\.py$$||' -e 's|[^/]*$$|__pycache__/&.*.py|' py_compile = $(top_srcdir)/py-compile RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = 
@EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = 
@PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ @PYTHON_ENABLED_TRUE@pythondir = $(PYTHON_SITE_LIB)/acix @PYTHON_ENABLED_TRUE@nobase_python_PYTHON = __init__.py \ @PYTHON_ENABLED_TRUE@ core/__init__.py \ @PYTHON_ENABLED_TRUE@ core/bitvector.py \ @PYTHON_ENABLED_TRUE@ core/bloomfilter.py \ @PYTHON_ENABLED_TRUE@ core/cacheclient.py \ @PYTHON_ENABLED_TRUE@ core/hashes.py \ @PYTHON_ENABLED_TRUE@ core/indexclient.py \ 
@PYTHON_ENABLED_TRUE@ core/ssl.py \ @PYTHON_ENABLED_TRUE@ scanner/__init__.py \ @PYTHON_ENABLED_TRUE@ scanner/cache.py \ @PYTHON_ENABLED_TRUE@ scanner/cacheresource.py \ @PYTHON_ENABLED_TRUE@ scanner/cachesetup.py \ @PYTHON_ENABLED_TRUE@ scanner/pscan.py \ @PYTHON_ENABLED_TRUE@ indexserver/__init__.py \ @PYTHON_ENABLED_TRUE@ indexserver/index.py \ @PYTHON_ENABLED_TRUE@ indexserver/indexresource.py \ @PYTHON_ENABLED_TRUE@ indexserver/indexsetup.py @PYTHON_ENABLED_TRUE@SUBDIRS = core scanner indexserver DIST_SUBDIRS = core scanner indexserver all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/acix/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/acix/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-nobase_pythonPYTHON: $(nobase_python_PYTHON) @$(NORMAL_INSTALL) @list='$(nobase_python_PYTHON)'; test -n "$(pythondir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pythondir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pythondir)" || exit 1; \ fi; \ $(am__nobase_list) | while read dir files; do \ xfiles=; for p in $$files; do \ if test -f "$$p"; then b=; else b="$(srcdir)/"; fi; \ if test -f "$$b$$p"; then xfiles="$$xfiles $$b$$p"; dlist="$$dlist $$p"; \ else :; fi; done; \ test -z "$$xfiles" || { \ test "x$$dir" = x. 
|| { \ echo "$(MKDIR_P) '$(DESTDIR)$(pythondir)/$$dir'"; \ $(MKDIR_P) "$(DESTDIR)$(pythondir)/$$dir"; }; \ echo " $(INSTALL_DATA) $$xfiles '$(DESTDIR)$(pythondir)/$$dir'"; \ $(INSTALL_DATA) $$xfiles "$(DESTDIR)$(pythondir)/$$dir" || exit $$?; }; \ if test -n "$$dlist"; then \ $(am__py_compile) --destdir "$(DESTDIR)" \ --basedir "$(pythondir)" $$dlist; \ else :; fi \ done uninstall-nobase_pythonPYTHON: @$(NORMAL_UNINSTALL) @list='$(nobase_python_PYTHON)'; test -n "$(pythondir)" || list=; \ $(am__nobase_strip_setup); py_files=`$(am__nobase_strip)`; \ test -n "$$py_files" || exit 0; \ dir='$(DESTDIR)$(pythondir)'; \ pyc_files=`echo "$$py_files" | sed 's|$$|c|'`; \ pyo_files=`echo "$$py_files" | sed 's|$$|o|'`; \ py_files_pep3147=`echo "$$py_files" | $(am__pep3147_tweak)`; \ echo "$$py_files_pep3147";\ pyc_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|c|'`; \ pyo_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|o|'`; \ st=0; \ for files in \ "$$py_files" \ "$$pyc_files" \ "$$pyo_files" \ "$$pyc_files_pep3147" \ "$$pyo_files_pep3147" \ ; do \ $(am__uninstall_files_from_dir) || st=$$?; \ done; \ exit $$st # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(pythondir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-nobase_pythonPYTHON install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-nobase_pythonPYTHON .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-nobase_pythonPYTHON install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am uninstall-nobase_pythonPYTHON # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/acix/PaxHeaders.30264/indexserver0000644000000000000000000000013214152153474023162 xustar000000000000000030 mtime=1638455100.953585786 30 atime=1638455103.996631509 30 ctime=1638455100.953585786 nordugrid-arc-6.14.0/src/services/acix/indexserver/0000755000175000002070000000000014152153474023224 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/acix/indexserver/PaxHeaders.30264/indexresource.py0000644000000000000000000000013214152153376026471 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.849584223 nordugrid-arc-6.14.0/src/services/acix/indexserver/indexresource.py0000644000175000002070000000203414152153376026455 0ustar00mockbuildmock00000000000000""" resource to query urls for """ # json module is stock in Python 2.6, for Python 2.5 we use simplejson try: import json except ImportError: import simplejson as json from twisted.python import log from twisted.web import resource class IndexResource(resource.Resource): isLeaf = True def __init__(self, index): resource.Resource.__init__(self) self.index = index def render_GET(self, request): log.msg("Index get. 
Args:" + str(request.args)) try: urls = request.args[b'url'][0].decode().split(',') except KeyError as e: log.msg("Couldn't get url argument from request") request.setResponseCode(400) return "Couldn't get url argument from request" log.msg("Query for urls: " + str(urls)) result = self.index.query(urls) rv = json.dumps(result) request.setHeader(b'Content-type', b'application/json') request.setHeader(b'Content-length', str(len(rv)).encode()) return rv.encode() nordugrid-arc-6.14.0/src/services/acix/indexserver/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376025273 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.948585711 nordugrid-arc-6.14.0/src/services/acix/indexserver/Makefile.am0000644000175000002070000000053014152153376025257 0ustar00mockbuildmock00000000000000if SYSV_SCRIPTS_ENABLED INDEXSERVER_SCRIPT = arc-acix-index else INDEXSERVER_SCRIPT = endif initd_SCRIPTS = $(INDEXSERVER_SCRIPT) if SYSTEMD_UNITS_ENABLED INDEXSERVER_UNIT = arc-acix-index.service else INDEXSERVER_UNIT = endif units_DATA = $(INDEXSERVER_UNIT) pkgdata_SCRIPTS = arc-acix-index-start SUBDIRS = $(TEST_DIR) dist_SUBDIRS = test nordugrid-arc-6.14.0/src/services/acix/indexserver/PaxHeaders.30264/arc-acix-index.in0000644000000000000000000000013214152153376026364 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.949585726 nordugrid-arc-6.14.0/src/services/acix/indexserver/arc-acix-index.in0000644000175000002070000001102114152153376026344 0ustar00mockbuildmock00000000000000#!/bin/sh # ARC cache index - index server # # chkconfig: 2345 75 25 # description: The ARC cache index server collects cache information from \ # cache servers and can be queried for the locations of cached \ # files. ### BEGIN INIT INFO # Provides: arc-acix-index # Required-Start: $network $local_fs # Required-Stop: $network $local_fs # Should-Start: $time # Should-Stop: $time # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 # Short-Description: ARC cache index - index server # Description: The ARC cache index server collects cache information # from cache servers and can be queried for the locations # of cached files. ### END INIT INFO # source function library if [ -f /etc/init.d/functions ]; then . /etc/init.d/functions log_success_msg() { echo -n "$@" success "$@" echo } log_warning_msg() { echo -n "$@" warning "$@" echo } log_failure_msg() { echo -n "$@" failure "$@" echo } elif [ -f /lib/lsb/init-functions ]; then . /lib/lsb/init-functions else echo "Error: Cannot source neither init.d nor lsb functions" exit 1 fi PIDFILE=/run/arc-acix-index.pid prog=@TWISTD@ # sysconfig files if [ -r /etc/sysconfig/nordugrid ]; then . /etc/sysconfig/nordugrid elif [ -r /etc/default/nordugrid ]; then . /etc/default/nordugrid fi if [ -r /etc/sysconfig/arc-acix-index ]; then . /etc/sysconfig/arc-acix-index elif [ -r /etc/default/arc-acix-index ]; then . /etc/default/arc-acix-index fi # ARC_LOCATION ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ ! -d "$ARC_LOCATION" ]; then log_failure_msg "ARC_LOCATION ($ARC_LOCATION) not found" exit 1 fi if [ `id -u` = 0 ] ; then # Debian does not have /run/lock/subsys if [ -d /run/lock/subsys ]; then LOCKFILE=/run/lock/subsys/arc-acix-index else LOCKFILE=/run/lock/arc-acix-index fi else LOCKFILE=$HOME/arc-acix-index.lock fi do_start() { echo -n "Starting ARC cache index..." 
# Check if we are already running if [ -f "$PIDFILE" ]; then read pid < "$PIDFILE" if [ "x$pid" != "x" ]; then ps -p "$pid" -o comm 2>/dev/null | grep "^$prog$" 1>/dev/null 2>/dev/null if [ $? -eq 0 ] ; then log_success_msg "already running (pid $pid)" return 0 fi fi rm -f "$PIDFILE" "$LOCKFILE" fi ${ARC_LOCATION}/@pkgdatasubdir@/arc-acix-index-start RETVAL=$? if [ $RETVAL -eq 0 ]; then touch $LOCKFILE log_success_msg else log_failure_msg fi return $RETVAL } do_stop() { echo -n "Stopping ARC cache index..." if [ -f "$PIDFILE" ]; then read pid < "$PIDFILE" if [ ! -z "$pid" ] ; then kill "$pid" RETVAL=$? if [ $RETVAL -eq 0 ]; then log_success_msg else log_failure_msg fi timeout=2; # for stopping nicely while ( ps -p "$pid" -o comm 2>/dev/null | grep "^$prog$" 1>/dev/null 2>/dev/null ) && [ $timeout -ge 1 ] ; do sleep 1 timeout=$(($timeout - 1)) done [ $timeout -lt 1 ] && kill -9 "$pid" 1>/dev/null 2>&1 rm -f "$PIDFILE" "$LOCKFILE" else RETVAL=1 log_failure_msg "$prog shutdown - pidfile is empty" fi else RETVAL=0 log_success_msg "$prog shutdown - already stopped" fi return $RETVAL } do_status() { if [ -f "$PIDFILE" ]; then read pid < "$PIDFILE" if [ "$pid" != "" ]; then if ps -p "$pid" > /dev/null; then echo "$1 (pid $pid) is running..." return 0 fi echo "$1 stopped but pid file exists" return 1 fi fi if [ -f $LOCKFILE ]; then echo "$1 stopped but lockfile exists" return 2 fi echo "$1 is stopped" return 3 } do_restart() { do_stop do_start } case "$1" in start) do_start ;; stop) do_stop ;; restart|reload|force-reload) do_restart ;; condrestart|try-restart) [ -f $LOCKFILE ] && do_restart || : ;; status) do_status $prog ;; *) echo "Usage: $0 {start|stop|restart|status|reload|condrestart|try-restart}" exit 1 ;; esac exit 0 nordugrid-arc-6.14.0/src/services/acix/indexserver/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153434025300 xustar000000000000000030 mtime=1638455068.782102394 30 atime=1638455090.704431788 30 ctime=1638455100.947585695 nordugrid-arc-6.14.0/src/services/acix/indexserver/Makefile.in0000644000175000002070000007561014152153434025276 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/acix/indexserver DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/arc-acix-index.in $(srcdir)/arc-acix-index-start.in \ $(srcdir)/arc-acix-index.service.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = arc-acix-index arc-acix-index-start \ arc-acix-index.service CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for 
(dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(initddir)" "$(DESTDIR)$(pkgdatadir)" \ "$(DESTDIR)$(unitsdir)" SCRIPTS = $(initd_SCRIPTS) $(pkgdata_SCRIPTS) AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac DATA = $(units_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ 
EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ 
PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ @SYSV_SCRIPTS_ENABLED_FALSE@INDEXSERVER_SCRIPT = @SYSV_SCRIPTS_ENABLED_TRUE@INDEXSERVER_SCRIPT = arc-acix-index initd_SCRIPTS = $(INDEXSERVER_SCRIPT) @SYSTEMD_UNITS_ENABLED_FALSE@INDEXSERVER_UNIT = @SYSTEMD_UNITS_ENABLED_TRUE@INDEXSERVER_UNIT = arc-acix-index.service units_DATA = $(INDEXSERVER_UNIT) pkgdata_SCRIPTS = arc-acix-index-start SUBDIRS = 
$(TEST_DIR) dist_SUBDIRS = test all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/acix/indexserver/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/acix/indexserver/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): arc-acix-index: $(top_builddir)/config.status $(srcdir)/arc-acix-index.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-acix-index-start: $(top_builddir)/config.status $(srcdir)/arc-acix-index-start.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-acix-index.service: $(top_builddir)/config.status $(srcdir)/arc-acix-index.service.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-initdSCRIPTS: $(initd_SCRIPTS) @$(NORMAL_INSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(initddir)'"; \ $(MKDIR_P) "$(DESTDIR)$(initddir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(initddir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(initddir)$$dir" || exit $$?; \ } \ ; done uninstall-initdSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(initddir)'; $(am__uninstall_files_from_dir) install-pkgdataSCRIPTS: $(pkgdata_SCRIPTS) @$(NORMAL_INSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgdatadir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgdatadir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 
'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(pkgdatadir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(pkgdatadir)$$dir" || exit $$?; \ } \ ; done uninstall-pkgdataSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(pkgdatadir)'; $(am__uninstall_files_from_dir) mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-unitsDATA: $(units_DATA) @$(NORMAL_INSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(unitsdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(unitsdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(unitsdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(unitsdir)" || exit $$?; \ done uninstall-unitsDATA: @$(NORMAL_UNINSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(unitsdir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(SCRIPTS) $(DATA) installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(initddir)" "$(DESTDIR)$(pkgdatadir)" "$(DESTDIR)$(unitsdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-initdSCRIPTS install-pkgdataSCRIPTS \ install-unitsDATA install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-initdSCRIPTS uninstall-pkgdataSCRIPTS \ uninstall-unitsDATA .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am \ install-initdSCRIPTS install-man install-pdf install-pdf-am \ install-pkgdataSCRIPTS install-ps install-ps-am install-strip \ install-unitsDATA installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-am uninstall uninstall-am \ uninstall-initdSCRIPTS uninstall-pkgdataSCRIPTS \ uninstall-unitsDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/acix/indexserver/PaxHeaders.30264/test0000644000000000000000000000013214152153474024141 xustar000000000000000030 mtime=1638455100.970586041 30 atime=1638455103.996631509 30 ctime=1638455100.970586041 nordugrid-arc-6.14.0/src/services/acix/indexserver/test/0000755000175000002070000000000014152153474024203 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/acix/indexserver/test/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376026253 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.969586026 nordugrid-arc-6.14.0/src/services/acix/indexserver/test/Makefile.am0000644000175000002070000000036714152153376026246 0ustar00mockbuildmock00000000000000TESTSCRIPTS = test_system.py if ACIX_TESTS_ENABLED TESTS_ENVIRONMENT = PYTHONPATH=$(top_srcdir)/src/services:${top_srcdir}/src/utils/python $(TRIAL) TESTS = $(TESTSCRIPTS) else TESTS = endif check_SCRIPTS = $(TESTS) EXTRA_DIST = $(TESTSCRIPTS) nordugrid-arc-6.14.0/src/services/acix/indexserver/test/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153434026257 xustar000000000000000030 mtime=1638455068.830103115 30 atime=1638455090.751432494 30 ctime=1638455100.968586011 nordugrid-arc-6.14.0/src/services/acix/indexserver/test/Makefile.in0000644000175000002070000005403514152153434026253 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. 
# @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ @ACIX_TESTS_ENABLED_TRUE@TESTS = $(TESTSCRIPTS) subdir = src/services/acix/indexserver/test DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = 
$(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) am__tty_colors_dummy = \ mgn= red= grn= lgn= blu= brg= std=; \ am__color_tests=no am__tty_colors = { \ $(am__tty_colors_dummy); \ if test "X$(AM_COLOR_TESTS)" = Xno; then \ am__color_tests=no; \ elif test "X$(AM_COLOR_TESTS)" = Xalways; then \ am__color_tests=yes; \ elif test "X$$TERM" != Xdumb && { test -t 1; } 2>/dev/null; then \ am__color_tests=yes; \ fi; \ if test $$am__color_tests = yes; then \ red=''; \ grn=''; \ lgn=''; \ blu=''; \ mgn=''; \ brg=''; \ std=''; \ fi; \ } DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = 
@GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ 
PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ TESTSCRIPTS = test_system.py @ACIX_TESTS_ENABLED_TRUE@TESTS_ENVIRONMENT = PYTHONPATH=$(top_srcdir)/src/services:${top_srcdir}/src/utils/python $(TRIAL) check_SCRIPTS = $(TESTS) EXTRA_DIST = $(TESTSCRIPTS) all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ 
esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/acix/indexserver/test/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/acix/indexserver/test/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs tags TAGS: ctags CTAGS: cscope cscopelist: check-TESTS: $(TESTS) @failed=0; all=0; xfail=0; xpass=0; skip=0; \ srcdir=$(srcdir); export srcdir; \ list=' $(TESTS) '; \ $(am__tty_colors); \ if test -n "$$list"; then \ for tst in $$list; do \ if test -f ./$$tst; then dir=./; \ elif test -f $$tst; then dir=; \ else dir="$(srcdir)/"; fi; \ if $(TESTS_ENVIRONMENT) $${dir}$$tst $(AM_TESTS_FD_REDIRECT); then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *[\ \ ]$$tst[\ \ ]*) \ xpass=`expr $$xpass + 1`; \ failed=`expr $$failed + 1`; \ col=$$red; res=XPASS; \ ;; \ *) \ col=$$grn; res=PASS; \ ;; \ esac; \ elif test $$? -ne 77; then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *[\ \ ]$$tst[\ \ ]*) \ xfail=`expr $$xfail + 1`; \ col=$$lgn; res=XFAIL; \ ;; \ *) \ failed=`expr $$failed + 1`; \ col=$$red; res=FAIL; \ ;; \ esac; \ else \ skip=`expr $$skip + 1`; \ col=$$blu; res=SKIP; \ fi; \ echo "$${col}$$res$${std}: $$tst"; \ done; \ if test "$$all" -eq 1; then \ tests="test"; \ All=""; \ else \ tests="tests"; \ All="All "; \ fi; \ if test "$$failed" -eq 0; then \ if test "$$xfail" -eq 0; then \ banner="$$All$$all $$tests passed"; \ else \ if test "$$xfail" -eq 1; then failures=failure; else failures=failures; fi; \ banner="$$All$$all $$tests behaved as expected ($$xfail expected $$failures)"; \ fi; \ else \ if test "$$xpass" -eq 0; then \ banner="$$failed of $$all $$tests failed"; \ else \ if test "$$xpass" -eq 1; then passes=pass; else passes=passes; fi; \ banner="$$failed of $$all $$tests did not behave as expected ($$xpass unexpected $$passes)"; \ fi; \ fi; \ dashes="$$banner"; \ skipped=""; \ if test "$$skip" -ne 0; then \ if test "$$skip" -eq 1; then \ skipped="($$skip test was not run)"; \ else \ skipped="($$skip tests were not run)"; \ fi; \ test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$skipped"; \ fi; \ report=""; \ if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \ report="Please report to $(PACKAGE_BUGREPORT)"; \ test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$report"; \ fi; \ dashes=`echo "$$dashes" | sed s/./=/g`; \ if test "$$failed" -eq 0; then \ col="$$grn"; \ else \ col="$$red"; \ fi; \ echo "$${col}$$dashes$${std}"; \ echo "$${col}$$banner$${std}"; \ test -z "$$skipped" || echo "$${col}$$skipped$${std}"; \ test -z "$$report" || echo "$${col}$$report$${std}"; \ echo "$${col}$$dashes$${std}"; \ test "$$failed" -eq 0; \ else :; fi distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 
's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am $(MAKE) $(AM_MAKEFLAGS) $(check_SCRIPTS) $(MAKE) $(AM_MAKEFLAGS) check-TESTS check: check-am all-am: Makefile installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: check-am install-am install-strip .PHONY: all all-am check check-TESTS check-am clean clean-generic \ clean-libtool cscopelist-am ctags-am distclean \ distclean-generic distclean-libtool distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags-am uninstall \ uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/acix/indexserver/test/PaxHeaders.30264/test_system.py0000644000000000000000000000013214152153376027154 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.970586041 nordugrid-arc-6.14.0/src/services/acix/indexserver/test/test_system.py0000644000175000002070000000764514152153376027155 0ustar00mockbuildmock00000000000000""" setup two cache resources, and one index resources, and try out the whole things and see if it works :-) """ import hashlib from twisted.trial import unittest from twisted.internet import reactor, defer from twisted.web import resource, server from acix.core import indexclient from acix.scanner import cache, cacheresource from acix.indexserver import index, indexresource TEST_URLS1 = [ 'srm://srm.ndgf.org/pnfs/ndgf.org/data/ops/sam-test/testfile', 'gsiftp://grid.tsl.uu.se:2811/storage/sam/testfile'] TEST_URLS2 = [ 'lfc://lfc1.ndgf.org//grid/ops.ndgf.org/sam/testfile', 'srm://srm.ndgf.org/pnfs/ndgf.org/data/ops/sam-test/testfile'] class TestScanner(object): def __init__(self, urls): self.urls = urls def dir(self): return "testscanner (no dir)" def scan(self, filter): for url in self.urls: filter(hashlib.sha1(url.encode()).hexdigest()) return defer.succeed(None) class SystemTest(unittest.TestCase): cport1 = 4080 cport2 = 4081 xport = 4082 @defer.inlineCallbacks def setUp(self): # cheap trick to get multiple hostnames on one host self.cache_urls = [ 'http://localhost:%i/cache' % self.cport1, 'http://127.0.0.1:%i/cache' % self.cport2 ] scanner1 = TestScanner(TEST_URLS1) scanner2 = TestScanner(TEST_URLS2) self.cs1 = cache.Cache(scanner1, 10000, 60, '') self.cs2 = cache.Cache(scanner2, 10000, 60, 'http://127.0.0.1/arex/cache') self.idx = index.CacheIndex(self.cache_urls) cr1 = cacheresource.CacheResource(self.cs1) cr2 = cacheresource.CacheResource(self.cs2) idxr = indexresource.IndexResource(self.idx) c1siteroot = resource.Resource() 
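        # The two cache resources are exposed under /cache on separate twisted.web sites and ports, and the index resource under /index on a third port, so the index can fetch both cache filters over plain HTTP during the test.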
c1siteroot.putChild(b'cache', cr1) c1site = server.Site(c1siteroot) c2siteroot = resource.Resource() c2siteroot.putChild(b'cache', cr2) c2site = server.Site(c2siteroot) idx_siteroot = resource.Resource() idx_siteroot.putChild(b'index', idxr) idx_site = server.Site(idx_siteroot) yield self.cs1.startService() yield self.cs2.startService() self.iport1 = reactor.listenTCP(self.cport1, c1site) self.iport2 = reactor.listenTCP(self.cport2, c2site) #yield self.idx.startService() yield self.idx.renewIndex() # ensure that we have fetched cache self.iport3 = reactor.listenTCP(self.xport, idx_site) self.index_url = "http://localhost:%i/index" % (self.xport) @defer.inlineCallbacks def tearDown(self): yield self.cs1.stopService() yield self.cs2.stopService() #yield self.idx.stopService() yield self.iport1.stopListening() yield self.iport2.stopListening() yield self.iport3.stopListening() @defer.inlineCallbacks def testIndexQuery(self): urls1 = [ TEST_URLS1[1] ] result = yield indexclient.queryIndex(self.index_url, urls1) self.failUnlessIn(urls1[0], result) locations = result[urls1[0]] self.failUnlessEqual(locations, ['localhost']) urls2 = [ TEST_URLS1[0] ] result = yield indexclient.queryIndex(self.index_url, urls2) self.failUnlessIn(urls2[0], result) locations = result[urls2[0]] self.failUnlessEqual(len(locations), 2) self.failUnlessIn('localhost', locations) self.failUnlessIn('http://127.0.0.1/arex/cache', locations) urls3 = [ 'srm://host/no_such_file' ] result = yield indexclient.queryIndex(self.index_url, urls3) self.failUnlessIn(urls3[0], result) self.failUnlessEqual(result, {urls3[0]: []}) urls4 = [ TEST_URLS2[0] ] result = yield indexclient.queryIndex(self.index_url, urls4) self.failUnlessIn(urls4[0], result) locations = result[urls4[0]] self.failUnlessEqual(locations, ['http://127.0.0.1/arex/cache']) nordugrid-arc-6.14.0/src/services/acix/indexserver/PaxHeaders.30264/__init__.py0000644000000000000000000000013114152153376025350 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.847584193 nordugrid-arc-6.14.0/src/services/acix/indexserver/__init__.py0000644000175000002070000000014014152153376025331 0ustar00mockbuildmock00000000000000from acix.indexserver import indexsetup createApplication = indexsetup.createIndexApplication nordugrid-arc-6.14.0/src/services/acix/indexserver/PaxHeaders.30264/arc-acix-index-start.in0000644000000000000000000000013114152153376027516 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.949585726 nordugrid-arc-6.14.0/src/services/acix/indexserver/arc-acix-index-start.in0000644000175000002070000000352714152153376027513 0ustar00mockbuildmock00000000000000#!/bin/sh PIDFILE=/run/arc-acix-index.pid DEFAULT_LOGFILE=/var/log/arc/arc-acix-index.log prog=@TWISTD@ RUN=yes send_systemd_notify() { # return if no systemd-notify found type systemd-notify >/dev/null 2>&1 || return systemd-notify "$@" } log_failure_msg() { send_systemd_notify --status "Error: $@" echo $@ } # sysconfig files if [ -r /etc/sysconfig/nordugrid ]; then . /etc/sysconfig/nordugrid elif [ -r /etc/default/nordugrid ]; then . /etc/default/nordugrid fi if [ -r /etc/sysconfig/arc-acix-index ]; then . /etc/sysconfig/arc-acix-index elif [ -r /etc/default/arc-acix-index ]; then . /etc/default/arc-acix-index fi if [ "$RUN" != "yes" ] ; then echo "arc-acix-index service is disabled, please adjust the configuration to your" echo "needs and then set RUN to 'yes' in /etc/default/arc-acix-index to enable it." 
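    # Service is disabled via the RUN setting - exit with success status.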
exit 0 fi # ARC_CONFIG if [ "x$ARC_CONFIG" = "x" ] && [ -r /etc/arc.conf ]; then ARC_CONFIG=/etc/arc.conf fi if [ ! -r "$ARC_CONFIG" ]; then echo "ARC configuration not found (usually /etc/arc.conf)" exit 1 fi # Check if service is defined in configuration libexecdir="${ARC_LOCATION:-@prefix@}/@pkglibexecsubdir@/" $libexecdir/arcconfig-parser --config $ARC_CONFIG --block acix-index > /dev/null 2>&1 if [ $? -eq 1 ]; then log_failure_msg "Block [acix-index] not defined in configuration" exit 1 fi LOGD=`dirname $DEFAULT_LOGFILE` LOGN=`basename $DEFAULT_LOGFILE` if [ ! -d $LOGD ]; then mkdir -p $LOGD fi APPSTART=" from acix import indexserver; from twisted.python import log; from twisted.python.logfile import LogFile; application = indexserver.createApplication(); log.startLogging(LogFile('$LOGN', '$LOGD', rotateLength=1000000, maxRotatedFiles=25)) " TACFILE=`mktemp` || exit 1 echo $APPSTART > $TACFILE exec $prog --pidfile $PIDFILE -y $TACFILE -l $DEFAULT_LOGFILE nordugrid-arc-6.14.0/src/services/acix/indexserver/PaxHeaders.30264/arc-acix-index.service.in0000644000000000000000000000013214152153376030023 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.950585741 nordugrid-arc-6.14.0/src/services/acix/indexserver/arc-acix-index.service.in0000644000175000002070000000034314152153376030010 0ustar00mockbuildmock00000000000000[Unit] Description=ARC cache index - index server After=network.target local_fs.target [Service] Type=forking PIDFile=/run/arc-acix-index.pid ExecStart=/usr/share/arc/arc-acix-index-start [Install] WantedBy=multi-user.target nordugrid-arc-6.14.0/src/services/acix/indexserver/PaxHeaders.30264/indexsetup.py0000644000000000000000000000013214152153376026002 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.850584238 nordugrid-arc-6.14.0/src/services/acix/indexserver/indexsetup.py0000644000175000002070000000444514152153376025776 0ustar00mockbuildmock00000000000000import os from twisted.application import internet, service from twisted.web import resource, server from acix.core import ssl from acix.indexserver import index, indexresource from arc.utils import config # -- constants SSL_DEFAULT = True INDEX_TCP_PORT = 6080 INDEX_SSL_PORT = 6443 DEFAULT_INDEX_REFRESH_INTERVAL = 301 # seconds between updating cache ARC_CONF = '/etc/arc.conf' DEFAULT_HOST_KEY = '/etc/grid-security/hostkey.pem' DEFAULT_HOST_CERT = '/etc/grid-security/hostcert.pem' DEFAULT_CERTIFICATES = '/etc/grid-security/certificates' def getCacheConf(): '''Return a tuple of (cachescanners, x509_host_key, x509_host_cert, x509_cert_dir)''' config.parse_arc_conf(os.environ['ARC_CONFIG'] if 'ARC_CONFIG' in os.environ else ARC_CONF) cachescanners = config.get_value('cachescanner', 'acix-index', force_list=True) x509_host_key = config.get_value('x509_host_key', ['acix-scanner', 'common']) or DEFAULT_HOST_KEY x509_host_cert = config.get_value('x509_host_cert', ['acix-scanner', 'common']) or DEFAULT_HOST_CERT x509_cert_dir = config.get_value('x509_cert_dir', ['acix-scanner', 'common']) or DEFAULT_CERTIFICATES return (cachescanners, x509_host_key, x509_host_cert, x509_cert_dir) def createIndexApplication(use_ssl=SSL_DEFAULT, port=None, refresh_interval=DEFAULT_INDEX_REFRESH_INTERVAL): # monkey-patch fix for dealing with low url-length limit from twisted.protocols import basic basic.LineReceiver.MAX_LENGTH = 65535 cachescanners, x509_host_key, x509_host_cert, x509_cert_dir = getCacheConf() if not cachescanners: return None ci = 
index.CacheIndex(cachescanners, refresh_interval) siteroot = resource.Resource() dataroot = resource.Resource() dataroot.putChild(b'index', indexresource.IndexResource(ci)) siteroot.putChild(b'data', dataroot) site = server.Site(siteroot) application = service.Application("arc-indexserver") ci.setServiceParent(application) if use_ssl: cf = ssl.ContextFactory(key_path=x509_host_key, cert_path=x509_host_cert, ca_dir=x509_cert_dir) internet.SSLServer(port or INDEX_SSL_PORT, site, cf).setServiceParent(application) else: internet.TCPServer(port or INDEX_TCP_PORT, site).setServiceParent(application) return application nordugrid-arc-6.14.0/src/services/acix/indexserver/PaxHeaders.30264/index.py0000644000000000000000000000013214152153376024721 xustar000000000000000030 mtime=1638455038.431646365 30 atime=1638455038.510647552 30 ctime=1638455100.848584208 nordugrid-arc-6.14.0/src/services/acix/indexserver/index.py0000644000175000002070000000463014152153376024711 0ustar00mockbuildmock00000000000000""" resource to fetch a bloom filter from """ import hashlib try: import urllib.parse as urlparse except ImportError: import urlparse from twisted.python import log from twisted.internet import defer, task from twisted.application import service from acix.core import bloomfilter, cacheclient, ssl class CacheIndex(service.Service): def __init__(self, urls, refresh_interval=300): self.urls = urls self.refresh_interval = refresh_interval # seconds self.index_task = task.LoopingCall(self.renewIndex) self.filters = {} # host -> filter mapping def startService(self): self.index_task.start(self.refresh_interval) def stopService(self): self.index_task.stop() def renewIndex(self): log.msg("Renewing index") dl = [] cf = ssl.ContextFactory(verify=True) for url in self.urls: log.msg("Fetching cache from: " + url) d = cacheclient.retrieveCache(url, cf) d.addCallback(self._updateCache, url) d.addErrback(self._failedCacheRetrieval, url) dl.append(d) return defer.DeferredList(dl) def _updateCache(self, result, url): hashes, cache_time, cache, cache_url = result if not cache_url: host = urlparse.urlparse(url).netloc if ':' in host: host = host.split(':')[0] cache_url = host if not cache: log.msg("No cache info returned from %s" % cache_url) return try: size = len(cache) * 8 self.filters[cache_url] = bloomfilter.BloomFilter(size=size, bits=cache, hashes=hashes) except Exception as e: log.err(e) log.msg("New cache added for " + cache_url) def _failedCacheRetrieval(self, failure, url): log.msg("Failed to retrieve cache index from %s. 
Reason: %s" % (url, failure.getErrorMessage())) def query(self, keys): results = {} for host, filter_ in self.filters.items(): for key in keys: khash = hashlib.sha1(key.encode()).hexdigest() hosts = results.setdefault(key, []) log.msg("Query: %s for %s" % (khash, host)) if khash in filter_: log.msg("Found match for %s at %s" % (key, host)) hosts.append(host) return results nordugrid-arc-6.14.0/src/services/acix/PaxHeaders.30264/__init__.py0000644000000000000000000000013014152153376023011 xustar000000000000000029 mtime=1638455038.43064635 29 atime=1638455038.43064635 30 ctime=1638455100.836584028 nordugrid-arc-6.14.0/src/services/acix/__init__.py0000644000175000002070000000000014152153376022766 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/acix/PaxHeaders.30264/README0000644000000000000000000000013114152153376021561 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.851584253 nordugrid-arc-6.14.0/src/services/acix/README0000644000175000002070000001012414152153376021545 0ustar00mockbuildmock00000000000000ARC Cache IndeX - allows publishing of cache contents from several sites to an index, which can be queried for data-aware brokering. It consists of two components: a cache scanner which runs alongside A-REX and gathers cache content information, and a cache index to which the scanner publishes the content using a Bloom filter to reduce the data volume. Several cache servers can publish to one index. Required software: * Python. Only 2.6 and 2.7 have been tested and are supported. * Twisted Core and twisted web (python-twisted-core and python-twisted-web) * pyOpenSSL (package name python-openssl in Ubuntu) ACIX Cache Scanner: ----------------- This is the component which runs on each CE collecting cache information. Usually no configuration is necessary, but it is possible to specify a custom logfile location by setting the logfile parameter in arc.conf, like this: --- [acix/cachescanner] logfile="/tmp/arc-cacheserver.log" --- Starting instructions: /etc/init.d/acix-cache start Update your rc* catalogs accordingly. You can stop the daemon with: $ /etc/init.d/acix-cache stop You can inspect the log file to check that everything is running. It is located at /var/log/arc/acix-cachescanner.log. An initial warning about the creation of zombie processes is typically generated (no zombie processes from the program have been observed). If any zombie processes are observed, please file a bug report. Send the URL at which your cache filter is located to the index admin(s). Unless you changed anything in the configuration, this will be: https://HOST_FQDN:5443/data/cache This is important as the index server pulls the cache filter from your site (the filter doesn't get registered automatically). If you have both arex_mount_point and at least one cacheaccess rule defined in arc.conf then the URL for remote cache access will be sent to the index, otherwise just the hostname is used. ACIX Index Server: ----------------- This is the index of registered caches which is queried by users to discover locations of cached files. To configure, edit /etc/arc.conf to include cache server URLs corresponding to the sites to be indexed. --- [acix/indexserver] cachescanner="https://myhost:5443/data/cache" cachescanner="https://anotherhost:5443/data/cache" --- Starting instructions: $ /etc/init.d/acix-index start Update your rc* catalogs accordingly. 
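A systemd unit file (arc-acix-index.service) is also shipped with this package; if it is installed on your system, the index server can alternatively be enabled and started with, e.g.: $ systemctl enable --now arc-acix-index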
You can stop the daemon with: $ /etc/init.d/acix-index stop A log file is at /var/log/arc/acix-index.log. By default the index server will listen on port 6443 (ssl+http) so you need to open this port (or the configured port) in the firewall. It is possible to configure port, use of SSL, and the index refresh interval. See the indexsetup.py file (a bit of Python understanding is required). Clients: ------- To query an index server, construct a URL, like this: https://cacheindex.ndgf.org:6443/data/index?url=http://www.nordugrid.org:80/data/echo.sh Here you ask the index service located at https://cacheindex.ndgf.org:6443/data/index for the location(s) of the file http://www.nordugrid.org:80/data/echo.sh It is possible to query for multiple files by comma-separating the files, e.g.: index?url=http://www.nordugrid.org:80/data/echo.sh,http://www.nordugrid.org:80/data/echo.sh Remember to quote/urlencode the strings when performing the GET (wget and curl will do this automatically, but most HTTP libraries won't). The result is a JSON-encoded data structure with the top level structure being a dictionary/hash-table with the mapping: url -> [machines], where [machines] is a list of the machines on which the files are cached. You should always use a JSON parser to decode the result (the string might be escaped). If a machine has enabled remote cache access then a URL at which cache files may be accessed is shown, otherwise just the hostname is used. To access a cached file remotely, simply append the URL of the original file to the cache access endpoint and call HTTP GET (or use wget, curl, arccp,...), e.g. https://arex.host/arex/cache/http://www.nordugrid.org:80/data/echo.sh Some encoding of the original URL may be necessary depending on the tool you use. nordugrid-arc-6.14.0/src/services/acix/PaxHeaders.30264/core0000644000000000000000000000013214152153474021554 xustar000000000000000030 mtime=1638455100.872584569 30 atime=1638455103.996631509 30 ctime=1638455100.872584569 nordugrid-arc-6.14.0/src/services/acix/core/0000755000175000002070000000000014152153474021616 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/acix/core/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376023665 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.869584524 nordugrid-arc-6.14.0/src/services/acix/core/Makefile.am0000644000175000002070000000005214152153376023650 0ustar00mockbuildmock00000000000000SUBDIRS = $(TEST_DIR) dist_SUBDIRS = test nordugrid-arc-6.14.0/src/services/acix/core/PaxHeaders.30264/bloomfilter.py0000644000000000000000000000013114152153376024521 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.839584073 nordugrid-arc-6.14.0/src/services/acix/core/bloomfilter.py0000644000175000002070000000651314152153376024514 0ustar00mockbuildmock00000000000000""" Bloom Filter for Acix. Heavily inspired from: http://stackoverflow.com/questions/311202/modern-high-performance-bloom-filter-in-python but modified to use bitarray instead of BitVector, as serialization utterly sucks for the latter. The calculateSize is more or less copied from pybloom (which also doesn't support serialization and restore in a sensible way).
The hash library is from: http://www.partow.net/programming/hashfunctions/index.html """ from __future__ import print_function import math from acix.core import bitvector, hashes # Note: These names are used to identify hashes used to generate a bloom # filter between machines, i.e., they are used in the protocol. # Do NOT change unless you are REALLY certain you know what you are doing HASHES = { 'rs' : hashes.RSHash, 'js' : hashes.JSHash, 'pjw' : hashes.PJWHash, 'elf' : hashes.ELFHash, 'bkdr' : hashes.BKDRHash, 'sdbm' : hashes.SDBMHash, 'djb' : hashes.DJBHash, 'dek' : hashes.DEKHash, 'bp' : hashes.BPHash, 'fnv' : hashes.FNVHash, } # These hashes have been tested to be reasonably fast # By all means try to avoid the rs hash, as it is awfully slow. DEFAULT_HASHES = [ 'dek', 'elf', 'djb', 'sdbm' ] def calculateSize(capacity, error_rate=0.001): slices = math.ceil(math.log(1 / error_rate, 2)) # the error_rate constraint assumes a fill rate of 1/2 # so we double the capacity to simplify the API bits = math.ceil( (2 * capacity * abs(math.log(error_rate))) / (slices * (math.log(2) ** 2)) ) size = int(slices * bits) ROUND_TO = 32 # make sure we return a multiple of 32 (otherwise bitvector serialization will explode) if size % ROUND_TO != 0: mp = size // ROUND_TO + 1 size = mp * ROUND_TO return size class BloomFilter(object): def __init__(self, size=None, bits=None, hashes=None): self.size = size if bits is None: self.bits = bitvector.BitVector(size) else: assert size == len(bits) * 8, "Size and bit length does not match (%i,%i)" % (size, len(bits)) self.bits = bitvector.BitVector(size, bits) self.used_hashes = [] self.hashes = [] if hashes is None: hashes = DEFAULT_HASHES[:] for hash in hashes: self.used_hashes.append(hash) self.hashes.append(HASHES[hash]) def __contains__(self, key): for i in self._indexes(key): if not self.bits[i]: return False return True def add(self, key): for i in self._indexes(key): self.bits[i] = 1 def _indexes(self, key): ords = [ ord(c) for c in key ] return [ hash(ords) % self.size for hash in self.hashes ] def get_hashes(self): return self.used_hashes[:] def serialize(self): try: return self.bits.tobytes() except AttributeError: return self.bits.tostring() if __name__ == '__main__': import time from acix.scanner import pscan try: scanner = pscan.CacheScanner() except IOError: scanner = pscan.CacheScanner('test/cache') bf = BloomFilter(1000672) t0 = time.time() scanner.scan(bf.add) td = time.time() - t0 print("Time taken for bloom filter build: %s" % td) nordugrid-arc-6.14.0/src/services/acix/core/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153434023672 xustar000000000000000030 mtime=1638455068.679100846 30 atime=1638455090.679431412 30 ctime=1638455100.868584509 nordugrid-arc-6.14.0/src/services/acix/core/Makefile.in0000644000175000002070000006112714152153434023666 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
@SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/acix/core DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive 
am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ 
ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = 
@MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ 
pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ SUBDIRS = $(TEST_DIR) dist_SUBDIRS = test all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/acix/core/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/acix/core/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/acix/core/PaxHeaders.30264/ssl.py0000644000000000000000000000013114152153376023004 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.842584118 nordugrid-arc-6.14.0/src/services/acix/core/ssl.py0000644000175000002070000000331714152153376022776 0ustar00mockbuildmock00000000000000from __future__ import print_function import os from OpenSSL import SSL DEFAULT_HOST_KEY = '/etc/grid-security/hostkey.pem' DEFAULT_HOST_CERT = '/etc/grid-security/hostcert.pem' DEFAULT_CERTIFICATES = '/etc/grid-security/certificates' class ContextFactory(object): def __init__(self, key_path=DEFAULT_HOST_KEY, cert_path=DEFAULT_HOST_CERT, verify=False, ca_dir=None): self.key_path = key_path self.cert_path = cert_path self.verify = verify self.ca_dir = ca_dir if self.verify and ca_dir is None: self.ca_dir = DEFAULT_CERTIFICATES self.ctx = None def getContext(self): if self.ctx is not None: return self.ctx ctx = SSL.Context(SSL.SSLv23_METHOD) # this also allows tls 1.0 ctx.set_options(SSL.OP_NO_SSLv2) # ssl2 is unsafe ctx.set_options(SSL.OP_NO_SSLv3) # ssl3 is also unsafe ctx.use_privatekey_file(self.key_path) ctx.use_certificate_file(self.cert_path) ctx.check_privatekey() # sanity check def verify_callback(conn, x509, error_number, error_depth, allowed): # just return what openssl thinks is right return allowed if self.verify: ctx.set_verify(SSL.VERIFY_PEER, verify_callback) calist = [ ca for ca in os.listdir(self.ca_dir) if ca.endswith('.0') ] for ca in calist: # openssl wants absolute paths ca = os.path.join(self.ca_dir, ca) ctx.load_verify_locations(ca) if self.ctx is None: self.ctx = ctx return ctx if __name__ == '__main__': cf = ContextFactory() ctx = cf.getContext() print(ctx) 
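The ContextFactory above is the object the acix services hand to Twisted when listening on (or connecting to) the ssl+http port. The snippet below is a minimal sketch of that pattern, not code shipped with ACIX; it assumes Twisted is installed, that the host key and certificate exist at the default /etc/grid-security paths, and that port 6443 is free.

# Illustration: serving a trivial HTTPS resource through ContextFactory (not part of ACIX itself).
from twisted.internet import reactor
from twisted.web import resource, server

from acix.core.ssl import ContextFactory

class Ping(resource.Resource):
    isLeaf = True
    def render_GET(self, request):
        return b"pong\n"

# verify=True loads the CAs from /etc/grid-security/certificates and asks OpenSSL
# to verify the peer; the key and certificate default to the host credentials.
context_factory = ContextFactory(verify=True)
reactor.listenSSL(6443, server.Site(Ping()), context_factory)
reactor.run()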
nordugrid-arc-6.14.0/src/services/acix/core/PaxHeaders.30264/test0000644000000000000000000000013214152153474022533 xustar000000000000000030 mtime=1638455100.889584824 30 atime=1638455103.996631509 30 ctime=1638455100.889584824 nordugrid-arc-6.14.0/src/services/acix/core/test/0000755000175000002070000000000014152153474022575 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/acix/core/test/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376024644 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.888584809 nordugrid-arc-6.14.0/src/services/acix/core/test/Makefile.am0000644000175000002070000000033514152153376024633 0ustar00mockbuildmock00000000000000TESTSCRIPTS = test_bloomfilter.py if ACIX_TESTS_ENABLED TESTS_ENVIRONMENT = PYTHONPATH=$(top_srcdir)/src/services $(TRIAL) TESTS = $(TESTSCRIPTS) else TESTS = endif check_SCRIPTS = $(TESTS) EXTRA_DIST = $(TESTSCRIPTS) nordugrid-arc-6.14.0/src/services/acix/core/test/PaxHeaders.30264/test_bloomfilter.py0000644000000000000000000000013114152153376026537 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.889584824 nordugrid-arc-6.14.0/src/services/acix/core/test/test_bloomfilter.py0000644000175000002070000000242514152153376026530 0ustar00mockbuildmock00000000000000from twisted.trial import unittest from acix.core import bloomfilter KEYS = ['one', 'two', 'three', 'four'] FALSE_KEYS = ['five', 'six', 'seven' ] SIZE = 160 class BloomFilterTestCase(unittest.TestCase): def setUp(self): self.bf = bloomfilter.BloomFilter(SIZE) def testContains(self): for key in KEYS: self.bf.add(key) for key in KEYS: self.failUnlessIn(key, self.bf) for key in FALSE_KEYS: self.failIfIn(key, self.bf) def testSerialization(self): for key in KEYS: self.bf.add(key) s = self.bf.serialize() bf2 = bloomfilter.BloomFilter(SIZE, s) for key in KEYS: self.failUnlessIn(key, bf2) for key in FALSE_KEYS: self.failIfIn(key, bf2) def testReconstruction(self): # create filter with some non-standard hashes... bf1 = bloomfilter.BloomFilter(SIZE, hashes=['js', 'dek', 'sdbm']) for key in KEYS: bf1.add(key) # just to be sure for key in KEYS: self.failUnlessIn(key, bf1) for key in FALSE_KEYS: self.failIfIn(key, bf1) # reconstruct bf2 = bloomfilter.BloomFilter(SIZE, bits=bf1.serialize(), hashes=bf1.get_hashes()) for key in KEYS: self.failUnlessIn(key, bf2) for key in FALSE_KEYS: self.failIfIn(key, bf2) nordugrid-arc-6.14.0/src/services/acix/core/test/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153434024651 xustar000000000000000030 mtime=1638455068.727101568 30 atime=1638455090.692431607 30 ctime=1638455100.888584809 nordugrid-arc-6.14.0/src/services/acix/core/test/Makefile.in0000644000175000002070000005375614152153434024656 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
@SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ @ACIX_TESTS_ENABLED_TRUE@TESTS = $(TESTSCRIPTS) subdir = src/services/acix/core/test DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) am__tty_colors_dummy = \ mgn= red= grn= lgn= blu= brg= std=; \ am__color_tests=no am__tty_colors = { \ $(am__tty_colors_dummy); \ if test "X$(AM_COLOR_TESTS)" = Xno; then \ am__color_tests=no; \ elif 
test "X$(AM_COLOR_TESTS)" = Xalways; then \ am__color_tests=yes; \ elif test "X$$TERM" != Xdumb && { test -t 1; } 2>/dev/null; then \ am__color_tests=yes; \ fi; \ if test $$am__color_tests = yes; then \ red=''; \ grn=''; \ lgn=''; \ blu=''; \ mgn=''; \ brg=''; \ std=''; \ fi; \ } DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = 
@GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ 
XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ TESTSCRIPTS = test_bloomfilter.py @ACIX_TESTS_ENABLED_TRUE@TESTS_ENVIRONMENT = PYTHONPATH=$(top_srcdir)/src/services $(TRIAL) check_SCRIPTS = $(TESTS) EXTRA_DIST = $(TESTSCRIPTS) all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/acix/core/test/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/acix/core/test/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs tags TAGS: ctags CTAGS: cscope cscopelist: check-TESTS: $(TESTS) @failed=0; all=0; xfail=0; xpass=0; skip=0; \ srcdir=$(srcdir); export srcdir; \ list=' $(TESTS) '; \ $(am__tty_colors); \ if test -n "$$list"; then \ for tst in $$list; do \ if test -f ./$$tst; then dir=./; \ elif test -f $$tst; then dir=; \ else dir="$(srcdir)/"; fi; \ if $(TESTS_ENVIRONMENT) $${dir}$$tst $(AM_TESTS_FD_REDIRECT); then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *[\ \ ]$$tst[\ \ ]*) \ xpass=`expr $$xpass + 1`; \ failed=`expr $$failed + 1`; \ col=$$red; res=XPASS; \ ;; \ *) \ col=$$grn; res=PASS; \ ;; \ esac; \ elif test $$? -ne 77; then \ all=`expr $$all + 1`; \ case " $(XFAIL_TESTS) " in \ *[\ \ ]$$tst[\ \ ]*) \ xfail=`expr $$xfail + 1`; \ col=$$lgn; res=XFAIL; \ ;; \ *) \ failed=`expr $$failed + 1`; \ col=$$red; res=FAIL; \ ;; \ esac; \ else \ skip=`expr $$skip + 1`; \ col=$$blu; res=SKIP; \ fi; \ echo "$${col}$$res$${std}: $$tst"; \ done; \ if test "$$all" -eq 1; then \ tests="test"; \ All=""; \ else \ tests="tests"; \ All="All "; \ fi; \ if test "$$failed" -eq 0; then \ if test "$$xfail" -eq 0; then \ banner="$$All$$all $$tests passed"; \ else \ if test "$$xfail" -eq 1; then failures=failure; else failures=failures; fi; \ banner="$$All$$all $$tests behaved as expected ($$xfail expected $$failures)"; \ fi; \ else \ if test "$$xpass" -eq 0; then \ banner="$$failed of $$all $$tests failed"; \ else \ if test "$$xpass" -eq 1; then passes=pass; else passes=passes; fi; \ banner="$$failed of $$all $$tests did not behave as expected ($$xpass unexpected $$passes)"; \ fi; \ fi; \ dashes="$$banner"; \ skipped=""; \ if test "$$skip" -ne 0; then \ if test "$$skip" -eq 1; then \ skipped="($$skip test was not run)"; \ else \ skipped="($$skip tests were not run)"; \ fi; \ test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$skipped"; \ fi; \ report=""; \ if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \ report="Please report to $(PACKAGE_BUGREPORT)"; \ test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \ dashes="$$report"; \ fi; \ dashes=`echo "$$dashes" | sed s/./=/g`; \ if test "$$failed" -eq 0; then \ col="$$grn"; \ else \ col="$$red"; \ fi; \ echo "$${col}$$dashes$${std}"; \ echo "$${col}$$banner$${std}"; \ test -z "$$skipped" || echo "$${col}$$skipped$${std}"; \ test -z "$$report" || echo "$${col}$$report$${std}"; \ echo "$${col}$$dashes$${std}"; \ test "$$failed" -eq 0; \ else :; fi distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo 
"$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am $(MAKE) $(AM_MAKEFLAGS) $(check_SCRIPTS) $(MAKE) $(AM_MAKEFLAGS) check-TESTS check: check-am all-am: Makefile installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: check-am install-am install-strip .PHONY: all all-am check check-TESTS check-am clean clean-generic \ clean-libtool cscopelist-am ctags-am distclean \ distclean-generic distclean-libtool distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags-am uninstall \ uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/acix/core/PaxHeaders.30264/__init__.py0000644000000000000000000000013014152153376023741 xustar000000000000000029 mtime=1638455038.43064635 29 atime=1638455038.43064635 30 ctime=1638455100.837584043 nordugrid-arc-6.14.0/src/services/acix/core/__init__.py0000644000175000002070000000000014152153376023716 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/acix/core/PaxHeaders.30264/cacheclient.py0000644000000000000000000000013114152153376024445 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.840584088 nordugrid-arc-6.14.0/src/services/acix/core/cacheclient.py0000644000175000002070000000361614152153376024441 0ustar00mockbuildmock00000000000000""" Client for retrieving cache. """ try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse from twisted.python import log from twisted.internet import reactor from twisted.web import client HEADER_HASHES = b'x-hashes' HEADER_CACHE_TIME = b'x-cache-time' HEADER_CACHE_URL = b'x-cache-url' class InvalidCacheReplyError(Exception): pass def retrieveCache(url, contextFactory=None): # mostly copied from twisted.web.client """ Returns a deferred, which will fire with a tuple consisting of a the hashes, generation-time, and the cache. """ u = urlparse(url) factory = client.HTTPClientFactory(url.encode()) factory.noisy = False if u.scheme == 'https': from twisted.internet import ssl if contextFactory is None: contextFactory = ssl.ClientContextFactory() reactor.connectSSL(u.hostname, u.port, factory, contextFactory) else: reactor.connectTCP(u.hostname, u.port, factory) factory.deferred.addCallback(_gotCache, factory, url) return factory.deferred def _gotCache(result, factory, url): log.msg("Got reply from cache service %s" % url) try: hashes = factory.response_headers[HEADER_HASHES] cache_time = factory.response_headers[HEADER_CACHE_TIME] except KeyError as e: raise InvalidCacheReplyError(str(e)) try: cache_url = factory.response_headers[HEADER_CACHE_URL][0].decode() except KeyError as e: # Site may not expose cache to outside cache_url = '' #log.msg("Raw cache headers. Hashes: %s. Cache time: %s." % (hashes, cache_time)) assert len(hashes) == 1, "Got more than one hash header" assert len(cache_time) == 1, "Got more than one cache time header" hashes = hashes[0].decode().split(',') cache_time = float(cache_time[0].decode()) return hashes, cache_time, result, cache_url nordugrid-arc-6.14.0/src/services/acix/core/PaxHeaders.30264/bitvector.py0000644000000000000000000000013114152153376024204 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.838584058 nordugrid-arc-6.14.0/src/services/acix/core/bitvector.py0000644000175000002070000000243514152153376024176 0ustar00mockbuildmock00000000000000""" Custom bitvector implementation, as most other suck. Both BitVector and bitarray has problems with serialization, which is rather critical for us. There might be endian issues. 
Author: Henrik Thostrup Jensen """ import array ARRAY_TYPE = 'B' TYPE_SIZE = 8 class BitVector(object): def __init__(self, n_bits, bits=None): assert n_bits % TYPE_SIZE == 0, "Size must be a multiple of %i" % TYPE_SIZE if bits is None: self.bits = array.array(ARRAY_TYPE, [0] * (n_bits // TYPE_SIZE)) else: assert n_bits == len(bits) * TYPE_SIZE, "Size and given bits does not match" self.bits = array.array(ARRAY_TYPE) try: self.bits.frombytes(bits) except AttributeError: self.bits.fromstring(bits) def __setitem__(self, index, value): assert value == 1, "Only possible to set bits" l = self.bits[index // TYPE_SIZE] self.bits[index // TYPE_SIZE] = l | 1 << (index % TYPE_SIZE) def __getitem__(self, index): l = self.bits[index // TYPE_SIZE] return (l >> (index % TYPE_SIZE)) & 1 def tostring(self): try: return self.bits.tobytes() except AttributeError: return self.bits.tostring() nordugrid-arc-6.14.0/src/services/acix/core/PaxHeaders.30264/indexclient.py0000644000000000000000000000013114152153376024511 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.841584103 nordugrid-arc-6.14.0/src/services/acix/core/indexclient.py0000644000175000002070000000201714152153376024477 0ustar00mockbuildmock00000000000000""" Client for retrieving cache. Note that json is only available on python >= 2.6. """ import json try: from urllib.parse import quote except ImportError: from urllib import quote from twisted.python import log from twisted.web import client class InvalidIndexReplyError(Exception): pass def queryIndex(index_url, urls): for url in urls: assert ',' not in urls, "Commas ',' not allowed in urls currently" eurls = [ quote(url) for url in urls ] url = index_url + "?url=" + ','.join(eurls) d = client.getPage(url.encode()) d.addCallback(_gotResult, index_url) d.addErrback(_indexError, index_url) return d def _gotResult(result, index_url): log.msg("Got reply from index service %s" % index_url) try: decoded_result = json.loads(result) return decoded_result except ValueError as e: raise InvalidIndexReplyError(str(e)) def _indexError(failure, index_url): log.msg("Error while getting index results:") log.err(failure) return failure nordugrid-arc-6.14.0/src/services/acix/core/PaxHeaders.30264/hashes.py0000644000000000000000000000013114152153376023456 xustar000000000000000029 mtime=1638455038.43064635 30 atime=1638455038.510647552 30 ctime=1638455100.840584088 nordugrid-arc-6.14.0/src/services/acix/core/hashes.py0000644000175000002070000000655014152153376023452 0ustar00mockbuildmock00000000000000# #************************************************************************** #* * #* General Purpose Hash Function Algorithms Library * #* * #* Author: Arash Partow - 2002 * #* URL: http://www.partow.net * #* URL: http://www.partow.net/programming/hashfunctions/index.html * #* * #* Modified by Henrik Thostrup Jensen to operate on int * #* arrays instead of strings (large optimization when performing several * #* hashes of the same string. (2009) * #* * #* Copyright notice: * #* Free use of the General Purpose Hash Function Algorithms Library is * #* permitted under the guidelines and in accordance with the most current * #* version of the Common Public License. 
* #* http://www.opensource.org/licenses/cpl.php * #* * #************************************************************************** # import sys if sys.version_info[0] >= 3: long = int def RSHash(key): a = 378551 b = 63689 hash = 0 for k in key: hash = hash * a + k a = a * b return hash def JSHash(key): hash = 1315423911 for k in key: hash ^= ((hash << 5) + k + (hash >> 2)) return hash def PJWHash(key): BitsInUnsignedInt = 4 * 8 ThreeQuarters = long((BitsInUnsignedInt * 3) // 4) OneEighth = long(BitsInUnsignedInt // 8) HighBits = (0xFFFFFFFF) << (BitsInUnsignedInt - OneEighth) hash = 0 test = 0 for k in key: hash = (hash << OneEighth) + k test = hash & HighBits if test != 0: hash = (( hash ^ (test >> ThreeQuarters)) & (~HighBits)); return (hash & 0x7FFFFFFF) def ELFHash(key): hash = 0 x = 0 for k in key: hash = (hash << 4) + k x = hash & 0xF0000000 if x != 0: hash ^= (x >> 24) hash &= ~x return hash def BKDRHash(key): seed = 131 # 31 131 1313 13131 131313 etc.. hash = 0 for k in key: hash = (hash * seed) + k return hash def SDBMHash(key): hash = 0 for k in key: hash = k + (hash << 6) + (hash << 16) - hash; return hash def DJBHash(key): hash = 5381 for k in key: hash = ((hash << 5) + hash) + k return hash def DEKHash(key): hash = len(key); for k in key: hash = ((hash << 5) ^ (hash >> 27)) ^ k return hash def BPHash(key): hash = 0 for k in key: hash = hash << 7 ^ k return hash def FNVHash(key): fnv_prime = 0x811C9DC5 hash = 0 for k in key: hash *= fnv_prime hash ^= k return hash ## requres index, so we don't use it #def APHash(key): # hash = 0xAAAAAAAA # for k in key: # if ((i & 1) == 0): # hash ^= ((hash << 7) ^ k * (hash >> 3)) # else: # hash ^= (~((hash << 11) + k ^ (hash >> 5))) # return hash nordugrid-arc-6.14.0/src/services/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376022012 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.500647402 30 ctime=1638455099.184559205 nordugrid-arc-6.14.0/src/services/Makefile.am0000644000175000002070000000143514152153376022002 0ustar00mockbuildmock00000000000000if A_REX_SERVICE_ENABLED AREX_SERVICE = a-rex else AREX_SERVICE = endif if GRIDFTPD_SERVICE_ENABLED GRIDFTPD_SERVICE = gridftpd else GRIDFTPD_SERVICE = endif if LDAP_SERVICE_ENABLED LDAP_SERVICE = ldap-infosys else LDAP_SERVICE = endif if MONITOR_ENABLED MONITOR = monitor else MONITOR = endif if CANDYPOND_ENABLED CANDYPOND_SERVICE = candypond else CANDYPOND_SERVICE = endif if DATADELIVERY_SERVICE_ENABLED DATADELIVERY_SERVICE = data-staging else DATADELIVERY_SERVICE = endif if ACIX_ENABLED ACIX = acix else ACIX = endif SUBDIRS = $(GRIDFTPD_SERVICE) $(AREX_SERVICE) $(LDAP_SERVICE) \ $(MONITOR) \ $(CANDYPOND_SERVICE) \ $(DATADELIVERY_SERVICE) \ $(ACIX) \ wrappers examples DIST_SUBDIRS = gridftpd a-rex ldap-infosys \ monitor \ candypond \ data-staging wrappers examples acix nordugrid-arc-6.14.0/src/services/PaxHeaders.30264/Makefile.in0000644000000000000000000000013114152153432022013 xustar000000000000000030 mtime=1638455066.237064154 30 atime=1638455089.526414087 29 ctime=1638455099.18355919 nordugrid-arc-6.14.0/src/services/Makefile.in0000644000175000002070000006263414152153432022014 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ 
install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = 
@ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = 
@LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename 
= @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ @A_REX_SERVICE_ENABLED_FALSE@AREX_SERVICE = @A_REX_SERVICE_ENABLED_TRUE@AREX_SERVICE = a-rex @GRIDFTPD_SERVICE_ENABLED_FALSE@GRIDFTPD_SERVICE = @GRIDFTPD_SERVICE_ENABLED_TRUE@GRIDFTPD_SERVICE = gridftpd @LDAP_SERVICE_ENABLED_FALSE@LDAP_SERVICE = @LDAP_SERVICE_ENABLED_TRUE@LDAP_SERVICE = ldap-infosys @MONITOR_ENABLED_FALSE@MONITOR = @MONITOR_ENABLED_TRUE@MONITOR = monitor @CANDYPOND_ENABLED_FALSE@CANDYPOND_SERVICE = @CANDYPOND_ENABLED_TRUE@CANDYPOND_SERVICE = candypond @DATADELIVERY_SERVICE_ENABLED_FALSE@DATADELIVERY_SERVICE = @DATADELIVERY_SERVICE_ENABLED_TRUE@DATADELIVERY_SERVICE = data-staging @ACIX_ENABLED_FALSE@ACIX = @ACIX_ENABLED_TRUE@ACIX = acix SUBDIRS = $(GRIDFTPD_SERVICE) $(AREX_SERVICE) $(LDAP_SERVICE) \ $(MONITOR) \ $(CANDYPOND_SERVICE) \ $(DATADELIVERY_SERVICE) \ $(ACIX) \ wrappers examples DIST_SUBDIRS = gridftpd a-rex ldap-infosys \ monitor \ candypond \ data-staging wrappers examples acix all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. 
$(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! -f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/PaxHeaders.30264/data-staging0000644000000000000000000000013214152153474022243 xustar000000000000000030 mtime=1638455100.708582105 30 atime=1638455103.996631509 30 ctime=1638455100.708582105 nordugrid-arc-6.14.0/src/services/data-staging/0000755000175000002070000000000014152153474022305 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/data-staging/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376024354 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.702582014 nordugrid-arc-6.14.0/src/services/data-staging/Makefile.am0000644000175000002070000000214314152153376024342 0ustar00mockbuildmock00000000000000pkglib_LTLIBRARIES = libdatadeliveryservice.la if SYSV_SCRIPTS_ENABLED DATA_DELIVERY_SCRIPT = arc-datadelivery-service else DATA_DELIVERY_SCRIPT = endif initd_SCRIPTS = $(DATA_DELIVERY_SCRIPT) if SYSTEMD_UNITS_ENABLED DATA_DELIVERY_UNIT = arc-datadelivery-service.service else DATA_DELIVERY_UNIT = endif units_DATA = $(DATA_DELIVERY_UNIT) pkgdata_SCRIPTS = arc-datadelivery-service-start libdatadeliveryservice_la_SOURCES = DataDeliveryService.h DataDeliveryService.cpp libdatadeliveryservice_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) libdatadeliveryservice_la_LIBADD = \ $(top_builddir)/src/libs/data-staging/libarcdatastaging.la \ $(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(GLIBMM_LIBS) 
libdatadeliveryservice_la_LDFLAGS = -no-undefined -avoid-version -module nordugrid-arc-6.14.0/src/services/data-staging/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435024362 xustar000000000000000030 mtime=1638455069.072106751 30 atime=1638455090.776432869 30 ctime=1638455100.702582014 nordugrid-arc-6.14.0/src/services/data-staging/Makefile.in0000644000175000002070000010615014152153435024352 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/data-staging DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/arc-datadelivery-service.in \ $(srcdir)/arc-datadelivery-service.service.in \ $(srcdir)/arc-datadelivery-service-start.in \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 
$(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = arc-datadelivery-service \ arc-datadelivery-service.service \ arc-datadelivery-service-start CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(pkglibdir)" "$(DESTDIR)$(initddir)" \ "$(DESTDIR)$(pkgdatadir)" "$(DESTDIR)$(unitsdir)" LTLIBRARIES = $(pkglib_LTLIBRARIES) am__DEPENDENCIES_1 = libdatadeliveryservice_la_DEPENDENCIES = \ $(top_builddir)/src/libs/data-staging/libarcdatastaging.la \ $(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(am__DEPENDENCIES_1) am_libdatadeliveryservice_la_OBJECTS = \ libdatadeliveryservice_la-DataDeliveryService.lo libdatadeliveryservice_la_OBJECTS = \ $(am_libdatadeliveryservice_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libdatadeliveryservice_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(libdatadeliveryservice_la_CXXFLAGS) $(CXXFLAGS) \ $(libdatadeliveryservice_la_LDFLAGS) $(LDFLAGS) -o $@ SCRIPTS = $(initd_SCRIPTS) $(pkgdata_SCRIPTS) AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ 
$(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libdatadeliveryservice_la_SOURCES) DIST_SOURCES = $(libdatadeliveryservice_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac DATA = $(units_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = 
@GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ 
XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ pkglib_LTLIBRARIES = libdatadeliveryservice.la @SYSV_SCRIPTS_ENABLED_FALSE@DATA_DELIVERY_SCRIPT = @SYSV_SCRIPTS_ENABLED_TRUE@DATA_DELIVERY_SCRIPT = arc-datadelivery-service initd_SCRIPTS = $(DATA_DELIVERY_SCRIPT) @SYSTEMD_UNITS_ENABLED_FALSE@DATA_DELIVERY_UNIT = @SYSTEMD_UNITS_ENABLED_TRUE@DATA_DELIVERY_UNIT = arc-datadelivery-service.service units_DATA = $(DATA_DELIVERY_UNIT) pkgdata_SCRIPTS = arc-datadelivery-service-start libdatadeliveryservice_la_SOURCES = DataDeliveryService.h DataDeliveryService.cpp libdatadeliveryservice_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) libdatadeliveryservice_la_LIBADD = \ $(top_builddir)/src/libs/data-staging/libarcdatastaging.la \ $(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ 
$(GLIBMM_LIBS) libdatadeliveryservice_la_LDFLAGS = -no-undefined -avoid-version -module all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/data-staging/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/data-staging/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): arc-datadelivery-service: $(top_builddir)/config.status $(srcdir)/arc-datadelivery-service.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-datadelivery-service.service: $(top_builddir)/config.status $(srcdir)/arc-datadelivery-service.service.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-datadelivery-service-start: $(top_builddir)/config.status $(srcdir)/arc-datadelivery-service-start.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-pkglibLTLIBRARIES: $(pkglib_LTLIBRARIES) @$(NORMAL_INSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ list2="$$list2 $$p"; \ else :; fi; \ done; \ test -z "$$list2" || { \ echo " $(MKDIR_P) '$(DESTDIR)$(pkglibdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkglibdir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(pkglibdir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(pkglibdir)"; \ } uninstall-pkglibLTLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ for p in $$list; do \ $(am__strip_dir) \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(pkglibdir)/$$f'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(pkglibdir)/$$f"; \ done clean-pkglibLTLIBRARIES: -test -z "$(pkglib_LTLIBRARIES)" || rm -f $(pkglib_LTLIBRARIES) @list='$(pkglib_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libdatadeliveryservice.la: $(libdatadeliveryservice_la_OBJECTS) $(libdatadeliveryservice_la_DEPENDENCIES) $(EXTRA_libdatadeliveryservice_la_DEPENDENCIES) $(AM_V_CXXLD)$(libdatadeliveryservice_la_LINK) -rpath $(pkglibdir) $(libdatadeliveryservice_la_OBJECTS) $(libdatadeliveryservice_la_LIBADD) $(LIBS) install-initdSCRIPTS: $(initd_SCRIPTS) @$(NORMAL_INSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || list=; \ if test -n 
"$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(initddir)'"; \ $(MKDIR_P) "$(DESTDIR)$(initddir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(initddir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(initddir)$$dir" || exit $$?; \ } \ ; done uninstall-initdSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(initddir)'; $(am__uninstall_files_from_dir) install-pkgdataSCRIPTS: $(pkgdata_SCRIPTS) @$(NORMAL_INSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgdatadir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgdatadir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(pkgdatadir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(pkgdatadir)$$dir" || exit $$?; \ } \ ; done uninstall-pkgdataSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(pkgdatadir)'; $(am__uninstall_files_from_dir) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libdatadeliveryservice_la-DataDeliveryService.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ 
DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libdatadeliveryservice_la-DataDeliveryService.lo: DataDeliveryService.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdatadeliveryservice_la_CXXFLAGS) $(CXXFLAGS) -MT libdatadeliveryservice_la-DataDeliveryService.lo -MD -MP -MF $(DEPDIR)/libdatadeliveryservice_la-DataDeliveryService.Tpo -c -o libdatadeliveryservice_la-DataDeliveryService.lo `test -f 'DataDeliveryService.cpp' || echo '$(srcdir)/'`DataDeliveryService.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libdatadeliveryservice_la-DataDeliveryService.Tpo $(DEPDIR)/libdatadeliveryservice_la-DataDeliveryService.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DataDeliveryService.cpp' object='libdatadeliveryservice_la-DataDeliveryService.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdatadeliveryservice_la_CXXFLAGS) $(CXXFLAGS) -c -o libdatadeliveryservice_la-DataDeliveryService.lo `test -f 'DataDeliveryService.cpp' || echo '$(srcdir)/'`DataDeliveryService.cpp mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-unitsDATA: $(units_DATA) @$(NORMAL_INSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(unitsdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(unitsdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(unitsdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(unitsdir)" || exit $$?; \ done uninstall-unitsDATA: @$(NORMAL_UNINSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(unitsdir)'; $(am__uninstall_files_from_dir) ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && 
$(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) $(SCRIPTS) $(DATA) installdirs: for dir in "$(DESTDIR)$(pkglibdir)" "$(DESTDIR)$(initddir)" "$(DESTDIR)$(pkgdatadir)" "$(DESTDIR)$(unitsdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool clean-pkglibLTLIBRARIES \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-initdSCRIPTS install-pkgdataSCRIPTS \ install-unitsDATA install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-pkglibLTLIBRARIES install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-initdSCRIPTS uninstall-pkgdataSCRIPTS \ uninstall-pkglibLTLIBRARIES uninstall-unitsDATA .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-pkglibLTLIBRARIES cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-initdSCRIPTS install-man install-pdf \ install-pdf-am install-pkgdataSCRIPTS \ install-pkglibLTLIBRARIES install-ps install-ps-am \ install-strip install-unitsDATA installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am uninstall-initdSCRIPTS uninstall-pkgdataSCRIPTS \ uninstall-pkglibLTLIBRARIES uninstall-unitsDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/data-staging/PaxHeaders.30264/DataDeliveryService.h0000644000000000000000000000013114152153376026367 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.707582089 nordugrid-arc-6.14.0/src/services/data-staging/DataDeliveryService.h0000644000175000002070000001121114152153376026351 0ustar00mockbuildmock00000000000000#ifndef DATADELIVERYSERVICE_H_ #define DATADELIVERYSERVICE_H_ #include #include #include #include #include #include #include namespace DataStaging { /// Service for the Delivery layer of data staging. /** * This service starts and controls data transfers. It assumes that the * files in any request submitted are ready for immediate transfer and * so do not need to be resolved or prepared in any way. * * It implements DTRCallback to get callbacks when a DTR has finished * transfer. * * Status codes in results returned: * - OK - successful submission/cancellation * - TRANSFERRING - transfer still ongoing * - TRANSFERRED - transfer finished successfully * - TRANSFER_ERROR - transfer failed * - SERVICE_ERROR - something went wrong in the service itself * * An internal list of active transfers is held in memory. 
After the first * query of a finished transfer (successful or not) the DTR is moved to an * archived list where only summary information is kept about the transfer * (DTR ID, state and short error description). The DTR object is then * deleted. This archived list is also kept in memory. In case a transfer is * never queried, a separate thread moves any transfers which completed more * than one hour ago to the archived list. */ class DataDeliveryService: public Arc::Service, DTRCallback { /// Managed pointer to stringstream used to hold log output typedef Arc::ThreadedPointer sstream_ptr; private: /// Construct a SOAP error message with optional extra reason string Arc::MCC_Status make_soap_fault(Arc::Message& outmsg, const std::string& reason = ""); /// DataDeliveryService namespace Arc::NS ns; /// Directories the service is allowed to copy files from or to std::list allowed_dirs; /// Process limit read from cache service configuration unsigned int max_processes; /// Current processes - using gint to guarantee atomic thread-safe operations gint current_processes; /// Internal list of active DTRs, mapped to the stream with the transfer log std::map active_dtrs; /// Lock for active DTRs list Arc::SimpleCondition active_dtrs_lock; /// Archived list of finished DTRs, just ID and final state and short explanation /// TODO: save to file, DB? std::map > archived_dtrs; /// Lock for archive DTRs list Arc::SimpleCondition archived_dtrs_lock; /// Object to manage Delivery processes DataDelivery delivery; /// Container for delegated credentials Arc::DelegationContainerSOAP delegation; /// Directory in which to store temporary delegated proxies std::string tmp_proxy_dir; /// Root logger destinations, to use when logging messages in methods /// called from Delivery layer where root logger is disabled std::list root_destinations; /// Logger object static Arc::Logger logger; /// Log a message to root destinations void LogToRootLogger(Arc::LogLevel level, const std::string& message); /// Static version of ArchivalThread, used when thread is created static void ArchivalThread(void* arg); /// Archival thread void ArchivalThread(void); /// Sanity check on file sources and destinations bool CheckInput(const std::string& url, const Arc::UserConfig& usercfg, Arc::XMLNode& resultelement, bool& require_credential_file); /* individual operations */ /// Start a new transfer Arc::MCC_Status Start(Arc::XMLNode in, Arc::XMLNode out); /// Query status of transfer Arc::MCC_Status Query(Arc::XMLNode in, Arc::XMLNode out); /// Cancel a transfer Arc::MCC_Status Cancel(Arc::XMLNode in, Arc::XMLNode out); /// Check service is ok and return service information Arc::MCC_Status Ping(Arc::XMLNode in, Arc::XMLNode out); public: /// Make a new DataDeliveryService. Sets up the process handler. DataDeliveryService(Arc::Config *cfg, Arc::PluginArgument* parg); /// Destroy the DataDeliveryService virtual ~DataDeliveryService(); /// Main method called by HED when service is invoked. Directs call to appropriate internal method. 
virtual Arc::MCC_Status process(Arc::Message &inmsg, Arc::Message &outmsg); /// Implementation of callback method from DTRCallback virtual void receiveDTR(DTR_ptr dtr); }; } // namespace DataStaging #endif /* DATADELIVERYSERVICE_H_ */ nordugrid-arc-6.14.0/src/services/data-staging/PaxHeaders.30264/arc-datadelivery-service.in0000644000000000000000000000013114152153376027526 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.703582029 nordugrid-arc-6.14.0/src/services/data-staging/arc-datadelivery-service.in0000644000175000002070000001076214152153376027522 0ustar00mockbuildmock00000000000000#!/bin/bash # # Init file for the DataDelivery service # # chkconfig: 2345 87 13 # description: ARC DataDelivery service # processname: arched ### BEGIN INIT INFO # Provides: arc-datadelivery-service # Required-Start: $local_fs $remote_fs # Required-Stop: $local_fs $remote_fs # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 # Short-Description: ARC DataDelivery service # Description: ARC DataDelivery service ### END INIT INFO # source function library if [ -f /etc/init.d/functions ]; then . /etc/init.d/functions log_success_msg() { echo -n "$@" success "$@" echo } log_warning_msg() { echo -n "$@" warning "$@" echo } log_failure_msg() { echo -n "$@" failure "$@" echo } elif [ -f /lib/lsb/init-functions ]; then . /lib/lsb/init-functions else echo "Error: Cannot source neither init.d nor lsb functions" exit 1 fi prog=arched # sysconfig files if [ -r /etc/sysconfig/nordugrid ]; then . /etc/sysconfig/nordugrid elif [ -r /etc/default/nordugrid ]; then . /etc/default/nordugrid fi if [ -r /etc/sysconfig/arc-datadelivery-service ]; then . /etc/sysconfig/arc-datadelivery-service elif [ -r /etc/default/arc-datadelivery-service ]; then . /etc/default/arc-datadelivery-service fi # ARC_LOCATION ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ ! -d "$ARC_LOCATION" ]; then log_failure_msg "ARC_LOCATION ($ARC_LOCATION) not found" exit 1 fi # PID and lock file PID_FILE=`${ARC_LOCATION}/@pkgdatasubdir@/arc-datadelivery-service-start --getpidfile` if [ $? -ne 0 ]; then # When --getpidfile fails it returns the error on stdout log_failure_msg "$PID_FILE" exit 1 fi if [ `id -u` = 0 ] ; then # Debian does not have /run/lock/subsys if [ -d /run/lock/subsys ]; then LOCKFILE=/run/lock/subsys/$prog-datadelivery-service else LOCKFILE=/run/lock/$prog-datadelivery-service fi else LOCKFILE=$HOME/$prog-datadelivery-service.lock fi start() { echo -n "Starting $prog: " # Check if we are already running if [ -f "$PID_FILE" ]; then read pid < "$PID_FILE" if [ "x$pid" != "x" ]; then ps -p "$pid" -o comm 2>/dev/null | grep "^$prog$" 1>/dev/null 2>/dev/null if [ $? -eq 0 ] ; then log_success_msg "already running (pid $pid)" return 0 fi fi rm -f "$PID_FILE" "$LOCKFILE" fi ${ARC_LOCATION}/@pkgdatasubdir@/arc-datadelivery-service-start RETVAL=$? if [ $RETVAL -eq 0 ]; then touch $LOCKFILE log_success_msg else log_failure_msg fi return $RETVAL } stop() { echo -n "Stopping $prog: " if [ -f "$PID_FILE" ]; then read pid < "$PID_FILE" if [ ! -z "$pid" ] ; then kill "$pid" RETVAL=$? 
if [ $RETVAL -eq 0 ]; then log_success_msg else log_failure_msg fi timeout=10; # enough time to kill any active processes while ( ps -p "$pid" -o comm 2>/dev/null | grep "^$prog$" 1>/dev/null 2>/dev/null ) && [ $timeout -ge 1 ] ; do sleep 1 timeout=$(($timeout - 1)) done [ $timeout -lt 1 ] && kill -9 "$pid" 1>/dev/null 2>&1 rm -f "$PID_FILE" "$LOCKFILE" else RETVAL=1 log_failure_msg "$prog shutdown - pidfile is empty" fi else RETVAL=0 log_success_msg "$prog shutdown - already stopped" fi return $RETVAL } status() { if [ -f "$PID_FILE" ]; then read pid < "$PID_FILE" if [ "$pid" != "" ]; then if ps -p "$pid" > /dev/null; then echo "$1 (pid $pid) is running..." return 0 fi echo "$1 stopped but pid file exists" return 1 fi fi if [ -f $LOCKFILE ]; then echo "$1 stopped but lockfile exists" return 2 fi echo "$1 is stopped" return 3 } restart() { stop start } case "$1" in start) start ;; stop) stop ;; status) status $prog ;; restart | force-reload) restart ;; reload) ;; condrestart | try-restart) [ -f $LOCKFILE ] && restart || : ;; *) echo "Usage: $0 {start|stop|status|restart|force-reload|reload|condrestart|try-restart}" exit 1 ;; esac exit $? nordugrid-arc-6.14.0/src/services/data-staging/PaxHeaders.30264/arc-datadelivery-service.service.in0000644000000000000000000000013114152153376031165 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.704582044 nordugrid-arc-6.14.0/src/services/data-staging/arc-datadelivery-service.service.in0000644000175000002070000000034614152153376031156 0ustar00mockbuildmock00000000000000[Unit] Description=A-REX datadelivery service After=local_fs.target remote_fs.target [Service] ExecStart=@prefix@/@pkgdatasubdir@/arc-datadelivery-service-start NotifyAccess=all Type=forking [Install] WantedBy=multi-user.target nordugrid-arc-6.14.0/src/services/data-staging/PaxHeaders.30264/DataDeliveryService.cpp0000644000000000000000000000013114152153376026722 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.708582105 nordugrid-arc-6.14.0/src/services/data-staging/DataDeliveryService.cpp0000644000175000002070000006467114152153376026726 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include "DataDeliveryService.h" namespace DataStaging { static Arc::Plugin *get_service(Arc::PluginArgument* arg) { Arc::ServicePluginArgument* srvarg = arg?dynamic_cast(arg):NULL; if(!srvarg) return NULL; DataDeliveryService* s = new DataDeliveryService((Arc::Config*)(*srvarg),arg); if (*s) return s; delete s; return NULL; } Arc::Logger DataDeliveryService::logger(Arc::Logger::rootLogger, "DataDeliveryService"); void DataDeliveryService::ArchivalThread(void* arg) { DataDeliveryService* service = (DataDeliveryService*)arg; service->ArchivalThread(); } void DataDeliveryService::ArchivalThread() { // archive every 10 mins DTRs older than 1 hour // TODO: configurable, save to disk? 
int frequency = 600; while (true) { sleep(frequency); Arc::Time timelimit(Arc::Time()-Arc::Period(3600)); active_dtrs_lock.lock(); for (std::map::iterator i = active_dtrs.begin(); i != active_dtrs.end();) { DTR_ptr dtr = i->first; if (dtr->get_modification_time() < timelimit && dtr->get_status() != DTRStatus::TRANSFERRING) { archived_dtrs_lock.lock(); if (dtr->error()) { logger.msg(Arc::VERBOSE, "Archiving DTR %s, state ERROR", dtr->get_id()); archived_dtrs[dtr->get_id()] = std::pair("TRANSFER_ERROR", dtr->get_error_status().GetDesc()); } else { logger.msg(Arc::VERBOSE, "Archiving DTR %s, state %s", dtr->get_id(), dtr->get_status().str()); archived_dtrs[dtr->get_id()] = std::pair("TRANSFERRED", ""); } archived_dtrs_lock.unlock(); active_dtrs.erase(i++); } else ++i; } active_dtrs_lock.unlock(); } } bool DataDeliveryService::CheckInput(const std::string& url, const Arc::UserConfig& usercfg, Arc::XMLNode& resultelement, bool& require_credential_file) { Arc::DataHandle h(url, usercfg); if (!h || !(*h)) { resultelement.NewChild("ErrorDescription") = "Can't handle URL " + url; return false; } if (h->Local()) { std::string path(h->GetURL().Path()); if (path.find("../") != std::string::npos) { resultelement.NewChild("ErrorDescription") = "'../' is not allowed in filename"; return false; } bool allowed = false; for (std::list::iterator i = allowed_dirs.begin(); i != allowed_dirs.end(); ++i) { if (path.find(*i) == 0) allowed = true; } if (!allowed) { resultelement.NewChild("ErrorDescription") = "Access denied to path " + path; return false; } } if (h->RequiresCredentialsInFile()) require_credential_file = true; return true; } void DataDeliveryService::LogToRootLogger(Arc::LogLevel level, const std::string& message) { Arc::Logger::getRootLogger().addDestinations(root_destinations); logger.msg(level, message); Arc::Logger::getRootLogger().removeDestinations(); } void DataDeliveryService::receiveDTR(DTR_ptr dtr) { LogToRootLogger(Arc::INFO, "Received DTR "+dtr->get_id()+" from Delivery in state "+dtr->get_status().str()); // delete temp proxy file if it was created if (dtr->get_source()->RequiresCredentialsInFile() || dtr->get_destination()->RequiresCredentialsInFile()) { std::string proxy_file(tmp_proxy_dir+"/DTR."+dtr->get_id()+".proxy"); LogToRootLogger(Arc::DEBUG, "Removing temp proxy "+proxy_file); if (unlink(proxy_file.c_str()) != 0 && errno != ENOENT) { LogToRootLogger(Arc::WARNING, "Failed to remove temporary proxy "+proxy_file+": "+Arc::StrError(errno)); } } if (current_processes > 0) --current_processes; } /* Accepts: id url url 1000 1000 true 12345 adler32:12345678 100 60 100 120 ... Returns id SERVICE_ERROR ... ... 
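   (The XML tags of the request/response example above were lost in this copy of
   the comment. The sketch below is a reconstruction using only the element names
   that Start() reads and writes further down; the exact nesting is assumed, not
   authoritative. A deleg:DelegatedToken element is also expected inside the
   DataDeliveryStart request.)

   Accepts (one or more DTR elements):
     <DataDeliveryStart>
       <DTR>
         <ID>id</ID>
         <Source>url</Source>
         <Destination>url</Destination>
         <Uid>1000</Uid>
         <Gid>1000</Gid>
         <Caching>true</Caching>
         <Size>12345</Size>
         <CheckSum>adler32:12345678</CheckSum>
         <MinAverageSpeed>100</MinAverageSpeed>
         <AverageTime>60</AverageTime>
         <MinCurrentSpeed>100</MinCurrentSpeed>
         <MaxInactivityTime>120</MaxInactivityTime>
       </DTR>
       ...
     </DataDeliveryStart>

   Returns (one Result per DTR):
     <DataDeliveryStartResponse>
       <DataDeliveryStartResult>
         <Result>
           <ID>id</ID>
           <ResultCode>OK or SERVICE_ERROR</ResultCode>
           <ErrorDescription>...</ErrorDescription>
         </Result>
         ...
       </DataDeliveryStartResult>
     </DataDeliveryStartResponse>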
*/ Arc::MCC_Status DataDeliveryService::Start(Arc::XMLNode in, Arc::XMLNode out) { Arc::XMLNode resp = out.NewChild("DataDeliveryStartResponse"); Arc::XMLNode results = resp.NewChild("DataDeliveryStartResult"); // Save credentials to temp file and set in UserConfig Arc::XMLNode delegated_token = in["DataDeliveryStart"]["deleg:DelegatedToken"]; if (!delegated_token) { logger.msg(Arc::ERROR, "No delegation token in request"); return Arc::MCC_Status(Arc::GENERIC_ERROR, "DataDeliveryService", "No delegation token received"); } // Check credentials were already delegated std::string credential; if (!delegation.DelegatedToken(credential, delegated_token)) { // Failed to accept delegation logger.msg(Arc::ERROR, "Failed to accept delegation"); return Arc::MCC_Status(Arc::GENERIC_ERROR, "DataDeliveryService", "Failed to accept delegation"); } for(int n = 0;;++n) { Arc::XMLNode dtrnode = in["DataDeliveryStart"]["DTR"][n]; if (!dtrnode) break; std::string dtrid((std::string)dtrnode["ID"]); std::string src((std::string)dtrnode["Source"]); std::string dest((std::string)dtrnode["Destination"]); int uid = Arc::stringtoi((std::string)dtrnode["Uid"]); int gid = Arc::stringtoi((std::string)dtrnode["Gid"]); if (dtrnode["Caching"] == "true") { uid = Arc::User().get_uid(); gid = Arc::User().get_gid(); } // proxy path will be set later Arc::initializeCredentialsType cred_type(Arc::initializeCredentialsType::SkipCredentials); Arc::UserConfig usercfg(cred_type); bool require_credential_file = false; Arc::XMLNode resultelement = results.NewChild("Result"); resultelement.NewChild("ID") = dtrid; if (!CheckInput(src, usercfg, resultelement, require_credential_file)) { resultelement.NewChild("ResultCode") = "SERVICE_ERROR"; resultelement["ErrorDescription"] = (std::string)resultelement["ErrorDescription"] + ": Cannot use source"; logger.msg(Arc::ERROR, (std::string)resultelement["ErrorDescription"]); continue; } if (!CheckInput(dest, usercfg, resultelement, require_credential_file)) { resultelement.NewChild("ResultCode") = "SERVICE_ERROR"; resultelement["ErrorDescription"] = (std::string)resultelement["ErrorDescription"] + ": Cannot use destination"; logger.msg(Arc::ERROR, (std::string)resultelement["ErrorDescription"]); continue; } if (current_processes >= max_processes) { logger.msg(Arc::WARNING, "All %u process slots used", max_processes); resultelement.NewChild("ResultCode") = "SERVICE_ERROR"; resultelement.NewChild("ErrorDescription") = "No free process slot available"; continue; } // check if dtrid is in the active list - if so it is probably a retry active_dtrs_lock.lock(); std::map::iterator i = active_dtrs.begin(); for (; i != active_dtrs.end(); ++i) { if (i->first->get_id() == dtrid) break; } if (i != active_dtrs.end()) { if (i->first->get_status() == DTRStatus::TRANSFERRING) { logger.msg(Arc::ERROR, "Received retry for DTR %s still in transfer", dtrid); resultelement.NewChild("ResultCode") = "SERVICE_ERROR"; resultelement.NewChild("ErrorDescription") = "DTR is still in transfer"; active_dtrs_lock.unlock(); continue; } // Erase this DTR from active list logger.msg(Arc::VERBOSE, "Replacing DTR %s in state %s with new request", dtrid, i->first->get_status().str()); active_dtrs.erase(i); } active_dtrs_lock.unlock(); std::string proxy_file(tmp_proxy_dir+"/DTR."+dtrid+".proxy"); if (require_credential_file) { // Store proxy, only readable by user. Use DTR job id as proxy name. 
// TODO: it is inefficient to create a file for every DTR, better to // use some kind of proxy store logger.msg(Arc::VERBOSE, "Storing temp proxy at %s", proxy_file); bool proxy_result = Arc::FileCreate(proxy_file, credential, 0, 0, S_IRUSR | S_IWUSR); if (!proxy_result && errno == ENOENT) { Arc::DirCreate(tmp_proxy_dir, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH, true); proxy_result = Arc::FileCreate(proxy_file, credential, 0, 0, S_IRUSR | S_IWUSR); } if (!proxy_result) { logger.msg(Arc::ERROR, "Failed to create temp proxy at %s: %s", proxy_file, Arc::StrError(errno)); resultelement.NewChild("ResultCode") = "SERVICE_ERROR"; resultelement.NewChild("ErrorDescription") = "Failed to store temporary proxy"; continue; } if (chown(proxy_file.c_str(), uid, gid) != 0) { logger.msg(Arc::ERROR, "Failed to change owner of temp proxy at %s to %i:%i: %s", proxy_file, uid, gid, Arc::StrError(errno)); resultelement.NewChild("ResultCode") = "SERVICE_ERROR"; resultelement.NewChild("ErrorDescription") = "Failed to store temporary proxy"; continue; } usercfg.ProxyPath(proxy_file); } else { usercfg.CredentialString(credential); } // Logger destinations for this DTR. Uses a string stream so log can easily be sent // back to the client. LogStream keeps a reference to the stream so we // cannot delete it until deleting LogStream. These pointers are // deleted when the DTR is archived. std::list logs; sstream_ptr stream(new std::stringstream()); Arc::LogDestination * output = new Arc::LogStream(*stream); output->setFormat(Arc::MediumFormat); logs.push_back(output); std::string groupid(Arc::UUID()); DTR_ptr dtr(new DTR(src, dest, usercfg, groupid, uid, logs, "DataStaging")); if (!(*dtr)) { logger.msg(Arc::ERROR, "Invalid DTR"); resultelement.NewChild("ResultCode") = "SERVICE_ERROR"; resultelement.NewChild("ErrorDescription") = "Could not create DTR"; if (unlink(proxy_file.c_str()) != 0 && errno != ENOENT) { logger.msg(Arc::WARNING, "Failed to remove temporary proxy %s: %s", proxy_file, Arc::StrError(errno)); } continue; } ++current_processes; // Set source checksum to validate against if (dtrnode["CheckSum"]) dtr->get_source()->SetCheckSum((std::string)dtrnode["CheckSum"]); // Set filesize for protocols which need it if (dtrnode["Size"]) dtr->get_source()->SetSize(Arc::stringtoull((std::string)dtrnode["Size"])); // Get the callbacks sent to Scheduler and connect Delivery dtr->registerCallback(this, SCHEDULER); dtr->registerCallback(&delivery, DELIVERY); // Set transfer limits TransferParameters transfer_params; if (dtrnode["MinAverageSpeed"]) transfer_params.min_average_bandwidth = Arc::stringtoull((std::string)dtrnode["MinAverageSpeed"]); if (dtrnode["AverageTime"]) transfer_params.averaging_time = Arc::stringtoui((std::string)dtrnode["AverageTime"]); if (dtrnode["MinCurrentSpeed"]) transfer_params.min_current_bandwidth = Arc::stringtoull((std::string)dtrnode["MinCurrentSpeed"]); if (dtrnode["MaxInactivityTime"]) transfer_params.max_inactivity_time = Arc::stringtoui((std::string)dtrnode["MaxInactivityTime"]); delivery.SetTransferParameters(transfer_params); dtr->set_id(dtrid); dtr->set_status(DTRStatus::TRANSFER); DTR::push(dtr, DELIVERY); // Add to active list active_dtrs_lock.lock(); active_dtrs[dtr] = stream; active_dtrs_lock.unlock(); resultelement.NewChild("ResultCode") = "OK"; } return Arc::MCC_Status(Arc::STATUS_OK); } /* Accepts: id ... Returns: id ERROR ... 2 1 ... 1234 123456789 adler32:a123a45 ... 
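   (As above, the XML tags of this example were stripped; the layout below is an
   assumed reconstruction from the element names used in Query().)

   Accepts:
     <DataDeliveryQuery>
       <DTR>
         <ID>id</ID>
       </DTR>
       ...
     </DataDeliveryQuery>

   Returns (fields after ResultCode depend on the transfer state):
     <DataDeliveryQueryResponse>
       <DataDeliveryQueryResult>
         <Result>
           <ID>id</ID>
           <ResultCode>TRANSFERRING, TRANSFERRED, TRANSFER_ERROR or SERVICE_ERROR</ResultCode>
           <ErrorDescription>...</ErrorDescription>
           <ErrorStatus>2</ErrorStatus>
           <ErrorLocation>1</ErrorLocation>
           <Log>...</Log>
           <BytesTransferred>1234</BytesTransferred>
           <TransferTime>123456789</TransferTime>
           <CheckSum>adler32:a123a45</CheckSum>
         </Result>
         ...
       </DataDeliveryQueryResult>
     </DataDeliveryQueryResponse>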
*/ Arc::MCC_Status DataDeliveryService::Query(Arc::XMLNode in, Arc::XMLNode out) { Arc::XMLNode resp = out.NewChild("DataDeliveryQueryResponse"); Arc::XMLNode results = resp.NewChild("DataDeliveryQueryResult"); for(int n = 0;;++n) { Arc::XMLNode dtrnode = in["DataDeliveryQuery"]["DTR"][n]; if (!dtrnode) break; std::string dtrid((std::string)dtrnode["ID"]); Arc::XMLNode resultelement = results.NewChild("Result"); resultelement.NewChild("ID") = dtrid; active_dtrs_lock.lock(); std::map::iterator dtr_it = active_dtrs.begin(); for (; dtr_it != active_dtrs.end(); ++dtr_it) { if (dtr_it->first->get_id() == dtrid) break; } if (dtr_it == active_dtrs.end()) { active_dtrs_lock.unlock(); // if not in active list, look in archived list archived_dtrs_lock.lock(); std::map >::const_iterator arc_it = archived_dtrs.find(dtrid); if (arc_it != archived_dtrs.end()) { resultelement.NewChild("ResultCode") = archived_dtrs[dtrid].first; resultelement.NewChild("ErrorDescription") = archived_dtrs[dtrid].second; archived_dtrs_lock.unlock(); continue; } archived_dtrs_lock.unlock(); logger.msg(Arc::ERROR, "No such DTR %s", dtrid); resultelement.NewChild("ResultCode") = "SERVICE_ERROR"; resultelement.NewChild("ErrorDescription") = "No such DTR"; continue; } DTR_ptr dtr = dtr_it->first; resultelement.NewChild("Log") = dtr_it->second->str(); resultelement.NewChild("BytesTransferred") = Arc::tostring(dtr->get_bytes_transferred()); if (dtr->error()) { logger.msg(Arc::INFO, "DTR %s failed: %s", dtrid, dtr->get_error_status().GetDesc()); resultelement.NewChild("ResultCode") = "TRANSFER_ERROR"; resultelement.NewChild("ErrorDescription") = dtr->get_error_status().GetDesc(); resultelement.NewChild("ErrorStatus") = Arc::tostring(dtr->get_error_status().GetErrorStatus()); resultelement.NewChild("ErrorLocation") = Arc::tostring(dtr->get_error_status().GetErrorLocation()); resultelement.NewChild("TransferTime") = Arc::tostring(dtr->get_transfer_time()); archived_dtrs_lock.lock(); archived_dtrs[dtrid] = std::pair("TRANSFER_ERROR", dtr->get_error_status().GetDesc()); archived_dtrs_lock.unlock(); } else if (dtr->get_status() == DTRStatus::TRANSFERRED) { logger.msg(Arc::INFO, "DTR %s finished successfully", dtrid); resultelement.NewChild("ResultCode") = "TRANSFERRED"; resultelement.NewChild("TransferTime") = Arc::tostring(dtr->get_transfer_time()); // pass calculated checksum back to Scheduler (eg to insert in catalog) if (dtr->get_destination()->CheckCheckSum()) resultelement.NewChild("CheckSum") = dtr->get_destination()->GetCheckSum(); archived_dtrs_lock.lock(); archived_dtrs[dtrid] = std::pair("TRANSFERRED", ""); archived_dtrs_lock.unlock(); } else { logger.msg(Arc::VERBOSE, "DTR %s still in progress (%lluB transferred)", dtrid, dtr->get_bytes_transferred()); resultelement.NewChild("ResultCode") = "TRANSFERRING"; active_dtrs_lock.unlock(); return Arc::MCC_Status(Arc::STATUS_OK); } // Terminal state //delete dtr_it->second; active_dtrs.erase(dtr_it); active_dtrs_lock.unlock(); } return Arc::MCC_Status(Arc::STATUS_OK); } /* Accepts: id ... Returns: id ERROR ... ... 
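   (As above, the XML tags were stripped; assumed reconstruction from the element
   names used in Cancel().)

   Accepts:
     <DataDeliveryCancel>
       <DTR>
         <ID>id</ID>
       </DTR>
       ...
     </DataDeliveryCancel>

   Returns:
     <DataDeliveryCancelResponse>
       <DataDeliveryCancelResult>
         <Result>
           <ID>id</ID>
           <ResultCode>OK or SERVICE_ERROR</ResultCode>
           <ErrorDescription>...</ErrorDescription>
         </Result>
         ...
       </DataDeliveryCancelResult>
     </DataDeliveryCancelResponse>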
*/ Arc::MCC_Status DataDeliveryService::Cancel(Arc::XMLNode in, Arc::XMLNode out) { Arc::XMLNode resp = out.NewChild("DataDeliveryCancelResponse"); Arc::XMLNode results = resp.NewChild("DataDeliveryCancelResult"); for (int n = 0;;++n) { Arc::XMLNode dtrnode = in["DataDeliveryCancel"]["DTR"][n]; if (!dtrnode) break; std::string dtrid((std::string)dtrnode["ID"]); Arc::XMLNode resultelement = results.NewChild("Result"); resultelement.NewChild("ID") = dtrid; // Check if DTR is still in active list active_dtrs_lock.lock(); std::map::iterator dtr_it = active_dtrs.begin(); for (; dtr_it != active_dtrs.end(); ++dtr_it) { if (dtr_it->first->get_id() == dtrid) break; } if (dtr_it == active_dtrs.end()) { active_dtrs_lock.unlock(); logger.msg(Arc::ERROR, "No active DTR %s", dtrid); resultelement.NewChild("ResultCode") = "SERVICE_ERROR"; resultelement.NewChild("ErrorDescription") = "No such active DTR"; continue; } // DTR could be already finished, but report successful cancel anyway DTR_ptr dtr = dtr_it->first; if (dtr->get_status() == DTRStatus::TRANSFERRING_CANCEL) { active_dtrs_lock.unlock(); logger.msg(Arc::ERROR, "DTR %s was already cancelled", dtrid); resultelement.NewChild("ResultCode") = "SERVICE_ERROR"; resultelement.NewChild("ErrorDescription") = "DTR already cancelled"; continue; } // Delivery will automatically kill running process if (!delivery.cancelDTR(dtr)) { active_dtrs_lock.unlock(); logger.msg(Arc::ERROR, "DTR %s could not be cancelled", dtrid); resultelement.NewChild("ResultCode") = "SERVICE_ERROR"; resultelement.NewChild("ErrorDescription") = "DTR could not be cancelled"; continue; } logger.msg(Arc::INFO, "DTR %s cancelled", dtr->get_id()); resultelement.NewChild("ResultCode") = "OK"; active_dtrs_lock.unlock(); } return Arc::MCC_Status(Arc::STATUS_OK); } /* Accepts: Returns: ERROR ... /var/arc/cache 6.5 ... ... 
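   (As above, the XML tags were stripped; assumed reconstruction from the element
   names used in Ping().)

   Accepts:
     <DataDeliveryPing/>

   Returns:
     <DataDeliveryPingResponse>
       <DataDeliveryPingResult>
         <Result>
           <ResultCode>OK</ResultCode>
           <AllowedDir>/var/arc/cache</AllowedDir>
           <LoadAvg>6.5</LoadAvg>
         </Result>
       </DataDeliveryPingResult>
     </DataDeliveryPingResponse>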
*/ Arc::MCC_Status DataDeliveryService::Ping(Arc::XMLNode in, Arc::XMLNode out) { Arc::XMLNode resultelement = out.NewChild("DataDeliveryPingResponse").NewChild("DataDeliveryPingResult").NewChild("Result"); resultelement.NewChild("ResultCode") = "OK"; for (std::list::iterator dir = allowed_dirs.begin(); dir != allowed_dirs.end(); ++dir) { resultelement.NewChild("AllowedDir") = *dir; } // Send the 5 min load average double avg[3]; if (getloadavg(avg, 3) != 3) { logger.msg(Arc::WARNING, "Failed to get load average: %s", Arc::StrError()); resultelement.NewChild("LoadAvg") = "-1"; } else { resultelement.NewChild("LoadAvg") = Arc::tostring(avg[1]); } return Arc::MCC_Status(Arc::STATUS_OK); } DataDeliveryService::DataDeliveryService(Arc::Config *cfg, Arc::PluginArgument* parg) : Service(cfg,parg), max_processes(100), current_processes(0) { valid = false; // Set medium format for logging root_destinations = Arc::Logger::getRootLogger().getDestinations(); for (std::list::iterator i = root_destinations.begin(); i != root_destinations.end(); ++i) { (*i)->setFormat(Arc::MediumFormat); } // Check configuration - at least one allowed IP address and dir must be specified if (!(*cfg)["SecHandler"]["PDP"]["Policy"]["Rule"]["Subjects"]["Subject"]) { logger.msg(Arc::ERROR, "Invalid configuration - no allowed IP address specified"); return; } if (!(*cfg)["AllowedDir"]) { logger.msg(Arc::ERROR, "Invalid configuration - no transfer dirs specified"); return; } for (int n = 0;;++n) { Arc::XMLNode allowed_dir = (*cfg)["AllowedDir"][n]; if (!allowed_dir) break; allowed_dirs.push_back((std::string)allowed_dir); } // Start archival thread if (!Arc::CreateThreadFunction(ArchivalThread, this)) { logger.msg(Arc::ERROR, "Failed to start archival thread"); return; } // Create tmp dir for proxies // TODO get from configuration tmp_proxy_dir = "/tmp/arc"; // clear any proxies left behind from previous bad shutdown Arc::DirDelete(tmp_proxy_dir); // Set restrictive umask umask(0077); // Set log level for DTR DataStaging::DTR::LOG_LEVEL = Arc::Logger::getRootLogger().getThreshold(); // Start new DataDelivery delivery.start(); valid = true; } DataDeliveryService::~DataDeliveryService() { // Stop accepting new requests and cancel all active transfers // DataDelivery destructor automatically calls stop() valid = false; // clear any proxies left behind Arc::DirDelete(tmp_proxy_dir); logger.msg(Arc::INFO, "Shutting down data delivery service"); } Arc::MCC_Status DataDeliveryService::process(Arc::Message &inmsg, Arc::Message &outmsg) { if (!valid) return make_soap_fault(outmsg, "Service is not valid"); // Check authorization if(!ProcessSecHandlers(inmsg, "incoming")) { logger.msg(Arc::ERROR, "Unauthorized"); return make_soap_fault(outmsg, "Authorization failed"); } std::string method = inmsg.Attributes()->get("HTTP:METHOD"); if(method == "POST") { logger.msg(Arc::VERBOSE, "process: POST"); logger.msg(Arc::VERBOSE, "Identity is %s", inmsg.Attributes()->get("TLS:PEERDN")); // Both input and output are supposed to be SOAP // Extracting payload Arc::PayloadSOAP* inpayload = NULL; try { inpayload = dynamic_cast(inmsg.Payload()); } catch(std::exception& e) { }; if(!inpayload) { logger.msg(Arc::ERROR, "input is not SOAP"); return make_soap_fault(outmsg); } // Applying known namespaces inpayload->Namespaces(ns); if(logger.getThreshold() <= Arc::DEBUG) { std::string str; inpayload->GetDoc(str, true); logger.msg(Arc::DEBUG, "process: request=%s",str); } // Analyzing request Arc::XMLNode op = inpayload->Child(0); if(!op) { 
logger.msg(Arc::ERROR, "input does not define operation"); return make_soap_fault(outmsg); } logger.msg(Arc::VERBOSE, "process: operation: %s",op.Name()); Arc::PayloadSOAP* outpayload = new Arc::PayloadSOAP(ns); outpayload->Namespaces(ns); Arc::MCC_Status result(Arc::STATUS_OK); // choose operation // Make a new request if (MatchXMLName(op,"DataDeliveryStart")) { result = Start(*inpayload, *outpayload); } // Query a request else if (MatchXMLName(op,"DataDeliveryQuery")) { result = Query(*inpayload, *outpayload); } // Cancel a request else if (MatchXMLName(op,"DataDeliveryCancel")) { result = Cancel(*inpayload, *outpayload); } // ping service else if (MatchXMLName(op,"DataDeliveryPing")) { result = Ping(*inpayload, *outpayload); } // Delegate credentials. Should be called before making a new request else if (delegation.MatchNamespace(*inpayload)) { if (!delegation.Process(*inpayload, *outpayload)) { delete outpayload; return make_soap_fault(outmsg); } } // Unknown operation else { logger.msg(Arc::ERROR, "SOAP operation is not supported: %s", op.Name()); delete outpayload; return make_soap_fault(outmsg); } if (!result) return make_soap_fault(outmsg, result.getExplanation()); if (logger.getThreshold() <= Arc::DEBUG) { std::string str; outpayload->GetDoc(str, true); logger.msg(Arc::DEBUG, "process: response=%s", str); } outmsg.Payload(outpayload); if (!ProcessSecHandlers(outmsg,"outgoing")) { logger.msg(Arc::ERROR, "Security Handlers processing failed"); delete outmsg.Payload(NULL); return Arc::MCC_Status(); } } else { // only POST supported logger.msg(Arc::ERROR, "Only POST is supported in DataDeliveryService"); return Arc::MCC_Status(); } return Arc::MCC_Status(Arc::STATUS_OK); } Arc::MCC_Status DataDeliveryService::make_soap_fault(Arc::Message& outmsg, const std::string& reason) { Arc::PayloadSOAP* outpayload = new Arc::PayloadSOAP(ns,true); Arc::SOAPFault* fault = outpayload?outpayload->Fault():NULL; if(fault) { fault->Code(Arc::SOAPFault::Sender); if (reason.empty()) fault->Reason("Failed processing request"); else fault->Reason("Failed processing request: "+reason); } outmsg.Payload(outpayload); return Arc::MCC_Status(Arc::STATUS_OK); } } // namespace DataStaging extern Arc::PluginDescriptor const ARC_PLUGINS_TABLE_NAME[] = { { "datadeliveryservice", "HED:SERVICE", NULL, 0, &DataStaging::get_service }, { NULL, NULL, NULL, 0, NULL } }; nordugrid-arc-6.14.0/src/services/data-staging/PaxHeaders.30264/arc-datadelivery-service-start.in0000644000000000000000000000013014152153376030660 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 29 ctime=1638455100.70558206 nordugrid-arc-6.14.0/src/services/data-staging/arc-datadelivery-service-start.in0000644000175000002070000001004614152153376030650 0ustar00mockbuildmock00000000000000#!/bin/bash add_library_path() { location="$1" if [ ! "x$location" = "x" ] ; then if [ ! 
"$location" = "/usr" ] ; then libdir="$location/lib" libdir64="$location/lib64" if [ -d "$libdir64" ] ; then if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH="$libdir64" else LD_LIBRARY_PATH="$libdir64:$LD_LIBRARY_PATH" fi fi if [ -d "$libdir" ] ; then if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH="$libdir" else LD_LIBRARY_PATH="$libdir:$LD_LIBRARY_PATH" fi fi fi fi } prog=arched RUN=yes send_systemd_notify() { # return if no systemd-notify found type systemd-notify >/dev/null 2>&1 || return systemd-notify "$@" } log_failure_msg() { send_systemd_notify --status "Error: $@" echo $@ } # sysconfig files if [ -r /etc/sysconfig/nordugrid ]; then . /etc/sysconfig/nordugrid elif [ -r /etc/default/nordugrid ]; then . /etc/default/nordugrid fi if [ -r /etc/sysconfig/arc-datadelivery-service ]; then . /etc/sysconfig/arc-datadelivery-service elif [ -r /etc/default/arc-datadelivery-service ]; then . /etc/default/arc-datadelivery-service fi # GLOBUS_LOCATION GLOBUS_LOCATION=${GLOBUS_LOCATION:-@DEFAULT_GLOBUS_LOCATION@} if [ ! -d "$GLOBUS_LOCATION" ]; then log_failure_msg "GLOBUS_LOCATION ($GLOBUS_LOCATION) not found" exit 1 fi export GLOBUS_LOCATION # ARC_LOCATION ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ ! -d "$ARC_LOCATION" ]; then log_failure_msg "ARC_LOCATION ($ARC_LOCATION) not found" exit 1 fi export ARC_LOCATION readorigconfigvar() { value=`$ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --config "$1" -b "$2" -o "$3" 2>/dev/null` if [ $? -eq 0 ] ; then echo "$value" exit 0 else exit 1 fi } # ARC_CONFIG if [ "x$ARC_CONFIG" = "x" ]; then if [ -r $ARC_LOCATION/etc/arc.conf ]; then ARC_CONFIG=$ARC_LOCATION/etc/arc.conf elif [ -r /etc/arc.conf ]; then ARC_CONFIG=/etc/arc.conf fi fi PID_FILE=`readorigconfigvar "$ARC_CONFIG" datadelivery-service pidfile` if [ "x$PID_FILE" = "x" ]; then # Missing default value for pidfile means no service block is present log_failure_msg "ARC configuration is missing [datadelivery-service] block" exit 1 fi if [ "$1" = "--getpidfile" ] ; then echo $PID_FILE exit 0 fi LOG_FILE=`readorigconfigvar "$ARC_CONFIG" datadelivery-service logfile` if [ "x$LOG_FILE" = "x" ]; then log_failure_msg "Log file could not be found in [datadelivery-service] block" exit 1 fi if [ ! -d `dirname "$LOGFILE"` ]; then mkdir -p `dirname "$LOGFILE"` fi prepare() { CMD="$ARC_LOCATION/sbin/$prog" if [ ! -x "$CMD" ]; then log_failure_msg "Missing $CMD executable" exit 1 fi if [ ! -r "$ARC_CONFIG" ]; then log_failure_msg "ARC configuration not found (usually /etc/arc.conf)" exit 1 fi # check that if service is insecure no allowed_dns are defined SECURE=`readorigconfigvar "$ARC_CONFIG" datadelivery-service secure` ALLOWEDDN=`readorigconfigvar "$ARC_CONFIG" datadelivery-service allowed_dn` if [ "$SECURE" = "no" ]; then if [ "x$ALLOWEDDN" != "x" ]; then log_failure_msg "allowed_dn cannot be used with secure=no" exit 1 fi fi # Assuming ini style config CMD="$CMD -i $ARC_CONFIG -p $PID_FILE -l $LOG_FILE" add_library_path "$GLOBUS_LOCATION" if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH=$ARC_LOCATION/@libsubdir@ else LD_LIBRARY_PATH=$ARC_LOCATION/@libsubdir@:$LD_LIBRARY_PATH fi export LD_LIBRARY_PATH cd / } if [ "$RUN" != "yes" ] ; then echo "arc-datadelivery-service disabled, please adjust the configuration to your" echo "needs and then set RUN to 'yes' in /etc/default/arc-datadelivery-service to" echo "enable it." 
exit 0 fi prepare exec $CMD "$@" nordugrid-arc-6.14.0/src/services/data-staging/PaxHeaders.30264/README0000644000000000000000000000013114152153376023200 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.706582074 nordugrid-arc-6.14.0/src/services/data-staging/README0000644000175000002070000000011214152153376023160 0ustar00mockbuildmock00000000000000DataDeliveryService is a HED service for executing data transfer requests.nordugrid-arc-6.14.0/src/services/PaxHeaders.30264/monitor0000644000000000000000000000013214152153474021367 xustar000000000000000030 mtime=1638455100.623580827 30 atime=1638455103.996631509 30 ctime=1638455100.623580827 nordugrid-arc-6.14.0/src/services/monitor/0000755000175000002070000000000014152153474021431 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376023501 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.487578784 nordugrid-arc-6.14.0/src/services/monitor/Makefile.am0000644000175000002070000000032014152153376023461 0ustar00mockbuildmock00000000000000SUBDIRS = man mon-icons lang includes monitordir = @monitor_prefix@ dist_monitor_DATA = $(srcdir)/*.php $(srcdir)/*.js monitor_DATA = README install-data-local: $(MKDIR_P) $(DESTDIR)$(monitordir)/cache nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/userlist.php0000644000000000000000000000013214152153376024030 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.498578949 nordugrid-arc-6.14.0/src/services/monitor/userlist.php0000644000175000002070000002056014152153376024020 0ustar00mockbuildmock00000000000000title; $module = &$toppage->module; $strings = &$toppage->strings; $errors = &$toppage->errors; $giislist = &$toppage->giislist; // Header table $toppage->tabletop("",$toptitle." $family"); // Array defining the attributes to be returned $lim = array( "dn", USR_USSN, USR_CPUS, USR_QUEU, USR_DISK ); $ulim = array( "dn", JOB_NAME, JOB_EQUE, JOB_ECLU, JOB_GOWN, JOB_SUBM, JOB_STAT, JOB_USET, JOB_ERRS, JOB_CPUS ); if ( $debug ) { ob_end_flush(); ob_implicit_flush(); } $tlim = 20; $tout = 20; if( $debug ) dbgmsg("
    :::> ".$errors["114"].$tlim.$errors["102"].$tout.$errors["103"]." <:::

    "); // ldapsearch filter string for jobs $filter = "(&(objectclass=".OBJ_USER.")(".USR_USSN."=$uname))"; $ufilter = "(&(objectclass=".OBJ_AJOB.")(".JOB_GOWN."=$uname))"; $gentries = recursive_giis_info($giislist,"cluster",$errors,$debug); $nc = count($gentries); if ( !$nc ) { $errno = "1"; echo "
    ".$errors[$errno]."\n"; return $errno; } $dsarray = array (); $hnarray = array (); $pnarray = array (); $sitetag = array (); /* a tag to skip duplicated entries */ for ( $k = 0; $k < $nc; $k++ ) { $clhost = $gentries[$k]["host"]; $clport = $gentries[$k]["port"]; $ldapuri = "ldap://".$clhost.":".$clport; $clconn = ldap_connect($ldapuri); if ( $clconn && !$sitetag[$clhost] ) { array_push($dsarray,$clconn); array_push($hnarray,$clhost); array_push($pnarray,$clport); $sitetag[$clhost] = 1; /* filtering tag */ } } $nhosts = count($dsarray); if ( !$nhosts ) { // NO SITES REPLY... $errno = "2"; echo "
    ".$errors[$errno]."\n"; return $errno; } // Search all clusters for (a) allowed queues and (b) for user jobs $uiarray = @ldap_search($dsarray,DN_LOCAL,$filter,$lim,0,0,$tlim,LDAP_DEREF_NEVER); // Loop on results: first go queues // HTML table initialisation $utable = new LmTable("userres",$strings["userres"]); $urowcont = array(); $dnmsg = "".$errors["420"].": ".$uname; $utable->adderror($dnmsg, "#cccccc"); $nauclu = 0; $goodds = array(); $goodhn = array(); $goodpn = array(); for ( $ids = 0; $ids < $nhosts; $ids++ ) { $ui = $uiarray[$ids]; $hn = $hnarray[$ids]; $pn = $pnarray[$ids]; $dst = $dsarray[$ids]; $curl = popup("clusdes.php?host=$hn&port=$pn",700,620,1,$lang,$debug); if ($dst && $ui) { $nqueues = @ldap_count_entries($dst,$ui); if ($nqueues > 0) { $nauclu++; array_push($goodds,$dst); array_push($goodhn,$hn); array_push($goodpn,$pn); // If there are valid entries, tabulate results $allres = ldap_get_entries($dst,$ui); $results = ldap_purge($allres); $nqueues = $allres["count"]; // define("CMPKEY",USR_CPUS); // usort($allres,"ldap_entry_comp"); // loop on queues for ($j=0; $j<$nqueues; $j++) { $parts = ldap_explode_dn($allres[$j]["dn"],0); foreach ($parts as $part) { $pair = explode("=",$part); switch ( $pair[0] ) { case CLU_NAME: $ucluster = $pair[1]; break; case QUE_NAME: $uqueue = $pair[1]; break; } } if ( $debug == 2 ) dbgmsg("$hn -- $ucluster
    "); $qurl = popup("quelist.php?host=$ucluster&port=$pn&qname=$uqueue",750,430,6,$lang,$debug); $curl = popup("clusdes.php?host=$ucluster&port=$pn",700,620,1,$lang,$debug); $fcpu = $allres[$j][USR_CPUS][0]; $fproc = freeproc($fcpu); $fdisk = $allres[$j][USR_DISK][0]; $exque = $allres[$j][USR_QUEU][0]; $urowcont[] = "$ucluster:$uqueue"; $urowcont[] = $fcpu; $urowcont[] = $exque; $urowcont[] = $fdisk; $utable->addrow($urowcont); $urowcont = array(); } } else { $utable->adderror("".$errors["11"]." $hn"); } } else { $utable->adderror("$hn ".$errors["12"].""); } @ldap_free_result($ui); } $utable->adderror("".$errors["421"].$nauclu.$errors["422"]."", "#0099FF"); $utable->close(); echo "
    \n"; $srarray = @ldap_search($goodds,DN_LOCAL,$ufilter,$ulim,0,0,$tlim,LDAP_DEREF_NEVER); // HTML table initialisation $jtable = new LmTable($module,$toppage->$module); $rowcont = array(); $jcount = 0; $nghosts = count($goodds); for ( $ids = 0; $ids < $nghosts; $ids++ ) { $sr = $srarray[$ids]; $dst = $goodds[$ids]; $gpn = $goodpn[$ids]; $ghn = $goodhn[$ids]; if ($dst && $sr) { // If search returned, check that there are valid entries $nmatch = @ldap_count_entries($dst,$sr); if ($nmatch > 0) { // If there are valid entries, tabulate results $allentries = ldap_get_entries($dst,$sr); $entries = ldap_purge($allentries); $njobs = $entries["count"]; define("CMPKEY",JOB_SUBM); usort($entries,"ldap_entry_comp"); // loop on jobs for ($i=1; $i<$njobs+1; $i++) { $jobdn = rawurlencode($entries[$i]["dn"]); $curstat = $entries[$i][JOB_STAT][0]; $stahead = substr($curstat,0,12); if ($stahead=="FINISHED at:") { $ftime = substr(strrchr($curstat, " "), 1); $ftime = cnvtime($ftime); $curstat = "FINISHED at: ".$ftime; } $jname = htmlentities($entries[$i][JOB_NAME][0]); $jobname = ($entries[$i][JOB_NAME][0]) ? $jname : "N/A"; $queue = ($entries[$i][JOB_EQUE][0]) ? $entries[$i][JOB_EQUE][0] : "N/A"; $cluster = ($entries[$i][JOB_ECLU][0]) ? $entries[$i][JOB_ECLU][0] : "N/A"; $time = ($entries[$i][JOB_USET][0]) ? $entries[$i][JOB_USET][0] : "N/A"; $ncpus = ($entries[$i][JOB_CPUS][0]) ? $entries[$i][JOB_CPUS][0] : ""; $error = ($entries[$i][JOB_ERRS][0]); if ( $error ) $error = ( preg_match("/user/i",$error) ) ? "X" : "!"; if ( $debug == 2 ) dbgmsg("$ghn --- $cluster
    "); $newwin = popup("jobstat.php?host=$cluster&port=$gpn&status=$status&jobdn=$jobdn",750,430,4,$lang,$debug); $quewin = popup("quelist.php?host=$cluster&port=$gpn&qname=$queue",750,430,6,$lang,$debug); $clstring = popup("clusdes.php?host=$cluster&port=$gpn",700,620,1,$lang,$debug); $jcount++; // filling the table $rowcont[] = "$jcount $error"; $rowcont[] = "$jobname"; $rowcont[] = "$curstat"; $rowcont[] = "$time"; $rowcont[] = "$cluster"; $rowcont[] = "$queue"; $rowcont[] = "$ncpus"; $jtable->addrow($rowcont); $rowcont = array(); } } } @ldap_free_result($sr); } if ( !$jcount ) $jtable->adderror("".$errors["13"].$family.""); $jtable->close(); return 0; // Done $toppage->close(); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/lang0000644000000000000000000000013214152153474022310 xustar000000000000000030 mtime=1638455100.621580797 30 atime=1638455103.996631509 30 ctime=1638455100.621580797 nordugrid-arc-6.14.0/src/services/monitor/lang/0000755000175000002070000000000014152153474022352 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376024422 xustar000000000000000030 mtime=1638455038.439646485 30 atime=1638455038.512647582 30 ctime=1638455100.610580632 nordugrid-arc-6.14.0/src/services/monitor/lang/Makefile.am0000644000175000002070000000015614152153376024411 0ustar00mockbuildmock00000000000000monitorlangdir = @monitor_prefix@/lang monitorlang_DATA = $(srcdir)/*.inc EXTRA_DIST = $(monitorlang_DATA) nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/de.inc0000644000000000000000000000013214152153376023451 xustar000000000000000030 mtime=1638455038.439646485 30 atime=1638455038.513647597 30 ctime=1638455100.612580662 nordugrid-arc-6.14.0/src/services/monitor/lang/de.inc0000644000175000002070000014167214152153376023451 0ustar00mockbuildmock00000000000000 N/A bezeichnet einen Job ohne Namen.
    X bezeichnet einen Job, der durch den Nutzer abgebrochen wurde.
    ! bezeichnet einen Job, der nicht erfolgreich ausgeführt wurde.
    Klicken Sie auf den jeweiligen Jobnamen für eine detaillierte Beschreibung."; $str_nam = "Name des Nutzers wie im user certificate spezifiziert. Klicken Sie auf den Namen, um eine Liste aller Ressourcen zu erhalten, die für diesen Nutzer zur Verfügung stehen, sowie eine Liste aller Jobs dieses Users im System."; $str_sta = "Jobstatus wie angegeben durch the Grid Manager (GM) und LRMS.; Jobs durchlaufen die Zustände in der folgenden Reihenfolge:
    ACCEPTED – job wurde submitted aber er wird noch nicht ausgeführt.
    PREPARING – Eingabedateien werden übertragen
    SUBMITTING – Interaktion mit dem LRMS
    INLRMS – der Job ist unter der Kontrolle des LRMS; sein interner Zustand wird durch das Infosystem bestimmt. Mögliche solche Zustände sind:
    : Q – Job ist in der Queue (queued)
    : U – Job wurde unterbrochen (suspended) wegen eines anderen Prozesses (PBSPro)
    : S – Job wurde unterbrochen (suspended) (Condor)
    : R, run – Job wird ausgeführt
    : E – Job wird beendet (PBS)
    FINISHING – Ausgabedateien werden durch den GM transferiert
    FINISHED – Job wurde beendet, eine Zeitmarke (time stamp) wird durch das Infosystem hinzugefügt
    CANCELING – Job wurde abgebrochen
    DELETED – Job wurde nicht nach dem Download des Anwenders, sondern durch den GM wegen Überschreitung der Ablauffrist (expiration date) gelöscht.
    Jeder der Zust&aauml;nde kann durch den Prefix PENDING gekennzeichnet sein. Der GM versucht dann, diesen Zustand für diesen Job zu erreichen."; $str_tim = "CPU-Zeit des Jobs, gemessen in Minuten."; $str_mem = "Speicherbedarf des Jobs, gemessen in KB."; $str_cpu = "Die Anzahl genutzter Prozessoren des Jobs."; // Actual messages $message = array ( // Table headers and help (do not localize "0" and "help" keys) // For "help", keywords in
    must correspond to column titles below the help text ) "loadmon" => array( "0" => "Grid Monitor", "help" => "
    Diese Übersicht zeigt alle Teilnehmer, die an der Spitze des ARC registriert sind. Sie sind primär nach ihrem Land sortiert und dann anhand deren Namen. Ausgewählte Parameter werden überwacht: Cluster alias, die Anzahl aller CPUs und solcher reserviert für lokale Jobs, die Anzahl laufender und wartender Aufträge. Nutzen Sie die &quot;Search&quot; Funktion, um andere Charakteristika von Clustern, Queues, Aufträgen, etc. zu vergleichen
    Land
    ".$clickable.". Landesflagge und -name wie abgeleitet von der resource-Beschreibung. Anklicken, um Informationen zur Gridnutzung dieses Landes zu sehen.
    Cluster
    ".$clickable.". Alternativer Name des Clusters wie durch dessen owner festgelegt. Es werden maximal 22 Zeichen dargestellt. Durch Anklicken werden detaillierte Informationen zum Cluster dargestellt.
    CPUs
    Gesamtanzahl der CPUs im Cluster. NB! Nur ein Teil dieser mag tatsächlich auch für Grid Nutzer verfügbar sein.
    Last (Prozesse:Grid+lokal)
    ".$clickable.". Relative Auslastung des Clusters, abgeleitet von der Anzahl belegter CPUs. Graue Balken stellen die mit lokalen Jobs belegten CPUs dar, rote Balken solche, die von über das Grid submitteten Jobs beansprucht sind. Klicke auf die Balken, um eine detaillierte Liste aller Jobs zu erhalten, inklusive der Anzahl genutzter Prozessoren je Job.
    Wartend
    ".$clickable.". Anzahl aller wartenden Jobs auf dem Cluster, angezeigt als die Anzahl solcher durch das Grid submitteter Jobs plus die Anzahl derjenigen, die lokal submitted wurden. Klicke auf die erste Nummer, um die Liste der wartenden Grid-Jobs zu erhalten.
    ", "Land" => 30, "Site" => 160, "CPUs" => 10, "Last (Prozesse: Grid+lokal)" => 210, "In einer Queue" => 10 ), "clusdes" => array("0" => "Details einer Resource zu", "help" => "
    Attribut
    ".$clickable.". Cluster Attributename".$str_att."
    Wert
    ".$str_val."
    Queue
    ".$clickable.". Namen von batch queues verfügbar für ARC Nutzer, wie festgelegt durch die owner des Clusters." .$str_que."
    Status
    Queue status. Eine operationelle Queue hat typischerweise den Status active.
    CPU (min)
    Zeitbegrenzung für einen Job. Der erste Wert ist untere Grenze, der zweite die obere. Wenn keine Begrenzungen gesetzt sind, es wird dann jede Lauflänge akzeptiert, wird N/A angezeigt.
    Running
    Die Anzahl von Jobs, die in der Queue aktiv sind. Die Gesamtanzahl der Jobs wird angezeigt, mit der Anzahl belegter Prozessoren in Klammern. NB! Für Jobs mit Parallelverarbeitung kann diese Anzahl deutlich höher sein als die Anzahl der Jobs.
    Queing
    Anzahl von Jobs, die auf deren Ausführung warten. Die Gesamtanzahl wird angezeigt mit der Anzahl durch das Grid submitteter Jobs in Klammern.
    ", "Queue" => 0, "Mapping Queue" => 0, "Status" => 0, "Limiten (min)" => 0, "CPUs" => 0, "Running" => 0, "Queueing" => 0 ), "jobstat" => array("0" => "Jobs at:Job ID", "help" => "
    JOB LIST:
    Job name
    ".$clickable.". Name des Jobs wie durch den owner festgelegt. Wenn kein Name zugewiesen wurde, so wird "N/A" angezeigt. Bei Klick auf dem Namen wird eine detaillierte Beschreibung des Jobs angezeigt.
    Owner
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Queue
    ".$clickable.". Name der Queue in der der Job ausgeführt wird. ".$str_que."
    CPUs
    ".$str_cpu."
    JOB DETAILS:
    Attribut
    ".$clickable.". Job Attributname".$str_att."
    Wert
    ".$str_val."
    ", "Jobname" => 0, "Eigner" => 0, "Status" => 0, "CPU (min)" => 0, "Queue" => 0, "CPUs" => 0 ), "volist" => array("0" => "Virtuelle Organisationen", "help" => "
    Virtuelle Organisation
    ".$clickable.". Gruppe von Anwendern, üblicherweise gemeinsame Aktivit&auml;ten und Ressourcen teilend. Wenigstens ein Cluster des ARC akzeptiert diese. Klicken Sie auf den Namen, um eine Liste der Mitglieder zu erhalten.
    Mitglieder
    Anzahl der Mitglieder.
    Verwaltet durch
    LDAP Server der die Mitglieder-Datenbank hält.
    ", "Virtuelle Organisation" => 0, "Mitglieder" => 0, "Verwaltet durch" => 0 ), "vousers" => array("0" => "Grid User Base", "help" => "
    Name
    ".$clickable.". ".$str_nam."
    Affiliation
    Des Nutzers Arbeitgeber wie durch den VO manager angegeben. Kann freigelassen werden.
    E-mail
    ".$clickable.". Des Nutzers eMail-Adresse wie angegeben durch den VO Manager. Darf freigelassen werden. Durch Anlicken der Adresse kann eine eMail an den Nutzer gesendet werden.
    ", "#" => 0, "Name" => 0, "Zugehörigkeit" => 0, "E-mail" => 0 ), "userlist" => array("0" => "Information für", "help" => "
    Cluster:queue
    ".$clickable.". Namen der Cluster und deren Queues (getrennt durch einen Doppelpunkt ":") auf welche ein Nutzer Zugriff hat. Ist ein Nutzer nicht autorisiert, wird die Nachricht "Not authorised at host ..." angezeigt. Bei Anlicken der Cluster Namens wird die Beschreibung des Clusters gegeben, genauso wie bei einer Auswahl der Queue.
    Freie CPUs
    Die Anzahl von freien CPUs, die für eine bestimmte Queue für einen bestimmten Nutzer zu einem bestimmten Moment, ggf. eingeschränkt durch die Angabe der maximalen Laufzeit (Angabe in Minuten), verfügbar sind. Zum Beispiel bedeutet "3", daß 3 CPUs für einen Job unbeschränkter Laufzeit verfügbar sind. "4:360" beschreibt die Verfügbarkeit von vier Jobs für nicht länger al 6 Stunden. "10:180 30" bedeutet, daß 10 CPUs verfügbar sind für Jobs, die nicht länger rechnen als 3 Stunden, sowie weitere 30 für Jobs mit unbeschränkter Laufzeit. "0" bedeutet, daß keine CPUs verfügbar sind und neue Jobs entsprechend warten müssen.
    Wartenden Jobs
    Anzahl von Jobs des Anwenders, die in der Wartschlange vor einem Neuen Job sind. Die Zahl "0" bedeutet, dass der Job sofort ausgeführt wird. NB! Dies ist nur eine Abschätzung, durch den Einfluss lokaler Administratoren ist eine sichere Angabe nicht möglich.
    Freier Diskplatz (MB)
    Für einen Nutzer verfügbarer Diskplatz (in Megabytes). NB! Dies ist nur eine Abschätzung, die meisten Cluster haben keine solchen Quotas festgelegt.
    Jobname
    ".$clickable.". ".$str_job."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Cluster
    ".$clickable.". Name des Clusters bei dem der Job ausgeführt wird. Bei Klick auf den Namen werden detaillierte Informationen zu dem Cluster präsentiert.
    Queue
    ".$clickable.". Name der Queue, in which der Job ausgeführt wird oder wurde. ".$str_que."
    CPUs
    ".$str_cpu."
    ", "" => 0, "Jobname" => 0, "Status" => 0, "CPU (min)" => 0, "Cluster" => 0, "Queue" => 0, "CPUs" => 0 ), "attlist" => array("0" => "Attributwerte", "help" => "
    Objekt
    ".$clickable.". Names des Objektes dessen Attribute angezeigt werden. Es kann ein Cluster sein, dessen Queue, ein Job, ein Anwender etc. Nach einer Auswahl durch Anklicken der Zeichenkette werden detaillierte Information angezeigt.
    Attribute
    Fü jedes Objekt wird eines oder mehrere Attribute angezeigt. Der Spaltentitel ist der Klarname des Attributes, von einigen MDS-spezifischen Attributen abgesehen, der Inhalt entspricht den Werten wie sie im Informationssystem abgelegt sind.
    ", "Object" => 0, "Attribute" => 0 ), "quelist" => array("0" => "Queue", "help" => "
    Attribut
    ".$clickable.". Name des Queue Attributs".$str_att."
    Wert
    ".$str_val."
    Jobname
    ".$clickable.". ".$str_job."
    Eigner
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Speicher (KB)
    ".$str_mem."
    CPUs
    ".$str_cpu."
    ", "" => 0, "Jobname" => 0, "Eigner" => 0, "Status" => 0, "CPU (min)" => 0, "Speicher (KB)" => 0, "CPUs" => 0 ), "sestat" => array("0" => "Storage Elements", "help" => "
    Alias
    Storage Element Zweitname wie festgelegt im Informationssystem. Maximal 15 Zeichen werden angezeigt.
    Tot. space
    Totaler Plattenplatz (GB).
    Freier Plattenplatz
    Verfügbarer Plattenplatz in GB.
    Name
    Name des Storage Elements, bestehend aus einem logischen Namen und dem Namen des Hosts, getrennt durch einen Doppelpunk ":"). Der logische Name wird nur für die interne Verwaltung genutzt, um verschiedene Einheiten auf demselben System zu unterscheiden.
    Basis URL
    URL des Storage Elements, üblich ist die Verwendung des gsiftp:// Protokols. Dieser URL dient als Basis für den Zugriff auf Dateien.
    Typ
    Storage Element typ. "gridftp-based" beschreibt Plattenplatz der über die GridFTP Schnittstelle verfügbar ist.
    ", "#" => 0, "Alias" => 0, // "Tot. Platz" => 0, "Freier/Tot. Platz, GB" => 0, "Name" => 0, "Basis URL" => 0, "Typ" => 0 ), "allusers" => array("0" => "Authorised Grid Users:Active Grid Users", "help" => "
    Name
    ".$clickable.". ".$str_nam."
    Zugehörigkeit:
    Das Institut oder die Firma, die den Anwender beschäftigt. Der Eintrag ist abgeleitet vom personal certificate
    Jobs
    Zählt alle Jobs des Anwenders im System (running, pending, finished oder deleted)
    Sites
    Gibt an, wieviele teilnehmende Cluster Aufträge dieses Nutzers annehmen.
    ", "#" => 0, "Name" => 0, "Zugehörigkeit" => 0, "Jobs" => 0, "Sites" => 0 ), "userres" => array("0" => "", "Cluster:queue" => 0, "Freie CPUs" => 0, "Wartenden Jobs" => 0, "Freier Plattenplatz (MB)" => 0 ), "ldapdump" => array("0" => "", "Attribut" => 0, "Wert" => 0 ), // IS attributes "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Info gültig von (GMT)", "Mds-validto" => "Info gültig bis (GMT)" ), "isattr" => array( "nordugrid-cluster-name" => "Front-end domain Name", "nordugrid-cluster-aliasname" => "Cluster Alias", "nordugrid-cluster-contactstring" => "Kontakt", "nordugrid-cluster-interactive-contactstring" => "Interactiver Kontakt", "nordugrid-cluster-comment" => "Kommentar", "nordugrid-cluster-support" => "E-mail Kontakt", "nordugrid-cluster-acl" => "Autorisierte VOs", "nordugrid-cluster-lrms-type" => "LRMS Typ", "nordugrid-cluster-lrms-version" => "LRMS Version", "nordugrid-cluster-lrms-config" => "LRMS Details", "nordugrid-cluster-architecture" => "Architektur", "nordugrid-cluster-opsys" => "Operating System", "nordugrid-cluster-homogeneity" => "Homogener Cluster", "nordugrid-cluster-nodecpu" => "CPU Typ (langsamster)", "nordugrid-cluster-nodememory" => "Memory (MB, kleinster)", "nordugrid-cluster-totalcpus" => "CPUs, gesamt", "nordugrid-cluster-cpudistribution" => "CPU:Hosts", "nordugrid-cluster-benchmark" => "Benchmark", "nordugrid-cluster-sessiondir-free" => "Plattenplatz, verfügbar (MB)", "nordugrid-cluster-sessiondir-total" => "Plattenplatz, gesamt (MB)", "nordugrid-cluster-sessiondir-lifetime"=> "Lebensdauer der Grid Session (min)", "nordugrid-cluster-cache-free" => "Cache size, verfügbar (MB)", "nordugrid-cluster-cache-total" => "Cache size, gesamt (MB)", "nordugrid-cluster-runtimeenvironment" => "Runtime environment", "nordugrid-cluster-localse" => "Storage Element, lokal", "nordugrid-cluster-middleware" => "Grid middleware", "nordugrid-cluster-totaljobs" => "Jobs, totale Anzahl", "nordugrid-cluster-usedcpus" => "CPUs, belegt", "nordugrid-cluster-queuedjobs" => "Jobs, in Queue wartend", "nordugrid-cluster-prelrmsqueued" => "Grid jobs, noch nicht submitted", "nordugrid-cluster-location" => "Postleitzahl", "nordugrid-cluster-owner" => "Eigner", "nordugrid-cluster-issuerca" => "Zertifikat-Aussteller", "nordugrid-cluster-issuerca-hash" => "Zertifikat-Aussteller's kash", "nordugrid-cluster-trustedca" => "Akzeptierte Zertificat-Aussteller", "nordugrid-cluster-nodeaccess" => "IP Konnektivität der Hosts", "nordugrid-cluster-gridarea" => "Session area (OBSOLETE)", "nordugrid-cluster-gridspace" => "Grid Plattenplatz (OBSOLETE)", "nordugrid-cluster-opsysdistribution" => "OS Distribution (OBSOLETE)", "nordugrid-cluster-runningjobs" => "Jobs, running (OBSOLETE)", "nordugrid-cluster-credentialexpirationtime" => "Credential expiration time", "nordugrid-queue-name" => "Queue Name", "nordugrid-queue-comment" => "Kommentar", "nordugrid-queue-status" => "Queue Status", "nordugrid-queue-running" => "CPUs, belegt", "nordugrid-queue-localqueued" => "Local jobs, queued", "nordugrid-queue-prelrmsqueued" => "Grid jobs, noch nicht submitted", "nordugrid-queue-queued" => "Jobs, queued (OBSOLETE)", "nordugrid-queue-maxrunning" => "Jobs, running (max)", "nordugrid-queue-maxqueuable" => "Jobs, queueable (max)", "nordugrid-queue-maxuserrun" => "Jobs pro Unix User (max)", "nordugrid-queue-maxcputime" => "CPU Zeit, max. (min)", "nordugrid-queue-mincputime" => "CPU Zeit, min. 
(min)", "nordugrid-queue-defaultcputime" => "CPU Zeit, default (min)", "nordugrid-queue-maxwalltime" => "Zeit auf Küchenuhr, max. (min)", "nordugrid-queue-minwalltime" => "Zeit auf Küchenuhr, min. (min)", "nordugrid-queue-defaultwalltime" => "Zeit auf Küchenuhr, default (min)", "nordugrid-queue-schedulingpolicy" => "Scheduling policy", "nordugrid-queue-totalcpus" => "CPUs, gesamt", "nordugrid-queue-nodecpu" => "CPU Typ", "nordugrid-queue-nodememory" => "Speicher (MB)", "nordugrid-queue-architecture" => "Architektur", "nordugrid-queue-opsys" => "Betriebssystem", "nordugrid-queue-homogeneity" => "Homogene Queue", "nordugrid-queue-gridrunning" => "CPUs occupied by Grid jobs", "nordugrid-queue-gridqueued" => "Grid jobs, queued", "nordugrid-queue-benchmark" => "Benchmark", "nordugrid-queue-assignedcpunumber" => "CPUs je Queue (OBSOLETE)", "nordugrid-queue-assignedcputype" => "CPU typ (OBSOLETE)", "nordugrid-job-globalid" => "ID", "nordugrid-job-globalowner" => "Eigner", "nordugrid-job-execcluster" => "Execution Cluster", "nordugrid-job-execqueue" => "Execution Queue", "nordugrid-job-stdout" => "Standard output file", "nordugrid-job-stderr" => "Standard error file", "nordugrid-job-stdin" => "Standard input file", "nordugrid-job-reqcputime" => "Verlangte CPU Zeit", "nordugrid-job-reqwalltime" => "Verlangte Zeit auf Küchenuhr", "nordugrid-job-status" => "Status", "nordugrid-job-queuerank" => "Position in der Queue", "nordugrid-job-comment" => "LRMS Kommentar", "nordugrid-job-submissionui" => "Submitted von", "nordugrid-job-submissiontime" => "Submitted um (GMT)", "nordugrid-job-usedcputime" => "Benötigte CPU Zeit", "nordugrid-job-usedwalltime" => "Benötigte Zeit auf Küchenuhr", "nordugrid-job-completiontime" => "Job beendet um (GMT)", "nordugrid-job-sessiondirerasetime" => "Job gelöscht um (GMT)", "nordugrid-job-proxyexpirationtime" => "Proxy Verfallzeit (GMT)", "nordugrid-job-usedmem" => "Verwendeter Speicher (KB)", "nordugrid-job-errors" => "Errors", "nordugrid-job-exitcode" => "Exit Code", "nordugrid-job-jobname" => "Name", "nordugrid-job-runtimeenvironment" => "Runtime environment", "nordugrid-job-cpucount" => "Angeforderte CPUs", "nordugrid-job-executionnodes" => "Aufrührende Rechner", "nordugrid-job-gmlog" => "GM Logdatei", "nordugrid-job-clientsoftware" => "Version des Clients", "nordugrid-job-rerunable" => "Rerunnable", "nordugrid-job-reqcput" => "Requested time (OBSOLETE)", "nordugrid-job-gridlog" => "Gridlog file (OBSOLETE)", "nordugrid-job-lrmscomment" => "LRMS comment (OBSOLETE)", "nordugrid-authuser-name" => "Name", "nordugrid-authuser-sn" => "Subject Name", "nordugrid-authuser-freecpus" => "Freie CPUs", "nordugrid-authuser-diskspace" => "Freier Plattenplatz (MB)", "nordugrid-authuser-queuelength" => "Wartende Jobs des Users", "nordugrid-se-name" => "Name", "nordugrid-se-aliasname" => "Storage Element Alias", "nordugrid-se-type" => "Storage Element Typ", "nordugrid-se-acl" => "Autorisierte VOs", "nordugrid-se-freespace" => "Freier Plattenplatz (MB)", "nordugrid-se-totalspace" => "Gesamter Plattenplatz (MB)", "nordugrid-se-url" => "Kontakt URL", "nordugrid-se-baseurl" => "Kontakt URL (OBSOLETE)", "nordugrid-se-accesscontrol" => "Zugangskontrolle", "nordugrid-se-authuser" => "Zugelassene User (DN)", "nordugrid-se-location" => "Postleitzahl", "nordugrid-se-owner" => "Eigner", "nordugrid-se-middleware" => "Middleware", "nordugrid-se-issuerca" => "Zertifikat-Aussteller", "nordugrid-se-issuerca-hash" => "Zertifikat-Aussteller ID", "nordugrid-se-trustedca" => "Vertrauten 
Zertifikat-Ausstellern", "nordugrid-se-comment" => "Kommentar", "nordugrid-rc-name" => "Domainname", "nordugrid-rc-aliasname" => "Replica Catalog Alias", "nordugrid-rc-baseurl" => "Kontakt URL", "nordugrid-rc-authuser" => "Zugelassene User (DN)", "nordugrid-rc-location" => "Postleitzahl", "nordugrid-rc-owner" => "Eigner", "nordugrid-rc-issuerca" => "Zertifikat-Aussteller" ), // Errors, warnings etc "errors" => array( // failure notices "1" => "Die top-level resource Indizes konnten nicht gelesen werden", "2" => "Keiner der lokalen Indizes konnte erreicht werden", // ? "3" => " schlechte Konfiguration oder Zeitüberschreitung bei der Anfrage", "4" => "Keine Grid Jobs gefunden", "5" => "Keine Information gefunden", "6" => "Server nicht verfügbar", "7" => " - später neu laden", "8" => "Keine Informationen zur Queue gefunden", "9" => "Keine Ei nträge gefunden.", "10" => "Keine Nutzer gefunden.", "11" => "Bei diesem Host nicht autorisiert", "12" => "antwortet nicht", "13" => "Keine jüngst submitteten Jobs gefunden für ", // debug messages "101" => " Monitor timeouts für GRIS: ", "102" => " sek für Verbindung und ", "103" => " sek beim Suchen", "104" => " sek verbracht beim Suchen", "105" => "Zeige Ressourcen nur in ", "106" => "Polled top-level Indizes: ", "107" => "Erhielt geographische Ortsangaben, gescante Sites: ", // ? "108" => " sites geographisch geordnet", "109" => "Suche nach Cluster Attributen", "110" => "Suche for Queue Attributen", "111" => "Keine Daten von ", "112" => " funktioniert in ", // ? "113" => " hat keine Resourcen anzubieten", "114" => " Monitor timeouts für GIIS: ", "115" => "Überspringe GRIS: ", "116" => "nicht ein ", "117" => "Teste Verbindung: ", "118" => "OK", "119" => "Entdeckte bislang Ressourcen der folgenden Art ", "120" => "LDAP Fehler beim Suchen ", "121" => " Status bei ", "122" => "Blacklisted: ", "123" => "Registrant gefunden für ", "124" => "Suche nach SE Attributen", "125" => "Suche nach Nutzern", "126" => "Suche nach jobs", "127" => " hat Job ", "128" => " obwohl nicht autorisiert", "129" => "Kann die Objektdaten nicht erhalten: Fehler ", "130" => " Monitor timeouts für EMIR: ", // icon titles "301" => "Update", "302" => "Drucken", "303" => "Hilfe", "304" => "Schließen", "305" => "Rot", "306" => "Grün", "307" => "Alle Nutzer", "308" => "Aktive Nutzer", "309" => "Suchen", "310" => "Storage", "311" => "VOs", "312" => "Flagge von ", "313" => " Grid Prozesse und ", "314" => " lokale Prozesse", // auxilliary strings "401" => "Prozesse", "402" => "Grid", "403" => "Lokal", "404" => "Globus", "405" => "TOTAL", "406" => " sites", "407" => "eine Menge", "408" => " GB", "409" => " ALLE", "410" => "Cluster", "411" => "Queue", "412" => "Job", "413" => "Nutzer", "414" => "Storage", "415" => "Replica Cat.", "416" => "Definiere Attribute, die für das Objekt anzuzeigen sind: ", "417" => "AND von allen Ausdrücken wird gesucht", // ? "418" => "Feld ganz rechts freilassen, um alles anzuzeigen", "419" => "Personalisierte Anzeige von Ressourcen", "420" => "Eindeutiger Name", "421" => "Kann insgesamt nutzen ", "422" => " sites", "423" => "Resource / Object:", "424" => "Nr. von Attributen (def. 
6):", "425" => "Objekt", "426" => "Nächstes", "427" => "Auswahl", "428" => "Reset", "429" => "ANZEIGEN" ), // Post code conversion "tlconvert" => array ( "Australia" => "Australien", "Austria" => "Österreich", "Armenia" => "Armenien", "Algeria" => "Algerien", "Belgium" => "Belgien", "Bulgaria" => "Bulgarien", "Canada" => "Canada", "China" => "China", "Czechia" => "Tschechien", "Denmark" => "Dänemark", "Estonia" => "Estland", "Finland" => "Finnland", "France" => "Frankreich", "Georgia" => "Georgien", "Germany" => "Deutschland", "Greece" => "Griechenland", "Hungary" => "Ungarn", "Iceland" => "Island", "Ireland" => "Irland", "Italy" => "Italien", "Japan" => "Japan", "Latvia" => "Lettland", "Lithuania" => "Litauen", "Morocco" => "Marokko", "Netherlands" => "Niederlande", "Norway" => "Norwegen", "Poland" => "Polen", "Portugal" => "Portugal", "Romania" => "Rumänien", "Russia" => "Russland", "SriLanka" => "Sri Lanka", "Sweden" => "Schweden", "Slovakia" => "Slowakei", "Slovenia" => "Slowenien", "Switzerland" => "Schweiz", "Turkey" => "Türkei", "UK" => "UK", "Ukraine" => "Ukraine", "USA" => "USA" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/sk.inc0000644000000000000000000000013214152153376023476 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 30 ctime=1638455100.618580752 nordugrid-arc-6.14.0/src/services/monitor/lang/sk.inc0000644000175000002070000015155514152153376023477 0ustar00mockbuildmock00000000000000ARC
    ."; $str_val = "Hodnota atribútu v InformaÄnom Systéme."; $str_que = "ObyÄajne sa jednotlivé rady navzájom líšia prípustným trvaním úloh, prípadne rôznymi skupinami užívateľov. Po kliknutí na prísluÅ¡nú radu získate podrobné informácie vrátane zoznamu bežiacich, Äakajúcich a ukonÄených úloh."; $str_job = "Názov úlohy zvolený užívateľom.
    N/A znamená, že vlastník nepriradil úlohe žiadne meno.
    X znamená, že úloha bola ukončená vlastníkom.
    ! znamená, že pri plnení úlohy došlo k chybe.
    Kliknutím zobrazíte podrobné informácie o úlohe."; $str_nam = "Meno užívateľa, podľa jeho osobného certifikátu. Kliknutím získate zoznam všetkých jemu dostupných zdrojov, ako aj zoznam všetkých úloh spustených týmto užívateľom momentálne v systéme."; $str_sta = "Stav úlohy: podľa Gridového Manažéra (GM) a systému správy lokálnych zdrojov (LRMS). Poradie možných stavov je nasledujúce:
    ACCEPTED – úloha je prijatá, ale jej vykonávanie ešte nezačalo
    PREPARING – sťahujú sa vstupné súbory
    SUBMITTING – informácie sa posielajú do LRMS
    INLRMS – správa úlohy predaná LRMS; informačný systém zabezpečuje informácie o vnútornom stave úlohy. Možné sú nasledujúce stavy:
    : Q – úloha čaká v rade
    : U – úloha je pozastavená na preťaženom pracovnom uzle (PBSPro)
    : S – úloha je pozastavená (Condor)
    : R, run – úloha sa vykonáva
    : E – úloha sa ukončuje (PBS)
    FINISHING – výstupné súbory sú prenášané na miesto určenia
    FINISHED – úloha je ukončená; čas ukončenia je stanovený informačným systémom
    CANCELING – úloha sa ruší
    DELETED – výstupy úlohy nezmazané užívateľom, ale zmazané GM po expiračnej dobe
    Ku každému stavu môže byť pridaná predpona \"PENDING:\", ktorá značí, že GM práve nemôže prejsť k nasledujúcemu kroku v dôsledku vnútorných obmedzení."; $str_tim = "CPU čas spotrebovaný úlohou, v minútach."; $str_mem = "Rozsah operačnej pamäte využívanej úlohou, v KB."; $str_cpu = "Počet CPU využívaných úlohou."; // Actual messages $message = array ( // Table headers and help (do not localize "0" and "help" keys) // For "help", keywords in
    must correspond to column titles below the help text ) "loadmon" => array( "0" => "Grid Monitor", "help" => "
    V tomto okne sa zobrazujú všetky výpočtové zdroje, ktoré sa registrujú do najvyššieho indexovacieho servisu ARC. Tabuľka je usporiadaná podľa anglického názvu príslušnej krajiny a v sekcii pre danú krajinu podľa názvu hlavného stroja daného zdroja (výpočtového klástra). Pre každý kláster sa uvádzajú nasledujúce parametre: názov, celkové množstvo procesorov, počet obsadených procesorov a tiež počet bežiacich a čakajúcich úloh tak spustených cez Grid ako aj lokálne. Použite utilitu &quot;Vyhľadávanie&quot; pre prezeranie a porovnanie s parametrami ostatných klástrov, rád, úloh atď.
    Krajina
    ".$clickable.". Vlajka a názov krajiny. Názov krajiny určený z dostupného popisu výpočtového zdroja. Kliknite pre zobrazenie zdrojov výlučne z danej krajiny.
    Zdroj
    ".$clickable.". Názov zdroja (obyčajne výpočtového klástra) daný jeho majiteľom. Maximálna dĺžka zobrazeného reťazca je 22 znakov. Kliknite na názov pre podrobné informácie o zdroji.
    CPU
    Celkový počet CPU zdroja. Pozor! Je možné, že iba časť je využiteľná cez Grid.
    Záťaž (procesy)
    ".$clickable.". Relatívne využitie zdroja, zodpovedajúce počtu zaťažených CPU. Sivá úsečka zodpovedá počtu procesorov obsadených lokálnymi úlohami, zelená úsečka zodpovedá procesorom vykonávajúcim Gridové úlohy. Kliknite pre podrobné informácie o všetkých Gridových úlohách bežiacich na zdroji, vrátane informácií o počte procesorov na každú úlohu.
    Čakajúce
    ".$clickable.". Počet všetkých úloh čakajúcich v rade na danom zdroji, uvádzaný ako súčet Gridových a lokálnych úloh. Kliknite na prvé z čísel pre podrobné informácie o všetkých Gridových úlohách čakajúcich v rade.
    ", "Krajina" => 30, "Zdroj" => 160, "Počet CPU" => 10, "Záťaž (procesy: Grid+lokálne)" => 210, "Čakajúce" => 10 ), "clusdes" => array( "0" => "Opis zdroja", "help" => "
    Atribút
    ".$clickable.". Názov atribútov zdroja".$str_att."
    Hodnota
    ".$str_val."
    Rada
    ".$clickable.". Názvy (dané majiteľom zdroja) rád, dostupných Gridovým užívateľom. ".$str_que."
    Stav
    Stav rady. Fungujúca rada obyčajne udáva stav active.
    Trvanie (min)
    Limit trvania úlohy v danej rade - ak je limit stanovený - v minútach procesorového času. Prvá zobrazená hodnota je dolný limit, druhá predstavuje horné ohraničenie tohto parametra. Ak limity nie sú stanovené (úlohy ľubovoľného času trvania sú akceptované), zobrazí sa reťazec N/A.
    Bežiace
    Počet úloh spracovávaných v rade. Zobrazuje sa celkový počet úloh, pričom počet procesorov obsadených Gridovými úlohami je uvedený v zátvorkách, napr. (Grid: 12). Pozor! Pri paralelných mnohoprocesorových úlohách môže byť číslo v zátvorkách väčšie ako počet úloh.
    Čakajúce
    Počet úloh čakajúcich na spustenie v rade. Zobrazuje sa celkový počet úloh, pričom množstvo úloh spustených cez Grid je uvedené v zátvorkách, napr. (Grid: 235)
    ", "Rada" => 0, "Mapping Queue" => 0, "Stav" => 0, "Trvanie (min)" => 0, "Počet CPU" => 0, "Bežiace" => 0, "Čakajúce" => 0 ), "jobstat" => array( "0" => "Úlohy na:identifikátor úlohy", "help" => "
    ZOZNAM ÚLOH:
    Názov úlohy
    ".$clickable.". Názov úlohy daný užívateľom. "N/A" znamená, že užívateľ úlohe názov nepriradil. Po kliknutí sa zobrazí detailný popis úlohy.
    Užívateľ
    ".$clickable.". ".$str_nam."
    Stav
    ".$str_sta."
    CPU čas (min)
    ".$str_tim."
    Rada
    ".$clickable.". Názov rady, v ktorej sa úloha vykonáva. ".$str_que."
    Procesory
    ".$str_cpu."
    OPIS ÚLOHY:
    Atribút
    ".$clickable.". Názvy atribútov úlohy".$str_att."
    Hodnota
    ".$str_val."
    ", "Názov úlohy" => 0, "Užívateľ" => 0, "Stav" => 0, "CPU Äas (min)" => 0, "Rada" => 0, "PoÄet CPU" => 0 ), "volist" => array( "0" => "Virtuálne organizácie", "help" => "
    Virtuálne organizácie
    ".$clickable.". Skupina užívateľov, obyÄajne zdieľajúca spoloÄné aktivity a zdroje, autorizovaná na aspoň jednom zdroji zapojenom v ARC. Po kliknutí sa zobrazí zoznam Älenov skupiny.
    ÄŒlenovia
    PoÄet Älenov skupiny.
    Obsluhuje sa
    Adresa servera spravujúceho databázu s údajmi o Älenoch skupiny.
    ", "Virtuálna organizácia" => 0, "Členovia" => 0, "Obsluhuje sa" => 0 ), "vousers" => array( "0" => "Užívatelia", "help" => "
    Meno
    ".$clickable.". ".$str_nam."
    Pracovisko
    Pracovisko užívateľa, podľa zápisu v databáze. Nepovinný údaj.
    Elektronická pošta
    ".$clickable.". Adresa elektronickej poštovej schránky užívateľa, podľa zápisu v databáze. Nepovinný údaj. Kliknite na adresu pre poslanie správy užívateľovi.
    ", "#" => 0, "Meno" => 0, "Pracovisko" => 0, "Elektronická pošta" => 0 ), "userlist" => array( "0" => "Informácia pre", "help" => "
    Zdroj:rada
    ".$clickable.". Názvy zdrojov (klástrov) a zodpovedajúcich rád lokálnych systémov správy úloh (LRMS) (oddelené dvojbodkou,":"), na ktorých je užívateľ oprávnený posielať úlohy. Ak užívateľ nie je oprávnený, objaví sa správa: "Not authorised at host ...". Po kliknutí na názov klástra sa zobrazí jeho podrobný opis. Pre získanie podrobností o rade kliknite na jej názov.
    Voľné CPU
    Počet voľných CPU v danej rade, pre daného užívateľa, v danom čase. V prípade, že rada využíva časové ohraničenia na prípustné trvanie behu úlohy, je tento údaj zobrazený za číslom reprezentujúcim počet procesorov (v minútach, oddelený dvojbodkou). Napríklad "3" znamená dostupnosť troch voľných CPU pre úlohy s akoukoľvek dĺžkou trvania; "4:360" označuje dostupnosť štyroch voľných CPU, pre úlohy s dobou trvania behu nie dlhšou ako šesť hodín; "10:180:30" znamená, že je dostupných desať CPU pre úlohy s trvaním nepresahujúcim 3 hodiny a ďalších tridsať procesorov, ktoré môžu prijať úlohy s neobmedzeným časom behu; "0" znamená, že v danom momente nie sú žiadne voľné CPU a úlohy budú zaradené do príslušnej rady ako čakajúce.
    Úlohy v rade
    Počet úloh užívateľa v zozname čakajúcich úloh pred novou úlohou, zaslanou daným užívateľom. Počet "0" znamená, že úloha by sa mala začať vykonávať okamžite. POZOR! Je to odhad, ktorý nemusí zohľadňovať všetky lokálne nastavenia správcu zdroja.
    Voľný diskový priestor (MB)
    Diskový priestor dostupný pre užívateľa v danej rade (v megabajtoch). POZOR! Ide len o odhad, väčšina zdrojov nepodporuje resp. nevyužíva kvóty na diskový priestor.
    Názov úlohy
    ".$clickable.". ".$str_job."
    Stav
    ".$str_sta."
    Čas (min)
    ".$str_tim."
    Zdroj
    ".$clickable.". Názov zdroja (obyÄajne klástra), na ktorom sa úloha vykonáva. Po kliknutí sa zobrazia podrobné informácie o zdroji.
    Rada
    ".$clickable.". Názov rady v LRMS, v ktorej sa vykonávajúcej úlohu. ".$str_que."
    Počet CPU
    ".$str_cpu."
    ", "" => 0, "Názov úlohy" => 0, "Stav" => 0, "Čas (min)" => 0, "Zdroj" => 0, "Rada" => 0, "Počet CPU" => 0 ), "attlist" => array( "0" => "Attribute values", "help" => "
    Objekt
    ".$clickable.". Názov objektu, atribúty ktorého sú zobrazené. Môže ísÅ¥ o názov rady klástra, názov úlohy, meno užívateľa atÄ. Po kliknutí sa zobrazia podrobné informácie o objekte.
    Atribút
    Pre každý objekt v tabuľke je možné uviesÅ¥ jeden alebo viacero atribútov. V hlaviÄke stĺpca je uvedený názov atribútu, upravený do jednoducho Äitateľnej formy (s výnimkou niekoľkých atribútov Å¡pecifických pre systém MDS), obsah jednotlivých stĺpcov predstavujú význam týchto atribútov podľa toho, ako sú popísané v InformaÄnom Systéme.
    ", "Objekt" => 0, "Atribút" => 0 ), "quelist" => array( "0" => "Popis rady", "help" => "
    Atribút
    ".$clickable.". Názvy atribútov rady".$str_att."
    Hodnota
    ".$str_val."
    Názov úlohy
    ".$clickable.". ".$str_job."
    Majiteľ
    ".$clickable.". ".$str_nam."
    Stav
    ".$str_sta."
    CPU čas (min)
    ".$str_tim."
    Pamäť (KB)
    ".$str_mem."
    Počet CPU
    ".$str_cpu."
    ", "" => 0, "Názov úlohy" => 0, "Majiteľ" => 0, "Stav" => 0, "CPU čas (min)" => 0, "Operačná pamäť (KB)" => 0, "Počet CPU" => 0 ), "sestat" => array( "0" => "Úložné zariadenia", "help" => "
    Názov
    Názov úložného zariadenia zaregistrovaný v Informačnom Systéme. Zobrazených maximálne 15 znakov.
    Celková kapacita
    Celkový diskový priestor v GB.
    Voľná kapacita
    Momentálne dostupný diskový priestor v GB.
    Názov
    Názov úložného zariadenia skladajúci sa z názvu logickej jednotky a názvu servera (rozdelených dvojbodkou). Logický názov sa využíva len pre účely Informačného Systému pre zjednodušenie rozoznávania rôznych úložných zariadení, nachádzajúcich sa na jednom a tom istom servere.
    Bázová URL
    URL úložného zariadenia, obyčajne využívajúc protokol gsiftp. Použite túto URL ako bázu pre prístup k súborom.
    Typ
    Typ úložného zariadenia. Typ "gridftp-based" označuje úložnú jednotku dostupnú cez GridFTP rozhranie.
    ", "#" => 0, "Názov" => 0, // "Celková kapacita" => 0, "Voľná/celková kapacita v GB" => 0, "Názov" => 0, "Bázová URL" => 0, "Typ" => 0 ), "allusers" => array( "0" => "Autorizovaný užívatelia:Aktívny užívatelia", "help" => "
    Meno
    ".$clickable.". ".$str_nam."
    Pracovisko
    Pracovisko užívateľa, podľa informácií v jeho osobnom certifikáte
    Úlohy
    Počet všetkých užívateľových úloh v systéme (bežiacich, čakajúcich, ukončených a vymazaných)
    Zdroje
    Počet klástrov, na ktoré má daný užívateľ prístup
    ", "#" => 0, "Meno" => 0, "pracovisko" => 0, "Úlohy" => 0, "Zdroje" => 0 ), "userres" => array( "0" => "", "Zdroj:rada" => 0, "Voľné CPU" => 0, "Úlohy v rade" => 0, "Voľný diskový priestor (MB)" => 0 ), "ldapdump" => array( "0" => "", "Atribút" => 0, "Hodnota" => 0 ), // IS attributes "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Údaje platné od (GMT)", "Mds-validto" => "Údaje platné do (GMT)" ), "isattr" => array( "nordugrid-cluster-name" => "Meno hlavného stroja", "nordugrid-cluster-aliasname" => "Názov", "nordugrid-cluster-contactstring" => "Kontaktná adresa", "nordugrid-cluster-interactive-contactstring" => "Interaktívna adresa", "nordugrid-cluster-comment" => "Komentár", "nordugrid-cluster-support" => "Elektronická adresa zodpovednej osoby", "nordugrid-cluster-acl" => "Autorizované VO", "nordugrid-cluster-lrms-type" => "typ LRMS", "nordugrid-cluster-lrms-version" => "verzia LRMS", "nordugrid-cluster-lrms-config" => "podrobnosti o LRMS", "nordugrid-cluster-architecture" => "Architektúra", "nordugrid-cluster-opsys" => "OperaÄný systém", "nordugrid-cluster-homogeneity" => "Homogenita klástra", "nordugrid-cluster-nodecpu" => "Typ procesoru (najslabÅ¡ieho)", "nordugrid-cluster-nodememory" => "Pamäť (MB, najmenÅ¡ia)", "nordugrid-cluster-totalcpus" => "PoÄet CPU celkovo", "nordugrid-cluster-cpudistribution" => "PoÄet CPU na jeden uzol", "nordugrid-cluster-benchmark" => "Etalónový test (Benchmark)", "nordugrid-cluster-sessiondir-free" => "Diskový priestor, dostupný (MB)", "nordugrid-cluster-sessiondir-total" => "Diskový priestor, celkový (MB)", "nordugrid-cluster-sessiondir-lifetime"=> "Doba života gridovej úlohy (min)", "nordugrid-cluster-cache-free" => "KeÅ¡ová pamäť, dostupná (MB)", "nordugrid-cluster-cache-total" => "KeÅ¡ová pamäť, celková (MB)", "nordugrid-cluster-runtimeenvironment" => "Pracovné prostredie", "nordugrid-cluster-localse" => "Lokálne úložné zariadenie", "nordugrid-cluster-middleware" => "Gridové rozhranie (middleware)", "nordugrid-clAliasuster-totaljobs" => "Úlohy, celkovo", "nordugrid-cluster-usedcpus" => "CPU, obsadené", "nordugrid-cluster-queuedjobs" => "Úlohy v rade (ZASTARANÉ)", "nordugrid-cluster-prelrmsqueued" => "Gridové úlohy Äakajúce na zaslanie", "nordugrid-cluster-location" => "PoÅ¡tové smerovacie Äíslo", "nordugrid-cluster-owner" => "Majiteľ", "nordugrid-cluster-issuerca" => "CertifikaÄná autorita", "nordugrid-cluster-issuerca-hash" => "Hash-kód certifikaÄnej autority", "nordugrid-cluster-trustedca" => "Akceptované certifikaÄné autority", "nordugrid-cluster-nodeaccess" => "IP-konektivita uzlov", "nordugrid-cluster-gridarea" => "Priestor gridovej úlohy (ZASTARANÉ)", "nordugrid-cluster-gridspace" => "Gridový diskový priestor (ZASTARANÉ)", "nordugrid-cluster-opsysdistribution" => "Distribúcia OS (ZASTARANÉ)", "nordugrid-cluster-runningjobs" => "Bežiace úlohy (ZASTARANÉ)", "nordugrid-cluster-credentialexpirationtime" => "Credential expiration time", "nordugrid-queue-name" => "Názov rady", "nordugrid-queue-comment" => "Komentár", "nordugrid-queue-status" => "Stav rady", "nordugrid-queue-running" => "VÅ¡etky obsadené CPU", "nordugrid-queue-localqueued" => "Lokálne úlohy v rade", "nordugrid-queue-prelrmsqueued" => "Gridové úlohy Äakajúce na zaslanie do rady", "nordugrid-queue-queued" => "Úlohy v rade (ZASTARANÉ)", "nordugrid-queue-maxrunning" => "Bežiace úlohy (max)", "nordugrid-queue-maxqueuable" => "PoÄet úloh v rade (max)", "nordugrid-queue-maxuserrun" => "PoÄet úloh na užívateľa (max)", "nordugrid-queue-maxcputime" => "CPU Äas, 
maximum (min.)", "nordugrid-queue-mincputime" => "CPU Äas, minimum (min.)", "nordugrid-queue-defaultcputime" => "CPU Äas, bez udania (min.)", "nordugrid-queue-maxwalltime" => "Trvanie, maximum (min.)", "nordugrid-queue-minwalltime" => "Trvanie, minimum (min.)", "nordugrid-queue-defaultwalltime" => "Trvanie, bez udania (min.)", "nordugrid-queue-schedulingpolicy" => "Pravidlá rozvrhu úloh", "nordugrid-queue-totalcpus" => "Celkový poÄet CPU", "nordugrid-queue-nodecpu" => "Typ CPU", "nordugrid-queue-nodememory" => "OperaÄná pamäť (MB)", "nordugrid-queue-architecture" => "Architektúra", "nordugrid-queue-opsys" => "OperaÄný systém", "nordugrid-queue-homogeneity" => "Homogenita rady", "nordugrid-queue-gridrunning" => "CPU obsadené Gridovými úlohami", "nordugrid-queue-gridqueued" => "Gridové úlohy v rade", "nordugrid-queue-benchmark" => "Etalónový test - Benchmark", "nordugrid-queue-assignedcpunumber" => "PoÄet CPU v rade (ZASTARANÉ)", "nordugrid-queue-assignedcputype" => "Typ CPU v rade (ZASTARANÉ)", "nordugrid-job-globalid" => "Identifikátor", "nordugrid-job-globalowner" => "Majiteľ", "nordugrid-job-execcluster" => "Vykonávajúci kláster", "nordugrid-job-execqueue" => "Vykonávajúca rada", "nordugrid-job-stdout" => "Å tandardný výstup", "nordugrid-job-stderr" => "Å tandardný chybový výstup", "nordugrid-job-stdin" => "Å tandardný vstup", "nordugrid-job-reqcputime" => "Požadovaná CPU Äas", "nordugrid-job-reqwalltime" => "Požadovaný Äas trvania", "nordugrid-job-status" => "Stav úlohy", "nordugrid-job-queuerank" => "Pozícia úlohy v rade", "nordugrid-job-comment" => "LRMS komentár", "nordugrid-job-submissionui" => "Stroj, z ktorého bola úloha zaslaná", "nordugrid-job-submissiontime" => "ÄŒas zaslania (GMT)", "nordugrid-job-usedcputime" => "Použitý CPU Äas", "nordugrid-job-usedwalltime" => "DoterajÅ¡ie trvanie úlohy", "nordugrid-job-completiontime" => "ÄŒas ukonÄenia (GMT)", "nordugrid-job-sessiondirerasetime" => "ÄŒas vymazania (GMT)", "nordugrid-job-proxyexpirationtime" => "ÄŒas vyprÅ¡ania proxy certifikátu (GMT)", "nordugrid-job-usedmem" => "Použitá pamäť (KB)", "nordugrid-job-errors" => "Chyby", "nordugrid-job-exitcode" => "Návratová hodnota", "nordugrid-job-jobname" => "Názov", "nordugrid-job-runtimeenvironment" => "Pracovné prostredie", "nordugrid-job-cpucount" => "Požadovaný poÄet CPU", "nordugrid-job-executionnodes" => "Vykonávajúce uzly", "nordugrid-job-gmlog" => "GM log súbor", "nordugrid-job-clientsoftware" => "Verzia klienta", "nordugrid-job-rerunable" => "ZnovuspustiteľnosÅ¥", "nordugrid-job-reqcput" => "Požadovaný Äas (ZASTARANÉ)", "nordugrid-job-gridlog" => "Gridlog súbor (ZASTARANÉ)", "nordugrid-job-lrmscomment" => "LRMS komentár (ZASTARANÉ)", "nordugrid-authuser-name" => "Meno", "nordugrid-authuser-sn" => "Subjekt", "nordugrid-authuser-freecpus" => "Voľné CPU", "nordugrid-authuser-diskspace" => "Voľný úložný priestor (MB)", "nordugrid-authuser-queuelength" => "Úlohy užívateľa v rade", "nordugrid-se-name" => "Celý názov", "nordugrid-se-aliasname" => "Názov", "nordugrid-se-type" => "Typ", "nordugrid-se-acl" => "Autorizované VO", "nordugrid-se-freespace" => "Voľný priestor (MB)", "nordugrid-se-totalspace" => "Celkový priestor (MB)", "nordugrid-se-url" => "Kontaktná URL adresa", "nordugrid-se-baseurl" => "Kontaktná základná URL adresa (ZASTARANÉ)", "nordugrid-se-accesscontrol" => "Kontrola prístupu", "nordugrid-se-authuser" => "Autorizovaný užívateľ (DN)", "nordugrid-se-location" => "PoÅ¡tové smerovacie Äíslo", "nordugrid-se-owner" => "Majiteľ", "nordugrid-se-middleware" => "Gridové 
rozhranie", "nordugrid-se-issuerca" => "CertifikaÄná autorita", "nordugrid-se-issuerca-hash" => "Hash-kód certifikaÄnej autority", "nordugrid-se-trustedca" => "Akceptované certifikaÄné autority", "nordugrid-se-comment" => "Komentár", "nordugrid-rc-name" => "Názov domény", "nordugrid-rc-aliasname" => "Názov", "nordugrid-rc-baseurl" => "Kontaktná URL adresa", "nordugrid-rc-authuser" => "Autorizovaní užívatelia (DN)", "nordugrid-rc-location" => "PoÅ¡tové smerovacie Äíslo", "nordugrid-rc-owner" => "Majiteľ", "nordugrid-rc-issuerca" => "CertifikaÄná autorita" ), // Errors, warnings etc "errors" => array( // failure notices "1" => "Nemožno preÄítaÅ¥ údaje z indexu vyššej úrovne", "2" => "Žiaden z lokálnych indexov neodpovedá", "3" => " nesprávna konfigurácia alebo uplynul Äas požiadavky", "4" => "Žiadna gridová úloha", "5" => "Žiadna informácia", "6" => "Služba je nedostupná", "7" => " - pokúste sa obnoviÅ¥ neskôr", "8" => "Informácie o rade nedostupné", "9" => "Žiadne údaje", "11" => "Žiadny užívatelia", "11" => "Neautorizovaný na danom zdroji", "12" => "neodpovedá", "13" => "Momentálne niet úloh od daného užívateľa ", // debug messages "101" => " ÄŒasový limit pre spojenie s lokálnym indexom: ", "102" => " s pre spojenie a ", "103" => " s pre hľadanie", "104" => " s strávených hľadaním", "105" => "Zobrazenie zdrojov výluÄne v ", "106" => "Dopytované indexy vyššej úrovne: ", "107" => "Prijaté geografické koordináty zdrojov, preskenované zdroje: ", "108" => " zdrojov usporiadaných podľa geografickej polohy", "109" => "Vyhľadávanie atribútov klástra", "110" => "Vyhľadávanie atribútov rady", "111" => "Niet údajov od ", "112" => " fungujúcich v krajine: ", "113" => " žiadne ponúkané zdroje", "114" => " ÄŒasový limit pre spojenie s globálnym indexom: ", "115" => "Ignoruje sa zdroj: ", "116" => "nezodpovedá typu ", "117" => "Preverovanie spojenia: ", "118" => "V poriadku", "119" => "Doteraz objavených zdrojov typu ", "120" => "Chyba LDAP pri hľadaní ", "121" => " stav v ", "122" => "Zablokované: ", "123" => "Objavený registrant ", "124" => "Vyhľadávanie atribútov úložných zariadení", "125" => "Vyhľadávanie užívateľov", "126" => "Vyhľadávanie úloh", "127" => " spustil úlohu ", "128" => " nemajúc autorizáciu", "129" => "Niet údajov o objekte: chyba ", "130" => " ÄŒasový limit pre spojenie s EMIR: ", // icon titles "301" => "ObnoviÅ¥", "302" => "TlaÄ", "303" => "Pomoc", "304" => "ZatvoriÅ¥", "305" => "Zelená", "306" => "Sivá", "307" => "VÅ¡etci užívatelia", "308" => "Aktívny užívatelia", "309" => "Vyhľadávanie", "310" => "Úložiská", "311" => "Virtuálne organizácie", "312" => "Vlajka krajiny: ", "313" => " (gridové procesy), ", "314" => " (lokálne procesy)", // auxilliary strings "401" => "Procesy", "402" => "Grid", "403" => "Lokálne", "404" => "Svet", "405" => "CELKOVO", "406" => " zdrojov", "407" => "mnoho", "408" => " GB", "409" => " VÅ ETKY", "410" => "Kláster", "411" => "Rada", "412" => "Úloha", "413" => "Užívateľ", "414" => "Úložisko", "415" => "Katalóg replík", "416" => "Zadajte atribúty, ktoré sa majú zobraziÅ¥ pre vybraný objekt: ", "417" => "Vyhľadávanie sa vykonáva pre logické A vÅ¡etkých zadaných výrazov", "418" => "Ponechajte pravé pole prázdne ak filter nie je potrebný", "419" => "Prezeranie zdrojov alebo objektov podľa výberu", "420" => "Plný názov (DN)", "421" => "Môže použiÅ¥ celkovo ", "422" => " zdrojov", "423" => "Zdroj / objekt:", "424" => "PoÄet atribútov (6 automaticky):", "425" => "Objekt", "426" => "ÄŽalší", "427" => "Vyberte", "428" => "ZnovunaÄítaÅ¥", "429" => "UKÃZAŤ" ), // 
Post code conversion: only for [en]! "tlconvert" => array ( "AU" => "Austrália", "AT" => "Rakúsko", "AM" => "Arménsko", "DZ" => "Alžírsko", "BE" => "Belgicko", "BG" => "Bulharsko", "CA" => "Kanada", "CN" => "Čína", "CZ" => "ÄŒesko", "DK" => "Dánsko", "EE" => "Estónsko", "FI" => "Fínsko", "FIN" => "Fínsko", "SF" => "Fínsko", "FR" => "Francúzsko", "GE" => "Gruzínsko", "DE" => "Nemecko", "D" => "Nemecko", "GR" => "Grécko", "HU" => "MaÄarsko", "IS" => "Island", "IR" => "Ãrsko", "IE" => "Ãrsko", "IT" => "Taliansko", "JP" => "Japonsko", "KEK" => "Japonsko", "TOKYO" => "Japonsko", "LV" => "LotyÅ¡sko", "LT" => "Litva", "MA" => "Maroko", "NL" => "Holandsko", "NO" => "Nórsko", "N" => "Nórsko", "PL" => "Poľsko", "PT" => "Portugalsko", "RO" => "Rumunsko", "RU" => "Rusko", "LK" => "Srí Lanka", "SE" => "Å védsko", "SK" => "Slovensko", "SI" => "Slovinsko", "CH" => "Å vajÄiarsko", "TR" => "Turecko", "UK" => "UK", "UA" => "Ukrajina", "COM" => "USA", "GOV" => "USA", "USA" => "USA", "US" => "USA", "Australia" => "Austrália", "Austria" => "Rakúsko", "Armenia" => "Arménsko", "Algeria" => "Alžírsko", "Belgium" => "Belgicko", "Bulgaria" => "Bulharsko", "Canada" => "Kanada", "China" => "Čína", "Czechia" => "ÄŒesko", "Denmark" => "Dánsko", "Estonia" => "Estónsko", "Finland" => "Fínsko", "France" => "Francúzsko", "Georgia" => "Gruzínsko", "Germany" => "Nemecko", "Greece" => "Grécko", "Hungary" => "MaÄarsko", "Iceland" => "Island", "Ireland" => "Ãrsko", "Italy" => "Taliansko", "Japan" => "Japonsko", "Latvia" => "LotyÅ¡sko", "Lithuania" => "Litva", "Morocco" => "Maroko", "Netherlands" => "Holandsko", "Norway" => "Nórsko", "Poland" => "Poľsko", "Portugal" => "Portugalsko", "Romania" => "Rumunsko", "Russia" => "Rusko", "SriLanka" => "Srí Lanka", "Sweden" => "Å védsko", "Slovakia" => "Slovensko", "Slovenia" => "Slovinsko", "Switzerland" => "Å vajÄiarsko", "Turkey" => "Turecko", "UK" => "Veľká Británia", "Ukraine" => "Ukrajina", "USA" => "USA", "World" => "Svet" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/fi.inc0000644000000000000000000000013014152153376023455 xustar000000000000000028 mtime=1638455038.4406465 30 atime=1638455038.513647597 30 ctime=1638455100.614580692 nordugrid-arc-6.14.0/src/services/monitor/lang/fi.inc0000644000175000002070000013717014152153376023455 0ustar00mockbuildmock00000000000000 N/A tarkoittaa: käyttäjä ei antanut tyolle nimeä.
    X tarkoittaa: käyttäjä tappoi tyonsä.
    ! tarkoittaa: tyon suoritus epäonnistui.
    Valitse tyon nimi jos haluat tyon tarkemmat tiedot."; $str_nam = "Käyttäjän nimi, siten kuin se on käyttäjän varmenteessa. Valitse käyttäjän nimi jos haluat tietoa resursseista jotka ovat hänen käytettävissään ja käyttäjän ajossa olevista toistä."; $str_sta = "Työn tila, siten kuin Grid Manager (GM) ja jonosuoritusohjelma (LRMS) sen kertoivat. Tilat ovat:
    ACCEPTED – tyo lähetetty
    PREPARING – haetaan syötetiedostoja
    SUBMITTING – lähetys jononsuoritusohjelmaan (LRMS) menossa
    INLRMS – tyo on jononsuoritusohjelman armoilla; Tietojärjestelmä lisää seuraavat LRMSn sisäiset tilat:
    : Q – jonossa,
    : U – jono on jaädytetty väliaikaisesti koska tietokone on kuormitettu (PBSPro)
    : S – jono on jäädytty (Condor)
    : R, run – työtä suoritetaan
    : E – tyo on loppuvaiheessa (PBS)
    FINISHING – GM siirtää tyon tulostiedostoja
    FINISHED – tyo suoritettu loppuun; tietojärjestelmä lisää aikaleimaa
    CANCELING – tyo peruutetaan
    DELETED – käyttäjä ei siirtanyt tulosteita, GM poisti ne koska maksimiaika ylittyi
    Kaikkiin tiloihin voi liittya PENDING: -etuliite, joka tarkoittaa etta GM yrittää siirtää työtä seuraavaan tilaan"; $str_tim = "Tyon käyttämä prosessoriaika minuutteina."; $str_mem = "Tyon käyttämä muisti, KB"; $str_cpu = "Tyon käyttämien prosessorien lukumäärä."; // Actual messages $message = array ( // Table headers and help (do not localize "0" and "help" keys) // For "help", keywords in
    must correspond to column titles below the help text ) "loadmon" => array( "0" => "Grid Monitor", "help" => "
    Kohteet jotka rekisteroityvät ARCin luettelopalveluun lajiteltuna maan ja tietokoneen nimen mukaisesti. Kohteista rekisteroidään seuraavat ominaisuudet: klusterin alias-nimi, prosessorikapasiteetti, ajossa olevat ja jonottavat työt (sekä Grid-toiminnoilla lahetetyt etta paikalliset). Käytä "Search" toimintoa jos haluat vertailla muita klusterin, jonon tai tyon ominaisuuksia
    Maa
    ".$clickable.". Maa (lippu ja nimi) kuten annettu resurssien kuvauksessa. Valitse maa jos haluat näyttää vain taman maan tiedot.
    Klusteri
    ".$clickable.". Klusterin alias-nimi kuten omistaja on sen antanut. Naytetään max 22 merkkia. Valitse alias jos haluat tarkemman kuvauksen klusterista.
    Prosessoreita
    Klusterin prosessorien kokonaismäärä. Huom! Grid-käyttäjien saatavilla voi olla näistä vai osa.
    Kuorma (prosesseja:Grid+paikallinen)
    ".$clickable.". Klusterin suhteelinen kuorma, eli kuormitettujen prosessorien määrä. Harmaat palkit vastaavat prosessoreita jotka suorittavat paikallisesti lahetettyha töitä, punaiset palkit Grid-töitä. Valitse palkki jos haluat tarkempaa tietoa Grid-toistä joita suoritetaan klusterissa.
    Jonottamassa
    ".$clickable.". Klusterissa jonottavien toiden lukumäärä, Grid työt sekä paikallisesti lähetetyt työt. Valitse ensimmainen numero jos haluat tarkempaa jonottavista Grid-toista.
    ", "Maa" => 30, "Kohde" => 160, "Prosesseja" => 10, "Kuorma (prosesseja: Grid+paikall.)" => 210, "Jonottamassa" => 10 ), "clusdes" => array("0" => "Resource Details for", "help" => "
    Ominaisuus
    ".$clickable." Klusterin ominaisuuden nimi".$str_att."
    Arvo
    ".$str_val."
    Jono
    ".$clickable.". Jonon nimi siten kuin jonon omistaja on sen maarittanyt. ".$str_que."
    Tila
    Jonon tila. Toiminnassa oleva jono ilmoittaa yleensa tilan active.
    Prosessorit (min)
    Jonon toiden aikarajoitus (jos annettu) prosessoriminuutteina. Naytetään ala- ja yläraja. N/A näytetään ios rajoituksia ei ole (kaikenkestoiset työt sallitaan).
    Ajossa
    Ajossa olevat jonon työt. Toiden kokonaismäärä, suluissa Grid-töitä suorittavien prosessorien kokonaismäärä. Huom: rinnakkaisille multiprosessoritoille suluissa oleva numero voi olla suurempi kuin toiden määrä.
    Jonottamassa
    TyöT jotka odottavat suoritukseen paäsyä jonossa. Toiden kokonaismäärä ja Grid-toiminnoilla lähetetyt suluissa esim. (Grid: 235)
    ", "Jono" => 0, "Mapping Queue" => 0, "Tila" => 0, "Rajoitukset (min)" => 0, "Prosessoreita" => 0, "Ajossa" => 0, "Jonottamassa" => 0 ), "jobstat" => array("0" => "Jobs at:Job ID", "help" => "
    TYÖT:
    Tyon nimi
    ".$clickable.". Tyon nimi (omistajan antama). Jos omistaja ei antanut tyolle nimea, näytetään " style={color:red;}>N/A" . Valitse nimi jos haluat kuvauksen tyostä.
    Omistaja
    ".$clickable.". ".$str_nam."
    Tila
    ".$str_sta."
    Prosessoreita (min)
    ".$str_tim."
    Jono
    ".$clickable.". Eräajojono, jossa työtä suoritetaan. ".$str_que."
    Prosessoreita
    ".$str_cpu."
    TYÖT (YKSITYISKOHTAISESTI):
    Ominaisuus
    ".$clickable.". Tyon ominaisuus".$str_att."
    Arvo
    ".$str_val."
    ", "Tyon nimi" => 0, "Omistaja" => 0, "Tila" => 0, "Prosessoreita (min)" => 0, "Jono" => 0, "Prosessoreita" => 0 ), "volist" => array("0" => "Virtual Organisations", "help" => "
    Virtuaaliorganisaatio (VO)
    ".$clickable.". Ryhmä käyttäjiä jotka käyttävät samanlaisia resursseja ARC-tietkoneissa. Valitse ryhmän nimi jos haluta listan ryhmän jäsenistä.
    Jäseniä
    Ryhmän jäsenten määrä.
    Served by
    LDAP palvelin johon ryhmä/jäsenyystiedot talletetaan.
    ", "Virtuaaliorganisaatio (VO)" => 0, "Jäsenet" => 0, "Palvelin" => 0 ), "vousers" => array("0" => "Grid User Base", "help" => "
    Nimi
    ".$clickable.". ".$str_nam."
    Organisaatio
    ".$clickable.". Käyttäjän organisaatio siinä muodossa kuin VO'n hallinnoija on sen antanut (voi olla myos tyhjä).
    Sähkopostiosoite
    ".$clickable.". Käyttäjän sähkopostiosoite siinä muodossa kuin VO'n hallinnoija on sen antanut (voi olla myos tyhjä). Valitsemalla sähkopostiosoitteen voit lähettää käyttäjälle sähkopostia.
    ", "#" => 0, "Nimi" => 0, "Organisaatio" => 0, "Sähkopostiosoite" => 0 ), "userlist" => array("0" => "Information for", "help" => "
    Klusteri:jono
    ".$clickable.". Klusterit ja niiden jonot (kaksoispisteella erotettuina, ":") joihin käyttäja voi lähettää töitä. Jos käyttäjällä ei ole oikeutta lähettää työtä, tuloste on "Not authorised at host ..." Valitse klusterin nimi jos haluat yksityiskohtaisen kuvauksen klusterista. Valitse jonon nimi jos haluat yksityiskohtaisen kuvauksen jonosta.
    Vapaita prosessoreita
    Tälle käyttäjälle, annetussa jonossa saatavilla olevien prosessorien maara. Tämän jälkeen saattaa ilmetä myos maksimiarvo joka kertoo kuinka monta minuuttia prosessori on käytettävissä. "3" tarkoittaa: 3 prosessoria käytettävissä ilman aikarjaa. "4:360" tarkoittaa: 4 prosessoria korkeintaan kuudeksi tunniksi. "0" tarkoittaa: ei prosessoreita saatavilla ja työt jonottavat kunnes niita vapautuu.
    Jonossa olevia töitä
    TyöT jotka todennäkoisesti suoritetaan ennen uutta jonoon tulevaa. "0" tarkoittaa: tyo suoritetaan heti. Huom! Tämä on arvio, jonon paikalliskaytanto saattaa muuttaa prioriteetteja.
    Vapaa tila (MB)
    Käyttäjälle tarjolla oleva levytila tässä jonossa (megatavuina). Huom! Tämä on arvio, koska klusterit eivät tarjoa levykiintioitä.
    Tyon nimi
    ".$clickable.". ".$str_job."
    Tila
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Klusteri
    ".$clickable.". Klusteri kossa tyo suoritetaan/suoritettiin.
    Queue
    ".$clickable.". Jono jossa tyo suoritetaan/suoritettiin. ".$str_que."
    Prosessoreita
    ".$str_cpu."
    ", "" => 0, "Tyon nimi" => 0, "Tila" => 0, "Prosessoreita (min)" => 0, "Klusteri" => 0, "Jono" => 0, "Prosessoreita" => 0 ), "attlist" => array("0" => "Attribute values", "help" => "
    Object
    ".$clickable.". Objekti jonka ominaisuuksia tarkastellaan. Objekti voi olla klusteri, jono, tyo, käyttäjä jne. Valitsemalla objektin sen kuvauksen.
    Ominaisuus
    Ominaisuuksia ja niiden arvoja tulostetaan yksi tai usempia per kohde. Sarakkeen otsikko on ihmiselle ymmarrettävässä muodossa (poislukien jotkin MDS-spesifit ominaisuudet). Sarake sisältää vastaavan ominaisuuden arvot tälle kohteelle (arvot saadaan tietojärjestelmästä).
    ", "Objekti" => 0, "Ominaisuus" => 0 ), "quelist" => array("0" => "Jono", "help" => "
    Ominaisuus
    ".$clickable.". Name of a queue attribute".$str_att."
    Arvo
    ".$str_val."
    Tyon nimi
    ".$clickable.". ".$str_job."
    Omistaja
    ".$clickable.". ".$str_nam."
    Tila
    ".$str_sta."
    Prosessoreita (min)
    ".$str_tim."
    Muisti (KB)
    ".$str_mem."
    CPUs
    ".$str_cpu."
    ", "" => 0, "Tyon nimi" => 0, "Omistaja" => 0, "Tila" => 0, "Prosessoreita (min)" => 0, "Muisti (KB)" => 0, "Prosessoreita" => 0 ), "sestat" => array("0" => "Storage Elements", "help" => "
    Alias-nimi
    Talletuselementin nimi siinä muodossa kuin se on tietojärjestelmässä (IS), max. 15 merkkiä näytetään.
    Tilaa kaikkiaan
    Kokonaislevytila, GB.
    Vapaa tila
    Tälle hetkellä vapaana oleva levytila, GB.
    Name
    Talletuselementin nimi, looginen nimi ja tietokoneen nimi kaksoispisteella eroteltuna. Loogista nimeä käyttää vain tietojärjestelmä (IS), jotta voidaan erottaa eri talletuselementit samalla koneella.
    URLin alku
    Talletuselementin URL, usein gsiftp://.. Tama URL on edeltää yksittäisiä tiedostoja tai hakemistoja.
    Type
    Talletuselementin tyyppi. "gridftp-based" tarkoittaa tietovarantoa jossa GridFTP liittymä.
    ", "#" => 0, "Alias-nimi" => 0, // "Tilaa kaikkiaan" => 0, "Vapaa/kaikkiaan tila, GB" => 0, "Nimi" => 0, "URLin alku" => 0, "Typpi" => 0 ), "allusers" => array("0" => "Grid kayttäjät joille käytto sallittu:Aktiiviset Grid käyttäjät", "help" => "
    Nimi
    ".$clickable.". ".$str_nam."
    Organisaatio
    Käyttäjän organisaatio, tieto saatu varmenteesta
    TyöT
    Kaikki käyttäjien työt (ajossa, odottamassa, suoritettu tai poistettu)
    Kohteet
    Kuinka monta kohdetta tämä käyttäjä voi käyttää
    ", "#" => 0, "Nimi" => 0, "Organisaatio" => 0, "TöIta" => 0, "Kohteita" => 0 ), "userres" => array("0" => "", "Klusteri:jono" => 0, "Vapaita prosessoreita" => 0, "Jonossa olevia töitä" => 0, "Vapaata levytilaa (MB)" => 0 ), "ldapdump" => array("0" => "", "Ominaisuus" => 0, "Arvo" => 0 ), // IS attributes "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Info valid from (GMT)", "Mds-validto" => "Info valid to (GMT)" ), "isattr" => array( "nordugrid-cluster-name" => "Edustakoneen domain", "nordugrid-cluster-aliasname" => "Klusterin alias-nimi", "nordugrid-cluster-contactstring" => "Kontakti", "nordugrid-cluster-interactive-contactstring" => "Interaktiivisten toiden kontakti", "nordugrid-cluster-comment" => "Kommentti", "nordugrid-cluster-support" => "Tukipalvelun sähkoposti", "nordugrid-cluster-acl" => "Sallitut VO't", "nordugrid-cluster-lrms-type" => "Jononhallintaohjelmiston tyyppi", "nordugrid-cluster-lrms-version" => "Jononhallintaohjelmiston versio", "nordugrid-cluster-lrms-config" => "Jononhallintaohjelmisto, tarkemmin", "nordugrid-cluster-architecture" => "Arkkitehtuuri", "nordugrid-cluster-opsys" => "Käyttojärjestelmä", "nordugrid-cluster-homogeneity" => "Klusterin arkkitehtuuri yhtenainen ", "nordugrid-cluster-nodecpu" => "Prosessorin tyyppi (hitain)", "nordugrid-cluster-nodememory" => "Muisti (MB, pienin määrä)", "nordugrid-cluster-totalcpus" => "Prosessoreita kaikkiaan", "nordugrid-cluster-cpudistribution" => "Prosessoreita tietokonetta kohti", "nordugrid-cluster-benchmark" => "Suoritustesti", "nordugrid-cluster-sessiondir-free" => "Levytila, saatavilla (MB)", "nordugrid-cluster-sessiondir-total" => "Levytila kaikkiaan (MB)", "nordugrid-cluster-sessiondir-lifetime"=> "Sessiohakemiston elinaika (min)", "nordugrid-cluster-cache-free" => "Valimuistin koko, saatavilla (MB)", "nordugrid-cluster-cache-total" => "Valimuistin koko kaikkiaan (MB)", "nordugrid-cluster-runtimeenvironment" => "Ajoaikainen ymparisto", "nordugrid-cluster-localse" => "Paikallinen talletuselementti (SE)", "nordugrid-cluster-middleware" => "Väliohjelmisto", "nordugrid-cluster-totaljobs" => "Töiden kokonaismäärä", "nordugrid-cluster-usedcpus" => "Prosessoreita varattu", "nordugrid-cluster-queuedjobs" => "TöItä jonossa", "nordugrid-cluster-prelrmsqueued" => "Grid–töitä odottamassa", "nordugrid-cluster-location" => "Postinumero", "nordugrid-cluster-owner" => "Omistaja", "nordugrid-cluster-issuerca" => "Varmenteen myontäjä", "nordugrid-cluster-issuerca-hash" => "Varmenteen myontäjän hajakoodi", "nordugrid-cluster-trustedca" => "Luotetut varmenteen myontäjä", "nordugrid-cluster-nodeaccess" => "Laskentasolmun internet-yhteys", "nordugrid-cluster-gridarea" => "Sessiotila (VANHENTUNUT)", "nordugrid-cluster-gridspace" => "Grid levytila (VANHENTUNUT)", "nordugrid-cluster-opsysdistribution" => "Käyttojärjestelmän jakelunimi (VANHENTUNUT)", "nordugrid-cluster-runningjobs" => "TöItä, ajossa (VANHENTUNUT)", "nordugrid-cluster-credentialexpirationtime" => "Varmenne voimassa", "nordugrid-queue-name" => "Jonon nimi", "nordugrid-queue-comment" => "Kommentti", "nordugrid-queue-status" => "Jonon tila", "nordugrid-queue-running" => "Prosessoreita varattu", "nordugrid-queue-localqueued" => "Paikallisia töitä jonossa", "nordugrid-queue-prelrmsqueued" => "Grid–töitä odottamassa", "nordugrid-queue-queued" => "Jonossa olevia töitä (VANHENTUNUT)", "nordugrid-queue-maxrunning" => "Ajossa olevia töitä (max)", "nordugrid-queue-maxqueuable" => "TöItä jotka voivat jonottaa (max)", "nordugrid-queue-maxuserrun" => "TöItä 
käyttäjää kohti (max)", "nordugrid-queue-maxcputime" => "Prosessoriaika, max. (minuutteja)", "nordugrid-queue-mincputime" => "Prosessoriaika, min. (minuutteja)", "nordugrid-queue-defaultcputime" => "Prosessoriaika, oletusarvo (minuutteja)", "nordugrid-queue-maxwalltime" => "Kokonaisaika, max. (minuutteja)", "nordugrid-queue-minwalltime" => "Kokonaisaika, min. (minuutteja)", "nordugrid-queue-defaultwalltime" => "Kokonaisaika, oletusarvo (minuutteja)", "nordugrid-queue-schedulingpolicy" => "Schedulointipolitiikka", "nordugrid-queue-totalcpus" => "Prosessoreita kaikkiaan", "nordugrid-queue-nodecpu" => "Prosessrin tyyppi", "nordugrid-queue-nodememory" => "Muistia (MB)", "nordugrid-queue-architecture" => "Arkkitehtuuri", "nordugrid-queue-opsys" => "Käyttojärjestelmä", "nordugrid-queue-homogeneity" => "Jonon arkkitehtuuri yhtenäinen", "nordugrid-queue-gridrunning" => "Grid–töiden käyttämät prosessorit", "nordugrid-queue-gridqueued" => "Grid työt, jonossa", "nordugrid-queue-benchmark" => "Mitattu suorituskyky", "nordugrid-queue-assignedcpunumber" => "Prosessoreita jonoa kohti (VANHENTUNUT)", "nordugrid-queue-assignedcputype" => "Prosessorin tyyppi (VANHENTUNUT)", "nordugrid-job-globalid" => "ID", "nordugrid-job-globalowner" => "Omistaja", "nordugrid-job-execcluster" => "Suoritusklusteri", "nordugrid-job-execqueue" => "Suoritusjono", "nordugrid-job-stdout" => "Standardi tulostiedosto", "nordugrid-job-stderr" => "Standardi virhetiedosto ", "nordugrid-job-stdin" => "Standardi syotetiedosto", "nordugrid-job-reqcputime" => "Pyydetty prosessoriaika", "nordugrid-job-reqwalltime" => "Pyydetty kokonaisaika", "nordugrid-job-status" => "Tyon tila", "nordugrid-job-queuerank" => "Paikka jonossa", "nordugrid-job-comment" => "Jonosuoritusohjelman kommentti", "nordugrid-job-submissionui" => "Lähetetty koneesta", "nordugrid-job-submissiontime" => "Lähetysaika (GMT)", "nordugrid-job-usedcputime" => "Käytetty prosessoriaika", "nordugrid-job-usedwalltime" => "Käytetty kokonaisaika", "nordugrid-job-completiontime" => "Saatu suoritettua (GMT)", "nordugrid-job-sessiondirerasetime" => "Poistamisaika (GMT)", "nordugrid-job-proxyexpirationtime" => "Proxyn käyttoaika loppuu (GMT)", "nordugrid-job-usedmem" => "Käytetty muisti (KB)", "nordugrid-job-errors" => "Virheet", "nordugrid-job-exitcode" => "Poistumiskoodi", "nordugrid-job-jobname" => "Nimi", "nordugrid-job-runtimeenvironment" => "Ajoaikainen ympäristo", "nordugrid-job-cpucount" => "Pyydetyt prosessorit", "nordugrid-job-executionnodes" => "Suoritusnoodi", "nordugrid-job-gmlog" => "GM log -tiedosto", "nordugrid-job-clientsoftware" => "Asiakasohjelmiston nimi", "nordugrid-job-rerunable" => "Uudelleen ajettavissa", "nordugrid-job-reqcput" => "Pyydetty suoritusaika (VANHENTUNUT)", "nordugrid-job-lrmscomment" => "Jonosuoritusohjelman kommentti (VANHENTUNUT)", "nordugrid-job-gridlog" => "Gridlog tiedosto (VANHENTUNUT)", "nordugrid-authuser-name" => "Nimi", "nordugrid-authuser-sn" => "Subject-nimi", "nordugrid-authuser-freecpus" => "Vapaita prosessoreita", "nordugrid-authuser-diskspace" => "Vapaa levytila (MB)", "nordugrid-authuser-queuelength" => "KДyttДjДn tЖitД jonossa", "nordugrid-se-name" => "Nimi", "nordugrid-se-aliasname" => "Talletuselementin alias-nimi", "nordugrid-se-type" => "Talletuselementin tyyppi", "nordugrid-se-acl" => "Autorisoidut VOt", "nordugrid-se-freespace" => "Vapaa tila (MB)", "nordugrid-se-totalspace" => "Kokonaistila (MB)", "nordugrid-se-url" => "Yhteys-URL", "nordugrid-se-baseurl" => "Yhteys-URL (VANHENTUNUT)", "nordugrid-se-accesscontrol" => 
"Kayttokontrolli", "nordugrid-se-authuser" => "Auktorisoitu käyttäjä (DN)", "nordugrid-se-location" => "Postinumero", "nordugrid-se-owner" => "Omistaja", "nordugrid-se-middleware" => "VДliohjelmisto", "nordugrid-se-issuerca" => "Varmenteen myontäjä", "nordugrid-se-issuerca-hash" => "Varmenteen myontäjän hajakoodi", "nordugrid-se-trustedca" => "Luotetut varmenteen myontäjä", "nordugrid-se-comment" => "Kommentteja", "nordugrid-rc-name" => "Domainin nimi", "nordugrid-rc-aliasname" => "Replica Catalog alias", "nordugrid-rc-baseurl" => "Yhteys-URL", "nordugrid-rc-authuser" => "Auktorisoitu käyttäjä (DN)", "nordugrid-rc-location" => "Postinumero", "nordugrid-rc-owner" => "Omistaja", "nordugrid-rc-issuerca" => "Varmenteen myontäjä (CA)" ), // Errors, warnings etc "errors" => array( // failure notices "1" => "Ei voitu lukea ylätason indeksejä", "2" => "Ei saatu yhteyttä paikallisiin indeksipalveluihin", "3" => " viallinen konfiguraatio tai pyyynnolle annettu aika ylittyi", "4" => "Ei Grid-töitä", "5" => "Ei loytynyt tietoa", "6" => "Tietokone ei saavutettavissa", "7" => " - hae uudestaan myohemmin", "8" => "Ei jonotietoa", "9" => "Ei kohderiveja", "10" => "Ei käyttäjiä", "11" => "Ei oikeutta käyttää tietokonetta", "12" => "ei vastaa", "13" => "Ei töitä ", // debug messages "101" => " Monitoriprosesille annettu aika: GRIS: ", "102" => " sekuntia yhteyksien luomiseen ", "103" => " sekuntia käytetty etsimisprosessissa", "104" => " sekuntia käytetty etsimiseen", "105" => "Näytetään vain resurssit: ", "106" => "Tutkittu ylimman tason indeksit: ", "107" => "Maantieteelliset kohteet haettu, lisataan tietoa: ", "108" => " kohteet jarjestetty maantieteellisesti", "109" => "Etsi klusterin ominaisuuksilla", "110" => "Etsi jonon ominaisuuksilla", "111" => "Ei dataa kohteesta ", "112" => " on toiminnassa: ", "113" => " ei resursseja tarjolla", "114" => " Monitoriprosessille annettu aika ylittyi, GIIS: ", "115" => "Jätetään valiin GRIS: ", "116" => "ei ole ", "117" => "Tarkintan yhteyttä: ", "118" => "OK", "119" => "Siihen mennessä loytynyt seuraavanlaisia resursseja ", "120" => "LDAP etsinnässä virhe ", "121" => " status ", "122" => "Mustalla listalla: ", "123" => "Rekisteroitynyt ", "124" => "Etsi tallennuselementin (SE) ominaisuuskai", "125" => "Etsi käyttäjiä", "126" => "Etsi töitä", "127" => " tyo ", "128" => " ei käyttooikeutta", "129" => "Virhe: ei tietoa kohteesta ", "130" => " Monitoriprosessille annettu aika ylittyi, EMIR: ", // icon titles "301" => "Lataa uudestaan", "302" => "Tulosta", "303" => "Ohjeet", "304" => "Sulje", "305" => "Punainen", "306" => "Harmaa", "307" => "Kaikki käyttäjät", "308" => "Aktiiviset käyttäjät", "309" => "Hae", "310" => "Tietovarannot", "311" => "Virtuaaliorganisaatiot", "312" => "Lippu: ", "313" => " Grid prosessit ja ", "314" => " paikalliset prosessit", // auxilliary strings "401" => "Prosessit", "402" => "Grid", "403" => "Paikallinen", "404" => "Maailma", "405" => "TOTAL", "406" => " kohdetta ", "407" => "paljon", "408" => " GB", "409" => " KAIKKI", "410" => "Klusteri", "411" => "Jono", "412" => "Tyo", "413" => "Kayttäjä", "414" => "Tietovaranto", "415" => "Replica Cat.", "416" => "Valitse ominaisuudet jotka näytetään: ", "417" => "Kaikkien valintojen kombinaation näytetään", "418" => "Jata oikeanpuoleinen kenttä tyhjäksi jos haluat kaikki tulokset näyttoon", "419" => "Näyta valitut resurssit tai kohteet", "420" => "Distinguished name", "421" => "käytettävissä ", "422" => " kohdetta", "423" => "Resurssi / objekti:", "424" => "Ominaisuuksia (def. 
6):", "425" => "Objekti", "426" => "Seuraava", "427" => "Valise yksi", "428" => "Tyhjennä valinnat", "429" => "NÄYTÄ" ), // Post code conversion "tlconvert" => array ( "Australia" => "Australia", "Austria" => "Itävalta", "Armenia" => "Armenia", "Algeria" => "Algeria", "Belgium" => "Belgia", "Bulgaria" => "Bulgaria", "Canada" => "Kanada", "Czechia" => "Tsekki", "China" => "Kiina", "Denmark" => "Tanska", "Estonia" => "Eesti", "Finland" => "Suomi", "France" => "Ranska", "Georgia" => "Georgia", "Germany" => "Saksa", "Greece" => "Kreikka", "Hungary" => "Unkari", "Iceland" => "Islanti", "Ireland" => "Irlanti", "Italy" => "Italia", "Japan" => "Japani", "Latvia" => "Latvia", "Lithuania" => "Liettua", "Morocco" => "Marokko", "Netherlands" => "Alankomaat", "Norway" => "Norja", "Poland" => "Puola", "Portugal" => "Portugali", "Romania" => "Romania", "Russia" => "Venäjä", "SriLanka" => "Sri Lanka", "Sweden" => "Ruotsi", "Slovakia" => "Slovakia", "Slovenia" => "Slovenia", "Switzerland" => "Sveitsi", "Turkey" => "Turkki", "UK" => "Iso-Britannia", "Ukraine" => "Ukraina", "USA" => "USA" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/ru.inc0000644000000000000000000000013214152153376023507 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.513647597 30 ctime=1638455100.617580737 nordugrid-arc-6.14.0/src/services/monitor/lang/ru.inc0000644000175000002070000020053414152153376023500 0ustar00mockbuildmock00000000000000ARC."; $str_val = "Значение атрибута, запиÑанное в Информационной СиÑтеме."; $str_que = "Обычно очереди различаютÑÑ Ð»Ð¸Ð±Ð¾ по допуÑтимой продолжительноÑти Ñчёта, либо по допущенной группе пользователей. По щелчку выводитÑÑ Ð¿Ð¾Ð»Ð½Ð¾Ðµ опиÑание очереди, включающее ÑпиÑок вÑех извеÑтных задач: в Ñчёте, в очереди и закончившихÑÑ."; $str_job = " Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸, приÑвоенное хозÑином.
    N/A означает, что хозяин не присвоил никакого имени.
    X означает, что хозяин отменил исполнение задачи.
    ! означает, что при исполнении задачи произошла ошибка.
    По щелчку выводится подробное описание задачи."; $str_nam = "Имя пользователя, в соответствии с его личным сертификатом. По щелчку выводится сводная таблица всех Грид-ресурсов, доступных данному пользователю, и список всех его задач, зарегистрированных на данный момент в системе."; $str_sta = "Состояние задачи: стадия прогресса в ГМ или статус в СУПО. Последовательность возможных состояний такова:
    ACCEPTED – задача принята, но исполнение ещё не началось
    PREPARING – подгружаются необходимые входные данные
    SUBMITTING – посылается задание в СУПО
    INLRMS – управление задачей передано в СУПО; информационная система обеспечивает сведения о внутреннем состоянии задачи. Возможны следующие состояния:
    : Q – задача ожидает в очереди
    : U – задача приостановлена на перегруженом узле (PBSPro)
    : S – задача приостановлена (Condor)
    : R, run – задача исполняется
    : E – задача заканчивается (PBS)
    FINISHING – выходные данные пересылаются по назначению
    FINISHED – задача завершена; информационная система добавляет метку времени окончания
    CANCELING – задача отменяется
    DELETED – результаты задачи не затребованы хозяином, но уничтожены сервером по истечении времени хранения (обычно 24 часа).
    К каждому состоянию может быть добавлена приставка \"PENDING:\", что означает, что ГМ не может в данный момент перейти к следующему этапу исполнения из-за соответствующих внутренних ограничений."; $str_tim = "Процессорное время, затраченное задачей, в минутах."; $str_mem = "Объём оперативной памяти, используемый задачей на текущий момент, в килобайтах"; $str_cpu = "Число процессоров, занимаемых задачей."; // Actual messages $message = array ( // Table headers and help "loadmon" => array( "0" => "Грид-монитор", "help" => "
    Ð’ Ñтом окне приведена ÑÐ²Ð¾Ð´Ð½Ð°Ñ Ñ‚Ð°Ð±Ð»Ð¸Ñ†Ð° вÑех вычиÑлительных реÑурÑов, региÑтрирующихÑÑ Ð² ÑпиÑки выÑшего ÑƒÑ€Ð¾Ð²Ð½Ñ ARC. Таблица упорÑдочена по английÑкому названию Ñтраны, и в каждой Ñтране – по имени головной машины. Ð”Ð»Ñ ÐºÐ°Ð¶Ð´Ð¾Ð³Ð¾ реÑурÑа выведены Ñледующие параметры: название, общее чиÑло процеÑÑоров, чиÑло занÑтых процеÑÑоров, а также количеÑтво заданий в очереди, как заÑланных через Грид, так и меÑтных. ИÑпользуйте утилиту \"ПоиÑк\" Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñмотра и ÑÑ€Ð°Ð²Ð½ÐµÐ½Ð¸Ñ Ð´Ñ€ÑƒÐ³Ð¸Ñ… параметров клаÑтеров, очередей, задач и Ñ‚.д..
    Страна
    ".$clickable.". Флаг и название Ñтраны, как Ñледует из доÑтупного опиÑÐ°Ð½Ð¸Ñ Ñ€ÐµÑурÑа. По щелчку выводитÑÑ ÑÐ²Ð¾Ð´Ð½Ð°Ñ Ñ‚Ð°Ð±Ð»Ð¸Ñ†Ð° только Ð´Ð»Ñ Ñтой Ñтраны.
    РеÑурÑ
    ".$clickable.". Ðазвание реÑурÑа (обычно, клаÑтера), приÑвоенное владельцем. Длина Ñтроки не должна превышать 22 Ñимвола. По щелчку выводитÑÑ Ð¿Ð¾Ð»Ð½Ð¾Ðµ опиÑание реÑурÑа (клаÑтера).
    ЦП
    Общее чиÑло центральных процеÑÑоров в клаÑтере. Внимание! Лишь чаÑть из них может быть доÑтупна Грид-пользователÑм.
    Загрузка (процеÑÑÑ‹)
    ".$clickable.". ОтноÑÐ¸Ñ‚ÐµÐ»ÑŒÐ½Ð°Ñ Ð·Ð°Ð³Ñ€ÑƒÐ·ÐºÐ° клаÑтера, иÑÑ…Ð¾Ð´Ñ Ð¸Ð· чиÑла занÑтых процеÑÑоров. Ð¡ÐµÑ€Ð°Ñ Ð¿Ð¾Ð»Ð¾Ñа ÑоответÑтвует количеÑтву процеÑÑоров, занÑтых под меÑтные задачи, тогда как краÑÐ½Ð°Ñ Ð¿Ð¾Ð»Ð¾Ñа указывает на количеÑтво процеÑÑоров, иÑполнÑющих Грид-задачи. По щелчку выводитÑÑ Ñводка вÑех активных Грид-задач на клаÑтере, Ð²ÐºÐ»ÑŽÑ‡Ð°ÑŽÑ‰Ð°Ñ Ð¸Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸ÑŽ о чиÑле процеÑÑоров на каждую задачу.
    Ожидают
    ".$clickable.". ЧиÑло вÑех задач, ожидающих в очереди на данном клаÑтере, предÑтавленное в виде Ñуммы Грид- и локальных задач. По щелчку на первой цифре выводитÑÑ Ñводка вÑех задач в очереди, заÑланных через Грид.
    ", "Страна" => 30, "РеÑурÑ" => 160, "ЦП" => 10, "Загрузка (процеÑÑÑ‹)" => 210, "Ожидают" => 10 ), "clusdes" => array( "0" => "ОпиÑание реÑурÑа", "help" => "
    Ðтрибут
    ".$clickable.". ÐÐ°Ð·Ð²Ð°Ð½Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð² клаÑтера".$str_att."
    Значение
    ".$str_val."
    Очередь
    ".$clickable.". ÐÐ°Ð·Ð²Ð°Ð½Ð¸Ñ Ð¾Ñ‡ÐµÑ€ÐµÐ´ÐµÐ¹ (приÑвоенные владельцами), доÑтупных Ð´Ð»Ñ Ð“Ñ€Ð¸Ð´-пользователей. ".$str_que."
    СоÑтоÑние
    СоÑтоÑние очереди. Ð Ð°Ð±Ð¾Ñ‚Ð°ÑŽÑ‰Ð°Ñ Ð¾Ñ‡ÐµÑ€ÐµÐ´ÑŒ обычно выдаёт ÑоÑтоÑние active.
    ДлительноÑть (мин)
    Пределы по времени на продолжительноÑть обработки Ð·Ð°Ð´Ð°Ð½Ð¸Ñ Ð² очереди, еÑли таковые уÑтановлены, в минутах процеÑÑорного времени. Первое значение ÑоответÑтвует нижнему пределу, второе – верхнему. ЕÑли пределы не уÑтановлены (Ñ‚.е., очередь принимает задачи любой продолжительноÑти), выводитÑÑ Ð¼ÐµÑ‚ÐºÐ° N/A.
    СчитаютÑÑ
    ЧиÑло задач, ÑчитающихÑÑ Ð² очереди. Показано общее чиÑло задач, причём чиÑло процеÑÑоров, занÑтых под Грид-задачи, указано в Ñкобках, например: (Грид: 12). Внимание! При наличии параллельных многопроцеÑÑорных задач, чиÑло в Ñкобках может превышать общее чиÑло задач.
    Ожидают
    ЧиÑло заданий, ожидающих иÑÐ¿Ð¾Ð»Ð½ÐµÐ½Ð¸Ñ Ð² очереди. Показано общее чиÑло задач, причём количеÑтво заданий, заÑланных через Грид, указано в Ñкобках, например: (Грид: 235).
    ", "Очередь" => 0, "Mapping Queue" => 0, "СоÑтоÑние" => 0, "ДлительноÑть (мин)" => 0, "ЦП" => 0, "СчитаютÑÑ" => 0, "Ожидают" => 0 ), "jobstat" => array( "0" => "Задачи на:Ярлык задачи", "help" => "
    СПИСОК ЗÐДÐЧ:
    Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸
    ".$clickable.". Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸, приÑвоенное хозÑином. N/A означает, что хозÑин не приÑвоил никакого имени. По щелчку выводитÑÑ Ð¿Ð¾Ð´Ñ€Ð¾Ð±Ð½Ð¾Ðµ опиÑание задачи.
    ХозÑин
    ".$clickable.". ".$str_nam."
    СоÑтоÑние
    ".$str_sta."
    Ð’Ñ€ÐµÐ¼Ñ (мин)
    ".$str_tim."
    Очередь
    ".$clickable.". Ðазвание очереди СУПО, в которой проиÑходит иÑполнение задачи.".$str_que."
    ЦП
    ".$str_cpu."
    ОПИСÐÐИЕ ЗÐДÐЧИ:
    Ðтрибут
    ".$clickable.". ÐÐ°Ð·Ð²Ð°Ð½Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð² задачи.".$str_att."
    Значение
    ".$str_val."
    ", "Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" => 0, "ХозÑин" => 0, "СоÑтоÑние" => 0, "Ð’Ñ€ÐµÐ¼Ñ (мин)" => 0, "Очередь" => 0, "ЦП" => 0 ), "volist" => array( "0" => "Виртуальные организации", "help" => "
    Виртуальные организации
    ".$clickable.". Группа пользователей – обычно объединÑемых ÑовмеÑтной целью или реÑурÑами, – Ð´Ð¾Ð¿ÑƒÑ‰ÐµÐ½Ð½Ð°Ñ Ðº работе по крайней мере на одном из реÑурÑов ARC. По щелчку выводитÑÑ ÑпиÑок членов группы.
    Члены
    КоличеÑтво членов группы.
    ОбÑлуживаетÑÑ
    ÐÐ´Ñ€ÐµÑ Ñервера, поддерживающего базу данных членов группы.
    ", "Ð’Ð¸Ñ€Ñ‚ÑƒÐ°Ð»ÑŒÐ½Ð°Ñ Ð¾Ñ€Ð³Ð°Ð¸Ð·Ð°Ñ†Ð¸Ñ" => 0, "Члены" => 0, "ОбÑлуживаетÑÑ" => 0 ), "vousers" => array( "0" => "Пользователи", "help" => "
    ИмÑ
    ".$clickable.". ".$str_nam."
    МеÑто работы
    МеÑто работы пользователÑ, в ÑоответÑтвии Ñ Ð·Ð°Ð¿Ð¸Ñью в базе данных. ÐеобÑзательно.
    Ð­Ð»ÐµÐºÑ‚Ñ€Ð¾Ð½Ð½Ð°Ñ Ð¿Ð¾Ñ‡Ñ‚Ð°
    ".$clickable.". ÐÐ´Ñ€ÐµÑ Ñлектронной почты пользователÑ, в ÑоответÑтвии Ñ Ð·Ð°Ð¿Ð¸Ñью в базе данных. ÐеобÑзательно. По щелчку ÑоздаетÑÑ Ñообщение Ð´Ð»Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ.
    ", "#" => 0, "ИмÑ" => 0, "МеÑто работы" => 0, "Ð­Ð»ÐµÐºÑ‚Ñ€Ð¾Ð½Ð½Ð°Ñ Ð¿Ð¾Ñ‡Ñ‚Ð°" => 0 ), "userlist" => array( "0" => "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð´Ð»Ñ", "help" => "
    РеÑурÑ:очередь
    ".$clickable.". ÐÐ°Ð·Ð²Ð°Ð½Ð¸Ñ Ñ€ÐµÑурÑов (клаÑтеров) и ÑоответÑтвующих очередей СУПО (разделённые двоеточием), доÑтупных данному пользователю. ЕÑли доÑтуп закрыт, выводитÑÑ Ñообщение "Ðет доÑтупа к реÑурÑу". По щелчку на названии клаÑтера выводитÑÑ Ð¿Ð¾Ð»Ð½Ð¾Ðµ опиÑание реÑурÑа (клаÑтера). По щелчку на названии очереди выводитÑÑ Ð¿Ð¾Ð»Ð½Ð¾Ðµ опиÑание очереди.
    Свободные ЦП.
    ЧиÑло Ñвободных центральных процеÑÑоров, доÑтупных в данной очереди Ð´Ð»Ñ Ð´Ð°Ð½Ð½Ð¾Ð³Ð¾ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð½Ð° данный момент времени. ЕÑли очередь имеет Ð¾Ð³Ñ€Ð°Ð½Ð¸Ñ‡ÐµÐ½Ð¸Ñ Ð¿Ð¾ времени на иÑполнение задач, Ñтот предел указан поÑле чиÑла процеÑÑоров (в минутах, разделÑетÑÑ Ð´Ð²Ð¾ÐµÑ‚Ð¾Ñ‡Ð¸ÐµÐ¼). Ðапример, "3" означает, что 3 процеÑÑора доÑтупно Ð´Ð»Ñ Ñколь угодно продолжительных задач; "4:360" означает, что 4 процеÑÑора доÑтупно Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡, не превышающих 6 чаÑов; "10:180 30" означает, что 10 процеÑÑоров доÑтупно Ð´Ð»Ñ Ð·Ð°Ð´Ð°Ñ‡, не превышающих 3 чаÑов, и 30 процеÑÑоров доÑтупно Ð´Ð»Ñ Ñколь угодно продолжительных задач; "0" означает, что Ñвободных реÑурÑов нет, и задачи будут направлены на ожидание в очереди.
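A small hypothetical PHP sketch of the free-CPU notation just described ("3", "4:360", "10:180 30", "0"); it is not part of the monitor code, and the function name and keys are illustrative.

<?php
// Hypothetical helper: decode a nordugrid-authuser-freecpus value such as
// "10:180 30" into (cpus, time limit in minutes) pairs; a missing limit
// means jobs of any length are accepted on those CPUs.
function parse_freecpus($value) {
    $slots = array();
    foreach (preg_split('/\s+/', trim($value)) as $entry) {
        if ($entry === '') {
            continue;                       // nothing advertised
        }
        $parts = explode(':', $entry);
        $slots[] = array(
            'cpus'    => (int) $parts[0],
            'minutes' => isset($parts[1]) ? (int) $parts[1] : null,
        );
    }
    return $slots;
}
// parse_freecpus("10:180 30") => 10 CPUs for jobs up to 180 min, plus 30 CPUs with no limit
?>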
    Задачи в очереди
    КоличеÑтво задач пользователÑ, раÑположенных в ÑпиÑке Ð¾Ð¶Ð¸Ð´Ð°Ð½Ð¸Ñ Ð¿ÐµÑ€ÐµÐ´ новой задачей, заÑланной от имени данного пользователÑ. ЧиÑло "0" означает, что задача предположительно будет запущена на Ñчёт немедленно. Внимание! Это лишь предположительные значениÑ, которые могут быть изменены локальными операторами.
    ДиÑк, доÑтупно (Мб)
    ПроÑтранÑтво на локальном жёÑтком диÑке, доÑтупное данному пользователю в данной очереди (в мегабайтах). Внимание! Это лишь предположительные значениÑ, Ñ‚.к. большинÑтво клаÑтеров не поддерживают диÑковые квоты.
    Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸
    ".$clickable.". ".$str_job."
    СоÑтоÑние
    ".$str_sta."
    Ð’Ñ€ÐµÐ¼Ñ (мин)
    ".$str_tim."
    РеÑурÑ
    ".$clickable.". Ð˜Ð¼Ñ Ñ€ÐµÑурÑа (обычно, клаÑтера), на котором проиÑходит иÑполнение. задачи. По щелчку выводитÑÑ Ð¿Ð¾Ð»Ð½Ð¾Ðµ опиÑание реÑурÑа (клаÑтера).
    Очередь
    ".$clickable.". Ðазвание очереди СУПО, в которой проиÑходит иÑполнение задачи. ".$str_que."
    ЦП
    ".$str_cpu."
    ", "" => 0, "Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" => 0, "СоÑтоÑние" => 0, "Ð’Ñ€ÐµÐ¼Ñ (мин)" => 0, "РеÑурÑ" => 0, "Очередь" => 0, "ЦП" => 0 ), "attlist" => array( "0" => "Ð—Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð²", "help" => "
    Объект
    ".$clickable." Ðазвание объекта, атрибуты которого перечиÑлены в Ñтроке. Это может быть Ð¸Ð¼Ñ ÐºÐ»Ð°Ñтера, Ð¸Ð¼Ñ Ð¾Ñ‡ÐµÑ€ÐµÐ´Ð¸, Ð¸Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸, Ð¸Ð¼Ñ Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð¸.Ñ‚.д.. По щелчку выводитÑÑ Ð¿Ð¾Ð´Ñ€Ð¾Ð±Ð½Ð¾Ðµ опиÑание объекта.
    Ðтрибут
    Ð”Ð»Ñ ÐºÐ°Ð¶Ð´Ð¾Ð³Ð¾ объекта в таблице приведены Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð¾Ð´Ð½Ð¾Ð³Ð¾ или неÑкольких его атрибутов. Ð’ заголовке Ñтолбца указано название атрибута, интерпретированное Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñтоты Ñ‡Ñ‚ÐµÐ½Ð¸Ñ (за иÑключением неÑкольких атрибутов, Ñпецифичных Ð´Ð»Ñ ÑиÑтемы MDS), а Ñодержимым каждого Ñтолбца ÑвлÑÑŽÑ‚ÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ ÑоответÑтвующих атрибутов, запиÑанные в Информационной СиÑтеме.
    ", "Объект" => 0, "Ðтрибут" => 0 ), "quelist" => array( "0" => "Очередь", "help" => "
    Ðтрибут
    ".$clickable.". ÐÐ°Ð·Ð²Ð°Ð½Ð¸Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð² очереди".$str_att."
    Значение
    ".$str_val."
    Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸
    ".$clickable.". ".$str_job."
    ХозÑин
    ".$clickable.". ".$str_nam."
    СоÑтоÑние
    ".$str_sta."
    Ð’Ñ€ÐµÐ¼Ñ (мин)
    ".$str_tim."
    ОЗУ (Кб)
    ".$str_mem."
    ЦП
    ".$str_cpu."
    ", "" => 0, "Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" => 0, "ХозÑин" => 0, "СоÑтоÑние" => 0, "Ð’Ñ€ÐµÐ¼Ñ (мин)" => 0, "ОЗУ (Кб)" => 0, "ЦП" => 0 ), "sestat" => array( "0" => "Ðакопительные уÑтройÑтва", "help" => "
    Ðазвание
    Ðазвание накопительного уÑтройÑтва, зарегиÑтрированное в Информационной СиÑтеме. МакÑимально допуÑÑ‚Ð¸Ð¼Ð°Ñ Ð´Ð»Ð¸Ð½Ð°: 15 Ñимволов.
    ВеÑÑŒ объём
    Полный объём диÑка, Гб.
    Свободно
    ДоÑтупное проÑтранÑтво на диÑке в наÑтоÑщий момент, Гб.
    ИмÑ
    Ð˜Ð¼Ñ Ð½Ð°ÐºÐ¾Ð¿Ð¸Ñ‚ÐµÐ»ÑŒÐ½Ð¾Ð³Ð¾ уÑтройÑтва, ÑоÑтоÑщее из логичеÑкого имени и имени Ñервера (разделённое двоеточием). ЛогичеÑкое Ð¸Ð¼Ñ Ð¸ÑпользуетÑÑ Ñ‚Ð¾Ð»ÑŒÐºÐ¾ Информационной СиÑтемой, Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñтоты раÑÐ¿Ð¾Ð·Ð½Ð°Ð²Ð°Ð½Ð¸Ñ Ñ€Ð°Ð·Ð½Ñ‹Ñ… накопительных уÑтройÑтв, находÑщихÑÑ Ð½Ð° одном и том же Ñервере.
    URL базы
    URL накопительного уÑтройÑтва, обычно по протоколу gsiftp://. ИÑпользуйте Ñтот Ð°Ð´Ñ€ÐµÑ ÐºÐ°Ðº базовый Ð´Ð»Ñ Ð´Ð¾Ñтупа к файлам.
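As an aside, a minimal sketch of using such a base URL to address a file; the host and file names below are made up for illustration and do not come from the document.

<?php
// Illustrative only: build a full transfer URL from a storage element's
// base URL, as suggested above. Host and path are fictitious.
$base = 'gsiftp://se.example.org:2811/data';
$file = 'myjob/output.dat';
$url  = rtrim($base, '/') . '/' . ltrim($file, '/');
// $url: gsiftp://se.example.org:2811/data/myjob/output.dat
?>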
    Тип
    Тип накопительного уÑтройÑтва. Тип "gridftp-based" означает что Ñто диÑковый накопитель Ñ Ð¸Ð½Ñ‚ÐµÑ€Ñ„ÐµÐ¹Ñом GridFTP.
    ", "#" => 0, "Ðазвание" => 0, // "ВеÑÑŒ объём" => 0, "Свободно/веÑÑŒ объём, Гб"=> 0, "ИмÑ" => 0, "URL базы" => 0, "Тип" => 0 ), "allusers" => array( "0" => "Допущенные пользователи:Ðктивные пользователи", "help" => "
    ИмÑ
    ".$clickable.". ".$str_nam."
    МеÑто работы
    МеÑто работы пользователÑ, в ÑоответÑтвии Ñ Ð·Ð°Ð¿Ð¸Ñью в его Ñертификате.
    Задачи
    ЧиÑло вÑех задач пользователÑ, находÑщихÑÑ Ð² ÑиÑтеме (в Ñчёте, в очереди и закончившихÑÑ).
    РеÑурÑÑ‹
    ЧиÑло клаÑтеров, на которых данный пользователь имеет допуÑк.
    ", "#" => 0, "ИмÑ" => 0, "МеÑто работы" => 0, "Задачи" => 0, "РеÑурÑÑ‹" => 0 ), "userres" => array( "0" => "", "РеÑурÑ:очередь" => 0, "Свободные ЦП" => 0, "Задачи в очереди" => 0, "ДиÑк, доÑтупно (Мб)" => 0 ), "ldapdump" => array( "0" => "", "Ðтрибут" => 0, "Значение" => 0 ), "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Данные дейÑтвительны Ñ (GMT)", "Mds-validto" => "Данные дейÑтвительны по (GMT)" ), "isattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Данные дейÑтвительны Ñ (GMT)", "Mds-validto" => "Данные дейÑтвительны по (GMT)", "nordugrid-cluster-name" => "Ð˜Ð¼Ñ Ð³Ð¾Ð»Ð¾Ð²Ð½Ð¾Ð¹ машины", "nordugrid-cluster-aliasname" => "Ðазвание", "nordugrid-cluster-contactstring" => "Контактный адреÑ", "nordugrid-cluster-interactive-contactstring" => "Интерактивный адреÑ", "nordugrid-cluster-comment" => "Комментарий", "nordugrid-cluster-support" => "ÐÐ´Ñ€ÐµÑ Ð¾Ñ‚Ð²ÐµÑ‚Ñтвенного", "nordugrid-cluster-acl" => "Допущенные ВО", "nordugrid-cluster-lrms-type" => "СУПО, тип", "nordugrid-cluster-lrms-version" => "СУПО, верÑиÑ", "nordugrid-cluster-lrms-config" => "СУПО, подробноÑти", "nordugrid-cluster-architecture" => "Ðрхитектура", "nordugrid-cluster-opsys" => "ÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ð¾Ð½Ð½Ð°Ñ ÑиÑтема", "nordugrid-cluster-homogeneity" => "ОднородноÑть реÑурÑа", "nordugrid-cluster-nodecpu" => "ПроцеÑÑор, тип (худший)", "nordugrid-cluster-nodememory" => "ОЗУ (Мб, наименьшее)", "nordugrid-cluster-totalcpus" => "ПроцеÑÑоры, вÑего", "nordugrid-cluster-cpudistribution" => "ПроцеÑÑоры:узлы", "nordugrid-cluster-benchmark" => "Эталонный теÑÑ‚", "nordugrid-cluster-sessiondir-free" => "ДиÑк, доÑтупно (Мб)", "nordugrid-cluster-sessiondir-total" => "ДиÑк, веÑÑŒ объём (Мб)", "nordugrid-cluster-sessiondir-lifetime"=> "Ð’Ñ€ÐµÐ¼Ñ Ð¶Ð¸Ð·Ð½Ð¸ Грид-ÑеÑÑии (мин)", "nordugrid-cluster-cache-free" => "ДиÑковый кÑш, Ñвободно (Мб)", "nordugrid-cluster-cache-total" => "ДиÑковый кÑш, вÑего (Мб)", "nordugrid-cluster-runtimeenvironment" => "Ð Ð°Ð±Ð¾Ñ‡Ð°Ñ Ñреда", "nordugrid-cluster-localse" => "Локальный накопитель", "nordugrid-cluster-middleware" => "Грид-ПО", "nordugrid-cluster-totaljobs" => "Задачи, вÑего", "nordugrid-cluster-usedcpus" => "ПроцеÑÑоры, занÑтые", "nordugrid-cluster-queuedjobs" => "Задачи в очереди (УСТÐРЕВШИЙ)", "nordugrid-cluster-prelrmsqueued" => "Грид-задачи, ждущие заÑылки", "nordugrid-cluster-location" => "Почтовый индекÑ", "nordugrid-cluster-owner" => "Владелец", "nordugrid-cluster-issuerca" => "Центр Ñертификации", "nordugrid-cluster-issuerca-hash" => "Хеш-код центра Ñертификации", "nordugrid-cluster-trustedca" => "ДоверÑемые центры Ñертификации", "nordugrid-cluster-nodeaccess" => "IP-Ñоединение узлов", "nordugrid-cluster-gridarea" => "ÐÐ´Ñ€ÐµÑ ÑеÑÑий (УСТÐРЕВШИЙ)", "nordugrid-cluster-gridspace" => "Грид-диÑк (УСТÐРЕВШИЙ)", "nordugrid-cluster-opsysdistribution" => "ДиÑтрибутив ОС (УСТÐРЕВШИЙ)", "nordugrid-cluster-runningjobs" => "Задачи в Ñчёте (УСТÐРЕВШИЙ)", "nordugrid-cluster-credentialexpirationtime" => "Срок дейÑÑ‚Ð²Ð¸Ñ Ñертификата", "nordugrid-queue-name" => "Ð˜Ð¼Ñ Ð¾Ñ‡ÐµÑ€ÐµÐ´Ð¸", "nordugrid-queue-comment" => "Комментарий", "nordugrid-queue-status" => "СоÑтоÑние очереди", "nordugrid-queue-running" => "Ð’Ñе занÑтые процеÑÑоры", "nordugrid-queue-localqueued" => "Локальные задачи в очереди", "nordugrid-queue-prelrmsqueued" => "Грид-задачи, ждущие заÑылки", "nordugrid-queue-queued" => "Задачи в очереди (УСТÐРЕВШИЙ)", "nordugrid-queue-maxrunning" => "Задачи в Ñчёте (предел)", "nordugrid-queue-maxqueuable" => "Задачи в очереди (предел)", "nordugrid-queue-maxuserrun" => 
"Задачи на Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ (предел)", "nordugrid-queue-maxcputime" => "Ð’Ñ€ÐµÐ¼Ñ Ð¿Ñ€Ð¾Ñ†ÐµÑÑора, наибольшее (мин)", "nordugrid-queue-mincputime" => "Ð’Ñ€ÐµÐ¼Ñ Ð¿Ñ€Ð¾Ñ†ÐµÑÑора, наименьшее (мин)", "nordugrid-queue-defaultcputime" => "Ð’Ñ€ÐµÐ¼Ñ Ð¿Ñ€Ð¾Ñ†ÐµÑÑора, по умолчанию (мин)", "nordugrid-queue-maxwalltime" => "ПродолжительноÑть, Ð½Ð°Ð¸Ð±Ð¾Ð»ÑŒÑˆÐ°Ñ (мин)", "nordugrid-queue-minwalltime" => "ПродолжительноÑть, Ð½Ð°Ð¸Ð¼ÐµÐ½ÑŒÑˆÐ°Ñ (мин)", "nordugrid-queue-defaultwalltime" => "ПродолжительноÑть, по умолчанию (мин)", "nordugrid-queue-schedulingpolicy" => "Правила планировки", "nordugrid-queue-totalcpus" => "ПроцеÑÑоры, вÑего", "nordugrid-queue-nodecpu" => "ПроцеÑÑор, тип", "nordugrid-queue-nodememory" => "ОЗУ (Мб)", "nordugrid-queue-architecture" => "Ðрхитектура", "nordugrid-queue-opsys" => "ÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ð¾Ð½Ð½Ð°Ñ ÑиÑтема", "nordugrid-queue-homogeneity" => "ОднородноÑть очереди", "nordugrid-queue-gridrunning" => "ПроцеÑÑоры под грид-задачами", "nordugrid-queue-gridqueued" => "Грид-задачи в очереди", "nordugrid-queue-benchmark" => "Эталонный теÑÑ‚", "nordugrid-queue-assignedcpunumber" => "ПроцеÑÑоры (УСТÐРЕВШИЙ)", "nordugrid-queue-assignedcputype" => "Тип процеÑÑора (УСТÐРЕВШИЙ)", "nordugrid-job-globalid" => "Ярлык", "nordugrid-job-globalowner" => "ХозÑин", "nordugrid-job-execcluster" => "ВыполнÑющий клаÑтер", "nordugrid-job-execqueue" => "ВыполнÑÑŽÑ‰Ð°Ñ Ð¾Ñ‡ÐµÑ€ÐµÐ´ÑŒ", "nordugrid-job-stdout" => "Стандартный выход", "nordugrid-job-stderr" => "Ð¡Ñ‚Ð°Ð½Ð´Ð°Ñ€Ñ‚Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ°", "nordugrid-job-stdin" => "Стандартный вход", "nordugrid-job-reqcputime" => "Запрошенное процеÑÑорное времÑ", "nordugrid-job-reqwalltime" => "Запрошенное времÑ", "nordugrid-job-status" => "СоÑтоÑние", "nordugrid-job-queuerank" => "Положение в очереди", "nordugrid-job-comment" => "Комментарий СУПО", "nordugrid-job-submissionui" => "ЗаÑылающий клиент", "nordugrid-job-submissiontime" => "Ð’Ñ€ÐµÐ¼Ñ Ð·Ð°Ñылки (GMT)", "nordugrid-job-usedcputime" => "ИÑпользованное процеÑÑорное времÑ", "nordugrid-job-usedwalltime" => "ИÑпользованное времÑ", "nordugrid-job-completiontime" => "Ð’Ñ€ÐµÐ¼Ñ Ð¾ÐºÐ¾Ð½Ñ‡Ð°Ð½Ð¸Ñ (GMT)", "nordugrid-job-sessiondirerasetime" => "Срок ÑƒÐ½Ð¸Ñ‡Ñ‚Ð¾Ð¶ÐµÐ½Ð¸Ñ (GMT)", "nordugrid-job-proxyexpirationtime" => "Окончание доверенноÑти (GMT)", "nordugrid-job-usedmem" => "ИÑпользование ОЗУ (Кб)", "nordugrid-job-errors" => "Ошибки", "nordugrid-job-exitcode" => "Код возврата", "nordugrid-job-jobname" => "ИмÑ", "nordugrid-job-runtimeenvironment" => "Ð Ð°Ð±Ð¾Ñ‡Ð°Ñ Ñреда", "nordugrid-job-cpucount" => "Запрошено процеÑÑоров", "nordugrid-job-executionnodes" => "ВыполнÑющие узлы", "nordugrid-job-gmlog" => "Ð–ÑƒÑ€Ð½Ð°Ð»ÑŒÐ½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ ГМ", "nordugrid-job-clientsoftware" => "ВерÑÐ¸Ñ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ð°", "nordugrid-job-rerunable" => "ПерезапуÑкаемоÑть", "nordugrid-job-reqcput" => "Запрошенное Ð²Ñ€ÐµÐ¼Ñ (УСТÐРЕВШИЙ)", "nordugrid-job-gridlog" => "Грид-запиÑÑŒ (УСТÐРЕВШИЙ)", "nordugrid-job-lrmscomment" => "Комментарий СУПО (УСТÐРЕВШИЙ)", "nordugrid-authuser-name" => "ИмÑ", "nordugrid-authuser-sn" => "Субъект", "nordugrid-authuser-freecpus" => "Свободные ЦП", "nordugrid-authuser-diskspace" => "ДиÑк, доÑтупно (Мб)", "nordugrid-authuser-queuelength" => "Задачи Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ Ð² очереди", "nordugrid-se-name" => "УÑловное имÑ", "nordugrid-se-aliasname" => "Ðазвание", "nordugrid-se-type" => "Тип", "nordugrid-se-acl" => "Допущенные ВО", "nordugrid-se-freespace" => "Свободный объём (Мб)", "nordugrid-se-totalspace" => "ВеÑÑŒ объём (Мб)", "nordugrid-se-url" => "Контактный адреÑ", "nordugrid-se-baseurl" => "Контактный 
Ð°Ð´Ñ€ÐµÑ (УСТÐРЕВШИЙ)", "nordugrid-se-accesscontrol" => "Контроль доÑтупа", "nordugrid-se-authuser" => "Допущенные пользователи (DN)", "nordugrid-se-location" => "Почтовый индекÑ", "nordugrid-se-owner" => "Владелец", "nordugrid-se-middleware" => "Грид-ПО", "nordugrid-se-issuerca" => "Центр Ñертификации", "nordugrid-se-issuerca-hash" => "Хеш-код центра Ñертификации", "nordugrid-se-trustedca" => "ДоверÑемые центры Ñертификации", "nordugrid-se-comment" => "Комментарий", "nordugrid-rc-name" => "Доменное имÑ", "nordugrid-rc-aliasname" => "Ðазвание", "nordugrid-rc-baseurl" => "Контактный адреÑ", "nordugrid-rc-authuser" => "Допущенные пользователи (DN)", "nordugrid-rc-location" => "Почтовый индекÑ", "nordugrid-rc-owner" => "Владелец", "nordugrid-rc-issuerca" => "Сертификат выдан" ), "errors" => array( "1" => "Ðевозможно прочеÑть ÑпиÑки выÑшего уровнÑ", "2" => "Ðи один из меÑтных ÑпиÑков не отзываетÑÑ", "3" => " Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ ÐºÐ¾Ð½Ñ„Ð¸Ð³ÑƒÑ€Ð°Ñ†Ð¸Ñ Ð¸Ð»Ð¸ иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа", "4" => "Ðет Грид-задач", "5" => "Ðет информации", "6" => "Служба недоÑтупна", "7" => " - попробуйте обновить позже", "8" => "Ðет информации об очереди", "9" => "Ðет данных", "10" => "Ðет пользователей", "11" => "Ðет доÑтупа к реÑурÑу", "12" => "не отзываетÑÑ", "13" => "Ðа наÑтоÑщий момент нет задач Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ ", "101" => " Ð’Ñ€ÐµÐ¼Ñ Ð½Ð° ÑвÑзь Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ñ‹Ð¼ ÑпиÑком: ", "102" => " Ñ Ð½Ð° Ñоединение и ", "103" => " Ñ Ð½Ð° поиÑк", "104" => " Ñ Ð·Ð°Ñ‚Ñ€Ð°Ñ‡ÐµÐ½Ð¾ на поиÑк", "105" => "ПеречиÑление реÑурÑов: ", "106" => "Опрошено ÑпиÑков верхнего уровнÑ: ", "107" => "Получены географичеÑкие координаты, проÑканировано реÑурÑов: ", "108" => " реÑурÑов упорÑдочено по геополитичеÑкому признаку", "109" => "ПоиÑк атрибутов клаÑтера", "110" => "ПоиÑк атрибутов очереди", "111" => "Ðет данных Ñ ", "112" => " функционирует в Ñтране: ", "113" => " не раÑполагает реÑурÑами", "114" => " Ð’Ñ€ÐµÐ¼Ñ Ð½Ð° ÑвÑзь Ñ Ð³Ð»Ð¾Ð±Ð°Ð»ÑŒÐ½Ñ‹Ð¼ ÑпиÑком: ", "115" => "ИгнорируетÑÑ Ñ€ÐµÑурÑ: ", "116" => "не ÑоответÑтвует типу ", "117" => "Проверка ÑвÑзи: ", "118" => "еÑть", "119" => "Ðа данный момент обнаружено реÑурÑов типа ", "120" => "Ошибка LDAP при поиÑке на ", "121" => "-ÑоÑтоÑние на ", "122" => "Заблокирован: ", "123" => "Обнаружен региÑтрант ", "124" => "ПоиÑк атрибутов накопителей", "125" => "ПоиÑк пользователей", "126" => "ПоиÑк задач", "127" => " запуÑтил(а) задачу ", "128" => " не будучи допущенным(ой)", "129" => "Ðет информации об объекте: ошибка ", "130" => " Ð’Ñ€ÐµÐ¼Ñ Ð½Ð° ÑвÑзь Ñ Ð³Ð»Ð¾Ð±Ð°Ð»ÑŒÐ½Ñ‹Ð¼ ÑпиÑком: ", "301" => "Перезагрузить", "302" => "Печать", "303" => "Помощь", "304" => "Закрыть", "305" => "КраÑный", "306" => "Серый", "307" => "Ð’Ñе пользователи", "308" => "Ðктивные пользователи", "309" => "ПоиÑк", "310" => "Ðакопители", "311" => "Виртуальные организации", "312" => "Флаг Ñтраны: ", "313" => " Грид-процеÑÑов и ", "314" => " меÑтных процеÑÑов", "401" => "ПроцеÑÑÑ‹", "402" => "Грид", "403" => "меÑтные", "404" => "Мир", "405" => "ВСЕГО", "406" => " объектов", "407" => "куча", "408" => " Гб", "409" => " ВСЕ", "410" => "КлаÑтер", "411" => "Очередь", "412" => "Задача", "413" => "Пользователь", "414" => "Ðакопитель", "415" => "Каталог реплик", "416" => "Задайте атрибуты Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñмотра; выбранный объект: ", "417" => "ПоиÑк проводитÑÑ Ð´Ð»Ñ Ð»Ð¾Ð³Ð¸Ñ‡ÐµÑкого И вÑех выражений", "418" => "Ðе заполнÑйте правое поле, еÑли фильтр не нужен", "419" => "ПроÑмотр реÑурÑов или объектов по выбору", "420" => "Выделенное имÑ", "421" => "Может иÑпользовать ", "422" => " клаÑтеров", "423" => "РеÑÑƒÑ€Ñ / объект:", 
"424" => "Кол.-во атрибутов (6 по ум.):", "425" => "Объект", "426" => "Дальше", "427" => "Выберите", "428" => "ОчиÑтить", "429" => "ПОКÐЗÐТЬ" ), // Country name conversion, no postcode! "tlconvert" => array ( "Australia" => "ÐвÑтралиÑ", "Austria" => "ÐвÑтриÑ", "Armenia" => "ÐрмениÑ", "Algeria" => "Ðлжир", "Belgium" => "БельгиÑ", "Bulgaria" => "БолгариÑ", "Canada" => "Канада", "Chile" => "Чили", "China" => "Китай", "Czechia" => "ЧехиÑ", "Denmark" => "ДаниÑ", "Estonia" => "ЭÑтониÑ", "Finland" => "ФинлÑндиÑ", "France" => "ФранциÑ", "Georgia" => "ГрузиÑ", "Germany" => "ГерманиÑ", "Greece" => "ГрециÑ", "HongKong" => "Гонконг", "Hungary" => "ВенгриÑ", "Iceland" => "ИÑландиÑ", "Ireland" => "ИрландиÑ", "Italy" => "ИталиÑ", "Japan" => "ЯпониÑ", "Latvia" => "ЛатвиÑ", "Lithuania" => "Литва", "Morocco" => "Марокко", "Netherlands" => "Ðидерланды", "Norway" => "ÐорвегиÑ", "Poland" => "Польша", "Portugal" => "ПортугалиÑ", "Romania" => "РумыниÑ", "Russia" => "РоÑÑиÑ", "SriLanka" => "Шри-Ланка", "Sweden" => "ШвециÑ", "Slovakia" => "СловакиÑ", "Slovenia" => "СловениÑ", "Spain" => "ИÑпаниÑ", "Switzerland" => "ШвейцариÑ", "Taiwan" => "Тайвань", "Turkey" => "ТурциÑ", "UK" => "ВеликобританиÑ", "Ukraine" => "Украина", "USA" => "СШÐ", "World" => "Мир" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435024427 xustar000000000000000030 mtime=1638455069.829118126 30 atime=1638455091.108437858 30 ctime=1638455100.609580617 nordugrid-arc-6.14.0/src/services/monitor/lang/Makefile.in0000644000175000002070000005130214152153435024415 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/monitor/lang DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ 
for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(monitorlangdir)" DATA = $(monitorlang_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ 
GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = 
@PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ monitorlangdir = @monitor_prefix@/lang monitorlang_DATA = $(srcdir)/*.inc EXTRA_DIST = $(monitorlang_DATA) all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign 
src/services/monitor/lang/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/monitor/lang/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-monitorlangDATA: $(monitorlang_DATA) @$(NORMAL_INSTALL) @list='$(monitorlang_DATA)'; test -n "$(monitorlangdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(monitorlangdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(monitorlangdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(monitorlangdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(monitorlangdir)" || exit $$?; \ done uninstall-monitorlangDATA: @$(NORMAL_UNINSTALL) @list='$(monitorlang_DATA)'; test -n "$(monitorlangdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(monitorlangdir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(DATA) installdirs: for dir in "$(DESTDIR)$(monitorlangdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-monitorlangDATA install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-monitorlangDATA .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-monitorlangDATA install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags-am uninstall uninstall-am \ uninstall-monitorlangDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/no.inc0000644000000000000000000000013014152153376023473 xustar000000000000000028 mtime=1638455038.4406465 30 atime=1638455038.513647597 30 ctime=1638455100.616580722 nordugrid-arc-6.14.0/src/services/monitor/lang/no.inc0000644000175000002070000013711014152153376023465 0ustar00mockbuildmock00000000000000 N/A indikerer at eier ikke har tildelt jobben et navn.
    X indikerer at eier har avbrutt jobben.
    ! indikerer at jobben ikke ble fullført.
    Klikk på et navn for å få en detaljert beskrivelse av jobben."; $str_nam = "Brukernavn som spesifisert i det personlige sertifikatet. Klikk på et navn for resurser tilgjengelige for denne brukeren og dennes jobber i systemet."; $str_sta = "Jobbstatus som returnert av gridmanageren (GM) og lokalt resursmanagementsystem LRMS. Kronologisk er tilstandene :
    ACCEPTED – jobben er sendt, men ikke behandlet.
    PREPARING – inputfiler hentes.
    SUBMITTING – forhandlinger med LRMS pågår
    INLRMS – jobben er overført til LRMS. Informasjonssystemet lagrer lokal status. Mulige tilstander er:
    : Q – jobben er i køen
    : U – jobben er satt på vent på en opptatt maskin (PBSPro)
    : S – jobben er satt på vent (Condor)
    : R, run – jobben kjøres.
    : E – jobben avsluttes (PBS)
    FINISHING – outputfiler overføres av GM.
    FINISHED – jobben er avsluttet; tidsstempel legges til av informasjonssystemet.
    CANCELING – jobben avbrytes.
    DELETED – jobben er ikke ryddet opp av eier, men slettet av GM på grunn av overgått lagringstid.
    Alle disse tilstandene kan meldes med prefikset PENDING: som betyr at GM prøver å flytte jobben over i neste tilstand."; $str_tim = "CPU-tid i minutter brukt av jobben."; $str_mem = "Minne i KB brukt av jobben."; $str_cpu = "Antall prosessorer brukt av jobben."; // Actual messages $message = array ( // Table headers and help (do not localize "0" and "help" keys) // For "help", keywords in
    must correspond to column titles below the help text ) "loadmon" => array( "0" => "Gridmonitor", "help" => "
    Denne siden viser alle klynger (sites) som har registrert seg i indekstjenesten til ARC, sortert etter land og deretter maskinnavn. Følgende klyngeparametere listes : klyngealias, total CPU-kapasitet og antall kjørende og ventende jobber, både lokale jobber og gridjobber. Bruk søkefunksjonen hvis annen informasjon om klynger, køer, jobber eller lignende er ønsket.
    Land
    ".$clickable.". Flagg og navn på land hentet fra tilgjengellige resursbeskrivelser. Klikk for å få opp infomasjon om et land.
    Klynger
    ".$clickable.". Klyngealias tildelt av eier. Maksimalt vises 22 tegn. Klikk på aliaset for en detaljert klyngebeskrivelse.
    CPU-er
    Totalt antall CPU-er i en klynge. OBS! Muligens er bare noen av disse tilgjengelige for gridbrukere.
    Belastning (prosesser: grid + lokalt)
    ".$clickable.". Relativ klyngebelastning som tilsvarer antall opptatte CPU-er. Grå felt viser antall prosessorer som kjører lokale jobbber, røde felt viser antall CPU-er som kjører gridjobber. Klikk på feltet for en detaljert liste over alle gridjobber som kjøres på klyngen, inklusive antall prosessorer per jobb.
    Ventende
    ".$clickable.". Totalt antall jobber som venter på klyngen, vises som antall ventende gridjobber pluss antall ventende lokale jobber. Klikk på det første sifferet for å liste ventende gridjobber på klyngen.
    ", "Land" => 30, "Klynge" => 160, "CPU-er" => 10, "Belastning (prosesser: grid + lokalt)" => 210, "Ventende" => 10 ), "clusdes" => array("0" => "Resursinformasjon for", "help" => "
    Attributt
    ".$clickable.". Klyngeattributtnavn".$str_att."
    Verdi
    ".$str_val."
    Kø
    ".$clickable.". Klyngeeiers navn på batchkøene som er tilgjengelige for ARC brukere.".$str_que."
    Status
    Køstatus. Fungerende køer viser normalt status active.
    Tidsgrenser (min)
    Tidsgrense for jobblengde per kø, hvis definert, i CPU-minutter. Den første verdien er den nedre grensen, den andre den øvre. Hvis ingen grenser er definert, dvs. alle jobber er tillatt, vises N/A
    Kjøres
    Antall kjørende jobber. Totalt antall jobber vises med antall prosessorer med gridjobber i parentes, f.eks. (Grid: 12). OBS! For parallelle multiprosessorjobber kan nummeret i parentes være større enn antall jobber.
    Køer
    Antall jobber i køen. Totalt antall jobber vises med gridjobber i parentes, f.eks. (Grid: 235)
    ", "Kø" => 0, "Mapping Queue" => 0, "Status" => 0, "Tidsgrenser (min)" => 0, "CPU-er" => 0, "Kjøres" => 0, "Køer" => 0 ), "jobstat" => array("0" => "Jobber på:Jobb-ID", "help" => "
    JOBBLISTE:
    Jobbnavn
    ".$clickable.". ".$str_job."
    Eier
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Kø
    ".$clickable.". Navn på batchkø hvor jobben kjøres. ".$str_que."
    CPU-er
    ".$str_cpu."
    JOBBINFORMASJON:
    Attributt
    ".$clickable.". Jobbattributtnavn".$str_att."
    Verdi
    ".$str_val."
    ", "Jobbnavn" => 0, "Eier" => 0, "Status" => 0, "CPU (min)" => 0, "Kø" => 0, "CPU-er" => 0 ), "volist" => array("0" => "Virtuelle organisasjoner", "help" => "
    Virtuell organisasjon VO
    ".$clickable.". En gruppe brukere som ofte arbeider med det samme og bruker de samme resursene. En VO er autorisert på minst en ARC klynge. Klikk på navnet for å få en liste over medlemmene.
    Medlemmer
    Antall medlemmer.
    Tjener
    LDAP-tjener som huser databasen med medlemskapene.
    ", "Virtuell organisasjon" => 0, "Medlemmer" => 0, "Tjener" => 0 ), "vousers" => array("0" => "Gridbrukerbase", "help" => "
    Navn
    ".$clickable.". ".$str_nam."
    Tilknytning
    Brukerens hjemmeinstitutt registrert av en VO manager. Kan være tomt.
    E-post
    ".$clickable.". Brukerens e-post registrert av en VO-manager. Kan være tomt. Klikk på adressen for å sende en e-post til brukeren.
    ", "#" => 0, "Navn" => 0, "Tilknytning" => 0, "E-post" => 0 ), "userlist" => array("0" => "Informasjon om", "help" => "
    Klynge:kø
    ".$clickable.". Navn på klynge og dens respektive køer (separert med et kolon, ":") som brukerern er autorisert til å sende jobber til. Hvis brukeren ikke er autorisert vises meldingen "Not authorised at host ...". Klikk på klyngenavn for å få detaljert klyngebeskrivelse. Klikk på kønavn for å få detaljert købeskrivelse.
    Ledige CPU-er
    Antall ledige CPU-er i køen for denne brukeren i øyeblikket, iblant med en øvre tidsgrense i minutter. F.eks. "3" betyr tre CPU-er tilgjengelige for en jobb med ubegrenset kjøringstid; "4:360" indikerer at det finnes fire CPU-er tilgjengelige for jobber kortere enn seks timer; "10:180 30" betyr at det finnes ti CPU-er tilgjengelige for jobber som ikke overgår tre timer, pluss 30 CPU-er tilgjengelige for jobber av valgfri lengde; "0" betyr at det ikke finnes noen CPU-er tilgjenglige for øyeblikket og at jobben kommer til å bli satt i kø.
    Ventende jobber
    Antall brukerens forventede jobber foran i køen for denne brukeren. "0" betyr at jobben forventes å kjøres umiddelbart. OBS! Dette er kun et estimat som kan overkjøres av lokale regler.
    Ledig disk (MB)
    Diskplass tilgjengelig for brukeren i en gitt kø (i megabyte). OBS! Dette er kun et estimat da de fleste klynger ikke tilbyr faste diskkvoter.
    Jobbnavn
    ".$clickable.". ".$str_job."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Klynge
    ".$clickable.". Navn på klynge hvor jobben kjøres / ble kjørt. Klikk på klyngenavn for detaljert informasjon om klyngen.
    Kø
    ".$clickable.". Navn på batchkøen hvor jobben kjøres / ble kjørt. ".$str_que."
    CPU-er
    ".$str_cpu."
    ", "" => 0, "Jobbnavn" => 0, "Status" => 0, "CPU (min)" => 0, "Klynger" => 0, "Kø" => 0, "CPU-er" => 0 ), "attlist" => array("0" => "Attributtverdi", "help" => "
    Objekt
    ".$clickable.". Namn på det objekt vars attribut visas. Det kan vara ett klusternamn, ett klusters könamn, ett jobbnamn, ett användarnamn etc. Klicka på namnet för att få en detaljerad beskrivning av objektet.
    Attributt
    För varje objekt, ett eller flera attributtvärden kan listas. Kolumntiteln är det human-readable attributtnamnet (förutom för några MDS-specifika attributt), och Kolumnens innehåll är attributtvärden per objekt inmatade i informationssystemet.
    ", "Objekt" => 0, "Attributt" => 0 ), "quelist" => array("0" => "Kø", "help" => "
    Attributt
    ".$clickable.". Køattributtnavn".$str_att."
    Verdi
    ".$str_val."
    Jobbnavn
    ".$clickable.". ".$str_job."
    Eier
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Minne (KB)
    ".$str_mem."
    CPU-er
    ".$str_cpu."
    ", "" => 0, "Jobbnavn" => 0, "Eier" => 0, "Status" => 0, "CPU (min)" => 0, "Minne (KB)" => 0, "CPU-er" => 0 ), "sestat" => array("0" => "Lagringselementer", "help" => "
    Alias
    Lagringselementets alias som angitt i informasjonssystemet. Det vises maksimalt 15 tegn.
    Total plass
    Total diskplass (GB).
    Ledig plass
    Diskplass tilgjengelig for øyeblikket (GB).
    Navn
    Lagringselementets navn. Både logisk navn og maskinnavn (separert med et kolon, ":") angis. Det logiske navnet brukes av informasjonssystemet for å skille mellom ulike lagringselementer på samme maskin.
    Base-URL
    Lagringselementets URL, oftest en gsiftp:// protokoll. Bruk URL som basis for tilgang til filer.
    Type
    Lagringselementets type. "gridftp-based" indikerer disklagring med gridftp-grensesnitt.
    ", "#" => 0, "Alias" => 0, // "Total plass" => 0, "Ledig/total plass" => 0, "Navn" => 0, "Base-URL" => 0, "Type" => 0 ), "allusers" => array("0" => "Autoriserte gridbrukere:Aktive gridbrukere", "help" => "
    Navn
    ".$clickable.". ".$str_nam."
    Tilknytning
    Brukerens tilknytning som spesifisert i det personlige sertifikatet.
    Jobber
    Totalt antall jobber som denne brukeren har i systemet (kjørende, ventende, ferdige eller slettede).
    Klynger
    Viser antall klynger som denne brukeren er autorisert på.
    ", "#" => 0, "Navn" => 0, "Tilknytning" => 0, "Jobber" => 0, "Klynger" => 0 ), "userres" => array("0" => "", "Klynge:kø" => 0, "Ledige CPU-er" => 0, "Ventende jobber" => 0, "Ledig disk (MB)" => 0 ), "ldapdump" => array("0" => "", "Attributt" => 0, "Verdi" => 0 ), // IS attributes "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Info gyldig f.o.m. (GMT)", "Mds-validto" => "Info gyldig t.o.m. (GMT)" ), "isattr" => array( "nordugrid-cluster-name" => "Front-end domenenavn", "nordugrid-cluster-aliasname" => "Klyngealias", "nordugrid-cluster-contactstring" => "Kontaktstreng", "nordugrid-cluster-interactive-contactstring" => "Interaktiv kontakt", "nordugrid-cluster-comment" => "Kommentar", "nordugrid-cluster-support" => "E-postkontakt", "nordugrid-cluster-acl" => "Autoriserte VO-er", "nordugrid-cluster-lrms-type" => "LRMS-type", "nordugrid-cluster-lrms-version" => "LRMS-versjon", "nordugrid-cluster-lrms-config" => "LRMS-detaljer", "nordugrid-cluster-architecture" => "Arkitektur", "nordugrid-cluster-opsys" => "Operativsystem", "nordugrid-cluster-homogeneity" => "Homogen klynge", "nordugrid-cluster-nodecpu" => "CPU-type (langsomste)", "nordugrid-cluster-nodememory" => "Minne (MB, minste)", "nordugrid-cluster-totalcpus" => "CPU-er, totalt", "nordugrid-cluster-cpudistribution" => "CPU:maskiner", "nordugrid-cluster-benchmark" => "Benchmark", "nordugrid-cluster-sessiondir-free" => "Diskplass tilgjengelig (MB)", "nordugrid-cluster-sessiondir-total" => "Diskplass totalt (MB)", "nordugrid-cluster-sessiondir-lifetime"=> "Gridsesjonens levetid (min)", "nordugrid-cluster-cache-free" => "Cachestørrelse tilgjengelig (MB)", "nordugrid-cluster-cache-total" => "Cachestørrelse totalt (MB)", "nordugrid-cluster-runtimeenvironment" => "Runtimemiljø", "nordugrid-cluster-localse" => "Lagringselement, lokalt", "nordugrid-cluster-middleware" => "Grid-middleware", "nordugrid-cluster-totaljobs" => "Jobber, totalt antall", "nordugrid-cluster-usedcpus" => "CPU-er, opptatte", "nordugrid-cluster-queuedjobs" => "Jobber, ventende", "nordugrid-cluster-prelrmsqueued" => "Grid jobs, awaiting submission", "nordugrid-cluster-location" => "Postnummer", "nordugrid-cluster-owner" => "Eier", "nordugrid-cluster-issuerca" => "Sertifikatutstedere", "nordugrid-cluster-issuerca-hash" => "Certificate issuer's hash", "nordugrid-cluster-trustedca" => "Trusted certificate issuers", "nordugrid-cluster-nodeaccess" => "Node-IP-Oppkobling", "nordugrid-cluster-gridarea" => "SesjonsomrÃ¥de (UtgÃ¥tt)", "nordugrid-cluster-gridspace" => "Griddiskplass (UtgÃ¥tt)", "nordugrid-cluster-opsysdistribution" => "OS-distribusjon (UtgÃ¥tt)", "nordugrid-cluster-runningjobs" => "Kjørende jobber (UtgÃ¥tt)", "nordugrid-cluster-credentialexpirationtime" => "Credential expiration time", "nordugrid-queue-name" => "Kønavn", "nordugrid-queue-comment" => "Kommentar", "nordugrid-queue-status" => "Køstatus", "nordugrid-queue-running" => "CPU-er, opptatte", "nordugrid-queue-localqueued" => "Local jobs, queued", "nordugrid-queue-prelrmsqueued" => "Grid jobs, awaiting submission", "nordugrid-queue-queued" => "Jobber, ventende (UtgÃ¥tt)", "nordugrid-queue-maxrunning" => "Jobber, kjørende (max)", "nordugrid-queue-maxqueuable" => "Jobber, ventende (max)", "nordugrid-queue-maxuserrun" => "Jobber per unixbruker (max)", "nordugrid-queue-maxcputime" => "CPU-tid, max. (min)", "nordugrid-queue-mincputime" => "CPU-tid, min. (min)", "nordugrid-queue-defaultcputime" => "CPU-tid, spesifisert (min)", "nordugrid-queue-maxwalltime" => "Klokketid, max. 
(min)", "nordugrid-queue-minwalltime" => "Klokketid, min. (min)", "nordugrid-queue-defaultwalltime" => "Klokketid, spesifisert (min)", "nordugrid-queue-schedulingpolicy" => "Scheduleringspolicy", "nordugrid-queue-totalcpus" => "CPU-er, totalt", "nordugrid-queue-nodecpu" => "CPU-type", "nordugrid-queue-nodememory" => "Minne (MB)", "nordugrid-queue-architecture" => "Arkitektur", "nordugrid-queue-opsys" => "Operativsystem", "nordugrid-queue-homogeneity" => "Homogen kø", "nordugrid-queue-gridrunning" => "CPUs occupied by Grid jobs", "nordugrid-queue-gridqueued" => "Gridjobber, ventende", "nordugrid-queue-benchmark" => "Benchmark", "nordugrid-queue-assignedcpunumber" => "CPU-er per kø (FÖRLEGAD)", "nordugrid-queue-assignedcputype" => "CPU-type (UtgÃ¥tt)", "nordugrid-job-globalid" => "ID", "nordugrid-job-globalowner" => "Eier", "nordugrid-job-execcluster" => "Eksekveringsklynge", "nordugrid-job-execqueue" => "Eksekveringskø", "nordugrid-job-stdout" => "Standard outputfil", "nordugrid-job-stderr" => "Standard errorfil", "nordugrid-job-stdin" => "Standard inputfil", "nordugrid-job-reqcputime" => "Forlangt CPU-tid", "nordugrid-job-reqwalltime" => "Forlangt klokketid", "nordugrid-job-status" => "Status", "nordugrid-job-queuerank" => "Plass i køen", "nordugrid-job-comment" => "LRMS-kommentar", "nordugrid-job-submissionui" => "Innsendingsmaskin", "nordugrid-job-submissiontime" => "Innsendingstid (GMT)", "nordugrid-job-usedcputime" => "Brukt CPU-tid", "nordugrid-job-usedwalltime" => "Brukt klokketid", "nordugrid-job-completiontime" => "Avslutningstid (GMT)", "nordugrid-job-sessiondirerasetime" => "Slettetid (GMT)", "nordugrid-job-proxyexpirationtime" => "Proxy forfallstid (GMT)", "nordugrid-job-usedmem" => "Brukt minne (KB)", "nordugrid-job-errors" => "Feil", "nordugrid-job-exitcode" => "Returkode", "nordugrid-job-jobname" => "Navn", "nordugrid-job-runtimeenvironment" => "Runtimemiljø", "nordugrid-job-cpucount" => "Forlangte CPU-er", "nordugrid-job-executionnodes" => "Ekseekveringsnoder", "nordugrid-job-gmlog" => "GM loggfil", "nordugrid-job-clientsoftware" => "Klientversjon", "nordugrid-job-rerunable" => "Omkjørbar", "nordugrid-job-reqcput" => "Forlangt tid (UtgÃ¥tt)", "nordugrid-job-gridlog" => "Gridloggfil (UtgÃ¥tt)", "nordugrid-job-lrmscomment" => "LRMS-kommentar (UtgÃ¥tt)", "nordugrid-authuser-name" => "Navn", "nordugrid-authuser-sn" => "Subjektnavn", "nordugrid-authuser-freecpus" => "Ledige CPU-er", "nordugrid-authuser-diskspace" => "Ledig diskplass (MB)", "nordugrid-authuser-queuelength" => "User's queued jobs", "nordugrid-se-name" => "Navn", "nordugrid-se-aliasname" => "Lagringselementalias", "nordugrid-se-type" => "Lagringselementtype", "nordugrid-se-acl" => "Autoriserte VO-er", "nordugrid-se-freespace" => "Ledig plass (MB)", "nordugrid-se-totalspace" => "Totalt utrymme (MB)", "nordugrid-se-url" => "Kontakt-URL", "nordugrid-se-baseurl" => "Kontakt-URL (UtgÃ¥tt)", "nordugrid-se-accesscontrol" => "Tilgangskontroll", "nordugrid-se-authuser" => "Autorisert bruker", "nordugrid-se-location" => "Postnummer", "nordugrid-se-owner" => "Eier", "nordugrid-se-middleware" => "Grid-middleware", "nordugrid-se-issuerca" => "Sertifikatutsteder", "nordugrid-se-issuerca-hash" => "Certificate issuer's hash", "nordugrid-se-trustedca" => "Trusted certificate issuers", "nordugrid-se-comment" => "Kommentar", "nordugrid-rc-name" => "Domenenavn", "nordugrid-rc-aliasname" => "Replikkakatalog-Alias", "nordugrid-rc-baseurl" => "Kontakt-URL", "nordugrid-rc-authuser" => "Autorisert bruker (DN)", "nordugrid-rc-location" => 
"Postnummer", "nordugrid-rc-owner" => "Eier", "nordugrid-rc-issuerca" => "Sertifikatutsteder" ), // Errors, warnings etc "errors" => array( // failure notices "1" => "Kan ikke lese toppnivÃ¥ indekstjenere", "2" => "Ingen av de lokale indekstjenerne returnerte oppkoblingen", "3" => " dÃ¥lig konfigurering eller begäran drog över tiden", "4" => "Ingen gridjobber funnet", "5" => "Ingen informasjon funnet", "6" => "Tjener utilgjengelig", "7" => " - reload senere", "8" => "Ingen køinformasjon funnet", "9" => "Ingen poster funnet", "10" => "Ingen brukere funnet", "11" => "Ikke autorisert pÃ¥ ", "12" => "svarer ikke", "13" => "Ingen nye jobber funnet for ", // debug messages "101" => " Monitor timeout for GRIS: ", "102" => " sek for oppkobling og ", "103" => " sek for søk", "104" => " sek brukere for søk", "105" => "Viser resurser ogsÃ¥ i ", "106" => "Spurte toppnivÃ¥ indekstjenere: ", "107" => "Fikk geografiske data, skannede klynger: ", "108" => " klynger sortert etter geografiske data", "109" => "Søk etter klyngeattributter", "110" => "Søk etter køattributter", "111" => "Ingen data fra ", "112" => " Er oppe i ", "113" => " har ingen resurser Ã¥ tilby", "114" => " Monitor timeout for GIIS: ", "115" => "Hopper over GRIS: ", "116" => "ikke en ", "117" => "Verifiserer oppkobling: ", "118" => "OK", "119" => "Hittil, detekterte resurser av slag ", "120" => "LDAP-feil ved søk etter ", "121" => " status ved ", "122" => "Svartelistede: ", "123" => "Registrert funnet for ", "124" => "Søk etter lagringselementattributter", "125" => "Søk etter brukere", "126" => "Søk etter jobb", "127" => " har jobb ", "128" => " uten være autorisert", "129" => "Kan ikke lade objektdata: feil ", "130" => " Monitor timeout for EMIR: ", // icon titles "301" => "Reload", "302" => "Skriv ut", "303" => "Hjelp", "304" => "Lukk", "305" => "Rød", "306" => "GrÃ¥", "307" => "Alle brukere", "308" => "Aktive brukere", "309" => "Søk", "310" => "Lagringsenheter", "311" => "VO-er", "312" => "Flagg for ", "313" => " gridprosesser og ", "314" => " lokale prosesser", // auxilliary strings "401" => "Prosesser", "402" => "Grid", "403" => "Lokalt", "404" => "Verden", "405" => "TOTALT", "406" => " klynger", "407" => "en masse", "408" => " GB", "409" => " ALLE", "410" => "Klynge", "411" => "Kø", "412" => "Jobb", "413" => "Bruker", "414" => "Lagringsenhet", "415" => "Replikkakatalog", "416" => "Definer søkeattributter for objekt : ", "417" => "Det søkes logisk OG av alle uttrykkene.", "418" => "La det høyre feltet stÃ¥ tomt for Ã¥ vise alt.", "419" => "Vis resurser eller objekt som samsvarer med ditt valg", "420" => "Særskilt navn", "421" => "Kan bruke totalt ", "422" => " klynger", "423" => "Resurs / objekt:", "424" => "Antall attributter (standard er 6):", "425" => "Objekt", "426" => "Neste", "427" => "Velg", "428" => "Gjenopprett", "429" => "VIS" ), // Post code conversion "tlconvert" => array ( "Australia" => "Australia", "Austria" => "Österrike", "Armenia" => "Armenia", "Algeria" => "Algerie", "Belgium" => "Belgia", "Bulgaria" => "Bulgaria", "Canada" => "Canada", "China" => "Kina", "Czechia" => "Tsjekkia", "Denmark" => "Danmark", "Estonia" => "Estland", "Finland" => "Finland", "France" => "Frankrike", "Georgia" => "Georgia", "Germany" => "Tyskland", "Greece" => "Hellas", "Hungary" => "Ungarn", "Iceland" => "Island", "Ireland" => "Irland", "Italy" => "Italia", "Japan" => "Japan", "Latvia" => "Lettland", "Lithuania" => "Litauen", "Morocco" => "Marokko", "Netherlands" => "Nederland", "Norway" => "Norge", "Poland" => "Polen", "Portugal" => 
"Portugal", "Romania" => "Romania", "Russia" => "Russland", "SriLanka" => "Sri Lanka", "Sweden" => "Sverige", "Slovakia" => "Slovakia", "Slovenia" => "Slovenia", "Switzerland" => "Sveits", "Turkey" => "Tyrkia", "UK" => "Storbritannia", "Ukraine" => "Ukraina", "USA" => "USA", "World" => "Verden" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/da.inc0000644000000000000000000000013214152153376023445 xustar000000000000000030 mtime=1638455038.439646485 30 atime=1638455038.512647582 30 ctime=1638455100.611580647 nordugrid-arc-6.14.0/src/services/monitor/lang/da.inc0000644000175000002070000014253014152153376023437 0ustar00mockbuildmock00000000000000 N/A viser at brugeren ikke tildelte et navn.
    X viser at jobbet er slået ihjel af ejeren.
    ! viser at jobbet fejlede i systemet.
    Tryk på et navn for at få en detaljeret beskrivelse af jobbet."; $str_nam = "Navn på brugeren som angivet i det personlige certifikat. Tryk på navnet for at få en liste af alle ressourcer tilgængelige for denne bruger og alle brugerens job i systemet."; $str_sta = "Jobstatus som returneret af Gridmanageren (GM) og LRMS. Tilstandene er i sekventiel rækkefølge:
    ACCEPTED – jobbet er overført til systemet men endnu ikke behandlet
    PREPARING – inputfilerne hentes
    SUBMITTING – der udveksles data med LRMS
    INLRMS – jobbet overføres til LRMS; intern status tilføjes af informationssystemet. Mulige tilstande er:
    : Q – jobbet venter i kø
    : U – jobbets udførelse er udskudt på en travl knude (PBSPro)
    : S – jobbets udførelse er udskudt (Condor)
    : R, run – jobbet kører
    : E – jobbet er færdigt (PBS)
    FINISHING – uddatafilerne overføres af GM
    FINISHED – jobbet er færdigt; tidsstemplet tilføjes af informationssystemet
    CANCELING – jobbet afbrydes
    DELETED – jobbet er ikke afryddet på anmodning af brugeren men fjernet af GM fordi udløbstiden er passeret
    Hver tilstand kan rapporteres med et PENDING præfiks som betyder at GM forsøger at rykke jobbet op i næste tilstand"; $str_tim = "CPU-tid brugt at jobbet, minutter."; $str_mem = "Lager (RAM) brugt af jobbet, KB"; $str_cpu = "Antal processorer brugt af jobbet."; // Actual messages $message = array ( // Table headers and help (do not localize "0" and "help" keys) // For "help", keywords in
    must correspond to column titles below the help text ) "loadmon" => array( "0" => "Grid Monitor", "help" => "
Denne skærmside viser alle steder, der registrerer sig hos den øverste ARC indekseringstjeneste sorteret først efter land så efter maskinnavn. Udvalgte lokale parametre overvåges: klyngealias, kø, job, o.s.v. Brug "Search" tjenesten hvis du vil sammenligne klynger, køer, job, osv.
    Land
    ".$clickable.". Landets flag og navn fra tilgængelig resource. Tryk for vise landeinformation.
    Klynge
    ".$clickable.". Klyngealias som tildelt af ejeren. Højst 22 tegn vises. Tryk på aliases for deltaljerede klyngebeskrivelse.
    CPU-er
    Totalt antal CPU-er i en klynge. NB! Kun en del af disse kan bruges af Grid-brugere.
    Belastning (processer:Grid + lokale)
    ".$clickable.". Relativ klyngebelastning, svarende til antallet af optagede CPU-er . Grå felter viser processorer optaget af lokale job, røde felter viser CPU-er optaget af Grid-job. Tryk på feltet for at få en detaljeret liste med alle kørende Grid-job på klyngen , inklusiv antallet af processorer per job.
    job I Kø
    ".$clickable.". Antal job i køen på klyngen, vist som antallet Grid-job plus antal lokale job i køen. Tryk på det første antal for at få en liste af Grid-job i køen på klyngen
    ", "Land" => 30, "Sted" => 160, "CPU-er" => 10, "Belastning (processer: Grid+lokale)" => 210, "I kø" => 10 ), "clusdes" => array("0" => "Ressourcedetaljer for", "help" => "
    Attribut
    ".$clickable.". Klyngeattributnavn".$str_att."
    Værdi
    ".$str_val."
    ".$clickable.". Navn på batchkøer til rådighed for ARC brugere, som angivet af klyngeejere. ".$str_que."
    Status
    Køstatus. Fungerende køer viser som regel active status.
    CPU (min)
Tidsgrænse for varigheden af job per kø, hvis sat, i CPU-minutter. Den første værdi er den nedre grænse, den anden er den øvre grænse. Hvis der ikke er en grænse (job med enhver varighed accepteres), vises N/A.
    Kørende
Antal job der udføres i køen. Det totale antal job vises, med antallet af processorer optaget af Grid-job vist i parentes, fx (Grid: 12). NB! For parallelle multiprocessorjob kan tallet i parentes være større end antallet af job.
    I kø
    Antallet af job, der venter i køen på at komme til at køre. Det totale antal vises, med Grid-job i parentes, fx (Grid: 235)
    ", "Kø" => 0, "Mapping Queue" => 0, "Status" => 0, "Grænse (min)" => 0, "CPU-er" => 0, "kørende" => 0, "I Kø" => 0 ), "jobstat" => array("0" => "Job på:Job ID", "help" => "
    JOBLISTE:
    Job navn
    ".$clickable.". Navn på et job som tildelt af ejeren. Hvis der ikke er tildelt et navn, vises "N/A". Tryk på et navn for at få en detaljeret beskrivelse af jobbet.
    Ejer
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    ".$clickable.". Navn på batchkøen, hvor jobbet udføres. ".$str_que."
    CPU'er
    ".$str_cpu."
    JOBDETALJER:
    Attribut
    ".$clickable.". Jobattributens navn".$str_att."
    Værdi
    ".$str_val."
    ", "Job navn" => 0, "Ejer" => 0, "Status" => 0, "CPU (min)" => 0, "Kø" => 0, "CPU'er" => 0 ), "volist" => array("0" => "Virtuel Organisations", "help" => "
    Virtuel Organisation
    ".$clickable.". Group of users, typically sharing common activities and resources, authorised at at least one ARC-enabled site. Click on the navn to get the list of group members.
    Members
    Number of group members.
    Served by
    LDAP server that supports the group membership database.
    ", "Virtuel Organisation" => 0, "Medlemmer" => 0, "Server" => 0 ), "vousers" => array("0" => "Gridbrugerbase", "help" => "
    Navn
    ".$clickable.". ".$str_nam."
    Tilknytning
    Brugerens hjemmeinstitut som anmeldt af VO bestyreren. Kan være tom.
    E-mail
    ".$clickable.". Brugerens e-mail som anmeldt af VO bestyreren. Kan være tom. Tryk på adressen for sende en email til brugeren.
    ", "#" => 0, "Navn" => 0, "Tilknytning" => 0, "E-mail" => 0 ), "userlist" => array("0" => "Information om", "help" => "
    Klynge:kø
    ".$clickable.". Navne på klynger of respektive køer (adskilt af kolon, ":") hvor en bruger er autoriseret til at indlevere job. Hvis brugeren ikke er autoriseret, vises beskeden: "Ikke authoriset på vært ...". Tryk på et klyngenavn for at få en detaljeret beskrivelse af klyngen. Tryk på et kønavn for at få en detaljeret beskrivelse af køen
    Ledige CPU'er
Det aktuelle antal ledige CPU'er i en given kø til rådighed for brugeren, evt. tilføjet den øvre grænse for varigheden af jobs (i minutter). Fx betyder "3" at der er 3 CPU'er til rådighed for et job med ubegrænset køretid; "4:360" angiver at der er 4 CPU'er til rådighed for job, der kører mindre end 6 timer; "10:180 30" betyder at der er 10 CPU'er til rådighed for job, der kører mindre end 3 timer, samt 30 CPU'er til rådighed for jobs, der kan køre en vilkårlig tid; "0" betyder at der ikke er nogen CPU'er til rådighed for tiden, og nye job vil havne i en ventekø.
    Job i ventekø
Antal brugerjob, der forventes at være foran en brugers nye job i en ventekø. Antallet "0" betyder at jobbet forventes udført med det samme. NB! Det er kun et estimat, som kan tilsidesættes af lokale regler.
    Ledig diskplads (MB)
Diskplads til rådighed for brugeren i en given kø (i MegaBytes). NB! Det er kun et estimat, da de færreste klynger kan tilbyde faste diskpladskvoter.
    Jobnavn
    ".$clickable.". ".$str_job."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Klynge
    ".$clickable.". Navn på klyngen hvor jobbet udføres. Tryk på et klyngenavn for at få detaljerede oplysninger om klyngen.
    ".$clickable.". Navn på batchkøen hvor jobbet udføres eller blev udført. ".$str_que."
    CPUs
    ".$str_cpu."
    ", "" => 0, "Jobnavn" => 0, "Status" => 0, "CPU (min)" => 0, "Klynge" => 0, "Kø" => 0, "CPU'er" => 0 ), "attlist" => array("0" => "Attributværdier", "help" => "
    Objekt
    ".$clickable.". Navn objektet hvis attributter vises. Det kan være et klyngenavn, et klyngekønavn, et jobnavn, et brugernavn osv. Tryk på navneteksten for at få en detaljeret beskrivelse af objektet.
    Attribut
    For hvert objekt kan en eller flere værdier vises. Kolonnetitel er det menneskelæselige navn (bortset fra visse MDS-specifikke attributter) og indholdet i kolonnen er attributværdier for objektet som det blev til indtastet i informationssystemet.
    ", "Objelt" => 0, "Attribut" => 0 ), "quelist" => array("0" => "Kø", "help" => "
    Attribut
    ".$clickable.". Navn på en køattribut".$str_att."
    Værdi
    ".$str_val."
    Jobnavn
    ".$clickable.". ".$str_job."
    Ejer
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Lager (RAM) (KB)
    ".$str_mem."
    CPUs
    ".$str_cpu."
    ", "" => 0, "Jobnavn" => 0, "Ejer" => 0, "Status" => 0, "CPU (min)" => 0, "Lager (RAM) (KB)" => 0, "CPU'er" => 0 ), "sestat" => array("0" => "Lagerenhed", "help" => "
    Alias
Lagerenhedens alias som angivet i informationssystemet. Højst 15 tegn vises.
    Total lagerplads
Total lagerplads på harddisken, GigaByte.
Ledig plads
Aktuel ledig diskplads, GigaByte.
    Navn
Lagerenhedens navn, bestående af et logisk navn og et værtsnavn (adskilt af kolon, ":"). Det logiske navn bruges kun af hensyn til informationssystemet, for at skelne mellem forskellige lagringsenheder på den samme maskine.
    Grund-URL
    Lagringsenhedens URL, som regel en gsiftp:// protokol. Brug denne URL som udgangspunkt for at tilgå filer.
    Type
    Lagringselement type. "gridftp-baseret" angiver en harddisk med GridFTP-grænseflade.
    ", "#" => 0, "Alias" => 0, // "Total lagerplads" => 0, "Ledig/Total Diskplads, GB" => 0, "Navn" => 0, "Grund-URL" => 0, "Type" => 0 ), "allusers" => array("0" => "Authoriserede Gridbrugere:Aktive Gridbrugere", "help" => "
    Navn
    ".$clickable.". ".$str_nam."
    Tilknytning
    Brugertilknytning, uddraget af det personlige certifikat
    Job
    Antal brugerjob i systemet (kørende, ventende, afsluttede eller slettede)
    Steder
    Viser hvor mange steder brugeren er autoriseret
    ", "#" => 0, "Navn" => 0, "Tilknytning" => 0, "Job" => 0, "Steder" => 0 ), "userres" => array("0" => "", "Klynge:kø" => 0, "Ledige CPU'er" => 0, "Job, i ventekø" => 0, "Ledig diskplads (MegaByte)" => 0 ), "ldapdump" => array("0" => "", "Attribut" => 0, "Værdi" => 0 ), // IS attributes "mdsattr" => array( "objectClass" => "objektklasse", "Mds-validfrom" => "Info gyldig fra (GMT)", "Mds-validto" => "Info gyldig til (GMT)" ), "isattr" => array( "nordugrid-cluster-name" => "Frontend domænenavn", "nordugrid-cluster-aliasname" => "Klyngealias", "nordugrid-cluster-contactstring" => "Kontakttekst", "nordugrid-cluster-interactive-contactstring" => "Interaktiv kontakt", "nordugrid-cluster-comment" => "Kommentar", "nordugrid-cluster-support" => "E-mail kontact", "nordugrid-cluster-acl" => "Authoriserede VO'er", "nordugrid-cluster-lrms-type" => "LRMS type", "nordugrid-cluster-lrms-version" => "LRMS version", "nordugrid-cluster-lrms-config" => "LRMS detaljer", "nordugrid-cluster-architecture" => "Arkitektur", "nordugrid-cluster-opsys" => "Styresystem", "nordugrid-cluster-homogeneity" => "Homogen klynge", "nordugrid-cluster-nodecpu" => "CPU type (langsomste)", "nordugrid-cluster-nodememory" => "Lager (MB, mindst)", "nordugrid-cluster-totalcpus" => "CPU's, i alt", "nordugrid-cluster-cpudistribution" => "CPU:maskiner", "nordugrid-cluster-benchmark" => "Benchmark", "nordugrid-cluster-sessiondir-free" => "Harddiskplads, til rådighed (MB)", "nordugrid-cluster-sessiondir-total" => "Harddiskplads, i alt (MB)", "nordugrid-cluster-sessiondir-lifetime"=> "Gridsession levetid (min)", "nordugrid-cluster-cache-free" => "Cachestørrelse, til rådighed (MB)", "nordugrid-cluster-cache-total" => "Cachestørrelse, i alt (MB)", "nordugrid-cluster-runtimeenvironment" => "Køretidsomgivelser", "nordugrid-cluster-localse" => "Lagringsenhed, lokal", "nordugrid-cluster-middleware" => "Gridmiddleware", "nordugrid-cluster-totaljobs" => "Jobs, samlet antal", "nordugrid-cluster-usedcpus" => "CPU'er, optagede", "nordugrid-cluster-queuedjobs" => "Jobs, i kø", "nordugrid-cluster-prelrmsqueued" => "Grid jobs, awaiting submission", "nordugrid-cluster-location" => "Postnummer", "nordugrid-cluster-owner" => "Ejer", "nordugrid-cluster-issuerca" => "Certifikatudsteder", "nordugrid-cluster-issuerca-hash" => "Certificate issuer's hash", "nordugrid-cluster-trustedca" => "Trusted certificate issuers", "nordugrid-cluster-nodeaccess" => "Node IP sammenhæng", "nordugrid-cluster-gridarea" => "Session area (FORÑ„LDET)", "nordugrid-cluster-gridspace" => "Griddiskplads (FORÑ„LDET)", "nordugrid-cluster-opsysdistribution" => "OS fordeling (FORÑ„LDET)", "nordugrid-cluster-runningjobs" => "Job, kørende (FORÑ„LDET)", "nordugrid-cluster-credentialexpirationtime" => "Credential expiration time", "nordugrid-queue-name" => "Kønavn", "nordugrid-queue-comment" => "Kommentar", "nordugrid-queue-status" => "Køstatus", "nordugrid-queue-running" => "CPU'er, optagede", "nordugrid-queue-localqueued" => "Local jobs, queued", "nordugrid-queue-prelrmsqueued" => "Grid jobs, awaiting submission", "nordugrid-queue-queued" => "Job, i ventekø (FORÑ„LDET)", "nordugrid-queue-maxrunning" => "Jobs, kørende (max)", "nordugrid-queue-maxqueuable" => "Jobs, kan udskydes (max)", "nordugrid-queue-maxuserrun" => "Jobs per Unixbruger (max)", "nordugrid-queue-maxcputime" => "CPU-tid, max. (minutter)", "nordugrid-queue-mincputime" => "CPU-tid, min. (minutter)", "nordugrid-queue-defaultcputime" => "CPU-tid, default (minutter)", "nordugrid-queue-maxwalltime" => "Vægurstid, max. 
(minutter)", "nordugrid-queue-minwalltime" => "Vægurstid, min. (minutter)", "nordugrid-queue-defaultwalltime" => "Vægurstid, default (minutter)", "nordugrid-queue-schedulingpolicy" => "Skeduleringspolitik", "nordugrid-queue-totalcpus" => "CPU'er, i alt", "nordugrid-queue-nodecpu" => "CPUtype", "nordugrid-queue-nodememory" => "Lager (RAM) (MB)", "nordugrid-queue-architecture" => "Arkitektur", "nordugrid-queue-opsys" => "Styresystem", "nordugrid-queue-homogeneity" => "Homogen kø", "nordugrid-queue-gridrunning" => "CPU'er, optagede af Gridjobs", "nordugrid-queue-gridqueued" => "Gridjobs, i ventekø", "nordugrid-queue-benchmark" => "Benchmark", "nordugrid-queue-assignedcpunumber" => "CPU'er per kø (FORÑ„LDET)", "nordugrid-queue-assignedcputype" => "CPUtype (FORÑ„LDET)", "nordugrid-job-globalid" => "ID", "nordugrid-job-globalowner" => "Ejer", "nordugrid-job-execcluster" => "Execution cluster", "nordugrid-job-execqueue" => "Aktivkø", "nordugrid-job-stdout" => "Standard uddatafil", "nordugrid-job-stderr" => "Standard fejlfil", "nordugrid-job-stdin" => "Standard inddatafile", "nordugrid-job-reqcputime" => "Anmodet CPU-tid", "nordugrid-job-reqwalltime" => "Anmodet vægurstid", "nordugrid-job-status" => "Status", "nordugrid-job-queuerank" => "Position i køen", "nordugrid-job-comment" => "LRMS kommentar", "nordugrid-job-submissionui" => "Indleveringsmaskine", "nordugrid-job-submissiontime" => "Indleveringstid (GMT)", "nordugrid-job-usedcputime" => "Forbrugt CPU-tid", "nordugrid-job-usedwalltime" => "Forbrugt vægurstid", "nordugrid-job-completiontime" => "Job afslutningstidspunkt (GMT)", "nordugrid-job-sessiondirerasetime" => "Sletningstidspunkt (GMT)", "nordugrid-job-proxyexpirationtime" => "Proxy udløbstidspunkt (GMT)", "nordugrid-job-usedmem" => "Benyttet lager (RAM) (KB)", "nordugrid-job-errors" => "Fejl", "nordugrid-job-exitcode" => "Afslutningskode", "nordugrid-job-jobname" => "Navn", "nordugrid-job-runtimeenvironment" => "Køretidsomgivelser", "nordugrid-job-cpucount" => "Anmodede CPU'er", "nordugrid-job-executionnodes" => "Udførelsesknuder", "nordugrid-job-gmlog" => "GM logfil", "nordugrid-job-clientsoftware" => "klientversion", "nordugrid-job-rerunable" => "Genkørbare", "nordugrid-job-reqcput" => "Anmodet tid (Forældet)", "nordugrid-job-gridlog" => "Gridlogfil (Forældet)", "nordugrid-job-lrmscomment" => "LRMS kommentar (Forældet)", "nordugrid-authuser-name" => "Navn", "nordugrid-authuser-sn" => "Subjektnavn", "nordugrid-authuser-freecpus" => "Ledige CPU'er", "nordugrid-authuser-diskspace" => "Ledig harddiskplads (MB)", "nordugrid-authuser-queuelength" => "User's queued jobs", "nordugrid-se-name" => "Nabn", "nordugrid-se-aliasname" => "Lagerelement alias", "nordugrid-se-type" => "Lagerelement type", "nordugrid-se-acl" => "Authoriserede VO'er", "nordugrid-se-freespace" => "Legid plads (MB)", "nordugrid-se-totalspace" => "Total plads (MB)", "nordugrid-se-url" => "Kontakt URL", "nordugrid-se-baseurl" => "Kontact URL (Forældet)", "nordugrid-se-accesscontrol" => "Adgangskontrol", "nordugrid-se-authuser" => "Autoriseret bruger (DN)", "nordugrid-se-location" => "Postnummer", "nordugrid-se-owner" => "Ejer", "nordugrid-se-middleware" => "Middleware", "nordugrid-se-issuerca" => "Certifikatudsteder", "nordugrid-se-issuerca-hash" => "Certificate issuer's hash", "nordugrid-se-trustedca" => "Trusted certificate issuers", "nordugrid-se-comment" => "Kommentar", "nordugrid-rc-name" => "Domænenavn", "nordugrid-rc-aliasname" => "Replikeringskatalogalias", "nordugrid-rc-baseurl" => "Kontact URL", 
"nordugrid-rc-authuser" => "Autoriseret bruger (DN)", "nordugrid-rc-location" => "Postnummer", "nordugrid-rc-owner" => "Ejer", "nordugrid-rc-issuerca" => "Certifikatudsteder" ), // Errors, warnings etc "errors" => array( // failure notices "1" => "Kan ikke topniveau ressourceindekser", "2" => "ingen af de lokale indekser returnerede en forbindelse", "3" => " dårlig konfigurations eller anmodning udløb", "4" => "Ingen Gridjobs fundet", "5" => "ingen information fundet", "6" => "Server ikke tilgængelig", "7" => " - genlæs senere", "8" => "ingen køinformationer fundet", "9" => "Ingen indgange fundet", "10" => "Ingen brugere fundet", "11" => "Ingen autoriserede på værten", "12" => "svarer ikke", "13" => "Fandt ingen nylige jobs for ", // debug messages "101" => " Monitor timeout for GRIS: ", "102" => " sek on forbindelse og ", "103" => " sek on søgning", "104" => " sek brugt på at søge", "105" => "Viser kun ressourcer i ", "106" => "Spurgte topniveau indeksservere: ", "107" => "Fik geokrafiske placeringer, skanned steder: ", "108" => " steder sorteret efter geografisk placering", "109" => "Leder efter klyngeattributter", "110" => "Leder efter køattributter", "111" => "Ingen daya fra ", "112" => " er oppe i ", "113" => " tilbyder ingen ressourcer", "114" => " Monitor timeouts for GIIS: ", "115" => "springer over GRIS: ", "116" => "ikke en ", "117" => "Checker forbindelse: ", "118" => "OK", "119" => "Så vidt, opdagede ressource at typen ", "120" => "LDAP fejl ved søgning ", "121" => " status ved ", "122" => "Sortlistet: ", "123" => "Registrant fundet for ", "124" => "Led efter SE attributter", "125" => "Led efter brugere", "126" => "Led efter jobs", "127" => " har job ", "128" => " men ikke autoriseret", "129" => "Kan ikke få objektdata: fejl ", "130" => " Monitor timeouts for EMIR: ", // icon titles "301" => "Genlæs", "302" => "Udskriv", "303" => "Hjælp", "304" => "Luk", "305" => "Rød", "306" => "Grå", "307" => "Alle brugere", "308" => "Aktive brugere", "309" => "Søg", "310" => "Lager", "311" => "VO-er", "312" => "Flaget for ", "313" => " Grid processer og ", "314" => " locale processer", // auxilliary strings "401" => "Processer", "402" => "Grid", "403" => "Lokal", "404" => "Verden", "405" => "TOTAL", "406" => " steder", "407" => "en masse", "408" => " GB", "409" => " ALLE", "410" => "Klynge", "411" => "Kø", "412" => "Job", "413" => "Bruger", "414" => "Lager", "415" => "Replikerings Kat.", "416" => "Definer attributter for at vise objektet: ", "417" => "logisk OG af alle udtrykkene findes", "418" => "Efterlad feltet længst til højre tomt for vise alt", "419" => "Vis de ressourcer eller objekter, du vil", "420" => "Distinguished name", "421" => "Kan bruge i alt ", "422" => " steder", "423" => "Ressource / objekt:", "424" => "Ant. attributter (def. 6):", "425" => "Objekt", "426" => "Næste", "427" => "Vælg een", "428" => "Nulstil", "429" => "VIS" ), // Post code conversion: only for [en]! 
"tlconvert" => array ( "Australia" => "Australien", "Austria" => "ÑŒstrig", "Armenia" => "Armenien", "Algeria" => "Algeriet", "Belgium" => "Belgien", "Bulgaria" => "Bulgarien", "Canada" => "Canada", "China" => "Kina", "Czechia" => "Tjekkiet", "Denmark" => "Danmark", "Estonia" => "Estland", "Finland" => "Finland", "France" => "Frankrig", "Georgia" => "Georgien", "Germany" => "Tyskland", "Greece" => "Grækenland", "Hungary" => "Ungarn", "Iceland" => "Island", "Ireland" => "Irland", "Italy" => "Italien", "Japan" => "Japan", "Latvia" => "Letland", "Lithuania" => "Lithauen", "Morocco" => "Marocco", "Netherlands" => "Nederlandene", "Norway" => "Norge", "Poland" => "Polen", "Portugal" => "Portugal", "Romania" => "Rumænien", "Russia" => "Rusland", "SriLanka" => "Sri Lanka", "Sweden" => "Sverige", "Slovakia" => "Slovakiet", "Slovenia" => "Slovenien", "Switzerland" => "Schweiz", "Turkey" => "Tyrkiet", "UK" => "UK", "Ukraine" => "Ukraine", "USA" => "USA", "World" => "Verden" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/uk.inc0000644000000000000000000000013214152153376023500 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 30 ctime=1638455100.620580782 nordugrid-arc-6.14.0/src/services/monitor/lang/uk.inc0000644000175000002070000017333514152153376023501 0ustar00mockbuildmock00000000000000 // -- Author: oxana.smirnova@hep.lu.se // Some common strings: $clickable = "ПОСИЛÐÐÐЯ"; $str_att = ", інтерпретировані Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñтоти Ñ‡Ð¸Ñ‚Ð°Ð½Ð½Ñ (за виключеннÑм декількох атрибутів, Ñпецифічних Ð´Ð»Ñ ÑиÑтеми MDS). За кліком выводÑтьÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð½Ñ Ñ†ÑŒÐ¾Ð³Ð¾ атрибута на вÑÑ–Ñ… відомих реÑурÑах ARC."; $str_val = "Ð—Ð½Ð°Ñ‡ÐµÐ½Ð½Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð°, запиÑане в інформаційній ÑиÑтемі."; $str_que = "Зазвичай черги розрізнÑютьÑÑ Ð°Ð±Ð¾ за допуÑтимою триваліÑтю обрахунку, або за допущеною групою кориÑтувачів. За кліком виводитьÑÑ Ð¿Ð¾Ð²Ð½Ð¸Ð¹ Ð¾Ð¿Ð¸Ñ Ñ‡ÐµÑ€Ð³Ð¸, що міÑтить ÑпиÑок вÑÑ–Ñ… відомих завдань: в обрахунку, в черзі та завершених."; $str_job = " Ім'Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ, приÑвоєне хазÑїном.
N/A означає, що хазяїн не присвоїв ніякого імені.
X означає, що хазяїн відмінив виконання завдання.
! означає, що при виконанні завдання виникла помилка.
За кліком виводиться детальний опис завдання."; $str_nam = "Ім'я користувача, у відповідності до його особистого сертифікату. За кліком виводиться зведена таблиця всіх Грід-ресурсів, доступних даному користувачу, і список всіх його завдань, зареєстрованих на разі в системі."; $str_sta = "Стан завдання: стадія прогресу в ГМ або стан в ЛСКР. Послідовність можливих станів така:
ACCEPTED – завдання прийнято, але виконання ще не почалось
PREPARING – підвантажуються необхідні вхідні дані
SUBMITTING – завдання направляється до ЛСКР
INLRMS – управління завданням передано в ЛСКР; інформаційна система висвітлює відомості про внутрішній стан завдання. Можливі наступні стани:
: Q – завдання очікує у черзі
: U – завдання призупинено на перевантаженому вузлі (PBSPro)
: S – завдання призупинено (Condor)
: R, run – завдання виконується
: E – завдання завершується (PBS)
FINISHING – вихідні дані пересилаються за призначенням
FINISHED – завдання завершене; інформаційна система додає мітку часу завершення
CANCELING – завдання відміняється
DELETED – результати завдання не були вивантажені його хазяїном і були знищені сервером після того як сплив час зберігання (зазвичай 24 години).
До кожного стану може бути додана приставка \"PENDING:\", що означає, що ГМ не може в даний момент перейти до наступного етапу виконання через відповідні внутрішні обмеження."; $str_tim = "Процесорний час, витрачений завданням, у хвилинах."; $str_mem = "Об'єм оперативної пам'яті, що використовує завдання на разі, в кілобайтах"; $str_cpu = "Число процесорів, що займає завдання."; // Actual messages $message = array ( // Table headers and help "loadmon" => array( "0" => "Грід-монітор", "help" => "
    У цьому вікні приведена Ñ‚Ð°Ð±Ð»Ð¸Ñ†Ñ Ð²ÑÑ–Ñ… обчиÑлювальних реÑурÑів, що реєÑтруютьÑÑ Ñƒ ÑпиÑки вищого Ñ€Ñ–Ð²Ð½Ñ ARC. Ð¢Ð°Ð±Ð»Ð¸Ñ†Ñ Ð²Ð¿Ð¾Ñ€Ñдкована по англійÑькій назві країни, Ñ– в кожній країні – за іменем керуючої машини. Ð”Ð»Ñ ÐºÐ¾Ð¶Ð½Ð¾Ð³Ð¾ реÑурÑа виведені наÑтупні параметри: назва, загальна кількіÑть процеÑорів, чиÑло зайнÑтих процеÑорів, а також кількіÑть завдань у черзі, Ñк заÑланих через Грід, так Ñ– міÑцевих. ВикориÑтовуйте утиліту \"Пошук\" Ð´Ð»Ñ Ð¾Ð³Ð»Ñду та порівнÑÐ½Ð½Ñ Ñ–Ð½ÑˆÐ¸Ñ… параметрів клаÑтерів, черг, завдань Ñ– Ñ‚.д.
    Країна
    ".$clickable.". Прапор та назва країни, Ñк Ñлідує із доÑтупного опиÑу реÑурÑу. За кліком виводитьÑÑ Ð·Ð²ÐµÐ´ÐµÐ½Ð° Ñ‚Ð°Ð±Ð»Ð¸Ñ†Ñ Ñ‚Ñ–Ð»ÑŒÐºÐ¸ Ð´Ð»Ñ Ñ†Ñ–Ñ”Ñ— країни.
    РеÑурÑ
    ".$clickable.". Ðазва реÑурÑа (зазвичай клаÑтера), приÑвоєна влаÑником. Довжина Ñ€Ñдка не повинна перевищувати 22 Ñимвола. За кліком виводитьÑÑ Ð¿Ð¾Ð²Ð½Ð¸Ð¹ Ð¾Ð¿Ð¸Ñ Ñ€ÐµÑурÑу (клаÑтера).
    ЦП
    Загальна кількіÑть процеÑорів (Ñдер) у клаÑтері. Увага! Тільки чаÑтина з них може бути доÑтупна кориÑтувачам грід.
    ЗавантаженіÑть (процеÑори)
    ".$clickable.". ВідноÑна завантаженіÑть клаÑтера, виходÑчи із чиÑла зайнÑтих процеÑорів. Сіра Ñмуга відповідає кількоÑті процеÑорів, зайнÑтих під міÑцеві завданнÑ, а червона Ñмуга вказує кількіÑть процеÑорів, що виконують грід-завданнÑ. За кліком виводитьÑÑ ÑпиÑок вÑÑ–Ñ… активних грід-завдань на клаÑтері, включаючи інформацію про чиÑло процеÑорів на кожне завданнÑ.
    Очікують
    ".$clickable.". ЧиÑло вÑÑ–Ñ… завдань, що ÑтоÑть у черзі на даному клаÑтері, предÑтавлене у виглÑді Ñуми грід- Ñ– локальних завдань. За кліком на першій цифрі виводитьÑÑ ÑпиÑок вÑÑ–Ñ… завдань у черзі, заÑланих через грід.
    ", "Країна" => 30, "РеÑурÑ" => 160, "ЦП" => 10, "ЗавантаженіÑть (процеÑори)" => 210, "Очікують" => 10 ), "clusdes" => array( "0" => "ÐžÐ¿Ð¸Ñ Ñ€ÐµÑурÑу", "help" => "
    Ðтрибут
    ".$clickable.". Ðазви атрибутів клаÑтера".$str_att."
    ЗначеннÑ
    ".$str_val."
    Черга
    ".$clickable.". Ðазви черг (приÑвоєні влаÑниками), що Ñ” доÑтупними Ð´Ð»Ñ Ð³Ñ€Ñ–Ð´-кориÑтувачів. ".$str_que."
    Стан
    Стан черги. Ðктивна черга зазвичай видає Ñтан active.
    ТриваліÑть (хв)
    Межі по чаÑу на триваліÑть Ð¿ÐµÑ€ÐµÐ±ÑƒÐ²Ð°Ð½Ð½Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ Ð² черзі, Ñкщо такі вÑтановлені, у хвилинах процеÑорного чаÑу. Перше Ð·Ð½Ð°Ñ‡ÐµÐ½Ð½Ñ Ð²Ñ–Ð´Ð¿Ð¾Ð²Ñ–Ð´Ð°Ñ” нижній межі, друге – верхній. Якщо межі не вÑтановлені (тобто черга приймає Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ Ð±ÑƒÐ´ÑŒ-Ñкої тривалоÑті), виводитьÑÑ Ð¼Ñ–Ñ‚ÐºÐ° N/A.
    РахуютьÑÑ
    ЧиÑло завдань, що обраховуютьÑÑ Ð² черзі. Показано загальне чиÑло завдань, причому чиÑло процеÑорів, зайнÑтих під грід-завданнÑ, вказано в дужках, наприклад: (Грід: 12). Увага! За наÑвноÑті паралельних багатопроцеÑорних завдань, чиÑло в дужках может перевищувати загальне чиÑло завдань.
    Очікують
    ЧиÑло завдань, що чекують на Ð²Ð¸ÐºÐ¾Ð½Ð°Ð½Ð½Ñ Ð² черзі. Показано загальне чиÑло завдань, причому кількіÑть завдань, заÑланих через Грід, вказано в дужках, наприклад: (Грід: 235).
    ", "Черга" => 0, "Mapping Queue" => 0, "Стан" => 0, "ТриваліÑть (хв)" => 0, "ЦП" => 0, "РахуютьÑÑ" => 0, "Очікують" => 0 ), "jobstat" => array( "0" => "Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ Ð½Ð°:Ярлик завданнÑ", "help" => "
    СПИСОК завдань:
    Ім'Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ
    ".$clickable.". Ім'Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ, приÑвоєне хазÑїном. N/A означає, що хазÑїн не приÑвоїв ниÑкого імені. За кліком виводитьÑÑ Ð´ÐµÑ‚Ð°Ð»ÑŒÐ½Ð¸Ð¹ Ð¾Ð¿Ð¸Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ.
    ХазÑїн
    ".$clickable.". ".$str_nam."
    Стан
    ".$str_sta."
    Ð§Ð°Ñ (хв)
    ".$str_tim."
    Черга
    ".$clickable.". Ðазва черги ЛСКР, у котрій проходить Ð²Ð¸ÐºÐ¾Ð½Ð°Ð½Ð½Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ.".$str_que."
    ЦП
    ".$str_cpu."
    ОПИС завданнÑ:
    Ðтрибут
    ".$clickable.". Ðазви атрибутів завданнÑ.".$str_att."
    ЗначеннÑ
    ".$str_val."
    ", "Ім'Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ" => 0, "ХазÑїн" => 0, "Стан" => 0, "Ð§Ð°Ñ (хв)" => 0, "Черга" => 0, "ЦП" => 0 ), "volist" => array( "0" => "Віртуальні організації", "help" => "
    Віртуальні організації
    ".$clickable.". Група кориÑтувачів – зазвичай об'єднаних Ñпільною ціллю чи реÑурÑами, – допущена до работи хоча б на одному із реÑурÑів ARC. За кліком виводитьÑÑ ÑпиÑок членів групи.
    Члени
    КількіÑть членів групи.
    ОбÑлуговуєтьÑÑ
    ÐдреÑа Ñервера, що підтримує базу даних членів групи.
    ", "Віртуальна організаціÑ" => 0, "Члени" => 0, "ОбÑлуговуєтьÑÑ" => 0 ), "vousers" => array( "0" => "КориÑтувачі", "help" => "
    Ім'Ñ
    ".$clickable.". ".$str_nam."
    МіÑце роботи
    МіÑце роботи кориÑтувача, у відповідноÑті до запиÑу у базі даних. Ðеобов'Ñзково.
    Електронна пошта
    ".$clickable.". ÐдреÑа електронної пошти кориÑтувача, у відповідноÑті до запиÑу у базі даних. Ðеобов'Ñзково. За кліком ÑтворюєтьÑÑ Ð»Ð¸ÑÑ‚ Ð´Ð»Ñ ÐºÐ¾Ñ€Ð¸Ñтувача.
    ", "â„–" => 0, "Ім'Ñ" => 0, "МіÑце роботи" => 0, "Електронна пошта" => 0 ), "userlist" => array( "0" => "Ð†Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ñ–Ñ Ð¿Ñ€Ð¾", "help" => "
    РеÑурÑ:черга
    ".$clickable.". Ðазви реÑурÑів (клаÑтерів) та відповідних черг ЛСКР (разділені двокрапкою), доÑтупних даному кориÑтувачу. Якщо доÑтуп закритий, виводитьÑÑ Ð¿Ð¾Ð²Ñ–Ð´Ð¾Ð¼Ð»ÐµÐ½Ð½Ñ "Ðемає доÑтупу до реÑурÑу". За кліком на назві клаÑтера виводитьÑÑ Ð¿Ð¾Ð²Ð½Ð¸Ð¹ Ð¾Ð¿Ð¸Ñ Ñ€ÐµÑурÑа (клаÑтера). За кліком на назві черги виводитьÑÑ Ð¿Ð¾Ð²Ð½Ð¸Ð¹ Ð¾Ð¿Ð¸Ñ Ñ‡ÐµÑ€Ð³Ð¸.
    вільних процеÑорів.
    ЧиÑло вільних центральних процеÑорів, доÑтупних у даній черзі Ð´Ð»Ñ Ð´Ð°Ð½Ð¾Ð³Ð¾ кориÑтувача на даний момент чаÑу. Якщо черга має Ð¾Ð±Ð¼ÐµÐ¶ÐµÐ½Ð½Ñ Ð·Ð° чаÑом на Ð²Ð¸ÐºÐ¾Ð½Ð°Ð½Ð½Ñ Ð·Ð°Ð²Ð´Ð°Ð½ÑŒ, Ñ†Ñ Ð¼ÐµÐ¶Ð° вказана піÑÐ»Ñ Ñ‡Ð¸Ñла процеÑорів (у хвилинах, розділÑєтÑÑ Ð´Ð²Ð¾ÐºÑ€Ð°Ð¿ÐºÐ¾ÑŽ). Ðаприклад, "3" означає, що 3 процеÑÑора доÑтупно Ð´Ð»Ñ Ð·Ð°Ð²Ð´Ð°Ð½ÑŒ будь-Ñкої тривалоÑті; "4:360" означає, що 4 процеÑора доÑтупно Ð´Ð»Ñ Ð·Ð°Ð²Ð´Ð°Ð½ÑŒ, Ñ‡Ð°Ñ Ð²Ð¸ÐºÐ¾Ð½Ð°Ð½Ð½Ñ Ñких не перевищує 6 годин; "10:180 30" означає, що 10 процеÑорів доÑтупно Ð´Ð»Ñ Ð·Ð°Ð²Ð´Ð°Ð½ÑŒ, Ñ‡Ð°Ñ Ð²Ð¸ÐºÐ¾Ð½Ð°Ð½Ð½Ñ Ñких не перевищує 3 годин, Ñ– 30 процеÑорів доÑтупно Ð´Ð»Ñ Ð·Ð°Ð²Ð´Ð°Ð½ÑŒ будь-Ñкої тривалоÑті; "0" означає, що вільних реÑурÑів немає, Ñ– Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ Ð±ÑƒÐ´ÑƒÑ‚ÑŒ направлені на Ð¾Ñ‡Ñ–ÐºÑƒÐ²Ð°Ð½Ð½Ñ Ð² черзі.
    завдань у черзі
    КількіÑть завдань кориÑтувача, що знаходÑтьÑÑ Ñƒ ÑпиÑку Ð¾Ñ‡Ñ–ÐºÑƒÐ²Ð°Ð½Ð½Ñ Ð¿ÐµÑ€ÐµÐ´ новим завданнÑм, заÑланим від імені даного кориÑтувача. ЧиÑло "0" означає, що Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ Ð¼Ð¾Ð¶Ð»Ð¸Ð²Ð¾ буде запущене на обрахунок негайно. Увага! Це лише приблизні значеннÑ, Ñкі можуть бути змінені локальними операторами.
    доÑтупний проÑтір на Ñховищі (Мб)
    ПроÑтір на локальному жорÑткому диÑку, доÑтупне даному кориÑтувачу у даній черзі (в мегабайтах). Увага! Це лише приблизні значеннÑ, оÑкільки більшіÑть клаÑтерів не підтримують диÑкові квоти.
    Ім'Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ
    ".$clickable.". ".$str_job."
    Стан
    ".$str_sta."
    Ð§Ð°Ñ (хв)
    ".$str_tim."
    РеÑурÑ
    ".$clickable.". Ім'Ñ Ñ€ÐµÑурÑу (зазвичай клаÑтера), на котрому проходить виконаннÑ. завданнÑ. За кліком виводитьÑÑ Ð¿Ð¾Ð²Ð½Ð¸Ð¹ Ð¾Ð¿Ð¸Ñ Ñ€ÐµÑурÑу (клаÑтера).
    Черга
    ".$clickable.". Ðазва черги ЛСКР, у Ñкій проходить Ð²Ð¸ÐºÐ¾Ð½Ð°Ð½Ð½Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ. ".$str_que."
    ЦП
    ".$str_cpu."
    ", "" => 0, "Ім'Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ" => 0, "Стан" => 0, "Ð§Ð°Ñ (хв)" => 0, "РеÑурÑ" => 0, "Черга" => 0, "ЦП" => 0 ), "attlist" => array( "0" => "Ð—Ð½Ð°Ñ‡ÐµÐ½Ð½Ñ Ð°Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ñ–Ð²", "help" => "
    Об'єкт
    ".$clickable." Ðазва об'єкта, атрибути Ñкого перераховані у Ñ€Ñдку. Це може бути ім'Ñ ÐºÐ»Ð°Ñтера, черги, завданнÑ, кориÑтувача Ñ– Ñ‚.д.. За кліком виводитьÑÑ Ð·Ð¼Ñ–Ñтовний Ð¾Ð¿Ð¸Ñ Ð¾Ð±'єкту.
    Ðтрибут
    Ð”Ð»Ñ ÐºÐ¾Ð¶Ð½Ð¾Ð³Ð¾ об'єкта в таблиці приведені Ð·Ð½Ð°Ñ‡ÐµÐ½Ð½Ñ Ð¾Ð´Ð½Ð¾Ð³Ð¾ чи декількох його атрибутів. У заголовку ÑÑ‚Ð¾Ð²Ð¿Ñ†Ñ Ð²ÐºÐ°Ð·Ð°Ð½Ð¾ назва атрибута, інтерпретована Ð´Ð»Ñ Ð¿Ñ€Ð¾Ñтоти Ñ‡Ð¸Ñ‚Ð°Ð½Ð½Ñ (за виключеннÑм декількох атрибутів, Ñпецифічних Ð´Ð»Ñ ÑиÑтеми MDS), а вміÑтом кожного ÑÑ‚Ð¾Ð²Ð¿Ñ†Ñ ÑвлÑютьÑÑ Ð·Ð½Ð°Ñ‡ÐµÐ½Ð½Ñ Ð²Ñ–Ð´Ð¿Ð¾Ð²Ñ–Ð´Ð½Ð¸Ñ… атрибутів, що запиÑані в інформаційній ÑиÑтемі.
    ", "Об'єкт" => 0, "Ðтрибут" => 0 ), "quelist" => array( "0" => "Черга", "help" => "
    Ðтрибут
    ".$clickable.". Ðазва атрибутів черги".$str_att."
    ЗначеннÑ
    ".$str_val."
    Ім'Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ
    ".$clickable.". ".$str_job."
    ХазÑїн
    ".$clickable.". ".$str_nam."
    Стан
    ".$str_sta."
    Ð§Ð°Ñ (хв)
    ".$str_tim."
    ОЗУ (Кб)
    ".$str_mem."
    ЦП
    ".$str_cpu."
    ", "" => 0, "І'Ð¼Ñ Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ" => 0, "ХазÑїн" => 0, "Стан" => 0, "Ð§Ð°Ñ (хв)" => 0, "ОЗУ (Кб)" => 0, "ЦП" => 0 ), "sestat" => array( "0" => "Зберігальні приÑтрої", "help" => "
    Ðазва
    Ðазва зберігального приÑтрою, зареєÑтрована в інформаційній ÑиÑтемі. МакÑимально допуÑтима довжина: 15 Ñимволів.
    ВеÑÑŒ об'єм
    Повний об'єм диÑку, Гб.
    Вільно
    ДоÑтупний проÑтір на диÑку на разі, Гб.
    Ім'Ñ
    Ім'Ñ Ð·Ð±ÐµÑ€Ñ–Ð³Ð°Ð»ÑŒÐ½Ð¾Ð³Ð¾ приÑтрою, Ñкладене із логічного імені та імені Ñервера (розділених двокрапкою). Логічне ім'Ñ Ð²Ð¸ÐºÐ¾Ñ€Ð¸ÑтовуєтьÑÑ Ñ‚Ñ–Ð»ÑŒÐºÐ¸ інформаційною ÑиÑтемою Ð´Ð»Ñ Ñ€Ð¾Ð·Ñ€Ñ–Ð·Ð½ÐµÐ½Ð½Ñ Ð·Ð±ÐµÑ€Ñ–Ð³Ð°Ð»ÑŒÐ½Ð¸Ñ… приÑтроїв на одному й тому ж Ñервері.
    URL бази
    URL зберігального приÑтрою, зазвичай протоколу gsiftp://. ВикориÑтовуйте цю адреÑу Ñк базову Ð´Ð»Ñ Ð´Ð¾Ñтупу до файлів.
    Тип
    Тип зберігального приÑтрою. Тип "gridftp-based" означає що це диÑковий накопичувач з інтерфейÑом GridFTP.
    ", "â„–" => 0, "Ðазва" => 0, // "ВеÑÑŒ об'єм" => 0, "Вільний/веÑÑŒ об'єм, Гб" => 0, "Ім'Ñ" => 0, "URL бази" => 0, "Тип" => 0 ), "allusers" => array( "0" => "Допущені кориÑтувачі:Ðктивні кориÑтувачі", "help" => "
    Ім'Ñ
    ".$clickable.". ".$str_nam."
    МіÑце роботи
    МіÑце роботи кориÑтувача, у відповідноÑті із запиÑом у його Ñертифікаті.
    ЗавданнÑ
    ЧиÑло вÑÑ–Ñ… завдань кориÑтувача, що знаходÑтьÑÑ Ð² ÑиÑтемі (на обрахунку, в черзі та завершених).
    РеÑурÑи
    ЧиÑло клаÑтерів, до Ñких даний кориÑтувач має допуÑк.
    ", "â„–" => 0, "Ім'Ñ" => 0, "МіÑце роботи" => 0, "ЗавданнÑ" => 0, "РеÑурÑи" => 0 ), "userres" => array( "0" => "", "РеÑурÑ:черга" => 0, "вільних процеÑорів" => 0, "завдань у черзі" => 0, "вільний проÑтір на Ñховищі (Мб)" => 0 ), "ldapdump" => array( "0" => "", "Ðтрибут" => 0, "ЗначеннÑ" => 0 ), "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "ВідомоÑті дійÑні з (GMT)", "Mds-validto" => "ВідомоÑті дійÑні до (GMT)" ), "isattr" => array( "nordugrid-cluster-name" => "Ім'Ñ ÐºÐµÑ€ÑƒÑŽÑ‡Ð¾Ñ— машини", "nordugrid-cluster-aliasname" => "Ðазва", "nordugrid-cluster-contactstring" => "Точка входу", "nordugrid-cluster-interactive-contactstring" => "Інтерактивна точка входу", "nordugrid-cluster-comment" => "Коментар", "nordugrid-cluster-support" => "Технічна підтримка", "nordugrid-cluster-acl" => "Допущені ВО", "nordugrid-cluster-lrms-type" => "ЛСКР, тип", "nordugrid-cluster-lrms-version" => "ЛСКР, верÑÑ–Ñ", "nordugrid-cluster-lrms-config" => "ЛСКР, подробиці", "nordugrid-cluster-architecture" => "Ðрхітектура", "nordugrid-cluster-opsys" => "Операційна ÑиÑтема", "nordugrid-cluster-homogeneity" => "ОднорідніÑть реÑурÑу", "nordugrid-cluster-nodecpu" => "ПроцеÑор, тип (найгірший)", "nordugrid-cluster-nodememory" => "ОЗУ (Мб, найменьше)", "nordugrid-cluster-totalcpus" => "ПроцеÑори, уÑього", "nordugrid-cluster-cpudistribution" => "ПроцеÑори: вузли", "nordugrid-cluster-benchmark" => "Еталонний теÑÑ‚", "nordugrid-cluster-sessiondir-free" => "Сховище, доÑтупно (Мб)", "nordugrid-cluster-sessiondir-total" => "Сховище, веÑÑŒ об'єм (Мб)", "nordugrid-cluster-sessiondir-lifetime"=> "Ð§Ð°Ñ Ð¶Ð¸Ñ‚Ñ‚Ñ Ð³Ñ€Ñ–Ð´-ÑеанÑу (хв)", "nordugrid-cluster-cache-free" => "ДиÑковий кеш, вільно (Мб)", "nordugrid-cluster-cache-total" => "ДиÑковий кеш, уÑього (Мб)", "nordugrid-cluster-runtimeenvironment" => "Робоче Ñередовище", "nordugrid-cluster-localse" => "Локальний накопичувач", "nordugrid-cluster-middleware" => "Грід-ПЗ", "nordugrid-cluster-totaljobs" => "завдань, вÑього", "nordugrid-cluster-usedcpus" => "ПроцеÑори, зайнÑті", "nordugrid-cluster-queuedjobs" => "завдань у черзі (ЗÐСТÐРІЛИЙ)", "nordugrid-cluster-prelrmsqueued" => "Грід-завдань, що очікують на заÑилку", "nordugrid-cluster-location" => "Поштовий індекÑ", "nordugrid-cluster-owner" => "ВлаÑник", "nordugrid-cluster-issuerca" => "Центр Ñертифікації", "nordugrid-cluster-issuerca-hash" => "Хеш-код центра Ñертификації", "nordugrid-cluster-trustedca" => "Довірені центри Ñертификації", "nordugrid-cluster-nodeaccess" => "IP-з'Ñ”Ð´Ð½Ð°Ð½Ð½Ñ Ð²ÑƒÐ·Ð»Ñ–Ð²", "nordugrid-cluster-gridarea" => "ÐдреÑа ÑеанÑів (ЗÐСТÐРІЛИЙ)", "nordugrid-cluster-gridspace" => "Грід-диÑк (ЗÐСТÐРІЛИЙ)", "nordugrid-cluster-opsysdistribution" => "ДиÑтрибутив ОС (ЗÐСТÐРІЛИЙ)", "nordugrid-cluster-runningjobs" => "завдань в обрахунку (ЗÐСТÐРІЛИЙ)", "nordugrid-cluster-credentialexpirationtime" => "Термін дії Ñертифікату", "nordugrid-queue-name" => "І'Ð¼Ñ Ñ‡ÐµÑ€Ð³Ð¸", "nordugrid-queue-comment" => "Коментар", "nordugrid-queue-status" => "Стан черги", "nordugrid-queue-running" => "завдань в обрахунку", "nordugrid-queue-localqueued" => "Локальні Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ Ñƒ черзі", "nordugrid-queue-prelrmsqueued" => "Грід-завданнÑ, що очікують на заÑилку", "nordugrid-queue-queued" => "завдань в черзі (ЗÐСТÐРІЛИЙ)", "nordugrid-queue-maxrunning" => "завдань в обрахунку (межа)", "nordugrid-queue-maxqueuable" => "завдань в черзі (межа)", "nordugrid-queue-maxuserrun" => "завдань на кориÑтувача (межа)", "nordugrid-queue-maxcputime" => "ТриваліÑть, найбільша (хв)", "nordugrid-queue-mincputime" => "ТриваліÑть, 
найменьша (ха)", "nordugrid-queue-defaultcputime" => "ТриваліÑть, за замовчуваннÑм (хв)", "nordugrid-queue-schedulingpolicy" => "Правила плануваннÑ", "nordugrid-queue-totalcpus" => "ПроцеÑори, вÑього", "nordugrid-queue-nodecpu" => "ПроцеÑор, тип", "nordugrid-queue-nodememory" => "ОЗУ (Мб)", "nordugrid-queue-architecture" => "Ðрхітектура", "nordugrid-queue-opsys" => "Операційна ÑиÑтема", "nordugrid-queue-homogeneity" => "ОднорідніÑть черги", "nordugrid-queue-gridrunning" => "Грід-завдань в обрахунку", "nordugrid-queue-gridqueued" => "Грід-завдань в черзі", "nordugrid-queue-benchmark" => "Еталонний теÑÑ‚", "nordugrid-queue-assignedcpunumber" => "ПроцеÑори (ЗÐСТÐРІЛИЙ)", "nordugrid-queue-assignedcputype" => "Тип процеÑора (ЗÐСТÐРІЛИЙ)", "nordugrid-job-globalid" => "Ярлик", "nordugrid-job-globalowner" => "ХазÑїн", "nordugrid-job-execcluster" => "Виконуючий клаÑтер", "nordugrid-job-execqueue" => "Виконуюча черга", "nordugrid-job-stdout" => "Стандартний потік виведеннÑ", "nordugrid-job-stderr" => "Стандартний потік видачі помилок", "nordugrid-job-stdin" => "Стандартний потік введеннÑ", "nordugrid-job-reqcputime" => "Запитаний процеÑÑорний чаÑ", "nordugrid-job-reqwalltime" => "Запитаний реальний чаÑ", "nordugrid-job-status" => "Стан", "nordugrid-job-queuerank" => "ÐŸÐ¾Ð»Ð¾Ð¶ÐµÐ½Ð½Ñ Ð² черзі", "nordugrid-job-comment" => "Коментар ЛСКР", "nordugrid-job-submissionui" => "ЗаÑилаючий клієнт", "nordugrid-job-submissiontime" => "Ð§Ð°Ñ Ð·Ð°Ñилки (GMT)", "nordugrid-job-usedcputime" => "ВикориÑтаний процеÑорний чаÑ", "nordugrid-job-usedwalltime" => "ВикориÑтаний реальний чаÑ", "nordugrid-job-completiontime" => "Ð§Ð°Ñ Ð·Ð°Ð²ÐµÑ€ÑˆÐµÐ½Ð½Ñ (GMT)", "nordugrid-job-sessiondirerasetime" => "Срок Ð·Ð½Ð¸Ñ‰ÐµÐ½Ð½Ñ (GMT)", "nordugrid-job-proxyexpirationtime" => "Ð—Ð°ÐºÑ–Ð½Ñ‡ÐµÐ½Ð½Ñ Ð´Ð¾Ð²Ñ–Ñ€ÐµÐ½Ð½Ñ (GMT)", "nordugrid-job-usedmem" => "ВикориÑÑ‚Ð°Ð½Ð½Ñ ÐžÐ—Ð£ (Кб)", "nordugrid-job-errors" => "Помилки", "nordugrid-job-exitcode" => "Код поверненнÑ", "nordugrid-job-jobname" => "Ім'Ñ", "nordugrid-job-runtimeenvironment" => "Рабоче Ñередовище", "nordugrid-job-cpucount" => "Запитано процеÑорів", "nordugrid-job-executionnodes" => "Виконуючі вузли", "nordugrid-job-gmlog" => "Журнальний Ð·Ð°Ð¿Ð¸Ñ Ð“Ðœ", "nordugrid-job-clientsoftware" => "ВерÑÑ–Ñ ÐºÐ»Ñ–Ñ”Ð½Ñ‚Ð°", "nordugrid-job-rerunable" => "МожливіÑть перезапуÑку", "nordugrid-job-reqcput" => "Запитаний Ñ‡Ð°Ñ (ЗÐСТÐРІЛИЙ)", "nordugrid-job-gridlog" => "Грід-Ð·Ð°Ð¿Ð¸Ñ (ЗÐСТÐРІЛИЙ)", "nordugrid-job-lrmscomment" => "Коментар ЛСКР (ЗÐСТÐРІЛИЙ)", "nordugrid-authuser-name" => "Ім'Ñ", "nordugrid-authuser-sn" => "Суб'єкт", "nordugrid-authuser-freecpus" => "Вільні ЦП", "nordugrid-authuser-diskspace" => "ДиÑк, доÑтупно (Мб)", "nordugrid-authuser-queuelength" => "завдань кориÑтувача в черзі", "nordugrid-se-name" => "Умовне ім'Ñ", "nordugrid-se-aliasname" => "Ðазва", "nordugrid-se-type" => "Тип", "nordugrid-se-acl" => "Допущені ВО", "nordugrid-se-freespace" => "Вільний об'єм (Мб)", "nordugrid-se-totalspace" => "ВеÑÑŒ об'єм (Мб)", "nordugrid-se-url" => "ÐдреÑа доÑтупу", "nordugrid-se-baseurl" => "ÐдреÑа доÑтупу (ЗÐСТÐРІЛИЙ)", "nordugrid-se-accesscontrol" => "Контроль доÑтупу", "nordugrid-se-authuser" => "Допущені кориÑтувачі (DN)", "nordugrid-se-location" => "Поштовий індекÑ", "nordugrid-se-owner" => "ВлаÑник", "nordugrid-se-middleware" => "Грід-ПЗ", "nordugrid-se-issuerca" => "Центр Ñертифікації", "nordugrid-se-issuerca-hash" => "Хеш-код центра Ñертификації", "nordugrid-se-trustedca" => "Довірені центи Ñертификації", "nordugrid-se-comment" => "Коментар", "nordugrid-rc-name" => "Доменне 
ім'Ñ", "nordugrid-rc-aliasname" => "Ðазва", "nordugrid-rc-baseurl" => "Контактна адреÑа", "nordugrid-rc-authuser" => "Допущені кориÑтувачі (DN)", "nordugrid-rc-location" => "Поштовий індекÑ", "nordugrid-rc-owner" => "ВлаÑник", "nordugrid-rc-issuerca" => "Сертифікат виданий" ), "errors" => array( "1" => "Ðеможливо опитати каталоги вищого рівнÑ", "2" => "Жоден із міÑцевих каталогів не відзиваєтьÑÑ", "3" => " невірна ÐºÐ¾Ð½Ñ„Ñ–Ð³ÑƒÑ€Ð°Ñ†Ñ–Ñ Ð°Ð±Ð¾ Ñплив Ñ‡Ð°Ñ Ð·Ð°Ð¿Ð¸Ñ‚Ñƒ", "4" => "Ðемає грід-завдань", "5" => "Ðемає інформації", "6" => "Служба недоÑтупна", "7" => " - Ñпробуйте поновити пізніше", "8" => "Ðемає інформації про чергу", "9" => "Ðемає даних", "10" => "Ðемає кориÑтувачів", "11" => "Ðемає доÑтупу до реÑурÑу", "12" => "не відзиваєтÑÑ", "13" => "Ðа разі немає завдань кориÑтувача ", "101" => " Ð§Ð°Ñ Ð½Ð° зв'Ñзок із локальним каталогом: ", "102" => " Ñ Ð½Ð° з'Ñ”Ð´Ð½Ð°Ð½Ð½Ñ Ñ‚Ð° ", "103" => " Ñ Ð½Ð° пошук", "104" => " Ñ Ð·Ð°Ñ‚Ñ€Ð°Ñ‡ÐµÐ½Ð¾ на пошук", "105" => "ÐŸÐµÑ€ÐµÑ€Ð°Ñ…ÑƒÐ²Ð°Ð½Ð½Ñ Ñ€ÐµÑурÑів: ", "106" => "Опитано каталогів верхнього рівнÑ: ", "107" => "Отримані географічні координати, проÑкановано реÑурÑів: ", "108" => " реÑурÑів впорÑдковано за геополітичною ознакою", "109" => "Пошук атрибутів клаÑтера", "110" => "Пошук атрибутів черги", "111" => "Ðемає даних з ", "112" => " функціонує в країні: ", "113" => " не має реÑурÑів", "114" => " Ð§Ð°Ñ Ð½Ð° з'вÑзок із глобальним каталогом: ", "115" => "ІгноруєтьÑÑ Ñ€ÐµÑурÑ: ", "116" => "не відповідає типу ", "117" => "Перевірка зв'Ñзку: ", "118" => "так!", "119" => "Ðа разі виÑвлено реÑурÑів типу ", "120" => "Помилка LDAP при пошуку на ", "121" => "-Ñтан на ", "122" => "Заблокований: ", "123" => "ВиÑвлено реєÑтранта ", "124" => "Пошук атрибутів накопичувачів", "125" => "Пошук кориÑтувачів", "126" => "Пошук завдань", "127" => " запуÑтив(ла) Ð·Ð°Ð²Ð´Ð°Ð½Ð½Ñ ", "128" => " не будучи допущеним(ою)", "129" => "Ðемає інформації про об'єкт: помилка ", "130" => " Ð§Ð°Ñ Ð½Ð° з'вÑзок із глобальним каталогом: ", "301" => "Перезавантажити", "302" => "Друк", "303" => "Допомога", "304" => "Закрити", "305" => "Червоний", "306" => "Сірий", "307" => "Ð’ÑÑ– кориÑтувачі", "308" => "Ðктивні кориÑтувачі", "309" => "Пошук", "310" => "Ðакопичувачі", "311" => "Віртуальні организації", "312" => "Прапор країни: ", "313" => " процеÑорів під грід та ", "314" => " процеÑорів під міÑцеві", "401" => "ПроцеÑи", "402" => "Грід", "403" => "міÑцеві", "404" => "Світ", "405" => "ЗÐГÐЛОМ", "406" => " об'єктів", "407" => "купа", "408" => " Гб", "409" => " ВСІ", "410" => "КлаÑтер", "411" => "Черга", "412" => "завданнÑ", "413" => "КориÑтувач", "414" => "Ðакопичувач", "415" => "Каталог реплік", "416" => "Задайте атрибути Ð´Ð»Ñ Ð¾Ð³Ð»Ñду; вибраний об'єкт: ", "417" => "Пошук проводитьÑÑ Ð´Ð»Ñ Ð»Ð¾Ð³Ñ–Ñ‡Ð½Ð¾Ð³Ð¾ І вÑÑ–Ñ… виразів", "418" => "Ðе заповнюйте праве поле, Ñкщо фільтр непотрібен", "419" => "ОглÑд реÑурÑів чи об'єктів за вибором", "420" => "Виділене ім'Ñ", "421" => "Може викориÑтовувати ", "422" => " клаÑтерів", "423" => "РеÑÑƒÑ€Ñ / об'єкт:", "424" => "КількіÑть атрибутів (6 за зам.):", "425" => "Об'єкт", "426" => "Далі", "427" => "Виберіть", "428" => "ОчиÑтити", "429" => "ПОКÐЗÐТИ" ), // Country name conversion, no postcode! 
"tlconvert" => array ( "Australia" => "ÐвÑтраліÑ", "Austria" => "ÐвÑтріÑ", "Armenia" => "ÐрменіÑ", "Algeria" => "Ðлжир", "Belgium" => "БельгіÑ", "Bulgaria" => "БолгаріÑ", "Canada" => "Канада", "China" => "Китай", "Czechia" => "ЧехіÑ", "Denmark" => "ДаніÑ", "Estonia" => "ЕÑтоніÑ", "Finland" => "ФінлÑндіÑ", "France" => "ФранціÑ", "Georgia" => "ГрузіÑ", "Germany" => "Ðімеччина", "Greece" => "ГреціÑ", "Hungary" => "Угорщина", "Iceland" => "ІÑландіÑ", "Ireland" => "ІрландіÑ", "Italy" => "ІталіÑ", "Japan" => "ЯпоніÑ", "Latvia" => "ЛатвіÑ", "Lithuania" => "Литва", "Morocco" => "Марокко", "Netherlands" => "Ðідерланди", "Norway" => "ÐорвегіÑ", "Poland" => "Польща", "Portugal" => "ПортугаліÑ", "Romania" => "РумуніÑ", "Russia" => "РоÑÑ–Ñ", "SriLanka" => "Шрі-Ланка", "Sweden" => "ШвеціÑ", "Slovakia" => "Словаччина", "Slovenia" => "СловеніÑ", "Switzerland" => "ШвейцаріÑ", "Turkey" => "Туреччина", "UK" => "ВеликобританіÑ", "Ukraine" => "Україна", "USA" => "СШÐ", "World" => "Світ" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/en.inc0000644000000000000000000000013014152153376023461 xustar000000000000000028 mtime=1638455038.4406465 30 atime=1638455038.513647597 30 ctime=1638455100.613580677 nordugrid-arc-6.14.0/src/services/monitor/lang/en.inc0000644000175000002070000014656614152153376023472 0ustar00mockbuildmock00000000000000 N/A indicates that user did not assign any name.
    X indicates that the job has been killed by the owner
    ! indicates that the job failed in the system
    Click on a name to get a detailed description of the job."; $str_nam = "Name of the user, as specified in the personal certificate. Click on a name to get the list of all the resources available for this user and all the jobs by this user which are currently in the system."; $str_sta = "Job status as returned by the Grid Manager (GM) and LRMS. In sequential order, the states are:
    ACCEPTED – job submitted but not yet processed
PREPARING – input files are being retrieved
    SUBMITTING – interaction with LRMS ongoing
    INLRMS – the job is transferred to the LRMS; internal status is added by the infosystem. Possible states are:
    : Q – job is queued
    : U – job is in a suspended state on a busy node (PBSPro)
    : S – job is in a suspended state (Condor)
    : R, run – job is running
    : E – job is finishing (PBS)
    FINISHING – output files are being transferred by the GM
    FINISHED – job is finished; time stamp is added by the infosystem
    CANCELING – job is being cancelled
    DELETED – job not cleaned upon user request but removed by the GM due to expiration time
    Each of the states can be reported with the PENDING: prefix, meaning the GM is attempting to move the job to the next state"; $str_tim = "CPU time used by the job, minutes."; $str_mem = "Memory consumed by the job, KB"; $str_cpu = "Number of processors used by the job."; // Actual messages $message = array ( // Table headers and help (do not localize "0" and "help" keys) // For "help", keywords in
    must correspond to column titles below the help text ) "loadmon" => array( "0" => "Grid Monitor", "help" => "
    This screen displays all the sites registering to the top ARC indexing service, sorted by country then by host name. Selected site parameters are monitored: cluster alias, total CPU capacity and number of running and queued jobs, both Grid and local ones. Use "Search" utility if you want to compare other cluster, queue, job etc. characteristics
    Country
    ".$clickable.". Country flag and name as deduced from available resource descriptions. Click to show only this country info.
    Cluster
    ".$clickable.". Cluster alias as assigned by the owner. Maximal displayed length is 22 characters. Click on the alias to get a detailed cluster description.
    CPUs
Total number of CPUs in a cluster. NB! Only a fraction of those may actually be available to Grid users.
    Load (processes:Grid+local)
    ".$clickable.". Relative cluster load, corresponding to the occupied CPUs count. Grey bars indicate processors occupied by the localy submitted jobs, while red bars show CPUs occupied by jobs submitted via Grid. Click on the bar to get the detailed list of all the running Grid jobs on the cluster, including amount of processors per job.
    Queueing
    ".$clickable.". Number of all queued jobs on the cluster, shown as number of queueing grid jobs plus number of locally submitted queueing jobs. Click the first number to get the list of queued Grid jobs on the cluster.
    ", "Country" => 30, "Site" => 160, "CPUs" => 10, "Load (processes: Grid+local)" => 210, "Queueing" => 10 ), "clusdes" => array("0" => "Resource Details for", "help" => "
    Attribute
    ".$clickable.". Cluster attribute name".$str_att."
    Value
    ".$str_val."
    Queue
    ".$clickable.". Names of batch queues available for the ARC users, as set by cluster owners. ".$str_que."
    Status
    Queue status. Operating queue typically reports active status.
    CPU (min)
    Time limit for job duration per queue, if set, in CPU-minutes. First displayed value is the lower limit, second - the upper one. If limits are not set (jobs of any duration are accepted), N/A tag is shown.
    Running
    Number of jobs running in the queue. Total number of jobs is shown, with number of processors occupied by Grid-submitted jobs displayed in parentheses, e.g. (Grid: 12). NB! For parallel multiprocessor jobs, number in parentheses can be larger than number of jobs.
Queueing
    Number of jobs awaiting execution in the queue. Total number of jobs is shown, with Grid-submitted jobs displayed in parentheses, e.g. (Grid: 235)
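A cell of the form just described ("20 (Grid: 12)", or plain "235" when no Grid count is given) can be split into its total and Grid-only counts with a sketch like the one below; the regular expression and the function name are illustrative assumptions rather than code taken from the monitor.

    function split_job_counts($cell) {
        // Accepts e.g. "20 (Grid: 12)" or "235"; returns total and Grid-only counts.
        if (preg_match('/^\s*(\d+)(?:\s*\(Grid:\s*(\d+)\))?/', $cell, $m)) {
            return array('total' => (int)$m[1],
                         'grid'  => isset($m[2]) ? (int)$m[2] : 0);
        }
        return array('total' => 0, 'grid' => 0);
    }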
    ", "Queue" => 0, "Mapping Queue" => 0, "Status" => 0, "Limits (min)" => 0, "CPUs" => 0, "Running" => 0, "Queueing" => 0 ), "jobstat" => array("0" => "Jobs at:Job ID", "help" => "
    JOB LIST:
    Job name
    ".$clickable.". Name of a job as assigned by the owner. If no name has been assigned, "N/A" is displayed. Click on a name to get a detailed description of the job.
    Owner
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Queue
    ".$clickable.". Name of the batch queue in which the job is being executed. ".$str_que."
    CPUs
    ".$str_cpu."
    JOB DETAILS:
    Attribute
    ".$clickable.". Job attribute name".$str_att."
    Value
    ".$str_val."
    ", "Job name" => 0, "Owner" => 0, "Status" => 0, "CPU (min)" => 0, "Queue" => 0, "CPUs" => 0 ), "volist" => array("0" => "Virtual Organisations", "help" => "
    Virtual Organisation
    ".$clickable.". Group of users, typically sharing common activities and resources, authorised at at least one ARC-enabled site. Click on the name to get the list of group members.
    Members
    Number of group members.
    Served by
    LDAP server that supports the group membership database.
    ", "Virtual Organisation" => 0, "Members" => 0, "Served by" => 0 ), "vousers" => array("0" => "Grid User Base", "help" => "
    Name
    ".$clickable.". ".$str_nam."
    Affiliation
User's home institute as entered by a VO manager. Can be empty.
E-mail
".$clickable.". User's e-mail as entered by a VO manager. Can be empty. Click the address to send an e-mail to the user.
    ", "#" => 0, "Name" => 0, "Affiliation" => 0, "E-mail" => 0 ), "userlist" => array("0" => "Information for", "help" => "
    Cluster:queue
    ".$clickable.". Names of clusters and respective queues (separated by a column, ":") where a user is authorized to submit jobs. If a user is not authorized, message "Not authorised at host ..." is displayed. Click a cluster name to get a detailed cluster description. Click on a queue name to get a detailed queue description.
    Free CPUs
    Number of free CPUs available in a given queue for the user at this moment of time, optionally appended with the upper time limit value (in minutes). For example, "3" means 3 CPUs available for a job of unlimited running time; "4:360" indicates there are 4 CPUs available for jobs not longer than 6 hours; "10:180 30" means there are 10 CPUs available for jobs not exceeding 3 hours, plus 30 CPUs available for jobs of any length; "0" means there are no CPUs available at the moment, and the jobs will be placed in a waiting queue.
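The "Free CPUs" string format described above ("3", "4:360", "10:180 30", "0") can be decoded as in the following sketch, which maps each CPU-time limit (0 meaning no limit) to a CPU count; the function name parse_freecpus() is a hypothetical helper, not part of the monitor code.

    function parse_freecpus($freecpus) {
        $slots = array();                          // minutes limit => CPU count (0 = no limit)
        foreach (preg_split('/\s+/', trim($freecpus)) as $chunk) {
            if ($chunk === '') continue;
            $parts = explode(':', $chunk);         // "4:360" -> 4 CPUs, 360-minute limit
            $cpus  = (int)$parts[0];
            $limit = isset($parts[1]) ? (int)$parts[1] : 0;
            if ($cpus > 0) $slots[$limit] = $cpus;
        }
        return $slots;                             // e.g. "10:180 30" -> array(180 => 10, 0 => 30)
    }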
    Queued jobs
Number of the user's jobs expected to sit ahead of a newly submitted job in the waiting queue. A value of "0" means the job is expected to be executed immediately. NB! This is only an estimate, which can be overridden by local policies.
    Free disk (MB)
    Disk space available for the user in a given queue (in Megabytes). NB! This is only an estimation, as most clusters do not provide fixed disk quotas.
    Job name
    ".$clickable.". ".$str_job."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Cluster
    ".$clickable.". Name of the cluster at which the job is being executed. Click on a cluster name to get detailed information about the cluster.
    Queue
    ".$clickable.". Name of the batch queue in which the job is/was executed. ".$str_que."
    CPUs
    ".$str_cpu."
    ", "" => 0, "Job name" => 0, "Status" => 0, "CPU (min)" => 0, "Cluster" => 0, "Queue" => 0, "CPUs" => 0 ), "attlist" => array("0" => "Attribute values", "help" => "
    Object
    ".$clickable.". Name of the object which attributes are displayed. It can be a cluster name, a clusters queue name, a job name, a user name etc. Click on the string to get a detailed decscription of the object.
    Attribute
For each object, one or more attribute values can be listed. The column title is the human-readable attribute name (except for some MDS-specific attributes), and the column contents are attribute values per object as entered in the Information System (a lookup sketch follows this help block).
    ", "Object" => 0, "Attribute" => 0 ), "quelist" => array("0" => "Queue", "help" => "
    Attribute
    ".$clickable.". Name of a queue attribute".$str_att."
    Value
    ".$str_val."
    Job name
    ".$clickable.". ".$str_job."
    Owner
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Memory (KB)
    ".$str_mem."
    CPUs
    ".$str_cpu."
    ", "" => 0, "Job name" => 0, "Owner" => 0, "Status" => 0, "CPU (min)" => 0, "Memory (KB)" => 0, "CPUs" => 0 ), "sestat" => array("0" => "Storage Elements", "help" => "
    Alias
    Storage element alias as specified in the Information System. Maximal displayed length is 15 characters.
    Tot. space
    Total disk space, GB.
    Free space
    Disk space available at the moment, GB.
    Name
Storage element name, consisting of a logical name and a host name (separated by a colon, ":"). The logical name is used only for information system purposes, to distinguish between different storage elements hosted by the same machine (see the sketch after this help block).
    Base URL
URL for the storage element, typically using the gsiftp:// protocol. Use this URL as the base to access files.
    Type
    Storage element type. "gridftp-based" indicates a disk storage with GridFTP interface.
    ", "#" => 0, "Alias" => 0, // "Tot. space" => 0, "Free/total space, Gb" => 0, "Name" => 0, "Base URL" => 0, "Type" => 0 ), "allusers" => array("0" => "Authorised Grid Users:Active Grid Users", "help" => "
    Name
    ".$clickable.". ".$str_nam."
    Affiliation
    User's affiliation, derived from the personal certificate
    Jobs
    Count of all user jobs in the system (running, pending, finished or deleted)
    Sites
    Shows how many sites authorise this user
    ", "#" => 0, "Name" => 0, "Affiliaton" => 0, "Jobs" => 0, "Sites" => 0 ), "userres" => array("0" => "", "Cluster:queue" => 0, "Free CPUs" => 0, "Queued jobs" => 0, "Free disk (MB)" => 0 ), "ldapdump" => array("0" => "", "Attribute" => 0, "Value" => 0 ), // IS attributes "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Info valid from (GMT)", "Mds-validto" => "Info valid to (GMT)" ), "isattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Info valid from (GMT)", "Mds-validto" => "Info valid to (GMT)", "nordugrid-cluster-name" => "Front-end domain name", "nordugrid-cluster-aliasname" => "Cluster alias", "nordugrid-cluster-contactstring" => "Contact string", "nordugrid-cluster-interactive-contactstring" => "Interactive contact", "nordugrid-cluster-comment" => "Comment", "nordugrid-cluster-support" => "E-mail contact", "nordugrid-cluster-acl" => "Authorised VOs", "nordugrid-cluster-lrms-type" => "LRMS type", "nordugrid-cluster-lrms-version" => "LRMS version", "nordugrid-cluster-lrms-config" => "LRMS details", "nordugrid-cluster-architecture" => "Architecture", "nordugrid-cluster-opsys" => "Operating system", "nordugrid-cluster-homogeneity" => "Homogeneous cluster", "nordugrid-cluster-nodecpu" => "CPU type (slowest)", "nordugrid-cluster-nodememory" => "Memory (MB, smallest)", "nordugrid-cluster-totalcpus" => "CPUs, total", "nordugrid-cluster-cpudistribution" => "CPU:machines", "nordugrid-cluster-benchmark" => "Benchmark", "nordugrid-cluster-sessiondir-free" => "Disk space, available (MB)", "nordugrid-cluster-sessiondir-total" => "Disk space, total (MB)", "nordugrid-cluster-sessiondir-lifetime"=> "Grid session lifetime (min)", "nordugrid-cluster-cache-free" => "Cache size, available (MB)", "nordugrid-cluster-cache-total" => "Cache size, total (MB)", "nordugrid-cluster-runtimeenvironment" => "Runtime environment", "nordugrid-cluster-localse" => "Storage Element, local", "nordugrid-cluster-middleware" => "Grid middleware", "nordugrid-cluster-totaljobs" => "Jobs, total amount", "nordugrid-cluster-usedcpus" => "CPUs, occupied", "nordugrid-cluster-queuedjobs" => "Jobs, queued", "nordugrid-cluster-prelrmsqueued" => "Grid jobs, awaiting submission", "nordugrid-cluster-location" => "Postal code", "nordugrid-cluster-owner" => "Owner", "nordugrid-cluster-issuerca" => "Certificate issuer", "nordugrid-cluster-issuerca-hash" => "Certificate issuer's hash", "nordugrid-cluster-trustedca" => "Trusted certificate issuers", "nordugrid-cluster-nodeaccess" => "Node IP connectivity", "nordugrid-cluster-gridarea" => "Session area (OBSOLETE)", "nordugrid-cluster-gridspace" => "Grid disk space (OBSOLETE)", "nordugrid-cluster-opsysdistribution" => "OS distribution (OBSOLETE)", "nordugrid-cluster-runningjobs" => "Jobs, running (OBSOLETE)", "nordugrid-cluster-credentialexpirationtime" => "Credential expiration time", "nordugrid-queue-name" => "Queue name", "nordugrid-queue-comment" => "Comment", "nordugrid-queue-status" => "Queue status", "nordugrid-queue-running" => "Total occupied CPUs", "nordugrid-queue-localqueued" => "Local jobs, queued", "nordugrid-queue-prelrmsqueued" => "Grid jobs, awaiting submission", "nordugrid-queue-queued" => "Jobs, queued (OBSOLETE)", "nordugrid-queue-maxrunning" => "Jobs, running (max)", "nordugrid-queue-maxqueuable" => "Jobs, queueable (max)", "nordugrid-queue-maxuserrun" => "Jobs per Unix user (max)", "nordugrid-queue-maxcputime" => "CPU time, max. (minutes)", "nordugrid-queue-mincputime" => "CPU time, min. 
(minutes)", "nordugrid-queue-defaultcputime" => "CPU time, default (minutes)", "nordugrid-queue-maxwalltime" => "Walltime, max. (minutes)", "nordugrid-queue-minwalltime" => "Walltime, min. (minutes)", "nordugrid-queue-defaultwalltime" => "Walltime, default (minutes)", "nordugrid-queue-schedulingpolicy" => "Scheduling policy", "nordugrid-queue-totalcpus" => "CPUs, total", "nordugrid-queue-nodecpu" => "CPU type", "nordugrid-queue-nodememory" => "Memory (MB)", "nordugrid-queue-architecture" => "Architecture", "nordugrid-queue-opsys" => "Operating system", "nordugrid-queue-homogeneity" => "Homogeneous queue", "nordugrid-queue-gridrunning" => "CPUs occupied by Grid jobs", "nordugrid-queue-gridqueued" => "Grid jobs, queued", "nordugrid-queue-benchmark" => "Benchmark", "nordugrid-queue-assignedcpunumber" => "CPUs per queue (OBSOLETE)", "nordugrid-queue-assignedcputype" => "CPU type (OBSOLETE)", "nordugrid-job-globalid" => "ID", "nordugrid-job-globalowner" => "Owner", "nordugrid-job-execcluster" => "Execution cluster", "nordugrid-job-execqueue" => "Execution queue", "nordugrid-job-stdout" => "Standard output file", "nordugrid-job-stderr" => "Standard error file", "nordugrid-job-stdin" => "Standard input file", "nordugrid-job-reqcputime" => "Requested CPU time", "nordugrid-job-reqwalltime" => "Requested wall clock time", "nordugrid-job-status" => "Status", "nordugrid-job-queuerank" => "Position in the queue", "nordugrid-job-comment" => "LRMS comment", "nordugrid-job-submissionui" => "Submission machine", "nordugrid-job-submissiontime" => "Submission time (GMT)", "nordugrid-job-usedcputime" => "Used CPU time", "nordugrid-job-usedwalltime" => "Used wall clock time", "nordugrid-job-completiontime" => "Job completion time (GMT)", "nordugrid-job-sessiondirerasetime" => "Erase time (GMT)", "nordugrid-job-proxyexpirationtime" => "Proxy expiration time (GMT)", "nordugrid-job-usedmem" => "Used memory (KB)", "nordugrid-job-errors" => "Errors", "nordugrid-job-exitcode" => "Exit code", "nordugrid-job-jobname" => "Name", "nordugrid-job-runtimeenvironment" => "Runtime environment", "nordugrid-job-cpucount" => "Requested CPUs", "nordugrid-job-executionnodes" => "Execution nodes", "nordugrid-job-gmlog" => "GM log file", "nordugrid-job-clientsoftware" => "Client version", "nordugrid-job-rerunable" => "Rerunnable", "nordugrid-job-reqcput" => "Requested time (OBSOLETE)", "nordugrid-job-gridlog" => "Gridlog file (OBSOLETE)", "nordugrid-job-lrmscomment" => "LRMS comment (OBSOLETE)", "nordugrid-authuser-name" => "Name", "nordugrid-authuser-sn" => "Subject Name", "nordugrid-authuser-freecpus" => "Free CPUs", "nordugrid-authuser-diskspace" => "Free disk space (MB)", "nordugrid-authuser-queuelength" => "User's queued jobs", "nordugrid-se-name" => "Name", "nordugrid-se-aliasname" => "Storage element alias", "nordugrid-se-type" => "Storage element type", "nordugrid-se-acl" => "Authorised VOs", "nordugrid-se-freespace" => "Free space (MB)", "nordugrid-se-totalspace" => "Total space (MB)", "nordugrid-se-url" => "Contact URL", "nordugrid-se-baseurl" => "Contact URL (OBSOLETE)", "nordugrid-se-accesscontrol" => "Access control", "nordugrid-se-authuser" => "Authorised user (DN)", "nordugrid-se-location" => "Postal code", "nordugrid-se-owner" => "Owner", "nordugrid-se-middleware" => "Middleware", "nordugrid-se-issuerca" => "Certificate issuer", "nordugrid-se-issuerca-hash" => "Certificate issuer's hash", "nordugrid-se-trustedca" => "Trusted certificate issuers", "nordugrid-se-comment" => "Comment", "nordugrid-rc-name" => "Domain 
name", "nordugrid-rc-aliasname" => "Replica Catalog alias", "nordugrid-rc-baseurl" => "Contact URL", "nordugrid-rc-authuser" => "Authorised user (DN)", "nordugrid-rc-location" => "Postal code", "nordugrid-rc-owner" => "Owner", "nordugrid-rc-issuerca" => "Certificate issuer" ), // Errors, warnings etc "errors" => array( // failure notices "1" => "Can not read top-level resource indices", "2" => "None of the local indices returned connection", "3" => " bad configuration or request timed out", "4" => "No Grid jobs found", "5" => "No information found", "6" => "Server unavailable", "7" => " - refresh later", "8" => "No queue information found", "9" => "No entries found", "10" => "No users found", "11" => "Not authorised at host", "12" => "does not answer", "13" => "No recent jobs found for ", // debug messages "101" => " Monitor timeouts for GRIS: ", "102" => " sec on connection and ", "103" => " sec on search", "104" => " sec spent searching", "105" => "Showing resources only in ", "106" => "Polled top-level indices: ", "107" => "Got geographical locations, scanned sites: ", "108" => " sites arranged by geographical location", "109" => "Search for cluster attributes", "110" => "Search for queue attributes", "111" => "No data from ", "112" => " is up in ", "113" => " has no resources to offer", "114" => " Monitor timeouts for GIIS: ", "115" => "Skipping GRIS: ", "116" => "not a ", "117" => "Checking connection: ", "118" => "OK", "119" => "That far, detected resources of kind ", "120" => "LDAP error searching ", "121" => " status at ", "122" => "Blacklisted: ", "123" => "Registrant found for ", "124" => "Search for SE attributes", "125" => "Search for users", "126" => "Search for jobs", "127" => " has job ", "128" => " while not being authorized", "129" => "Can not get object data: error ", "130" => " Monitor timeouts for EMIR: ", "131" => " Monitor timeouts for ARCHERY depends on OS DNS resolver settings (In DNS cache we trust!)", "132" => "Failed to query the following ARCHERY endpoint: ", "133" => "Reached the recursive loop limit while querying ARCHERY endpoint: ", // icon titles "301" => "Refresh", "302" => "Print", "303" => "Help", "304" => "Close", "305" => "Red", "306" => "Grey", "307" => "All users", "308" => "Active users", "309" => "Search", "310" => "Storage", "311" => "VOs", "312" => "Flag of ", "313" => " Grid processes and ", "314" => " local processes", // auxilliary strings "401" => "Processes", "402" => "Grid", "403" => "Local", "404" => "World", "405" => "TOTAL", "406" => " sites", "407" => "a lot of", "408" => " GB", "409" => " ALL", "410" => "Cluster", "411" => "Queue", "412" => "Job", "413" => "User", "414" => "Storage", "415" => "Replica Cat.", "416" => "Define attributes to display for the object: ", "417" => "AND of all the expressions will be matched", "418" => "Leave the righmost field empty to show everything", "419" => "Display resources or objects of your choice", "420" => "Distinguished name", "421" => "Can use a total of ", "422" => " sites", "423" => "Resource / object:", "424" => "Nr.of attributes (def. 6):", "425" => "Object", "426" => "Next", "427" => "Select one", "428" => "Reset", "429" => "SHOW" ), // Post code conversion: only for [en]! 
"tlconvert" => array ( "AU" => "Australia", "AT" => "Austria", "AM" => "Armenia", "DZ" => "Algeria", "BE" => "Belgium", "BG" => "Bulgaria", "CA" => "Canada", "CL" => "Chile", "CN" => "China", "CZ" => "Czechia", "DK" => "Denmark", "EE" => "Estonia", "FI" => "Finland", "FIN" => "Finland", "SF" => "Finland", "FR" => "France", "GE" => "Georgia", "DE" => "Germany", "D" => "Germany", "GR" => "Greece", "HK" => "HongKong", "HU" => "Hungary", "IS" => "Iceland", "IR" => "Ireland", "IE" => "Ireland", "IT" => "Italy", "JP" => "Japan", "KEK" => "Japan", "TOKYO" => "Japan", "LV" => "Latvia", "LT" => "Lithuania", "MA" => "Morocco", "NL" => "Netherlands", "NO" => "Norway", "N" => "Norway", "PL" => "Poland", "PT" => "Portugal", "RO" => "Romania", "RU" => "Russia", "SU" => "Russia", "LK" => "SriLanka", "SE" => "Sweden", "SK" => "Slovakia", "SI" => "Slovenia", "ES" => "Spain", "CH" => "Switzerland", "TW" => "Taiwan", "TR" => "Turkey", "UK" => "UK", "UA" => "Ukraine", "COM" => "USA", "GOV" => "USA", "USA" => "USA", "US" => "USA", "RG" => "World" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/us.inc0000644000000000000000000000013214152153376023510 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 30 ctime=1638455100.621580797 nordugrid-arc-6.14.0/src/services/monitor/lang/us.inc0000644000175000002070000014277514152153376023515 0ustar00mockbuildmock00000000000000 [<2-letter code>] // -- Translation: // -- Author: oxana.smirnova@hep.lu.se // Some common strings: $clickable = "CLICKABLE"; $str_att = ", human-readable except of some MDS-specific attributes. Click on the attribute name to get the list of the attribute values across the ARC universe."; $str_val = "Attribute value as entered in the Information System."; $str_que = "Typically, different queues correspond to different allowed task duration, or to different groups of users. Click on a queue name to get detailed information about the queue, including running, queued, and finished tasks."; $str_job = "Name of a job as assigned by the job owner.
N/A indicates that the user did not assign any name.
    X indicates that the job has been killed by the owner
    ! indicates that the job failed in the system
    Click on a name to get a detailed description of the job."; $str_nam = "Name of the user, as specified in the personal certificate. Click on a name to get the list of all the resources available for this user and all the jobs by this user which are currently in the system."; $str_sta = "Job status as returned by the Grid Manager (GM) and LRMS. In sequential order, the states are:
    ACCEPTED – job submitted but not yet processed
PREPARING – input files are being retrieved
    SUBMITTING – interaction with LRMS ongoing
    INLRMS – the job is transferred to the LRMS; internal status is added by the infosystem. Possible states are:
    : Q – job is queued
    : U – job is in a suspended state on a busy node (PBSPro)
    : S – job is in a suspended state (Condor)
    : R, run – job is running
    : E – job is finishing (PBS)
    FINISHING – output files are being transferred by the GM
    FINISHED – job is finished; time stamp is added by the infosystem
    CANCELING – job is being cancelled
DELETED – job not cleaned up upon user request but removed by the GM due to the expiration time
    Each of the states can be reported with the PENDING: prefix, meaning the GM is attempting to move the job to the next state"; $str_tim = "CPU time used by the job, minutes."; $str_mem = "Memory consumed by the job, KB"; $str_cpu = "Number of processors used by the job."; // Actual messages $message = array ( // Table headers and help (do not localize "0" and "help" keys) // For "help", keywords in
    must correspond to column titles below the help text ) "loadmon" => array( "0" => "Grid Monitor", "help" => "
This screen displays all the sites registering to the top ARC indexing service, sorted by country and then by host name. Selected site parameters are monitored: cluster alias, total CPU capacity, and the number of running and queued jobs, both Grid and local ones. Use the "Search" utility if you want to compare other cluster, queue, job etc. characteristics.
    Country
    ".$clickable.". Country flag and name as deduced from available resource descriptions. Click to show only this country info.
    Cluster
    ".$clickable.". Cluster alias as assigned by the owner. Maximal displayed length is 22 characters. Click on the alias to get a detailed cluster description.
    CPUs
Total number of CPUs in a cluster. NB! Only a fraction of those may actually be available to Grid users.
    Load (processes:Grid+local)
    ".$clickable.". Relative cluster load, corresponding to the occupied CPUs count. Grey bars indicate processors occupied by the localy submitted jobs, while red bars show CPUs occupied by jobs submitted via Grid. Click on the bar to get the detailed list of all the running Grid jobs on the cluster, including amount of processors per job.
    Queueing
    ".$clickable.". Number of all queued jobs on the cluster, shown as number of queueing grid jobs plus number of locally submitted queueing jobs. Click the first number to get the list of queued Grid jobs on the cluster.
    ", "Country" => 30, "Site" => 160, "CPUs" => 10, "Load (processes: Grid+local)" => 210, "Queueing" => 10 ), "clusdes" => array("0" => "Resource Details for", "help" => "
    Attribute
    ".$clickable.". Cluster attribute name".$str_att."
    Value
    ".$str_val."
    Queue
    ".$clickable.". Names of batch queues available for the ARC users, as set by cluster owners. ".$str_que."
    Status
    Queue status. Operating queue typically reports active status.
    CPU (min)
Time limit for job duration per queue, if set, in CPU-minutes. The first displayed value is the lower limit, the second the upper one. If no limits are set (jobs of any duration are accepted), an N/A tag is shown.
    Running
Number of jobs running in the queue. The total number of jobs is shown, with the number of processors occupied by Grid-submitted jobs displayed in parentheses, e.g. (Grid: 12). NB! For parallel multiprocessor jobs, the number in parentheses can be larger than the number of jobs.
Queueing
Number of jobs awaiting execution in the queue. The total number of jobs is shown, with Grid-submitted jobs displayed in parentheses, e.g. (Grid: 235).
    ", "Queue" => 0, "Mapping Queue" => 0, "Status" => 0, "Limits (min)" => 0, "CPUs" => 0, "Running" => 0, "Queueing" => 0 ), "jobstat" => array("0" => "Jobs at:Job ID", "help" => "
    JOB LIST:
    Job name
    ".$clickable.". Name of a job as assigned by the owner. If no name has been assigned, "N/A" is displayed. Click on a name to get a detailed description of the job.
    Owner
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Queue
    ".$clickable.". Name of the batch queue in which the job is being executed. ".$str_que."
    CPUs
    ".$str_cpu."
    JOB DETAILS:
    Attribute
    ".$clickable.". Job attribute name".$str_att."
    Value
    ".$str_val."
    ", "Job name" => 0, "Owner" => 0, "Status" => 0, "CPU (min)" => 0, "Queue" => 0, "CPUs" => 0 ), "volist" => array("0" => "Virtual Organisations", "help" => "
    Virtual Organisation
    ".$clickable.". Group of users, typically sharing common activities and resources, authorised at at least one ARC-enabled site. Click on the name to get the list of group members.
    Members
    Number of group members.
    Served by
    LDAP server that supports the group membership database.
    ", "Virtual Organisation" => 0, "Members" => 0, "Served by" => 0 ), "vousers" => array("0" => "Grid User Base", "help" => "
    Name
    ".$clickable.". ".$str_nam."
    Affiliation
User's home institute as entered by a VO manager. Can be empty.
    E-mail
    ".$clickable.". Users e-mail as entered by a VO manager. Can be empty. Click the address to send an e-mail to the user.
    ", "#" => 0, "Name" => 0, "Affiliation" => 0, "E-mail" => 0 ), "userlist" => array("0" => "Information for", "help" => "
    Cluster:queue
    ".$clickable.". Names of clusters and respective queues (separated by a column, ":") where a user is authorized to submit jobs. If a user is not authorized, message "Not authorised at host ..." is displayed. Click a cluster name to get a detailed cluster description. Click on a queue name to get a detailed queue description.
    Free CPUs
Number of free CPUs available to the user in a given queue at this moment, optionally appended with the upper time limit value (in minutes). For example, "3" means 3 CPUs are available for a job of unlimited running time; "4:360" indicates there are 4 CPUs available for jobs not longer than 6 hours; "10:180 30" means there are 10 CPUs available for jobs not exceeding 3 hours, plus 30 CPUs available for jobs of any length; "0" means there are no CPUs available at the moment, and jobs will be placed in a waiting queue.
    Queued jobs
Number of the user's jobs expected to sit ahead of a newly submitted job (for this user) in a waiting queue. A value of "0" means the job is expected to be executed immediately. NB! This is only an estimation, which can be overridden by local policies.
    Free disk (MB)
    Disk space available for the user in a given queue (in Megabytes). NB! This is only an estimation, as most clusters do not provide fixed disk quotas.
    Job name
    ".$clickable.". ".$str_job."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Cluster
    ".$clickable.". Name of the cluster at which the job is being executed. Click on a cluster name to get detailed information about the cluster.
    Queue
    ".$clickable.". Name of the batch queue in which the job is/was executed. ".$str_que."
    CPUs
    ".$str_cpu."
    ", "" => 0, "Job name" => 0, "Status" => 0, "CPU (min)" => 0, "Cluster" => 0, "Queue" => 0, "CPUs" => 0 ), "attlist" => array("0" => "Attribute values", "help" => "
    Object
    ".$clickable.". Name of the object which attributes are displayed. It can be a cluster name, a clusters queue name, a job name, a user name etc. Click on the string to get a detailed decscription of the object.
    Attribute
For each object, one or more attribute values can be listed. The column title is the human-readable attribute name (except for some MDS-specific attributes), and the column contents are attribute values per object as entered in the Information System.
    ", "Object" => 0, "Attribute" => 0 ), "quelist" => array("0" => "Queue", "help" => "
    Attribute
    ".$clickable.". Name of a queue attribute".$str_att."
    Value
    ".$str_val."
    Job name
    ".$clickable.". ".$str_job."
    Owner
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Memory (KB)
    ".$str_mem."
    CPUs
    ".$str_cpu."
    ", "" => 0, "Job name" => 0, "Owner" => 0, "Status" => 0, "CPU (min)" => 0, "Memory (KB)" => 0, "CPUs" => 0 ), "sestat" => array("0" => "Storage Elements", "help" => "
    Alias
    Storage element alias as specified in the Information System. Maximal displayed length is 15 characters.
    Tot. space
    Total disk space, GB.
    Free space
    Disk space available at the moment, GB.
    Name
Storage element name, consisting of a logical name and a host name (separated by a colon, ":"). The logical name is used only for information system purposes, to distinguish between different storage elements hosted by the same machine.
    Base URL
URL for the storage element, typically using the gsiftp:// protocol. Use this URL as the base to access files.
    Type
    Storage element type. "gridftp-based" indicates a disk storage with GridFTP interface.
    ", "#" => 0, "Alias" => 0, "Free/tot. space, GB" => 0, "Name" => 0, "Base URL" => 0, "Type" => 0 ), "allusers" => array("0" => "Authorised Grid Users:Active Grid Users", "help" => "
    Name
    ".$clickable.". ".$str_nam."
    Affiliation
    User's affiliation, derived from the personal certificate
    Jobs
    Count of all user jobs in the system (running, pending, finished or deleted)
    Sites
    Shows how many sites authorise this user
    ", "#" => 0, "Name" => 0, "Affiliaton" => 0, "Jobs" => 0, "Sites" => 0 ), "userres" => array("0" => "", "Cluster:queue" => 0, "Free CPUs" => 0, "Queued jobs" => 0, "Free disk (MB)" => 0 ), "ldapdump" => array("0" => "", "Attribute" => 0, "Value" => 0 ), // IS attributes "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Info valid from (GMT)", "Mds-validto" => "Info valid to (GMT)" ), "isattr" => array( "nordugrid-cluster-name" => "Front-end domain name", "nordugrid-cluster-aliasname" => "Cluster alias", "nordugrid-cluster-contactstring" => "Contact string", "nordugrid-cluster-interactive-contactstring" => "Interactive contact", "nordugrid-cluster-comment" => "Comment", "nordugrid-cluster-support" => "E-mail contact", "nordugrid-cluster-acl" => "Authorised VOs", "nordugrid-cluster-lrms-type" => "LRMS type", "nordugrid-cluster-lrms-version" => "LRMS version", "nordugrid-cluster-lrms-config" => "LRMS details", "nordugrid-cluster-architecture" => "Architecture", "nordugrid-cluster-opsys" => "Operating system", "nordugrid-cluster-homogeneity" => "Homogeneous cluster", "nordugrid-cluster-nodecpu" => "CPU type (slowest)", "nordugrid-cluster-nodememory" => "Memory (MB, smallest)", "nordugrid-cluster-totalcpus" => "CPUs, total", "nordugrid-cluster-cpudistribution" => "CPU:machines", "nordugrid-cluster-benchmark" => "Benchmark", "nordugrid-cluster-sessiondir-free" => "Disk space, available (MB)", "nordugrid-cluster-sessiondir-total" => "Disk space, total (MB)", "nordugrid-cluster-sessiondir-lifetime"=> "Grid session lifetime (min)", "nordugrid-cluster-cache-free" => "Cache size, available (MB)", "nordugrid-cluster-cache-total" => "Cache size, total (MB)", "nordugrid-cluster-runtimeenvironment" => "Runtime environment", "nordugrid-cluster-localse" => "Storage Element, local", "nordugrid-cluster-middleware" => "Grid middleware", "nordugrid-cluster-totaljobs" => "Jobs, total amount", "nordugrid-cluster-usedcpus" => "CPUs, occupied", "nordugrid-cluster-queuedjobs" => "Jobs, queued", "nordugrid-cluster-prelrmsqueued" => "Grid jobs, awaiting submission", "nordugrid-cluster-location" => "Postal code", "nordugrid-cluster-owner" => "Owner", "nordugrid-cluster-issuerca" => "Certificate issuer", "nordugrid-cluster-issuerca-hash" => "Certificate issuer's hash", "nordugrid-cluster-trustedca" => "Trusted certificate issuers", "nordugrid-cluster-nodeaccess" => "Node IP connectivity", "nordugrid-cluster-gridarea" => "Session area (OBSOLETE)", "nordugrid-cluster-gridspace" => "Grid disk space (OBSOLETE)", "nordugrid-cluster-opsysdistribution" => "OS distribution (OBSOLETE)", "nordugrid-cluster-runningjobs" => "Jobs, running (OBSOLETE)", "nordugrid-cluster-credentialexpirationtime" => "Credential expiration time", "nordugrid-queue-name" => "Queue name", "nordugrid-queue-comment" => "Comment", "nordugrid-queue-status" => "Queue status", "nordugrid-queue-running" => "Total occupied CPUs", "nordugrid-queue-localqueued" => "Local jobs, queued", "nordugrid-queue-prelrmsqueued" => "Grid jobs, awaiting submission", "nordugrid-queue-queued" => "Jobs, queued (OBSOLETE)", "nordugrid-queue-maxrunning" => "Jobs, running (max)", "nordugrid-queue-maxqueuable" => "Jobs, queueable (max)", "nordugrid-queue-maxuserrun" => "Jobs per Unix user (max)", "nordugrid-queue-maxcputime" => "CPU time, max. (minutes)", "nordugrid-queue-mincputime" => "CPU time, min. (minutes)", "nordugrid-queue-defaultcputime" => "CPU time, default (minutes)", "nordugrid-queue-maxwalltime" => "Walltime, max. 
(minutes)", "nordugrid-queue-minwalltime" => "Walltime, min. (minutes)", "nordugrid-queue-defaultwalltime" => "Walltime, default (minutes)", "nordugrid-queue-schedulingpolicy" => "Scheduling policy", "nordugrid-queue-totalcpus" => "CPUs, total", "nordugrid-queue-nodecpu" => "CPU type", "nordugrid-queue-nodememory" => "Memory (MB)", "nordugrid-queue-architecture" => "Architecture", "nordugrid-queue-opsys" => "Operating system", "nordugrid-queue-homogeneity" => "Homogeneous queue", "nordugrid-queue-gridrunning" => "CPUs occupied by Grid jobs", "nordugrid-queue-gridqueued" => "Grid jobs, queued", "nordugrid-queue-benchmark" => "Benchmark", "nordugrid-queue-assignedcpunumber" => "CPUs per queue (OBSOLETE)", "nordugrid-queue-assignedcputype" => "CPU type (OBSOLETE)", "nordugrid-job-globalid" => "ID", "nordugrid-job-globalowner" => "Owner", "nordugrid-job-execcluster" => "Execution cluster", "nordugrid-job-execqueue" => "Execution queue", "nordugrid-job-stdout" => "Standard output file", "nordugrid-job-stderr" => "Standard error file", "nordugrid-job-stdin" => "Standard input file", "nordugrid-job-reqcputime" => "Requested CPU time", "nordugrid-job-reqwalltime" => "Requested wall clock time", "nordugrid-job-status" => "Status", "nordugrid-job-queuerank" => "Position in the queue", "nordugrid-job-comment" => "LRMS comment", "nordugrid-job-submissionui" => "Submission machine", "nordugrid-job-submissiontime" => "Submission time (GMT)", "nordugrid-job-usedcputime" => "Used CPU time", "nordugrid-job-usedwalltime" => "Used wall clock time", "nordugrid-job-completiontime" => "Completion time (GMT)", "nordugrid-job-sessiondirerasetime" => "Erase time (GMT)", "nordugrid-job-proxyexpirationtime" => "Proxy expiration time (GMT)", "nordugrid-job-usedmem" => "Used memory (KB)", "nordugrid-job-errors" => "Errors", "nordugrid-job-exitcode" => "Exit code", "nordugrid-job-jobname" => "Name", "nordugrid-job-runtimeenvironment" => "Runtime environment", "nordugrid-job-cpucount" => "Requested CPUs", "nordugrid-job-executionnodes" => "Execution nodes", "nordugrid-job-gmlog" => "GM log file", "nordugrid-job-clientsoftware" => "Client version", "nordugrid-job-rerunable" => "Rerunnable", "nordugrid-job-reqcput" => "Requested time (OBSOLETE)", "nordugrid-job-gridlog" => "Gridlog file (OBSOLETE)", "nordugrid-job-lrmscomment" => "LRMS comment (OBSOLETE)", "nordugrid-authuser-name" => "Name", "nordugrid-authuser-sn" => "Subject Name", "nordugrid-authuser-freecpus" => "Free CPUs", "nordugrid-authuser-diskspace" => "Free disk space (MB)", "nordugrid-authuser-queuelength" => "User's queued jobs", "nordugrid-se-name" => "Name", "nordugrid-se-aliasname" => "Storage element alias", "nordugrid-se-type" => "Storage element type", "nordugrid-se-acl" => "Authorised VOs", "nordugrid-se-freespace" => "Free space (MB)", "nordugrid-se-totalspace" => "Total space (MB)", "nordugrid-se-url" => "Contact URL", "nordugrid-se-baseurl" => "Contact URL (OBSOLETE)", "nordugrid-se-accesscontrol" => "Access control", "nordugrid-se-authuser" => "Authorised user (DN)", "nordugrid-se-location" => "Postal code", "nordugrid-se-owner" => "Owner", "nordugrid-se-middleware" => "Middleware", "nordugrid-se-issuerca" => "Certificate issuer", "nordugrid-se-issuerca-hash" => "Certificate issuer's hash", "nordugrid-se-trustedca" => "Trusted certificate issuers", "nordugrid-se-comment" => "Comment", "nordugrid-rc-name" => "Domain name", "nordugrid-rc-aliasname" => "Replica Catalog alias", "nordugrid-rc-baseurl" => "Contact URL", "nordugrid-rc-authuser" => 
"Authorised user (DN)", "nordugrid-rc-location" => "Postal code", "nordugrid-rc-owner" => "Owner", "nordugrid-rc-issuerca" => "Certificate issuer" ), // Errors, warnings etc "errors" => array( // failure notices "1" => "Can not read top-level resource indices", "2" => "None of the local indices returned connection", "3" => " bad configuration or request timed out", "4" => "No Grid jobs found", "5" => "No information found", "6" => "Server unavailable", "7" => " - refresh later", "8" => "No queue information found", "9" => "No entries found", "10" => "No users found", "11" => "Not authorised at host", "12" => "does not answer", "13" => "No recent jobs found for ", // debug messages "101" => " Monitor timeouts for GRIS: ", "102" => " sec on connection and ", "103" => " sec on search", "104" => " sec spent searching", "105" => "Showing resources only in ", "106" => "Polled top-level indices: ", "107" => "Got geographical locations, scanned sites: ", "108" => " sites arranged by geographical location", "109" => "Search for cluster attributes", "110" => "Search for queue attributes", "111" => "No data from ", "112" => " is up in ", "113" => " has no resources to offer", "114" => " Monitor timeouts for GIIS: ", "115" => "Skipping GRIS: ", "116" => "not a ", "117" => "Checking connection: ", "118" => "OK", "119" => "That far, detected resources of kind ", "120" => "LDAP error searching ", "121" => " status at ", "122" => "Blacklisted: ", "123" => "Registrant found for ", "124" => "Search for SE attributes", "125" => "Search for users", "126" => "Search for jobs", "127" => " has job ", "128" => " while not being authorized", "129" => "Can not get object data: error ", "130" => " Monitor timeouts for EMIR: ", // icon titles "301" => "Refresh", "302" => "Print", "303" => "Help", "304" => "Close", "305" => "Red", "306" => "Grey", "307" => "All users", "308" => "Active users", "309" => "Search", "310" => "Storage", "311" => "VOs", "312" => "Flag of ", "313" => " Grid processes and ", "314" => " local processes", // auxilliary strings "401" => "Processes", "402" => "Grid", "403" => "Local", "404" => "World", "405" => "TOTAL", "406" => " sites", "407" => "a lot of", "408" => " GB", "409" => " ALL", "410" => "Cluster", "411" => "Queue", "412" => "Job", "413" => "User", "414" => "Storage", "415" => "Replica Cat.", "416" => "Define attributes to display for the object: ", "417" => "AND of all the expressions will be matched", "418" => "Leave the righmost field empty to show everything", "419" => "Display resources or objects of your choice", "420" => "Distinguished name", "421" => "Can use a total of ", "422" => " sites", "423" => "Resource / object:", "424" => "Nr.of attributes (def. 
6):", "425" => "Object", "426" => "Next", "427" => "Select one", "428" => "Reset", "429" => "SHOW" ), // Post code conversion "tlconvert" => array ( "Australia" => "Australia", "Austria" => "Austria", "Armenia" => "Armenia", "Algeria" => "Algeria", "Belgium" => "Belgium", "Bulgaria" => "Bulgaria", "Canada" => "Canada", "China" => "China", "Czechia" => "Czechia", "Denmark" => "Denmark", "Estonia" => "Estonia", "Finland" => "Finland", "France" => "France", "Georgia" => "Georgia", "Germany" => "Germany", "Greece" => "Greece", "Hungary" => "Hungary", "Iceland" => "Iceland", "Ireland" => "Ireland", "Ireland" => "Ireland", "Italy" => "Italy", "Japan" => "Japan", "Latvia" => "Latvia", "Lithuania" => "Lithuania", "Morocco" => "Morocco", "Netherlands" => "Netherlands", "Norway" => "Norway", "Poland" => "Poland", "Portugal" => "Portugal", "Romania" => "Romania", "Russia" => "Russia", "SriLanka" => "Sri Lanka", "Sweden" => "Sweden", "Slovakia" => "Slovakia", "Slovenia" => "Slovenia", "Switzerland" => "Switzerland", "Turkey" => "Turkey", "UK" => "UK", "Ukraine" => "Ukraine", "USA" => "USA" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/fr.inc0000644000000000000000000000013014152153376023466 xustar000000000000000028 mtime=1638455038.4406465 30 atime=1638455038.513647597 30 ctime=1638455100.615580707 nordugrid-arc-6.14.0/src/services/monitor/lang/fr.inc0000644000175000002070000014632014152153376023463 0ustar00mockbuildmock00000000000000 N/A indique que l'utilisateur n'a pas donné de nom.
    X indique que le job a été tué par le propriétaire.
! indique que le job a échoué dans le système
    Cliquer sur un nom pour voir une description détaillée du job."; $str_nam = "Nom de l'utilisateur, tel que spécifié dans le certificat personnel. Cliquer sur un nom pour voir la list de toutes les ressource disponibles pour cet utilisateur et tous les jobs soumis par cet utilisateur qui sont actuellement dans le système."; $str_sta = "Statut du job tel que fourni par le Grid Manager (GM) et LRMS. Dans l'ordre, les états sont :
    ACCEPTED – job soumis mais non encore pris en charge
    PREPARING – les fichiers d'entrée sont en train d'être récupérés
    SUBMITTING – interaction avec LRMS en cours
    INLRMS – le job est transféré au LRMS; un statut interne est ajouté par l'infosystem. Les états possible sont :
    : Q – le job est en attente
    : U – le job est suspendu dans un node occupé (PBSPro)
    : S – le job est suspendu (Condor)
    : R, run – le job est en cours
    : E – le job se termine (PBS)
    FINISHING – les fichiers de sortie sont en train d'être transférés par le GM
    FINISHED – le job est terminé; un indicateur temporel est ajouté par l'infosystem
    CANCELING – le job est en train d'être annulé
    DELETED – le job n'a pas été supprimé par l'utilisateur mais par le GM à cause de la date d'expiration
    Chaque état peut être donné avec le préfixe PENDING:, ce qui signifie que le GM essaie de déplacer le job vers l'état suivant"; $str_tim = "Temps CPU utilisé par le job, en minutes."; $str_mem = "Mémoire consomée par le job, en ko"; $str_cpu = "Nombre de processeurs utilisés par le job."; // Actual messages $message = array ( // Table headers and help (do not localize "0" and "help" keys) // For "help", keywords in
    must correspond to column titles below the help text ) "loadmon" => array( "0" => "Grid Monitor", "help" => "
    Cet écran montre tous les sites enrégistrés dans l'indexing service d'ARC, triés par pays puis par nom de site. Une selection de paramètres de site sont affichés : alias du cluster, capacité CPU totale et nombre de jobs courants et en attente, aussi bien du Grid que locaux. Utiliser "Search" pour comparer d'autres caractéristiques de cluster, file, job etc...
    Pays
    ".$clickable.". Drapeau et nom du pays, extrait des descriptions de ressource disponibles. Cliquer pour montrer les information concernant ce pays uniquement.
    Cluster
    ".$clickable.". Alias du cluster assigné par le propriétaire. La longueur maximale affichée est 22 caractÈres. Cliquer sur l'alias pour voir une description détaillée du cluster.
    CPU
    nombre total de CPU dans un cluster. NB! Seule une fraction de ceux-ci est effectivement accessible aux utilisateurs du Grid.
    Charge (processus:Grid+local)
    ".$clickable.". Charge relative du cluster, correspondant au nombre de CPU occupés. Les barres grises indiquent les processeurs occupés par les jobs soumis localement, les barres rouges montrent les CPU occupés par des jobs soumis à travers le Grid. Cliquer sur la barre pour voir la liste détaillée de tous les jobs d'origine Grid dans le cluster, y compris le nombre de processus par job.
    Files d'attente
    ".$clickable.". Nombre total de jobs en attente dans le cluster, montré comme le nombre de jobs du grid en attente plus le nombre de jobs en attente soumis localement. Cliquer sur le premier nombre pour voir la liste des jobs du Grid en attente dans le cluster.
    ", "Pays" => 30, "Cluster" => 160, "CPU" => 10, "Charge (processus: Grid+local)" => 210, "File d'attente" => 10 ), "clusdes" => array("0" => "Details des ressources pour", "help" => "
    Attribut
    ".$clickable.". Nom de l'attribut de cluster".$str_att."
    Valeur
    ".$str_val."
    File d'attente
    ".$clickable.". Noms des files batch disponibles pour les utilisateurs d'ARC, donné par le propriétaire du cluster. ".$str_que."
    Statut
    Statut des files. Une file active indiquera typiquement le statut : active.
    CPU (min)
    Limite de durée pour un job dans une file, si elle existe, en minutes. La première valeur affichée est la limite basse, la seconde la limite haute. Si les limites ne sont pas données (jobs de durée quelconque acceptés), le symbole N/A est affiché.
    En cours
Nombre de jobs en cours dans la file. Le nombre total de jobs est indiqué, avec le nombre de processeurs occupés par des jobs du Grid entre parenthèses, par ex. (Grid:12). NB! Pour les jobs multiprocesseurs en parallèle, le nombre entre parenthèse peut être plus élevé que le nombre de jobs.
    En attente
    Nombre de jobs en attente d'execution dans la file. Le nombre total de jobs est affiché, avec les jobs du Grid entre parenthèse, par ex. (Grid: 235)
    ", "File d'attente" => 0, "Mapping Queue" => 0, "Statut" => 0, "Limites (min)" => 0, "CPU" => 0, "En cours" => 0, "En attente" => 0 ), "jobstat" => array("0" => "Jobs à:Job ID", "help" => "
    LIST DES JOBS:
    Nom du job
    ".$clickable.". Nom d'un job, assigné par le propriétaire. Si aucun nom n'a été assigné, "N/A" est affiché. Cliquer sur un nom pour voir une description détaillée du job.
    Propriétaire
    ".$clickable.". ".$str_nam."
    Statut
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    File d'attente
    ".$clickable.". Nom de la file batch dans laquelle le job est exécuté. ".$str_que."
    CPU
    ".$str_cpu."
    DETAILS DU JOB:
    Attribut
    ".$clickable.". Nom de l'attribut du job".$str_att."
    Valeur
    ".$str_val."
    ", "Nom du job" => 0, "propriétaire" => 0, "Statut" => 0, "CPU (min)" => 0, "Filer d'attente" => 0, "CPU" => 0 ), "volist" => array("0" => "Virtual Organisations", "help" => "
    Virtual Organisation
    ".$clickable.". Groupe d'utilisateurs, généralement partageant la même activités et les mêmes ressources, autorisé à au moins un site ARC. Cliquer sur le nom pour voir la liste des membres du groupe.
    Membres
    Nombre de membres du groupe.
    Desservi par
    serveur LDAP qui supporte la base de données des membres du groupe.
    ", "Virtual Organisation" => 0, "Membres" => 0, "Desservi par" => 0 ), "vousers" => array("0" => "Utilisateurs du Grid", "help" => "
    Nom
    ".$clickable.". ".$str_nam."
    Affiliation
    Institut d'origine de l'utilisateur, entré par le VO manager. Peut être vide.
    Adresse électronique
    ".$clickable.". Adresse électronique de l'utilisateur, entré par le VO manager. Peut être vide. Cliquer sur l'adresse pour envoyer un courriel à l'utilisateur.
    ", "#" => 0, "Nom" => 0, "Affiliation" => 0, "Adresse èlectronique" => 0 ), "userlist" => array("0" => "Information pour", "help" => "
    Cluster:file
    ".$clickable.". Noms des clusters et file respective (separés par deux points, ":") où un utilisateur est autorisé à soumettre des jobs. Si un utilisateur n'est pas autorisé, le message "Not authorised at host ..." est affiché. Cliquer sur un nom de cluster pour voir une description détaillée du cluster. Cliquer sur un nom de file pour voir une description détaillée de la file.
    CPU libres
    Nombre de CPU libres disponibles dans une file données pour l'utilisateur à cet instant, eventuellement associé avec la durée maximum (en minutes) Par exemple, "3" signifie 3 CPU disponible pour un job de durée illimitée; "4:360" indique qu'il y a 4 CPU disponibles pour des jobs de moins de 6 heures; "10:180 30" signifie qu'il y a 10 CPU disponibles pour des jobs n'excédant pas 3 heures, plus 30 CPU disponibles pour des jobs de n'omporte quelle durée; "0" signifie qu'il n'y a pas de CPU disponible à cet instant, et les jobs seront placés dans une file d'attente.
    Jobs en attente
    Nombre de jobs d'utilisateur qui seront avant un nouveau job (pour cet utilisateur) dans une file d'attente. Un nombre de "0" signifie que le job devrait être exécuté immédiatement. NB! Ceci n'est qu'une estimation, qui peut être outrepassée par des politiques locales.
    Disque libre (Mo)
    Espace disque disponible pour l'utilisateur dans une file donnée (en Mégaoctets). NB! Ceci n'est qu'une estimation, étant donné que la plupart des clusters ne fournissent pas de quotas fixes.
    Nom du job
    ".$clickable.". ".$str_job."
    Statut
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Cluster
    ".$clickable.". Nom du cluster où le job est exécuté. Cliquer sur un nom de cluster pour voir des information détaillées sur le cluster.
    Queue
    ".$clickable.". Nom de la file batch dans laquelle le job est/était exécuté. ".$str_que."
    CPU
    ".$str_cpu."
    ", "" => 0, "Nom du job" => 0, "Statut" => 0, "CPU (min)" => 0, "Cluster" => 0, "File" => 0, "CPU" => 0 ), "attlist" => array("0" => "Valeur des attributs", "help" => "
    Objet
    ".$clickable.". Nom de l'objet dont les attributs sont affichés Ce peut être le nom d'un cluster, d'une file de clusters, d'un job, d'un utilisateur etc... Cliquer sur le texte pour voir une description détaillée de l'objet.
    Attribut
Pour chaque objet, un ou plusieurs attributs peuvent être listés. Le titre de la colonne est une version "human-readable" du nom de l'attribut (sauf pour certains attributs MDS-specifiques), et le contenu de la colonne est la valeur de l'attribut par objet, telle qu'elle est entrée dans l'Information System.
    ", "Objet" => 0, "Attribut" => 0 ), "quelist" => array("0" => "File", "help" => "
    Attribut
    ".$clickable.". Nom d'un attribut de file".$str_att."
    Valeur
    ".$str_val."
    Nom du job
    ".$clickable.". ".$str_job."
    Propriétaire
    ".$clickable.". ".$str_nam."
    Statut
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Memoire (KB)
    ".$str_mem."
    CPU
    ".$str_cpu."
    ", "" => 0, "Nom du job" => 0, "Propriétaire" => 0, "Statut" => 0, "CPU (min)" => 0, "Memoire (ko)" => 0, "CPU" => 0 ), "sestat" => array("0" => "Elements de Stockage", "help" => "
    Alias
    Alias de l'Element de Stockage specifié dans l'Information System. Longueur maximal affichée de 15 caractères.
    Espace total
    Espace disque total, Go.
    Espace libre
    Espace disque disponible sur le moment, Go.
    Nom
    Nom de l'Element de Stockage, composé d'un nom logique et d'un nom d'hôte (séparés par deux points, ":"). Le nom logique est utilisé uniquement pour le système d'information, pour distinguer différents élément de stockages accueillis par la même machine-hôte.
    URL de base
    URL pour l'élément de stockage, généralement un protocole gsiftp:// Utiliser cet URL comme base pour accéder aux fichiers.
    Type
    Type d'élément de stockage. "gridftp-based" indique un stockage disque avec une interface GridFTP.
    ", "#" => 0, "Alias" => 0, "Espace libre/total, GB" => 0, "Nom" => 0, "URL de base" => 0, "Type" => 0 ), "allusers" => array("0" => "Utilisateurs Grid autorisés:Utilisateurs Grid actifs", "help" => "
    Nom
    ".$clickable.". ".$str_nam."
    Affiliation
    Affiliation de l'utilisateur, dérivé du certificat personnel
    Jobs
    Compteur de tous les jobs d'utilisateur dans le système (en cours, en attente, terminés ou supprimés)
    Sites
    Affiche le nombre de sites qui admettent cet utilisateur
    ", "#" => 0, "Nom" => 0, "Affiliaton" => 0, "Jobs" => 0, "Sites" => 0 ), "userres" => array("0" => "", "Cluster:file" => 0, "CPU libres" => 0, "Jobs en attente" => 0, "Espace disque libre (Mo)" => 0 ), "ldapdump" => array("0" => "", "Attribut" => 0, "Valeur" => 0 ), // IS attributes "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Info valide depuis (GMT)", "Mds-validto" => "Info valide jusqu'à (GMT)" ), "isattr" => array( "nordugrid-cluster-name" => "Nom du domaine Front-end", "nordugrid-cluster-aliasname" => "Alias du cluster", "nordugrid-cluster-contactstring" => "Fil de contact", "nordugrid-cluster-interactive-contactstring" => "Contact interactif", "nordugrid-cluster-comment" => "Commentaire", "nordugrid-cluster-support" => "Contact courriel", "nordugrid-cluster-acl" => "VO authorisés", "nordugrid-cluster-lrms-type" => "type de LRMS", "nordugrid-cluster-lrms-version" => "Version de LRMS", "nordugrid-cluster-lrms-config" => "détails de LRMS", "nordugrid-cluster-architecture" => "Architecture", "nordugrid-cluster-opsys" => "Système d'exploitation", "nordugrid-cluster-homogeneity" => "Cluster homogène", "nordugrid-cluster-nodecpu" => "type de CPU (le plus lent)", "nordugrid-cluster-nodememory" => "Memoire (Mo, la plus petite)", "nordugrid-cluster-totalcpus" => "CPU, total", "nordugrid-cluster-cpudistribution" => "CPU:machines", "nordugrid-cluster-benchmark" => "Benchmark", "nordugrid-cluster-sessiondir-free" => "Espace disque, disponible (Mo)", "nordugrid-cluster-sessiondir-total" => "Espace disque, total (Mo)", "nordugrid-cluster-sessiondir-lifetime"=> "Durée de vie d'une session Grid (min)", "nordugrid-cluster-cache-free" => "Taille du cache, disponible (Mo)", "nordugrid-cluster-cache-total" => "Taille du cache, total (Mo)", "nordugrid-cluster-runtimeenvironment" => "Moteur d'exécution (runtime environment)", "nordugrid-cluster-localse" => "Element de stockage, local", "nordugrid-cluster-middleware" => "Intergiciel (middleware) du Grid", "nordugrid-cluster-totaljobs" => "Jobs, quantité totale", "nordugrid-cluster-usedcpus" => "CPU, occupés", "nordugrid-cluster-queuedjobs" => "Jobs, en attente", "nordugrid-cluster-prelrmsqueued" => "Jobs Grid, en attente d'être soumis", "nordugrid-cluster-location" => "Code postal", "nordugrid-cluster-owner" => "Propriétaire", "nordugrid-cluster-issuerca" => "Fournisseur du certificat", "nordugrid-cluster-issuerca-hash" => "Hachage du fournisseur du certificat", "nordugrid-cluster-trustedca" => "Fournisseurs de certificat fiables", "nordugrid-cluster-nodeaccess" => "IP-connectivité du node", "nordugrid-cluster-gridarea" => "zone de la session (OBSOLETE)", "nordugrid-cluster-gridspace" => "Espace disque Grid (OBSOLETE)", "nordugrid-cluster-opsysdistribution" => "OS distribution (OBSOLETE)", "nordugrid-cluster-runningjobs" => "Jobs, en cours (OBSOLETE)", "nordugrid-cluster-credentialexpirationtime" => "Credential expiration time", "nordugrid-queue-name" => "Nom de file", "nordugrid-queue-comment" => "Commentaire", "nordugrid-queue-status" => "Statut de file", "nordugrid-queue-running" => "CPU, occupés", "nordugrid-queue-localqueued" => "Jobs locaux, en attente", "nordugrid-queue-prelrmsqueued" => "Jobs Grid, en attente d'être soumis", "nordugrid-queue-queued" => "Jobs, en attente (OBSOLETE)", "nordugrid-queue-maxrunning" => "Jobs, en cours (max)", "nordugrid-queue-maxqueuable" => "Jobs, pouvant être mis en attente (max)", "nordugrid-queue-maxuserrun" => "Jobs par utilisateur Unix (max)", "nordugrid-queue-maxcputime" => "Temps 
CPU, max. (minutes)", "nordugrid-queue-mincputime" => "Temps CPU, min. (minutes)", "nordugrid-queue-defaultcputime" => "Temps CPU, default (minutes)", "nordugrid-queue-maxwalltime" => "Temps d'horloge, max. (minutes)", "nordugrid-queue-minwalltime" => "Temps d'horloge, min. (minutes)", "nordugrid-queue-defaultwalltime" => "Temps d'horloge, defaut (minutes)", "nordugrid-queue-schedulingpolicy" => "Scheduling policy", "nordugrid-queue-totalcpus" => "CPU, total", "nordugrid-queue-nodecpu" => "type de CPU", "nordugrid-queue-nodememory" => "Memoire (Mo)", "nordugrid-queue-architecture" => "Architecture", "nordugrid-queue-opsys" => "System d'exploitation", "nordugrid-queue-homogeneity" => "File homogène", "nordugrid-queue-gridrunning" => "CPU, occupés par jobs Grid", "nordugrid-queue-gridqueued" => "Jobs Grid, en attente", "nordugrid-queue-benchmark" => "Benchmark", "nordugrid-queue-assignedcpunumber" => "CPU par file (OBSOLETE)", "nordugrid-queue-assignedcputype" => "Type de CPU (OBSOLETE)", "nordugrid-job-globalid" => "ID", "nordugrid-job-globalowner" => "Propriétaire", "nordugrid-job-execcluster" => "Cluster d'execution", "nordugrid-job-execqueue" => "File d'execution", "nordugrid-job-stdout" => "Fichier de sortie standard", "nordugrid-job-stderr" => "Fichier d'erreur standard", "nordugrid-job-stdin" => "Fichier d'entrée standard", "nordugrid-job-reqcputime" => "Temps CPU requis", "nordugrid-job-reqwalltime" => "Temps d'horloge requis", "nordugrid-job-status" => "Statut", "nordugrid-job-queuerank" => "Position dans la file", "nordugrid-job-comment" => "Commentaire LRMS", "nordugrid-job-submissionui" => "Machine de soumission", "nordugrid-job-submissiontime" => "Date de soumission (GMT)", "nordugrid-job-usedcputime" => "Temps CPU utilisé", "nordugrid-job-usedwalltime" => "Temps d'horloge utilisé", "nordugrid-job-completiontime" => "Date de termination (GMT)", "nordugrid-job-sessiondirerasetime" => "Date de suppression (GMT)", "nordugrid-job-proxyexpirationtime" => "Date d'expiration du proxy (GMT)", "nordugrid-job-usedmem" => "Memoire utilisée (Ko)", "nordugrid-job-errors" => "Erreurs", "nordugrid-job-exitcode" => "Code de sortie", "nordugrid-job-jobname" => "Nom", "nordugrid-job-runtimeenvironment" => "Moteur d'exécution (runtime environment)", "nordugrid-job-cpucount" => "CPU requis", "nordugrid-job-executionnodes" => "Nodes d'exécution", "nordugrid-job-gmlog" => "Fichier de journal du GM", "nordugrid-job-clientsoftware" => "Version du client", "nordugrid-job-rerunable" => "Réexecutable", "nordugrid-job-reqcput" => "Temps requis (OBSOLETE)", "nordugrid-job-gridlog" => "Fichier Gridlog (OBSOLETE)", "nordugrid-job-lrmscomment" => "commentaire LRMS (OBSOLETE)", "nordugrid-authuser-name" => "Nom", "nordugrid-authuser-sn" => "Nom du sujet", "nordugrid-authuser-freecpus" => "CPU libres", "nordugrid-authuser-diskspace" => "Espace disque libre (Mo)", "nordugrid-authuser-queuelength" => "Jobs en attente de l'utilisateur", "nordugrid-se-name" => "Nom", "nordugrid-se-aliasname" => "Alias de l'élément de stockage", "nordugrid-se-type" => "Type d'élément de stockage", "nordugrid-se-acl" => "VO autorisés", "nordugrid-se-freespace" => "Espace libre (Mo)", "nordugrid-se-totalspace" => "Espace total (Mo)", "nordugrid-se-url" => "URL de contact", "nordugrid-se-baseurl" => "URL de contact (OBSOLETE)", "nordugrid-se-accesscontrol" => "Contrôle d'accès", "nordugrid-se-authuser" => "Utilisateur autorisé (DN)", "nordugrid-se-location" => "Code postal", "nordugrid-se-owner" => "Propriétaire", 
"nordugrid-se-middleware" => "Intergiciel (middleware)", "nordugrid-se-issuerca" => "Fournisseur de certificate", "nordugrid-se-issuerca-hash" => "Hachage du fournisseur de certificat", "nordugrid-se-trustedca" => "Fournisseurs de certificat fiables", "nordugrid-se-comment" => "Commentaire", "nordugrid-rc-name" => "Nom de domaine", "nordugrid-rc-aliasname" => "Alias du duplicata du Catalogue", "nordugrid-rc-baseurl" => "URL de contact", "nordugrid-rc-authuser" => "Utilisateur autorisé (DN)", "nordugrid-rc-location" => "Code postal", "nordugrid-rc-owner" => "Propriétaire", "nordugrid-rc-issuerca" => "Fournisseur de certificat" ), // Errors, warnings etc "errors" => array( // failure notices "1" => "Impossible de lire les index de ressource", "2" => "Aucun des index locaux ne retourne de connexion", "3" => " mauvaise configuration ou la requête a expiré", "4" => "Aucun job Grid trouvé", "5" => "Aucune information trouvée", "6" => "Serveur indisponible", "7" => " - rafraîchir plus tard", "8" => "Aucune information de liste trouvée", "9" => "Aucune entrée trouvée", "10" => "Aucun utilisateur trouvé", "11" => "Non autorisé chez l'hôte", "12" => "Ne répond pas", "13" => "Aucun job récent trouvé pour ", // debug messages "101" => " Monitor timeouts pour GRIS: ", "102" => " sec pendant la connection et ", "103" => " sec pendant la recherche", "104" => " sec en recherche", "105" => "N'affiche les ressources qu'en ", "106" => "Polled top-level indices: ", "107" => "Situations géographique obtenues, sites scannés: ", "108" => " sites rangés par situation géographique", "109" => "Recherche d'attributs du cluster", "110" => "Recherche d'attributs de la file", "111" => "Aucune donnée de ", "112" => " is up in ", "113" => " n'a aucune ressource à proposer", "114" => " Monitor timeouts for GIIS: ", "115" => "Saute GRIS: ", "116" => "pas un ", "117" => "Vérifie la connexion: ", "118" => "OK", "119" => "Jusqu'ici, a détecté des ressource de genre ", "120" => "Erreur LDAP en cherchant ", "121" => " statut à ", "122" => "Sur liste noire: ", "123" => "Registrant found for ", "124" => "Recherche d'attributs de SE", "125" => "Recherche d'utilisateurs", "126" => "Recherche de jobs", "127" => " a un job ", "128" => " alors que ce n'est pas autorisé", "129" => "Impossible d'obtenir les données d'object: erreur ", "130" => " Monitor timeouts for EMIR: ", // icon titles "301" => "Rafraîchir", "302" => "Imprimer", "303" => "Aide", "304" => "Fermer", "305" => "Rouge", "306" => "Gris", "307" => "Tous utilisateurs", "308" => "Utilisateurs actifs", "309" => "Rechercher", "310" => "Stockage", "311" => "VO", "312" => "Drapeau de ", "313" => " processus Grid et ", "314" => " processus locaux", // auxilliary strings "401" => "Processus", "402" => "Grid", "403" => "Local", "404" => "Monde", "405" => "TOTAL", "406" => " sites", "407" => "beaucoup de", "408" => " Go", "409" => " ALL", "410" => "Cluster", "411" => "File", "412" => "Job", "413" => "Utilisateur", "414" => "Stockage", "415" => "Duplicata Cat.", "416" => "Définir les attributs à afficher pour l'objet: ", "417" => "Le produit logique (ET) de toutes les expressions va être testé", "418" => "Laisser le champ de droite vide pour tout afficher", "419" => "Afficher les ressources ou objets de votre choix", "420" => "Nom Distinct", "421" => "Peut utiliser un total de ", "422" => " sites", "423" => "Ressource / objet:", "424" => "Nr. des attributs (def. 
6):", "425" => "Objet", "426" => "Suivant", "427" => "Choisir un", "428" => "Reinitialiser", "429" => "AFFICHER" ), // Post code conversion "tlconvert" => array ( "Australia" => "Australie", "Austria" => "Autriche", "Armenia" => "Armenie", "Algeria" => "Algerie", "Belgium" => "Belgique", "Bulgaria" => "Bulgarie", "Canada" => "Canada", "China" => "Chine", "Czechia" => "République Tchèque", "Denmark" => "Danemark", "Estonia" => "Estonie", "Finland" => "Finlande", "France" => "France", "Georgia" => "Georgie", "Germany" => "Allemagne", "Greece" => "Grèce", "Hungary" => "Hongrie", "Iceland" => "Islande", "Ireland" => "Irlande", "Italy" => "Italie", "Japan" => "Japon", "Latvia" => "Lettonie", "Lithuania" => "Lithuanie", "Morocco" => "Maroc", "Netherlands" => "Pays-Bas", "Norway" => "Norvège", "Poland" => "Pologne", "Portugal" => "Portugal", "Romania" => "Roumanie", "Russia" => "Russie", "SriLanka" => "Sri Lanka", "Sweden" => "Suède", "Slovakia" => "Slovaquie", "Slovenia" => "Slovenie", "Switzerland" => "Suisse", "Turkey" => "Turquie", "UK" => "Grande-Bretagne", "Ukraine" => "Ukraine", "USA" => "USA" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/sv.inc0000644000000000000000000000013214152153376023511 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 30 ctime=1638455100.619580767 nordugrid-arc-6.14.0/src/services/monitor/lang/sv.inc0000644000175000002070000014636514152153376023515 0ustar00mockbuildmock00000000000000 N/A betyder att ägaren inte tilldelat ett jobbnamn
    X betyder att jobbet dödats av ägaren
    ! betyder att jobbet inte fullbordades i systemet
    Klicka på ett namn för att få en detaljerad beskrivning av jobbet."; $str_nam = "Användarens namn såsom specificerat i det personliga cerifikatet. Klicka på ett namn för att få en lista över alla resurser som är tillgängliga för denna användare och denna användares alla jobb som för närvarande finns i systemet."; $str_sta = "Jobbstatus returnerad av gridmanagern (GM) och LRMS. Tillstånden är i tidsordning:
    ACCEPTED – jobbet har skickats in men är ännu ej behandlat
    PREPARING – indatafiler hämtas
    SUBMITTING – växelverkan med LRMS pågår
    INLRMS – jobbet har överförts till LRMS; intern status läggs till av informationssystemet. Möjliga tillstånd är:
    : Q – jobbet är köat
    : U – jobbet är i ett uppskjutet tillstånd på en upptagen nod (PBSPro)
    : S – jobbet är i ett uppskjutet tillstånd (Condor)
    : R, run – jobbet exekveras
    : E – jobbet avslutas (PBS)
    FINISHING – utdatafiler överförs av GM
    FINISHED – jobbet är avslutat; tidsstämpel läggs till av informationssystemet
    CANCELING – jobbet håller på att avbrytas
    DELETED – jobbet har inte tagits bort på begäran av användaren utan av GM p.g.a. att den maximala lagringstiden har passerat
    Alla dessa tillstånd kan rapporteras med prefixet PENDING:, vilket betyder att GM försöker att flytta jobbet till nästa tillstånd"; $str_tim = "CPU-tid som jobbet använt, minuter."; $str_mem = "Minne som jobbet använt, KB."; $str_cpu = "Antal processorer som jobbet använt."; // Actual messages $message = array ( // Table headers and help (do not localize "0" and "help" keys) // For "help", keywords in
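// The $str_sta text above describes the ARC job states (ACCEPTED ... DELETED,
// optionally prefixed with "PENDING:") as plain status strings. The following is
// only an illustrative sketch of how such a string could be split; the helper
// name parse_job_status() and the sample value are assumptions, not part of the
// monitor code.
function parse_job_status($status)
{
    $pending = false;
    if (strpos($status, "PENDING:") === 0) {        // optional "PENDING:" prefix
        $pending = true;
        $status  = substr($status, strlen("PENDING:"));
    }
    $parts = explode(":", $status, 2);              // GM state and optional LRMS part
    return array(
        "pending"  => $pending,
        "gm_state" => trim($parts[0]),              // ACCEPTED, INLRMS, FINISHED, ...
        "lrms"     => isset($parts[1]) ? trim($parts[1]) : ""  // Q, U, S, R, E or empty
    );
}
// Example: parse_job_status("PENDING:INLRMS: Q")
//          => pending = true, gm_state = "INLRMS", lrms = "Q"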
    must correspond to column titles below the help text ) "loadmon" => array( "0" => "Gridmonitor", "help" => "
    Denna sida visar alla kluster som registrerar sig till ARCs indexservice, sorterade efter land och därefter värdnamn. Utvalda klusterparametrar monitoreras: klusteralias, total CPU-kapacitet och antal jobb som exekveras och köar på klustret, såväl gridjobb som lokala jobb. Använd sökfunktionen om du vill jämföra annan kluster-, kö- och jobbinformation.
    Land
    ".$clickable.". Landslagga och landsnamn härledda från tillgängliga resursbeskrivningar. Klicka för att visa endast detta lands information.
    Kluster
    ".$clickable.". Klusteralias tilldelat av ägaren. Maximal visad längd är 22 tecken. Klicka på detta alias för att få en detaljerad klusterbeskrivning.
    CPU:er
    Totalt antal CPU:er i ett kluster. OBS! Endast en del av dessa kan vara tillgängliga för gridanvändare.
    Belastning (processer: grid + lokala)
    ".$clickable.". Relativ klusterbelastning, motsvarande antalet upptagna CPU:er. Grå fält markerar processorer upptagna av de lokalt inskickade jobben, medan röda fält visar CPU:er upptagna av jobb som skickats in via grid. Klicka på fältet för att få en detaljerad lista av alla gridjobb som exekveras på klustret, inklusive antalet processorer per job.
    Köande
    ".$clickable.". Totalt antal jobb som köar på klustret, visat som antalet köande gridjobb plus antalet lokalt inskickade köande jobb. Klicka på den första siffran för att få en lista av köande gridjob på klustret.
    ", "Land" => 30, "Kluster" => 160, "CPU:er" => 10, "Belastning (processer: grid + lokala)" => 210, "Köande" => 10 ), "clusdes" => array("0" => "Resursinformation för", "help" => "
    Attribut
    ".$clickable.". Klusterattributnamn".$str_att."
    Värde
    ".$str_val."
    Kö
    ".$clickable.". Namn på batchköer tillgängliga för ARCanvändarna uppsatta av klusterägarna. ".$str_que."
    Status
    Köstatus. Fungerande köer visar normalt status active.
    Tidsgränser (min)
    Tidsgräns för jobblängd per kö, om definierad, i CPU-minuter. Det första visade värdet är den nedre gränsen, det andra den övre. Om inga gränser är definierade (jobb med alla längder är tillåtna), visas N/A
    Exekveras
    Antal jobb som exekveras i kön. Det totala antalet jobb visas, med antalet processorer upptagna av gridjobb i parentes, t.ex. (Grid: 12). OBS! För parallella multiprocessorjobb kan numret i parentes vara större än antalet jobb.
    Köar
    Antal jobb som väntar på att exekveras i kön. Det totala antalet jobb visas, med gridjobb visade i parentes, t.ex. (Grid: 235)
    ", "Kö" => 0, "Mappningskö" => 0, "Status" => 0, "Tidsgränser (min)" => 0, "CPU:er" => 0, "Exekveras" => 0, "Köar" => 0 ), "jobstat" => array("0" => "Jobb på:Jobb-ID", "help" => "
    JOBBLISTA:
    Jobbnamn
    ".$clickable.". ".$str_job."
    Ägare
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Kö
    ".$clickable.". Namn på batchkön i vilken jobbet exekveras. ".$str_que."
    CPU:er
    ".$str_cpu."
    JOBBINFORMATION:
    Attribut
    ".$clickable.". Jobbattributnamn".$str_att."
    Värde
    ".$str_val."
    ", "Jobbnamn" => 0, "Ägare" => 0, "Status" => 0, "CPU (min)" => 0, "Kö" => 0, "CPU:er" => 0 ), "volist" => array("0" => "Virtuella organisationer", "help" => "
    Virtuell organisation
    ".$clickable.". Användargrupp, delar oftast gemensamma activiteter och resurser, autoriserad på åtminstone ett ARC-kluster. Klicka på namnet för att få en lista med gruppmedlemmar.
    Medlemmar
    Antal gruppmedlemmar.
    Server
    LDAP-server som huserar gruppmedlemsskapsdatabasen.
    ", "Virtuell organisation" => 0, "Medlemmar" => 0, "Server" => 0 ), "vousers" => array("0" => "Gridanvändarbas", "help" => "
    Namn
    ".$clickable.". ".$str_nam."
    Anknytning
    Användarens heminstitut inmatat av VO-managern. Kan vara tomt.
    E-post
    ".$clickable.". Användarens e-post inmatad av en VO-manager. Kan vara tomt. Klicka på adressen för att sända ett e-brev till användaren.
    ", "#" => 0, "Namn" => 0, "Anknytning" => 0, "E-post" => 0 ), "userlist" => array("0" => "Information för", "help" => "
    Kluster:kö
    ".$clickable.". Namn på kluster och dess respektive köer (separade med ett kolon, ":") där en användare är autoriserad att skicka in jobb. Om en avändare inte är autoriserad visas meddelendet "Not authorised at host ...". Klicka på ett klusternamn för att få en detaljerad klusterbeskrivning. Klicka på ett könamn föt att få en detaljerad köbeskrivning.
    Fria CPU:er
    Antal fria CPU:er tillgängliga i en given kö för denna användare vid detta tillfälle, ibland med en övre tidsgräns (i minuter) bifogad. T.ex. "3" betyder 3 CPU:er tillgängliga för ett jobb med obegränsad exekveringstid; "4:360" indikerar att det finns 4 CPU:er tillgängliga för jobb som inte är längre än 6 timmar; "10:180 30" betyder att det finns 10 CPU:er tillgängliga för jobb som inte övergår 3 timmar, plus 30 CPU:er tillgängliga för jobb av valfri längd; "0" betyder att det inte finns några CPU:er tillgängliga för tillfället, och att jobben kommer att placeras i kö.
    Köade jobb
    Antal användarens jobb som förväntas stå före ett nytt inskickat jobb (för denna användare) i en kö. "0" betyder att jobbet förväntas exekveras omedelbart. OBS! Detta är endast en uppskattning, som kan åsidosättas av lokala regler.
    Fri disk (MB)
    Diskutrymme gillgängligt för användaren i en given kö (i megabyte). OBS! Detta är endast en uppskattning, då de flesta kluster inte erbjuder fasta diskkvoter.
    Jobbnamn
    ".$clickable.". ".$str_job."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Kluster
    ".$clickable.". Namn på det kluster på vilket jobbet exekvera(de)s. Klicka på ett klusternamn för att få detaljerad information om klustret.
    Kö
    ".$clickable.". Name på den batchkö i vilken jobbet exekvera(de)s. ".$str_que."
    CPU:er
    ".$str_cpu."
    ", "" => 0, "Jobbnamn" => 0, "Status" => 0, "CPU (min)" => 0, "Kluster" => 0, "Kö" => 0, "CPU:er" => 0 ), "attlist" => array("0" => "Attributvärden", "help" => "
    Objekt
    ".$clickable.". Namn på det objekt vars attribut visas. Det kan vara ett klusternamn, ett klusters könamn, ett jobbnamn, ett användarnamn etc. Klicka på namnet för att få en detaljerad beskrivning av objektet.
    Attribut
    För varje objekt kan ett eller flera attributvärden listas. Kolumntiteln är det human-readable attributnamnet (förutom för några MDS-specifika attribut), och kolumnens innehåll är attributvärden per objekt inmatade i informationssystemet.
    ", "Objekt" => 0, "Attribut" => 0 ), "quelist" => array("0" => "Kö", "help" => "
    Attribut
    ".$clickable.". Köattributnamn".$str_att."
    Värde
    ".$str_val."
    Jobbnamn
    ".$clickable.". ".$str_job."
    Ägare
    ".$clickable.". ".$str_nam."
    Status
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Minne (KB)
    ".$str_mem."
    CPU:er
    ".$str_cpu."
    ", "" => 0, "Jobbnamn" => 0, "Ägare" => 0, "Status" => 0, "CPU (min)" => 0, "Minne (KB)" => 0, "CPU:er" => 0 ), "sestat" => array("0" => "Lagringselement", "help" => "
    Alias
    Lagringselementalias specificerat i informationssystemet. Maximal visad längd är 15 tecken.
    Totalt utrymme
    Totalt diskutrymme, GB.
    Fritt utrymme
    Diskutrymme tillgängligt för tillfället, GB.
    Namn
    Lagringselementnamn, bestående av ett logiskt namn och värdnamn (separerade av ett kolon, ":"). Det logiska namnet används endast för informationssystemsyften, för att särskilja olika lagringselement som huserar på samma maskin.
    Bas-URL
    Lagringselementets URL, oftast med gsiftp://-protokoll. Använd denna URL som bas för att komma åt filer.
    Typ
    Lagringselementtyp. "gridftp-based" indikerar disklagring med gridftp-gränssnitt.
    ", "#" => 0, "Alias" => 0, // "Totalt utrymme" => 0, "Fritt/totalt utrymme, GB" => 0, "Namn" => 0, "Bas-URL" => 0, "Typ" => 0 ), "allusers" => array("0" => "Autoriserade gridanvändare:Aktiva gridanvändare", "help" => "
    Namn
    ".$clickable.". ".$str_nam."
    Anknytning
    Användarens anknytning, härledd från det personliga certifikatet
    Jobb
    Totalt antal jobb från denna användare i systemet (exekveras, avvaktande, avslutade eller borttagna)
    Kluster
    Visar hur många kluster som autoriserar denna användare
    ", "#" => 0, "Namn" => 0, "Anknytning" => 0, "Jobb" => 0, "Kluster" => 0 ), "userres" => array("0" => "", "Kluster:kö" => 0, "Fria CPU:er" => 0, "Köade jobb" => 0, "Fri disk (MB)" => 0 ), "ldapdump" => array("0" => "", "Attribut" => 0, "Värde" => 0 ), // IS attributes "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Info giltig fr.o.m. (GMT)", "Mds-validto" => "Info giltig t.o.m. (GMT)" ), "isattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Info giltig fr.o.m. (GMT)", "Mds-validto" => "Info giltig t.o.m. (GMT)" "nordugrid-cluster-name" => "Front-end domännamn", "nordugrid-cluster-aliasname" => "Klusteralias", "nordugrid-cluster-contactstring" => "Kontaktsträng", "nordugrid-cluster-interactive-contactstring" => "Interaktiv kontakt", "nordugrid-cluster-comment" => "Kommentar", "nordugrid-cluster-support" => "E-postkontakt", "nordugrid-cluster-acl" => "Auktoriserade VO:er", "nordugrid-cluster-lrms-type" => "LRMS-typ", "nordugrid-cluster-lrms-version" => "LRMS-version", "nordugrid-cluster-lrms-config" => "LRMS-detaljer", "nordugrid-cluster-architecture" => "Arkitektur", "nordugrid-cluster-opsys" => "Operativsystem", "nordugrid-cluster-homogeneity" => "Homogent kluster", "nordugrid-cluster-nodecpu" => "CPU-typ (lÃ¥ngsammast)", "nordugrid-cluster-nodememory" => "Minne (MB, minsta)", "nordugrid-cluster-totalcpus" => "CPU:er, totalt", "nordugrid-cluster-cpudistribution" => "CPU:er, per maskin", "nordugrid-cluster-benchmark" => "Benchmark", "nordugrid-cluster-sessiondir-free" => "Diskutrymme, tillgängligt (MB)", "nordugrid-cluster-sessiondir-total" => "Diskutrymme, totalt (MB)", "nordugrid-cluster-sessiondir-lifetime"=> "Gridsessionens livstid (min)", "nordugrid-cluster-cache-free" => "Cachestorlek, tillgängligt (MB)", "nordugrid-cluster-cache-total" => "Cachestorlek, totalt (MB)", "nordugrid-cluster-runtimeenvironment" => "Runtime-miljö", "nordugrid-cluster-localse" => "Lagringselement, lokalt", "nordugrid-cluster-middleware" => "Grid-middleware", "nordugrid-cluster-totaljobs" => "Jobb, totalt antal", "nordugrid-cluster-usedcpus" => "CPU:er, upptagna", "nordugrid-cluster-queuedjobs" => "Jobb, köade", "nordugrid-cluster-prelrmsqueued" => "Gridjobb, köade före LRMS", "nordugrid-cluster-location" => "Postnummer", "nordugrid-cluster-owner" => "Ägare", "nordugrid-cluster-issuerca" => "Certifikatutfärdare", "nordugrid-cluster-issuerca-hash" => "Certifikatutfärdares hashsumma", "nordugrid-cluster-trustedca" => "Betrodd certifikatutfärdare", "nordugrid-cluster-nodeaccess" => "Nod-IP-uppkoppling", "nordugrid-cluster-gridarea" => "Sessionsarea (FÖRLEGAD)", "nordugrid-cluster-gridspace" => "Griddiskutrymme (FÖRLEGAD)", "nordugrid-cluster-opsysdistribution" => "OS-distribution (FÖRLEGAD)", "nordugrid-cluster-runningjobs" => "Jobb, exekveras (FÖRLEGAD)", "nordugrid-cluster-credentialexpirationtime" => "Certifikat-förfallotid", "nordugrid-queue-name" => "Könamn", "nordugrid-queue-comment" => "Kommentar", "nordugrid-queue-status" => "Köstatus", "nordugrid-queue-running" => "CPU:er, upptagna", "nordugrid-queue-localqueued" => "Lokala jobb, köade", "nordugrid-queue-prelrmsqueued" => "Grid jobb, köade före LRMS", "nordugrid-queue-queued" => "Jobb, köade (FÖRLEGAD)", "nordugrid-queue-maxrunning" => "Jobb, exekveras (max)", "nordugrid-queue-maxqueuable" => "Jobb, köbara (max)", "nordugrid-queue-maxuserrun" => "Jobb per unixanvändare (max)", "nordugrid-queue-maxcputime" => "CPU-tid, max. (minuter)", "nordugrid-queue-mincputime" => "CPU-tid, min. 
(minuter)", "nordugrid-queue-defaultcputime" => "CPU-tid, förvald (minuter)", "nordugrid-queue-maxwalltime" => "Klocktid, max. (minuter)", "nordugrid-queue-minwalltime" => "Klocktid, min. (minuter)", "nordugrid-queue-defaultwalltime" => "Klocktid, förvald (minuter)", "nordugrid-queue-schedulingpolicy" => "Scheduleringspolicy", "nordugrid-queue-totalcpus" => "CPU:er, totalt", "nordugrid-queue-nodecpu" => "CPU-typ", "nordugrid-queue-nodememory" => "Minne (MB)", "nordugrid-queue-architecture" => "Arkitektur", "nordugrid-queue-opsys" => "Operativsystem", "nordugrid-queue-homogeneity" => "Homogen kö", "nordugrid-queue-gridrunning" => "CPU:er, upptagna av gridjobb", "nordugrid-queue-gridqueued" => "Gridjobb, köade", "nordugrid-queue-benchmark" => "Benchmark", "nordugrid-queue-assignedcpunumber" => "CPU:er per kö (FÖRLEGAD)", "nordugrid-queue-assignedcputype" => "CPU-typ (FÖRLEGAD)", "nordugrid-job-globalid" => "ID", "nordugrid-job-globalowner" => "Ägare", "nordugrid-job-execcluster" => "Exekveringskluster", "nordugrid-job-execqueue" => "Exekveringskö", "nordugrid-job-stdout" => "Standard output-fil", "nordugrid-job-stderr" => "Standard error-fil", "nordugrid-job-stdin" => "Standard input-fil", "nordugrid-job-reqcputime" => "Begärd CPU-tid", "nordugrid-job-reqwalltime" => "Begärd klocktid", "nordugrid-job-status" => "Status", "nordugrid-job-queuerank" => "Plats i kön", "nordugrid-job-comment" => "LRMS-kommentar", "nordugrid-job-submissionui" => "Inskickningsmaskin", "nordugrid-job-submissiontime" => "Inskickningstid (GMT)", "nordugrid-job-usedcputime" => "Använd CPU-tid", "nordugrid-job-usedwalltime" => "Använd klocktid", "nordugrid-job-completiontime" => "Avslutningstid (GMT)", "nordugrid-job-sessiondirerasetime" => "Raderingstid (GMT)", "nordugrid-job-proxyexpirationtime" => "Proxyförfallotid (GMT)", "nordugrid-job-usedmem" => "Använt minne (KB)", "nordugrid-job-errors" => "Fel", "nordugrid-job-exitcode" => "Returkod", "nordugrid-job-jobname" => "Namn", "nordugrid-job-runtimeenvironment" => "Runtimemiljö", "nordugrid-job-cpucount" => "Begärda CPU:er", "nordugrid-job-executionnodes" => "Exekveringsnoder", "nordugrid-job-gmlog" => "GM loggfil", "nordugrid-job-clientsoftware" => "Klientversion", "nordugrid-job-rerunable" => "Omkörbart", "nordugrid-job-reqcput" => "Begärd tid (FÖRLEGAD)", "nordugrid-job-gridlog" => "Gridloggfil (FÖRLEGAD)", "nordugrid-job-lrmscomment" => "LRMS-kommentar (FÖRLEGAD)", "nordugrid-authuser-name" => "Namn", "nordugrid-authuser-sn" => "Subjektnamn", "nordugrid-authuser-freecpus" => "Fria CPU:er", "nordugrid-authuser-diskspace" => "Fritt diskutrymme (MB)", "nordugrid-authuser-queuelength" => "Användarens kölängd", "nordugrid-se-name" => "Namn", "nordugrid-se-aliasname" => "Lagringselementalias", "nordugrid-se-type" => "Lagringselementtyp", "nordugrid-se-acl" => "Auktoriserade VO:er", "nordugrid-se-freespace" => "Fritt utrymme (MB)", "nordugrid-se-totalspace" => "Totalt utrymme (MB)", "nordugrid-se-url" => "Kontakt-URL", "nordugrid-se-baseurl" => "Kontakt-URL (FÖRLEGAD)", "nordugrid-se-accesscontrol" => "Access kontroll", "nordugrid-se-authuser" => "Auktoriserad användare (DN)", "nordugrid-se-location" => "Postnummer", "nordugrid-se-owner" => "Ägare", "nordugrid-se-middleware" => "Grid-middleware", "nordugrid-se-issuerca" => "Certifikatutfärdare", "nordugrid-se-issuerca-hash" => "Certifikatutfärdares hashsumma", "nordugrid-se-trustedca" => "Betrodd certifikatutfärdare", "nordugrid-se-comment" => "Kommentar", "nordugrid-rc-name" => "Domännamn", "nordugrid-rc-aliasname" => 
"Replica Catalog-alias", "nordugrid-rc-baseurl" => "Kontakt-URL", "nordugrid-rc-authuser" => "Auktoriserad användare (DN)", "nordugrid-rc-location" => "Postnummer", "nordugrid-rc-owner" => "Ägare", "nordugrid-rc-issuerca" => "Certifikatutfärdare" ), // Errors, warnings etc "errors" => array( // failure notices "1" => "Kan inte läsa topp-nivÃ¥-indexservrarna", "2" => "Ingen av de lokala indexservrarna returnerade uppkopplingen", "3" => " dÃ¥lig konfigurering eller begäran drog över tiden", "4" => "Inga gridjobb funna", "5" => "Ingen information funnen", "6" => "Server otillgänglig", "7" => " - ladda om senare", "8" => "Ingen köinformation funnen", "9" => "Inga poster funna", "10" => "Inga användare funna", "11" => "Inte autoriserad pÃ¥ värden", "12" => "svarar inte", "13" => "Inga nya jobb funna för ", // debug messages "101" => " Monitor-time-out för GRIS: ", "102" => " sek för uppkoppling och ", "103" => " sek för sökning", "104" => " sek använda för sökning", "105" => "Visar resurser endast i ", "106" => "FrÃ¥gade topp-nivÃ¥-indexservrar: ", "107" => "Fick geografiska data, skannade kluster: ", "108" => " kluster sorterade efter geografiska data", "109" => "Sökning efter klusterattribut", "110" => "Sökning efter köattribut", "111" => "Inga data frÃ¥n ", "112" => " är uppe i ", "113" => " har inga resurser att erbjuda", "114" => " Monitor-time-out för GIIS: ", "115" => "Hoppar över GRIS: ", "116" => "inte en ", "117" => "Verifierar uppkoppling: ", "118" => "OK", "119" => "Hittills, detekterade resurser av slag ", "120" => "LDAP-fel vid sökning efter ", "121" => " status vid ", "122" => "Svartlistad: ", "123" => "Registrant funnen för ", "124" => "Sökning efter lagringselementattribut", "125" => "Sökning efter användare", "126" => "Sökning efter jobb", "127" => " har jobb ", "128" => " utan att vara auktoriserad", "129" => "Kan inte hämta objektdata: fel ", "130" => " Monitor-timeout för EMIR: ", "131" => " Monitor-timeout för ARCHERY beror pÃ¥ operativsystemets DNS-uppslagningsinställningar (Vi litar pÃ¥ DNS-cache!)", "132" => "Misslyckades med att frÃ¥ga följande ARCHERY-ändpunkt: ", "133" => "NÃ¥dde rekursiv-loop-gräns medan ARCHERY-ändpunkt tillfrÃ¥gades: ", // icon titles "301" => "Ladda om", "302" => "Skriv ut", "303" => "Hjälp", "304" => "Stäng", "305" => "Röd", "306" => "GrÃ¥", "307" => "Alla användare", "308" => "Aktiva användare", "309" => "Sök", "310" => "Lagring", "311" => "VO:ar", "312" => "Flagga för ", "313" => " Gridprocesser and ", "314" => " lokala processer", // auxilliary strings "401" => "Processer", "402" => "Grid", "403" => "Lokala", "404" => "Världen", "405" => "TOTALT", "406" => " kluster", "407" => "en massa", "408" => " GB", "409" => " ALLA", "410" => "Kluster", "411" => "Kö", "412" => "Jobb", "413" => "Användare", "414" => "Lagring", "415" => "Replikakatalog", "416" => "Definera attribut att visa för objektet: ", "417" => "logiskt OCH av alla uttryck kommer att hittas", "418" => "Lämna det högra fältet tomt för att visa allt", "419" => "Visa resurser eller objekt enligt ditt val", "420" => "Särskijlande namn", "421" => "Kan använda totalt ", "422" => " kluster", "423" => "Resurs / objekt:", "424" => "Antal attribut (förval 6):", "425" => "Objekt", "426" => "Nästa", "427" => "Välj ett", "428" => "Ã…terställ", "429" => "VISA" ), // Post code conversion "tlconvert" => array ( "Australia" => "Australien", "Austria" => "Österrike", "Armenia" => "Armenien", "Algeria" => "Algeriet", "Belgium" => "Belgien", "Bulgaria" => "Bulgarien", "Canada" => "Canada", "Chile" => 
"Chile", "China" => "Kina", "Czechia" => "Tjeckien", "Denmark" => "Danmark", "Estonia" => "Estland", "Finland" => "Finland", "France" => "Frankrike", "Georgia" => "Georgien", "Germany" => "Tyskland", "Greece" => "Grekland", "HongKong" => "Hong Kong", "Hungary" => "Ungern", "Iceland" => "Island", "Ireland" => "Irland", "Italy" => "Italien", "Japan" => "Japan", "Latvia" => "Lettland", "Lithuania" => "Litauen", "Morocco" => "Marocko", "Netherlands" => "Nederländerna", "Norway" => "Norge", "Poland" => "Polen", "Portugal" => "Portugal", "Romania" => "Rumänien", "Russia" => "Ryssland", "SriLanka" => "Sri Lanka", "Sweden" => "Sverige", "Slovakia" => "Slovakien", "Slovenia" => "Slovenien", "Spain" => "Spanien", "Switzerland" => "Schweiz", "Taiwan" => "Taiwan", "Turkey" => "Turkiet", "UK" => "Storbritannien", "Ukraine" => "Ukraina", "USA" => "USA", "World" => "Världen" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/lang/PaxHeaders.30264/hu.inc0000644000000000000000000000013014152153376023473 xustar000000000000000028 mtime=1638455038.4406465 30 atime=1638455038.513647597 30 ctime=1638455100.615580707 nordugrid-arc-6.14.0/src/services/monitor/lang/hu.inc0000644000175000002070000014220214152153376023463 0ustar00mockbuildmock00000000000000 N/A:  ez utóbbi azt jelenti, hogy a felhasználó nem adott meg neki nevet.
    X:  ez azt jelenti, hogy a job-ot a tulajdonosa "megölte"
    !:  ez azt jelenti, hogy a job futása közben hiba lépett fel a rendszerben
    Kattintson a névre, hogy bővebb információt kapjon a job-ról."; $str_nam = "A felhasználó neve, ez van megadva a személyes tanusítványban. Kattintson a névre azért, hogy megkapja a felhasználó összes elérhető erőforrásoknak a listáját és a hozzá tartozó összes job-ot, ami a rendszerben éppen jelen van."; $str_sta = "A job állapota, amit a Grid Menedzser (GM) és az LRMS ad vissza. Szekvenciális sorrendben az állapotok a következők:
    ACCEPTED – a job elküldve, de még nincs feldolgozás alatt
    PREPARING – bemeneti állományok kinyerése.
    SUBMITTING – az interrakció az LRMS-el folyamatban
    INLRMS – a job átküldve az LRMS-nek; a belső állapot hozzá lett adva az információs rendszer segítségével. A lehetséges állapotok:
    Q – a job a várakozósorban van
    U – a job egy felfüggesztett állapotban van egy elfoglalt csomópontban (PBSPro)
    S – a job egy felfüggesztett állapotban van (Condor)
    R – a job fut
    E – a job véget ért (PBS)
    FINISHING – a kimeneti fájlok átvitele folyamatban van a GM segítségével
    FINISHED – a job véget ért; az időpecsétet hozzáadta az információs rendszer
    CANCELING – a job érvénytelenítve lett
    DELETED – a job nem lett kitörölve a felhasználó kérésére, viszont a GM eltávolította, mert lejárt a határideje
    Minden állapotot jelenteni lehet a PENDING állapottal, ez azt jelenti a GM számára, hogy a job-ot a következő állapotba próbálja meg átbillenteni"; $str_tim = "A job által lefoglalt CPU idő (perc)."; $str_mem = "A job által lefoglalt memória (KB)."; $str_cpu = "Azon processzorok száma, amit a job használ."; // Actual messages $message = array ( // Table headers and help (do not localize "0" and "help" keys) // For "help", keywords in
    must correspond to column titles below the help text ) "loadmon" => array( "0" => "Grid Monitorozó", "help" => "
    Ez a képernyő mutatja meg az összes site regisztrációt, ami a legfelső ARC indexelő szolgáltatás esetén előfordul, először az ország, majd a hoszt név szerint van rendezve a lista. A következő site paraméterek vannak monitorozva: klaszter alias, teljes CPU kapacitás és a futó ill. várakozósoros jobok száma (Grid-es és helyi együttesen). Használja a "Keresés" segédeszközt, ha szeretne megosztani egyéb klaszter, várakozósor, job stb. jellemzőt.
    Ország
    ".$clickable.". Ország zászló és név, ez az elérhető leírásokból száramzik. Kattintson ide, hogy csak ennek az országnak az információit lássa.
    Klaszter
    ".$clickable.". A klaszter álnevét a tulajdonos jelöli ki. Maximum 22 karakter hosszúságú lehet. Kattintson az aliasre, hogy részletesebb információt kapjon a klaszterről.
    CPU-k
    A klaszterben lévő összes CPU száma. Csak ezek töredékét tudják éppen elérni a grid-es felhasználók.
    Betöltés (feldolgoz:Grid+helyi)
    ".$clickable.". Relatív klaszter betöltés, megfelelően a foglalt CPU-k számához. A szürke sáv azt mutatja, hogy a processzorokat helyileg elküldött job-ok foglalják le, a piros sáv pedig azt, hogy a CPU-kat a grid-ről küldött job-ok foglalják le. Kattintson a sávra, hogy részletes információt kapjon a klaszteren futó Grid-es job-okról, ebben benne foglaltatik az is, hogy egy job-hoz hány darab processzor tartozik.
    Várakozólistában
    ".$clickable.". A klaszterben lévő összes várakozósoros job száma, megmutatja a várakozósoros grid-es job-okat plusz a helyileg elküldött várakozósoros job-okat. Kattintson az első számra ahhoz, hogy a klaszterben lévő várakozósoros grid-es jobok listáját megkapja.
    ", "Ország" => 30, "Site" => 160, "CPU-k" => 10, "Betöltés (feldolgoz: Grid+helyi)" => 210, "Várólistán" => 10 ), "clusdes" => array("0" => "Erőforrás részletek a következőkről:", "help" => "
    Attribútum
    ".$clickable.". Klaszter attribútum név".$str_att."
    Érték
    ".$str_val."
    Várakozósor
    ".$clickable.". Azon kötegelt várakozósoroknak a nevei, amik az ARC felhasználók számára elérhetőek, ezt a klaszter tulajdonosa állítja be. ".$str_que."
    Állapot
    A várakozósor állapota. A működő lista tipikusan aktív állapotot jelez.
    CPU (min)
    Idő korlát a várakozósoronkénti job-ok időtartamára, ha meg van adva, akkor az CPU percben értendő. Az első megjelenő érték az alsó korlát, a második a felső korlát. Ha a korlátok nincsenek beállítva (a job-ok bármikor elfogadásra kerülnek), ez így van jelölve: N/A .
    Futás
    Azon job-ok száma, amik a várakozósorban futnak. Az összes job számát megmutatja, a processzorok számával illetve a zárójelben jelzett grid-feladatokkal együtt, e.g. (Grid: 12). Párhuzamos, többprocesszoros feladatok esetén a zárójelek közötti szám nagyobb is lehet, mint a feladatok száma
    Várakozólistán
    Azon job-ok száma, melyek a várakozósorban a futtatásra várnak. Az összes job száma látható, a zárójelben jelzett grid-feladatokkal együtt, például (Grid: 235)
    ", "Várakozósor" => 0, "LRMS várakozósor" => 0, "Ãllapot" => 0, "Korlátok (min)" => 0, "CPU-k" => 0, "Futás" => 0, "Várólistán" => 0 ), "jobstat" => array("0" => "Job helye:Job ID", "help" => "
    JOB LISTA
    Job név
    ".$clickable.". A job neve, amit a tulajdonos jelöl ki. Ha nincsen név kijelölve, akkor a következőt látjuk: "N/A". Kattintson a névre, hogy megkapja a job részletes leírását.
    Tulajdonos
    ".$clickable.". ".$str_nam."
    Állapot
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Várakozósor
    ".$clickable.". A kötegelt várakozósor neve, amiben a job végrehajtódott. ".$str_que."
    CPU-k
    ".$str_cpu."
    JOB RÉSZLETEK
    Attribútum
    ".$clickable.". Job attribútum név".$str_att."
    Érték
    ".$str_val."
    ", "Job név" => 0, "Tulajdonos" => 0, "Ãllapot" => 0, "CPU (min)" => 0, "Várakozósor" => 0, "CPU-k" => 0 ), "volist" => array("0" => "Virtuális szervezetek", "help" => "
    Virtuális szervezet
    ".$clickable.". Felhasználók csoportja, tipikusan közös feladatokat és erőforrásokat osztanak meg egymással, az engedélyezés legalább egy ARC-os site-on megtörténik. Kattintson a névre, hogy megkapja a csoport tajainak a listáját.
    Tagok
    A csoport tagjainak a száma.
    Kiszolgáló
    LDAP szerver, ami támogatja a csoportos tagsági adatbázist.
    ", "Virtuális szervezet" => 0, "Tagok" => 0, "Kiszolgáló" => 0 ), "vousers" => array("0" => "Grid User Base", "help" => "
    Név
    ".$clickable.". ".$str_nam."
    Kapcsolatok
    A felhasználók saját intézménye, amit egy VO menedzser szolgáltat. üres is lehet.
    E-mail
    ".$clickable.". Felhasználók E-mail címe, amit egy VO menedzser ad meg. üres is lehet. Kattintson a címre, hogy levelet küldhessen a felhasználó E-mail címére.
    ", "#" => 0, "Név" => 0, "Kapcsolatok" => 0, "E-mail" => 0 ), "userlist" => array("0" => "Információszerzés", "help" => "
    Klaszter:várakozósor
    ".$clickable.". A klaszterek nevei és a megfelelő várakozósorok(oszlopokkal elválasztva, ":"), ahol a felhasználó job küldésekre jogosult. Ha a felhasználó nem jogosult, akkor a következő üzenet fog megjelenni: "Nincs megfelelő jogosultsága ennél a hosztnál". Kattintson a klaszter nevére, hogy egy részletesebb leírást kapjon a klaszterről. Kattintson a várakozósor nevére, hogy egy részletesebb leírást kapjon a várakozósorról.
    Szabad CPU-k
    Ebben a pillanatban a felhasználó számára elérhető szabad CPU-k száma, az adott várakozósorban, feltételesen ki van egészítve a felső idő korláttal (ez percben értendő). Például a "3" azt jelenti, hogy 3 CPU használható fel a job számára korlátlan ideig; "4:360" ez azt mutatja, hogy 4 CPU-t tud felhasználni a job, de csak 6 órán keresztül; "10:180 30" ez azt jelenti, hogy 10 CPU áll a job-ok rendelkezésére 3 órán keresztül, és ezen kívül 30 CPU van még pluszba korlátlan időre; "0" ez azt jelenti, hogy nem áll rendelkezésre CPU, ebben a pillanatban, és ekkor a job-ok várakozólistára kerülnek.
    Várakosósorban elhelyezett job-ok
    A felhasználó azon job-jainak a száma, amiknek várhatóan várakoznia kell egy újonnan elküldött job előtt a várakozási sorban. A "0" száma azt jelenti, hogy a job remélhetőleg azonnal lefuthat. Ez csupán egy becslés, amit a helyi irányelvek felülbírálhatnak.
    Szabad lemez terület (MB)
    A felhasználó számára elérhető szabad lemezterület egy adott várakozósorban (MB). Ez csupán egy értékelés, a legtöbb klaszter nem nyújt fix lemez kvótákat.
    Job név
    ".$clickable.". ".$str_job."
    Állapot
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Klaszter
    ".$clickable.". A klaszter neve, amelyben a feladat éppen fut. Kattintson a klaszter nevére, hogy egy részletesebb leírást kapjon.
    Várakozósor
    ".$clickable.". Azon várakozósor neve, amiben a job lefuttot, vagy le fog futni. ".$str_que."
    CPU-k
    ".$str_cpu."
    ", "" => 0, "Job név" => 0, "Ãllapot" => 0, "CPU (min)" => 0, "Klaszter" => 0, "Várakozósor" => 0, "CPU-k" => 0 ), "attlist" => array("0" => "Attribútum értékek", "help" => "
    Objektum
    ".$clickable.". Az objektumok neve, ezek lesznek megjelenítve. Ez lehet klaszter név, egy klaszter várakozósorának a neve, egy job név, egy felhasználói név stb. Kattintson a szövegre, hogy egy részletesebb leírást kapjon az objektumról.
    Attribútumok
    Minden objektum számára egy vagy több attribútum értéket lehet kilistáztatni. Az oszlop címe egy emberi olvasásra szánt név (kivéve néhány MDS specifikus attribútumot), az oszlopok attribútum értékeket tartalmaznak az adott objektumról, ahogy az az információs rendszerbe be lett írva .
    ", "Objektum" => 0, "Attribútum" => 0 ), "quelist" => array("0" => "Várakozósor", "help" => "
    Attribútum
    ".$clickable.". Egy várakozósor attribútumának a neve".$str_att."
    Érték
    ".$str_val."
    Job név
    ".$clickable.". ".$str_job."
    Tulajdonos
    ".$clickable.". ".$str_nam."
    Állapot
    ".$str_sta."
    CPU (min)
    ".$str_tim."
    Memória (KB)
    ".$str_mem."
    CPU-k
    ".$str_cpu."
    ", "" => 0, "Job név" => 0, "Tulajdonos" => 0, "Ãllapot" => 0, "CPU (min)" => 0, "Memória (KB)" => 0, "CPU-k" => 0 ), "sestat" => array("0" => "Adattároló elemek", "help" => "
    Alias
    Az adattároló elem álneve az információs rendszerben van meghatározva. Maximális megjeleníthető hosszúság: 15 karakter
    Összes lemezterület
    Összes lemezterület (GB).
    Szabad terület
    Pillanatnyilag ennyi szabad terület van, (GB)
    Név
    Adattároló elem neve, egy logikai névből és egy hoszt névből áll (egy oszloppal van elválasztva, ":"). A logikai nevet az információs rendszer használja azért, hogy megkülönböztesse a különbözö adattároló elemeket ugyanazon a gépen.
    Alap URL
    Az adattároló elem URL-je tipikusan egy gsiftp:// protokoll. Ezt használja alapból ahhoz, hogy elérje a fájlokat.
    Típus
    Az adattároló elem típusa. "gridftp-based" jelzi a GridFTP interfészen keresztül az adattároló lemezt.
    ", "#" => 0, "Alias" => 0, "Szabad/összes hely, GB" => 0, "Név" => 0, "Alap URL" => 0, "Típus" => 0 ), "allusers" => array("0" => "Megbízható Grid felhasználók:Aktív Grid felhasználók", "help" => "
    Név
    ".$clickable.". ".$str_nam."
    Kapcsolat
    A felhasználó kapcsolódás-rendszere, amely a személyes tanúsítványából származik.
    Job-ok
    Az összes felhasználó rendszerben lévő job-jainak a száma (futás, függőben lévő, befejezett vagy törölt)
    Site-ok
    Megmutatja, hogy hány darab site engedélyezi ezt a felhasználót
    ", "#" => 0, "Név" => 0, "Kapcsolatok" => 0, "Job-ok" => 0, "Site-ok" => 0 ), "userres" => array("0" => "", "Klaszter:várakozósor" => 0, "Szabad CPU-k" => 0, "Várakozólistára helyezett job-ok" => 0, "Szabad lemezterület (MB)" => 0 ), "ldapdump" => array("0" => "", "Attribútum" => 0, "Érték" => 0 ), // IS attributes "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "érvényes információ tÅ‘le (GMT)", "Mds-validto" => "érvényes információ neki (GMT)" ), "isattr" => array( "nordugrid-cluster-name" => "Front-end tartomány név", "nordugrid-cluster-aliasname" => "Klaszter alias", "nordugrid-cluster-contactstring" => "ElérhetÅ‘ségi szöveg", "nordugrid-cluster-interactive-contactstring" => "Interaktív elérhetÅ‘ség", "nordugrid-cluster-comment" => "Megjegyzés", "nordugrid-cluster-support" => "E-mail cím", "nordugrid-cluster-acl" => "Engedélyezett VO-k", "nordugrid-cluster-lrms-type" => "LRMS típus", "nordugrid-cluster-lrms-version" => "LRMS verzió", "nordugrid-cluster-lrms-config" => "LRMS részletek", "nordugrid-cluster-architecture" => "Architektúra", "nordugrid-cluster-opsys" => "Operációs rendszer", "nordugrid-cluster-homogeneity" => "Homogén klaszter", "nordugrid-cluster-nodecpu" => "CPU típus (leglassabb)", "nordugrid-cluster-nodememory" => "Memória (MB, legkisebb)", "nordugrid-cluster-totalcpus" => "CPU-k, összesen", "nordugrid-cluster-cpudistribution" => "CPU:gépek", "nordugrid-cluster-benchmark" => "Teljesítmény értékelés", "nordugrid-cluster-sessiondir-free" => "Lemez terület, elérhetÅ‘ (MB)", "nordugrid-cluster-sessiondir-total" => "Lemez terület, összesen (MB)", "nordugrid-cluster-sessiondir-lifetime"=> "Grid session élettartam (min)", "nordugrid-cluster-cache-free" => "Cache méret, elérhetÅ‘ (MB)", "nordugrid-cluster-cache-total" => "Cache méret, összesen (MB)", "nordugrid-cluster-runtimeenvironment" => "Futásidejű környezet", "nordugrid-cluster-localse" => "Adattárolási elem, helyi", "nordugrid-cluster-middleware" => "Grid köztesréteg", "nordugrid-cluster-totaljobs" => "Job-ok, teljes összeg", "nordugrid-cluster-usedcpus" => "CPU-k, foglalt", "nordugrid-cluster-queuedjobs" => "Job-ok, várakozólistás", "nordugrid-cluster-prelrmsqueued" => "Grid job-ok, várakozó részfeladat", "nordugrid-cluster-location" => "Irányítószám", "nordugrid-cluster-owner" => "Tulajdonos", "nordugrid-cluster-issuerca" => "Tanúsítvány kibocsájtó", "nordugrid-cluster-issuerca-hash" => "Tanúsítvány kibocsájtó-s hash", "nordugrid-cluster-trustedca" => "Megbízható tanúsítvány kibocsájtók", "nordugrid-cluster-nodeaccess" => "Csomópont IP összekapcsolhatóság", "nordugrid-cluster-gridarea" => "Session terület (ELAVULT)", "nordugrid-cluster-gridspace" => "Grid lemez terület (ELAVULT)", "nordugrid-cluster-opsysdistribution" => "OS disztribúció (ELAVULT)", "nordugrid-cluster-runningjobs" => "Job-ok, futás (ELAVULT)", "nordugrid-cluster-credentialexpirationtime" => "Credential expiration time", "nordugrid-queue-name" => "A várakozósor neve", "nordugrid-queue-comment" => "Megjegyzés", "nordugrid-queue-status" => "A várakozósor állapota", "nordugrid-queue-running" => "CPU-k, foglalt", "nordugrid-queue-localqueued" => "Helyi job-ok, várakozólistás", "nordugrid-queue-prelrmsqueued" => "Grid job-ok, várakozó részfeladat", "nordugrid-queue-queued" => "Job-ok, várakozólistás (ELAVULT)", "nordugrid-queue-maxrunning" => "Job-ok, futás(max)", "nordugrid-queue-maxqueuable" => "Job-ok, várakozólistába tehetÅ‘ (max)", "nordugrid-queue-maxuserrun" => "Unix felhasználókénti job-ok (max)", 
"nordugrid-queue-maxcputime" => "CPU idÅ‘, max. (perc)", "nordugrid-queue-mincputime" => "CPU idÅ‘, min. (perc)", "nordugrid-queue-defaultcputime" => "CPU idÅ‘, alap. (perc)", "nordugrid-queue-maxwalltime" => ""Wall clock" idÅ‘, max. (perc)", "nordugrid-queue-minwalltime" => ""Wall clock" idÅ‘, min. (perc)", "nordugrid-queue-defaultwalltime" => ""Wall clock" idÅ‘, alap. (perc)", "nordugrid-queue-schedulingpolicy" => "Ütemezési politika", "nordugrid-queue-totalcpus" => "CPU-k, összesen", "nordugrid-queue-nodecpu" => "CPU típusa", "nordugrid-queue-nodememory" => "Memória (MB)", "nordugrid-queue-architecture" => "Architektúra", "nordugrid-queue-opsys" => "Operációs rendszer", "nordugrid-queue-homogeneity" => "Homogén várakozósor", "nordugrid-queue-gridrunning" => "CPUs occupied by Grid jobs", "nordugrid-queue-gridqueued" => "Grid job-ok, várakozólistás", "nordugrid-queue-benchmark" => "Teljesítmény értékelés", "nordugrid-queue-assignedcpunumber" => "Várakozósoronkénti CPU-k (ELAVULT)", "nordugrid-queue-assignedcputype" => "CPU típus (ELAVULT)", "nordugrid-job-globalid" => "ID", "nordugrid-job-globalowner" => "Tulajdonos", "nordugrid-job-execcluster" => "Végrehajtási klaszter", "nordugrid-job-execqueue" => "Végrehajtási várakozósor", "nordugrid-job-stdout" => "Szabványos kimeneti fájl", "nordugrid-job-stderr" => "Szabványos hiba fájl", "nordugrid-job-stdin" => "Szabványos bemeneti fájl", "nordugrid-job-reqcputime" => "Kért CPU idÅ‘", "nordugrid-job-reqwalltime" => "Kért $quot;wall clock$quot; idÅ‘", "nordugrid-job-status" => "Ãllapot", "nordugrid-job-queuerank" => "A városkozási sorban lévÅ‘ pozíciója", "nordugrid-job-comment" => "LRMS megjegyzés", "nordugrid-job-submissionui" => "Submission machine", "nordugrid-job-submissiontime" => "Részfeladat idÅ‘ (GMT)", "nordugrid-job-usedcputime" => "Felhasznált CPU idÅ‘", "nordugrid-job-usedwalltime" => "Felhasznált "wall clock" idÅ‘", "nordugrid-job-completiontime" => "Elkészítési idÅ‘ (GMT)", "nordugrid-job-sessiondirerasetime" => "Törlési idÅ‘ (GMT)", "nordugrid-job-proxyexpirationtime" => "Proxy lejárati idÅ‘ (GMT)", "nordugrid-job-usedmem" => "Felhasznált memória (KB)", "nordugrid-job-errors" => "Hibák", "nordugrid-job-exitcode" => "Kilépési kód", "nordugrid-job-jobname" => "Név", "nordugrid-job-runtimeenvironment" => "Futásidejű környezet", "nordugrid-job-cpucount" => "Kért CPU-k", "nordugrid-job-executionnodes" => "Végrehajtási csomópontok", "nordugrid-job-gmlog" => "GM napló fájl", "nordugrid-job-clientsoftware" => "Kliens verzió", "nordugrid-job-rerunable" => "újra futtatható", "nordugrid-job-reqcput" => "Kért idÅ‘ (ELAVULT)", "nordugrid-job-gridlog" => "Grid napló fájl (ELAVULT)", "nordugrid-job-lrmscomment" => "LRMS megjegyzés (ELAVULT)", "nordugrid-authuser-name" => "Név", "nordugrid-authuser-sn" => "Téma neve", "nordugrid-authuser-freecpus" => "Szabad CPU-k", "nordugrid-authuser-diskspace" => "Szabad hely (MB)", "nordugrid-authuser-queuelength" => "A felhasználó várakozósoros job-jai", "nordugrid-se-name" => "Név", "nordugrid-se-aliasname" => "Az adattároló elem álneve", "nordugrid-se-type" => "Az adattároló elem típusa", "nordugrid-se-acl" => "Engedélyezett VO-k", "nordugrid-se-freespace" => "Szabad hely (MB)", "nordugrid-se-totalspace" => "Összes lemezterület (MB)", "nordugrid-se-url" => "URL elérhetÅ‘ség", "nordugrid-se-baseurl" => "URL elérhetÅ‘ség (ELAVULT)", "nordugrid-se-accesscontrol" => "Hozzáférés ellenÅ‘rzése", "nordugrid-se-authuser" => "Engedélyezett felhasználó (DN)", "nordugrid-se-location" => "Irányítószám", 
"nordugrid-se-owner" => "Tulajdonos", "nordugrid-se-middleware" => "Köztesréteg", "nordugrid-se-issuerca" => "Tanúsítvány kibocsátó", "nordugrid-se-issuerca-hash" => "Tanúsítvány kibocsátó hash-e", "nordugrid-se-trustedca" => "Megbízható tanúsítvány kibocsájtók", "nordugrid-se-comment" => "Megjegyzés", "nordugrid-rc-name" => "Tartomány név", "nordugrid-rc-aliasname" => "Replika katalógus alias", "nordugrid-rc-baseurl" => "URL elérhetÅ‘ség", "nordugrid-rc-authuser" => "Engedélyezett felhasználó (DN)", "nordugrid-rc-location" => "Irányítószám", "nordugrid-rc-owner" => "Tulajdonos", "nordugrid-rc-issuerca" => "Tanúsítvány kibocsátó" ), // Errors, warnings etc "errors" => array( // failure notices "1" => "Nem tudom olvasni a top level GIS és EMIR szerverek index szolgáltatásait", "2" => "Egyik helyi index sem jelzett vissza kapcsolatot", "3" => " rossz konfiguráció, vagy kérési idÅ‘túllépés", "4" => "Grid-es job nem található", "5" => "Nincs információ", "6" => "A szervert nem lehet elérni", "7" => " - frissítés késÅ‘bb", "8" => "Nincs információ a várakozási sorról", "9" => "Nem található bejegyzés", "10" => "Nincs felhasználó", "11" => "Nincs megfelelÅ‘ jogosultsága ennél a hosztnál ", "12" => "nincs válasz", "13" => "Nincsenek nemrégi feladatok ", // debug messages "101" => " Monitorozási idÅ‘túllépések a GRIS esetén: ", "102" => " mp kapcsolódáskor és ", "103" => " mp kereséskor", "104" => " mp (keresésre szánt idÅ‘)", "105" => "Az erÅ‘forrásokat csupán a következÅ‘ben mutatja meg ", "106" => "Lekérdezett felsÅ‘szintű indexek: ", "107" => "Kapott földrajzi helyek, átvizsgált site-ok: ", "108" => " site-ok intézése földrajzi helyek szerint", "109" => "Klaszter attribútumok keresése", "110" => "A várakozási sor attribútumainak a keresése", "111" => "Nincs adat errÅ‘l ", "112" => " működÅ‘képes ", "113" => " nincs erÅ‘forrása, amit felkínálhat", "114" => " Monitorozási idÅ‘túllépések a GIIS esetén: ", "115" => "GRIS kihagyása: ", "116" => "nem egy ", "117" => "Kapcsolat ellenÅ‘rzése: ", "118" => "OK", "119" => "Eddig, ebbÅ‘l a fajta erÅ‘forrásból ", "120" => "LDAP hiba keresése ", "121" => " állapot ", "122" => "Fekete listára került: ", "123" => "Regisztálót találtam a következÅ‘ számára ", "124" => "SE-s attribútumok keresése", "125" => "Felhasználók keresése", "126" => "Jobok keresése", "127" => " van job-ja ", "128" => " amíg nincsen engedélyezve ", "129" => "Nem lehet elérni az objektum adatait: hiba ", "130" => "Monitorozási idÅ‘túllépések az EMIR esetén: ", // icon titles "301" => "Frissítés", "302" => "Nyomtatás", "303" => "Súgó", "304" => "Bezár", "305" => "Piros", "306" => "Szürke", "307" => "Minden felhasználó", "308" => "Aktív felhasználók", "309" => "Keresés", "310" => "Adattároló", "311" => "VO-k", "312" => "Zászlaja ", "313" => " Grid-es feldolgozás és ", "314" => " helyi feldolgozás", // auxiliary strings "401" => "Feldolgoz", "402" => "Grid", "403" => "Helyi", "404" => "Világ", "405" => "TELJES", "406" => " site-ok", "407" => "rengeteg", "408" => " GB", "409" => " MIND", "410" => "Klaszter", "411" => "Várakozási sor", "412" => "Job", "413" => "Felhasználó", "414" => "Adattároló", "415" => "Replika katalógus", "416" => "Attribútumok megadása az objektum megjelenítése miatt: ", "417" => "Minden kifejezés összevetésre fog kerülni", "418" => "A jobb szélsÅ‘ mezÅ‘t hagyja üresen azért, hogy mindent lásson", "419" => "A kiválasztott erÅ‘források vagy objektumok megjelenítése", "420" => "MegkülönböztetÅ‘ név", "421" => "Összesen használni tud ", "422" => " site-ot", "423" => 
"ErÅ‘forrás / objektum:", "424" => "Attribútumok száma (alap.: 6):", "425" => "Objektum", "426" => "KövetkezÅ‘", "427" => "Válassz ki egyet", "428" => "Törlés", "429" => "Mutat" ), // Post code conversion "tlconvert" => array ( "Australia" => "Ausztrália", "Austria" => "Ausztria", "Armenia" => "Örményország", "Algeria" => "Algéria", "Belgium" => "Belgium", "Bulgaria" => "Bulgária", "Canada" => "Kanada", "China" => "Kína", "Czechia" => "Cseszlovákia", "Denmark" => "Dánia", "Estonia" => "észtország", "Finland" => "Finnország", "France" => "Franciaország", "Georgia" => "GrúÉzia", "Germany" => "Németország", "Greece" => "Görögország", "Hungary" => "Magyarország", "Iceland" => "Izland", "Ireland" => "írország", "Italy" => "Olaszország", "Japan" => "Japán", "Latvia" => "Lettország", "Lithuania" => "Litvánia", "Morocco" => "Marokkó", "Netherlands" => "Hollandia", "Norway" => "Norvégia", "Poland" => "Lengyelország", "Portugal" => "Portugália", "Romania" => "Románia", "Russia" => "Oroszország", "SriLanka" => "Sri Lanka", "Sweden" => "Svédország", "Slovakia" => "Szlovákia", "Slovenia" => "Szlovénia", "Switzerland" => "Svájc", "Turkey" => "Törökország", "UK" => "UK", "Ukraine" => "Ukrajna", "USA" => "USA" ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/allusers.php0000644000000000000000000000013214152153376024010 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.490578829 nordugrid-arc-6.14.0/src/services/monitor/allusers.php0000644000175000002070000001426514152153376024005 0ustar00mockbuildmock00000000000000title; $module = &$toppage->module; $strings = &$toppage->strings; $errors = &$toppage->errors; $giislist = &$toppage->giislist; $yazyk = &$toppage->language; // Array defining the attributes to be returned $lim = array( "dn", USR_USSN ); /* need only SN per each user */ $jlim = array( "dn", JOB_GOWN ); /* Job owner only is needed */ if ( $debug ) { ob_end_flush(); ob_implicit_flush(); } $tlim = 20; $tout = 20; if( $debug ) dbgmsg("
    :::> ".$errors["114"].$tlim.$errors["102"].$tout.$errors["103"]." <:::

    "); // Header table $titles = explode(":",$toptitle); // two alternative titles, separated by column $gtitle = $titles[0]; if ( $jobnum ) $gtitle = $titles[1]; $toppage->tabletop($gtitle,""); $family = cnvname($ussn); // ldapsearch filter string for jobs $filter = "(objectclass=".OBJ_USER.")"; /* Find all users */ $jfilter = "(objectclass=".OBJ_AJOB.")"; /* Find all jobs */ $gentries = recursive_giis_info($giislist,"cluster",$errors,$debug); $nc = count($gentries); if ( !$nc ) { // NO SITES FOUND! $errno = "1"; echo "
    ".$errors[$errno]."\n"; return $errno; } $dsarray = array (); $hnarray = array (); $sitetag = array (); /* a tag to skip duplicated entries */ for ( $k = 0; $k < $nc; $k++ ) { $clhost = $gentries[$k]["host"]; $clport = $gentries[$k]["port"]; $ldapuri = "ldap://".$clhost.":".$clport; $clconn = ldap_connect($ldapuri); if ( $clconn && !$sitetag[$clhost] ) { array_push($dsarray,$clconn); array_push($hnarray,$clhost); $sitetag[$clhost] = 1; /* filtering tag */ } } $nhosts = count($dsarray); if ( !$nhosts ) { // NO SITES REPLY... $errno = "2"; echo "
    ".$errors[$errno]."\n"; return $errno; } // Search all clusters for users $uiarray = array(); $ts1 = time(); $uiarray = @ldap_search($dsarray,DN_LOCAL,$filter,$lim,0,0,$tlim,LDAP_DEREF_NEVER); $ts2 = time(); if($debug) dbgmsg("
    ".$errors["125"]." (".($ts2-$ts1).$errors["104"].")
    "); // Search all clusters for jobs $jiarray = array(); $ts1 = time(); $jiarray = @ldap_search($dsarray,DN_LOCAL,$jfilter,$jlim,0,0,$tlim,LDAP_DEREF_NEVER); $ts2 = time(); if($debug) dbgmsg("
    ".$errors["126"]." (".($ts2-$ts1).$errors["104"].")
    "); // Loop on clusters; building user list $usrlist = array (); for ( $ids = 0; $ids < $nhosts; $ids++ ) { $ui = array (); $ui = $uiarray[$ids]; $ji = array (); $ji = $jiarray[$ids]; $dst = array (); $dst = $dsarray[$ids]; if ($dst && $ui) { $nusers = @ldap_count_entries($dst,$ui); $njobs = @ldap_count_entries($dst,$ji); if ($nusers > 0 || $njobs > 0) { // If there are valid entries, tabulate results $allres = array(); $allres = @ldap_get_entries($dst,$ui); $results = ldap_purge($allres,USR_USSN,$debug); $alljobs = array(); $alljobs = @ldap_get_entries($dst,$ji); // $nusers = $allres["count"]; $nusers = $results["count"]; $njobs = $alljobs["count"]; // loop on users, filling $usrlist[$ussn]["name"] and counting $usrlist[$ussn]["hosts"] for ($j=0; $j<$nusers; $j++) { // $ussn = $allres[$j][USR_USSN][0]; $ussn = $results[$j][USR_USSN][0]; $family = cnvname($ussn, 2); if ( $family == "host" || strlen($family) < 2 ) continue; $ussn = trim($ussn); $ussn = addslashes($ussn); // In case $ussn contains escape characters if ( !$usrlist[$ussn] ) { $usrlist[$ussn]["name"] = $family; $usrlist[$ussn]["org"] = getorg($ussn); $usrlist[$ussn]["jobs"] = 0; $usrlist[$ussn]["hosts"] = 0; } $usrlist[$ussn]["hosts"]++; } // loop on jobs, filling $usrlist[$jown]["jobs"] for ($k=0; $k<$njobs; $k++) { $jdn = $alljobs[$k]["dn"]; $jown = $alljobs[$k][JOB_GOWN][0]; $family = cnvname($jown, 2); if ( $family == "host" || strlen($family) < 2 ) continue; $jown = addslashes($jown); // In case $jown contains escape characters if ( !$usrlist[$jown] ) { // Shouldn't be happening, but... $usrlist[$jown]["name"] = $family; $usrlist[$jown]["org"] = getorg($jown); $usrlist[$jown]["jobs"] = 0; if( $debug == 2 ) dbgmsg("$family".$errors["127"]."$jdn".$errors["128"]."
    "); } $usrlist[$jown]["jobs"]++; } } } } uasort($usrlist,"hncmp"); // HTML table initialisation $utable = new LmTableSp($module,$toppage->$module); $urowcont = array(); if ( $debug ) { ob_end_flush(); ob_implicit_flush(); } $count = 0; foreach ( $usrlist as $ussn => $data ) { // if ( $count > 9 ) continue; $name = $data["name"]; $org = $data["org"]; $nhosts = 0; $nhosts = $data["hosts"]; $jcount = 0; $jcount = $data["jobs"]; if ( $jcount < $jobnum ) continue; /* In case list only those with jobs */ $count++; $encuname = rawurlencode($ussn); $usrwin = popup("userlist.php?owner=$encuname",700,500,5,$lang,$debug); $urowcont[] = $count; $urowcont[] = "$name"; $urowcont[] = $org; $urowcont[] = $jcount; $urowcont[] = $nhosts; $utable->addrow($urowcont); $urowcont = array(); } $utable->close(); return 0; // Done $toppage->close(); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/man0000644000000000000000000000013214152153474022142 xustar000000000000000030 mtime=1638455100.521579295 30 atime=1638455103.997631524 30 ctime=1638455100.521579295 nordugrid-arc-6.14.0/src/services/monitor/man/0000755000175000002070000000000014152153474022204 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/man/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376024253 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 29 ctime=1638455100.52057928 nordugrid-arc-6.14.0/src/services/monitor/man/Makefile.am0000644000175000002070000000002514152153376024236 0ustar00mockbuildmock00000000000000man_MANS = monitor.7 nordugrid-arc-6.14.0/src/services/monitor/man/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435024261 xustar000000000000000030 mtime=1638455069.877118847 30 atime=1638455091.060437137 30 ctime=1638455100.519579265 nordugrid-arc-6.14.0/src/services/monitor/man/Makefile.in0000644000175000002070000005304514152153435024255 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/monitor/man DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/monitor.7.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = monitor.7 CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list 
= $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } man7dir = $(mandir)/man7 am__installdirs = "$(DESTDIR)$(man7dir)" NROFF = nroff MANS = $(man_MANS) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = 
@GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ 
PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ man_MANS = monitor.7 all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/monitor/man/Makefile'; \ $(am__cd) 
$(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/monitor/man/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): monitor.7: $(top_builddir)/config.status $(srcdir)/monitor.7.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man7: $(man_MANS) @$(NORMAL_INSTALL) @list1=''; \ list2='$(man_MANS)'; \ test -n "$(man7dir)" \ && test -n "`echo $$list1$$list2`" \ || exit 0; \ echo " $(MKDIR_P) '$(DESTDIR)$(man7dir)'"; \ $(MKDIR_P) "$(DESTDIR)$(man7dir)" || exit 1; \ { for i in $$list1; do echo "$$i"; done; \ if test -n "$$list2"; then \ for i in $$list2; do echo "$$i"; done \ | sed -n '/\.7[a-z]*$$/p'; \ fi; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^7][0-9a-z]*$$,7,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man7dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man7dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man7dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man7dir)" || exit $$?; }; \ done; } uninstall-man7: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man7dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.7[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^7][0-9a-z]*$$,7,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ dir='$(DESTDIR)$(man7dir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(MANS) installdirs: for dir in "$(DESTDIR)$(man7dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man7 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-man uninstall-man: uninstall-man7 .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-man7 install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags-am uninstall uninstall-am uninstall-man \ uninstall-man7 # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/monitor/man/PaxHeaders.30264/monitor.7.in0000644000000000000000000000013214152153376024404 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 30 ctime=1638455100.521579295 nordugrid-arc-6.14.0/src/services/monitor/man/monitor.7.in0000644000175000002070000000367414152153376024403 0ustar00mockbuildmock00000000000000.TH monitor 7 "2003-03-03" "NorduGrid @VERSION@" "NorduGrid Toolkit" .SH NAME monitor \- Real-time NorduGrid monitoring tool .SH DESCRIPTION .B "LDAP Grid Monitor" is a set of .B PHP and .B Java scripts, providing a Web interface to the .B NorduGrid Information System. Should be working for any similar .B LDAP based service. .SH REQUIREMENTS .IP "LDAP library" e.g., http://www.openldap.org .IP "GD library" http://www.boutell.com/gd .IP "PHP4 library" http://www.php.net, must be compiled with LDAP and GD extensions .IP "HTTP server" must be compiled with PHP4 .IP "Globus MDS" http://www.globus.org/mds, or a similar .B LDAP based service .IP "Virtual Organisation" Is optional .SH INSTALLATION Copy all the files in a folder, accessible by the HTTP server. Modify .I settings.inc according to your MDS structure and liking. Run the whole stuff by loading .I loadmon.php into your favorite browser. .SH FILES .I loadmon.php .RS To monitor several servers at once, add hosts and DNs to the .IR $arrhost and, correspondingly, .IR $arrbdn arrays in .I loadmon.php .RE .I isattr.inc .I cnvname.inc .I cnvalias.inc .RS Making output more human-readable: modify .IR isattr.inc, .IR cnvname.inc, .IR cnvalias.inc. Otherwise, these files are not needed. .RE .I blacklist.inc .RS To prevent sites from being polled, modify array entries in .IR blacklist.inc. Otherwise, the file is not needed. .RE .I vo-users.php .RS Not needed when working without a Virtual Organisation. In such a case, remove the corresponding link from .I loadmon.php . .RE .I jobstat.php .RS When working without the .B NorduGrid Information System: to make sure that the job status is defined properly, edit .I jobstat.php (look for .B adjustment instructions in the code). .SH AUTHOR Oxana Smirnova .SH "SEE ALSO" .BR ngsub (1), .BR ngstat (1), .BR ngdel (1), .BR ngget (1), .BR ngsync (1), .BR ngcopy (1), .BR ngremove (1) nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435023506 xustar000000000000000030 mtime=1638455069.733116684 30 atime=1638455091.024436596 30 ctime=1638455100.486578769 nordugrid-arc-6.14.0/src/services/monitor/Makefile.in0000644000175000002070000007040014152153435023474 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/monitor DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/monitor.in $(srcdir)/README.in $(dist_monitor_DATA) ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = monitor README CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) 
>/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(monitordir)" "$(DESTDIR)$(monitordir)" DATA = $(dist_monitor_DATA) $(monitor_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ 
EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ 
PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ SUBDIRS = man mon-icons lang includes monitordir = @monitor_prefix@ dist_monitor_DATA = $(srcdir)/*.php $(srcdir)/*.js monitor_DATA = README all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) 
am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/monitor/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/monitor/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): monitor: $(top_builddir)/config.status $(srcdir)/monitor.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ README: $(top_builddir)/config.status $(srcdir)/README.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-dist_monitorDATA: $(dist_monitor_DATA) @$(NORMAL_INSTALL) @list='$(dist_monitor_DATA)'; test -n "$(monitordir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(monitordir)'"; \ $(MKDIR_P) "$(DESTDIR)$(monitordir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(monitordir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(monitordir)" || exit $$?; \ done uninstall-dist_monitorDATA: @$(NORMAL_UNINSTALL) @list='$(dist_monitor_DATA)'; test -n "$(monitordir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(monitordir)'; $(am__uninstall_files_from_dir) install-monitorDATA: $(monitor_DATA) @$(NORMAL_INSTALL) @list='$(monitor_DATA)'; test -n "$(monitordir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(monitordir)'"; \ $(MKDIR_P) "$(DESTDIR)$(monitordir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(monitordir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(monitordir)" || exit $$?; \ done uninstall-monitorDATA: @$(NORMAL_UNINSTALL) @list='$(monitor_DATA)'; test -n "$(monitordir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(monitordir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. 
$(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! -f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(DATA) installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(monitordir)" "$(DESTDIR)$(monitordir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-data-local install-dist_monitorDATA \ install-monitorDATA install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-dist_monitorDATA uninstall-monitorDATA .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am \ install-data-local install-dist_monitorDATA install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-monitorDATA install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am uninstall-dist_monitorDATA uninstall-monitorDATA install-data-local: $(MKDIR_P) $(DESTDIR)$(monitordir)/cache # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/help.php0000644000000000000000000000013214152153376023106 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.493578874 nordugrid-arc-6.14.0/src/services/monitor/help.php0000644000175000002070000000072614152153376023100 0ustar00mockbuildmock00000000000000$module; $helptext = $data["help"]; echo $helptext; // Done $toppage->close(); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/volist.php0000644000000000000000000000013214152153376023476 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.499578964 nordugrid-arc-6.14.0/src/services/monitor/volist.php0000644000175000002070000001444614152153376023474 0ustar00mockbuildmock00000000000000title; $module = &$toppage->module; $strings = &$toppage->strings; $errors = &$toppage->errors; // Header table $toppage->tabletop("".$toptitle."

    "); // The main function $vos = array ( array ( "name" => "NorduGrid members", "server" => "grid-vo.nordugrid.org", "port" => "389", "dn" => "ou=people,dc=nordugrid,dc=org" ), array ( "name" => "NorduGrid guests", "server" => "https://www.pdc.kth.se/grid/swegrid-vo", "port" => "", "dn" => "" ), array ( "name" => "NorduGrid developers", "server" => "http://www.nordugrid.org", "port" => "", "dn" => "", "group" => "developers.dn" ), array ( "name" => "NorduGrid tutorials", "server" => "grid-vo.nordugrid.org", "port" => "389", "dn" => "ou=tutorial,dc=nordugrid,dc=org" ), array ( "name" => "ATLAS test users (SWEGRID)", "server" => "https://www.pdc.kth.se", "port" => "", "dn" => "", "group" => "grid/swegrid-vo/vo.atlas-testusers-vo" ), /* array ( "name" => "NorduGrid services", "server" => "grid-vo.nordugrid.org", "port" => "389", "dn" => "ou=services,dc=nordugrid,dc=org" ), */ array ( "name" => "BaBar", "server" => "babar-vo.gridpp.ac.uk", "port" => "389", "dn" => "ou=babar,dc=gridpp,dc=ac,dc=uk" ), array ( "name" => "EDG ALICE", "server" => "grid-vo.nikhef.nl", "port" => "389", "dn" => "o=alice,dc=eu-datagrid,dc=org" ), array ( "name" => "EDG ATLAS", "server" => "grid-vo.nikhef.nl", "port" => "389", "dn" => "o=atlas,dc=eu-datagrid,dc=org" ), array ( "name" => "LCG ATLAS", "server" => "grid-vo.nikhef.nl", "port" => "389", "dn" => "o=atlas,dc=eu-datagrid,dc=org", "group" => "ou=lcg1" ), array ( "name" => "EDG CMS", "server" => "grid-vo.nikhef.nl", "port" => "389", "dn" => "o=cms,dc=eu-datagrid,dc=org" ), array ( "name" => "EDG LHC-B", "server" => "grid-vo.nikhef.nl", "port" => "389", "dn" => "o=lhcb,dc=eu-datagrid,dc=org" ), array ( "name" => "EDG D0", "server" => "grid-vo.nikhef.nl", "port" => "389", "dn" => "o=dzero,dc=eu-datagrid,dc=org", "group" => "ou=testbed1" ), array ( "name" => "EDG Earth Observation", "server" => "grid-vo.nikhef.nl", "port" => "389", "dn" => "o=earthob,dc=eu-datagrid,dc=org" ), array ( "name" => "EDG Genomics", "server" => "grid-vo.nikhef.nl", "port" => "389", "dn" => "o=biomedical,dc=eu-datagrid,dc=org", "group" => "ou=genomics" ), array ( "name" => "EDG Medical Imaging", "server" => "grid-vo.nikhef.nl", "port" => "389", "dn" => "o=biomedical,dc=eu-datagrid,dc=org", "group" => "ou=medical imaging" ), array ( "name" => "EDG ITeam", "server" => "marianne.in2p3.fr", "port" => "389", "dn" => "o=testbed,dc=eu-datagrid,dc=org", "group" => "ou=ITeam" ), array ( "name" => "EDG TSTG", "server" => "marianne.in2p3.fr", "port" => "389", "dn" => "o=testbed,dc=eu-datagrid,dc=org", "group" => "ou=TSTG" ), array ( "name" => "EDG Tutorials", "server" => "marianne.in2p3.fr", "port" => "389", "dn" => "o=testbed,dc=eu-datagrid,dc=org", "group" => "ou=EDGtutorial" ), array ( "name" => "EDG WP6", "server" => "marianne.in2p3.fr", "port" => "389", "dn" => "o=testbed,dc=eu-datagrid,dc=org", "group" => "ou=wp6" ) ); $votable = new LmTableSp($module,$toppage->$module); $rowcont = array (); foreach ( $vos as $contact ) { $server = $contact["server"]; $port = $contact["port"]; $dn = $contact["dn"]; $group = ""; if ( !empty($contact["group"]) ) $group = $contact["group"]; $nusers = 0; if ( $dn ) { // open ldap connection $ldapuri = "ldap://".$server.":".$port; $ds = ldap_connect($ldapuri); if ($ds) { if ( $group ) { $newfilter = "(objectclass=*)"; $newdn = $group.",".$dn; $newlim = array("dn","member"); $sr = @ldap_search($ds,$newdn,$newfilter,$newlim,0,0,10,LDAP_DEREF_NEVER); if ($sr) { $groupdesc = @ldap_get_entries($ds,$sr); $nusers = $groupdesc[0]["member"]["count"]; } } else { $sr = 
@ldap_search($ds,$dn,"(objectclass=organizationalPerson)",array("dn"),0,0,10,LDAP_DEREF_NEVER); if ($sr) $nusers = @ldap_count_entries($ds,$sr); } } $vostring = popup("vo-users.php?host=$server&port=$port&vo=$dn&group=$group",750,300,6,$lang,$debug); } else { $url = $server."/".$group; $users = file($url); $nusers = 0; if ( !empty($users) ) $nusers = count($users); $vostring = popup($url,750,300,6,$lang,$debug); } $rowcont[] = "".$contact["name"].""; $rowcont[] = $nusers; $rowcont[] = $server; $votable->addrow($rowcont); $rowcont = array (); } $votable->close(); $toppage->close(); /* group http://www.nbi.dk/~waananen/ngssc2003.txt ### Datagrid VO Groups and their user mappings */ ?>nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/clusdes.php0000644000000000000000000000013214152153376023620 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.492578859 nordugrid-arc-6.14.0/src/services/monitor/clusdes.php0000644000175000002070000001700414152153376023607 0ustar00mockbuildmock00000000000000title; $module = &$toppage->module; $strings = &$toppage->strings; $errors = &$toppage->errors; // Header table $toppage->tabletop("","".$toptitle." $host"); // Array defining the attributes to be returned $qlim = array( QUE_NAME, QUE_QUED, QUE_GQUE, QUE_PQUE, QUE_LQUE, QUE_RUNG, QUE_GRUN, QUE_ASCP, QUE_MAXT, QUE_MINT, QUE_STAT ); // ldapsearch filter strings for cluster and queues $qfilter = "(objectclass=".OBJ_QUEU.")"; $dn = DN_LOCAL; if ($schema == "GLUE2") { $qlim = array( GQUE_NAME, GQUE_MAPQ, GQUE_QUED, GQUE_GQUE, GQUE_PQUE, GQUE_LQUE, GQUE_RUNG, GQUE_GRUN, GQUE_ASCP, GQUE_MAXT, GQUE_MINT, GQUE_STAT ); // ldapsearch filter strings for cluster and queues $qfilter = "(objectclass=".GOBJ_QUEU.")"; $dn = DN_GLUE; } if ( $debug ) { ob_end_flush(); ob_implicit_flush(); } $tlim = 15; $tout = 20; if( $debug ) dbgmsg("
    :::> ".$errors["101"].$tlim.$errors["102"].$tout.$errors["103"]." <:::

    "); // establish connection to the requested LDAP server $chost = $host; if ( $isse ) $chost=substr(strstr($host,":"),1); $ldapuri = "ldap://".$chost.":".$port; $ds = ldap_connect($ldapuri); if ($ds) { // If contact OK, search for clusters $ts1 = time(); if ( $isse ) { $exclude = array(SEL_USER); if ( $dn == DN_LOCAL ) $thisdn = ldap_nice_dump($strings,$ds,SEL_NAME."=".$host.",".$dn,$exclude); if ( $dn == DN_GLUE ) { $querydn = SEL_NAME."=".$host.":arex,GLUE2GroupID=services,".DN_GLUE;//TODO: change SEL_NAME $thisdn = ldap_nice_dump($strings,$ds,$querydn,$exclude); } } else { if ( $dn == DN_LOCAL ) $thisdn = ldap_nice_dump($strings,$ds,CLU_NAME."=".$host.",".$dn); if ( $dn == DN_GLUE ) { $querydn = "GLUE2ServiceID=urn:ogf:ComputingService:".$host.":arex,GLUE2GroupID=services,".DN_GLUE; $thisdn = ldap_nice_dump($strings,$ds,$querydn); } } $ts2 = time(); if($debug) dbgmsg("
    ".$errors["109"]." (".($ts2-$ts1).$errors["104"].")
    "); if ( strlen($thisdn) < 4 && $debug ) dbgmsg("
    ".$errors["129"].$thisdn."

    "); echo "
    "; // Loop on queues (if everything works) if ($thisdn != 1 && !$isse) { $ts1 = time(); $qsr = @ldap_search($ds,$dn,$qfilter,$qlim,0,0,$tlim,LDAP_DEREF_NEVER); $ts2 = time(); if($debug) dbgmsg("
    ".$errors["110"]." (".($ts2-$ts1).$errors["104"].")
    "); // Fall back to conventional LDAP // if (!$qsr) $qsr = @ldap_search($ds,$dn,$qfilter,$qlim,0,0,$tlim,LDAP_DEREF_NEVER); } if ($qsr) { // If search returned, check that there are valid entries $nqmatch = @ldap_count_entries($ds,$qsr); if ($nqmatch > 0) { // If there are valid entries, tabulate results $qentries = @ldap_get_entries($ds,$qsr); $nqueues = $qentries["count"]; // HTML table initialisation $qtable = new LmTableSp($module,$toppage->$module,$schema); // loop on the rest of attributes define("CMPKEY",QUE_MAXT); usort($qentries,"quetcmp"); for ($k=1; $k<$nqueues+1; $k++) { if ( $dn == DN_LOCAL ) { $qname = $qentries[$k][QUE_NAME][0]; $qstatus = $qentries[$k][QUE_STAT][0]; // $queued = @$qentries[$k][QUE_QUED][0]; $queued = @($qentries[$k][QUE_QUED][0]) ? ($entries[$k][QUE_QUED][0]) : 0; /* deprecated since 0.5.38 */ $locque = @($qentries[$k][QUE_LQUE][0]) ? ($qentries[$k][QUE_LQUE][0]) : 0; /* new since 0.5.38 */ $run = @($qentries[$k][QUE_RUNG][0]) ? ($qentries[$k][QUE_RUNG][0]) : 0; $cpumin = @($qentries[$k][QUE_MINT][0]) ? $qentries[$k][QUE_MINT][0] : "0"; $cpumax = @($qentries[$k][QUE_MAXT][0]) ? $qentries[$k][QUE_MAXT][0] : ">"; $cpu = @($qentries[$k][QUE_ASCP][0]) ? $qentries[$k][QUE_ASCP][0] : "N/A"; $gridque = @($qentries[$k][QUE_GQUE][0]) ? $qentries[$k][QUE_GQUE][0] : "0"; $gmque = @($qentries[$k][QUE_PQUE][0]) ? ($qentries[$k][QUE_PQUE][0]) : 0; /* new since 0.5.38 */ $gridrun = @($qentries[$k][QUE_GRUN][0]) ? $qentries[$k][QUE_GRUN][0] : "0"; $quewin = popup("quelist.php?host=$host&port=$port&qname=$qname&schema=$schema",750,430,6,$lang,$debug); } if ( $dn == DN_GLUE ) { $qname = $qentries[$k][GQUE_NAME][0]; $mapque = $qentries[$k][GQUE_MAPQ][0]; $qstatus = $qentries[$k][GQUE_STAT][0]; // $queued = @$qentries[$k][GQUE_QUED][0]; $queued = @($qentries[$k][GQUE_QUED][0]) ? ($entries[$k][GQUE_QUED][0]) : 0; /* deprecated since 0.5.38 */ $locque = @($qentries[$k][GQUE_LQUE][0]) ? ($qentries[$k][GQUE_LQUE][0]) : 0; /* new since 0.5.38 */ $run = @($qentries[$k][GQUE_RUNG][0]) ? ($qentries[$k][GQUE_RUNG][0]) : 0; $cpumin = @($qentries[$k][GQUE_MINT][0]) ? $qentries[$k][GQUE_MINT][0] : "0"; $cpumax = @($qentries[$k][GQUE_MAXT][0]) ? $qentries[$k][GQUE_MAXT][0] : ">"; $cpu = @($qentries[$k][GQUE_ASCP][0]) ? $qentries[$k][GQUE_ASCP][0] : "N/A"; $gridque = @($qentries[$k][GQUE_GQUE][0]) ? $qentries[$k][GQUE_GQUE][0] : "0"; $gmque = @($qentries[$k][GQUE_PQUE][0]) ? ($qentries[$k][GQUE_PQUE][0]) : 0; /* new since 0.5.38 */ $gridrun = @($qentries[$k][GQUE_GRUN][0]) ? $qentries[$k][GQUE_GRUN][0] : "0"; $quewin = popup("quelist.php?host=$host&port=$port&qname=$qname&schema=$schema",750,430,6,$lang,$debug); } $gridque = $gridque + $gmque; if ( $queued == 0 ) $queued = $locque + $gridque; // filling the table $qrowcont[] = "$qname"; if ( !empty($mapque) ) { $qrowcont[] = "$mapque"; } $qrowcont[] = "$qstatus"; $qrowcont[] = "$cpumin – $cpumax"; $qrowcont[] = "$cpu"; $qrowcont[] = "$run (".$errors["402"].": $gridrun)"; $qrowcont[] = "$queued (".$errors["402"].": $gridque)"; $qtable->addrow($qrowcont); $qrowcont = array (); } $qtable->close(); } else { $errno = 8; echo "
    ".$errors["8"]."\n"; return $errno; } } elseif ( !$isse ) { $errno = 5; echo "
    ".$errors["5"]."\n"; return $errno; } @ldap_free_result($qsr); @ldap_close($ds); return 0; } else { $errno = 6; echo "
    ".$errors[$errno]."\n"; return $errno; } // Done $toppage->close(); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/mon-icons0000644000000000000000000000013214152153474023271 xustar000000000000000030 mtime=1638455100.591580346 30 atime=1638455103.997631524 30 ctime=1638455100.591580346 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/0000755000175000002070000000000014152153474023333 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Russia.png0000644000000000000000000000013114152153376025322 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.567579986 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Russia.png0000644000175000002070000000040014152153376025302 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<*PLTEÂ$¾'3S 6^©\D…lƒº3S¡6^ª\D†lƒ»Ã$À'¿'ÿÿÿv¯Ëý\IDATxÚbàEÄ€.@ÄÀÊʉXYˆ‰‰ 0133;`f $ÀÂ@ \È€ €xx¸‘@102ò F€_åCFY9IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376025402 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.539579565 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Makefile.am0000644000175000002070000000022714152153376025371 0ustar00mockbuildmock00000000000000monitoriconsdir = @monitor_prefix@/mon-icons monitoricons_DATA = $(srcdir)/*.png $(srcdir)/*.php $(srcdir)/*.gif EXTRA_DIST = $(monitoricons_DATA) nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Finland.png0000644000000000000000000000013114152153376025427 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.551579746 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Finland.png0000644000175000002070000000050214152153376025412 0ustar00mockbuildmock00000000000000‰PNG  IHDR 2ÜIËbKGDÿÿÿ ½§“ pHYs  ÒÝ~ütIMEÓÜÆ?üÏIDATxÚ•;rÃ0 DI„òg«Ðýïæ"©â(¡(R ÐEv¶}³²/³+ˆ>Ñæ{Âje« q‡:ôäíŠo™æ,ȱ†#å Š…1u2ý8höxÒqƒ™í‚ûÐÐO@›³à:¦œ§è1*J1Ø÷¼;‹¼Eüœ½Oðñ;ð®ýªÅKZÑFøóg•¤_i«)=Ñ^ nc%¸esÆ9Øåņ¸+Ád á§U6ÎU/¬xñ’ C< ~ÐÙ48Qpø§þ ðp#F{2IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Slovakia.png0000644000000000000000000000013114152153376025625 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.568580001 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Slovakia.png0000644000175000002070000000050214152153376025610 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<`PLTElƒºÞ“î¿ÉGd¨òÏÖæŸ®ÆBÊ/OSn®ê¯»x¿ÒOj[C„•Ap¹`¹;_;Z£ÿÿÿ¾'???FÎ?]-Q,Cz_ŽÎÕè¬ÑÖ_xòôù_x´‘ fâhIDATxÚbDÄ€.@`NV&V¨@17+/ƒ°3rró @@˜ùEøØ`ÄÀÌòFkt옖”\ ÒvLŠx­†Ÿ=J,¸ÎþM»Œ4Kø)~IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435025410 xustar000000000000000030 mtime=1638455069.924119553 30 atime=1638455091.096437678 30 ctime=1638455100.539579565 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Makefile.in0000644000175000002070000005141414152153435025402 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/monitor/mon-icons DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = 
case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(monitoriconsdir)" DATA = $(monitoricons_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ 
DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = 
@PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ monitoriconsdir = @monitor_prefix@/mon-icons monitoricons_DATA = $(srcdir)/*.png 
$(srcdir)/*.php $(srcdir)/*.gif EXTRA_DIST = $(monitoricons_DATA) all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/monitor/mon-icons/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/monitor/mon-icons/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-monitoriconsDATA: $(monitoricons_DATA) @$(NORMAL_INSTALL) @list='$(monitoricons_DATA)'; test -n "$(monitoriconsdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(monitoriconsdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(monitoriconsdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(monitoriconsdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(monitoriconsdir)" || exit $$?; \ done uninstall-monitoriconsDATA: @$(NORMAL_UNINSTALL) @list='$(monitoricons_DATA)'; test -n "$(monitoriconsdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(monitoriconsdir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(DATA) installdirs: for dir in "$(DESTDIR)$(monitoriconsdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-monitoriconsDATA install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-monitoriconsDATA .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-monitoriconsDATA install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags-am uninstall uninstall-am \ uninstall-monitoriconsDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Spain.png0000644000000000000000000000013114152153376025126 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.570580031 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Spain.png0000644000175000002070000000030614152153376025113 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEä²ùú½'¥VXæÜÃj ßÛ2½…sòæÏ4IDATxÚ„Î1 ÄÀœ€þÿÇR)#…)· ã‰-IŽë”àaE[T˜óBëû±Læçü©ØIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Slovenia.png0000644000000000000000000000013114152153376025634 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.569580016 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Slovenia.png0000644000175000002070000000047114152153376025624 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<`PLTElƒºÎÕèµÁÜæêó„—Å_x´KN“l8tÉÅÚ­¡ÁtXÿÿÿ;Z£¾'Fž@S÷ô -Q_¿¿¿Gd¨òôùSn®|-e¬ÑÚàíµ.[C„CT›¢ËÁËâün_IDATxÚbàFÄÀÍÍÄÊÀ   ,'B €X¹˜¹à €xx¸eyà €€<œ,>@1°³³K°#€bàE„!@ÆzŠÃ%qIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Portugal.png0000644000000000000000000000013114152153376025651 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.566579971 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Portugal.png0000644000175000002070000000031414152153376025635 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTE+¢L¾' EÜ´»I9²o=Â5N…Eª|Æ:IDATxÚdŽA ¦üÿÍb¦N{l ` ml‰–¤€Š A]¢ÃxY*¨•ÈèYùfϱ)Àzvu7%Â8IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Denmark.png0000644000000000000000000000013114152153376025435 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.549579715 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Denmark.png0000644000175000002070000000047114152153376025425 0ustar00mockbuildmock00000000000000‰PNG  IHDR 2ÜIËbKGDÿÿÿ ½§“ pHYs  ÒÝ~ütIMEÓ €mçÆIDATxÚuŽÁnÂ0çùÙ„„¶ê¡âÿET"…P'¶·?`VsÜÑ®­,¤H6†ç¼úx÷ôÙÆžôb+¸±‰Ût8^îÕñ÷=5c©/(’ ƒ{«»P9ÙÌœT»}b6nÓáki—¸~ø%~ö7ÖP™õŠ}oU­H:kVQíWÝ N"¸øÍÄiˆoìú ×€Òð„ÓéüÐõ4"³jt¡2*ðçlîš•›d¶hôa ÈS™mjªö²ÝàÅÑ×ùkÇ•¾Lf–IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Latvia.png0000644000000000000000000000013114152153376025274 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.560579881 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Latvia.png0000644000175000002070000000025514152153376025264 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe< PLTEÿÿÿ™33™3f3'Û9'IDATxÚb`dBŒ Lh€‰01`h! 
0mA7ÃZt-\N¦…ÎIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/USA.png0000644000000000000000000000013214152153376024505 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.576580121 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/USA.png0000644000175000002070000000027514152153376024476 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿÿÿÌ3ff™3f™ÿÿÿ–ÛutRNSÿÿÿÿû¶S#IDATxÚb`F8``fA C`DD¨`AB.Ó$s±IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon-close.png0000644000000000000000000000013214152153376026110 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.579580166 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon-close.png0000644000175000002070000000051414152153376026075 0ustar00mockbuildmock00000000000000‰PNG  IHDRשÍÊgAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÀÀÀ```   ÿÿÿ´Â®¸tRNSÿÿÿÿû¶S²IDATxÚb`Áˆ—@á”  y( € #3Lœ‰™LDœ&Ãd‚e$ÁÀÌ “a3A‚Ä€à2!1XXˆIIœ €XdÄYˆMæ<€b`A•; €pJN£§å„Ó¹„Óƒ„3Hg Î` œ@8£ €pJN €^/ýx%’PIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Georgia.png0000644000000000000000000000013114152153376025431 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.553579776 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Georgia.png0000644000175000002070000000030414152153376025414 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿÿÿÿÌÿÿÌÌÌ3fÌ3û®cŽ;IDATxÚŒŽ10ùÿ›[jl:ô&%^($¢aáÀÃ3@ìð(á-5HèÂwl…%}|Å–\u¢¹œIIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Poland.png0000644000000000000000000000013114152153376025271 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.565579956 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Poland.png0000644000175000002070000000022314152153376025254 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿÿÿ¾'?µIDATxÚb` `DÔ0 òQ>ìYÞIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Netherlands.png0000644000000000000000000000013114152153376026323 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.563579926 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Netherlands.png0000644000175000002070000000025014152153376026306 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿÿÿÿÌÌÌÌÿÌ33f™W3–‘IDATxÚb`F „ÑaÀ„XÐa€sjÞ“¼ŠIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Germany.png0000644000000000000000000000013114152153376025456 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.553579776 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Germany.png0000644000175000002070000000036214152153376025445 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<'PLTE¼$É)ÎC;ïÔfòÞiË)½$óßiñÖfÏD;ðÕfu@ÒüXQIDATxÚbàAÄ€.@ÄÀˆ‘‘02` &NÀ@ ,ì(€ €¸8P@10s¡f€“…}ó§ŠIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Romania.png0000644000000000000000000000013114152153376025442 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.566579971 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Romania.png0000644000175000002070000000024114152153376025425 
0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿÿÿ™ÌÌ3Ì33f™ pIDATxÚb`& `d†Á*`€}•eB$IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Morocco.png0000644000000000000000000000013114152153376025455 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.562579911 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Morocco.png0000644000175000002070000000031414152153376025441 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEcM3¾'Â'”",¡*Ã6¼ /Ç#EØ /:IDATxÚ\ÎA0DÑé`ÜÿƵhRüå‹©h)g¨ÖBÕ'h€s€Û€"®¥ÿì~ì 0‚Z‰úèúIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon_led.php0000644000000000000000000000013214152153376025634 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.588580301 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon_led.php0000644000175000002070000000073014152153376025621 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Switzerland.png0000644000000000000000000000013114152153376026362 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.572580061 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Switzerland.png0000644000175000002070000000026014152153376026346 0ustar00mockbuildmock00000000000000‰PNG  IHDR ½¾ÞœbKGDÿÿÿ ½§“ pHYs  ÒÝ~ütIMEÓøÿ”W=IDATxÚcd@ÿþ3al& ~þÿ‡`² °à´Ÿ‘‘^.€Ù„Çf»Ý%´Œç o{=IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Algeria.png0000644000000000000000000000013114152153376025420 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 29 ctime=1638455100.54057958 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Algeria.png0000644000175000002070000000030314152153376025402 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTE(ŸJþÿþ¬3Aß «rZ?9±`óêêýðó J«„1IDATxÚb`eVF(``À!ÀÂÌŽ"ÀĆ"ÀÄÂÊÌĆOC ¦¡HÖ¢; ÀDϵ0²ñIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon_back.php0000644000000000000000000000013214152153376025770 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.587580286 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon_back.php0000644000175000002070000000062114152153376025754 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Ireland.png0000644000000000000000000000013114152153376025432 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.558579851 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Ireland.png0000644000175000002070000000026614152153376025424 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿÿÿÿÿÌÿÌfÌÿÌÌÌf3™f3™3ôÚ±'IDATxÚb`f `dV6 €°€º]€Ö¢9 À¦ ëÛSIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Chile.png0000644000000000000000000000013014152153376025077 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 29 ctime=1638455100.54657967 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Chile.png0000644000175000002070000000047514152153376025074 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA± üa cHRMz%€ƒùÿ€éu0ê`:˜o’_ÅF PLTE;Z£ÿÿÿ¾'ÍòbKGDÿ-Þ pHYsgŸÒRtIMEä 
±EëàIDAT×c`F`À"$¨@`BÔ7Úëõ!ñ%tEXtdate:create2020-02-24T08:31:03+00:00eîxŒ%tEXtdate:modify2020-02-24T08:31:03+00:00³À0IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/spacer.gif0000644000000000000000000000013214152153376025313 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.591580346 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/spacer.gif0000644000175000002070000000005314152153376025276 0ustar00mockbuildmock00000000000000GIF89a€!ù,D;nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Sweden.png0000644000000000000000000000013114152153376025301 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.572580061 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Sweden.png0000644000175000002070000000047114152153376025271 0ustar00mockbuildmock00000000000000‰PNG  IHDR 2ÜIËbKGDÿÿÿ ½§“ pHYs  ÒÝ~ütIMEÓˆÆIDATxÚ•Í[JAEÑ}ë;-ç?<‘‰Ñ®t½®¨û÷°2.‡v‰ð©’0‹< fËl—yõåV„P{ÒöìÁï¾ñ;¦ h¡ÓÏ#ÞÔZ’È7® Ë\ˆ „8†³Çµ¯Ñ>‚Ej då^cS>Úª´p×mì«*Ó\Âh?¬KÑ>¨ðµ,ï‡ôLsá…ƒBñ¾ÞukzÚØÏ(¦Ns³‡Æ(¨¬¹KËÓOºRÞæ‚…ŽWèªh ˜Š ˜gøçþÎÈ‚ªï[þôIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon-folks.png0000644000000000000000000000013214152153376026121 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.580580181 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon-folks.png0000644000175000002070000000076314152153376026114 0ustar00mockbuildmock00000000000000‰PNG  IHDRשÍÊgAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<ZPLTEžýæ˜üÙdûÏ=þì²Ù¥¢°=²JDÿùæìÓÑ«;5¿idÕ„ ýß~âœ%ýé¥ùñðžéµ\Åxsª1 ¾bJ¤% ß´²ôÀ-æÃÁþò̘ûÌ0ÿÿÿ¿¬~×tRNSÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿì^õIDATxÚbň—@á” œ—”D• ¨„8ƒ´4+²@A$8¥ÁY@A$¸!¢¼LLüP €‚H°ƒÅE$d€€ "@È:ødÀ€,@ I°D\†,@PW±‚\ÅŒ,@ III.ii.!ˆX €€ÂÒ0Àf†X! @ ²¬PQ1666 ¯””H € N„: @º&Á#ƒ¤dˆA–SPÅ. 
XèbHLB pÈΈ œ„S €pJ˜<ÉêoSIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/World.png0000644000000000000000000000013214152153376025144 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.578580151 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/World.png0000644000175000002070000000021314152153376025125 0ustar00mockbuildmock00000000000000‰PNG  IHDR 2ÜIËbKGDÿÿÿ ½§“ pHYs  ÒÝ~ütIMEÓI,6IDATxÚcž÷™ÀÄ@"Õ0844û¸diy?IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Bulgaria.png0000644000000000000000000000013114152153376025602 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.545579655 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Bulgaria.png0000644000175000002070000000025514152153376025572 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿÿÿÌ3fÌf3™33f3™3x*|×!IDATxÚb` 0¡V4ÀÀŒXÐ# ,`M²m6/YIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Czechia.png0000644000000000000000000000012714152153376025427 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 28 ctime=1638455100.5485797 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Czechia.png0000644000175000002070000000025714152153376025414 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe< PLTEÿÿÿÌ33f™f¼œÕ,IDATxÚdÈ1 ƒÀÂÿÝAÙ¸q*ý H‹l¹Àkê@$ä³ %a_›IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon-run.png0000644000000000000000000000013214152153376025607 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.585580256 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon-run.png0000644000175000002070000000113614152153376025575 0ustar00mockbuildmock00000000000000‰PNG  IHDRשÍÊgAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<`PLTEž½Už«;5¿idè¨(˜ùñðôÀ-ª1 ÉmÜ"Ò–“¶IìÓÑ¥,%Ù¥¢æÃÁòâá²JDî´*ÅxsÐżƒ¸ZT¤% °=Ãaß´²âœ%ûÌ0ÿÿÿ ø < tRNSÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ\\íXIDATxÚbLj—@á” œ“`fcå”ec T‚Ÿ‘ƒ•›‹—“™M„›$@ .^)i99N6V1f@%ØÙ¸Y™X„™Dä䏸ù@b–b’fg‘d•““cš#(À#/@P£Ä%8q‹ÉI2³qòròÈÌUBL@qFY99Y)y€‚Ið0ÊÉI3 ±ðr1r²1³°ËL‚™h1³<»( Ð/ @ 0“€ËI‰"|@P qF„˜0B € L §Š1"$ˆ!ÎÅÀ‡ˆ’`abåæcäG—  ÐËblŒ¼Ìì(Á@ ò<œŒ ,¨âòÄt#³fDƒ<Ÿ°;– œQ @8% _‘8S%ZŽÏIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon-refresh.png0000644000000000000000000000013214152153376026441 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.584580241 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon-refresh.png0000644000175000002070000000070614152153376026431 0ustar00mockbuildmock00000000000000‰PNG  IHDRשÍÊgAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<-PLTEððð   ÀÀÀppp```PPP€€€@@@000 àààÿÿÿ4‡qstRNSÿÿÿÿÿÿÿÿÿÿÿÿÿÿÔܘ¡IDATxÚbàÈ—@!Ið01ñ x“àaæænf˜@A%Xxá€"@ V°˜b L=7#ˆÅ4 ,@  8'ÔLNV>& €@|f z$×1€5H¨Éì¼\@Q€b€˜„$ÎÃÅËËÎÇ@ |l`§0Á%À|>€bk€º˜ N ™ áÈ.™pÀt# °¯PËËË ¤,ÄÄÇŒð Ô`€b€¹$` œ@8ƒ €pF@áŒZ€Â™§@€×A Qk%IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Armenia.png0000644000000000000000000000013214152153376025431 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 30 ctime=1638455100.541579595 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Armenia.png0000644000175000002070000000025714152153376025422 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿ™fÌ™fÌ3ff™f3™3f™'#IDATxÚb`B „XÐ+À`F € `”â‘ 
»J§IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Italy.png0000644000000000000000000000013114152153376025136 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.559579866 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Italy.png0000644000175000002070000000024514152153376025125 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿÿÿÿ™ÌÌ3™ÌÌ3™3™3ØÀ¹IDATxÚb`Vf `d†Á*`w[_|pдIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Hungary.png0000644000000000000000000000013114152153376025471 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.556579821 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Hungary.png0000644000175000002070000000025014152153376025454 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿÿÿÿÌÌÌÿÌÌ33™3`ÝÊDIDATxÚb`F „ÑaÀ„XÐa€sjÞ“¼ŠIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Greece.png0000644000000000000000000000013114152153376025246 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.554579791 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Greece.png0000644000175000002070000000026314152153376025235 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe< PLTEÿÿÿ™ÌÌÿÿÿTøœ tRNSÿÿ×Ê A!IDATxÚb`dd`D „ðªÀ0ŸvbU0¡€wrtŒ-IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon-help.png0000644000000000000000000000013214152153376025733 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.581580196 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon-help.png0000644000175000002070000000066214152153376025724 0ustar00mockbuildmock00000000000000‰PNG  IHDRשÍÊgAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<3PLTEððð@@@€€€ÐÐÐ °°°àààPPP```000ppp   ÀÀÀÿÿÿÿÌtRNSÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ%­™bèIDATxÚbÀˆ—@á” ¸+;###7L € 4 ? 
pC„Br‚…¹@X €À/ˆÏ b²¤Ab–™Ã1“ ¤†È °3ÔN@hÎeÙÆbª+Øq¬ &@!K00‚„Y †’3Hœ*@H c8Xa<€BH0ÃÜ„àI ôB‚„`c„@á4 €pJN €Âµ„S €pJj!kqË.IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/HongKong.png0000644000000000000000000000013114152153376025566 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.555579805 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/HongKong.png0000644000175000002070000000067514152153376025564 0ustar00mockbuildmock00000000000000‰PNG  IHDR 2ÜIË„IDATÓÁ¿jQàß9÷_fv²»AC²*Úˆ ¤±´ŸÁ°²µ³ôERˆµ…¥`-q AÓ™-„€ n2³;sçÞ{Žß‡dåX» 0ìÛHÍÛ€µÁ85j- “ p—6 ­ê G‚Œ f:‚²¡P‹ž1f(`©+e$`ˤ˜Œ˜éÛ×ú鸛Þ^ÃG¢nîz@Éd¶–‰6©÷ç͹å´wóòò9/W;»wM=‹ç¿ø*¼]ÇHÙZÍÙTÁ»øð‹ûc+Ž˜ç¸8-/Þðj¥È Ud¨”¢ÙlñûÍÖ'?ÝÕ°÷ä^÷ñksv ô@M µÕµƒ2´q»?þ°{¬'Ëaù­ýñåïâAæ¬h³1çŠ$Ò%çüÁþ óô§×áó÷ðêYƒ DÛA@àÂT”ÌhÕèâ–4e¯*¹ ô•€2·$LDœJ±`2&—\˜…¡ªÁyɉ6œJqÞþÚC·;¤ÊIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Japan.png0000644000000000000000000000013114152153376025105 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.560579881 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Japan.png0000644000175000002070000000067314152153376025101 0ustar00mockbuildmock00000000000000‰PNG  IHDR 2ÜIËbKGDÿÿÿ ½§“ pHYs  ÒÝ~ütIMEÓ' n¶HIDATxÚ-ΡŽSAÇáÿ™¹·mX‚AaX³¢ÄDƒåA1<Y,˜]ƒÁ€€„ÁbPÐlÛ;3眆ï >R¢iQš´‰ªšÙY3­›Në¶‘Ì×Ji–y¶£­îKcc3ÏRÍ¥~û*Û­åF–=š¦ií]ôÑÆ‚……Îõûþì%zàzØv¯øtƒw‘ÑHÄ©9ýHŒÛ/y~AUTQ5Šþ^¾à÷‚tF‘ ^¿A›¬J­SÊ¢®Wo8 EÒ¬é€T¥?{Íx¨ÏjÅH™Í~:ªàéE³¢(<~}îÎÓԫƬ”–'»ÃþgObñÿ%g$\¿Ë§ÏC+L\îÆÇtœ;çHK#ð"IB!Ãöw|¿5¤ícÎîYVŠ iBE2²É )³IàY Sš­‚>¢–²2Ó¤0ªøÃ2¡ ðëMIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon_spacer.php0000644000000000000000000000013214152153376026345 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.589580317 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon_spacer.php0000644000175000002070000000034414152153376026333 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Austria.png0000644000000000000000000000013214152153376025465 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 30 ctime=1638455100.543579625 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Austria.png0000644000175000002070000000023614152153376025453 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe< PLTEÿÿÿÿÌÌÌ3­ŽXÇIDATxÚb`B „Ña€¡…tk K áCAhIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Lithuania.png0000644000000000000000000000013114152153376025772 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.561579896 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Lithuania.png0000644000175000002070000000037614152153376025766 0ustar00mockbuildmock00000000000000‰PNG  IHDR 2ÜIËbKGDÿÿÿ ½§“ pHYs  ÒÝ~ütIMEÓ éšô£‹IDATxÚ1Â@ g«©iù 5¥å/y5HÈKar9!2Õœ¥[Ë«çƒ$¶KÊY¨a¦3iј& ƒX!ÙJ` ôµa\k€€ÔåzŠö‘™í6{ê’lÛn>¤ëC]ÙýgŠÝÎó}#µ×µÞ“LKEŸ6FêÙ]/¨Rÿba³·ŸÎµªIã…P£dIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Taiwan.png0000644000000000000000000000013114152153376025277 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.573580076 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Taiwan.png0000644000175000002070000000062014152153376025263 0ustar00mockbuildmock00000000000000‰PNG  IHDR 2ÜIËWIDATÓÁ1nQ†Ñï¿ï={‚b#Q Q¥ ƒ††ýP’- ö€”4Hl…¤BQ$ :$Pšñ8ŒgæÝË9zþâs÷·ø”Nϲ…’F[þùÑ(ÉN8s 
Öùð˺¶=?/o/^þåêf·NÌ¢_d+áš<°Ò8¹ÌyÍÉëWÏ.Þ½Áùöµû½ûéþo)[„‘BH ËCCˆëÛîãåwE½¾Ý·Ô’-Ïžkˆ P@T¢æ§v„»»›ûOï{¢톱±Fx„‚PîÉ]ýÙ“îp¿Èø°º\î#)´Å”Œ˜„;^J^­óŠU¡e¬¦Ná6r@ã ɉ @bˆž©Ç…¶£·y1áÔY)=žëÞ’ÀˆÙ2y]‘1•OÂk—-îÕ”äVÐtte¶kSM„ª#„“ ZÁ,¡Š³YÿèY¹êVùÉwIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Norway.png0000644000000000000000000000013114152153376025333 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.564579941 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Norway.png0000644000175000002070000000054414152153376025324 0ustar00mockbuildmock00000000000000‰PNG  IHDR 2ÜIËbKGDÿÿÿ ½§“ pHYs  ÒÝ~ütIMEÓ3~¡NŽñIDATxÚ•Î?JA†ñg¾Élt‰ ÑÖÖBì<€·ðÞÀ4‚…`eá <lõ¢  Ùà&›ÌÎ÷yi|øõï‹:ŒAÝ#Þ=@B¬‡÷æQò:Î÷ÚvU6¢ÕR¨6Ùêê7ç<¹¤êþFIc¯lÌú \PMÁ,e9{y#£ÉþÑÞé1K]Œ¾ÂÇ…Uv’5Ìí—-Ô¶Ý™âbAÊ"0'$X”bÐ P¤…”ÕYß<¥õØï¾ÛÉÅÎù™S7yþ”×GUÍ_š{LˆÐ^]1hëÛËäQ—'½äA|¡W6‡N=F–ðÏþ•œ*Tö²ÿIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Canada.png0000644000000000000000000000013014152153376025222 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 29 ctime=1638455100.54657967 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Canada.png0000644000175000002070000000075114152153376025214 0ustar00mockbuildmock00000000000000‰PNG  IHDR 2ÜIËbKGDÿÿÿ ½§“ pHYs  ÒÝ~ütIMEÓ%ŠgvIDATxÚÁÁŠAÐûªª“i˜0 ˆàB7â*£?àWù þ‰+7âÖ•.Dc&Ý©TW½z×s@‘ìлóòíûUú;nâfÜG#û¾gäŸØŸÏè`€Á#yʼ.–ì÷4µúö%ß¼ÎL™7©t] Ã €¡‰ý,.Ïx}`¤þþÉå­ýéŠ_¿0‘…yÕUÀ†i0‚+U[ʦ¼ûØ|þ€ü¯ Ä«ùü../šÆa¤š‡ÒeLx³ãî/ÖE}S¼ºÀûkÛ]ñöÊÐ*Ü6ÀUƒqlœÖ2š[ä{w¨«ÕLqùPÛÄ  TLÀűfrøÁGO´#¼®Ÿ2nã0²;S †iØJ]Ð;úÔN›ØÛñÜ=6{ÿ ÚãÁãÔ¶m!AÐ"”¨0 ÂñôD¯÷$YÍâ Ûm¥‘äØ-‹÷($8€FZ))%?q"ªBj)¦U£Ï™fAD(ÿwéyÜã×IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Ukraine.png0000644000000000000000000000013214152153376025453 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.577580136 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Ukraine.png0000644000175000002070000000024114152153376025435 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe< PLTEÿÿ3ÿÿ™Ìÿ™ÌÌ:Ý7ÉIDATxÚb`B d0£ ÀˆÈ0^²ñöqlIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Belgium.png0000644000000000000000000000013114152153376025440 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 29 ctime=1638455100.54457964 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Belgium.png0000644000175000002070000000025414152153376025427 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿÿ3ÿÿÌ™Ì3™™D¾ IDATxÚb`F&f`€0Á`@8 À§ï'ú÷ÞIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon-print.png0000644000000000000000000000013214152153376026137 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.583580226 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon-print.png0000644000175000002070000000053614152153376026130 0ustar00mockbuildmock00000000000000‰PNG  IHDRשÍÊgAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTE€€€   àààpppÐÐÐÀÀÀ@@@ÿÿÿé" > tRNSÿÿÿÿÿÿÿÿSOx´IDATxÚbàÀˆ—@á” ˜;@§@!$Pi€B’`d``EHN €B’`fccDH’ ++B €pZ@8%ˆî$ÿ°„S € ÚÙÙYÁÈfKB$ €‚I°ÁT €] €Â)@ (A €Â)@8£ €pJN €Ò á€#µlIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon_bar.php0000644000000000000000000000013214152153376025634 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.588580301 
nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon_bar.php0000644000175000002070000000241414152153376025622 0ustar00mockbuildmock00000000000000 9) {$x3 = $x1-16;} if (strlen($text) > 5) $x3 = 84; if (strlen($text) > 10) $x3 = 36; $im = @imagecreate(200,$y); $bgcolor = imagecolorallocate($im,204,204,204); $red = imagecolorallocate($im,97,144,0); $grey = imagecolorallocate($im,176,176,176); // $white = imagecolorallocate($im,255,255,255); $white = imagecolorallocate($im,48,48,48); if ( $x1 < $x3 ) $white = imagecolorallocate($im,82,82,82); if ( $x1 ) imagefilledrectangle($im,0,0,$x1,$y,$grey); if ( $xg1 ) imagefilledrectangle($im,0,0,$xg1,$y,$red); imagestring ($im, 3, $x3, 0, $text, $white); imagepng ($im); ImageDestroy($im); } $x = $_GET["x"]; $xg = $_GET["xg"]; $y = $_GET["y"]; $text = $_GET["text"]; do_bar($x,$xg,$y,$text); ?>nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon_start.php0000644000000000000000000000013214152153376026225 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.590580331 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon_start.php0000644000175000002070000000071614152153376026216 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/UK.png0000644000000000000000000000013114152153376024373 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.575580106 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/UK.png0000644000175000002070000000036414152153376024364 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTEÿÌÌÿ™™ÌÌÌÌffÌ3fÌ33Ì3™™Ìf™Ì3f™;Á“\IDATxÚ<Ž À0Bk­÷¿ðÞ²¥&"Š?…|fÎZ v†Þ Ètk&Sn Èž‘±V „«öÞ¯ÿXú’kÅx›nF6|ÆÊ~—“¥Ç‘ÿÙTÈ÷1šã`úŠôœ?ž IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Australia.png0000644000000000000000000000013114152153376026001 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 29 ctime=1638455100.54257961 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Australia.png0000644000175000002070000000054214152153376025770 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<`PLTElƒº¬ÑGd¨Sn®_x´ÚàíÆBx¿Úo…â ·Ÿ¤ê¯»Ê/O”Œ¶¦_n;Z£,CzòôùÎÕè„—ÅæêóµÁÜÎ?]ößäÖ_xúïñÂ4Þ“î¿É柮©¶ÖгîˆIDATxÚb‘gáãc’áb⇀b`à”唓g²ÙE€@1ððð°± ›‰‰™™ €ع9%9d9 êÙùˆ‹—‰™‹—Ÿ¨h@1@Íbâgb H@D„$ÀÊ a³€8¶Žæ @ üh À¶%ªóƒIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/China.png0000644000000000000000000000013114152153376025076 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.547579685 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/China.png0000644000175000002070000000043714152153376025070 0ustar00mockbuildmock00000000000000‰PNG  IHDR ½¾ÞœbKGDÿÿÿ ½§“ pHYs  šœtIMEÖ .濬IDAT(ÏÅÏ-Â@„áw7-¦=YA QׄSÎÄGè!¨ AÔWB@ p 4”ì‡Àaؤ‚ÇÏdFåD Z‚<ISÌJ°ü¤rIoX¤†ÛúÀ}ë£Û‚­.4AjèÏ7©[)’,v*P9‘t5áÄp]©öhðBËë¢Ïyyú„,Œ#Ç _’,Fw„bZº_ MCÿ/xñ14aú„ÙIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon-disk.png0000644000000000000000000000013214152153376025735 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.580580181 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon-disk.png0000644000175000002070000000070114152153376025720 0ustar00mockbuildmock00000000000000‰PNG  IHDRשÍÊgAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<NPLTEÉmžª1 Õ„ 
ôÀ-°=âœ%ÏxÃaÒ–“ž¶IòâáæÃÁè¨(½U²JDÙ¥¢¿id¥,%ùñð¸ZT˜̇ƒûÌ0ÿÿÿ·çtRNSÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ"ÚÓIDATxÚbĈ—@á” œ„S €pJTB„STX „E9E "‘àadeæ“>fVF°@A$Ĺ%à€[,@P 1&v6 6v&1ˆ@A%˜ùY¹ÄĸXù™ “@• œ„S €pJT‚!Á‘ ˆ/#¹Œ¼`!€‚…• (H„¡„3§@á” œ„S €pJh3MfŸ}IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Turkey.png0000644000000000000000000000013114152153376025337 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.515647627 30 ctime=1638455100.574580091 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Turkey.png0000644000175000002070000000035214152153376025325 0ustar00mockbuildmock00000000000000‰PNG  IHDR 2ÜIË pHYs  šœœIDATxœcÜÇ Î@ `"I5 2‡ÇPS4Ðõ׋7¯×ïþõü5Düõ6Íx»õÀÓiËþÿúMØIòÕzæ~:y‰á÷ÛB®VBî6ø40±²üÿóI†‰‘ ‹™ãD ¬?)Tg¾Ûqø÷›÷,¼_¯ßý~û!> _.Üøvý®dR0—ºÂ¯'/¿ý€ÕŒ4’<´NjwIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon-look.png0000644000000000000000000000013214152153376025747 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.582580211 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon-look.png0000644000175000002070000000073014152153376025734 0ustar00mockbuildmock00000000000000‰PNG  IHDRשÍÊgAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<BPLTEß´²ôÀ-¸ZTÅxs²JDÉmìÓÑ̇ƒæÃÁùñðÃa¥,%î´*°=¤% Ò–“½Uª1 ¿idûÌ0˜ÿÿÿ°˜Ý”tRNSÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÒÀäúIDATxÚbň—@¡I°±³³³Y„"Á)$Lœ@6@¡H0‰ðóp‰0Ù„,Á!Â% \"¢¢„,Á.–`a d >ad d fd„,Á  –`ŠŠ«¸EX…YE€&‰²„·ˆ¯ˆ È„``e`fa†B9Ø_p@ q~d €b@ˆ ¡'@1à â¢Ä€C\ €€lØÄE"). @ £8°E<@áL „S ÀËô+EØi IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Iceland.png0000644000000000000000000000013114152153376025413 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.557579836 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Iceland.png0000644000175000002070000000021114152153376025373 0ustar00mockbuildmock00000000000000‰PNG  IHDR ½¾ÞœPIDAT(‘c´ŽZüŸ YðŸQƒÁñÿ ›è% ø^Y"Å0þÿÿÿ?º ²°AÃRÀÀ{=šŽ,Aáþhå a [sáIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/Estonia.png0000644000000000000000000000013114152153376025456 xustar000000000000000029 mtime=1638455038.44264653 30 atime=1638455038.514647612 30 ctime=1638455100.550579731 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/Estonia.png0000644000175000002070000000034414152153376025445 0ustar00mockbuildmock00000000000000‰PNG  IHDR Š`.®gAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<PLTE=]¨;[¤;Z£;[¥;Z¤=]©@@@*ÿÿÿøÉ‹LIDATxÚb`bbALLÄÀÈÈŒˆ 00; 4@ÄÀ†ˆ †@a3¥WhÒIEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/mon-icons/PaxHeaders.30264/icon-vo.png0000644000000000000000000000013214152153376025427 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.586580271 nordugrid-arc-6.14.0/src/services/monitor/mon-icons/icon-vo.png0000644000175000002070000000105514152153376025415 0ustar00mockbuildmock00000000000000‰PNG  IHDRשÍÊgAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<]PLTEª1 ôÀ-ž¤% ²JDùñð¥,%̇ƒ˜òâáÜ"žÉmÒ–“ÃaÏxî´*Ù¥¢¸ZT«;5Õ„ ß´²¶I½U¿idìÓÑâœ%æÃÁ°=ûÌ0ÿÿÿè¶4tRNSÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÍv+IDATxÚbÈ—@!$8¥€Î „ ƒŒ ³œ @p AfYYYF&i €`¬l\@ Y˜@Á$ØedÁ€Y‚¤ €`¢ÌŒ q>q6 @Áí`áIˆ‰ƒHv99€‚KHrpII‰0õIqsʹl çŠAlbà• tçJÄE€–ܹÜ`!°´¨œ@$xYXX„aÎÈò @%8AÖò3A bsƒ €€B|`'"“@烜4’Z›S €!Œd“ @1ÀÜÃφpˆ @@;xŽ`ÔÏ BÿƒÙÄ€©p6@áL „S À3B7;ê\Y¹IEND®B`‚nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/discover.php0000644000000000000000000000013214152153376023774 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.493578874 
nordugrid-arc-6.14.0/src/services/monitor/discover.php0000644000175000002070000001100614152153376023757 0ustar00mockbuildmock00000000000000title); $strings = &$toppage->strings; $giislist = &$toppage->giislist; $isattr = &$toppage->isattr; $errors = &$toppage->errors; require_once('attlist.inc'); $itself = $_SERVER["PHP_SELF"]; $ifsub = $_POST["submit"] ? TRUE : FALSE ; $ifsel = $_POST["select"] ? TRUE : FALSE ; echo "
    \n"; if ( $ifsub ) { // Call attributes list function for all selected arguments $request = $_POST; $attributes = array (); $signs = array (); $filters = array (); $attributes = $request["attributes"]; $signs = $request["signs"]; $filters = $request["filters"]; $thething = $request["scope"]; if ( $thething == "job" || $thething == "queue" || $thething == "authuser" ) $thething = "cluster"; // $attwin = popup("attlist.php?attribute=$encatt",650,300,7,$lang,$debug); do_attlist($thething,$attributes,$signs,$filters,$strings,$giislist); echo "
     "; echo " \n
    "; } elseif ( $ifsel ) { // If selection of search object and nr. of attributres is made, display options: $scope = $_POST; $object = $scope["object"]; $nlines = $scope["nlines"]; if ( !$nlines ) $nlines = 6; echo "

    ".$errors["416"].$object."

    \n"; echo "
    ".$errors["417"]."
    \n"; echo "
    ".$errors["418"]."


    \n"; $attwin = popup($itself,650,300,7,$lang,$debug); echo "
    "; echo "\n"; $rcol = "#ccffff"; for ( $i = 0; $i < $nlines; $i++ ) { echo "\n"; echo "\n"; echo "\n"; } echo "\n"; echo "
    "; echo "

      
    \n"; echo " \n"; } else { echo "

    ".$errors["419"]."

    \n"; echo "
    "; echo "

    ".$errors["423"]." \n"; echo "  ".$errors["424"]." \n"; echo "  \n"; echo "

    \n"; } echo "
    \n"; $toppage->close(); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/vo-users.php0000644000000000000000000000013214152153376023741 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.499578964 nordugrid-arc-6.14.0/src/services/monitor/vo-users.php0000644000175000002070000000776714152153376023747 0ustar00mockbuildmock00000000000000title; $module = &$toppage->module; $strings = &$toppage->strings; $errors = &$toppage->errors; // Header table $toppage->tabletop("","".$toptitle.""); // ldap search filter string for jobs $ufilter = "(objectclass=".OBJ_PERS.")"; $ulim = array ( "dn", VO_USSN, VO_USCN, VO_DESC, VO_INST, VO_MAIL ); if ( $debug ) { ob_end_flush(); ob_implicit_flush(); } $tlim = 10; $tout = 15; if( $debug ) dbgmsg("
    :::> ".$errors["114"].$tlim.$errors["102"].$tout.$errors["103"]." <:::

    "); // Establish connection to the requested VO server if( $debug ) dbgmsg($errors["117"].$host.":".$port); $ldapuri = "ldap://".$host.":".$port; $ds = ldap_connect($ldapuri); if ($ds) { // If contact OK, search for people $ts1 = time(); $sr = @ldap_search($ds,$vo,$ufilter,$ulim,0,0,$tlim,LDAP_DEREF_NEVER,$tout); $ts2 = time(); if($debug) dbgmsg("
    ".$errors["125"]." (".($ts2-$ts1).$errors["104"].")
    "); if ($sr) { // If search returned, check that there are valid entries $nmatch = @ldap_count_entries($ds,$sr); if ($nmatch > 0) { // If there are valid entries, tabulate results $entries = @ldap_get_entries($ds,$sr); $nusers = $entries["count"]; define("CMPKEY",VO_USSN); usort($entries,"ldap_entry_comp"); // HTML table initialization $utable = new LmTable($module,$toppage->$module); // loop on users $uscnt = 0; for ($i=1; $i<$nusers+1; $i++) { $dn = $entries[$i]["dn"]; if ( $group ) { $newfilter = "(member=$dn)"; $newdn = $group.",".$vo; $newlim = array("dn"); $gcheck = @ldap_search($ds,$newdn,$newfilter,$newlim,0,0,$tlim,LDAP_DEREF_NEVER,$tout); if ( !ldap_count_entries($ds,$gcheck) ) continue; } $usname = $entries[$i][VO_USCN][0]; // $usname = utf2cyr($usname,"n"); // $ussn = strstr($entries[$i][VO_DESC][0],"/"); $ussn = substr(strstr($entries[$i][VO_DESC][0],"subject="),8); $ussn = trim($ussn); $encuname = rawurlencode($ussn); $org = $entries[$i][VO_INST][0]; // $org = utf8_decode($org); $mail = $entries[$i][VO_MAIL][0]; $mailstr = "mailto:".$mail; $usrwin = popup("userlist.php?owner=$encuname",700,500,5,$lang,$debug); // filling the table $uscnt++; $urowcont[] = $uscnt; $urowcont[] = "$usname"; $urowcont[] = "$org"; $urowcont[] = "$mail"; $utable->addrow($urowcont); $urowcont = array (); } $utable->close(); } else { $errno = 10; echo "
    ".$errors[$errno]."\n"; return $errno; } } else { $errno = 5; echo "
    ".$errors[$errno]."\n"; return $errno; } ldap_free_result($sr); ldap_close($ds); return 0; } else { $errno = 6; echo "
    ".$errors[$errno]."\n"; return $errno; } // Done $toppage->close(); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/loadmon.php0000644000000000000000000000013214152153376023607 xustar000000000000000030 mtime=1638455038.441646515 30 atime=1638455038.514647612 30 ctime=1638455100.495578904 nordugrid-arc-6.14.0/src/services/monitor/loadmon.php0000644000175000002070000006050614152153376023603 0ustar00mockbuildmock00000000000000module; $strings = &$toppage->strings; $errors = &$toppage->errors; $giislist = &$toppage->giislist; $emirslist= &$toppage->emirslist; $cert = &$toppage->cert; $yazyk = &$toppage->language; $archery_list = &$toppage->archery_list; // Header table $toptit = date("Y-m-d T H:i:s"); $toppage->tabletop("".EXTRA_TITLE." ".$toppage->title."

    ","$toptit"); //********************* Schema changing ****************************** $other_schema = "GLUE2"; if ( $schema == "GLUE2" ) $other_schema = "NG"; $_GET["schema"] = $other_schema; $get_options = ""; $keys = array_keys($_GET); foreach ($_GET as $key => $value) { if ( $key == $keys[0] ) { $get_options = "?"; } else { $get_options = $get_options."&"; } $get_options = $get_options."$key=$value"; } //TODO: use translate messages echo "Current data rendered according to $schema schema.
    "; echo "Schema switching to: $other_schema

    "; //********************** Legend - only needed for this module ********************* echo "
    \n"; echo "".$errors["401"].":\n"; echo "\"".$errors["305"]."\"".$errors["402"]." \"".$errors["306"]."\"".$errors["403"]."\n"; echo ""; $sewin = popup("sestat.php",650,200,8,$lang,$debug); $discwin = popup("discover.php",700,400,9,$lang,$debug); $vostring = popup("volist.php",440,330,11,$lang,$debug); $usstring = popup("allusers.php",650,700,12,$lang,$debug); $acstring = popup("allusers.php?limit=1",500,600,12,$lang,$debug); echo "
    \n"; //******** Authorised users echo "\"".$errors["307"]."\" \n"; //******** Active users echo "\"".$errors["308"]."\" \n"; //******** Search echo "\"".$errors["309"]."\" \n"; //******** Storage echo "\"".$errors["310"]."\" \n"; //******** Virtual Organisations echo "\"".$errors["311"]."\"\n"; echo "
    \n"; echo "
    \n"; //****************************** End of legend **************************************** // Some debug output if ( $debug ) { ob_end_flush(); ob_implicit_flush(); dbgmsg("
    ARC ".$toppage->getVersion()."
    "); } $tcont = array(); // array with rows, to be sorted $cachefile = CACHE_LOCATION."/loadmon-$schema-".$yazyk; $tcont = get_from_cache($cachefile,120); // If cache exists, skip ldapsearch if ( !$tcont || $debug || $display != "all" ) { // Do LDAP search $tcont = array(); // Setting time limits for ldapsearch $tlim = 10; $tout = 11; if($debug) dbgmsg("
    :::> ".$errors["101"].$tlim.$errors["102"].$tout.$errors["103"]." <:::
    "); // ldapsearch filter string for clusters and queues $filter="(|(objectClass=".OBJ_CLUS.")(objectClass=".OBJ_QUEU.")(objectclass=".GOBJ_CLUS.")(objectclass=".GOBJ_QUEU.")(objectClass=".GOBJ_MAN.")(objectClass=".GOBJ_LOC."))"; // Array defining the attributes to be returned $lim = array( "dn", GCLU_ANAM, GCLU_ZIPC, GCLU_TCPU, GCLU_UCPU, GCLU_TJOB, GCLU_QJOB, GCLU_PQUE, GQUE_STAT, GQUE_GQUE, GQUE_QUED, GQUE_LQUE, GQUE_PQUE, GQUE_RUNG, GQUE_GRUN, CLU_ANAM, CLU_ZIPC, CLU_TCPU, CLU_UCPU, CLU_TJOB, CLU_QJOB, CLU_PQUE, QUE_STAT, QUE_GQUE, QUE_QUED, QUE_LQUE, QUE_PQUE, QUE_RUNG, QUE_GRUN ); // Adjusting cluster display filter $showvo = ""; if ( substr($display,0,2) == "vo" ) { $showvo = substr(strrchr($display,"="),1); if ($debug) dbgmsg(" ::: ".$errors["105"]."$showvo"); } if ( $display != "all" && !$showvo ) $filter = "(&".$filstr."(".$display."))"; //========================= GET CLUSTER LIST ============================ $gentries = array(); // EGIIS if ( ! empty($giislist) ) { $ngiis = count($giislist); $ts1 = time(); $gentries = recursive_giis_info($giislist,"cluster",$errors,$debug,1); $ts2 = time(); if($debug) dbgmsg("
    ".$errors["106"].$ngiis." (".($ts2-$ts1).$errors["104"].")
    "); } // EMIR if ( ! empty($emirslist)) $gentries = emirs_info($emirslist,"cluster",$errors,$gentries,$debug,$cert); // ARCHERY if ( ! empty($archery_list) ) $gentries = array_merge($gentries, archery_info($archery_list, $schema, $errors, $debug)); //======================================================================= $nc = count($gentries); if ( !$nc ) { // NO SITES FOUND! $errno = "1"; echo "
    ".$errors[$errno]."\n"; return $errno; } else { if ( $debug == 2 ) { dbgmsg("

    ".$errors["119"]."cluster: ".$nc."
    "); foreach ( $gentries as $num=>$val ) dbgmsg($val["host"].":".$val["base"]."
    "); } } $dsarray = array (); $hnarray = array (); $pnarray = array (); $dnarray = array (); $sitetag = array (); /* a tag to skip duplicated entries */ // Purging cluster entries for ( $k = 0; $k < $nc; $k++ ) { $clhost = $gentries[$k]["host"]; $clport = $gentries[$k]["port"]; $basedn = $gentries[$k]["base"]; $fp = @fsockopen($clhost, $clport, $errno, $errstr, 2); $ldapuri = "ldap://".$clhost.":".$clport; $clconn = ldap_connect($ldapuri); if ( $fp && $clconn && !@$sitetag[$clhost] ) { fclose($fp); array_push($dsarray,$clconn); array_push($hnarray,$clhost); array_push($pnarray,$clport); array_push($dnarray,$basedn); @ldap_set_option($clconn, LDAP_OPT_NETWORK_TIMEOUT, $tout); $sitetag[$clhost] = 1; /* filtering tag */ if ($debug==2) dbgmsg("$k - $clhost:$clport "); } elseif ( $fp && $clconn && @$sitetag[$clhost] ) { fclose($fp); if ( $schema == "GLUE2"){ // Add only the base option $index = array_keys($hnarray, $clhost); $dnarray[$index[0]] = DN_GLUE; } elseif ( $schema == "NG"){ // Add only the base option $index = array_keys($hnarray, $clhost); $dnarray[$index[0]] = DN_LOCAL; } else { array_push($dsarray,$clconn); array_push($hnarray,$clhost); array_push($pnarray,$clport); array_push($dnarray,$basedn); } } } $nhosts = count($dsarray); if( $debug == 2 ) dbgmsg("
    ".$nhosts.$errors["108"]."
    "); if ( !$nhosts ) { // NO SITES REPLY... $errno = "2"; echo "
    ".$errors[$errno]."\n"; return $errno; } // Search all clusters and queues $ts1 = time(); $srarray = @ldap_search($dsarray,$dnarray,$filter,$lim,0,0,$tlim,LDAP_DEREF_NEVER); // If using the patched LDAP //$srarray = @ldap_search($dsarray,DN_LOCAL,$filter,$lim,0,0,$tlim,LDAP_DEREF_NEVER,$tout); $ts2 = time(); if($debug) dbgmsg("
    ".$errors["109"]." (".($ts2-$ts1).$errors["104"].")
    "); /* * $ts1 = time(); * $qsarray = @ldap_search($dsarray,DN_LOCAL,$qfilstr,$qlim,0,0,$tlim,LDAP_DEREF_NEVER); * // Fall back to a conventional LDAP * // if ( !count($qsrarray)) $qsarray = @ldap_search($dsarray,DN_LOCAL,$qfilstr,$qlim,0,0,$tlim,LDAP_DEREF_NEVER); * $ts2 = time(); if($debug) dbgmsg("
    ".$errors["110"]." (".($ts2-$ts1).$errors["104"].")
    "); */ // Loop on clusters for ( $ids = 0; $ids < $nhosts; $ids++ ) { $entries = array(); $jentries = array(); $gentries = array(); $rowcont = array(); $sr = $srarray[$ids]; $hn = $hnarray[$ids]; $pn = $pnarray[$ids]; $ds = $dsarray[$ids]; $nr = @ldap_count_entries($ds,$sr); if ( !$sr || !$ds || !$nr ) { $error = ldap_error($ds); if ( $error == "Success" ) $error = $errors["3"]; if ( $debug ) dbgmsg("".$errors["111"]."$hn ($error)
    "); $sr = FALSE; } if ($ds && $sr) { $entries = @ldap_get_entries($ds,$sr); $nclusters = $entries["count"]; /* Actually, cluster and queue blocks, 2+ */ if ( !$nclusters ) { if ( $debug ) dbgmsg("$hn:".$errors["3"]."
    "); continue; } $nclu = 0; $nqueues = 0; $allqrun = 0; $gridjobs = 0; $allqueued = 0; $gridqueued = 0; $lrmsqueued = 0; $prequeued = 0; $totgridq = 0; $toflag2 = FALSE; $stopflag = FALSE; for ($i=0; $i<$nclusters; $i++) { $curdn = $entries[$i]["dn"]; $preflength = strrpos($curdn,","); $basedn = substr($curdn,$preflength+1); $allbasedn = strtolower(substr($curdn,$preflength-17)); if ($basedn == DN_GLUE) { // check if it is a site or a job; count $preflength = strpos($curdn,":"); $preflength = strpos($curdn,":",$preflength+1); $object = substr($curdn,$preflength+1,strpos($curdn,":",$preflength+1)-$preflength-1); if ($object=="ComputingService") { $dnparts = ldap_explode_dn($curdn,0); $endpointArray=explode(":",$dnparts[0]); $curname = $endpointArray[3]; $curport = $pn; $curalias = $entries[$i][GCLU_ANAM][0]; // Manipulate alias: replace the string if necessary and cut off at 22 characters; strip HTML tags if (file_exists("cnvalias.inc")) include('cnvalias.inc'); $curalias = strip_tags($curalias); if ( strlen($curalias) > 22 ) $curalias = substr($curalias,0,21) . ">"; $totqueued = @($entries[$i][GCLU_QJOB][0]) ? $entries[$i][GCLU_QJOB][0] : 0; /* deprecated since 0.5.38 */ $gmqueued = @($entries[$i][GCLU_PQUE][0]) ? $entries[$i][GCLU_PQUE][0] : 0; /* new since 0.5.38 */ $clstring = popup("clusdes.php?host=$curname&port=$curport&schema=$schema",700,620,1,$lang,$debug); $nclu++; } elseif ($object=="ComputingManager") { $curtotcpu = @($entries[$i][GCLU_TCPU][0]) ? $entries[$i][GCLU_TCPU][0] : 0; if ( !$curtotcpu && $debug ) dbgmsg("$curname".$errors["113"]."
    "); $curtotjobs = @($entries[$i][GCLU_TJOB][0]) ? $entries[$i][GCLU_TJOB][0] : 0; $curusedcpu = @($entries[$i][GCLU_UCPU][0]) ? $entries[$i][GCLU_UCPU][0] : -1; } elseif ($object=="ComputingShare") { $qstatus = $entries[$i][GQUE_STAT][0]; if ( $qstatus != "production" ) $stopflag = TRUE; $allqrun += @($entries[$i][GQUE_RUNG][0]) ? ($entries[$i][GQUE_RUNG][0]) : 0; $gridjobs += @($entries[$i][GQUE_GRUN][0]) ? ($entries[$i][GQUE_GRUN][0]) : 0; $gridqueued += @($entries[$i][GQUE_GQUE][0]) ? ($entries[$i][GQUE_GQUE][0]) : 0; $allqueued += @($entries[$i][GQUE_QUED][0]) ? ($entries[$i][GQUE_QUED][0]) : 0; /* deprecated since 0.5.38 */ $lrmsqueued += @($entries[$i][GQUE_LQUE][0]) ? ($entries[$i][GQUE_LQUE][0]) : 0; /* new since 0.5.38 */ $prequeued += @($entries[$i][GQUE_PQUE][0]) ? ($entries[$i][GQUE_PQUE][0]) : 0; /* new since 0.5.38 */ $nqueues++; } elseif ($object=="Location") { $dnparts = ldap_explode_dn($curdn,0); $endpointArray=explode(":",$dnparts[0]); $curname = $endpointArray[3]; $curport = $pn; // Country name massaging $vo = guess_country($curname,$entries[$i][GCLU_ZIPC][0]); if ($debug==2) dbgmsg("$ids: $curname".$errors["112"]."$vo
    "); $vostring = $_SERVER['PHP_SELF']."?display=vo=$vo"; if ( $lang != "default") $vostring .= "&lang=".$lang; if ( $debug ) $vostring .= "&debug=".$debug; $country = $vo; if ( $yazyk !== "en" ) $country = $strings["tlconvert"][$vo]; $country_content = "\"".$errors["312"]."\" ".$country." "; if (!in_array($country_content,$rowcont)){ $rowcont[] = $country_content; } } } elseif ($allbasedn == DN_LOCAL) { // check if it is a site or a job; count $preflength = strpos($curdn,"-"); $object = substr($curdn,$preflength+1,strpos($curdn,"-",$preflength+1)-$preflength-1); if ($object=="cluster") { $dnparts = ldap_explode_dn($curdn,0); $curname = substr(strstr($dnparts[0],"="),1); $curport = $pn; // Country name massaging $zip = ""; if ( !empty($entries[$i][CLU_ZIPC][0]) ) $zip = $entries[$i][CLU_ZIPC][0]; $vo = guess_country($curname,$zip); if ($debug==2) dbgmsg("$ids: $curname".$errors["112"]."$vo
    "); $vostring = $_SERVER['PHP_SELF']."?display=vo=$vo"; if ( $lang != "default") $vostring .= "&lang=".$lang; if ( $debug ) $vostring .= "&debug=".$debug; $country = $vo; if ( $yazyk !== "en" ) $country = $strings["tlconvert"][$vo]; $rowcont[] = "\"".$errors["312"]."\" ".$country." "; $curtotcpu = @($entries[$i][CLU_TCPU][0]) ? $entries[$i][CLU_TCPU][0] : 0; if ( !$curtotcpu && $debug ) dbgmsg("$curname".$errors["113"]."
    "); $curalias = $entries[$i][CLU_ANAM][0]; // Manipulate alias: replace the string if necessary and cut off at 22 characters; strip HTML tags if (file_exists("cnvalias.inc")) include('cnvalias.inc'); $curalias = strip_tags($curalias); if ( strlen($curalias) > 22 ) $curalias = substr($curalias,0,21) . ">"; $curtotjobs = @($entries[$i][CLU_TJOB][0]) ? $entries[$i][CLU_TJOB][0] : 0; $curusedcpu = @($entries[$i][CLU_UCPU][0]) ? $entries[$i][CLU_UCPU][0] : -1; $totqueued = @($entries[$i][CLU_QJOB][0]) ? $entries[$i][CLU_QJOB][0] : 0; /* deprecated since 0.5.38 */ $gmqueued = @($entries[$i][CLU_PQUE][0]) ? $entries[$i][CLU_PQUE][0] : 0; /* new since 0.5.38 */ $clstring = popup("clusdes.php?host=$curname&port=$curport",700,620,1,$lang,$debug); $nclu++; } elseif ($object=="queue") { $qstatus = $entries[$i][QUE_STAT][0]; if ( $qstatus != "active" ) $stopflag = TRUE; $allqrun += @($entries[$i][QUE_RUNG][0]) ? ($entries[$i][QUE_RUNG][0]) : 0; $gridjobs += @($entries[$i][QUE_GRUN][0]) ? ($entries[$i][QUE_GRUN][0]) : 0; $gridqueued += @($entries[$i][QUE_GQUE][0]) ? ($entries[$i][QUE_GQUE][0]) : 0; $allqueued += @($entries[$i][QUE_QUED][0]) ? ($entries[$i][QUE_QUED][0]) : 0; /* deprecated since 0.5.38 */ $lrmsqueued += @($entries[$i][QUE_LQUE][0]) ? ($entries[$i][QUE_LQUE][0]) : 0; /* new since 0.5.38 */ $prequeued += @($entries[$i][QUE_PQUE][0]) ? ($entries[$i][QUE_PQUE][0]) : 0; /* new since 0.5.38 */ $nqueues++; } } } if ( !$nclu && $nqueues ) { if ( $debug ) dbgmsg("$hn:".$errors["3"].": ".$errors["111"].$errors["410"]."
    "); continue; } if ( $nclu > 1 && $debug ) dbgmsg("$hn:".$errors["3"].": $nclu ".$errors["406"]."
    "); if (!$nqueues) $toflag2 = TRUE; if ($debug==2 && $prequeued != $gmqueued) dbgmsg("$curname: cluster-prelrmsqueued != sum(queue-prelrmsqueued)"); $allrun = ($curusedcpu < 0) ? $allqrun : $curusedcpu; if ($gridjobs > $allrun) $gridjobs = $allrun; /* For versions < 0.5.38: * Some Grid jobs are counted towards $totqueued and not towards $allqueued * (those in GM), so $totqueued - $allqueued = $gmqueued, * and $truegridq = $gmqueued + $gridqueued * and $nongridq = $totqueued - $truegridq == $allqueued - $gridqueued * hence $truegridq = $totqueued - $nongridq */ $nongridq = ($totqueued) ? $allqueued - $gridqueued : $lrmsqueued; $truegridq = ($totqueued) ? $totqueued - $nongridq : $gridqueued + $prequeued; // temporary hack: // $truegridq = $gridqueued; // $formtgq = sprintf(" s",$truegridq); $formngq = sprintf("\ \;s",$nongridq); $localrun = $allrun - $gridjobs; $gridload = ($curtotcpu > 0) ? $gridjobs/$curtotcpu : 0; $clusload = ($curtotcpu > 0) ? $allrun/$curtotcpu : 0; $tstring = urlencode("$gridjobs+$localrun"); $jrstring = popup("jobstat.php?host=$curname&port=$curport&status=Running&jobdn=all",600,500,2,$lang,$debug); $jqstring = popup("jobstat.php?host=$curname&port=$curport&status=Queueing&jobdn=all",600,500,2,$lang,$debug); if ( $schema == "GLUE2"){ $jrstring = popup("jobstat.php?host=$curname&port=$curport&status=Running&jobdn=all&schema=$schema",600,500,2,$lang,$debug); $jqstring = popup("jobstat.php?host=$curname&port=$curport&status=Queueing&jobdn=all&schema=$schema",600,500,2,$lang,$debug); } if ( $toflag2 ) { $tstring .= " (no queue info)"; // not sure if this is localizeable at all } elseif ( $stopflag ) { $tstring .= " (queue inactive)"; // not sure if this is localizeable at all } // Add a cluster row $rowcont[] = " $curalias"; $rowcont[] = "$curtotcpu"; if ( $curtotcpu ) { $rowcont[] = "\"$gridjobs+$localrun\""; } else { $rowcont[] = "\"$gridjobs+$localrun\""; } // $rowcont[] = "$totqueued"; $rowcont[] = "$truegridq+$nongridq"; // Not adding anymore, cache instead // $ctable->addrow($rowcont); $tcont[] = $rowcont; $rowcont = array (); } } // Dump the collected table cache_table($cachefile,$tcont); } // HTML table initialization $ctable = new LmTableSp($module,$toppage->$module); // Sort /** possible ordering keywords: * country - sort by country, default * cpu - sort by advertised CPU number * grun - sort by number of running Grid jobs */ $ostring = "comp_by_".$order; usort($tcont,$ostring); $nrows = count($tcont); $votolink = array(); $affiliation = array(); foreach ( $tcont as $trow ) { $vo = $trow[0]; $vo = substr(stristr($vo,"./mon-icons/"),12); $vo = substr($vo,0,strpos($vo,".")); if ( !in_array($vo,$votolink) ) $votolink[]=$vo; array_push($affiliation,$vo); } $affcnt = array_count_values($affiliation); $prevvo = "boo"; $sumcpu = 0; $sumgridjobs = 0; $sumlocljobs = 0; $sumclusters = 0; $sumgridqueued = 0; $sumloclqueued = 0; //$sumqueued = 0; // actual loop foreach ( $tcont as $trow ) { $gridjobs = $trow[3]; $gridjobs = substr(stristr($gridjobs,"alt=\""),5); $gridjobs = substr($gridjobs,0,strpos($gridjobs,"+")); $localrun = $trow[3]; $localrun = substr(stristr($localrun,"+"),1); $localrun = substr($localrun,0,strpos($localrun,"\" w")); $truegridq = $trow[4]; $truegridq = substr(stristr($truegridq,""),3); $truegridq = substr($truegridq,0,strpos($truegridq,"")); $nongridq = $trow[4]; $nongridq = substr(stristr($nongridq,"+"),1); $vo = $trow[0]; $vo = substr(stristr($vo,"./mon-icons/"),12); $vo = substr($vo,0,strpos($vo,".")); if ( @$showvo && $showvo != $vo ) 
continue; $sumcpu += $trow[2]; $sumgridjobs += $gridjobs; $sumlocljobs += $localrun; $sumgridqueued += $truegridq; $sumloclqueued += $nongridq; // $sumqueued += $totqueued; $sumclusters ++; if ( $vo != $prevvo && $order == "country" ) { // start new country rowspan $prevvo = $vo; $vostring = $trow[0]; $ctable->addspacer("#000099"); $ctable->rowspan( $affcnt[$vo], $vostring, "#FFF2DF" ); $tcrow = array_shift($trow); $ctable->addrow($trow); } else { if ( $order == "country" ) $tcrow = array_shift($trow); $ctable->addrow($trow); } } $tcont = array(); $ctable->addspacer("#990000"); $rowcont[] = "".$errors["405"].""; $rowcont[] = "$sumclusters".$errors["406"].""; $rowcont[] = "$sumcpu"; $rowcont[] = "$sumgridjobs + $sumlocljobs"; $rowcont[] = "$sumgridqueued + $sumloclqueued"; // $rowcont[] = "$sumqueued"; $ctable->addrow($rowcont, "#ffffff"); $ctable->close(); // To change language, link back to ALL $linkback = $_SERVER['PHP_SELF']; if ( $debug ) { $linkback .= "?debug=".$debug; $separator = "&"; } else { $separator = "?"; } // Show flags if only one country is chosen if ( @$showvo ) { echo "
    \n"; foreach ( $votolink as $volink ) { $vostring = $_SERVER['PHP_SELF']."?display=vo=$volink"; if ( $lang != "default" ) $vostring .= "&lang=".$lang; if ( $debug ) $vostring .= "&debug=".$debug; $voimage = "\"".$errors["312"]."\""; echo "$voimage  "; } if ( $lang != "default") $linkall = $linkback.$separator."lang=".$lang; echo "".$errors["409"]."
    \n"; // Show ALL echo "
    \n"; } else { // Show languages $translations = scandir(getcwd()."/lang"); echo "

    \n"; foreach ( $translations as $transfile ) { $twoletcod = substr($transfile,0,2); if ( stristr($transfile,".") == ".inc" && $twoletcod != "us" ) { $linklang = $linkback.$separator."lang=".$twoletcod; echo "$twoletcod  "; } } echo "
    \n"; } return 0; // Done $toppage->close(); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/mylo.js0000644000000000000000000000013214152153376022763 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.501578994 nordugrid-arc-6.14.0/src/services/monitor/mylo.js0000644000175000002070000000051714152153376022753 0ustar00mockbuildmock00000000000000function mylo (fnam,lnam,dom1,dom2){ if ( lnam == "" ) { var name = fnam; } else { var name = fnam + "." + lnam; } var host = dom1 + "." + dom2; var complete = name + "@" + host; output = "" + complete + ""; document.write(output); return output; } nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/attlist.php0000644000000000000000000000013214152153376023642 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.491578844 nordugrid-arc-6.14.0/src/services/monitor/attlist.php0000644000175000002070000000252514152153376023633 0ustar00mockbuildmock00000000000000title); $strings = &$toppage->strings; $giislist = &$toppage->giislist; $archery_list = &$toppage->archery_list; require_once('attlist.inc'); $object = $_GET["object"]; $attribute = $_GET["attribute"]; $filter = $_GET["filter"]; if ( !$filter ) $filter=""; if ( !$attribute ) $attribute="nordugrid-cluster-middleware"; if ( !$object ) $object="cluster"; $attribute = rawurldecode($attribute); $filter = rawurldecode($filter); if ( $attribute[1]==":") { $attribute = unserialize($attribute); $filter = unserialize($filter); $attributes = $attribute; $filters = $filter; $n = count($attributes); $signs = array_fill(0,$n,"="); } else { $attributes = array ($attribute); $signs = array ("="); $filters = array ($filter); } do_attlist($object,$attributes,$signs,$filters,$strings,$giislist,$archery_list); // Done $toppage->close(); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/quelist.php0000644000000000000000000000013214152153376023644 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.496578919 nordugrid-arc-6.14.0/src/services/monitor/quelist.php0000644000175000002070000001721214152153376023634 0ustar00mockbuildmock00000000000000title; $module = &$toppage->module; $strings = &$toppage->strings; $errors = &$toppage->errors; $clstring = popup("clusdes.php?host=$host&port=$port&schema=$schema",700,620,1,$lang,$debug); // Header table $toppage->tabletop("","".$toptitle." ".$qname." (".$host.")"); $lim = array( "dn", JOB_NAME, JOB_GOWN, JOB_SUBM, JOB_STAT, JOB_COMP, JOB_USET, JOB_USEM, JOB_ERRS, JOB_CPUS, JOB_EQUE ); if ( $schema == "GLUE2") { $lim = array( "dn", GJOB_NAME, GJOB_GOWN, GJOB_SUBM, GJOB_STAT, GJOB_COMP, GJOB_USET, GJOB_USEM, GJOB_ERRS, GJOB_CPUS, GJOB_EQUE ); } if ( $debug ) { ob_end_flush(); ob_implicit_flush(); } $tlim = 15; $tout = 20; if( $debug ) dbgmsg("
    :::> ".$errors["101"].$tlim.$errors["102"].$tout.$errors["103"]." <:::

    "); // ldapsearch filter strings for cluster and queues $filstr = "(objectclass=".OBJ_AJOB.")"; $dn = DN_LOCAL; $topdn = DN_GLOBL; if ( $schema == "GLUE2") { $filstr = "(objectclass=".GOBJ_AJOB.")"; $dn = "GLUE2GroupID=services,".DN_GLUE; } // Establish connection to the requested LDAP server $ldapuri = "ldap://".$host.":".$port; $ds = ldap_connect($ldapuri); if ($ds) { // If contact OK, search for NorduGrid clusters $basedn = QUE_NAME."=".$qname.",".CLU_NAME."=".$host.","; $locdn = $basedn.$dn; if ( $schema == "GLUE2") { $basedn = GQUE_NAME."=".$qname.",".GCLU_NAME."=".$host.","; $basedn = "GLUE2ShareID=urn:ogf:ComputingShare:".$host.":".$qname.",GLUE2ServiceID=urn:ogf:ComputingService:".$host.":arex,"; $locdn = $basedn.$dn; } $aaa = ldap_nice_dump($strings,$ds,$locdn); echo "
    "; $ts1 = time(); $sr = ldap_search($ds,$dn,$filstr,$lim,0,0,$tlim,LDAP_DEREF_NEVER); $ts2 = time(); if($debug) dbgmsg("
    ".$errors["110"]." (".($ts2-$ts1).$errors["104"].")
    "); // Fall back to conventional LDAP // if (!$sr) $sr = ldap_search($ds,$dn,$filstr,$lim,0,0,$tlim,LDAP_DEREF_NEVER); if ($sr) { $nmatch = ldap_count_entries($ds,$sr); if ($nmatch > 0) { $entries = ldap_get_entries($ds,$sr); $njobs = $entries["count"]; define("CMPKEY",JOB_SUBM); if ( $schema == "GLUE2") define("CMPKEY",GJOB_SUBM); usort($entries,"ldap_entry_comp"); // HTML table initialisation $ltable = new LmTable($module,$toppage->$module); // loop on jobs $nj = 0; for ($i=1; $i<$njobs+1; $i++) { if ( $schema == "GLUE2") { $equeue = $entries[$i][GJOB_EQUE][0]; if ( $equeue !== $qname ) { if ( $debug == 2 ) dbgmsg($equeue." != ".$qname); continue; } $jobdn = rawurlencode($entries[$i]["dn"]); $curstat = $entries[$i][GJOB_STAT][0]; $stahead = substr($curstat,0,12); $ftime = ""; if ($stahead=="FINISHED at:") { $ftime = substr(strrchr($curstat, " "), 1); } elseif ($curstat=="FINISHED") { $ftime = $entries[$i][GJOB_COMP][0]; } if ( $ftime ) { $ftime = cnvtime($ftime); $curstat = "FINISHED at: ".$ftime; } $uname = $entries[$i][GJOB_GOWN][0]; $encuname = rawurlencode($uname); $family = cnvname($uname, 2); $jname = htmlentities($entries[$i][GJOB_NAME][0]); $jobname = ($entries[$i][GJOB_NAME][0]) ? $jname : "N/A"; $time = ($entries[$i][GJOB_USET][0]) ? $entries[$i][GJOB_USET][0] : ""; $memory = ($entries[$i][GJOB_USEM][0]) ? $entries[$i][GJOB_USEM][0] : ""; $ncpus = ($entries[$i][GJOB_CPUS][0]) ? $entries[$i][GJOB_CPUS][0] : ""; $error = ($entries[$i][GJOB_ERRS][0]); if ( $error ) $error = ( preg_match("/user/i",$error) ) ? "X" : "!"; $status = "All"; $newwin = popup("jobstat.php?host=$host&port=$port&status=$status&jobdn=$jobdn&schema=$schema",750,430,4,$lang,$debug); $usrwin = popup("userlist.php?bdn=$topdn&owner=$encuname&schema=$schema",700,500,5,$lang,$debug); } else { //NG schema parse $equeue = $entries[$i][JOB_EQUE][0]; if ( $equeue !== $qname ) { if ( $debug == 2 ) dbgmsg($equeue." != ".$qname); continue; } $jobdn = rawurlencode($entries[$i]["dn"]); $curstat = $entries[$i][JOB_STAT][0]; $stahead = substr($curstat,0,12); $ftime = ""; if ($stahead=="FINISHED at:") { $ftime = substr(strrchr($curstat, " "), 1); } elseif ($curstat=="FINISHED") { $ftime = $entries[$i][JOB_COMP][0]; } if ( $ftime ) { $ftime = cnvtime($ftime); $curstat = "FINISHED at: ".$ftime; } $uname = $entries[$i][JOB_GOWN][0]; $encuname = rawurlencode($uname); $family = cnvname($uname, 2); $jname = htmlentities($entries[$i][JOB_NAME][0]); $jobname = ( !empty($entries[$i][JOB_NAME][0]) ) ? $jname : "N/A"; $time = ( !empty($entries[$i][JOB_USET][0]) ) ? $entries[$i][JOB_USET][0] : ""; $memory = ( !empty($entries[$i][JOB_USEM][0]) ) ? $entries[$i][JOB_USEM][0] : ""; $ncpus = ( !empty($entries[$i][JOB_CPUS][0]) ) ? $entries[$i][JOB_CPUS][0] : ""; $error = ( !empty($entries[$i][JOB_ERRS][0]) ); if ( $error ) $error = ( preg_match("/user/i",$error) ) ? "X" : "!"; $status = "All"; $newwin = popup("jobstat.php?host=$host&port=$port&status=$status&jobdn=$jobdn",750,430,4,$lang,$debug); $usrwin = popup("userlist.php?bdn=$topdn&owner=$encuname",700,500,5,$lang,$debug); } // filling the table $nj++; $lrowcont[] = "$nj $error"; $lrowcont[] = "$jobname"; $lrowcont[] = "$family"; $lrowcont[] = "$curstat"; $lrowcont[] = "$time"; $lrowcont[] = "$memory"; $lrowcont[] = "$ncpus"; $ltable->addrow($lrowcont); $lrowcont = array (); } $ltable->close(); } else { $errno = "4"; echo "
    ".$errors[$errno]."\n"; return $errno; } } else { $errno = "5"; echo "
    ".$errors[$errno]."\n"; return $errno; } ldap_free_result($sr); ldap_close($ds); return 0; } else { $errno = "6"; echo "
    ".$errors[$errno]."\n"; return $errno; } // Done $toppage->close(); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/README.in0000644000000000000000000000013214152153376022732 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.489578814 nordugrid-arc-6.14.0/src/services/monitor/README.in0000644000175000002070000000520014152153376022714 0ustar00mockbuildmock00000000000000NorduGrid ARC version @VERSION@ Grid Monitor ============ Description ----------- Set of PHP scripts, providing a Web interface to the NorduGrid Information System. Should be working for any similar LDAP-based service, if the schema configuration is done carefuly. The directory contains: cache - directory for front page cache includes - directory with common methods and configuration file (settings.inc) lang - directory with localizations man - directory for the man page mon-icons - directory with icons allusers.php - list grid users attlist.php - show values of selected attributes on the grid clusdes.php - show cluster or storage information discover.php - list attributes specific for an object for consecutive search monitor.in - lynx call for the monitor (template) help.php - print help jobstat.php - show running/other jobs in a queue loadmon.php - main grid monitor script Makefile.am - Makefile template monitor.js - Java script for pop-up screens mylo.js - Java script for mail addresses quelist.php - show queue details and jobs README.in - README file (template) sestat.php - list storage elements userlist.php - show allowed sites and list of jobs of a user volist.php - static list of some VOs vo-users.php - lists users in a VO Requirements ------------ - GD library (http://www.boutell.com/gd/) - LDAP library (e.g., http://www.openldap.org) - PHP4 or PHP5 (http://www.php.net) compiled with LDAP and GD extensions - HTTP server compiled with PHP4 or PHP5 - Working ARC information system instance or a similar LDAP-based service - Optional: running Virtual Organisation LDAP-based service Installation ------------ 1. Copy all the files in a folder, accessible by the HTTP server 2. Verify that this folder contains a directory called "cache" and that it is writeable by the HTTP server. If your server is configured to have write access only to a specific location, such as "../htdata", modify CACHE_LOCATION value in "includes/settings.inc" accordingly 3. Modify "includes/settings.inc" according to your infosystem structure and liking: most likely, you want to modify the $giislist array by removing some GIISes/GRISes and adding other(s) 4. Run the whole stuff by loading "loadmon.php" into your browser Fine tuning ----------- - Making output more human-readable: modify "/lang/*.inc", "includes/cnvname.inc", "includes/cnvalias.inc". - Preventing sites from being polled: modify "includes/blacklist.inc". Otherwise, the file is not needed. Contact ------- Oxana Smirnova, oxana.smirnova@hep.lu.se nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/sestat.php0000644000000000000000000000013214152153376023461 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.497578934 nordugrid-arc-6.14.0/src/services/monitor/sestat.php0000644000175000002070000001402014152153376023443 0ustar00mockbuildmock00000000000000title; $module = &$toppage->module; $strings = &$toppage->strings; $errors = &$toppage->errors; $giislist = &$toppage->giislist; // Header table $toppage->tabletop("".$toptitle."

    ",""); if ( $debug ) { ob_end_flush(); ob_implicit_flush(); } $tlim = 10; $tout = 15; if($debug) dbgmsg("
    :::> ".$errors["101"].$tlim.$errors["102"].$tout.$errors["103"]." <:::
    "); // Arrays defining the attributes to be returned $lim = array( "dn", SEL_NAME, SEL_ANAM, SEL_CURL, SEL_BURL, SEL_TYPE, SEL_FREE, SEL_TOTA ); // ldapsearch filter strings for clusters and queues $filstr = "(objectclass=".OBJ_STEL.")"; // Top GIIS server: get all from the pre-defined list $ngiis = count($giislist); $ts1 = time(); $gentries = recursive_giis_info($giislist,"nordugrid-SE",$errors,$debug); $ts2 = time(); if($debug) dbgmsg("
    ".$errors["106"].$ngiis." (".($ts2-$ts1).$errors["104"].")
    "); $nc = count($gentries); if ( !$nc ) { // NO SITES FOUND! $errno = "1"; echo "
    ".$errors[$errno]."\n"; return $errno; } $dsarray = array (); $hnarray = array (); $sitetag = array (); /* a tag to skip duplicated entries */ for ( $k = 0; $k < $nc; $k++ ) { $clhost = $gentries[$k]["host"]; $clport = $gentries[$k]["port"]; $ldapuri = "ldap://".$clhost.":".$clport; $clconn = ldap_connect($ldapuri); if ( $clconn && !$sitetag[$clhost] ) { array_push($dsarray,$clconn); array_push($hnarray,$clhost); $sitetag[$clhost] = 1; /* filtering tag */ if ($debug==2) dbgmsg("$k - $clhost:$clport "); } } $nhosts = count($dsarray); if ( !$nhosts ) { // NO SITES REPLY... $errno = "2"; echo "
    ".$errors[$errno]."\n"; return $errno; } // Search all SEs $ts1 = time(); $srarray = @ldap_search($dsarray,DN_LOCAL,$filstr,$lim,0,0,$tlim,LDAP_DEREF_NEVER); $ts2 = time(); if($debug) dbgmsg("
    ".$errors["124"]." (".($ts2-$ts1).$errors["104"].")
    "); $ctable = new LmTableSp($module,$toppage->$module); // Loop on SEs $senum = 0; $space = 0; $capacity = 0; for ( $ids = 0; $ids < $nhosts; $ids++ ) { $sr = $srarray[$ids]; $ds = $dsarray[$ids]; $hn = $hnarray[$ids]; /* host name, for debugging */ if ($ds && $sr) { $entries = @ldap_get_entries($ds,$sr); $nclusters = $entries["count"]; /* May be several SEs! */ if ( !$nclusters ) continue; for ( $i = 0; $i < $nclusters; $i++) { $senum++; $curdn = $entries[$i]["dn"]; $curname = $entries[$i][SEL_NAME][0]; $curalias = $entries[$i][SEL_ANAM][0]; $curspace = ( $entries[$i][SEL_FREE][0] ) ? $entries[$i][SEL_FREE][0] : 0; // $curcapacity = ( $entries[$i][SEL_TOTA][0] ) ? $entries[$i][SEL_TOTA][0] : $errors["407"]; $curcapacity = ( $entries[$i][SEL_TOTA][0] ) ? $entries[$i][SEL_TOTA][0] : $curspace; $cururl = ( $entries[$i][SEL_BURL][0] ) ? $entries[$i][SEL_BURL][0] : $entries[$i][SEL_CURL][0]; $curtype = $entries[$i][SEL_TYPE][0]; $clstring = popup("clusdes.php?host=$curname&port=$curport&isse=1&debug=$debug",700,620,1,$lang,$debug); $curspace = intval($curspace/1000); $occupancy = 1; // by default, all occupied $space += $curspace; // if ( $curcapacity != $errors["407"] ) { if ( $curcapacity != 0 ) { $curcapacity = intval($curcapacity/1000); $occupancy = ($curcapacity - $curspace)/$curcapacity; $capacity += $curcapacity; } $tstring = $curspace."/".$curcapacity; $tlen = strlen($tstring); if ($tlen<11) { $nspaces = 11 - $tlen; for ( $is = 0; $is < $nspaces; $is++ ) $tstring .= " "; } $tstring = urlencode($tstring); if ($debug==2) dbgmsg("$senum: $curname at $hn
    "); if ( strlen($curalias) > 15 ) $curalias = substr($curalias,0,15) . ">"; // $clstring = popup("clusdes.php?host=$curname&port=2135",700,620,1,$lang,$debug); $rowcont[] = "$senum"; $rowcont[] = " $curalias"; $rowcont[] = "\"$tstring\""; // $rowcont[] = $curcapacity.$errors["408"]; // $rowcont[] = $curspace.$errors["408"]; $rowcont[] = "$curname"; $rowcont[] = "$cururl"; $rowcont[] = "$curtype"; $ctable->addrow($rowcont); $rowcont = array (); } } $entries = array(); $jentries = array(); $gentries = array(); } $occupancy = ($capacity - $space)/$capacity; $tstring = $space."/".$capacity; $ctable->addspacer("#ffcc33"); $rowcont[] = " "; $rowcont[] = "".$errors["405"].""; $rowcont[] = "\"$tstring\""; //$rowcont[] = "$capacity".$errors["408"].""; //$rowcont[] = "$space".$errors["408"].""; $rowcont[] = " "; $rowcont[] = " "; $rowcont[] = " "; $ctable->addrow($rowcont, "#ffffff"); $ctable->close(); return 0; // Done $toppage->close(); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/jobstat.php0000644000000000000000000000013214152153376023624 xustar000000000000000030 mtime=1638455038.439646485 30 atime=1638455038.512647582 30 ctime=1638455100.494578889 nordugrid-arc-6.14.0/src/services/monitor/jobstat.php0000644000175000002070000001762114152153376023620 0ustar00mockbuildmock00000000000000title; $module = &$toppage->module; $strings = &$toppage->strings; $errors = &$toppage->errors; // Header table $titles = explode(":",$toptitle); // two alternative titles, separated by column if ($jobdn=="all") { $clstring = popup("clusdes.php?host=$host&port=$port&schema=$schema",700,620,1,$lang,$debug); $gtitle = "".$titles[0]." $host"; } else { $jobdn = rawurldecode($jobdn); $jobdn = preg_replace("/\"/","",$jobdn); $dn_pieces = ldap_explode_dn($jobdn,1); $jobgid = $dn_pieces[0]; $gtitle = "".$titles[1].": $jobgid"; } $toppage->tabletop("",$gtitle); // Arrays defining the attributes to be returned $lim = array( "dn", JOB_NAME, JOB_EQUE, JOB_GOWN, JOB_STAT, JOB_USET, JOB_SUBM, JOB_CPUS ); // ldapsearch filter string for jobs $filstr="(objectclass=".OBJ_AJOB.")"; if ( $schema == "GLUE2") { $lim = array( "dn", GJOB_NAME, GJOB_EQUE, GJOB_GOWN, GJOB_STAT, GJOB_USET, GJOB_SUBM, GJOB_CPUS ); $filstr="(objectclass=".GOBJ_AJOB.")"; } if ( $debug ) { ob_end_flush(); ob_implicit_flush(); } $tlim = 15; $tout = 20; if( $debug ) dbgmsg("
    :::> ".$errors["101"].$tlim.$errors["102"].$tout.$errors["103"]." <:::

    "); // Establish connection to the requested LDAP server $ldapuri = "ldap://".$host.":".$port; $ds = ldap_connect($ldapuri); $bdn = DN_LOCAL; $topdn = DN_GLOBL; if ( $schema == "GLUE2") { $bdn = DN_GLUE; if ($jobdn != "all") $bdn = ""; } if ($ds) { // Single job info dump and quit if ($jobdn != "all") { // $basedn = explode("Mds",$jobdn); $basedn = preg_split("/mds/i",$jobdn); $locdn = $basedn[0].$bdn; $thisdn = ldap_nice_dump($strings,$ds,$locdn); ldap_close($ds); return 0; } // Loop over all the jobs $sr = @ldap_search($ds,$bdn,$filstr,$lim,0,0,$tlim,LDAP_DEREF_NEVER); // Fall back to conventional LDAP // if (!$sr) $sr = @ldap_search($ds,$bdn,$filstr,$lim,0,0,$tlim,LDAP_DEREF_NEVER); if ($sr) { // If search returned, check that there are valid entries $nmatch = ldap_count_entries($ds,$sr); if ($nmatch > 0) { // HTML table initialisation $jtable = new LmTable($module,$toppage->$module); // If there are valid entries, tabulate results $entries = ldap_get_entries($ds,$sr); $njobs = $entries["count"]; define("CMPKEY",JOB_SUBM); if ( $schema == "GLUE2") define("CMPKEY",GJOB_SUBM); usort($entries,"ldap_entry_comp"); // loop on jobs $jcount = 0; for ($i=1; $i<$njobs+1; $i++) { $jdn = rawurlencode($entries[$i]["dn"]); $curstat = $entries[$i][JOB_STAT][0]; if ( $schema == "GLUE2") { $curstat = $entries[$i][GJOB_STAT][0]; } /* * The following flags may need an adjustment, * depending on the Job Status provider */ // Running job: statail == "R" or "run" // $statail = substr($curstat,-3); $statail = substr(strstr($curstat,"INLRMS:"),7); $statail = trim($statail); // Queued job: stahead != "FIN" && statail != "R" and "run" etc $stahead = substr($curstat,0,3); $flagrun = ( $status == "Running" && ( $statail == "R" || /* PBS */ $statail == "S" || /* suspended by Condor */ $statail == "run" ) /* easypdc */ ); $flagque = ( $status != "Running" && $statail != "R" && $statail != "S" && $statail != "run" && $stahead != "FIN" && $stahead != "FAI" && $stahead != "EXE" && $stahead != "KIL" && $stahead != "DEL" ); /* No changes necessary below */ $flagact = ($flagrun || $flagque)?1:0; if ($flagact == 1 || $status == "All" ) { if ( $schema == "GLUE2") { $uname = $entries[$i][GJOB_GOWN][0]; $encuname = rawurlencode($uname); $uname = addslashes($uname); // In case $uname contains escape characters $family = cnvname($uname, 2); $jname = htmlentities($entries[$i][JOB_NAME][0]); $jobname = ($entries[$i][GJOB_NAME][0]) ? $jname : "N/A"; $queue = ($entries[$i][GJOB_EQUE][0]) ? $entries[$i][GJOB_EQUE][0] : ""; $time = ($entries[$i][GJOB_USET][0]) ? $entries[$i][GJOB_USET][0] : ""; $ncpus = ($entries[$i][GJOB_CPUS][0]) ? $entries[$i][GJOB_CPUS][0] : ""; $newwin = popup("jobstat.php?host=$host&port=$port&status=$status&jobdn=$jdn&schema=$schema",750,430,4,$lang,$debug); $quewin = popup("quelist.php?host=$host&port=$port&qname=$queue&schema=$schema",750,430,6,$lang,$debug); $usrwin = popup("userlist.php?bdn=$topdn&owner=$encuname&schema=$schema",700,500,5,$lang,$debug); } else { $uname = $entries[$i][JOB_GOWN][0]; $encuname = rawurlencode($uname); $uname = addslashes($uname); // In case $uname contains escape characters $family = cnvname($uname, 2); $jname = htmlentities($entries[$i][JOB_NAME][0]); $jobname = ($entries[$i][JOB_NAME][0]) ? $jname : "N/A"; $queue = ($entries[$i][JOB_EQUE][0]) ? $entries[$i][JOB_EQUE][0] : ""; $time = ($entries[$i][JOB_USET][0]) ? $entries[$i][JOB_USET][0] : ""; $ncpus = ($entries[$i][JOB_CPUS][0]) ? 
$entries[$i][JOB_CPUS][0] : ""; $newwin = popup("jobstat.php?host=$host&port=$port&status=$status&jobdn=$jdn",750,430,4,$lang,$debug); $quewin = popup("quelist.php?host=$host&port=$port&qname=$queue",750,430,6,$lang,$debug); $usrwin = popup("userlist.php?bdn=$topdn&owner=$encuname",700,500,5,$lang,$debug); } $jcount++; // filling the table $jrowcont[] = "$jcount $jobname"; $jrowcont[] = "$family"; $jrowcont[] = "$curstat"; $jrowcont[] = "$time"; $jrowcont[] = "$queue"; $jrowcont[] = "$ncpus"; $jtable->addrow($jrowcont); $jrowcont = array (); } } if ($jcount == 0) $jtable->adderror("".$errors["4"].": ".$status.""); $jtable->close(); } else { echo "
    ".$errors["4"]."".$errors["7"]."\n"; } } else { echo "
    ".$errors["4"]."".$errors["7"]."\n"; } $entries = array(); @ldap_free_result($sr); ldap_close($ds); return 0; } else { $errno = "6"; echo "
    ".$errors[$errno]."\n"; return $errno; } // Done $toppage->close(); ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/monitor.js0000644000000000000000000000013214152153376023472 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.500578979 nordugrid-arc-6.14.0/src/services/monitor/monitor.js0000644000175000002070000000322714152153376023463 0ustar00mockbuildmock00000000000000function ngurl(link) { var wloc="http://"+document.domain+link; var vtest=link; var prot=vtest.substring(0,4); var vhttp="http"; if (prot == vhttp) { var wloc=link } return wloc; } function monitor(link,x,y,n) { // "n" is needed to keep dedicated windows for each monitor type // function ngurl() adds HTTP contact string, if needed // wloc=ngurl(link); var ua = ' ' + navigator.userAgent.toLowerCase(); var is_opera = ua.indexOf('opera'); var is_lynx = ua.indexOf('lynx'); var is_konqueror = ua.indexOf('konqueror'); wloc = link; browser = navigator.appName; if ( is_opera>0 || is_lynx>0 || is_konqueror>0 ) { window.location = wloc; } else { aaa=open("","win"+n,"innerWidth="+x+",innerHeight="+y+",resizable=1,scrollbars=1,width="+x+",height="+y); aaa.document.encoding = "text/html; charset=utf-8"; aaa.document.clear(); aaa.document.writeln(""); aaa.document.writeln(""); aaa.document.writeln("NorduGrid"); aaa.document.writeln(""); aaa.document.writeln(""); aaa.document.writeln(""); aaa.document.writeln("






    "); aaa.document.writeln("Collecting information..."); aaa.document.writeln("

    "); aaa.document.writeln(""); aaa.document.writeln(""); aaa.document.close(); aaa.document.location.href=wloc; aaa.document.close(); } } nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/includes0000644000000000000000000000013214152153474023175 xustar000000000000000030 mtime=1638455100.656581323 30 atime=1638455103.997631524 30 ctime=1638455100.656581323 nordugrid-arc-6.14.0/src/services/monitor/includes/0000755000175000002070000000000014152153474023237 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376025307 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.639581068 nordugrid-arc-6.14.0/src/services/monitor/includes/Makefile.am0000644000175000002070000000015714152153376025277 0ustar00mockbuildmock00000000000000monitorincdir = @monitor_prefix@/includes monitorinc_DATA = $(srcdir)/*.inc EXTRA_DIST = $(monitorinc_DATA) nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/comfun.inc0000644000000000000000000000013114152153376025234 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.646581173 nordugrid-arc-6.14.0/src/services/monitor/includes/comfun.inc0000644000175000002070000000617214152153376025230 0ustar00mockbuildmock00000000000000 XXX   */ $geo1 = $a[0]; $geo2 = $b[0]; $geo1 = substr(stristr($geo1,""),3); $geo2 = substr(stristr($geo2,""),3); $geo1 = substr($geo1,0,strpos($geo1,"<")); $geo2 = substr($geo2,0,strpos($geo2,"<")); $ali1 = $a[1]; $ali2 = $b[1]; $ali1 = substr(stristr($ali1,""),3); $ali2 = substr(stristr($ali2,""),3); $ali1 = substr($ali1,0,strpos($ali1,"<")); $ali2 = substr($ali2,0,strpos($ali2,"<")); $cmpgeo = strcasecmp ($geo1,$geo2); $cmpali = strcasecmp ($ali1,$ali2); if ( !$cmpgeo ) return $cmpali; return $cmpgeo; } /** * @return int * @param a array * @param b array * @desc Compares by CPU */ function comp_by_cpu ($a, $b) { $cpu1 = $a[2]; $cpu2 = $b[2]; $cmpcpu = $cpu2 - $cpu1; return $cmpcpu; } /** * @return int * @param a array * @param b array * @desc Compares by grid running jobs */ function comp_by_grun ($a, $b) { $sum1 = $a[3]; $sum2 = $b[3]; // echo $sum1." vs ".$sum2."
    "; $sum1 = substr(stristr($sum1,"alt=\""),5); $sum2 = substr(stristr($sum2,"alt=\""),5); $sum1 = substr($sum1,0,strpos($sum1,"+")); $sum2 = substr($sum2,0,strpos($sum2,"+")); $cmpsum = $sum2 - $sum1; return $cmpsum; } ?>nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/blacklist.inc0000644000000000000000000000013214152153376025716 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.642581113 nordugrid-arc-6.14.0/src/services/monitor/includes/blacklist.inc0000644000175000002070000000257314152153376025712 0ustar00mockbuildmock00000000000000"0", "grid.fi.uib.no"=>"0", "dc2.uio.no"=>"0", "dc1.uio.no"=>"0", "dc3.uio.no"=>"0", "dc4.uio.no"=>"0", "fire.ii.uib.no"=>"0", "hydra.ii.uib.no"=>"0", "grid.nbi.dk"=>"0", "lscf.nbi.dk"=>"0", "hepax1.nbi.dk"=>"0", "morpheus.nbi.dk"=>"0", "heppc08.nbi.dk"=>"0", "grid.uni-c.dk"=>"0", "tambohuse.imada.sdu.dk"=>"0", "gridrouter.imada.sdu.dk"=>"0", "tiger.imada.sdu.dk"=>"0", "cbs202.cbs.dtu.dk"=>"0", "gridgate.it.dtu.dk"=>"0", "amigos24.diku.dk"=>"0", "nroot.hip.fi"=>"0", "grid.hip.fi"=>"0", "hirmu.hip.fi"=>"0", "pc19.hip.fi"=>"0", "pc30.hip.helsinki.fi"=>"0", "testbed0.hip.helsinki.fi"=>"0", "pchip04.cern.ch"=>"0", "quark.hep.lu.se"=>"0", "farm.hep.lu.se"=>"0", "hathi.hep.lu.se"=>"0", "grid.tsl.uu.se"=>"0", "grid.scfab.se"=>"0", "bambi.quark.lu.se"=>"0", "nexus.swegrid.se"=>"0", "hagrid.it.uu.se"=>"0", "ingrid.hpc2n.umu.se"=>"0", "sigrid.lunarc.lu.se"=>"0", "bluesmoke.nsc.liu.se"=>"0", "g01n01.pdc.kth.se"=>"0", "ingvar.nsc.liu.se"=>"0", "seth.hpc2n.umu.se"=>"0", "banan.hpc2n.umu.se"=>"0", "jakarta.hpc2n.umu.se"=>"0", "gridum2.cs.umu.se"=>"0", "gridum1.cs.umu.se"=>"0", "sleipner.byggmek.lth.se"=>"0", "grendel.it.uu.se"=>"0", "login-3.monolith.nsc.liu.se"=>"0", "vls.science.upjs.sk"=>"0", "213-35-172-38-dsl.plus.estpak.ee"=>"0", "cm-gw.phys.ualberta.ca"=>"0", "tgrid.icepp.s.u-tokyo.ac.jp"=>"0", "hmx00.kek.jp"=>"0", "dummy"=>"0", "dummy"=>"0"); ?>nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/locale.inc0000644000000000000000000000013114152153376025204 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.651581248 nordugrid-arc-6.14.0/src/services/monitor/includes/locale.inc0000644000175000002070000007054114152153376025201 0ustar00mockbuildmock00000000000000 array ( // Table headers "loadmon" => array( "0" => "Grid Monitor", "Country" => 30, "Site" => 160, "CPUs" => 10, "Load (processes: Grid+local)" => 210, "Queueing" => 10 ), "clusdes" => array("0" => "Cluster Details for", "Queue" => 0, "Status" => 0, "Limits (min)" => 0, "CPUs" => 0, "Running" => 0, "Queueing" => 0 ), "jobstat" => array("0" => "Jobs at:Job ID", "Job name" => 0, "Owner" => 0, "Status" => 0, "CPU (min)" => 0, "Queue" => 0, "CPUs" => 0 ), "volist" => array("0" => "Virtual Organisations", "Virtual Organisation" => 0, "Members" => 0, "Served by" => 0 ), "vousers" => array("0" => "Grid User Base", "#" => 0, "Name" => 0, "Affiliation" => 0, "E-mail" => 0 ), "userlist" => array("0" => "Information for", "" => 0, "Job name" => 0, "Status" => 0, "CPU (min)" => 0, "Cluster" => 0, "Queue" => 0, "CPUs" => 0 ), "userres" => array("0" => "", "Cluster:queue" => 0, "Free CPUs" => 0, "Exp. 
queue length" => 0, "Free disk (MB)" => 0 ), "attlist" => array("0" => "Attribute values", "Resource" => 0, "Current value" => 0 ), "quelist" => array("0" => "Details for the queue", "" => 0, "Job name" => 0, "Owner" => 0, "Status" => 0, "CPU (min)" => 0, "Memory (KB)" => 0, "CPUs" => 0 ), "sestat" => array("0" => "Storage Elements", "#" => 0, "Alias" => 0, "Tot. space" => 0, "Free space" => 0, "Name" => 0, "Base URL" => 0, "Type" => 0 ), "allusers" => array("0" => "Authorised Grid Users:Active Grid Users", "#" => 0, "Name" => 0, "Affiliaton" => 0, "Jobs" => 0, "Sites" => 0 ), "ldapdump" => array("0" => "", "Attribute" => 0, "Value" => 0 ), // IS attributes "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Info valid from (GMT)", "Mds-validto" => "Info valid to (GMT)" ), "isattr" => array( "nordugrid-cluster-name" => "Front-end domain name", "nordugrid-cluster-aliasname" => "Cluster alias", "nordugrid-cluster-contactstring" => "Contact string", "nordugrid-cluster-interactive-contactstring" => "Interactive contact", "nordugrid-cluster-comment" => "Comment", "nordugrid-cluster-support" => "E-mail contact", "nordugrid-cluster-lrms-type" => "LRMS type", "nordugrid-cluster-lrms-version" => "LRMS version", "nordugrid-cluster-lrms-config" => "LRMS details", "nordugrid-cluster-architecture" => "Architecture", "nordugrid-cluster-opsys" => "Operating system", "nordugrid-cluster-homogeneity" => "Homogeneous cluster", "nordugrid-cluster-nodecpu" => "CPU type (slowest)", "nordugrid-cluster-nodememory" => "Memory (MB, smallest)", "nordugrid-cluster-totalcpus" => "CPUs, total", "nordugrid-cluster-cpudistribution" => "CPU:machines", "nordugrid-cluster-sessiondir-free" => "Disk space, available (MB)", "nordugrid-cluster-sessiondir-total" => "Disk space, total (MB)", "nordugrid-cluster-cache-free" => "Cache size, available (MB)", "nordugrid-cluster-cache-total" => "Cache size, total (MB)", "nordugrid-cluster-runtimeenvironment" => "Runtime environment", "nordugrid-cluster-localse" => "Storage Element, local", "nordugrid-cluster-middleware" => "Grid middleware", "nordugrid-cluster-totaljobs" => "Jobs, total amount", "nordugrid-cluster-usedcpus" => "CPUs, occupied", "nordugrid-cluster-queuedjobs" => "Jobs, queued", "nordugrid-cluster-location" => "Postal code", "nordugrid-cluster-owner" => "Owner", "nordugrid-cluster-issuerca" => "Certificate issuer", "nordugrid-cluster-nodeaccess" => "Node IP connectivity", "nordugrid-cluster-gridarea" => "Session area (OBSOLETE)", "nordugrid-cluster-gridspace" => "Grid disk space (OBSOLETE)", "nordugrid-cluster-opsysdistribution" => "OS distribution (OBSOLETE)", "nordugrid-cluster-runningjobs" => "Jobs, running (OBSOLETE)", "nordugrid-queue-name" => "Queue name", "nordugrid-queue-status" => "Queue status", "nordugrid-queue-running" => "Jobs, running", "nordugrid-queue-queued" => "Jobs, queued", "nordugrid-queue-maxrunning" => "Jobs, running (max)", "nordugrid-queue-maxqueuable" => "Jobs, queueable (max)", "nordugrid-queue-maxuserrun" => "Jobs per Unix user (max)", "nordugrid-queue-maxcputime" => "CPU time, max. (minutes)", "nordugrid-queue-mincputime" => "CPU time, min. 
(minutes)", "nordugrid-queue-defaultcputime" => "CPU time, default (minutes)", "nordugrid-queue-schedulingpolicy" => "Scheduling policy", "nordugrid-queue-totalcpus" => "CPUs, total", "nordugrid-queue-nodecpu" => "CPU type", "nordugrid-queue-nodememory" => "Memory (MB)", "nordugrid-queue-architecture" => "Architecture", "nordugrid-queue-opsys" => "Operating system", "nordugrid-queue-gridrunning" => "Grid jobs, running", "nordugrid-queue-gridqueued" => "Grid jobs, queued", "nordugrid-queue-assignedcpunumber" => "CPUs per queue (OBSOLETE)", "nordugrid-queue-assignedcputype" => "CPU type (OBSOLETE)", "nordugrid-job-globalid" => "ID", "nordugrid-job-globalowner" => "Owner", "nordugrid-job-execcluster" => "Execution cluster", "nordugrid-job-execqueue" => "Execution queue", "nordugrid-job-stdout" => "Standard output file", "nordugrid-job-stderr" => "Standard error file", "nordugrid-job-stdin" => "Standard input file", "nordugrid-job-reqcput" => "Requested CPU time", "nordugrid-job-status" => "Status", "nordugrid-job-queuerank" => "Position in the queue", "nordugrid-job-lrmscomment" => "LRMS comment", "nordugrid-job-submissionui" => "Submission machine", "nordugrid-job-submissiontime" => "Submission time (GMT)", "nordugrid-job-usedcputime" => "Used CPU time", "nordugrid-job-usedwalltime" => "Used wall time", "nordugrid-job-sessiondirerasetime" => "Erase time (GMT)", "nordugrid-job-proxyexpirationtime" => "Proxy expiration time (GMT)", "nordugrid-job-usedmem" => "Used memory (KB)", "nordugrid-job-errors" => "Errors", "nordugrid-job-jobname" => "Name", "nordugrid-job-runtimeenvironment" => "Runtime environment", "nordugrid-job-cpucount" => "Requested CPUs", "nordugrid-job-executionnodes" => "Execution nodes", "nordugrid-job-gmlog" => "GM log file", "nordugrid-job-gridlog" => "Gridlog file (OBSOLETE)", "nordugrid-job-clientsoftware" => "Client version", "nordugrid-authuser-name" => "Name", "nordugrid-authuser-sn" => "Subject Name", "nordugrid-authuser-freecpus" => "Free CPUs", "nordugrid-authuser-diskspace" => "Free disk space (MB)", "nordugrid-authuser-queuelength" => "Experienced queue length", "nordugrid-se-name" => "Domain name", "nordugrid-se-aliasname" => "Storage element alias", "nordugrid-se-type" => "Storage element type", "nordugrid-se-freespace" => "Free space (GB)", "nordugrid-se-totalspace" => "Total space (GB)", "nordugrid-se-baseurl" => "Contact URL", "nordugrid-se-authuser" => "Authorised user (DN)", "nordugrid-se-location" => "Postal code", "nordugrid-se-owner" => "Owner", "nordugrid-se-issuerca" => "Certificate issuer", "nordugrid-se-comment" => "Comment", "nordugrid-rc-name" => "Domain name", "nordugrid-rc-aliasname" => "Replica Catalog alias", "nordugrid-rc-baseurl" => "Contact URL", "nordugrid-rc-authuser" => "Authorised user (DN)", "nordugrid-rc-location" => "Postal code", "nordugrid-rc-owner" => "Owner", "nordugrid-rc-issuerca" => "Certificate issuer" ), // Errors, warnings etc "errors" => array( "1" => "Can not read top-level resource indices", "2" => "None of the local indices returned connection", "3" => " bad configuration or request timed out", "4" => "No Grid jobs found", "5" => "No information found", "6" => "Server unavailable", "7" => " - refresh later", "101" => " Monitor timeouts for GRIS: ", "102" => " sec on connection and ", "103" => " sec on search", "104" => " sec spent searching", "105" => "Showing resources only in ", "106" => "Polled top-level indices: ", "107" => "Got geographical locations, scanned sites: ", "108" => " sites arranged by geographical 
location", "109" => "Search for cluster attributes", "110" => "Search for queue attributes", "111" => "No data from ", "112" => " is up in ", "113" => " has no resources to offer", "114" => " Monitor timeouts for GIIS: ", "115" => "Skipping GRIS: ", "116" => "not a ", "117" => "Checking connection: ", "118" => "OK", "119" => "That far, detected resources of kind ", "120" => "LDAP error searching ", "121" => " status at ", "122" => "Blacklisted: ", "123" => "Registrant found for ", "124" => "Search for SE attributes", "125" => "Search for users", "126" => "Search for jobs", "127" => " has job ", "128" => " while not being authorized", "129" => "Can not get object data: error ", "130" => " Monitor timeouts for EMIR: ", "301" => "Refresh", "302" => "Print", "303" => "Help", "304" => "Close", "305" => "Red", "306" => "Grey", "307" => "All users", "308" => "Active users", "309" => "Search", "310" => "Storage", "311" => "VOs", "312" => "Flag of ", "313" => " Grid processes and ", "314" => " local processes", "401" => "Processes", "402" => "Grid", "403" => "Local", "404" => "World", "405" => "TOTAL", "406" => " sites", "407" => "a lot of", "408" => " GB", "409" => " ALL" ), // Post code conversion: only for [en]! "tlconvert" => array ( "AU" => "Australia", "CA" => "Canada", "CH" => "Switzerland", "DK" => "Denmark", "EE" => "Estonia", "FI" => "Finland", "FIN" => "Finland", "SF" => "Finland", "DE" => "Germany", "JP" => "Japan", "NO" => "Norway", "N" => "Norway", "SE" => "Sweden", "SK" => "Slovakia", "SI" => "Slovenia", "KEK" => "Japan", "TOKYO" => "Japan" ) ), "ru" => array ( // Table headers "loadmon" => array("0" => "Грид-монитор", "Страна" => 0, "РеÑурÑ" => 0, "ЦП" => 0, "Загрузка" => 0, "Ожидают" => 0 ), "clusdes" => array("0" => "ОпиÑание клаÑтера", "Очередь" => 0, "СоÑтоÑние" => 0, "ДлительноÑть (мин)" => 0, "ЦП" => 0, "СчитаютÑÑ" => 0, "Ожидают" => 0 ), "jobstat" => array("0" => "Задачи на:Ðомер задачи", "Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" => 0, "ХозÑин" => 0, "СоÑтоÑние" => 0, "Ð’Ñ€ÐµÐ¼Ñ (мин)" => 0, "Очередь" => 0, "ЦП" => 0 ), "volist" => array("0" => "Виртуальные организации", "Ð’Ð¸Ñ€Ñ‚ÑƒÐ°Ð»ÑŒÐ½Ð°Ñ Ð¾Ñ€Ð³Ð°Ð¸Ð·Ð°Ñ†Ð¸Ñ" => 0, "Члены" => 0, "ОбÑлуживаетÑÑ" => 0 ), "vousers" => array("0" => "Пользователи", "#" => 0, "ИмÑ" => 0, "МеÑто работы" => 0, "Ð­Ð»ÐµÐºÑ‚Ñ€Ð¾Ð½Ð½Ð°Ñ Ð¿Ð¾Ñ‡Ñ‚Ð°" => 0 ), "userlist" => array("0" => "Ð˜Ð½Ñ„Ð¾Ñ€Ð¼Ð°Ñ†Ð¸Ñ Ð´Ð»Ñ", "" => 0, "Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" => 0, "СоÑтоÑние" => 0, "Ð’Ñ€ÐµÐ¼Ñ (мин)" => 0, "РеÑурÑ" => 0, "Очередь" => 0, "ЦП" => 0 ), "userres" => array("0" => "", "РеÑурÑ:очередь" => 0, "Свободные ЦП" => 0, "Длина очереди" => 0, "ДиÑк, доÑтупно (Мб)" => 0 ), "attlist" => array("0" => "Ð—Ð½Ð°Ñ‡ÐµÐ½Ð¸Ñ Ð°Ñ‚Ñ‚Ñ€Ð¸Ð±ÑƒÑ‚Ð¾Ð²", "РеÑурÑ" => 0, "Значение" => 0 ), "quelist" => array("0" => "ОпиÑание очереди", "" => 0, "Ð˜Ð¼Ñ Ð·Ð°Ð´Ð°Ñ‡Ð¸" => 0, "ХозÑин" => 0, "СоÑтоÑние" => 0, "Ð’Ñ€ÐµÐ¼Ñ (мин)" => 0, "ОЗУ (КБ)" => 0, "ЦП" => 0 ), "sestat" => array("0" => "Внешние запоминающие уÑтройÑтва", "#" => 0, "Ðазвание" => 0, "ВеÑÑŒ объём" => 0, "Свободно" => 0, "ИмÑ" => 0, "URL базы" => 0, "Тип" => 0 ), "allusers" => array("0" => "Допущенные пользователи:Ðктивные пользователи", "#" => 0, "ИмÑ" => 0, "МеÑто работы" => 0, "Задачи" => 0, "РеÑурÑÑ‹" => 0 ), "ldapdump" => array("0" => "", "Ðттрибут" => 0, "Значение" => 0 ), "mdsattr" => array( "objectClass" => "objectClass", "Mds-validfrom" => "Данные дейÑтвительны Ñ (GMT)", "Mds-validto" => "Данные дейÑтвительны по (GMT)" ), "isattr" => array( "nordugrid-cluster-name" => "Ð˜Ð¼Ñ Ð³Ð¾Ð»Ð¾Ð²Ð½Ð¾Ð¹ машины", "nordugrid-cluster-aliasname" => 
"Ðазвание", "nordugrid-cluster-contactstring" => "Контактный адреÑ", "nordugrid-cluster-interactive-contactstring" => "Интерактивный адреÑ", "nordugrid-cluster-comment" => "Комментарий", "nordugrid-cluster-support" => "Е-почта ответÑтвенного", "nordugrid-cluster-lrms-type" => "СУПО, тип", "nordugrid-cluster-lrms-version" => "СУПО, верÑиÑ", "nordugrid-cluster-lrms-config" => "СУПО, подробноÑти", "nordugrid-cluster-architecture" => "Ðрхитектура", "nordugrid-cluster-opsys" => "ÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ð¾Ð½Ð½Ð°Ñ ÑиÑтема", "nordugrid-cluster-homogeneity" => "ГомогенноÑть реÑурÑа", "nordugrid-cluster-nodecpu" => "ПроцеÑÑор, тип (худший)", "nordugrid-cluster-nodememory" => "ОЗУ (Мб, наименьшее)", "nordugrid-cluster-totalcpus" => "ПроцеÑÑоры, вÑего", "nordugrid-cluster-cpudistribution" => "ПроцеÑÑоры:узлы", "nordugrid-cluster-sessiondir-free" => "ДиÑк, доÑтупно (Мб)", "nordugrid-cluster-sessiondir-total" => "ДиÑк, веÑÑŒ объём (Мб)", "nordugrid-cluster-cache-free" => "ДиÑковый кÑш, Ñвободно (Мб)", "nordugrid-cluster-cache-total" => "ДиÑковый кÑш, вÑего (Мб)", "nordugrid-cluster-runtimeenvironment" => "Ð Ð°Ð±Ð¾Ñ‡Ð°Ñ Ñреда", "nordugrid-cluster-localse" => "ВЗУ, локальное", "nordugrid-cluster-middleware" => "Грид-ПО", "nordugrid-cluster-totaljobs" => "Задачи, вÑего", "nordugrid-cluster-usedcpus" => "ПроцеÑÑоры, занÑтые", "nordugrid-cluster-queuedjobs" => "Задачи, в очереди", "nordugrid-cluster-location" => "Почтовый индекÑ", "nordugrid-cluster-owner" => "Владелец", "nordugrid-cluster-issuerca" => "Сертификат выдан", "nordugrid-cluster-nodeaccess" => "IP-Ñоединение узлов", "nordugrid-cluster-gridarea" => "ÐÐ´Ñ€ÐµÑ ÑеÑÑий (СТÐРЫЙ)", "nordugrid-cluster-gridspace" => "Грид-диÑк (СТÐРЫЙ)", "nordugrid-cluster-opsysdistribution" => "ДиÑтрибутив ОС (СТÐРЫЙ)", "nordugrid-cluster-runningjobs" => "Задачи, в Ñчёте (СТÐРЫЙ)", "nordugrid-queue-name" => "Ð˜Ð¼Ñ Ð¾Ñ‡ÐµÑ€ÐµÐ´Ð¸", "nordugrid-queue-status" => "СоÑтоÑние очереди", "nordugrid-queue-running" => "Задачи, в Ñчёте", "nordugrid-queue-queued" => "Задачи, в очереди", "nordugrid-queue-maxrunning" => "Задачи, в Ñчёте (предел)", "nordugrid-queue-maxqueuable" => "Задачи, в очереди (предел)", "nordugrid-queue-maxuserrun" => "Задачи на Ð¿Ð¾Ð»ÑŒÐ·Ð¾Ð²Ð°Ñ‚ÐµÐ»Ñ (предел)", "nordugrid-queue-maxcputime" => "ДлительноÑть, наиб. (мин)", "nordugrid-queue-mincputime" => "ДлительноÑть, наим. (мин)", "nordugrid-queue-defaultcputime" => "ДлительноÑть, по ум. 
(мин)", "nordugrid-queue-schedulingpolicy" => "Правила планировки", "nordugrid-queue-totalcpus" => "ПроцеÑÑоры, вÑего", "nordugrid-queue-nodecpu" => "ПроцеÑÑор, тип", "nordugrid-queue-nodememory" => "ОЗУ (Мб)", "nordugrid-queue-architecture" => "Ðрхитектура", "nordugrid-queue-opsys" => "ÐžÐ¿ÐµÑ€Ð°Ñ†Ð¸Ð¾Ð½Ð½Ð°Ñ ÑиÑтема", "nordugrid-queue-gridrunning" => "Грид-задачи, в Ñчёте", "nordugrid-queue-gridqueued" => "Грид-задачи, в очереди", "nordugrid-queue-assignedcpunumber" => "ПроцеÑÑоры (СТÐРЫЙ)", "nordugrid-queue-assignedcputype" => "Тип процеÑÑора (СТÐРЫЙ)", "nordugrid-job-globalid" => "Ðомер", "nordugrid-job-globalowner" => "ХозÑин", "nordugrid-job-execcluster" => "ВыполнÑющий клаÑтер", "nordugrid-job-execqueue" => "ВыполнÑÑŽÑ‰Ð°Ñ Ð¾Ñ‡ÐµÑ€ÐµÐ´ÑŒ", "nordugrid-job-stdout" => "Стандартный выход", "nordugrid-job-stderr" => "Ð¡Ñ‚Ð°Ð½Ð´Ð°Ñ€Ñ‚Ð½Ð°Ñ Ð¾ÑˆÐ¸Ð±ÐºÐ°", "nordugrid-job-stdin" => "Стандартный вход", "nordugrid-job-reqcput" => "Запрошенное времÑ", "nordugrid-job-status" => "СоÑтоÑние", "nordugrid-job-queuerank" => "Положение в очереди", "nordugrid-job-lrmscomment" => "Комментарий СУПО", "nordugrid-job-submissionui" => "ЗаÑылающий клиент", "nordugrid-job-submissiontime" => "Ð’Ñ€ÐµÐ¼Ñ Ð·Ð°Ñылки (GMT)", "nordugrid-job-usedcputime" => "ИÑпользованное Ð²Ñ€ÐµÐ¼Ñ Ð¦ÐŸ", "nordugrid-job-usedwalltime" => "ИÑпользованное времÑ", "nordugrid-job-sessiondirerasetime" => "Срок ÑƒÐ½Ð¸Ñ‡Ñ‚Ð¾Ð¶ÐµÐ½Ð¸Ñ (GMT)", "nordugrid-job-proxyexpirationtime" => "Окончание доверенноÑти (GMT)", "nordugrid-job-usedmem" => "ИÑпользование ОЗУ (Кб)", "nordugrid-job-errors" => "Ошибки", "nordugrid-job-jobname" => "ИмÑ", "nordugrid-job-runtimeenvironment" => "Ð Ð°Ð±Ð¾Ñ‡Ð°Ñ Ñреда", "nordugrid-job-cpucount" => "Запрошено процеÑÑоров", "nordugrid-job-executionnodes" => "ВыполнÑющие узлы", "nordugrid-job-gmlog" => "Ð–ÑƒÑ€Ð½Ð°Ð»ÑŒÐ½Ð°Ñ Ð·Ð°Ð¿Ð¸ÑÑŒ ГМ", "nordugrid-job-gridlog" => "Грид-запиÑÑŒ (СТÐРЫЙ)", "nordugrid-job-clientsoftware" => "ВерÑÐ¸Ñ ÐºÐ»Ð¸ÐµÐ½Ñ‚Ð°", "nordugrid-authuser-name" => "ИмÑ", "nordugrid-authuser-sn" => "Субъект", "nordugrid-authuser-freecpus" => "Свободные ЦП", "nordugrid-authuser-diskspace" => "ДиÑк, доÑтупно (Мб)", "nordugrid-authuser-queuelength" => "Длина очереди", "nordugrid-se-name" => "Доменное имÑ", "nordugrid-se-aliasname" => "Ðазвание", "nordugrid-se-type" => "Тип", "nordugrid-se-freespace" => "Свободный объём (Гб)", "nordugrid-se-totalspace" => "ВеÑÑŒ объём (Гб)", "nordugrid-se-baseurl" => "Контактный адреÑ", "nordugrid-se-authuser" => "Допущенные ползьзователи (DN)", "nordugrid-se-location" => "Почтовый индекÑ", "nordugrid-se-owner" => "Владелец", "nordugrid-se-issuerca" => "Сертификат выдан", "nordugrid-se-comment" => "Комментарий", "nordugrid-rc-name" => "Доменное имÑ", "nordugrid-rc-aliasname" => "Ðазвание", "nordugrid-rc-baseurl" => "Контактный адреÑ", "nordugrid-rc-authuser" => "Допущенные пользователи (DN)", "nordugrid-rc-location" => "Почтовый индекÑ", "nordugrid-rc-owner" => "Владелец", "nordugrid-rc-issuerca" => "Сертификат выдан" ), "errors" => array( "1" => "Ðевозможно прочеÑть ÑпиÑки выÑшего уровнÑ", "2" => "Ðи один из меÑтных ÑпиÑков не отзываетÑÑ", "3" => " Ð½ÐµÐ²ÐµÑ€Ð½Ð°Ñ ÐºÐ¾Ð½Ñ„Ð¸Ð³ÑƒÑ€Ð°Ñ†Ð¸Ñ Ð¸Ð»Ð¸ иÑтекло Ð²Ñ€ÐµÐ¼Ñ Ð·Ð°Ð¿Ñ€Ð¾Ñа", "4" => "Ðе обнаружено Грид-задач", "5" => "Ðет информации", "6" => "Служба недоÑтупна", "7" => " - попробуйте обновить поззже", "101" => " Ð’Ñ€ÐµÐ¼Ñ Ð½Ð° ÑвÑзь Ñ Ð»Ð¾ÐºÐ°Ð»ÑŒÐ½Ñ‹Ð¼ ÑпиÑком: ", "102" => " Ñ Ð½Ð° Ñоединение и ", "103" => " Ñ Ð½Ð° поиÑк", "104" => " Ñ Ð·Ð°Ñ‚Ñ€Ð°Ñ‡ÐµÐ½Ð¾ на поиÑк", "105" => "ПеречиÑление реÑурÑов: ", "106" => "Опрошено ÑпиÑков 
верхнего уровнÑ: ", "107" => "Получены географичеÑкие координаты, проÑканировано реÑурÑов: ", "108" => " реÑурÑов упорÑдочено по геополитичеÑкому признаку", "109" => "ПоиÑк аттрибутов клаÑтера", "110" => "ПоиÑк аттрибутов очереди", "111" => "Ðет данных Ñ ", "112" => " фукционирует в Ñтране: ", "113" => " не раÑполагает реÑурÑами", "114" => " Ð’Ñ€ÐµÐ¼Ñ Ð½Ð° ÑвÑзь Ñ Ð³Ð»Ð¾Ð±Ð°Ð»ÑŒÐ½Ñ‹Ð¼ ÑпиÑком: ", "115" => "ИгнорируетÑÑ Ñ€ÐµÑурÑ: ", "116" => "не ÑоответÑтвует типу ", "117" => "Проверка ÑвÑзи: ", "118" => "еÑть", "119" => "Ðа данный момент обнаружено реÑурÑов типа ", "120" => "Ошибка LDAP при поиÑке на ", "121" => "-ÑоÑтоÑние на ", "122" => "Заблокирован: ", "123" => "Обнаружен региÑтрант ", "124" => "ПоиÑк аттрибутов ВЗУ", "125" => "ПоиÑк пользователей", "126" => "ПоиÑк задач", "127" => " запуÑтил(а) задачу ", "128" => " не будучи допущенным(ой)", "301" => "Перезагрузить", "302" => "Печать", "303" => "Помощь", "304" => "Закрыть", "305" => "КраÑный", "306" => "Серый", "307" => "Ð’Ñе пользователи", "308" => "Ðктивные пользователи", "309" => "ПоиÑк", "310" => "ВЗУ", "311" => "Виртуальные организации", "312" => "Флаг Ñтраны: ", "313" => " Грид-процеÑÑов и ", "314" => " меÑтных процеÑÑов", "401" => "ПроцеÑÑÑ‹", "402" => "Грид", "403" => "меÑтные", "404" => "Мир", "405" => "ВСЕГО", "406" => " реÑурÑ(а)(ов)", "407" => "куча", "408" => " Гб", "409" => " ВСЕ" ), // Country name conversion, no postcode! "tlconvert" => array ( "Australia" => "ÐвÑтралиÑ", "Canada" => "Канада", "Switzerland" => "ШвейцариÑ", "Denmark" => "ДаниÑ", "Estonia" => "ЭÑтониÑ", "Finland" => "ФинлÑндиÑ", "Germany" => "ГерманиÑ", "Japan" => "ЯпониÑ", "Norway" => "ÐорвегиÑ", "Sweden" => "ШвециÑ", "Slovakia" => "СловакиÑ", "Slovenia" => "СловениÑ", "World" => "Мир" ) ) ); ?> nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435025314 xustar000000000000000030 mtime=1638455069.781117405 30 atime=1638455091.084437498 30 ctime=1638455100.638581053 nordugrid-arc-6.14.0/src/services/monitor/includes/Makefile.in0000644000175000002070000005127414152153435025312 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/monitor/includes DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); 
\ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(monitorincdir)" DATA = $(monitorinc_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ 
GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = 
@PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ monitorincdir = @monitor_prefix@/includes monitorinc_DATA = $(srcdir)/*.inc EXTRA_DIST = $(monitorinc_DATA) all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign 
src/services/monitor/includes/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/monitor/includes/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-monitorincDATA: $(monitorinc_DATA) @$(NORMAL_INSTALL) @list='$(monitorinc_DATA)'; test -n "$(monitorincdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(monitorincdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(monitorincdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(monitorincdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(monitorincdir)" || exit $$?; \ done uninstall-monitorincDATA: @$(NORMAL_UNINSTALL) @list='$(monitorinc_DATA)'; test -n "$(monitorincdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(monitorincdir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(DATA) installdirs: for dir in "$(DESTDIR)$(monitorincdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-monitorincDATA install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-monitorincDATA .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-monitorincDATA install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags-am uninstall uninstall-am \ uninstall-monitorincDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/attlist.inc0000644000000000000000000000013214152153376025432 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.641581098 nordugrid-arc-6.14.0/src/services/monitor/includes/attlist.inc0000644000175000002070000001576214152153376025432 0ustar00mockbuildmock00000000000000tabletop("",TOPTIT); $tlim = 20; $tout = 20; $engatts = array (); $lim = array ( "dn" ); // deduce the top object name assuming all requests are for the same object $components = explode("-",$attributes[0]); $object = $components[0]."-".$components[1]; // start building the filter $filter = "(&(objectclass=$object)"; $n = count($attributes); $natt = 0; $attrtag = array (); for ( $i=0; $i<$n; $i++ ){ $attribute = $attributes[$i]; if ( !$attribute ) continue; $selection = ( $filters[$i] ) ? $filters[$i] : "*"; $is = ( $signs[$i] ) ? $signs[$i] : "="; $lim[] = $attribute; if ( !in_array($attribute,$attrtag)) $engatts[] = ( $isattr[$attribute] ) ? $isattr[$attribute] : $attribute; $attrtag[] = $attribute; // create the filter switch ( $is ) { case "!=": $filter .= "(!($attribute=$selection))"; break; case "~": $selection = "*".$selection."*"; $filter .= "($attribute=$selection)"; break; case "!~": $selection = "*".$selection."*"; $filter .= "(!($attribute=$selection))"; break; default: $filter .= "($attribute$is$selection)"; } $natt++; } $filter .= ")"; if ( ! empty($giislist) ) { $gentries = recursive_giis_info($giislist,$thething,$errors,0,1); $nc = count($gentries); } // TODO: fix for GLUE2 schema if ( ! empty($archery_list) ) $gentries = array_merge($gentries, archery_info($archery_list, "NG", $errors, 0)); if ( !$nc ) { $errno = 1; echo "
    ".$errors[$errno]."\n"; return $errno; } $dsarray = array (); $hnarray = array (); $pnarray = array (); $sitetag = array (); /* a tag to skip duplicated entries */ for ( $k = 0; $k < $nc; $k++ ) { $clhost = $gentries[$k]["host"]; $clport = $gentries[$k]["port"]; $ldapuri = "ldap://".$clhost.":".$clport; $clconn = ldap_connect($ldapuri); if ( $clconn && !$sitetag[$clhost] ) { array_push($dsarray,$clconn); array_push($pnarray,$clport); $sitetag[$clhost] = 1; /* filtering tag */ } } $nhosts = count($dsarray); if ( !$nhosts ) { // NO SITES REPLY... $errno = "2"; echo "
    ".$errors[$errno]."\n"; return $errno; } // Search all clusters $srarray = @ldap_search($dsarray,DN_LOCAL,$filter,$lim,0,0,$tlim,LDAP_DEREF_NEVER); echo "\n"; // HTML table initialisation array_unshift($engatts,$errors["425"]); $jtable = new LmTableFree($engatts); $rowcont = array(); $tabcont = array(); $rc = 0; for ( $ids = 0; $ids < $nhosts; $ids++ ) { $sr = $srarray[$ids]; $dst = $dsarray[$ids]; $pn = $pnarray[$ids]; if ($dst && $sr) { // If search returned, check that there are valid entries $nmatch = @ldap_count_entries($dst,$sr); if ($nmatch > 0) { // If there are valid entries, tabulate results $allentries = ldap_get_entries($dst,$sr); $entries = ldap_purge($allentries); if ( $object == OBJ_AJOB ) { define("CMPKEY",JOB_STAT); //usort($entries,"ldap_entry_comp"); } $nclus = $entries["count"]; for ($i=0; $i<$nclus; $i++) { $cluster = "N/A"; $queue = "N/A"; $job = "N/A"; $currdn = $entries[$i]["dn"]; $currdn = preg_replace("/\"/","",$currdn); $dnparts = ldap_explode_dn($currdn,0); foreach ($dnparts as $part) { $pair = explode("=",$part); switch ( $pair[0] ) { case CLU_NAME: $cluster = $pair[1]; break; case SEL_NAME: $se = $pair[1]; break; case QUE_NAME: $queue = $pair[1]; break; case JOB_GLID: $job = $pair[1]; $encjob = rawurlencode($currdn); break; } } $sort = "cluster"; // 410: cluster; 411: queue; 412: job; 413: user; 414: SE switch ( $object ) { case OBJ_CLUS: $resource = $errors["410"]." $cluster"; $winstring = popup("clusdes.php?host=$cluster&port=$pn",700,620,1); break; case OBJ_QUEU: $resource = $errors["410"]." $cluster, ".$errors["411"]." $queue"; $winstring = popup("quelist.php?host=$cluster&port=$pn&qname=$queue",750,430,6); break; case OBJ_USER: $resource = $errors["410"]." $cluster, ".$errors["411"]." $queue"; $winstring = popup("quelist.php?host=$cluster&port=$pn&qname=$queue",750,430,6); break; case OBJ_AJOB: $resource = $errors["412"]." $job"; $winstring = popup("jobstat.php?host=$cluster&port=$pn&status=&jobdn=$encjob",750,430,4); break; case OBJ_STEL: $resource = $errors["414"]." $se"; $winstring = ""; break; } $rc++; $rowcont[0] = ( $winstring ) ? "$rc $resource" : "$rc $resource"; // determine maximum row count per object $vcount = 0; foreach ( $attributes as $attribute ) { if ( !$attribute ) continue; $ccount = $entries[$i][$attribute]["count"]; $vcount = ( $ccount > $vcount ) ? 
$ccount : $vcount; } if ($vcount == 0) $jtable->adderror($resource); $attrtag = array(); for ( $j = 0; $j < $vcount; $j++ ) { $attval = ""; $attcheck = FALSE; for ( $k = 0; $k < $n ; $k++ ) { $attribute = $attributes[$k]; if ( !$attribute || @in_array($attribute,$attrtag[$j]) ) continue; if ( $entries[$i][$attribute][$j] ) { $attval = $entries[$i][$attribute][$j]; $attcheck = TRUE; } else { $attval = " "; } // Some time-stamp readability adjustment if (substr(strrchr($attribute, "-"), 1) == "sessiondirerasetime" || substr(strrchr($attribute, "-"), 1) == "submissiontime" || substr($attribute,0,9) == "Mds-valid" ) $attval=cnvtime($attval); $rowcont[] = htmlentities($attval); $attrtag[$j][] = $attribute; } if ( $attcheck ) { $tabcont[] = $rowcont; } else { $rc--; } // if ( $attcheck ) $jtable->addrow($rowcont); $rowcont = array(); $rowcont[0] = " "; } } } } @ldap_free_result($sr); } foreach ( $tabcont as $row ) $jtable->addrow($row,""); $jtable->close(); return 0; } ?>nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/ldap_nice_dump.inc0000644000000000000000000000013114152153376026710 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.649581218 nordugrid-arc-6.14.0/src/services/monitor/includes/ldap_nice_dump.inc0000644000175000002070000001074714152153376026707 0ustar00mockbuildmock00000000000000"; // Plain LDAP dump for the DN $filstr = "(objectclass=*)"; if ( strpos($dn, DN_GLUE) ) { $filstr = "(|(objectClass=".GOBJ_CLUS.")(objectClass=".GOBJ_MAN.")(objectClass=".GOBJ_LOC.")(objectClass=".GOBJ_QUEU.")(objectClass=".GOBJ_CON."))"; if ( strpos(strtolower(" ".$dn), GJOB_GLID) ) { $filstr = "(|(objectClass=".GOBJ_AJOB."))"; } } $sr = ldap_search($ds,$dn,$filstr,array("*"),0,0,$tlim,LDAP_DEREF_NEVER); if ($sr) { // If search returned, check that there are valid entries $nmatch = ldap_count_entries($ds,$sr); if ($nmatch > 0) { // If there are valid entries, tabulate results $first = ldap_first_entry($ds,$sr); if ( !strpos($dn, DN_GLUE) ) $nmatch = 1; for ( $j=0; $j<$nmatch; $j++){ $entries = ldap_get_attributes($ds,$first); $nfields = $entries["count"]; // get the Distinguished Name $thisdn = ldap_get_dn($ds,$first); // HTML table initialisation $dtable = new LmTableSp("ldapdump",$strings["ldapdump"]); // add the DN entry $drowcont = array("".$errors["420"]."",$thisdn); $dtable->addrow($drowcont, "#cccccc"); $drowcont = array(); // loop on the rest of attributes for ($i=0; $i<$nfields; $i++) { $curatt = $entries[$i]; if ( $exclude && in_array($curatt,$exclude) ) continue; $engatt = ($isattr[$curatt]) ? $isattr[$curatt] : $curatt; $nval = $entries[$curatt]["count"]; $encatt = rawurlencode($curatt); $attwin = popup("attlist.php?attribute=$encatt",650,300,7); $attstring = @( $mdsattr[$curatt] ) ? 
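/*
 * Hedged sketch, not part of the original ldap_nice_dump.inc: the attribute dump a
 * few lines below passes LDAP GeneralizedTime values ("YYYYMMDDhhmmssZ") through
 * cnvtime() for readability. The real cnvtime() body lives in cnvtime.inc and is not
 * reproduced here; cnvtime_sketch() below is only a hypothetical stand-in showing
 * the kind of conversion involved.
 *
 *   function cnvtime_sketch($gtime) {
 *       // e.g. "20211202154503Z" becomes "2021-12-02 15:45:03"
 *       if (strlen($gtime) != 15 || substr($gtime, 14, 1) != "Z") return $gtime;
 *       return substr($gtime, 0, 4) . "-" . substr($gtime, 4, 2) . "-" . substr($gtime, 6, 2) . " "
 *            . substr($gtime, 8, 2) . ":" . substr($gtime, 10, 2) . ":" . substr($gtime, 12, 2);
 *   }
 */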
"$engatt" : "$engatt"; $drowcont[0] = $attstring; $drowcont[1] = " "; if ($nval==0) $dtable->addrow($drowcont); $drowcont[1] = ""; if ( $nval > 4 ) $drowcont[1] = $fhead; for ($k=0; $k<$nval; $k++) { $curval = $entries[$curatt][$k]; // Strip HTML tags some smart folks are adding $curval = strip_tags($curval); // Some time-stamp readability adjustment if ( strlen($curval) == 15 && $curval[14] == "Z" ) $curval=cnvtime($curval); $encval = htmlspecialchars($curval,ENT_QUOTES,"UTF-8"); // E-mail masquerading for short lists (dunno what to do with long lists) if (strpos($curval,"@",1) && $nval<5) { $m = mylo ($curval); if ( !empty($m[0]) ) $encval = ""; } if ( $nval > 4 ) { $drowcont[1] .= "$encval"; if ( $k < $nval-1 ) $drowcont[1] .= "\n"; } else { $drowcont[1] .= $encval; if ( $k < $nval-1 ) $drowcont[1] .= "
     "; } } if ( $nval > 4 ) $drowcont[1] .= $ftail; $dtable->addrow($drowcont); } $dtable->close(); echo "
    "; $first = ldap_next_entry($ds,$first); } ldap_free_result($sr); return $thisdn; } else { $errno = 9; echo "
    ".$errors[$errno]."\n"; return $errno; } } else { $errno = 5; echo "
    ".$errors[$errno]."\n"; return $errno; } } ?> nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/cnvtime.inc0000644000000000000000000000013114152153376025412 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.645581158 nordugrid-arc-6.14.0/src/services/monitor/includes/cnvtime.inc0000644000175000002070000000055214152153376025402 0ustar00mockbuildmock00000000000000 nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/archery.inc0000644000000000000000000000013214152153376025403 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.640581083 nordugrid-arc-6.14.0/src/services/monitor/includes/archery.inc0000644000175000002070000001267214152153376025400 0ustar00mockbuildmock00000000000000" .$error_str."
  • \n"); } /** * @param $hostname * @return int */ function check_blacklist($hostname) { global $blacklist; if ( ! isset($blacklist) ) { return 0; } if ( in_array($hostname, $blacklist) ) { return $blacklist[$hostname]; } return 0; } /** * @param $endpoint * @param $errors * @param $debug * @return array */ function query_dns_archery($endpoint, $errors, $debug) { $archery_endpoints = array(); // get dns record hostname according to ARCHERY entree point convention if (substr($endpoint, 0, 6) == 'dns://') { $dns_endpoint = substr($endpoint, 6); } else { $dns_endpoint = '_archery.' . $endpoint; } // perform query $dnsquery = dns_get_record($dns_endpoint, DNS_TXT); if ( $dnsquery === FALSE ) { if ( $debug ) dbgerr_html($errors["132"].$endpoint); return $archery_endpoints; } // parse query foreach ( $dnsquery as $dnsrr ) { if (isset($dnsrr['entries'])) { foreach ($dnsrr['entries'] as $dnsrr_value) { $erecord_arr = array(); $akv = explode(' ', $dnsrr_value); foreach ($akv as $kv) { $ae = explode('=', $kv, 2); if ( count($ae) == 2 ) { $erecord_arr[$ae[0]] = $ae[1]; } } $archery_endpoints[] = $erecord_arr; } } else { if ( $debug ) dbgerr_html($errors["132"].$endpoint); } } return $archery_endpoints; } /** * @param $endpoint * @param $schema * @param $errors * @param int $debug * @param int $looplimit * @return array */ function recursive_archery_info ($endpoint, $schema, $errors, $debug=0, $looplimit=5) { $endpoints = array(); // Just in case recursion limit if ( $looplimit == 0 ) { dbgerr_html($errors["133"].$endpoint); return $endpoints; } // Query archery for endpoints info $archery_endpoints = query_dns_archery($endpoint, $errors, $debug); foreach ($archery_endpoints as $ainfo) { if ( !empty($ainfo['t']) ) { if ($ainfo['t'] == 'org.nordugrid.archery' OR $ainfo['t'] == 'archery' OR $ainfo['t'] == 'archery.service' OR $ainfo['t'] == 'archery.group' ) { if (isset($ainfo['s'])) { if ($ainfo['s'] != "1") { continue; } } $more_endpoints = recursive_archery_info ($ainfo['u'], $schema, $errors, $debug=0, $looplimit-1); $endpoints = array_merge($endpoints, $more_endpoints); } elseif ($ainfo['t'] == 'org.nordugrid.ldapegiis') { //TODO: invoke egiis query continue; } elseif ($ainfo['t'] == 'org.nordugrid.ldapng') { if ( $schema !== 'NG' ) continue; // ldap://:2135/Mds-Vo-Name=local,o=grid $parsed_url = array(); if ( !empty($ainfo['u']) && preg_match('/^ldap:\/\/(?P[^:]+):(?[0-9]+)\/(?P.*)/', $ainfo['u'], $parsed_url) ) { if ( check_blacklist($parsed_url['host'])) { if ( $debug ) dbgerr_html($errors["122"].$parsed_url['host']); continue; } $endpoints[] = array ( 'host' => $parsed_url['host'], 'port' => $parsed_url['port'], 'base' => "nordugrid-cluster-name=".$parsed_url['host'].",".$parsed_url['base'] ); } } elseif ($ainfo['t'] == 'org.nordugrid.ldapglue2') { if ( $schema !== 'GLUE2' ) continue; // ldap://:2135/o=glue $parsed_url = array(); if ( preg_match('/^ldap:\/\/(?P[^:]+):(?[0-9]+)\/(?P.*)/', $ainfo['u'], $parsed_url) ) { if ( check_blacklist($parsed_url['host'])) { if ( $debug ) dbgerr_html($errors["122"].$parsed_url['host']); continue; } $endpoints[] = array ( 'host' => $parsed_url['host'], 'port' => $parsed_url['port'], // dirty hack, monitor only works with array of ldapng endpoints even for GLUE2 :-) 'base' => "nordugrid-cluster-name=".$parsed_url['host'].",".DN_LOCAL ); } } else { // skip all unsupported endpoints (e.g. 
submission endpoints, WS endpoints, etc) continue; } } } return $endpoints; } /** * @return array * @param archery_list array * @param schema string * @param debug integer * @param loopcnt integer * @desc Returns list of LDAP endpoints */ function archery_info($archery_list, $schema, $errors, $debug="0") { // show the debug message regarding ARCHERY timeouts if($debug && ! empty($archery_list)) { dbgmsg("
    :::> " . $errors["131"] . " <:::

    "); } // start recursively querying ARCHERY $entries = array(); foreach ( $archery_list as $archery ) { $entries = array_merge($entries, recursive_archery_info($archery['endpoint'], $schema, $errors, $debug)); } return $entries; } ?> nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/headfoot.inc0000644000000000000000000000013114152153376025536 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.648581203 nordugrid-arc-6.14.0/src/services/monitor/includes/headfoot.inc0000644000175000002070000002263714152153376025536 0ustar00mockbuildmock00000000000000\n"; //echo "\n"; echo "\n"; echo "\n"; echo "\n"; echo "\n"; echo "\n"; echo "\n"; if ( $wintyp ) { $this->module = $wintyp; // Localize $yaccept = @$_SERVER["HTTP_ACCEPT_LANGUAGE"] ; if ( !$yaccept ) $yaccept = "en"; if ( FORCE_LANG != "default" ) $yaccept = FORCE_LANG; $yazyk = "en"; $yazyki = explode(",",$yaccept); foreach ( $yazyki as $option ) { if ( $yazyk != "en" ) continue; $option = trim($option); $option = substr($option,0,2); // some sniffing // touch("test/".$option); // echo "\n"; $locfile = $option.".inc"; if ( !file_exists("lang/".$locfile) ) continue; $yazyk = $option; } $locfile = $yazyk.".inc"; include $locfile; setlocale(LC_ALL, $yazyk); $this->language = $yazyk; $this->strings = $message; $this->errors = $message["errors"]; $this->countries = $message["tlconvert"]; $this->mdsattr = $message["mdsattr"]; $this->isattr = $message["isattr"]; // Assigns $this->clusdes = $message["clusdes"]; $this->$wintyp = $message[$wintyp]; $toptitle = $message[$wintyp][0]; // Set page parameters require ('settings.inc'); $inpnam = implode("_",array("def",$wintyp)); // Page style definitions (see settings.inc) // Sets top window title $this->title = ( $toptitle ) ? $toptitle : ""; // Refresh rate $this->refresh = (${$inpnam}["refresh"]) ? ${$inpnam}["refresh"] : 0; // Background and link colors $this->bg = (${$inpnam}["bgcolor"]) ? ${$inpnam}["bgcolor"] : "#ffffff"; $this->lc = (${$inpnam}["lcolor"]) ? ${$inpnam}["lcolor"] : "#cc0000"; // Dumps the header HTML code $titles = explode(":",$this->title); // sometimes titles are many echo "".$titles[0]." ".$extratitle."\n"; if ( $this->refresh ) echo "\n"; echo "\n"; // define giislist if ( ! isset($emirslist) ) { $emirslist = array (); } if ( ! isset($archery_list)) { $archery_list = array (); } $this->giislist = $giislist; $this->emirslist = $emirslist; $this->cert = $cert; $this->archery_list = $archery_list; } // Finishes HTML header, starts document body echo "\n"; echo "\n"; echo "
    \n"; } /** * @return void * @param errors array * @param title string * @param subtitle string * @desc Makes an opening Monitor header */ function tabletop ( $toptitle="", $subtitle="" ) { // function tabletop() $lang = FORCE_LANG; echo "\n"; echo "\n"; echo "\n"; echo "
    ".$toptitle."
    ".$subtitle."\n"; echo " errors["301"]."\" alt=\"".$this->errors["301"]."\">\n"; echo " \n"; echo " errors["302"]."\" alt=\"".$this->errors["302"]."\">\n"; echo " module."&lang=".$lang."',400,300,10);\" onClick=\"javascript:monitor('help.php?module=".$this->module."',400,300,10);\">\n"; echo " errors["303"]."\" alt=\"".$this->errors["303"]."\">\n"; echo " \n"; echo " errors["304"]."\" alt=\"".$this->errors["304"]."\">\n"; echo "
    \n"; } /** * @return string * @desc returns version number from README */ function getVersion () { $v = "N/A"; if ( file_exists("README") ) { $readme = fopen("README","r"); $fline = fgets($readme); $v = substr(stristr($fline,"version "),8); fclose($readme); } $this->version = $v; return $v; } /** * @return void * @desc Closes an HTML document */ function close () { // Closes the HTML document echo "\n
    \n"; echo "\n"; while (ob_get_level() > 0) { ob_end_flush(); } ob_implicit_flush(); } } /** * Below are some generic functions, non-class-specific * * function dbgmsg ( string ) : prints out a message and flushes output; useful for debugging * function popup ( string, int, int, int ) : opens up a new window, depending on the client */ /** * @return void * @param dbgtxt string * @desc Outputs a debug message outside the table */ function dbgmsg( $dbgtxt="Debug" ) { echo "$dbgtxt\n"; flush(); } /** * @return void * @param contact string * @param x int * @param y int * @param n int * @param lang string * @param debug int * @desc Returns a new monitor window URL */ $agent = @$_SERVER["HTTP_USER_AGENT"] ; if ( !defined("USERAGENT") ) define("USERAGENT",$agent); function popup() { $numargs = func_num_args(); $contact = func_get_arg(0); if ( $numargs < 2 ) { $x = 400; $y = 300; $n = 1; } elseif ( $numargs < 5 ) { $lang = "default"; $debug = 0; } else { $x = func_get_arg(1); // 400 $y = func_get_arg(2); // 300 $n = func_get_arg(3); // 1 $lang = func_get_arg(4); $debug = func_get_arg(5); } ( USERAGENT ) ? $agent = USERAGENT : $agent = "lynx"; if ( preg_match("/opera/i",$agent) || preg_match("/lynx/i",$agent) || preg_match("/konqueror/i",$agent) ) return $contact; // $link = "javascript:monitor('".$contact."',$x,$y,$n)"; if ( $lang != "default" && $lang != FALSE ) { if ( strpos($contact,"?") ) { $contact .= "&" ; } else { $contact .= "?" ; } $contact .= "lang=$lang"; } if ( $debug ) { if ( strpos($contact,"?") ) { $contact .= "&" ; } else { $contact .= "?" ; } $contact .= "debug=$debug"; } $link = $contact."\" target=\"win".$n."\" onClick=\"monitor('".$contact."',$x,$y,$n); return false"; return $link; } ?> nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/ldap_purge.inc0000644000000000000000000000013114152153376026067 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.649581218 nordugrid-arc-6.14.0/src/services/monitor/includes/ldap_purge.inc0000644000175000002070000000176614152153376026067 0ustar00mockbuildmock00000000000000### purged DN:".$curdn."

    \n"; } } $entries["count"] = $storesize; return $entries; } ?> nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/lmtable.inc0000644000000000000000000000013114152153376025365 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.650581233 nordugrid-arc-6.14.0/src/services/monitor/includes/lmtable.inc0000644000175000002070000002172714152153376025364 0ustar00mockbuildmock00000000000000 0) { ob_end_flush(); } require ('settings.inc'); $inpnam = implode("_",array("def",$wintyp)); // $xeader = implode("_",array("header",$wintyp)); $this->color_header = (${$inpnam}["thcolor"]) ? ${$inpnam}["thcolor"] : "#999999"; $this->color_bg = (${$inpnam}["tbcolor"]) ? ${$inpnam}["tbcolor"] : "#f0f0f0"; $this->font_title = (${$inpnam}["thfont"]) ? ${$inpnam}["thfont"] : "color=\"#ffffff\""; $this->font_main = (${$inpnam}["tbfont"]) ? ${$inpnam}["tbfont"] : "color=\"#000000\""; $this->columns = $locset; $this->ncols = 0; echo "color_bg."\">\n"; echo "color_header."\">\n"; $colnr = 0; if ( $wintyp == "clusdes" && $schema != "GLUE2" ) { $position = 3; $keys = array_keys($locset); unset($locset[$keys[$position]]); } foreach ( $locset as $colnam => $colwid) { if ( $colnam == "0" || $colnam == "help" ) continue; $this->ncols ++; $colnr++; $value = $colnam; if ( $schema == "GLUE2" && $value == "Queue") { $value = "Share Name"; } // Specific sorting links for the front module if ( $wintyp == "loadmon" ) { // Keep old arguments, if any, except of order $allargs = ""; foreach ( $_GET as $argm => $argval ) { if ( $argm == "order" ) continue; $allargs .= $argm."=".$argval."&"; } $str1 = "font_title.">".$value.""; if ( $colnr == 1 ) $value = $str1."country".$str2; elseif ( $colnr == 3 ) $value = $str1."cpu".$str2; elseif ( $colnr == 4 ) $value = $str1."grun".$str2; } $width = ($colwid)?$colwid:"1%"; echo "\n"; } echo "\n"; } /** * @return void * @param contents array * @desc Draws a table row */ function addrow( $contents, $bgcol="" ) { if ( count($contents) != $this->ncols ) { $this->adderror("Incompatible data"); return 1; } $this->contents = $contents; if ($bgcol) { echo "\n"; } else { echo "\n"; } foreach ($contents as $colent) { $value = $colent; echo "\n"; } echo "\n"; } /** * @return void * @param color string * @desc Draws a spanning row containing a spacer */ function addspacer( $color="#000000" ) { echo "\n"; echo ""; echo "\n"; } /** * @return void * @param errtxt string * @desc Draws a spanning row containing error message */ function adderror( $errtxt="Error", $bgcol="" ) { $this->errtxt = $errtxt; echo "\n"; echo ""; echo "\n"; } /** * @return void * @param errtxt string * @param nrows integer * @param color string * @desc Adds a cell spanning $nrows rows */ function rowspan( $nrows, $errtxt=" ", $color="#ffffcc" ) { $this->errtxt = $errtxt; $ncols = $this->ncols - 1; $nrows = $nrows + 1; echo "\n"; echo ""; echo ""; echo "\n"; } /** * @return void * @desc Closes a table */ function close() { echo "
    font_title."> $value 
    font_main."> $value 
    ncols."\" bgcolor=\"$color\" height=\"0\">\"\"
    ncols."\""; if ($bgcol) echo " bgcolor=\"$bgcol\""; echo ">font_main."> $errtxt
     $errtxt\"\"
    \n"; # ob_end_flush(); ob_implicit_flush(FALSE); } } class LmTableSp extends LmTable { var $spcolor; /** * @return void * @param contents array * @param color string * @desc Draws a table row with a spacer above */ function addrow( $contents, $bgcol="", $color="#ffffff" ) { $ncols = count($contents); $this->contents = $contents; if ($bgcol) { echo "\n"; } else { echo "\n"; } foreach ($contents as $colent) { $value = $colent; echo "font_main."> $value \n"; } echo "\n"; echo "\n"; echo "\"\""; echo "\n"; } /** * @return void * @param errtxt string * @param color string * @desc Draws a spanning row containing error message */ function adderror( $errtxt="Error", $color="#ffffff", $bgcol="" ) { $this->errtxt = $errtxt; $ncols = $this->ncols; $tospan = $this->rowspan; if ( $tospan ) $ncols = $ncols - 1; echo "\n"; echo "\"\""; echo "\n"; echo "\n"; echo "ncols."\""; if ($bgcol) echo " bgcolor=\"$bgcol\""; echo ">font_main."> $errtxt"; echo "\n"; } /** * @return void * @param errtxt string * @param nrows integer * @param color string * @desc Adds a cell spanning $nrows rows */ function rowspan( $nrows, $errtxt=" ", $color="#ffffcc" ) { $this->errtxt = $errtxt; $ncols = $this->ncols - 1; $nrows = (2 * $nrows) + 1; echo "\n"; echo " $errtxt"; echo "\"\""; echo "\n"; } } class LmTableFree extends LmTableSp { /** * @return LmTableFree * @param headers array * @desc Starts an HTML table */ function __construct( $headers ) { ob_implicit_flush(0); ob_start(); $this->color_header = "#666666"; $this->color_bg = "#f0f0f0"; $this->font_title = "color=\"#ffffff\""; $this->font_main = "color=\"#000000\""; $this->columns = count($headers); $this->ncols = 0; echo "color_bg."\">\n"; echo "color_header."\">\n"; foreach ( $headers as $colnam ) { $this->ncols ++; $value = $colnam; $width = "1%"; echo "\n"; } echo "\n"; } } ?> nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/postcode.inc0000644000000000000000000000013114152153376025565 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.653581278 nordugrid-arc-6.14.0/src/services/monitor/includes/postcode.inc0000644000175000002070000000565314152153376025564 0ustar00mockbuildmock00000000000000$tout sec t/o"; if ( !$record ) continue; $nrecords = $record["count"]; /* should be 1 */ for ($m = 0; $m < $nrecords; $m++) { $curcod = $record[$m][CLU_ZIPC][0]; if ( $curcod ) $cllist[$idx]["zvoname"] = cnvvo($curcod,$curnam); } } return($cllist); } /** * @return string * @param curnam string * @desc Guesses geographical location of a cluster */ function guess_country($curnam, $zip) { // Dumb domain name guess by 2 last letters $zvoname = cnvvo("",$curnam); // overwrite the previous decision if country code is set in the postal code if ( $zip ) $zvoname = cnvvo($zip,$curnam); return $zvoname; } ?>nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/mylo.inc0000644000000000000000000000013114152153376024725 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.652581263 nordugrid-arc-6.14.0/src/services/monitor/includes/mylo.inc0000644000175000002070000000124014152153376024710 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/cnvname.inc0000644000000000000000000000013114152153376025374 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.644581143 
nordugrid-arc-6.14.0/src/services/monitor/includes/cnvname.inc0000644000175000002070000000401114152153376025356 0ustar00mockbuildmock00000000000000 1 && $family != "Doe") { /* catch for the tutorials */ $doestr = substr($family,1,1); /* returns "1" if it is a number, or a letter if it's a name */ if ( preg_match("/[0-9]/",$doestr) ) { $number = array_pop($names); $family = end($names); } // $family = substr(strrchr($uname, " "), 1); $name = $cn[0]."."; /* First letter of the name (doesn't work with 8-bit strings) */ if ( $flag == 2 ) $name = $names[0]; eval("\$name = \"$name\";"); $family = $name." ".$family; } else { $family = $cn; } if ( !$family ) return $uname /* Give up */; return $family; } /** * @return string * @param uname string * @desc Takes user DN and attempts to extract her affiliation */ function getorg ( $uname ) { $uname = trim($uname); $pieces = explode("/L=", $uname); if ( count($pieces) == 1 ) $pieces = explode("/DC=", $uname); if ( count($pieces) == 1 ) $pieces = explode("/OU=", $uname); if ( count($pieces) == 1 ) $pieces = explode("/O=", $uname); $org = end($pieces); $tailpos = strpos($org, "/"); if ( $tailpos ) $org = substr($org,0,$tailpos); return $org; } ?>nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/settings.inc0000644000000000000000000000013214152153376025606 xustar000000000000000030 mtime=1638455038.439646485 30 atime=1638455038.512647582 30 ctime=1638455100.655581308 nordugrid-arc-6.14.0/src/services/monitor/includes/settings.inc0000644000175000002070000004161014152153376025575 0ustar00mockbuildmock00000000000000 "index1.nordugrid.org", "port" => "2135", "base" => "mds-vo-name=NorduGrid,o=grid", "vo" => "NorduGrid"), array("host" => "index2.nordugrid.org", "port" => "2135", "base" => "mds-vo-name=NorduGrid,o=grid", "vo" => "NorduGrid"), array("host" => "index3.nordugrid.org", "port" => "2135", "base" => "mds-vo-name=NorduGrid,o=grid", "vo" => "NorduGrid"), array("host" => "index4.nordugrid.org", "port" => "2135", "base" => "mds-vo-name=NorduGrid,o=grid", "vo" => "NorduGrid") //*** A country-level GIIS example, use as many as you wish to monitor: //, //array("host" => "f9pc18.ijs.si", // "port" => "2135", // "base" => "mds-vo-name=Slovenia,o=grid", // "vo" => "Slovenia") //*** A single site GRIS example, use as many as you wish to monitor: //, //array("host" => "gridmaster.pzr.uni-rostock.de", // "port" => "2135", // "base" => "nordugrid-cluster-name=gridmaster.pzr.uni-rostock.de,mds-vo-name=local,o=grid", // "vo" => "Germany") ); // list of ARCHERY endpoints to query $archery_list = array ( array ( /* ATLAS at NDGF */ "endpoint" => "ndgf.org") /* NorduGrid */, array ( "endpoint" => "nordugrid.org") ); // list of available EMIRs /* $emirslist = array( array("schema" => "https", "host" => "testbed-emi5.grid.upjs.sk", "port" => "54321", "base" => "mds-vo-name=NorduGrid,o=grid") ); /* * To set up TLS connection to EMIR server client certificate is mandatory: * Convert client certificate from *.pfx (pkcs12) into *.pem with openssl (if needed): * > openssl pkcs12 -in keys.pfx -out keys.pem */ $cert = "/var/www/monitor-svn/includes/test.pem"; $cert_pass = 'emi'; // base DNs for searches: local (GRIS), global (GIIS), VO if ( !defined("DN_LOCAL") ) define("DN_LOCAL","mds-vo-name=local,o=grid"); if ( !defined("DN_GLUE") ) define("DN_GLUE","o=glue"); if ( !defined("DN_GLOBL") ) define("DN_GLOBL","mds-vo-name=NorduGrid,o=grid"); if ( !defined("DN_VIORG") ) define("DN_VIORG","dc=nordugrid,dc=org"); if ( !defined("DN_PEOPL") ) 
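/*
 * Hedged configuration sketch, modelled on the commented-out GIIS/GRIS examples and
 * the $archery_list entries above in settings.inc; the host names are placeholders.
 *
 *   $giislist[] = array("host" => "giis.example.org",
 *                       "port" => "2135",
 *                       "base" => "mds-vo-name=Example,o=grid",
 *                       "vo"   => "Example");
 *
 *   $archery_list[] = array("endpoint" => "dns://_archery.example.org");
 *
 * An ARCHERY endpoint of this form is resolved by query_dns_archery() in archery.inc,
 * which expects DNS TXT records made of space-separated key=value pairs, for example
 * (illustrative record only):
 *
 *   u=ldap://arc.example.org:2135/o=glue t=org.nordugrid.ldapglue2
 */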
define("DN_PEOPL","ou=people,dc=nordugrid,dc=org"); if ( !defined("DN_GUEST") ) define("DN_GUEST","ou=guests,dc=nordugrid,dc=org"); if ( !defined("DN_TUTOR") ) define("DN_TUTOR","ou=tutorial,dc=nordugrid,dc=org"); if ( !defined("DN_SERVS") ) define("DN_SERVS","ou=services,dc=nordugrid,dc=org"); if ( !defined("DN_RECAT") ) define("DN_RECAT","rc=NorduGrid,dc=nordugrid,dc=org"); // Information system classes and attributes namespace prefix, "nordugrid" if ( !defined("IS_PREFX") ) define("IS_PREFX","nordugrid"); if ( !defined("IS_PREFXG") ) define("IS_PREFXG","glue2"); // Cache location (use ../htdata when installing directly in ./htdocs) if ( !defined("CACHE_LOCATION") ) define("CACHE_LOCATION","cache"); // Extra title to be added to "Grid Monitor" (e.g. My Favorite) if ( !defined("EXTRA_TITLE") ) define("EXTRA_TITLE",""); //========================================================================= // =================== no need to change things below ===================== //========================================================================= // objectclasses if ( !defined("OBJ_CLUS") ) define("OBJ_CLUS",IS_PREFX."-cluster"); if ( !defined("OBJ_STEL") ) define("OBJ_STEL",IS_PREFX."-se"); if ( !defined("OBJ_QUEU") ) define("OBJ_QUEU",IS_PREFX."-queue"); if ( !defined("OBJ_AJOB") ) define("OBJ_AJOB",IS_PREFX."-job"); if ( !defined("OBJ_USER") ) define("OBJ_USER",IS_PREFX."-authuser"); //GLUE2 if ( !defined("GOBJ_CLUS") ) define("GOBJ_CLUS",IS_PREFXG."ComputingService"); if ( !defined("GOBJ_STEL") ) define("GOBJ_STEL",IS_PREFXG."-se"); if ( !defined("GOBJ_QUEU") ) define("GOBJ_QUEU",IS_PREFXG."ComputingShare"); if ( !defined("GOBJ_AJOB") ) define("GOBJ_AJOB",IS_PREFXG."ComputingActivity"); if ( !defined("GOBJ_USER") ) define("GOBJ_USER",IS_PREFXG."-authuser"); if ( !defined("GOBJ_LOC") ) define("GOBJ_LOC",IS_PREFXG."Location"); if ( !defined("GOBJ_MAN") ) define("GOBJ_MAN",IS_PREFXG."Manager"); if ( !defined("GOBJ_CON") ) define("GOBJ_CON",IS_PREFXG."Contact"); if ( !defined("OBJ_PERS") ) define("OBJ_PERS","organizationalPerson"); if ( !defined("OBJ_RCOL") ) define("OBJ_RCOL","GlobusReplicaLogicalCollection"); /* RC Logical Collection object */ if ( !defined("OBJ_RFIL") ) define("OBJ_RFIL","GlobusReplicaLogicalFile"); /* RC Logical File object */ if ( !defined("OBJ_RFIN") ) define("OBJ_RFIN","GlobusReplicaFileInfo"); /* RC File Info object */ if ( !defined("OBJ_RSEL") ) define("OBJ_RSEL","GlobusReplicaInfo"); /* RC Info object */ // attributes //NG if ( !defined("CLU_NAME") ) define("CLU_NAME",IS_PREFX."-cluster-name"); if ( !defined("CLU_ANAM") ) define("CLU_ANAM",IS_PREFX."-cluster-aliasname"); if ( !defined("CLU_ZIPC") ) define("CLU_ZIPC",IS_PREFX."-cluster-location"); if ( !defined("CLU_TCPU") ) define("CLU_TCPU",IS_PREFX."-cluster-totalcpus"); if ( !defined("CLU_UCPU") ) define("CLU_UCPU",IS_PREFX."-cluster-usedcpus"); if ( !defined("CLU_TJOB") ) define("CLU_TJOB",IS_PREFX."-cluster-totaljobs"); if ( !defined("CLU_QJOB") ) define("CLU_QJOB",IS_PREFX."-cluster-queuedjobs"); /* deprecated since 0.5.38 */ if ( !defined("CLU_OWNR") ) define("CLU_OWNR",IS_PREFX."-cluster-owner"); if ( !defined("CLU_SUPP") ) define("CLU_SUPP",IS_PREFX."-cluster-support"); if ( !defined("CLU_PQUE") ) define("CLU_PQUE",IS_PREFX."-cluster-prelrmsqueued"); /* new since 0.5.38 */ if ( !defined("SEL_NAME") ) define("SEL_NAME",IS_PREFX."-se-name"); if ( !defined("SEL_BURL") ) define("SEL_BURL",IS_PREFX."-se-baseurl"); /* gone since 0.5.26 */ if ( !defined("SEL_CURL") ) define("SEL_CURL",IS_PREFX."-se-url"); /* in 
place since 0.5.26 */ if ( !defined("SEL_ANAM") ) define("SEL_ANAM",IS_PREFX."-se-aliasname"); if ( !defined("SEL_TYPE") ) define("SEL_TYPE",IS_PREFX."-se-type"); if ( !defined("SEL_FREE") ) define("SEL_FREE",IS_PREFX."-se-freespace"); if ( !defined("SEL_TOTA") ) define("SEL_TOTA",IS_PREFX."-se-totalspace"); if ( !defined("SEL_USER") ) define("SEL_USER",IS_PREFX."-se-authuser"); if ( !defined("QUE_NAME") ) define("QUE_NAME",IS_PREFX."-queue-name"); if ( !defined("QUE_STAT") ) define("QUE_STAT",IS_PREFX."-queue-status"); if ( !defined("QUE_RUNG") ) define("QUE_RUNG",IS_PREFX."-queue-running"); if ( !defined("QUE_GRUN") ) define("QUE_GRUN",IS_PREFX."-queue-gridrunning"); if ( !defined("QUE_MAXR") ) define("QUE_MAXR",IS_PREFX."-queue-maxrunning"); if ( !defined("QUE_QUED") ) define("QUE_QUED",IS_PREFX."-queue-queued"); /* deprecated since 0.5.38 */ if ( !defined("QUE_LQUE") ) define("QUE_LQUE",IS_PREFX."-queue-localqueued"); /* new since 0.5.38 */ if ( !defined("QUE_PQUE") ) define("QUE_PQUE",IS_PREFX."-queue-prelrmsqueued"); /* new since 0.5.38 */ if ( !defined("QUE_GQUE") ) define("QUE_GQUE",IS_PREFX."-queue-gridqueued"); if ( !defined("QUE_MAXQ") ) define("QUE_MAXQ",IS_PREFX."-queue-maxqueuable"); if ( !defined("QUE_ASCP") ) define("QUE_ASCP",IS_PREFX."-queue-totalcpus"); if ( !defined("QUE_MINT") ) define("QUE_MINT",IS_PREFX."-queue-mincputime"); if ( !defined("QUE_MAXT") ) define("QUE_MAXT",IS_PREFX."-queue-maxcputime"); if ( !defined("JOB_GLID") ) define("JOB_GLID",IS_PREFX."-job-globalid"); if ( !defined("JOB_NAME") ) define("JOB_NAME",IS_PREFX."-job-jobname"); if ( !defined("JOB_STAT") ) define("JOB_STAT",IS_PREFX."-job-status"); if ( !defined("JOB_EQUE") ) define("JOB_EQUE",IS_PREFX."-job-execqueue"); if ( !defined("JOB_ECLU") ) define("JOB_ECLU",IS_PREFX."-job-execcluster"); if ( !defined("JOB_GOWN") ) define("JOB_GOWN",IS_PREFX."-job-globalowner"); if ( !defined("JOB_USET") ) define("JOB_USET",IS_PREFX."-job-usedcputime"); if ( !defined("JOB_USEM") ) define("JOB_USEM",IS_PREFX."-job-usedmem"); if ( !defined("JOB_SUBM") ) define("JOB_SUBM",IS_PREFX."-job-submissiontime"); if ( !defined("JOB_COMP") ) define("JOB_COMP",IS_PREFX."-job-completiontime"); if ( !defined("JOB_ERRS") ) define("JOB_ERRS",IS_PREFX."-job-errors"); if ( !defined("JOB_CPUS") ) define("JOB_CPUS",IS_PREFX."-job-cpucount"); if ( !defined("USR_NAME") ) define("USR_NAME",IS_PREFX."-authuser-name"); if ( !defined("USR_USSN") ) define("USR_USSN",IS_PREFX."-authuser-sn"); if ( !defined("USR_CPUS") ) define("USR_CPUS",IS_PREFX."-authuser-freecpus"); if ( !defined("USR_QUEU") ) define("USR_QUEU",IS_PREFX."-authuser-queuelength"); if ( !defined("USR_DISK") ) define("USR_DISK",IS_PREFX."-authuser-diskspace"); //GLUE2 if ( !defined("GCLU_NAME") ) define("GCLU_NAME",IS_PREFXG."entityname"); if ( !defined("GCLU_ANAM") ) define("GCLU_ANAM",IS_PREFXG."entityname"); if ( !defined("GCLU_ZIPC") ) define("GCLU_ZIPC",IS_PREFXG."locationpostcode"); if ( !defined("GCLU_TCPU") ) define("GCLU_TCPU",IS_PREFXG."computingmanagertotallogicalcpus"); if ( !defined("GCLU_UCPU") ) define("GCLU_UCPU",IS_PREFXG."-cluster-usedcpus"); if ( !defined("GCLU_TJOB") ) define("GCLU_TJOB",IS_PREFXG."computingservicetotaljobs"); if ( !defined("GCLU_QJOB") ) define("GCLU_QJOB",IS_PREFXG."-cluster-queuedjobs"); /* deprecated since 0.5.38 */ if ( !defined("GCLU_OWNR") ) define("GCLU_OWNR",IS_PREFXG."-cluster-owner"); if ( !defined("GCLU_SUPP") ) define("GCLU_SUPP",IS_PREFXG."-cluster-support"); if ( !defined("GCLU_PQUE") ) 
define("GCLU_PQUE",IS_PREFXG."computingserviceprelrmswaitingjobs"); /* new since 0.5.38 */ if ( !defined("GSEL_NAME") ) define("GSEL_NAME",IS_PREFXG."-se-name"); if ( !defined("GSEL_BURL") ) define("GSEL_BURL",IS_PREFXG."-se-baseurl"); /* gone since 0.5.26 */ if ( !defined("GSEL_CURL") ) define("GSEL_CURL",IS_PREFXG."-se-url"); /* in place since 0.5.26 */ if ( !defined("GSEL_ANAM") ) define("GSEL_ANAM",IS_PREFXG."-se-aliasname"); if ( !defined("GSEL_TYPE") ) define("GSEL_TYPE",IS_PREFXG."-se-type"); if ( !defined("GSEL_FREE") ) define("GSEL_FREE",IS_PREFXG."-se-freespace"); if ( !defined("GSEL_TOTA") ) define("GSEL_TOTA",IS_PREFXG."-se-totalspace"); if ( !defined("GSEL_USER") ) define("GSEL_USER",IS_PREFXG."-se-authuser"); if ( !defined("GQUE_NAME") ) define("GQUE_NAME",IS_PREFXG."entityname"); if ( !defined("GQUE_MAPQ") ) define("GQUE_MAPQ",IS_PREFXG."computingsharemappingqueue"); if ( !defined("GQUE_STAT") ) define("GQUE_STAT",IS_PREFXG."computingshareservingstate"); if ( !defined("GQUE_RUNG") ) define("GQUE_RUNG",IS_PREFXG."computingsharelocalrunningjobs"); if ( !defined("GQUE_GRUN") ) define("GQUE_GRUN",IS_PREFXG."computingsharerunningjobs"); if ( !defined("GQUE_MAXR") ) define("GQUE_MAXR",IS_PREFXG."computingsharemaxrunningjobs"); if ( !defined("GQUE_QUED") ) define("GQUE_QUED",IS_PREFXG."-queue-queued"); /* deprecated since 0.5.38 */ if ( !defined("GQUE_LQUE") ) define("GQUE_LQUE",IS_PREFXG."computingsharelocalwaitingjobs"); /* new since 0.5.38 */ if ( !defined("GQUE_PQUE") ) define("GQUE_PQUE",IS_PREFXG."computingshareprelrmswaitingjobs"); /* new since 0.5.38 */ if ( !defined("GQUE_GQUE") ) define("GQUE_GQUE",IS_PREFXG."computingsharewaitingjobs"); if ( !defined("GQUE_MAXQ") ) define("GQUE_MAXQ",IS_PREFXG."-queue-maxqueuable"); if ( !defined("GQUE_ASCP") ) define("GQUE_ASCP",IS_PREFXG."-queue-totalcpus"); if ( !defined("GQUE_MINT") ) define("GQUE_MINT",IS_PREFXG."-queue-mincputime"); if ( !defined("GQUE_MAXT") ) define("GQUE_MAXT",IS_PREFXG."-queue-maxcputime"); if ( !defined("GJOB_GLID") ) define("GJOB_GLID",IS_PREFXG."activityid"); if ( !defined("GJOB_NAME") ) define("GJOB_NAME",IS_PREFXG."name"); if ( !defined("GJOB_STAT") ) define("GJOB_STAT",IS_PREFXG."computingactivitystate"); if ( !defined("GJOB_EQUE") ) define("GJOB_EQUE",IS_PREFXG."computingactivityqueue"); if ( !defined("GJOB_ECLU") ) define("GJOB_ECLU",IS_PREFXG."computingactivityexecutionnode"); if ( !defined("GJOB_GOWN") ) define("GJOB_GOWN",IS_PREFXG."computingactivityowner"); if ( !defined("GJOB_USET") ) define("GJOB_USET",IS_PREFXG."computingactivityusedtotalcputime"); if ( !defined("GJOB_USEM") ) define("GJOB_USEM",IS_PREFXG."computingactivityusedmainmemory"); if ( !defined("GJOB_SUBM") ) define("GJOB_SUBM",IS_PREFXG."computingactivitysubmissiontime"); if ( !defined("GJOB_COMP") ) define("GJOB_COMP",IS_PREFXG."computingactivitycomputingmanagerendtime"); if ( !defined("GJOB_ERRS") ) define("GJOB_ERRS",IS_PREFXG."computingactivityerror"); if ( !defined("GJOB_CPUS") ) define("GJOB_CPUS",IS_PREFXG."computingactivityrequestedslots"); if ( !defined("GUSR_NAME") ) define("GUSR_NAME",IS_PREFXG."-authuser-name"); if ( !defined("GUSR_USSN") ) define("GUSR_USSN",IS_PREFXG."-authuser-sn"); if ( !defined("GUSR_CPUS") ) define("GUSR_CPUS",IS_PREFXG."-authuser-freecpus"); if ( !defined("GUSR_QUEU") ) define("GUSR_QUEU",IS_PREFXG."-authuser-queuelength"); if ( !defined("GUSR_DISK") ) define("GUSR_DISK",IS_PREFXG."-authuser-diskspace"); if ( !defined("VO_USCN" ) ) define("VO_USCN" ,"cn"); if ( !defined("VO_USSN" ) ) define("VO_USSN" 
,"sn"); if ( !defined("VO_DESC" ) ) define("VO_DESC" ,"description"); if ( !defined("VO_MAIL" ) ) define("VO_MAIL" ,"mail"); if ( !defined("VO_INST" ) ) define("VO_INST" ,"o"); //************************************* Grid Monitor top window style ****************************** $def_loadmon = array( "refresh" => 120, "bgcolor" => "#ffffff", "thcolor" => "#005659", "lcolor" => "#005659", "tbcolor" => "#ffecb5", "thfont" => "face=\"sans-serif\" color=#ffffff", "tbfont" => "face=\"sans-serif\"" ); //************************************* Cluster description style ********************************** $def_clusdes = array ( //"title" => $theaders["clusdes"][0], "refresh" => 600, "bgcolor" => "#ffcc33", "thcolor" => "#000099", "lcolor" => "#000099", "tbcolor" => "#ffffcc", "thfont" => "face=\"sans-serif\" color=\"#ffffff\"", "tbfont" => "face=\"sans-serif\" color=\"#000099\"" ); //*************************************** Job statistics style ************************************* $def_jobstat = array ( "refresh" => 600, "bgcolor" => "#ffffff", "thcolor" => "#ffcc33", "lcolor" => "#000099", "tbcolor" => "#ffffcc", "thfont" => "face=\"sans-serif\" color=\"#000000\"", "tbfont" => "face=\"sans-serif\" color=\"#000099\"" ); //******************************************* VO list style *************************************** $def_volist = array( "refresh" => 0, "bgcolor" => "#ffffff", "thcolor" => "#ffcc33", "lcolor" => "#ffff00", "tbcolor" => "#cc0033", "thfont" => "face=\"sans-serif\" color=\"#993300\"", "tbfont" => "face=\"sans-serif\" color=\"#ffffff\"" ); //***************************************** VO user base style ************************************* $def_vousers = array( "refresh" => 0, "bgcolor" => "#ffffff", "thcolor" => "#ffcc33", "lcolor" => "#ffcccc", "tbcolor" => "#000099", "thfont" => "face=\"sans-serif\" color=\"#000000\"", "tbfont" => "face=\"sans-serif\" color=\"#ffffff\"" ); //***************************************** User job list style ************************************ $def_userlist = array( "refresh" => 0, "bgcolor" => "#ffffcc", "thcolor" => "#ffcc33", "lcolor" => "#000099", "tbcolor" => "#ffffff", "thfont" => "face=\"sans-serif\" color=\"#000000\"", "tbfont" => "face=\"sans-serif\" color=\"#000099\"" ); $def_userres = array( "thcolor" => "#000099", "tbcolor" => "#ffffcc", "thfont" => "face=\"sans-serif\" color=\"#ffffff\"", "tbfont" => "face=\"sans-serif\" color=\"#000099\"" ); //**************************************** Attribute list style ************************************ $def_attlist = array( "refresh" => 0, "bgcolor" => "#ffffff", "thcolor" => "#000099", "lcolor" => "#000099", "tbcolor" => "#ccffff", "thfont" => "face=\"sans-serif\" color=\"#ffffff\"", "tbfont" => "face=\"sans-serif\" color=\"#000099\"" ); //****************************************** Queue job list style ********************************** $def_quelist = array( "refresh" => 300, "bgcolor" => "#ffffff", "thcolor" => "#000099", "lcolor" => "#000099", "tbcolor" => "#ffffcc", "thfont" => "face=\"sans-serif\" color=\"#ffffff\"", "tbfont" => "face=\"sans-serif\" color=\"#000099\"" ); //******************************************* SE info style *************************************** $def_sestat = array( "refresh" => 300, "bgcolor" => "#ffffff", "thcolor" => "#ffcc33", "lcolor" => "#003300", "tbcolor" => "#CCCC99", "thfont" => "face=\"sans-serif\" color=\"#990000\"", "tbfont" => "face=\"sans-serif\" color=\"#000000\"" ); //******************************************* Users info style 
*************************************** $def_allusers = array( "refresh" => 0, "bgcolor" => "#ffffff", "thcolor" => "#339966", "lcolor" => "#003300", "tbcolor" => "#ccffcc", "thfont" => "face=\"sans-serif\" color=\"#ffffff\"", "tbfont" => "face=\"sans-serif\" color=\"#000000\"" ); //***************************** LDAP parameters dump style - no need to modify ********************* $def_ldapdump = array( "thcolor" => "#000099", "tbcolor" => "#ffffcc", "thfont" => "face=\"sans-serif\" color=\"#ffffff\"", "tbfont" => "face=\"sans-serif\" color=\"#000099\"" ); ?> nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/toreload.inc0000644000000000000000000000013214152153376025557 xustar000000000000000030 mtime=1638455038.439646485 30 atime=1638455038.512647582 30 ctime=1638455100.656581323 nordugrid-arc-6.14.0/src/services/monitor/includes/toreload.inc0000644000175000002070000000060614152153376025546 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/cache.inc0000644000000000000000000000013114152153376025010 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.643581128 nordugrid-arc-6.14.0/src/services/monitor/includes/cache.inc0000644000175000002070000000255314152153376025003 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/emirs_info.inc0000644000000000000000000000013114152153376026077 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.647581188 nordugrid-arc-6.14.0/src/services/monitor/includes/emirs_info.inc0000644000175000002070000001525514152153376026075 0ustar00mockbuildmock00000000000000 "https", * "host" => "testbed-emi5.grid.upjs.sk, * "port" => "54321", * "base" => "mds-vo-name=NorduGrid,o=grid"),...) */ $tlim = 2; $tout = 5; if($debug && !empty($emirslist)) dbgmsg("
    :::> ".$errors["130"].$tlim.$errors["102"].$tout.$errors["103"]." <:::

    "); $nemirs = count($emirslist); $counter = count($gentries); $tag = array(); $entries = $gentries; $blacklist = array(); if ( file_exists("blacklist.inc") ) include('blacklist.inc'); // Loop on entered EMIR sites // If a host is blacklisted, skip // If a host is a cluster (GRIS), keep and skip // If a host is any other local GRIS, skip for ( $ig = 0; $ig < $nemirs; $ig++ ) { $eschema = $emirslist[$ig]["schema"]; $ehost = $emirslist[$ig]["host"]; if ( @$blacklist[$ehost] ) { if ( $debug ) dbgmsg("
    ".$errors["122"]."$ehost
    \n"); continue; } $eport = $emirslist[$ig]["port"]; $ebase = $emirslist[$ig]["base"]; if ( preg_match("/$element/i",$ebase) ) { // Invent a "fake DN" for host tagging and skip duplicated entries $fakedn = "hn=".$ehost.", ".$ebase; if ( @$tag[$fakedn] ) continue; $tag[$fakedn] = 1; continue; } elseif ( preg_match("/local/i",$ebase) ) { if ( $debug ) dbgmsg("
    ".$errors["115"].$ehost." (".$errors["116"].$element.")"); continue; } if ( $debug == 2 ) dbgmsg("
    ".$errors["117"]."$ehost..."); // Connection to EMIR $path = "services"; $query = "Service_Endpoint_Capability=information.discovery.resource&Service_Endpoint_Technology=ldap"; $res = http_request('GET', $eschema."://".$ehost.":".$eport."/".$path."?".$query, $data, $cert, $referer=''); if ($res["status"] == "ok"){ if ( $debug == 2 ) dbgmsg($errors["118"]); $json_a=json_decode($res["content"], true); $nrecords = count($json_a); for ($i = 0; $i < $nrecords; $i++) { $url = parse_url($json_a[$i]["Service_Endpoint_URL"]); $curhost = $url["host"]; $curhstat = $json_a[$i]["Service_Endpoint_HealthState"]; $cursstat = $json_a[$i]["Service_Endpoint_ServingState"]; /* * URL structure: * * ldapng: ldap://:2135/Mds-Vo-Name=local,o=grid * * ldapglue1: ldap://:2135/Mds-Vo-Name=resource,o=grid * * ldapglue2: ldap://:2135/o=glue */ // Introduce "fake" DN for tagging purpose - helps skipping sites registering twice $fakedn = "hn=".$url["host"].", ".$url["base"]; //if ( @$tag[$fakedn] ) continue; // Note: We need all enpoint about a service!!! if ( @$blacklist[$curhost] ) { if ( $debug>0 ) dbgmsg("
    ".$errors["122"]."$curhost
    \n"); continue; } $curstat = (($curhstat == "ok")&&($cursstat == "production")) ? "VALID": "healt state: '".$curhstate."', serving state: '".$cursstate."'"; if ( $curstat != "VALID" ) { if ( $debug ) dbgmsg("
    $curstat".$errors["121"]."$fakedn
    \n"); //continue; } $entries[$counter]["host"] = $url["host"]; $entries[$counter]["port"] = $url["port"]; $entries[$counter]["base"] = substr($url["path"],1); if ( $debug == 2 ) dbgmsg("
    ".$errors["123"]."$base: $fakedn
    \n"); $tag[$fakedn] = 1; $counter++; } } } if ( $debug == 2 ) dbgmsg("
    "); // Some debugging printout if ( $debug == 2 ) { dbgmsg("

    ".$errors["119"].$element.": ".$counter."
    "); foreach ( $entries as $num=>$val ) dbgmsg($val["host"].":".$val["port"]."/".$val["base"]."
    "); } return $entries; } /* * Send http request to the given URL of the server. */ function http_request($type, $url, $data, $cert, $referer='') { // Convert the data array into URL Parameters like a=b&foo=bar etc. $data = http_build_query($data); // parse the given URL $url = parse_url($url); if ($url['scheme'] != 'https' && $url['scheme'] != 'http') { die('Error: Only HTTP(S) request are supported !'); } // extract host and path: $host = $url['host']; $port = $url['port']; $path = $url['path']; $query= $url['query']; // open a socket connection on the given port - timeout: 30 sec $fp = stream_socket_client($host.":".$port, $errno, $errstr, 30); if ($url['scheme'] == 'https') { // add secure properties $context = stream_context_create(); $result = stream_context_set_option($context, 'ssl', 'local_cert', $cert); $result = stream_context_set_option($context, 'ssl', 'passphrase', $cert_pass); // open a secure socket connection on the given port - timeout: 30 sec $fp = stream_socket_client("ssl://".$host.":".$port, $errno, $errstr, 30, STREAM_CLIENT_CONNECT,$context); } if ($fp){ // send the request headers: fputs($fp, $type." $path?$query HTTP/1.1\r\n"); fputs($fp, "Host: $host\r\n"); if ($referer != '') fputs($fp, "Referer: $referer\r\n"); fputs($fp, "Content-type: application/x-www-form-urlencoded\r\n"); fputs($fp, "Content-length: ". strlen($data) ."\r\n"); fputs($fp, "Connection: close\r\n\r\n"); fputs($fp, $data); $result = ''; while(!feof($fp)) { // receive the results of the request $result .= fgets($fp, 128); } } else { return array( 'status' => 'err', 'error' => "$errstr ($errno)" ); } // close the socket connection: fclose($fp); // split the result header from the content $result = explode("\r\n\r\n", $result, 2); $header = isset($result[0]) ? $result[0] : ''; $content = isset($result[1]) ? $result[1] : ''; // return as structured array: return array( 'status' => 'ok', 'header' => $header, 'content' => $content ); } ?> nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/cnvalias.inc0000644000000000000000000000013114152153376025545 xustar000000000000000029 mtime=1638455038.43864647 30 atime=1638455038.512647582 30 ctime=1638455100.644581143 nordugrid-arc-6.14.0/src/services/monitor/includes/cnvalias.inc0000644000175000002070000000302614152153376025534 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/monitor/includes/PaxHeaders.30264/recursive_giis_info.inc0000644000000000000000000000013214152153376030003 xustar000000000000000030 mtime=1638455038.439646485 30 atime=1638455038.512647582 30 ctime=1638455100.654581293 nordugrid-arc-6.14.0/src/services/monitor/includes/recursive_giis_info.inc0000644000175000002070000001265214152153376027776 0ustar00mockbuildmock00000000000000 "grid.nbi.dk", * "port" => "2135", * "base" => "mds-vo-name=NorduGrid,o=grid"),...) */ $loopcnt++; $tlim = 2; $tout = 5; if($debug && count($giislist) < 5) dbgmsg("
    :::> ".$errors["114"].$tlim.$errors["102"].$tout.$errors["103"]." <:::

    "); $greg = array(); $gfilter = "(objectclass=mds*)"; $ngiis = count($giislist); $counter = 0; $tag = array(); $dsarray = array(); $dnarray = array(); $hnarray = array(); $entries = array(); $blacklist = array(); @include('blacklist.inc'); // uses blacklist if it is in includes or current path // Loop on entered sites // If a host is blacklisted, skip // If a host is a cluster (ARIS), keep and skip // If a host is any other local ARIS, skip for ( $ig = 0; $ig < $ngiis; $ig++ ) { $ghost = $giislist[$ig]["host"]; if ( @$blacklist[$ghost] ) { if ( $debug ) dbgmsg("
    ".$errors["122"]."$ghost
    \n"); continue; } $gport = $giislist[$ig]["port"]; $gbase = $giislist[$ig]["base"]; if ( preg_match("/$element/i",$gbase) ) { // Invent a "fake DN" for host tagging and skip duplicated entries $fakedn = "hn=".$ghost.", ".$gbase; if ( @$tag[$fakedn] ) continue; $tag[$fakedn] = 1; array_push($entries,$giislist[$ig]); $counter++; continue; } elseif ( preg_match("/local/i",$gbase) ) { if ( $debug ) dbgmsg("
    ".$errors["115"].$ghost." (".$errors["116"].$element.")"); continue; } if ( $debug == 2 ) dbgmsg("
    ".$errors["117"]."$ghost..."); $fp = @fsockopen($ghost, $gport, $errno, $errstr, 2); $ldapuri = "ldap://".$ghost.":".$gport; $gconn = ldap_connect($ldapuri); if ( $fp && $gconn ) { fclose($fp); if ( $debug == 2 ) dbgmsg($errors["118"]); array_push($dsarray,$gconn); array_push($dnarray,$gbase); array_push($hnarray,$ghost); } if ( $debug == 2 ) dbgmsg("
    "); } // Some debugging printout if ( $debug == 2 ) { dbgmsg("

    ".$errors["119"].$element.": ".$counter."
    "); foreach ( $entries as $num=>$val ) dbgmsg($val["host"].":".$val["base"]."
    "); } // Check if there is underlying structure $srarray = @ldap_read($dsarray,$dnarray,$gfilter,$greg,0,0,$tlim,LDAP_DEREF_NEVER); // If using the pached LDAP //$srarray = @ldap_read($dsarray,$dnarray,$gfilter,$greg,0,0,$tlim,LDAP_DEREF_NEVER,$tout); // Debug: check if something eventualy timeouts or something if ( $debug ) { $nconns = count($dsarray); for ( $ii = 0; $ii < $nconns; $ii++ ) { $ldconn = $dsarray[$ii]; $hnconn = $hnarray[$ii]; if ( ldap_errno($ldconn) != 0x00 ) { $ldaperrmess = ldap_error($ldconn); dbgmsg("".$errors["120"].$hnconn.": ".$ldaperrmess."
    "); } } } $nhosts = 0; if ( !empty($srarray) ) $nhosts = count($srarray); // If EGIISes are found, loop on contacted EGIISes if ( $nhosts ) { $truecount = 0; for( $ids = 0; $ids < $nhosts; $ids++ ) { // suppose N hosts answered (nhosts), each returned M lower registrants (nrecords) // some of lower registrants are the same and have to be purged // and everything should be re-arranged in a new common array $sr = $srarray[$ids]; $ds = $dsarray[$ids]; $base = $dnarray[$ids]; if ($sr) $truecount++; $record = @ldap_get_entries($ds,$sr); $nrecords = $record["count"]; // Per each contacted EGIIS, loop on potential lower-level EGIISes/clusters for ($i = 0; $i < $nrecords; $i++) { $curdn = $record[$i]["dn"]; $curhost = $record[$i]["mds-service-hn"][0]; $curstat = $record[$i]["mds-reg-status"][0]; $curport = $record[$i]["mds-service-port"][0]; $cursuff = $record[$i]["mds-service-ldap-suffix"][0]; // Introduce "fake" DN for tagging purpose - helps skipping sites registering twice $fakedn = "hn=".$curhost.", ".$cursuff; if ( @$tag[$fakedn] ) continue; if ( @$blacklist[$curhost] ) { if ( $debug>0 ) dbgmsg("
    ".$errors["122"]."$curhost
    \n"); continue; } if ( $curstat != "VALID" ) { if ( $debug ) dbgmsg("
    $curstat".$errors["121"]."$fakedn
    \n"); continue; } // array_push($entries,$record[$i]); $entries[$counter]["host"] = $curhost; $entries[$counter]["port"] = $curport; $entries[$counter]["base"] = $cursuff; if ( $debug == 2 ) dbgmsg("
    ".$errors["123"]."$base: $fakedn
    \n"); $tag[$fakedn] = 1; $counter++; } } // Array $entries contains all possible stuff which registers to a EGIIS // Keep recursing if ($truecount && $loopcnt < 10 ) $entries = recursive_giis_info($entries,$element,$errors,$debug,$loopcnt); } return $entries; } ?> nordugrid-arc-6.14.0/src/services/monitor/PaxHeaders.30264/monitor.in0000644000000000000000000000013214152153376023464 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.488578799 nordugrid-arc-6.14.0/src/services/monitor/monitor.in0000644000175000002070000000010414152153376023444 0ustar00mockbuildmock00000000000000#!/bin/sh lynx http://localhost/@monitor_local_prefix@/loadmon.php nordugrid-arc-6.14.0/src/services/PaxHeaders.30264/candypond0000644000000000000000000000013214152153474021657 xustar000000000000000030 mtime=1638455100.681581699 30 atime=1638455103.997631524 30 ctime=1638455100.681581699 nordugrid-arc-6.14.0/src/services/candypond/0000755000175000002070000000000014152153474021721 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/candypond/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376023770 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.677581639 nordugrid-arc-6.14.0/src/services/candypond/Makefile.am0000644000175000002070000000135714152153376023764 0ustar00mockbuildmock00000000000000pkglib_LTLIBRARIES = libcandypond.la libcandypond_la_SOURCES = CandyPond.h CandyPond.cpp \ CandyPondGenerator.h CandyPondGenerator.cpp libcandypond_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) libcandypond_la_LIBADD = \ ../a-rex/grid-manager/libgridmanager.la \ ../a-rex/delegation/libdelegation.la \ $(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLIBMM_LIBS) libcandypond_la_LDFLAGS = -no-undefined -avoid-version -module nordugrid-arc-6.14.0/src/services/candypond/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435023776 xustar000000000000000030 mtime=1638455069.001105685 30 atime=1638455090.763432674 30 ctime=1638455100.676581624 nordugrid-arc-6.14.0/src/services/candypond/Makefile.in0000644000175000002070000007543314152153435023777 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/candypond DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ 
/g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(pkglibdir)" LTLIBRARIES = $(pkglib_LTLIBRARIES) am__DEPENDENCIES_1 = libcandypond_la_DEPENDENCIES = \ ../a-rex/grid-manager/libgridmanager.la \ ../a-rex/delegation/libdelegation.la \ $(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(am__DEPENDENCIES_1) am_libcandypond_la_OBJECTS = libcandypond_la-CandyPond.lo \ libcandypond_la-CandyPondGenerator.lo libcandypond_la_OBJECTS = $(am_libcandypond_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libcandypond_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(libcandypond_la_CXXFLAGS) $(CXXFLAGS) \ $(libcandypond_la_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libcandypond_la_SOURCES) DIST_SOURCES = $(libcandypond_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. 
am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = 
@GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ 
SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ pkglib_LTLIBRARIES = libcandypond.la libcandypond_la_SOURCES = CandyPond.h CandyPond.cpp \ CandyPondGenerator.h CandyPondGenerator.cpp libcandypond_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) libcandypond_la_LIBADD = \ ../a-rex/grid-manager/libgridmanager.la \ ../a-rex/delegation/libdelegation.la \ $(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLIBMM_LIBS) libcandypond_la_LDFLAGS = -no-undefined -avoid-version -module all: all-am 
.SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/candypond/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/candypond/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): install-pkglibLTLIBRARIES: $(pkglib_LTLIBRARIES) @$(NORMAL_INSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ list2="$$list2 $$p"; \ else :; fi; \ done; \ test -z "$$list2" || { \ echo " $(MKDIR_P) '$(DESTDIR)$(pkglibdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkglibdir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(pkglibdir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(pkglibdir)"; \ } uninstall-pkglibLTLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ for p in $$list; do \ $(am__strip_dir) \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(pkglibdir)/$$f'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(pkglibdir)/$$f"; \ done clean-pkglibLTLIBRARIES: -test -z "$(pkglib_LTLIBRARIES)" || rm -f $(pkglib_LTLIBRARIES) @list='$(pkglib_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libcandypond.la: $(libcandypond_la_OBJECTS) $(libcandypond_la_DEPENDENCIES) $(EXTRA_libcandypond_la_DEPENDENCIES) $(AM_V_CXXLD)$(libcandypond_la_LINK) -rpath $(pkglibdir) $(libcandypond_la_OBJECTS) $(libcandypond_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libcandypond_la-CandyPond.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libcandypond_la-CandyPondGenerator.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c 
-o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libcandypond_la-CandyPond.lo: CandyPond.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libcandypond_la_CXXFLAGS) $(CXXFLAGS) -MT libcandypond_la-CandyPond.lo -MD -MP -MF $(DEPDIR)/libcandypond_la-CandyPond.Tpo -c -o libcandypond_la-CandyPond.lo `test -f 'CandyPond.cpp' || echo '$(srcdir)/'`CandyPond.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libcandypond_la-CandyPond.Tpo $(DEPDIR)/libcandypond_la-CandyPond.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='CandyPond.cpp' object='libcandypond_la-CandyPond.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libcandypond_la_CXXFLAGS) $(CXXFLAGS) -c -o libcandypond_la-CandyPond.lo `test -f 'CandyPond.cpp' || echo '$(srcdir)/'`CandyPond.cpp libcandypond_la-CandyPondGenerator.lo: CandyPondGenerator.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libcandypond_la_CXXFLAGS) $(CXXFLAGS) -MT libcandypond_la-CandyPondGenerator.lo -MD -MP -MF $(DEPDIR)/libcandypond_la-CandyPondGenerator.Tpo -c -o libcandypond_la-CandyPondGenerator.lo `test -f 'CandyPondGenerator.cpp' || echo '$(srcdir)/'`CandyPondGenerator.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libcandypond_la-CandyPondGenerator.Tpo $(DEPDIR)/libcandypond_la-CandyPondGenerator.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='CandyPondGenerator.cpp' object='libcandypond_la-CandyPondGenerator.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libcandypond_la_CXXFLAGS) $(CXXFLAGS) -c -o libcandypond_la-CandyPondGenerator.lo `test -f 'CandyPondGenerator.cpp' || echo '$(srcdir)/'`CandyPondGenerator.cpp mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z 
"$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: for dir in "$(DESTDIR)$(pkglibdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool clean-pkglibLTLIBRARIES \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-pkglibLTLIBRARIES install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-pkglibLTLIBRARIES .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-pkglibLTLIBRARIES cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-pkglibLTLIBRARIES install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags tags-am uninstall uninstall-am \ uninstall-pkglibLTLIBRARIES # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/candypond/PaxHeaders.30264/CandyPond.h0000644000000000000000000000013114152153376023764 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.679581669 nordugrid-arc-6.14.0/src/services/candypond/CandyPond.h0000644000175000002070000001070314152153376023753 0ustar00mockbuildmock00000000000000#ifndef CANDYPONDSERVICE_H_ #define CANDYPONDSERVICE_H_ #include #include #include #include #include // A-REX includes for GM configuration and delegation #include "../a-rex/grid-manager/conf/GMConfig.h" #include "../a-rex/grid-manager/files/ControlFileContent.h" #include "../a-rex/grid-manager/files/ControlFileHandling.h" #include "../a-rex/delegation/DelegationStore.h" #include "CandyPondGenerator.h" namespace CandyPond { /** * CandyPond provides functionality for A-REX cache operations that can be * performed by remote clients. It currently consists of three operations: * CacheCheck - allows querying of the cache for the presence of files. * CacheLink - enables a running job to dynamically request cache files to * be linked to its working (session) directory. * CacheLinkQuery - query the status of a transfer initiated by CacheLink. * This service is especially useful in the case of pilot job workflows where * job submission does not follow the usual ARC workflow. In order for input * files to be available to jobs, the pilot job can call CandyPond to * prepare them. If requested files are not present in the cache, they can be * downloaded by CandyPond if requested, using the DTR data staging * framework. 
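 * A typical pilot-job sequence is therefore: call CacheCheck to find out
 * which input files are already cached, call CacheLink (optionally asking
 * for missing files to be staged) to link them into the job's session
 * directory, and then poll CacheLinkQuery until any staging started by
 * CacheLink has completed.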
*/ class CandyPond: public Arc::Service { private: /** Return codes of cache link */ enum CacheLinkReturnCode { Success, // everything went ok Staging, // files are still in the middle of downloading NotAvailable, // cache file doesn't exist and dostage is false Locked, // cache file is locked (being downloaded by other process) CacheError, // error with cache (configuration, filesystem etc) PermissionError, // user doesn't have permission on original source LinkError, // error while linking to session dir DownloadError, // error downloading cache file BadURLError, // A bad URL was supplied which could not be handled }; /** Construct a SOAP error message with optional extra reason string */ Arc::MCC_Status make_soap_fault(Arc::Message& outmsg, const std::string& reason = ""); /** Add a Result element to a response */ void add_result_element(Arc::XMLNode& results, const std::string& fileurl, CacheLinkReturnCode returncode, const std::string& reason); /** CandyPond namespace */ Arc::NS ns; /** A-REX configuration */ ARex::GMConfig config; /** Generator to handle data staging */ CandyPondGenerator* dtr_generator; /** Logger object */ static Arc::Logger logger; protected: /* Cache operations */ /** * Check whether the URLs supplied in the input are present in any cache. * Returns in the out message for each file true or false, and if true, * the size of the file on cache disk. * @param mapped_user The local user to which the client DN was mapped */ Arc::MCC_Status CacheCheck(Arc::XMLNode in, Arc::XMLNode out, const Arc::User& mapped_user); /** * This method is used to link cache files to the session dir. A list of * URLs is supplied and if they are present in the cache and the user * calling the service has permission to access them, then they are linked * to the given session directory. If the user requests that missing files * be staged, then data staging requests are entered. The user should then * use CacheLinkQuery to poll the status of the requests. * @param mapped_user The local user to which the client DN was mapped */ Arc::MCC_Status CacheLink(Arc::XMLNode in, Arc::XMLNode out, const Arc::User& mapped_user); /** * Query the status of data staging for a given job ID. */ Arc::MCC_Status CacheLinkQuery(Arc::XMLNode in, Arc::XMLNode out); public: /** * Make a new CandyPond. Reads the configuration and determines * the validity of the service. */ CandyPond(Arc::Config *cfg, Arc::PluginArgument* parg); /** * Destroy the CandyPond */ virtual ~CandyPond(void); /** * Main method called by HED when CandyPond is invoked. Directs call * to appropriate CandyPond method. */ virtual Arc::MCC_Status process(Arc::Message &inmsg, Arc::Message &outmsg); /** Returns true if the CandyPond is valid. */ operator bool() { return valid; }; /** Returns true if the CandyPond is not valid. 
*/ bool operator!() { return !valid; }; }; } // namespace CandyPond #endif /* CANDYPONDSERVICE_H_ */ nordugrid-arc-6.14.0/src/services/candypond/PaxHeaders.30264/CandyPond.cpp0000644000000000000000000000013114152153376024317 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.680581684 nordugrid-arc-6.14.0/src/services/candypond/CandyPond.cpp0000644000175000002070000005341714152153376024317 0ustar00mockbuildmock00000000000000#include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "CandyPond.h" namespace CandyPond { static Arc::Plugin *get_service(Arc::PluginArgument* arg) { Arc::ServicePluginArgument* srvarg = arg?dynamic_cast(arg):NULL; if(!srvarg) return NULL; CandyPond* s = new CandyPond((Arc::Config*)(*srvarg),arg); if (*s) return s; delete s; return NULL; } Arc::Logger CandyPond::logger(Arc::Logger::rootLogger, "CandyPond"); CandyPond::CandyPond(Arc::Config *cfg, Arc::PluginArgument* parg) : Service(cfg,parg), dtr_generator(NULL) { valid = false; // read configuration information /* candypond config specifies A-REX conf file /etc/arc.conf */ ns["candypond"] = "urn:candypond_config"; if (!(*cfg)["service"] || !(*cfg)["service"]["config"]) { // error - no config defined logger.msg(Arc::ERROR, "No A-REX config file found in candypond configuration"); return; } std::string arex_config = (std::string)(*cfg)["service"]["config"]; logger.msg(Arc::INFO, "Using A-REX config file %s", arex_config); config.SetConfigFile(arex_config); if (!config.Load()) { logger.msg(Arc::ERROR, "Failed to process A-REX configuration in %s", arex_config); return; } config.Print(); if (config.CacheParams().getCacheDirs().empty() && config.CacheParams().getReadOnlyCacheDirs().empty()) { logger.msg(Arc::ERROR, "No caches defined in configuration"); return; } // check if we are running along with A-REX or standalone bool with_arex = false; if ((*cfg)["service"]["witharex"] && (std::string)(*cfg)["service"]["witharex"] == "true") with_arex = true; // start Generator for data staging dtr_generator = new CandyPondGenerator(config, with_arex); valid = true; } CandyPond::~CandyPond(void) { if (dtr_generator) { delete dtr_generator; dtr_generator = NULL; } } Arc::MCC_Status CandyPond::CacheCheck(Arc::XMLNode in, Arc::XMLNode out, const Arc::User& mapped_user) { /* Accepts: url ... Returns url true 1234 ... 
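     Illustrative request/response shape; the element names are the ones read
     and written by the code below, and the sample values ("url", true, 1234)
     are those of the original example:

       <CacheCheck>
         <TheseFilesNeedToCheck>
           <FileURL>url</FileURL>
           ...
         </TheseFilesNeedToCheck>
       </CacheCheck>

       <CacheCheckResponse>
         <CacheCheckResult>
           <Result>
             <FileURL>url</FileURL>
             <ExistInTheCache>true</ExistInTheCache>
             <FileSize>1234</FileSize>
           </Result>
           ...
         </CacheCheckResult>
       </CacheCheckResponse>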
*/ // substitute cache paths according to mapped user ARex::CacheConfig cache_params(config.CacheParams()); cache_params.substitute(config, mapped_user); Arc::FileCache cache(cache_params.getCacheDirs(), cache_params.getDrainingCacheDirs(), cache_params.getReadOnlyCacheDirs(), "0", mapped_user.get_uid(), mapped_user.get_gid()); if (!cache) { logger.msg(Arc::ERROR, "Error creating cache"); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheCheck", "Server error with cache"); } Arc::XMLNode resp = out.NewChild("CacheCheckResponse"); Arc::XMLNode results = resp.NewChild("CacheCheckResult"); for(int n = 0;;++n) { Arc::XMLNode id = in["CacheCheck"]["TheseFilesNeedToCheck"]["FileURL"][n]; if (!id) break; std::string fileurl = (std::string)in["CacheCheck"]["TheseFilesNeedToCheck"]["FileURL"][n]; Arc::XMLNode resultelement = results.NewChild("Result"); resultelement.NewChild("FileURL") = fileurl; bool fileexist = false; std::string file_lfn; Arc::initializeCredentialsType cred_type(Arc::initializeCredentialsType::SkipCredentials); Arc::UserConfig usercfg(cred_type); Arc::URL url(fileurl); Arc::DataHandle d(url, usercfg); if (!d) { logger.msg(Arc::ERROR, "Can't handle URL %s", fileurl); resultelement.NewChild("ExistInTheCache") = "false"; resultelement.NewChild("FileSize") = "0"; continue; } logger.msg(Arc::INFO, "Looking up URL %s", d->str()); file_lfn = cache.File(d->str()); if (file_lfn.empty()) { logger.msg(Arc::ERROR, "Empty filename returned from FileCache"); resultelement.NewChild("ExistInTheCache") = "false"; resultelement.NewChild("FileSize") = "0"; continue; } logger.msg(Arc::INFO, "Cache file is %s", file_lfn); struct stat fileStat; if (Arc::FileStat(file_lfn, &fileStat, false)) fileexist = true; else if (errno != ENOENT) logger.msg(Arc::ERROR, "Problem accessing cache file %s: %s", file_lfn, Arc::StrError(errno)); resultelement.NewChild("ExistInTheCache") = (fileexist ? "true": "false"); if (fileexist) resultelement.NewChild("FileSize") = Arc::tostring(fileStat.st_size); else resultelement.NewChild("FileSize") = "0"; } return Arc::MCC_Status(Arc::STATUS_OK); } Arc::MCC_Status CandyPond::CacheLink(Arc::XMLNode in, Arc::XMLNode out, const Arc::User& mapped_user) { /* Accepts: url // remote file name // local file on session dir ... uname 123456789 90 false Returns: url 0 success ... */ // read in inputs bool dostage = false; if (in["CacheLink"]["Stage"]) dostage = ((std::string)in["CacheLink"]["Stage"] == "true") ? 
true : false; Arc::XMLNode jobidnode = in["CacheLink"]["JobID"]; if (!jobidnode) { logger.msg(Arc::ERROR, "No job ID supplied"); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheLink", "Bad input (no JobID specified)"); } std::string jobid = (std::string)jobidnode; int priority = 50; Arc::XMLNode prioritynode = in["CacheLink"]["Priority"]; if (prioritynode) { if (!Arc::stringto((std::string)prioritynode, priority)) { logger.msg(Arc::ERROR, "Bad number in priority element: %s", (std::string)prioritynode); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheLink", "Bad input (bad number in Priority)"); } if (priority <= 0) priority = 1; if (priority > 100) priority = 100; } Arc::XMLNode uname = in["CacheLink"]["Username"]; if (!uname) { logger.msg(Arc::ERROR, "No username supplied"); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheLink", "Bad input (no Username specified)"); } std::string username = (std::string)uname; // TODO: try to force mapping to supplied user if (username != mapped_user.Name()) { logger.msg(Arc::ERROR, "Supplied username %s does not match mapped username %s", username, mapped_user.Name()); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheLink", "Supplied username does not match mapped user"); } // check job id and session dir are ok // substitute session dirs and use tmp configuration to find the one for this job std::vector sessions = config.SessionRoots(); for (std::vector::iterator session = sessions.begin(); session != sessions.end(); ++session) { config.Substitute(*session, mapped_user); } ARex::GMConfig tmp_config; tmp_config.SetSessionRoot(sessions); std::string session_root = tmp_config.SessionRoot(jobid); if (session_root.empty()) { logger.msg(Arc::ERROR, "No session directory found"); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheLink", "No session directory found for supplied Job ID"); } std::string session_dir = session_root + '/' + jobid; logger.msg(Arc::INFO, "Using session dir %s", session_dir); struct stat fileStat; if (!Arc::FileStat(session_dir, &fileStat, true)) { logger.msg(Arc::ERROR, "Failed to stat session dir %s", session_dir); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheLink", "Failed to access session dir"); } // check permissions - owner must be same as mapped user if (fileStat.st_uid != mapped_user.get_uid()) { logger.msg(Arc::ERROR, "Session dir %s is owned by %i, but current mapped user is %i", session_dir, fileStat.st_uid, mapped_user.get_uid()); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheLink", "Failed to access session dir"); } // get delegated proxy info to check permission on cached files // TODO: use credentials of caller of this service. For now ask the // delegation store for the proxy of the job. 
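  // The block below resolves the job's delegated proxy: it opens the A-REX
  // delegation store with the configured backend (Berkeley DB or SQLite),
  // reads the job's local control file to obtain its delegation id, and asks
  // the store for the corresponding credential file. That proxy is then loaded
  // through UserConfig/Credential to obtain the DN and expiry time used in the
  // cache permission checks further down.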
ARex::DelegationStore::DbType deleg_db_type = ARex::DelegationStore::DbSQLite; switch (config.DelegationDBType()) { case ARex::GMConfig::deleg_db_bdb: deleg_db_type = ARex::DelegationStore::DbBerkeley; break; case ARex::GMConfig::deleg_db_sqlite: deleg_db_type = ARex::DelegationStore::DbSQLite; break; } ARex::DelegationStore dstore(config.DelegationDir(), deleg_db_type, false); std::string proxy_path; // Read job's local file to extract delegation id ARex::JobLocalDescription job_desc; if (job_local_read_file(jobid, config, job_desc) && !job_desc.delegationid.empty()) { proxy_path = dstore.FindCred(job_desc.delegationid, job_desc.DN); } if (proxy_path.empty() || !Arc::FileStat(proxy_path, &fileStat, true)) { logger.msg(Arc::ERROR, "Failed to access proxy of given job id %s at %s", jobid, proxy_path); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheLink", "Failed to access proxy"); } Arc::UserConfig usercfg; usercfg.UtilsDirPath(config.ControlDir()); usercfg.ProxyPath(proxy_path); usercfg.InitializeCredentials(Arc::initializeCredentialsType::NotTryCredentials); std::string dn; Arc::Time exp_time; try { Arc::Credential ci(usercfg.ProxyPath(), usercfg.ProxyPath(), usercfg.CACertificatesDirectory(), ""); dn = ci.GetIdentityName(); exp_time = ci.GetEndTime(); } catch (Arc::CredentialError& e) { logger.msg(Arc::ERROR, "Couldn't handle certificate: %s", e.what()); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheLink", std::string("Error with proxy at "+proxy_path)); } logger.msg(Arc::INFO, "DN is %s", dn); // create cache // substitute cache paths according to mapped user ARex::CacheConfig cache_params(config.CacheParams()); cache_params.substitute(config, mapped_user); Arc::FileCache cache(cache_params.getCacheDirs(), cache_params.getDrainingCacheDirs(), cache_params.getReadOnlyCacheDirs(), jobid, mapped_user.get_uid(), mapped_user.get_gid()); if (!cache) { logger.msg(Arc::ERROR, "Error with cache configuration"); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheCheck", "Server error with cache"); } // set up response structure Arc::XMLNode resp = out.NewChild("CacheLinkResponse"); Arc::XMLNode results = resp.NewChild("CacheLinkResult"); std::map to_download; // files not in cache (remote, local) bool error_happened = false; // if true then don't bother with downloads at the end // loop through all files for (int n = 0;;++n) { Arc::XMLNode id = in["CacheLink"]["TheseFilesNeedToLink"]["File"][n]; if (!id) break; Arc::XMLNode f_url = id["FileURL"]; if (!f_url) break; Arc::XMLNode f_name = id["FileName"]; if (!f_name) break; std::string fileurl = (std::string)f_url; std::string filename = (std::string)f_name; std::string session_file = session_dir + '/' + filename; logger.msg(Arc::INFO, "Looking up URL %s", fileurl); Arc::URL u(fileurl); Arc::DataHandle d(u, usercfg); if (!d) { logger.msg(Arc::ERROR, "Can't handle URL %s", fileurl); add_result_element(results, fileurl, CandyPond::BadURLError, "Could not handle input URL"); error_happened = true; continue; } d->SetSecure(false); // the actual url used with the cache std::string url = d->str(); bool available = false; bool is_locked = false; if (!cache.Start(url, available, is_locked)) { if (is_locked) { add_result_element(results, fileurl, CandyPond::Locked, "File is locked"); } else { add_result_element(results, fileurl, CandyPond::CacheError, "Error starting cache"); } error_happened = true; continue; } if (!available) { cache.Stop(url); // file not in cache - the result status for these files will be set later to_download[fileurl] = 
session_file; continue; } // file is in cache - check permissions if (!cache.CheckDN(url, dn)) { Arc::DataStatus res = d->Check(false); if (!res.Passed()) { logger.msg(Arc::ERROR, "Permission checking failed: %s", url); add_result_element(results, fileurl, CandyPond::PermissionError, "Permission denied"); error_happened = true; continue; } cache.AddDN(url, dn, exp_time); logger.msg(Arc::VERBOSE, "Permission checking passed for url %s", url); } // link file bool try_again = false; // TODO add executable and copy flags to request if (!cache.Link(session_file, url, false, false, false, try_again)) { // If locked, send to DTR and let it deal with the retry strategy if (try_again) { to_download[fileurl] = session_file; continue; } // failed to link - report as if not there add_result_element(results, fileurl, CandyPond::LinkError, "Failed to link to session dir"); error_happened = true; continue; } // Successfully linked to session - move to scratch if necessary // Note: won't work if scratch is not mounted on CE if (!config.ScratchDir().empty()) { std::string scratch_file(config.ScratchDir()+'/'+jobid+'/'+filename); // Access session and scratch under mapped uid Arc::FileAccess fa; if (!fa.fa_setuid(mapped_user.get_uid(), mapped_user.get_gid()) || !fa.fa_rename(session_file, scratch_file)) { logger.msg(Arc::ERROR, "Failed to move %s to %s: %s", session_file, scratch_file, Arc::StrError(errno)); add_result_element(results, fileurl, CandyPond::LinkError, "Failed to link to move file from session dir to scratch"); error_happened = true; continue; } } // everything went ok so report success add_result_element(results, fileurl, CandyPond::Success, "Success"); } // check for any downloads to perform, only if requested and there were no previous errors if (to_download.empty() || error_happened || !dostage) { for (std::map::iterator i = to_download.begin(); i != to_download.end(); ++i) { add_result_element(results, i->first, CandyPond::NotAvailable, "File not available"); } return Arc::MCC_Status(Arc::STATUS_OK); } bool stage_start_error = false; // Loop through files to download and start a DTR for each one for (std::map::iterator i = to_download.begin(); i != to_download.end(); ++i) { // if one DTR failed to start then don't start any more // TODO cancel others already started if (stage_start_error) { add_result_element(results, i->first, CandyPond::DownloadError, "Failed to start data staging"); continue; } logger.msg(Arc::VERBOSE, "Starting new DTR for %s", i->first); if (!dtr_generator->addNewRequest(mapped_user, i->first, i->second, usercfg, jobid, priority)) { logger.msg(Arc::ERROR, "Failed to start new DTR for %s", i->first); add_result_element(results, i->first, CandyPond::DownloadError, "Failed to start data staging"); stage_start_error = true; } else { add_result_element(results, i->first, CandyPond::Staging, "Staging started"); } } return Arc::MCC_Status(Arc::STATUS_OK); } Arc::MCC_Status CandyPond::CacheLinkQuery(Arc::XMLNode in, Arc::XMLNode out) { /* Accepts: 123456789 Returns: 0 success */ Arc::XMLNode jobidnode = in["CacheLinkQuery"]["JobID"]; if (!jobidnode) { logger.msg(Arc::ERROR, "No job ID supplied"); return Arc::MCC_Status(Arc::GENERIC_ERROR, "CacheLinkQuery", "Bad input (no JobID specified)"); } std::string jobid = (std::string)jobidnode; // set up response structure Arc::XMLNode resp = out.NewChild("CacheLinkQueryResponse"); Arc::XMLNode results = resp.NewChild("CacheLinkQueryResult"); std::string error; // query Generator for DTR status if 
(dtr_generator->queryRequestsFinished(jobid, error)) { if (error.empty()) { logger.msg(Arc::INFO, "Job %s: all files downloaded successfully", jobid); add_result_element(results, "", CandyPond::Success, "Success"); } else if (error == "Job not found") { add_result_element(results, "", CandyPond::CacheError, "No such job"); } else { logger.msg(Arc::INFO, "Job %s: Some downloads failed", jobid); add_result_element(results, "", CandyPond::DownloadError, "Download failed: " + error); } } else { logger.msg(Arc::VERBOSE, "Job %s: files still downloading", jobid); add_result_element(results, "", CandyPond::Staging, "Still staging"); } return Arc::MCC_Status(Arc::STATUS_OK); } Arc::MCC_Status CandyPond::process(Arc::Message &inmsg, Arc::Message &outmsg) { // Check authorization if(!ProcessSecHandlers(inmsg, "incoming")) { logger.msg(Arc::ERROR, "CandyPond: Unauthorized"); return make_soap_fault(outmsg, "Authorization failed"); } std::string method = inmsg.Attributes()->get("HTTP:METHOD"); // find local user std::string mapped_username = inmsg.Attributes()->get("SEC:LOCALID"); if (mapped_username.empty()) { logger.msg(Arc::ERROR, "No local user mapping found"); return make_soap_fault(outmsg, "No local user mapping found"); } Arc::User mapped_user(mapped_username); if(method == "POST") { logger.msg(Arc::VERBOSE, "process: POST"); logger.msg(Arc::INFO, "Identity is %s", inmsg.Attributes()->get("TLS:PEERDN")); // Both input and output are supposed to be SOAP // Extracting payload Arc::PayloadSOAP* inpayload = NULL; try { inpayload = dynamic_cast(inmsg.Payload()); } catch(std::exception& e) { }; if(!inpayload) { logger.msg(Arc::ERROR, "input is not SOAP"); return make_soap_fault(outmsg); } // Applying known namespaces inpayload->Namespaces(ns); if(logger.getThreshold() <= Arc::VERBOSE) { std::string str; inpayload->GetDoc(str, true); logger.msg(Arc::VERBOSE, "process: request=%s",str); } // Analyzing request Arc::XMLNode op = inpayload->Child(0); if(!op) { logger.msg(Arc::ERROR, "input does not define operation"); return make_soap_fault(outmsg); } logger.msg(Arc::VERBOSE, "process: operation: %s",op.Name()); Arc::PayloadSOAP* outpayload = new Arc::PayloadSOAP(ns); outpayload->Namespaces(ns); Arc::MCC_Status result(Arc::STATUS_OK); // choose operation if (MatchXMLName(op,"CacheCheck")) { result = CacheCheck(*inpayload, *outpayload, mapped_user); } else if (MatchXMLName(op, "CacheLink")) { result = CacheLink(*inpayload, *outpayload, mapped_user); } else if (MatchXMLName(op, "CacheLinkQuery")) { result = CacheLinkQuery(*inpayload, *outpayload); } else { // unknown operation logger.msg(Arc::ERROR, "SOAP operation is not supported: %s", op.Name()); delete outpayload; return make_soap_fault(outmsg); } if (!result) return make_soap_fault(outmsg, result.getExplanation()); if (logger.getThreshold() <= Arc::VERBOSE) { std::string str; outpayload->GetDoc(str, true); logger.msg(Arc::VERBOSE, "process: response=%s", str); } outmsg.Payload(outpayload); if (!ProcessSecHandlers(outmsg,"outgoing")) { logger.msg(Arc::ERROR, "Security Handlers processing failed"); delete outmsg.Payload(NULL); return Arc::MCC_Status(); } } else { // only POST supported logger.msg(Arc::ERROR, "Only POST is supported in CandyPond"); return Arc::MCC_Status(); } return Arc::MCC_Status(Arc::STATUS_OK); } void CandyPond::add_result_element(Arc::XMLNode& results, const std::string& fileurl, CacheLinkReturnCode returncode, const std::string& reason) { Arc::XMLNode resultelement = results.NewChild("Result"); if (!fileurl.empty()) 
resultelement.NewChild("FileURL") = fileurl; resultelement.NewChild("ReturnCode") = Arc::tostring(returncode); resultelement.NewChild("ReturnCodeExplanation") = reason; } Arc::MCC_Status CandyPond::make_soap_fault(Arc::Message& outmsg, const std::string& reason) { Arc::PayloadSOAP* outpayload = new Arc::PayloadSOAP(ns,true); Arc::SOAPFault* fault = outpayload?outpayload->Fault():NULL; if(fault) { fault->Code(Arc::SOAPFault::Sender); if (reason.empty()) fault->Reason("Failed processing request"); else fault->Reason("Failed processing request: "+reason); } outmsg.Payload(outpayload); return Arc::MCC_Status(Arc::STATUS_OK); } } // namespace CandyPond extern Arc::PluginDescriptor const ARC_PLUGINS_TABLE_NAME[] = { { "candypond", "HED:SERVICE", NULL, 0, &CandyPond::get_service }, { NULL, NULL, NULL, 0, NULL } }; nordugrid-arc-6.14.0/src/services/candypond/PaxHeaders.30264/CandyPondGenerator.h0000644000000000000000000000013114152153376025633 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.681581699 nordugrid-arc-6.14.0/src/services/candypond/CandyPondGenerator.h0000644000175000002070000000571714152153376025633 0ustar00mockbuildmock00000000000000#ifndef CANDYPONDGENERATOR_H_ #define CANDYPONDGENERATOR_H_ #include #include #include "../a-rex/grid-manager/conf/StagingConfig.h" namespace CandyPond { /// DTR Generator for CandyPond. class CandyPondGenerator : public DataStaging::DTRCallback { private: /// Scheduler object to process DTRs. DataStaging::Scheduler* scheduler; /// Generator state DataStaging::ProcessState generator_state; /// Whether to use the host certificate when communicating with remote delivery bool use_host_cert; /// Scratch directory used by job std::string scratch_dir; /// Whether we are running with A-REX or we manage the Scheduler ourselves bool run_with_arex; /// A-REX configuration const ARex::GMConfig& config; /// Staging configuration ARex::StagingConfig staging_conf; /// Map of job id to DTRs std::multimap processing_dtrs; /// Lock for DTR map Arc::SimpleCondition processing_lock; /// Map of job id to error message, if any std::map finished_jobs; /// Lock for finished job map Arc::SimpleCondition finished_lock; /// Logger static Arc::Logger logger; public: /// Start Generator and get Scheduler instance. /** * If with_arex is true then it is assumed that A-REX takes care of * configuring, starting and stopping the DTR Scheduler. If CandyPond * is run outside of A-REX then it starts an independent DTR instance, * using parameters given in arc.conf. * @param config A-REX configuration * @param with_arex If true then we assume A-REX starts the scheduler, if * false then we start and stop it. */ CandyPondGenerator(const ARex::GMConfig& config, bool with_arex); /// Stop Scheduler if we are not running with A-REX ~CandyPondGenerator(); /// Callback method to receive completed DTRs void receiveDTR(DataStaging::DTR_ptr dtr); /// Add a new request. /** * @param user User for this transfer * @param source Source file * @param destination Destination file * @param usercfg UserConfig with proxy information * @param jobid Job identifier * @param priority DTR priority */ bool addNewRequest(const Arc::User& user, const std::string& source, const std::string& destination, const Arc::UserConfig& usercfg, const std::string& jobid, int priority); /// Query requests for given job id. /** * @param jobid Job ID to query * @param error If any DTR finished with an error, the description is put * in error. 
* @return True if all requests for the job have finished, false otherwise */ bool queryRequestsFinished(const std::string& jobid, std::string& error); }; } // namespace CandyPond #endif /* CANDYPONDGENERATOR_H_ */ nordugrid-arc-6.14.0/src/services/candypond/PaxHeaders.30264/CandyPondGenerator.cpp0000644000000000000000000000013114152153376026166 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.681581699 nordugrid-arc-6.14.0/src/services/candypond/CandyPondGenerator.cpp0000644000175000002070000002124314152153376026156 0ustar00mockbuildmock00000000000000#include #include #include "../a-rex/grid-manager/conf/UrlMapConfig.h" #include "CandyPondGenerator.h" namespace CandyPond { Arc::Logger CandyPondGenerator::logger(Arc::Logger::rootLogger, "CandyPondGenerator"); CandyPondGenerator::CandyPondGenerator(const ARex::GMConfig& conf, bool with_arex) : generator_state(DataStaging::INITIATED), use_host_cert(false), scratch_dir(conf.ScratchDir()), run_with_arex(with_arex), config(conf), staging_conf(config) { scheduler = DataStaging::Scheduler::getInstance(); if (run_with_arex) { // A-REX sets DTR configuration generator_state = DataStaging::RUNNING; return; } if (!staging_conf) return; // Convert A-REX configuration values to DTR configuration // TODO find location for DTR state log, should be different from A-REX's // Log level for DTR DataStaging::DTR::LOG_LEVEL = staging_conf.get_log_level(); // Processing limits scheduler->SetSlots(staging_conf.get_max_processor(), staging_conf.get_max_processor(), staging_conf.get_max_delivery(), staging_conf.get_max_emergency(), staging_conf.get_max_prepared()); // Transfer shares DataStaging::TransferSharesConf share_conf(staging_conf.get_share_type(), staging_conf.get_defined_shares()); scheduler->SetTransferSharesConf(share_conf); // Transfer limits DataStaging::TransferParameters transfer_limits; transfer_limits.min_current_bandwidth = staging_conf.get_min_speed(); transfer_limits.averaging_time = staging_conf.get_min_speed_time(); transfer_limits.min_average_bandwidth = staging_conf.get_min_average_speed(); transfer_limits.max_inactivity_time = staging_conf.get_max_inactivity_time(); scheduler->SetTransferParameters(transfer_limits); // URL mappings ARex::UrlMapConfig url_map(config); scheduler->SetURLMapping(url_map); // Preferred pattern scheduler->SetPreferredPattern(staging_conf.get_preferred_pattern()); // Delivery services scheduler->SetDeliveryServices(staging_conf.get_delivery_services()); // Limit on remote delivery size scheduler->SetRemoteSizeLimit(staging_conf.get_remote_size_limit()); // Set whether to use host cert for remote delivery use_host_cert = staging_conf.get_use_host_cert_for_remote_delivery(); // End of configuration - start Scheduler thread scheduler->start(); generator_state = DataStaging::RUNNING; } CandyPondGenerator::~CandyPondGenerator() { generator_state = DataStaging::STOPPED; if (!run_with_arex) scheduler->stop(); // delete scheduler? it is possible another thread is using the static instance } void CandyPondGenerator::receiveDTR(DataStaging::DTR_ptr dtr) { // Take DTR out of processing map and add to finished jobs logger.msg(Arc::INFO, "DTR %s finished with state %s", dtr->get_id(), dtr->get_status().str()); std::string jobid (dtr->get_parent_job_id()); // Add to finished jobs std::string error_msg; if (dtr->error()) error_msg = dtr->get_error_status().GetDesc() + ". 
"; finished_lock.lock(); finished_jobs[jobid] += error_msg; finished_lock.unlock(); // remove from processing jobs processing_lock.lock(); std::pair::iterator, std::multimap::iterator> dtr_iterator = processing_dtrs.equal_range(jobid); if (dtr_iterator.first == dtr_iterator.second) { processing_lock.unlock(); logger.msg(Arc::WARNING, "No active job id %s", jobid); return; } // remove this DTR from the processing list for (std::multimap::iterator i = dtr_iterator.first; i != dtr_iterator.second; ++i) { if (i->second->get_id() == dtr->get_id()) { processing_dtrs.erase(i); break; } } processing_lock.unlock(); // Move to scratch if necessary if (!dtr->error() && !scratch_dir.empty()) { // Get filename relative to session dir std::string session_file = dtr->get_destination()->GetURL().Path(); std::string::size_type pos = session_file.find(jobid); if (pos == std::string::npos) { logger.msg(Arc::ERROR, "Could not determine session directory from filename %s", session_file); finished_lock.lock(); finished_jobs[jobid] += "Could not determine session directory from filename for during move to scratch. "; finished_lock.unlock(); return; } std::string scratch_file(scratch_dir+'/'+session_file.substr(pos)); // Access session and scratch under mapped uid Arc::FileAccess fa; if (!fa.fa_setuid(dtr->get_local_user().get_uid(), dtr->get_local_user().get_gid()) || !fa.fa_rename(session_file, scratch_file)) { logger.msg(Arc::ERROR, "Failed to move %s to %s: %s", session_file, scratch_file, Arc::StrError(errno)); finished_lock.lock(); finished_jobs[jobid] += "Failed to move file from session dir to scratch. "; finished_lock.unlock(); } } } bool CandyPondGenerator::addNewRequest(const Arc::User& user, const std::string& source, const std::string& destination, const Arc::UserConfig& usercfg, const std::string& jobid, int priority) { if (generator_state != DataStaging::RUNNING) return false; std::list logs; // Logger destinations for this DTR. Uses a string stream to keep log in memory rather // than a file. LogStream keeps a reference to the stream so we have to use // a pointer. The LogDestinations are deleted when the DTR is received back. 
// TODO: provide access to this log somehow std::stringstream * stream = new std::stringstream(); Arc::LogDestination * output = new Arc::LogStream(*stream); logs.push_back(output); DataStaging::DTR_ptr dtr(new DataStaging::DTR(source, destination, usercfg, jobid, user.get_uid(), logs, "DataStaging")); if (!(*dtr)) { logger.msg(Arc::ERROR, "Invalid DTR for source %s, destination %s", source, destination); return false; } // set retry count (tmp errors only) dtr->set_tries_left(staging_conf.get_max_retries()); // set priority dtr->set_priority(priority); // set whether to use A-REX host certificate for remote delivery services dtr->host_cert_for_remote_delivery(use_host_cert); // use a separate share from A-REX downloads dtr->set_sub_share("candypond-download"); // substitute cache paths based on user ARex::CacheConfig cache_params(config.CacheParams()); cache_params.substitute(config, user); DataStaging::DTRCacheParameters cache_parameters; cache_parameters.cache_dirs = cache_params.getCacheDirs(); // we are definitely going to download so read-only caches are not useful here dtr->set_cache_parameters(cache_parameters); dtr->registerCallback(this, DataStaging::GENERATOR); dtr->registerCallback(scheduler, DataStaging::SCHEDULER); processing_lock.lock(); processing_dtrs.insert(std::pair(jobid, dtr)); processing_lock.unlock(); // Avoid logging when possible during scheduler submission because it gets // blocked by LFC calls locking the environment Arc::LogLevel log_level = Arc::Logger::getRootLogger().getThreshold(); Arc::Logger::getRootLogger().setThreshold(Arc::ERROR); DataStaging::DTR::push(dtr, DataStaging::SCHEDULER); Arc::Logger::getRootLogger().setThreshold(log_level); return true; } bool CandyPondGenerator::queryRequestsFinished(const std::string& jobid, std::string& error) { // First check currently processing DTRs processing_lock.lock(); if (processing_dtrs.find(jobid) != processing_dtrs.end()) { logger.msg(Arc::VERBOSE, "DTRs still running for job %s", jobid); processing_lock.unlock(); return false; } processing_lock.unlock(); // Now check finished jobs finished_lock.lock(); if (finished_jobs.find(jobid) != finished_jobs.end()) { logger.msg(Arc::VERBOSE, "All DTRs finished for job %s", jobid); error = finished_jobs[jobid]; finished_lock.unlock(); return true; } // Job not running or finished - report error logger.msg(Arc::WARNING, "Job %s not found", jobid); error = "Job not found"; return true; } } // namespace CandyPond nordugrid-arc-6.14.0/src/services/candypond/PaxHeaders.30264/README0000644000000000000000000000013114152153376022614 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.678581654 nordugrid-arc-6.14.0/src/services/candypond/README0000644000175000002070000000020614152153376022600 0ustar00mockbuildmock00000000000000The cache service is a service inside HED which exposes some operations on the A-REX cache to remote clients through a WS interface. 
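A minimal sketch of such a remote client is shown below. It builds a CacheCheck request for a single URL and prints the ExistInTheCache/FileSize fields of each Result element, using the same Arc::XMLNode accessors as the service code above. The endpoint URL is hypothetical, and the header locations, the credential initialization and the Arc::MCCConfig/Arc::ClientSOAP construction follow the generic ARC SDK client pattern rather than anything shipped in this package, so treat them as assumptions to be adapted to a real deployment.

// CacheCheck client sketch (assumed endpoint and header paths, see note above).
#include <iostream>
#include <string>

#include <arc/URL.h>
#include <arc/UserConfig.h>
#include <arc/XMLNode.h>
#include <arc/communication/ClientInterface.h>
#include <arc/message/PayloadSOAP.h>

int main() {
  // Pick up the caller's proxy/CA certificates (assumption: TryCredentials is enough here).
  Arc::initializeCredentialsType cred_type(Arc::initializeCredentialsType::TryCredentials);
  Arc::UserConfig usercfg(cred_type);
  Arc::MCCConfig mcc_cfg;
  usercfg.ApplyToConfig(mcc_cfg);

  // Hypothetical CandyPond endpoint - in practice taken from site configuration.
  Arc::URL endpoint("https://ce.example.org:443/candypond");
  Arc::ClientSOAP client(mcc_cfg, endpoint, 60);

  // Build <CacheCheck><TheseFilesNeedToCheck><FileURL>...</FileURL>...
  Arc::NS ns;
  Arc::PayloadSOAP request(ns);
  Arc::XMLNode check = request.NewChild("CacheCheck");
  check.NewChild("TheseFilesNeedToCheck").NewChild("FileURL") =
      "gsiftp://example.org/data/input1";

  Arc::PayloadSOAP* response = NULL;
  Arc::MCC_Status status = client.process(&request, &response);
  if (!status || !response) {
    std::cerr << "CacheCheck call failed" << std::endl;
    return 1;
  }

  // Walk CacheCheckResponse/CacheCheckResult/Result exactly as the service writes it.
  for (int n = 0;; ++n) {
    Arc::XMLNode result = (*response)["CacheCheckResponse"]["CacheCheckResult"]["Result"][n];
    if (!result) break;
    std::cout << (std::string)result["FileURL"]
              << " cached=" << (std::string)result["ExistInTheCache"]
              << " size=" << (std::string)result["FileSize"] << std::endl;
  }
  delete response;
  return 0;
}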
nordugrid-arc-6.14.0/src/services/PaxHeaders.30264/wrappers0000644000000000000000000000013014152153474021541 xustar000000000000000029 mtime=1638455100.72958242 30 atime=1638455103.997631524 29 ctime=1638455100.72958242 nordugrid-arc-6.14.0/src/services/wrappers/0000755000175000002070000000000014152153474021605 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/wrappers/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376023655 xustar000000000000000030 mtime=1638455038.443646545 30 atime=1638455038.515647627 30 ctime=1638455100.726582375 nordugrid-arc-6.14.0/src/services/wrappers/Makefile.am0000644000175000002070000000017114152153376023641 0ustar00mockbuildmock00000000000000if PYTHON_SERVICE PYTHON_WRAPPER = python else PYTHON_WRAPPER = endif SUBDIRS = $(PYTHON_WRAPPER) DIST_SUBDIRS = python nordugrid-arc-6.14.0/src/services/wrappers/PaxHeaders.30264/Makefile.in0000644000000000000000000000013014152153435023660 xustar000000000000000029 mtime=1638455069.97312029 30 atime=1638455091.145438414 29 ctime=1638455100.72558236 nordugrid-arc-6.14.0/src/services/wrappers/Makefile.in0000644000175000002070000006123614152153435023657 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/wrappers DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = 
mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ 
AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ 
OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = 
@posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ @PYTHON_SERVICE_FALSE@PYTHON_WRAPPER = @PYTHON_SERVICE_TRUE@PYTHON_WRAPPER = python SUBDIRS = $(PYTHON_WRAPPER) DIST_SUBDIRS = python all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/wrappers/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/wrappers/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/wrappers/PaxHeaders.30264/README0000644000000000000000000000013014152153376022477 xustar000000000000000029 mtime=1638455038.44464656 30 atime=1638455038.515647627 29 ctime=1638455100.72758239 nordugrid-arc-6.14.0/src/services/wrappers/README0000644000175000002070000000004014152153376022460 0ustar00mockbuildmock00000000000000collection of language bindings nordugrid-arc-6.14.0/src/services/wrappers/PaxHeaders.30264/python0000644000000000000000000000013214152153474023064 xustar000000000000000030 mtime=1638455100.753582781 30 atime=1638455103.997631524 30 ctime=1638455100.753582781 nordugrid-arc-6.14.0/src/services/wrappers/python/0000755000175000002070000000000014152153474023126 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/wrappers/python/PaxHeaders.30264/pythonwrapper.cpp0000644000000000000000000000013114152153376026567 xustar000000000000000029 mtime=1638455038.44464656 30 atime=1638455038.515647627 30 ctime=1638455100.750582736 nordugrid-arc-6.14.0/src/services/wrappers/python/pythonwrapper.cpp0000644000175000002070000004015014152153376026555 0ustar00mockbuildmock00000000000000// based on: // http://www.codeproject.com/cpp/embedpython_1.asp // http://coding.derkeiler.com/Archive/Python/comp.lang.python/2006-11/msg01211.html #ifdef HAVE_CONFIG_H #include #endif #include "pythonwrapper.h" #include #include #include #include #include #include #ifdef __cplusplus extern "C" { #endif /* SWIG Specific object SHOULD BE SYNC WITH generated SWIG CODE */ typedef void *(*swig_converter_func)(void *); typedef struct swig_type_info *(*swig_dycast_func)(void **); typedef struct swig_type_info { const char *name; /* mangled name of this type */ const char *str; /* human readable name of this type */ 
swig_dycast_func dcast; /* dynamic cast function down a hierarchy */ struct swig_cast_info *cast; /* linked list of types that can cast into this type */ void *clientdata; /* language specific type data */ int owndata; /* flag if the structure owns the clientdata */ } swig_type_info; /* Structure to store a type and conversion function used for casting */ typedef struct swig_cast_info { swig_type_info *type; /* pointer to type that is equivalent to this type */ swig_converter_func converter; /* function to cast the void pointers */ struct swig_cast_info *next; /* pointer to next cast in linked list */ struct swig_cast_info *prev; /* pointer to the previous cast */ } swig_cast_info; typedef struct { PyObject_HEAD void *ptr; swig_type_info *ty; int own; PyObject *next; } PySwigObject; #ifdef __cplusplus } #endif void *extract_swig_wrappered_pointer(PyObject *obj) { char this_str[] = "this"; if (!PyObject_HasAttrString(obj, this_str)) { return NULL; } PyObject *thisAttr = PyObject_GetAttrString(obj, this_str); if (thisAttr == NULL) { return NULL; } void* ptr = ((PySwigObject *)thisAttr)->ptr; Py_DECREF(thisAttr); return ptr; } // Thread state of main python interpreter thread static PyThreadState *tstate = NULL; static int python_service_counter = 0; static Glib::Mutex service_lock; Arc::Logger Arc::Service_PythonWrapper::logger(Service::logger, "PythonWrapper"); static Arc::Plugin* get_service(Arc::PluginArgument* arg) { Arc::ServicePluginArgument* srvarg = arg?dynamic_cast(arg):NULL; if(!srvarg) return NULL; Arc::ChainContext* ctx = (Arc::ChainContext*)(*srvarg); // ((Arc::PluginsFactory*)(*ctx))->load("pythonservice",false,true); // doesn't work, why? ::dlopen(((Arc::PluginsFactory*)(*ctx))->findLocation("pythonservice").c_str(),RTLD_NOW | RTLD_GLOBAL); service_lock.lock(); // Initialize the Python Interpreter if (!Py_IsInitialized()) { Py_InitializeEx(0); // python does not handle signals PyEval_InitThreads(); // Main thread created and lock acquired tstate = PyThreadState_Get(); // Get current thread if(tstate == NULL) { Arc::Logger::getRootLogger().msg(Arc::ERROR, "Failed to initialize main Python thread"); return NULL; } } else { if(tstate == NULL) { Arc::Logger::getRootLogger().msg(Arc::ERROR, "Main Python thread was not initialized"); return NULL; } PyEval_AcquireThread(tstate); } python_service_counter++; Arc::Logger::getRootLogger().msg(Arc::DEBUG, "Loading %u-th Python service", python_service_counter); service_lock.unlock(); Arc::Service* service = new Arc::Service_PythonWrapper((Arc::Config*)(*srvarg),arg); PyEval_ReleaseThread(tstate); // Release current thread Arc::Logger::getRootLogger().msg(Arc::DEBUG, "Initialized %u-th Python service", python_service_counter); return service; } extern Arc::PluginDescriptor const ARC_PLUGINS_TABLE_NAME[] = { { "pythonservice", "HED:SERVICE", NULL, 0, &get_service }, { NULL, NULL, NULL, 0, NULL } }; namespace Arc { Service_PythonWrapper::Service_PythonWrapper(Arc::Config *cfg,Arc::PluginArgument* parg):Service(cfg,parg), initialized(false) { PyObject *py_module_name = NULL; PyObject *py_arc_module_name = NULL; PyObject *dict = NULL; PyObject *arc_dict = NULL; PyObject *arc_cfg_klass = NULL; PyObject *arg = NULL; PyObject *py_cfg = NULL; PyObject *klass = NULL; arc_module = NULL; module = NULL; object = NULL; if (tstate == NULL) { logger.msg(Arc::ERROR, "Main Python thread is not initialized"); return; } //PyEval_AcquireThread(tstate); std::string path = (std::string)(*cfg)["ClassName"]; std::size_t p = path.rfind("."); if (p == 
std::string::npos) { logger.msg(Arc::ERROR, "Invalid class name"); return; } std::string module_name = path.substr(0, p); std::string class_name = path.substr(p+1, path.length()); logger.msg(Arc::VERBOSE, "class name: %s", class_name); logger.msg(Arc::VERBOSE, "module name: %s", module_name); // Convert module name to Python string #if PY_MAJOR_VERSION >= 3 py_module_name = PyUnicode_FromString(module_name.c_str()); #else py_module_name = PyString_FromString(module_name.c_str()); #endif if (py_module_name == NULL) { logger.msg(Arc::ERROR, "Cannot convert module name to Python string"); if (PyErr_Occurred()) PyErr_Print(); return; } // Load module module = PyImport_Import(py_module_name); if (module == NULL) { logger.msg(Arc::ERROR, "Cannot import module"); if (PyErr_Occurred()) PyErr_Print(); Py_DECREF(py_module_name); return; } Py_DECREF(py_module_name); // Import ARC python wrapper #if PY_MAJOR_VERSION >= 3 py_arc_module_name = PyUnicode_FromString("arc"); #else py_arc_module_name = PyString_FromString("arc"); #endif if (py_arc_module_name == NULL) { logger.msg(Arc::ERROR, "Cannot convert ARC module name to Python string"); if (PyErr_Occurred()) PyErr_Print(); return; } // Load arc module arc_module = PyImport_Import(py_arc_module_name); if (arc_module == NULL) { logger.msg(Arc::ERROR, "Cannot import ARC module"); if (PyErr_Occurred()) PyErr_Print(); Py_DECREF(py_arc_module_name); return; } Py_DECREF(py_arc_module_name); // arc_dict is a borrowed reference arc_dict = PyModule_GetDict(arc_module); if (arc_dict == NULL) { logger.msg(Arc::ERROR, "Cannot get dictionary of ARC module"); if (PyErr_Occurred()) PyErr_Print(); return; } // Get the arc config class // arc_cfg_klass is a borrowed reference arc_cfg_klass = PyDict_GetItemString(arc_dict, "Config"); if (arc_cfg_klass == NULL) { logger.msg(Arc::ERROR, "Cannot find ARC Config class"); if (PyErr_Occurred()) PyErr_Print(); return; } // check is it really a class if (!PyCallable_Check(arc_cfg_klass)) { logger.msg(Arc::ERROR, "Config class is not an object"); return; } // Get dictionary of module content // dict is a borrowed reference dict = PyModule_GetDict(module); if (dict == NULL) { logger.msg(Arc::ERROR, "Cannot get dictionary of module"); if (PyErr_Occurred()) PyErr_Print(); return; } // Get the class // klass is a borrowed reference klass = PyDict_GetItemString(dict, (char*)class_name.c_str()); if (klass == NULL) { logger.msg(Arc::ERROR, "Cannot find service class"); if (PyErr_Occurred()) PyErr_Print(); return; } // check is it really a class if (PyCallable_Check(klass)) { arg = Py_BuildValue("(l)", (long int)cfg); if (arg == NULL) { logger.msg(Arc::ERROR, "Cannot create config argument"); if (PyErr_Occurred()) PyErr_Print(); return; } py_cfg = PyObject_CallObject(arc_cfg_klass, arg); if (py_cfg == NULL) { logger.msg(Arc::ERROR, "Cannot convert config to Python object"); if (PyErr_Occurred()) PyErr_Print(); Py_DECREF(arg); return; } Py_DECREF(arg); arg = Py_BuildValue("(O)", py_cfg); if (arg == NULL) { logger.msg(Arc::ERROR, "Cannot create argument of the constructor"); if (PyErr_Occurred()) PyErr_Print(); return; } // create instance of class object = PyObject_CallObject(klass, arg); if (object == NULL) { logger.msg(Arc::ERROR, "Cannot create instance of Python class"); if (PyErr_Occurred()) PyErr_Print(); return; } Py_DECREF(arg); } else { logger.msg(Arc::ERROR, "%s is not an object", class_name); return; } // check is it really a class if (!PyCallable_Check(klass)) { logger.msg(Arc::ERROR, "Message class is not an object"); 
return; } //tstate = PyGILState_GetThisThreadState(); //PyEval_ReleaseThread(tstate); logger.msg(Arc::VERBOSE, "Python Wrapper constructor succeeded"); initialized = true; } Service_PythonWrapper::~Service_PythonWrapper(void) { service_lock.lock(); PyEval_AcquireThread(tstate); // Release python objects - it is needed for Python // destructors to be called if(arc_module) { Py_DECREF(arc_module); } if(module) { Py_DECREF(module); } if(object) { Py_DECREF(object); } // Finish the Python Interpreter python_service_counter--; logger.msg(Arc::VERBOSE, "Python Wrapper destructor (%d)", python_service_counter); if (python_service_counter == 0) { Py_Finalize(); } else { PyEval_ReleaseThread(tstate); } service_lock.unlock(); } Arc::MCC_Status Service_PythonWrapper::make_fault(Arc::Message& outmsg) { Arc::PayloadSOAP* outpayload = new Arc::PayloadSOAP(Arc::NS(),true); Arc::SOAPFault* fault = outpayload->Fault(); if(fault) { fault->Code(Arc::SOAPFault::Sender); fault->Reason("Failed processing request"); }; outmsg.Payload(outpayload); return Arc::MCC_Status(); } /* Arc::MCC_Status Service_PythonWrapper::python_error(const char *str) { return Arc::MCC_Status(Arc::GENERIC_ERROR); }*/ class PythonLock { private: PyGILState_STATE gstate_; Arc::Logger& logger_; public: PythonLock(Arc::Logger& logger):logger_(logger) { gstate_ = PyGILState_Ensure(); logger_.msg(Arc::VERBOSE, "Python interpreter locked"); }; ~PythonLock(void) { PyGILState_Release(gstate_); logger_.msg(Arc::VERBOSE, "Python interpreter released"); }; }; class XMLNodeP { private: Arc::XMLNode* obj_; public: XMLNodeP(Arc::XMLNode& node):obj_(NULL) { try { obj_ = new Arc::XMLNode(node); } catch(std::exception& e) { }; }; ~XMLNodeP(void) { if(obj_) delete obj_; }; XMLNode& operator*(void) const { return *obj_; }; XMLNode* operator->(void) const { return obj_; }; operator bool(void) { return (obj_ != NULL); }; bool operator!(void) { return (obj_ == NULL); }; operator long int(void) { return (long int)obj_; }; private: XMLNodeP(); XMLNodeP(XMLNodeP const&); XMLNodeP& operator=(XMLNodeP const&); }; class SOAPMessageP { private: Arc::SOAPMessage* obj_; public: SOAPMessageP(Arc::Message& msg):obj_(NULL) { try { obj_ = new Arc::SOAPMessage(msg); } catch(std::exception& e) { }; }; ~SOAPMessageP(void) { if(obj_) delete obj_; }; SOAPMessage& operator*(void) const { return *obj_; }; SOAPMessage* operator->(void) const { return obj_; }; operator bool(void) { return (obj_ != NULL); }; bool operator!(void) { return (obj_ == NULL); }; operator long int(void) { return (long int)obj_; }; private: SOAPMessageP(); SOAPMessageP(SOAPMessageP const&); SOAPMessageP& operator=(SOAPMessageP const&); }; class PyObjectP { private: PyObject* obj_; PyObjectP(); PyObjectP& operator=(PyObjectP const&); public: PyObjectP(PyObjectP const& p):obj_(p.obj_) { Py_INCREF(obj_); }; PyObjectP(PyObject* obj):obj_(obj) { }; ~PyObjectP(void) { if(obj_) { Py_DECREF(obj_); } }; operator bool(void) { return (obj_ != NULL); }; bool operator!(void) { return (obj_ == NULL); }; operator PyObject*(void) { return obj_; }; }; Arc::MCC_Status Service_PythonWrapper::process(Arc::Message& inmsg, Arc::Message& outmsg) { //PyObject *py_status = NULL; //PyObject *py_inmsg = NULL; //PyObject *py_outmsg = NULL; PyObject *arg = NULL; logger.msg(Arc::VERBOSE, "Python wrapper process called"); if(!initialized) return Arc::MCC_Status(); PythonLock plock(logger); // Convert in message to SOAP message Arc::SOAPMessageP inmsg_ptr(inmsg); if(!inmsg_ptr) { logger.msg(Arc::ERROR, "Failed to create input SOAP 
container"); return make_fault(outmsg); } if(!inmsg_ptr->Payload()) { logger.msg(Arc::ERROR, "input is not SOAP"); return make_fault(outmsg); } // Convert incoming message to python object arg = Py_BuildValue("(l)", (long int)inmsg_ptr); if (arg == NULL) { logger.msg(Arc::ERROR, "Cannot create inmsg argument"); if (PyErr_Occurred()) PyErr_Print(); return make_fault(outmsg); } // arc_dict is a borrowed reference PyObject *arc_dict = PyModule_GetDict(arc_module); if (arc_dict == NULL) { logger.msg(Arc::ERROR, "Cannot get dictionary of ARC module"); if (PyErr_Occurred()) PyErr_Print(); return make_fault(outmsg); } // arc_msg_klass is a borrowed reference PyObject *arc_msg_klass = PyDict_GetItemString(arc_dict, "SOAPMessage"); if (arc_msg_klass == NULL) { logger.msg(Arc::ERROR, "Cannot find ARC Message class"); if (PyErr_Occurred()) PyErr_Print(); return make_fault(outmsg); } PyObjectP py_inmsg(PyObject_CallObject(arc_msg_klass, arg)); if (!py_inmsg) { logger.msg(Arc::ERROR, "Cannot convert inmsg to Python object"); if (PyErr_Occurred()) PyErr_Print(); Py_DECREF(arg); return make_fault(outmsg); } Py_DECREF(arg); Arc::SOAPMessageP outmsg_ptr(outmsg); if(!outmsg_ptr) { logger.msg(Arc::ERROR, "Failed to create SOAP containers"); return make_fault(outmsg); } // Convert incoming and outcoming messages to python objects arg = Py_BuildValue("(l)", (long int)outmsg_ptr); if (arg == NULL) { logger.msg(Arc::ERROR, "Cannot create outmsg argument"); if (PyErr_Occurred()) PyErr_Print(); return make_fault(outmsg); } PyObjectP py_outmsg(PyObject_CallObject(arc_msg_klass, arg)); if (!py_outmsg) { logger.msg(Arc::ERROR, "Cannot convert outmsg to Python object"); if (PyErr_Occurred()) PyErr_Print(); Py_DECREF(arg); return make_fault(outmsg); } Py_DECREF(arg); // Call the process method PyObjectP py_status(PyObject_CallMethod(object, (char*)"process", (char*)"(OO)", (PyObject*)py_inmsg, (PyObject*)py_outmsg)); if (!py_status) { if (PyErr_Occurred()) PyErr_Print(); return make_fault(outmsg); } MCC_Status *status_ptr2 = (MCC_Status *)extract_swig_wrappered_pointer(py_status); Arc::MCC_Status status; if(status_ptr2) status=(*status_ptr2); { // std::string str = (std::string)status; // std::cout << "status: " << str << std::endl; }; SOAPMessage *outmsg_ptr2 = (SOAPMessage *)extract_swig_wrappered_pointer(py_outmsg); if(outmsg_ptr2 == NULL) return make_fault(outmsg); SOAPEnvelope *p = outmsg_ptr2->Payload(); if(p == NULL) return make_fault(outmsg); { // std::string xml; // if(p) p->GetXML(xml); // std::cout << "XML: " << xml << std::endl; }; Arc::PayloadSOAP *pl = new Arc::PayloadSOAP(*p); { // std::string xml; // pl->GetXML(xml); // std::cout << "XML: " << xml << std::endl; }; outmsg.Payload(pl); return status; } } // namespace Arc nordugrid-arc-6.14.0/src/services/wrappers/python/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376025175 xustar000000000000000029 mtime=1638455038.44464656 30 atime=1638455038.515647627 30 ctime=1638455100.749582721 nordugrid-arc-6.14.0/src/services/wrappers/python/Makefile.am0000644000175000002070000000117114152153376025163 0ustar00mockbuildmock00000000000000SUBDIRS = schema pkglib_LTLIBRARIES = libpythonservice.la libpythonservice_la_SOURCES = pythonwrapper.cpp pythonwrapper.h libpythonservice_la_CXXFLAGS = -I$(top_srcdir)/include \ $(PYTHON_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) libpythonservice_la_LDFLAGS = -no-undefined -avoid-version -module libpythonservice_la_LIBADD = \ $(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ 
$(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(PYTHON_LIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(DLOPEN_LIBS) nordugrid-arc-6.14.0/src/services/wrappers/python/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153436025204 xustar000000000000000030 mtime=1638455070.041121311 30 atime=1638455091.157438594 30 ctime=1638455100.748582705 nordugrid-arc-6.14.0/src/services/wrappers/python/Makefile.in0000644000175000002070000010321714152153436025175 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/wrappers/python DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ 
$(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(pkglibdir)" LTLIBRARIES = $(pkglib_LTLIBRARIES) am__DEPENDENCIES_1 = libpythonservice_la_DEPENDENCIES = \ $(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) am_libpythonservice_la_OBJECTS = libpythonservice_la-pythonwrapper.lo libpythonservice_la_OBJECTS = $(am_libpythonservice_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libpythonservice_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(libpythonservice_la_CXXFLAGS) $(CXXFLAGS) \ $(libpythonservice_la_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = 
$(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libpythonservice_la_SOURCES) DIST_SOURCES = $(libpythonservice_la_SOURCES) RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ 
EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ 
PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ SUBDIRS = schema pkglib_LTLIBRARIES = libpythonservice.la libpythonservice_la_SOURCES = pythonwrapper.cpp pythonwrapper.h libpythonservice_la_CXXFLAGS = -I$(top_srcdir)/include \ $(PYTHON_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) libpythonservice_la_LDFLAGS = -no-undefined -avoid-version -module libpythonservice_la_LIBADD = \ 
$(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(PYTHON_LIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(DLOPEN_LIBS) all: all-recursive .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/wrappers/python/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/wrappers/python/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): install-pkglibLTLIBRARIES: $(pkglib_LTLIBRARIES) @$(NORMAL_INSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ list2="$$list2 $$p"; \ else :; fi; \ done; \ test -z "$$list2" || { \ echo " $(MKDIR_P) '$(DESTDIR)$(pkglibdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkglibdir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(pkglibdir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(pkglibdir)"; \ } uninstall-pkglibLTLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ for p in $$list; do \ $(am__strip_dir) \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(pkglibdir)/$$f'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(pkglibdir)/$$f"; \ done clean-pkglibLTLIBRARIES: -test -z "$(pkglib_LTLIBRARIES)" || rm -f $(pkglib_LTLIBRARIES) @list='$(pkglib_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libpythonservice.la: $(libpythonservice_la_OBJECTS) $(libpythonservice_la_DEPENDENCIES) $(EXTRA_libpythonservice_la_DEPENDENCIES) $(AM_V_CXXLD)$(libpythonservice_la_LINK) -rpath $(pkglibdir) $(libpythonservice_la_OBJECTS) $(libpythonservice_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libpythonservice_la-pythonwrapper.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ 
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libpythonservice_la-pythonwrapper.lo: pythonwrapper.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libpythonservice_la_CXXFLAGS) $(CXXFLAGS) -MT libpythonservice_la-pythonwrapper.lo -MD -MP -MF $(DEPDIR)/libpythonservice_la-pythonwrapper.Tpo -c -o libpythonservice_la-pythonwrapper.lo `test -f 'pythonwrapper.cpp' || echo '$(srcdir)/'`pythonwrapper.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libpythonservice_la-pythonwrapper.Tpo $(DEPDIR)/libpythonservice_la-pythonwrapper.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='pythonwrapper.cpp' object='libpythonservice_la-pythonwrapper.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libpythonservice_la_CXXFLAGS) $(CXXFLAGS) -c -o libpythonservice_la-pythonwrapper.lo `test -f 'pythonwrapper.cpp' || echo '$(srcdir)/'`pythonwrapper.cpp mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. 
$(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! -f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(LTLIBRARIES) installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(pkglibdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool clean-pkglibLTLIBRARIES \ mostlyclean-am distclean: distclean-recursive -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-pkglibLTLIBRARIES install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-pkglibLTLIBRARIES .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool \ clean-pkglibLTLIBRARIES cscopelist-am ctags ctags-am distclean \ distclean-compile distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-pkglibLTLIBRARIES \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am \ uninstall-pkglibLTLIBRARIES # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/wrappers/python/PaxHeaders.30264/schema0000644000000000000000000000013214152153474024324 xustar000000000000000030 mtime=1638455100.771583051 30 atime=1638455103.997631524 30 ctime=1638455100.771583051 nordugrid-arc-6.14.0/src/services/wrappers/python/schema/0000755000175000002070000000000014152153474024366 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/wrappers/python/schema/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376026435 xustar000000000000000029 mtime=1638455038.44464656 30 atime=1638455038.515647627 30 ctime=1638455100.770583036 nordugrid-arc-6.14.0/src/services/wrappers/python/schema/Makefile.am0000644000175000002070000000014614152153376026424 0ustar00mockbuildmock00000000000000arcschemadir = $(pkgdatadir)/schema arcschema_DATA = pythonwrapper.xsd EXTRA_DIST = $(arcschema_DATA) nordugrid-arc-6.14.0/src/services/wrappers/python/schema/PaxHeaders.30264/Makefile.in0000644000000000000000000000013114152153436026443 xustar000000000000000030 mtime=1638455070.089122033 29 atime=1638455091.17043879 30 ctime=1638455100.769583021 nordugrid-arc-6.14.0/src/services/wrappers/python/schema/Makefile.in0000644000175000002070000005126514152153436026442 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. 
# This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/wrappers/python/schema DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = 
am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(arcschemadir)" DATA = $(arcschema_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = 
@CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = 
@PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = 
@tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ arcschemadir = $(pkgdatadir)/schema arcschema_DATA = pythonwrapper.xsd EXTRA_DIST = $(arcschema_DATA) all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/wrappers/python/schema/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/wrappers/python/schema/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-arcschemaDATA: $(arcschema_DATA) @$(NORMAL_INSTALL) @list='$(arcschema_DATA)'; test -n "$(arcschemadir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(arcschemadir)'"; \ $(MKDIR_P) "$(DESTDIR)$(arcschemadir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(arcschemadir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(arcschemadir)" || exit $$?; \ done uninstall-arcschemaDATA: @$(NORMAL_UNINSTALL) @list='$(arcschema_DATA)'; test -n "$(arcschemadir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(arcschemadir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(DATA) installdirs: for dir in "$(DESTDIR)$(arcschemadir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-arcschemaDATA install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-arcschemaDATA .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-arcschemaDATA install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags-am uninstall \ uninstall-am uninstall-arcschemaDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/wrappers/python/schema/PaxHeaders.30264/pythonwrapper.xsd0000644000000000000000000000013114152153376030043 xustar000000000000000029 mtime=1638455038.44464656 30 atime=1638455038.515647627 30 ctime=1638455100.771583051 nordugrid-arc-6.14.0/src/services/wrappers/python/schema/pythonwrapper.xsd0000644000175000002070000000124314152153376030031 0ustar00mockbuildmock00000000000000 Defines the full module name of the class which containes the service implementation. The full name should follow the rules of python 'import' command. 
nordugrid-arc-6.14.0/src/services/wrappers/python/PaxHeaders.30264/pythonwrapper.h0000644000000000000000000000013014152153376026233 xustar000000000000000029 mtime=1638455038.44464656 30 atime=1638455038.515647627 29 ctime=1638455100.75158275 nordugrid-arc-6.14.0/src/services/wrappers/python/pythonwrapper.h0000644000175000002070000000140014152153376026215 0ustar00mockbuildmock00000000000000#ifndef __ARC_SERVICE_PYTHON_WRAPPER_H__ #define __ARC_SERVICE_PYTHON_WRAPPER_H__ #include #include #include namespace Arc { class Service_PythonWrapper: public Arc::Service { protected: Arc::MCC_Status make_fault(Arc::Message& outmsg); static Arc::Logger logger; PyObject *arc_module; PyObject *module; PyObject *object; bool initialized; public: Service_PythonWrapper(Arc::Config *cfg, Arc::PluginArgument* parg); virtual ~Service_PythonWrapper(void); /** Service request processing routine */ virtual Arc::MCC_Status process(Arc::Message&, Arc::Message&); }; } // namespace Arc #endif // __ARC_SERVICE_PYTHON_WRAPPER_H__ nordugrid-arc-6.14.0/src/services/wrappers/python/PaxHeaders.30264/README0000644000000000000000000000013114152153376024021 xustar000000000000000029 mtime=1638455038.44464656 30 atime=1638455038.515647627 30 ctime=1638455100.749582721 nordugrid-arc-6.14.0/src/services/wrappers/python/README0000644000175000002070000000005214152153376024004 0ustar00mockbuildmock00000000000000service which wraps python based services nordugrid-arc-6.14.0/src/services/PaxHeaders.30264/a-rex0000644000000000000000000000013214152153474020714 xustar000000000000000030 mtime=1638455100.422577807 30 atime=1638455103.997631524 30 ctime=1638455100.422577807 nordugrid-arc-6.14.0/src/services/a-rex/0000755000175000002070000000000014152153474020756 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/PayloadFile.h0000644000000000000000000000013214152153376023334 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.500647402 30 ctime=1638455099.419562737 nordugrid-arc-6.14.0/src/services/a-rex/PayloadFile.h0000644000175000002070000000777514152153376023341 0ustar00mockbuildmock00000000000000#ifndef __ARC_PAYLOADFILE_H__ #define __ARC_PAYLOADFILE_H__ #include #include #include #include namespace ARex { /** Implementation of PayloadRawInterface which provides access to ordinary file. Currently only read-only mode is supported. */ class PayloadFile: public Arc::PayloadRawInterface { protected: /* TODO: use system-independent file access */ int handle_; char* addr_; off_t size_; off_t start_; off_t end_; void SetRead(int h,Size_t start,Size_t end); public: /** Creates object associated with file for reading from it. Use end=-1 for full size. */ PayloadFile(const char* filename,Size_t start,Size_t end); PayloadFile(int h,Size_t start,Size_t end); /** Creates object associated with file for writing into it. Use size=-1 for undefined size. 
*/ //PayloadFile(const char* filename,Size_t size); virtual ~PayloadFile(void); virtual char operator[](Size_t pos) const; virtual char* Content(Size_t pos = -1); virtual Size_t Size(void) const; virtual char* Insert(Size_t pos = 0,Size_t size = 0); virtual char* Insert(const char* s,Size_t pos = 0,Size_t size = -1); virtual char* Buffer(unsigned int num); virtual Size_t BufferSize(unsigned int num) const; virtual Size_t BufferPos(unsigned int num) const; virtual bool Truncate(Size_t size); operator bool(void) { return (handle_ != -1); }; bool operator!(void) { return (handle_ == -1); }; }; class PayloadBigFile: public Arc::PayloadStream { private: static Size_t threshold_; off_t limit_; public: /** Creates object associated with file for reading from it */ PayloadBigFile(const char* filename,Size_t start,Size_t end); PayloadBigFile(int h,Size_t start,Size_t end); /** Creates object associated with file for writing into it. Use size=-1 for undefined size. */ //PayloadBigFile(const char* filename,Size_t size); virtual ~PayloadBigFile(void); virtual Size_t Pos(void) const; virtual Size_t Size(void) const; virtual Size_t Limit(void) const; virtual bool Get(char* buf,int& size); operator bool(void) { return (handle_ != -1); }; bool operator!(void) { return (handle_ == -1); }; static Size_t Threshold(void) { return threshold_; }; static void Threshold(Size_t t) { if(t > 0) threshold_=t; }; }; class PayloadFAFile: public Arc::PayloadStreamInterface { protected: Arc::FileAccess* handle_; off_t limit_; public: /** Creates object associated with file for reading from it */ PayloadFAFile(Arc::FileAccess* h,Size_t start,Size_t end); virtual ~PayloadFAFile(void); virtual Size_t Pos(void) const; virtual Size_t Size(void) const; virtual Size_t Limit(void) const; virtual bool Get(char* buf,int& size); virtual bool Get(std::string& buf) { char cbuf[1024]; int size = sizeof(cbuf); if(!Get(cbuf,size)) return false; buf.assign(cbuf,size); return true; }; virtual std::string Get(void) { std::string buf; Get(buf); return buf; }; virtual bool Put(const char* buf,Size_t size) { return false; }; virtual bool Put(const std::string& buf) { return Put(buf.c_str(),buf.length()); }; virtual bool Put(const char* buf) { return Put(buf,buf?strlen(buf):0); }; virtual int Timeout(void) const { return 0; }; virtual void Timeout(int to) { }; operator bool(void) { return (handle_ != NULL); }; bool operator!(void) { return (handle_ == NULL); }; }; // For ranges start is inclusive and end is exclusive Arc::MessagePayload* newFileRead(const char* filename,Arc::PayloadRawInterface::Size_t start = 0,Arc::PayloadRawInterface::Size_t end = (Arc::PayloadRawInterface::Size_t)(-1)); Arc::MessagePayload* newFileRead(int h,Arc::PayloadRawInterface::Size_t start = 0,Arc::PayloadRawInterface::Size_t end = (Arc::PayloadRawInterface::Size_t)(-1)); Arc::MessagePayload* newFileRead(Arc::FileAccess* h,Arc::PayloadRawInterface::Size_t start = 0,Arc::PayloadRawInterface::Size_t end = (Arc::PayloadRawInterface::Size_t)(-1)); } // namespace ARex #endif /* __ARC_PAYLOADFILE_H__ */ nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376023026 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.500647402 30 ctime=1638455099.395562376 nordugrid-arc-6.14.0/src/services/a-rex/Makefile.am0000644000175000002070000000556014152153376023021 0ustar00mockbuildmock00000000000000if INTERNAL_ENABLED INTERNAL = internaljobplugin else INTERNAL = endif SUBDIRS = delegation grid-manager 
infoproviders lrms schema $(INTERNAL) rte rest DIST_SUBDIRS = delegation grid-manager infoproviders lrms schema internaljobplugin rte rest pkglib_LTLIBRARIES = libarex.la noinst_PROGRAMS = test_cache_check if PYTHON_LRMS_ENABLED PYTHON_LRMS_PKGLIBEXEC_FILES = arc-sshfs-mount else PYTHON_LRMS_PKGLIBEXEC_FILES = endif pkglibexec_SCRIPTS = $(PYTHON_LRMS_PKGLIBEXEC_FILES) if SYSV_SCRIPTS_ENABLED AREX_SCRIPT = arc-arex arc-arex-ws else AREX_SCRIPT = endif initd_SCRIPTS = $(AREX_SCRIPT) if SYSTEMD_UNITS_ENABLED AREX_UNIT = arc-arex.service arc-arex-ws.service else AREX_UNIT = endif units_DATA = $(AREX_UNIT) pkgdata_SCRIPTS = arc-arex-start arc-arex-ws-start perferator sbin_SCRIPTS = a-rex-backtrace-collect man_MANS = a-rex-backtrace-collect.8 EXTRA_DIST = arc-sshfs-mount arc.zero.conf GRIDMANAGER_LIBS = grid-manager/libgridmanager.la delegation/libdelegation.la rest/libarexrest.la libarex_la_SOURCES = arex.cpp job.cpp \ create_activity.cpp \ get_activity_statuses.cpp \ change_activity_status.cpp \ update_credentials.cpp faults.cpp \ get.cpp put.cpp PayloadFile.cpp FileChunks.cpp \ information_collector.cpp cachecheck.cpp tools.cpp \ arex.h job.h PayloadFile.h FileChunks.h tools.h libarex_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) # Needs real cleaning in respect to dependencies libarex_la_LIBADD = \ $(GRIDMANAGER_LIBS) \ $(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ $(top_builddir)/src/hed/libs/ws-addressing/libarcwsaddressing.la \ $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(top_builddir)/src/hed/libs/security/libarcsecurity.la \ $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/otokens/libarcotokens.la $(top_builddir)/src/hed/libs/common/libarccommon.la libarex_la_LDFLAGS = -no-undefined -avoid-version -module $(DBCXX_LIBS) test_cache_check_SOURCES = test_cache_check.cpp test_cache_check_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) test_cache_check_LDADD = \ $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLIBMM_LIBS) $(LIBXML2_LIBS) install-data-local: $(MKDIR_P) "$(DESTDIR)$(sysconfdir)" if test ! 
-e $(DESTDIR)$(sysconfdir)/arc.conf; then $(INSTALL_DATA) $(srcdir)/arc.zero.conf $(DESTDIR)$(sysconfdir)/arc.conf; fi uninstall-local: rm -f $(DESTDIR)$(sysconfdir)/arc.conf nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/tools.cpp0000644000000000000000000000013214152153376022636 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455099.416562691 nordugrid-arc-6.14.0/src/services/a-rex/tools.cpp0000644000175000002070000002763414152153376022637 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include "tools.h" namespace ARex { void convertActivityStatus(const std::string& gm_state,std::string& bes_state,std::string& arex_state,bool failed,bool pending) { if(gm_state == "ACCEPTED") { bes_state="Pending"; arex_state="Accepted"; } else if(gm_state == "PREPARING") { bes_state="Running"; arex_state=(!pending)?"Preparing":"Prepared"; } else if(gm_state == "SUBMIT") { bes_state="Running"; arex_state="Submitting"; } else if(gm_state == "INLRMS") { bes_state="Running"; arex_state=(!pending)?"Executing":"Executed"; } else if(gm_state == "FINISHING") { bes_state="Running"; arex_state="Finishing"; } else if(gm_state == "FINISHED") { if(!failed) { bes_state="Finished"; arex_state="Finished"; } else { bes_state="Failed"; arex_state="Failed"; }; } else if(gm_state == "DELETED") { // AFAIR failed is not avialable anymore. bes_state=(!failed)?"Finished":"Failed"; arex_state="Deleted"; } else if(gm_state == "CANCELING") { bes_state="Running"; arex_state="Killing"; }; } Arc::XMLNode addActivityStatus(Arc::XMLNode pnode,const std::string& gm_state,Arc::XMLNode glue_xml,bool failed,bool pending) { std::string bes_state(""); std::string arex_state(""); std::string glue_state(""); convertActivityStatus(gm_state,bes_state,arex_state,failed,pending); Arc::XMLNode state = pnode.NewChild("bes-factory:ActivityStatus"); state.NewAttribute("state")=bes_state; state.NewChild("a-rex:State")=arex_state; if(pending) state.NewChild("a-rex:State")="Pending"; if((bool)glue_xml) { Arc::XMLNode state_node = glue_xml["State"]; for(;(bool)state_node;++state_node) { std::string state = (std::string)state_node; if(state.empty()) continue; // Look for nordugrid prefix if(::strncmp("nordugrid:",state.c_str(),10) == 0) { // Remove prefix state.erase(0,10); glue_state = state; }; }; }; if(!glue_state.empty()) { std::string::size_type p = glue_state.find(':'); if(p != std::string::npos) { if(glue_state.substr(0,p) == "INLRMS") { // Extrach state of batch system state.NewChild("a-rex:LRMSState")=glue_state.substr(p+1); }; }; state.NewChild("glue:State")=glue_state; }; return state; } // primary: // accepted|preprocessing| // processing|processing-accepting|processing-queued|processing-running| // postprocessing|terminal // attribute: // validating| // server-paused| // client-paused| // client-stagein-possible| // client-stageout-possible| // provisioning| // deprovisioning| // server-stagein| // server-stageout| // batch-suspend| // app-running| // preprocessing-cancel| // processing-cancel| // postprocessing-cancel| // validation-failure| // preprocessing-failure| // processing-failure| // postprocessing-failure| // app-failure| // expired void convertActivityStatusES(const std::string& gm_state,std::string& primary_state,std::list& state_attributes,bool failed,bool pending,const std::string& failedstate,const std::string& failedcause) { bool failed_set = false; bool canceled = (failedcause == "client"); primary_state = ""; 
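  // Illustration of the mapping implemented by the branches below (examples
  // only, derived from this function; not an exhaustive list):
  //   GM "ACCEPTED"  -> primary "accepted",         attribute "client-stagein-possible"
  //   GM "PREPARING" -> primary "preprocessing",    attributes "client-stagein-possible", "server-stagein"
  //   GM "INLRMS"    -> primary "processing-queued" (no finer LRMS detail is available here)
  //   GM "FINISHED"  -> primary "terminal",         attribute "client-stageout-possible"
  // For most values of 'failedstate' an additional *-failure attribute is added,
  // or *-cancel when the failure cause is the client; 'pending' adds "server-paused".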
if(gm_state == "ACCEPTED") { primary_state="accepted"; state_attributes.push_back("client-stagein-possible"); } else if(gm_state == "PREPARING") { primary_state="preprocessing"; state_attributes.push_back("client-stagein-possible"); state_attributes.push_back("server-stagein"); } else if(gm_state == "SUBMIT") { primary_state="processing-accepting"; } else if(gm_state == "INLRMS") { // Reporting job state as not started executing yet. // Because we have no more detailed information this // is probably safest solution. primary_state="processing-queued"; } else if(gm_state == "FINISHING") { primary_state="postprocessing"; state_attributes.push_back("client-stageout-possible"); state_attributes.push_back("server-stageout"); } else if(gm_state == "FINISHED") { primary_state="terminal"; state_attributes.push_back("client-stageout-possible"); } else if(gm_state == "DELETED") { primary_state="terminal"; state_attributes.push_back("expired"); } else if(gm_state == "CANCELING") { primary_state="processing"; }; if(failedstate == "ACCEPTED") { state_attributes.push_back("validation-failure"); failed_set = true; } else if(failedstate == "PREPARING") { state_attributes.push_back(canceled?"preprocessing-cancel":"preprocessing-failure"); failed_set = true; } else if(failedstate == "SUBMIT") { state_attributes.push_back(canceled?"processing-cancel":"processing-failure"); failed_set = true; } else if(failedstate == "INLRMS") { state_attributes.push_back(canceled?"processing-cancel":"processing-failure"); // Or maybe APP-FAILURE failed_set = true; } else if(failedstate == "FINISHING") { state_attributes.push_back(canceled?"postprocessing-cancel":"postprocessing-failure"); failed_set = true; } else if(failedstate == "FINISHED") { } else if(failedstate == "DELETED") { } else if(failedstate == "CANCELING") { }; if(primary_state == "terminal") { if(failed && !failed_set) { // Must put something to mark job failed state_attributes.push_back("app-failure"); }; }; if(!primary_state.empty()) { if(pending) state_attributes.push_back("server-paused"); }; } // ActivityStatus // Status // [accepted|preprocessing| // processing|processing-accepting|processing-queued|processing-running| // postprocessing|terminal] // Attribute 0- // [validating| // server-paused| // client-paused| // client-stagein-possible| // client-stageout-possible| // provisioning| // deprovisioning| // server-stagein| // server-stageout| // batch-suspend| // app-running| // preprocessing-cancel| // processing-cancel| // postprocessing-cancel| // validation-failure| // preprocessing-failure| // processing-failure| // postprocessing-failure| // app-failure| // expired] // Timestamp (dateTime) // Description 0-1 Arc::XMLNode addActivityStatusES(Arc::XMLNode pnode,const std::string& gm_state,Arc::XMLNode glue_xml,bool failed,bool pending,const std::string& failedstate,const std::string& failedcause) { std::string primary_state; std::list state_attributes; std::string glue_state(""); convertActivityStatusES(gm_state,primary_state,state_attributes,failed,pending,failedstate,failedcause); Arc::XMLNode state = pnode.NewChild("estypes:ActivityStatus"); state.NewChild("estypes:Status") = primary_state; for(std::list::iterator st = state_attributes.begin(); st!=state_attributes.end();++st) { state.NewChild("estypes:Attribute") = *st; }; return state; } Arc::XMLNode addActivityStatusES(Arc::XMLNode pnode,Arc::XMLNode glue_xml) { std::string primary_state; std::list state_attributes; std::string glue_state(""); for(Arc::XMLNode snode = glue_xml["State"]; 
(bool)snode ; ++snode) { std::string state_str = snode; if(state_str.compare(0, 6, "emies:") == 0) { primary_state = state_str.substr(6); } else if(state_str.compare(0, 10, "emiesattr:") == 0) { state_attributes.push_back(state_str.substr(10)); } } Arc::XMLNode state = pnode.NewChild("estypes:ActivityStatus"); state.NewChild("estypes:Status") = primary_state; for(std::list::iterator st = state_attributes.begin(); st!=state_attributes.end();++st) { state.NewChild("estypes:Attribute") = *st; }; return state; } /* JobIDGeneratorARC::JobIDGeneratorARC(const std::string& endpoint):endpoint_(endpoint) { } void JobIDGeneratorARC::SetLocalID(const std::string& id) { id_ = id; } Arc::XMLNode JobIDGeneratorARC::GetGlobalID(Arc::XMLNode& pnode) { Arc::XMLNode node; if(!pnode) { Arc::NS ns; ns["bes-factory"]="http://schemas.ggf.org/bes/2006/08/bes-factory"; ns["a-rex"]="http://www.nordugrid.org/schemas/a-rex"; Arc::XMLNode(ns,"bes-factory:ActivityIdentifier").Exchange(pnode); node = pnode; } else { node = pnode.NewChild("bes-factory:ActivityIdentifier"); }; Arc::WSAEndpointReference identifier(node); // Make job's ID identifier.Address(endpoint_); // address of service identifier.ReferenceParameters().NewChild("a-rex:JobID")=id_; identifier.ReferenceParameters().NewChild("a-rex:JobSessionDir")=endpoint_+"/"+id_; return node; } std::string JobIDGeneratorARC::GetGlobalID(void) { Arc::XMLNode node; GetGlobalID(node); std::string jobid; node.GetDoc(jobid); std::string::size_type p = 0; // squeeze into 1 line while((p=jobid.find_first_of("\r\n",p)) != std::string::npos) jobid.replace(p,1," "); return jobid; } std::string JobIDGeneratorARC::GetManagerURL(void) { return endpoint_; } std::string JobIDGeneratorARC::GetJobURL(void) { return endpoint_ + "/" + id_; } std::string JobIDGeneratorARC::GetInterface(void) { return "org.nordugrid.xbes"; } std::string JobIDGeneratorARC::GetHostname(void) { return Arc::URL(endpoint_).Host(); } */ JobIDGeneratorES::JobIDGeneratorES(const std::string& endpoint):endpoint_(endpoint) { } void JobIDGeneratorES::SetLocalID(const std::string& id) { id_ = id; } Arc::XMLNode JobIDGeneratorES::GetGlobalID(Arc::XMLNode& pnode) { Arc::XMLNode node; if(!pnode) { Arc::NS ns; ns["estypes"]="http://www.eu-emi.eu/es/2010/12/types"; Arc::XMLNode(ns,"estypes:ActivityID").Exchange(pnode); node = pnode; } else { node = pnode.NewChild("estypes:ActivityID"); }; node = id_; return node; } std::string JobIDGeneratorES::GetGlobalID(void) { return id_; } std::string JobIDGeneratorES::GetManagerURL(void) { return endpoint_; } std::string JobIDGeneratorES::GetJobURL(void) { return endpoint_ + "/" + id_; } std::string JobIDGeneratorES::GetInterface(void) { return "org.ogf.glue.emies.activitycreation"; } std::string JobIDGeneratorES::GetHostname(void) { return Arc::URL(endpoint_).Host(); } std::string JobIDGeneratorREST::GetInterface(void) { return "org.nordugrid.arcrest"; } JobIDGeneratorINTERNAL::JobIDGeneratorINTERNAL(const std::string& endpoint):endpoint_(endpoint) { } void JobIDGeneratorINTERNAL::SetLocalID(const std::string& id) { id_ = id; } Arc::XMLNode JobIDGeneratorINTERNAL::GetGlobalID(Arc::XMLNode& pnode) { //To-do make something more sensible for INTERNAL plugin case Arc::XMLNode node; if(!pnode) { Arc::NS ns; ns["estypes"]="http://www.eu-emi.eu/es/2010/12/types"; Arc::XMLNode(ns,"estypes:ActivityID").Exchange(pnode); node = pnode; } else { node = pnode.NewChild("estypes:ActivityID"); }; node = id_; return node; } std::string JobIDGeneratorINTERNAL::GetGlobalID(void) { return id_; } 
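/*
 Minimal usage sketch for the JobIDGenerator family (illustration only, not part
 of the service code; the endpoint and local id values are hypothetical):

   JobIDGeneratorES gen("https://example.org:443/arex"); // assumed endpoint
   gen.SetLocalID("1234567890abcdef");                   // assumed GM job id
   std::string url = gen.GetJobURL();    // endpoint + "/" + local id
   std::string ifc = gen.GetInterface(); // "org.ogf.glue.emies.activitycreation"

 JobIDGeneratorREST reports "org.nordugrid.arcrest" and JobIDGeneratorINTERNAL
 reports "org.nordugrid.internal" as the interface name instead.
*/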
std::string JobIDGeneratorINTERNAL::GetManagerURL(void) { return ""; // conroldir? } std::string JobIDGeneratorINTERNAL::GetJobURL(void) { return ""; // job state file? } std::string JobIDGeneratorINTERNAL::GetInterface(void) { return "org.nordugrid.internal"; } std::string JobIDGeneratorINTERNAL::GetHostname(void) { return Arc::URL(endpoint_).Host(); } } nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/arex.h0000644000000000000000000000013214152153376022102 xustar000000000000000030 mtime=1638455038.406645989 30 atime=1638455038.500647402 30 ctime=1638455099.417562706 nordugrid-arc-6.14.0/src/services/a-rex/arex.h0000644000175000002070000002272614152153376022100 0ustar00mockbuildmock00000000000000#ifndef __ARC_AREX_H__ #define __ARC_AREX_H__ #include #include #include #include #include #include #include "FileChunks.h" #include "grid-manager/GridManager.h" #include "delegation/DelegationStores.h" #include "grid-manager/conf/GMConfig.h" #include "rest/rest.h" #include "job.h" namespace ARex { class ARexGMConfig; class CountedResourceLock; class CountedResource { friend class CountedResourceLock; public: CountedResource(int maxconsumers = -1); ~CountedResource(void); void MaxConsumers(int maxconsumers); private: Glib::Cond cond_; Glib::Mutex lock_; int limit_; int count_; void Acquire(void); void Release(void); }; class OptimizedInformationContainer: public Arc::InformationContainer { private: bool parse_xml_; std::string filename_; int handle_; Arc::XMLNode doc_; Glib::Mutex olock_; public: OptimizedInformationContainer(bool parse_xml = true); ~OptimizedInformationContainer(void); int OpenDocument(void); void Assign(const std::string& xml,const std::string filename = ""); }; #define AREXOP(NAME) Arc::MCC_Status NAME(ARexGMConfig& config,Arc::XMLNode in,Arc::XMLNode out) class ARexService: public Arc::Service { private: static void gm_threads_starter(void* arg); void gm_threads_starter(); Arc::MCC_Status cache_get(Arc::Message& outmsg, const std::string& subpath, off_t range_start, off_t range_end, ARexGMConfig& config, bool no_content); protected: Arc::ThreadRegistry thread_count_; static Arc::NS ns_; Arc::Logger logger_; DelegationStores delegation_stores_; OptimizedInformationContainer infodoc_; CountedResource infolimit_; CountedResource beslimit_; CountedResource datalimit_; std::string endpoint_; bool publishstaticinfo_; std::string uname_; std::string common_name_; std::string long_description_; std::string os_name_; std::string gmrun_; unsigned int infoprovider_wakeup_period_; unsigned int all_jobs_count_; //Glib::Mutex glue_states_lock_; //std::map glue_states_; FileChunksList files_chunks_; GMConfig config_; GridManager* gm_; ARexRest rest_; // A-REX operations AREXOP(CacheCheck); /** Update credentials for specified job through A-REX own interface */ Arc::MCC_Status UpdateCredentials(ARexGMConfig& config,Arc::XMLNode in,Arc::XMLNode out,const std::string& credentials); // EMI ES operations Arc::MCC_Status ESCreateActivities(ARexGMConfig& config,Arc::XMLNode in,Arc::XMLNode out,const std::string& clientid); AREXOP(ESGetResourceInfo); AREXOP(ESQueryResourceInfo); AREXOP(ESPauseActivity); AREXOP(ESResumeActivity); AREXOP(ESNotifyService); AREXOP(ESCancelActivity); AREXOP(ESWipeActivity); AREXOP(ESRestartActivity); AREXOP(ESListActivities); AREXOP(ESGetActivityStatus); AREXOP(ESGetActivityInfo); // HTTP operations Arc::MCC_Status GetJob(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status 
GetLogs(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status GetInfo(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); Arc::MCC_Status GetInfo(Arc::Message& inmsg,Arc::Message& outmsg); Arc::MCC_Status GetNew(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); Arc::MCC_Status GetDelegation(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status GetCache(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); Arc::MCC_Status HeadJob(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status HeadLogs(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status HeadInfo(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); Arc::MCC_Status HeadInfo(Arc::Message& inmsg,Arc::Message& outmsg); Arc::MCC_Status HeadNew(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); Arc::MCC_Status HeadDelegation(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status HeadCache(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); Arc::MCC_Status PutJob(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status PutLogs(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status PutInfo(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); Arc::MCC_Status PutNew(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); Arc::MCC_Status PutDelegation(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status PutCache(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); Arc::MCC_Status DeleteJob(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status DeleteLogs(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status DeleteInfo(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); Arc::MCC_Status DeleteNew(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); Arc::MCC_Status DeleteDelegation(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath); Arc::MCC_Status DeleteCache(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath); // A-REX faults void UnknownActivityIdentifierFault(Arc::XMLNode fault,const std::string& message); void UnknownActivityIdentifierFault(Arc::SOAPFault& fault,const std::string& message); void InvalidRequestMessageFault(Arc::XMLNode fault,const std::string& element,const std::string& message); void InvalidRequestMessageFault(Arc::SOAPFault& fault,const std::string& element,const std::string& message); // EMI ES faults #define ES_MSG_FAULT_HEAD(NAME) \ void NAME(Arc::XMLNode fault,const std::string& 
message,const std::string& desc = ""); \ void NAME(Arc::SOAPFault& fault,const std::string& message,const std::string& desc = ""); #define ES_SIMPLE_FAULT_HEAD(NAME) \ void NAME(Arc::XMLNode fault,const std::string& message = "",const std::string& desc = ""); \ void NAME(Arc::SOAPFault& fault,const std::string& message = "",const std::string& desc = ""); ES_MSG_FAULT_HEAD(ESInternalBaseFault) void ESVectorLimitExceededFault(Arc::XMLNode fault,unsigned long limit,const std::string& message = "",const std::string& desc = ""); void ESVectorLimitExceededFault(Arc::SOAPFault& fault,unsigned long limit,const std::string& message = "",const std::string& desc = ""); ES_SIMPLE_FAULT_HEAD(ESAccessControlFault); ES_SIMPLE_FAULT_HEAD(ESUnsupportedCapabilityFault) ES_SIMPLE_FAULT_HEAD(ESInvalidActivityDescriptionSemanticFault) ES_SIMPLE_FAULT_HEAD(ESInvalidActivityDescriptionFault) ES_SIMPLE_FAULT_HEAD(ESNotSupportedQueryDialectFault) ES_SIMPLE_FAULT_HEAD(ESNotValidQueryStatementFault) ES_SIMPLE_FAULT_HEAD(ESUnknownQueryFault) ES_SIMPLE_FAULT_HEAD(ESInternalResourceInfoFault) ES_SIMPLE_FAULT_HEAD(ESResourceInfoNotFoundFault) ES_SIMPLE_FAULT_HEAD(ESUnableToRetrieveStatusFault) ES_SIMPLE_FAULT_HEAD(ESUnknownAttributeFault) ES_SIMPLE_FAULT_HEAD(ESOperationNotAllowedFault) ES_SIMPLE_FAULT_HEAD(ESActivityNotFoundFault) ES_SIMPLE_FAULT_HEAD(ESInternalNotificationFault) ES_SIMPLE_FAULT_HEAD(ESOperationNotPossibleFault) ES_SIMPLE_FAULT_HEAD(ESInvalidActivityStateFault) ES_SIMPLE_FAULT_HEAD(ESInvalidActivityLimitFault) ES_SIMPLE_FAULT_HEAD(ESInvalidParameterFault) Arc::MCC_Status preProcessSecurity(Arc::Message& inmsg,Arc::Message& outmsg,Arc::SecAttr* sattr,bool is_soap,ARexConfigContext*& config,bool& passed); Arc::MCC_Status postProcessSecurity(Arc::Message& outmsg,bool& passed); public: ARexService(Arc::Config *cfg,Arc::PluginArgument *parg); virtual ~ARexService(void); virtual Arc::MCC_Status process(Arc::Message& inmsg,Arc::Message& outmsg); // HTTP paths static char const* InfoPath; static char const* LogsPath; static char const* NewPath; static char const* DelegationPath; static char const* CachePath; static char const* RestPath; // Convenience methods static Arc::MCC_Status make_empty_response(Arc::Message& outmsg); static Arc::MCC_Status make_fault(Arc::Message& outmsg); static Arc::MCC_Status make_http_fault(Arc::Message& outmsg,int code,const char* resp); static Arc::MCC_Status make_soap_fault(Arc::Message& outmsg,const char* resp = NULL); static Arc::MCC_Status extract_content(Arc::Message& inmsg, std::string& content,uint32_t size_limit = 0); int OpenInfoDocument(void); void InformationCollector(void); virtual std::string getID(); void StopChildThreads(void); }; } // namespace ARex #define HTTP_ERR_NOT_SUPPORTED (501) #define HTTP_ERR_FORBIDDEN (403) #endif nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/tools.h0000644000000000000000000000013214152153376022303 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455099.420562751 nordugrid-arc-6.14.0/src/services/a-rex/tools.h0000644000175000002070000000704514152153376022276 0ustar00mockbuildmock00000000000000#ifndef __ARC_AREX_TOOLS_H__ #define __ARC_AREX_TOOLS_H__ #include #include namespace ARex { void convertActivityStatus(const std::string& gm_state,std::string& bes_state,std::string& arex_state,bool failed = false,bool pending = false); Arc::XMLNode addActivityStatus(Arc::XMLNode pnode,const std::string& gm_state,Arc::XMLNode glue_xml = Arc::XMLNode(),bool failed = false,bool pending = 
false); void convertActivityStatusES(const std::string& gm_state,std::string& primary_state,std::list& state_attributes,bool failed,bool pending,const std::string& failedstate,const std::string& failedcause); Arc::XMLNode addActivityStatusES(Arc::XMLNode pnode,const std::string& gm_state,Arc::XMLNode glue_xml = Arc::XMLNode(),bool failed = false,bool pending = false,const std::string& failedstate = "",const std::string& failedcause = ""); class JobIDGenerator { public: JobIDGenerator() { }; virtual ~JobIDGenerator() { }; virtual void SetLocalID(const std::string& id) = 0; virtual Arc::XMLNode GetGlobalID(Arc::XMLNode& pnode) = 0; virtual std::string GetGlobalID(void) = 0; virtual std::string GetJobURL(void) = 0; virtual std::string GetManagerURL(void) = 0; virtual std::string GetHostname(void) = 0; virtual std::string GetInterface(void) = 0; }; /* class JobIDGeneratorARC:public JobIDGenerator { public: JobIDGeneratorARC(const std::string& endpoint); virtual ~JobIDGeneratorARC() { }; virtual void SetLocalID(const std::string& id); virtual Arc::XMLNode GetGlobalID(Arc::XMLNode& pnode); virtual std::string GetGlobalID(void); virtual std::string GetJobURL(void); virtual std::string GetManagerURL(void); virtual std::string GetHostname(void); virtual std::string GetInterface(void); private: std::string endpoint_; std::string id_; }; */ class JobIDGeneratorES:public JobIDGenerator { public: JobIDGeneratorES(const std::string& endpoint); virtual ~JobIDGeneratorES() { }; virtual void SetLocalID(const std::string& id); virtual Arc::XMLNode GetGlobalID(Arc::XMLNode& pnode); virtual std::string GetGlobalID(void); virtual std::string GetJobURL(void); virtual std::string GetManagerURL(void); virtual std::string GetHostname(void); virtual std::string GetInterface(void); private: std::string endpoint_; std::string id_; }; class JobIDGeneratorINTERNAL:public JobIDGenerator { public: JobIDGeneratorINTERNAL(const std::string& endpoint); virtual ~JobIDGeneratorINTERNAL() { }; virtual void SetLocalID(const std::string& id); virtual Arc::XMLNode GetGlobalID(Arc::XMLNode& pnode); virtual std::string GetGlobalID(void); virtual std::string GetJobURL(void); virtual std::string GetManagerURL(void); virtual std::string GetHostname(void); virtual std::string GetInterface(void); private: std::string endpoint_; std::string id_; }; class JobIDGeneratorREST:public JobIDGeneratorES { public: JobIDGeneratorREST(const std::string& endpoint):JobIDGeneratorES(endpoint) {}; virtual std::string GetInterface(void); }; Arc::XMLNode addJobID(Arc::XMLNode& pnode,const std::string& endpoint,const std::string& id); std::string makeJobID(const std::string& endpoint,const std::string& id); Arc::XMLNode addJobIDES(Arc::XMLNode& pnode,const std::string& endpoint,const std::string& id); std::string makeJobIDES(const std::string& endpoint,const std::string& id); Arc::XMLNode addJobIDINTERNAL(Arc::XMLNode& pnode,const std::string& endpoint,const std::string& id); std::string makeJobIDINTERNAL(const std::string& endpoint,const std::string& id); } #endif // __ARC_AREX_TOOLS_H__ nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/get.cpp0000644000000000000000000000013214152153376022255 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.501647417 30 ctime=1638455099.411562616 nordugrid-arc-6.14.0/src/services/a-rex/get.cpp0000644000175000002070000005324614152153376022254 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include 
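// Note on byte ranges used with newFileRead() below (declared in PayloadFile.h):
// 'start' is inclusive and 'end' is exclusive, while HTTP Range headers are
// inclusive on both ends, so ExtractRange() increments the parsed RANGEEND by one.
// Hypothetical illustration (not part of this file):
//   off_t range_start = 0, range_end = (off_t)(-1);
//   ExtractRange(inmsg, range_start, range_end);           // e.g. bytes=0-9 -> [0,10)
//   Arc::MessagePayload* p = newFileRead(fd, range_start, range_end);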
#include #include #include #include #include "PayloadFile.h" #include "job.h" #include "arex.h" #define MAX_CHUNK_SIZE (10*1024*1024) namespace ARex { static Arc::PayloadRaw* newFileInfo(int handle = -1) { Arc::PayloadRaw* buf = new Arc::PayloadRaw; if(handle != -1) { struct stat st; if(buf && (::fstat(handle,&st) == 0)) buf->Truncate(st.st_size); ::close(handle); } else { if(buf) buf->Truncate(0); } return buf; } static Arc::PayloadRaw* newFileInfo(Arc::FileAccess& file) { struct stat st; Arc::PayloadRaw* buf = new Arc::PayloadRaw; if(buf && (file.fa_fstat(st))) buf->Truncate(st.st_size); return buf; } static void ExtractRange(Arc::Message& inmsg, off_t& range_start, off_t& range_end) { range_start = 0; range_end = (off_t)(-1); { std::string val; val=inmsg.Attributes()->get("HTTP:RANGESTART"); if(!val.empty()) { // Negative ranges not supported if(!Arc::stringto(val,range_start)) { range_start=0; } else { val=inmsg.Attributes()->get("HTTP:RANGEEND"); if(!val.empty()) { if(!Arc::stringto(val,range_end)) { range_end=(off_t)(-1); } else { // Rest of code here treats end of range as exclusive // While HTTP ranges are inclusive ++range_end; }; }; }; }; }; } // -------------------------------------------------------------------------------------------------------------- static Arc::MCC_Status GetJobsList(Arc::Message& outmsg,ARexGMConfig& config,Arc::Logger& logger) { std::string html; html="\r\n\r\nARex: Jobs list\r\n\r\n\r\n
      \r\n"; std::list jobs = ARexJob::Jobs(config,logger); for(std::list::iterator job = jobs.begin();job!=jobs.end();++job) { std::string line = "
    • job "; line+=(*job); line+=""; line+=" logs\r\n"; html+=line; }; html+="
    \r\n"; // Service description access html+="SERVICE DESCRIPTION"; html+="\r\n"; Arc::PayloadRaw* buf = new Arc::PayloadRaw; if(buf) buf->Insert(html.c_str(),0,html.length()); outmsg.Payload(buf); outmsg.Attributes()->set("HTTP:content-type","text/html"); return Arc::MCC_Status(Arc::STATUS_OK); } static Arc::MCC_Status GetFilesList(Arc::Message& outmsg,ARexGMConfig& config, Arc::FileAccess& dir,std::string const& baseurl,std::string const& basepath, Arc::Logger& logger) { std::string html; html="\r\n\r\nARex: Job\r\n\r\n\r\n
      \r\n"; for(;;) { std::string file; if(!dir.fa_readdir(file)) break; if(file == ".") continue; if(file == "..") continue; std::string fpath = basepath+"/"+file; struct stat st; if(lstat(fpath.c_str(),&st) == 0) { if(S_ISREG(st.st_mode)) { std::string line = "
    • file "; line+=file; line+=" - "+Arc::tostring(st.st_size)+" bytes"+"\r\n"; html+=line; } else if(S_ISDIR(st.st_mode)) { std::string line = "
    • dir "; line+=file; line+="\r\n"; html+=line; }; } else { std::string line = "
    • unknown "; line+=file; line+="\r\n"; html+=line; }; }; // Add virtual logs folder /* if((hpath.empty()) && (!joblog.empty())) { std::string line = "
    • dir "; line+=joblog; line+=" - log directory\r\n"; html+=line; }; */ html+="
    \r\n\r\n"; Arc::PayloadRaw* buf = new Arc::PayloadRaw; if(buf) buf->Insert(html.c_str(),0,html.length()); outmsg.Payload(buf); outmsg.Attributes()->set("HTTP:content-type","text/html"); return Arc::MCC_Status(Arc::STATUS_OK); } // -------------------------------------------------------------------------------------------------------------- Arc::MCC_Status ARexService::GetJob(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; if(id.empty()) { // Not a specific job - generate page with list of jobs return GetJobsList(outmsg,config,logger_); } ARexJob job(id,config,logger_); if(!job) { // There is no such job logger_.msg(Arc::ERROR, "Get: there is no job %s - %s", id, job.Failure()); return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); }; std::string hpath = Arc::trim(subpath,"/"); // prepare clean sub-path std::string joblog = job.LogDir(); if(!joblog.empty()) { if((strncmp(joblog.c_str(),hpath.c_str(),joblog.length()) == 0) && ((hpath[joblog.length()] == '/') || (hpath[joblog.length()] == '\0'))) { hpath.erase(0,joblog.length()+1); return GetLogs(inmsg,outmsg,config,id,hpath); }; }; // File or folder Arc::FileAccess* dir = job.OpenDir(subpath); if(dir) { // Directory - html with file list std::string dirurl = config.Endpoint()+"/"+id; if(!hpath.empty()) dirurl+="/"+hpath; std::string dirpath = job.GetFilePath(hpath); Arc::MCC_Status r = GetFilesList(outmsg,config,*dir,dirurl,dirpath,logger_); dir->fa_closedir(); Arc::FileAccess::Release(dir); return r; }; Arc::FileAccess* file = job.OpenFile(hpath,true,false); if(file) { // File or similar off_t range_start; off_t range_end; ExtractRange(inmsg, range_start, range_end); Arc::MessagePayload* h = newFileRead(file,range_start,range_end); if(!h) { file->fa_close(); Arc::FileAccess::Release(file); return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); }; outmsg.Payload(h); outmsg.Attributes()->set("HTTP:content-type","application/octet-stream"); return Arc::MCC_Status(Arc::STATUS_OK); }; // Can't process this path // offset=0; size=0; return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); } Arc::MCC_Status ARexService::GetLogs(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; if(id.empty()) { // Not a specific job - not expected return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); } ARexJob job(id,config,logger_); if(!job) { // There is no such job logger_.msg(Arc::ERROR, "Get: there is no job %s - %s", id, job.Failure()); return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); }; std::string hpath = Arc::trim(subpath,"/"); // prepare clean sub-path if(hpath.empty()) { std::list logs = job.LogFiles(); std::string html; html="\r\n\r\nARex: Job Logs\r\n\r\n\r\n
      \r\n"; for(std::list::iterator l = logs.begin();l != logs.end();++l) { if(strncmp(l->c_str(),"proxy",5) == 0) continue; std::string line = "
    • file "; line+=*l; line+=" - log file\r\n"; html+=line; }; html+="
    \r\n\r\n"; Arc::PayloadRaw* buf = new Arc::PayloadRaw; if(buf) buf->Insert(html.c_str(),0,html.length()); outmsg.Payload(buf); outmsg.Attributes()->set("HTTP:content-type","text/html"); return Arc::MCC_Status(Arc::STATUS_OK); } else { int file = job.OpenLogFile(hpath); if(file != -1) { off_t range_start; off_t range_end; ExtractRange(inmsg, range_start, range_end); Arc::MessagePayload* h = newFileRead(file,range_start,range_end); if(h) { outmsg.Payload(h); outmsg.Attributes()->set("HTTP:content-type","text/plain"); return Arc::MCC_Status(Arc::STATUS_OK); } else { ::close(file); } }; }; return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); } Arc::MCC_Status ARexService::GetInfo(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath) { if(!subpath.empty()) return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); return GetInfo(inmsg, outmsg); } Arc::MCC_Status ARexService::GetInfo(Arc::Message& inmsg,Arc::Message& outmsg) { int h = OpenInfoDocument(); if(h == -1) return Arc::MCC_Status(); Arc::MessagePayload* payload = newFileRead(h); if(!payload) { ::close(h); return Arc::MCC_Status(); }; outmsg.Payload(payload); outmsg.Attributes()->set("HTTP:content-type","text/xml"); return Arc::MCC_Status(Arc::STATUS_OK); } Arc::MCC_Status ARexService::GetNew(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); } Arc::MCC_Status ARexService::GetCache(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; off_t range_start = 0; off_t range_end = (off_t)(-1); ExtractRange(inmsg, range_start, range_end); return cache_get(outmsg, subpath, range_start, range_end, config, false); } // -------------------------------------------------------------------------------------------------------------- Arc::MCC_Status ARexService::HeadLogs(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath) { if(id.empty()) { // Not a specific job - not expected return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); } ARexJob job(id,config,logger_); if(!job) { // There is no such job logger_.msg(Arc::ERROR, "Get: there is no job %s - %s", id, job.Failure()); return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); }; std::string hpath = Arc::trim(subpath,"/"); // prepare clean sub-path if(hpath.empty()) { Arc::PayloadRaw* buf = new Arc::PayloadRaw; if(buf) buf->Truncate(0); outmsg.Payload(buf); outmsg.Attributes()->set("HTTP:content-type","text/html"); return Arc::MCC_Status(Arc::STATUS_OK); } else { int file = job.OpenLogFile(hpath); if(file != -1) { struct stat st; Arc::PayloadRaw* buf = new Arc::PayloadRaw; if(buf && (::fstat(file,&st) == 0)) buf->Truncate(st.st_size); ::close(file); outmsg.Payload(buf); outmsg.Attributes()->set("HTTP:content-type","text/plain"); return Arc::MCC_Status(Arc::STATUS_OK); }; }; return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); } Arc::MCC_Status ARexService::HeadInfo(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath) { if(!subpath.empty()) return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); return HeadInfo(inmsg, outmsg); } Arc::MCC_Status ARexService::HeadInfo(Arc::Message& inmsg,Arc::Message& outmsg) { int h = OpenInfoDocument(); if(h == -1) return 
Arc::MCC_Status(); outmsg.Payload(newFileInfo(h)); outmsg.Attributes()->set("HTTP:content-type","text/xml"); return Arc::MCC_Status(Arc::STATUS_OK); } Arc::MCC_Status ARexService::HeadNew(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); } Arc::MCC_Status ARexService::HeadCache(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; off_t range_start = 0; off_t range_end = (off_t)(-1); return cache_get(outmsg, subpath, range_start, range_end, config, true); } Arc::MCC_Status ARexService::HeadJob(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; if(id.empty()) { // Not a specific job - page with list of jobs outmsg.Payload(newFileInfo()); outmsg.Attributes()->set("HTTP:content-type","text/html"); return Arc::MCC_Status(Arc::STATUS_OK); } ARexJob job(id,config,logger_); if(!job) { // There is no such job logger_.msg(Arc::ERROR, "Head: there is no job %s - %s", id, job.Failure()); return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); }; std::string hpath = Arc::trim(subpath,"/"); // prepare clean sub-path std::string joblog = job.LogDir(); if(!joblog.empty()) { if((strncmp(joblog.c_str(),hpath.c_str(),joblog.length()) == 0) && ((hpath[joblog.length()] == '/') || (hpath[joblog.length()] == '\0'))) { hpath.erase(0,joblog.length()+1); return HeadLogs(inmsg,outmsg,config,id,hpath); }; }; // File or folder Arc::FileAccess* dir = job.OpenDir(subpath); if(dir) { // Directory - html with file list outmsg.Payload(newFileInfo()); outmsg.Attributes()->set("HTTP:content-type","text/html"); dir->fa_closedir(); Arc::FileAccess::Release(dir); return Arc::MCC_Status(Arc::STATUS_OK); }; Arc::FileAccess* file = job.OpenFile(hpath,true,false); if(file) { // File or similar outmsg.Payload(newFileInfo(*file)); file->fa_close(); Arc::FileAccess::Release(file); outmsg.Attributes()->set("HTTP:content-type","application/octet-stream"); return Arc::MCC_Status(Arc::STATUS_OK); }; // Can't process this path // offset=0; size=0; return Arc::MCC_Status(Arc::UNKNOWN_SERVICE_ERROR); } // ------------------------------------------------------------------------------------------------- static bool cache_get_allowed(const std::string& url, ARexGMConfig& config, Arc::Logger& logger) { // Extract information from credentials std::string dn; // DN of credential std::string vo; // Assuming only one VO std::list voms; // VOMS attributes for (std::list::const_iterator a = config.beginAuth(); a!=config.endAuth(); ++a) { if (*a) { Arc::SecAttr* sattr = (*a)->get("TLS"); if (!sattr) continue; dn = sattr->get("IDENTITY"); vo = sattr->get("VO"); voms = sattr->getAll("VOMS"); break; } } // At least DN should be found. VOMS info may not be present. 
if (dn.empty()) { logger.msg(Arc::ERROR, "Failed to extract credential information"); return false; } logger.msg(Arc::DEBUG, "Checking cache permissions: DN: %s", dn); logger.msg(Arc::DEBUG, "Checking cache permissions: VO: %s", vo); for (std::list::const_iterator att = voms.begin(); att != voms.end(); ++att) { logger.msg(Arc::DEBUG, "Checking cache permissions: VOMS attr: %s", *att); } // Cache configuration specifies URL regexps and a certificate attribute and // value. Go through looking for a match. for (std::list::const_iterator access = config.GmConfig().CacheParams().getCacheAccess().begin(); access != config.GmConfig().CacheParams().getCacheAccess().end(); ++access) { if (access->regexp.match(url)) { if (Arc::lower(access->cred_type) == "dn") { if (access->cred_value.match(dn)) { logger.msg(Arc::VERBOSE, "Cache access allowed to %s by DN %s", url, dn); return true; } logger.msg(Arc::DEBUG, "DN %s doesn't match %s", dn, access->cred_value.getPattern()); } else if (Arc::lower(access->cred_type) == "voms:vo") { if (access->cred_value.match(vo)) { logger.msg(Arc::VERBOSE, "Cache access allowed to %s by VO %s", url, vo); return true; } logger.msg(Arc::DEBUG, "VO %s doesn't match %s", vo, access->cred_value.getPattern()); } else if (Arc::lower(access->cred_type) == "voms:role") { // Get the configured allowed role std::vector role_parts; Arc::tokenize(access->cred_value.getPattern(), role_parts, ":"); if (role_parts.size() != 2) { logger.msg(Arc::WARNING, "Bad credential value %s in cache access rules", access->cred_value.getPattern()); continue; } std::string cred_vo = role_parts[0]; std::string cred_role = role_parts[1]; std::string allowed_role("/VO="+cred_vo+"/Group="+cred_vo+"/Role="+cred_role); for (std::list::const_iterator attr = voms.begin(); attr != voms.end(); ++attr) { if (*attr == allowed_role) { logger.msg(Arc::DEBUG, "VOMS attr %s matches %s", *attr, allowed_role); logger.msg(Arc::VERBOSE, "Cache access allowed to %s by VO %s and role %s", url, cred_vo, cred_role); return true; } logger.msg(Arc::DEBUG, "VOMS attr %s doesn't match %s", *attr, allowed_role); } } else if (Arc::lower(access->cred_type) == "voms:group") { // Get the configured allowed group std::vector group_parts; Arc::tokenize(access->cred_value.getPattern(), group_parts, ":"); if (group_parts.size() != 2) { logger.msg(Arc::WARNING, "Bad credential value %s in cache access rules", access->cred_value.getPattern()); continue; } std::string cred_vo = group_parts[0]; std::string cred_group = group_parts[1]; std::string allowed_group("/VO="+cred_vo+"/Group="+cred_vo+"/Group="+cred_group); for (std::list::const_iterator attr = voms.begin(); attr != voms.end(); ++attr) { if (*attr == allowed_group) { logger.msg(Arc::DEBUG, "VOMS attr %s matches %s", *attr, allowed_group); logger.msg(Arc::VERBOSE, "Cache access allowed to %s by VO %s and group %s", url, cred_vo, cred_group); return true; } logger.msg(Arc::DEBUG, "VOMS attr %s doesn't match %s", *attr, allowed_group); } } else { logger.msg(Arc::WARNING, "Unknown credential type %s for URL pattern %s", access->cred_type, access->regexp.getPattern()); } } } // If we get to here no match was found logger.msg(Arc::VERBOSE, "No match found in cache access rules for %s", url); return false; } Arc::MCC_Status ARexService::cache_get(Arc::Message& outmsg, const std::string& subpath, off_t range_start, off_t range_end, ARexGMConfig& config, bool no_content) { // subpath contains the URL, which can be encoded. 
Constructing a URL // object with encoded=true only decodes the path so have to decode first std::string unencoded(Arc::uri_unencode(subpath)); Arc::URL cacheurl(unencoded); logger.msg(Arc::INFO, "Get from cache: Looking in cache for %s", cacheurl.str()); if (!cacheurl) { logger.msg(Arc::ERROR, "Get from cache: Invalid URL %s", subpath); return make_http_fault(outmsg, 400, "Bad request: Invalid URL"); } // Security check. The access is configured in arc.conf like // cache_access="srm://srm-atlas.cern.ch/grid/atlas* voms:vo atlas" // then the url is compared to the certificate attribute specified if (!cache_get_allowed(cacheurl.str(), config, logger)) { return make_http_fault(outmsg, 403, "Forbidden"); } Arc::FileCache cache(config.GmConfig().CacheParams().getCacheDirs(), config.GmConfig().CacheParams().getDrainingCacheDirs(), config.GmConfig().CacheParams().getReadOnlyCacheDirs(), "0", // Jobid is not used config.User().get_uid(), config.User().get_gid()); if (!cache) { logger.msg(Arc::ERROR, "Get from cache: Error in cache configuration"); return make_http_fault(outmsg, 500, "Error in cache configuration"); } // Get the cache file corresponding to the URL std::string cache_file(cache.File(cacheurl.str())); // Check if file exists struct stat st; if (!Arc::FileStat(cache_file, &st, false)) { if (errno == ENOENT) { logger.msg(Arc::INFO, "Get from cache: File not in cache"); return make_http_fault(outmsg, 404, "File not found"); } else { logger.msg(Arc::WARNING, "Get from cache: could not access cached file: %s", Arc::StrError(errno)); return make_http_fault(outmsg, 500, "Error accessing cached file"); } } // Check file size against specified range if (range_start > st.st_size) range_start = st.st_size; if (range_end > st.st_size) range_end = st.st_size; // Check if lockfile exists if (Arc::FileStat(cache_file + Arc::FileLock::getLockSuffix(), &st, false)) { logger.msg(Arc::INFO, "Get from cache: Cached file is locked"); return make_http_fault(outmsg, 409, "Cached file is locked"); } // Read the file and fill the payload if (!no_content) { Arc::MessagePayload* h = newFileRead(cache_file.c_str(), range_start, range_end); outmsg.Payload(h); } else { struct stat st; Arc::PayloadRaw* buf = new Arc::PayloadRaw; if(buf && Arc::FileStat(cache_file, &st, false)) buf->Truncate(st.st_size); outmsg.Payload(buf); } outmsg.Attributes()->set("HTTP:content-type","application/octet-stream"); return Arc::MCC_Status(Arc::STATUS_OK); } } // namespace ARex nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/a-rex-backtrace-collect.in0000644000000000000000000000013214152153376025676 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.500647402 30 ctime=1638455099.401562466 nordugrid-arc-6.14.0/src/services/a-rex/a-rex-backtrace-collect.in0000644000175000002070000000461614152153376025672 0ustar00mockbuildmock00000000000000#!/bin/bash readconfigvar() { fname=$1 if [ ! -r "$fname" ]; then return fi bname="[$2]" vname=$3 value= cat "$fname" | sed -e 's/\r$//' -e 's/^\r//' | grep -e '^\[' -e "^${vname}=" | { while true; do read line if [ ! $? = 0 ] ; then return fi if [ "$line" = "$bname" ] ; then while true ; do read line if [ ! $? 
= 0 ] ; then return fi lstart=`echo "$line" | head -c 1` if [ "$lstart" = '[' ] ; then return fi vlname=`echo "$line" | sed 's/=.*//;t;s/.*//'` if [ "$vlname" = "$vname" ] ; then val=`echo "$line" | sed 's/[^=]*=//'` eval "echo $val" return fi done fi done } } # ARC_LOCATION ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ "x$ARC_CONFIG" = "x" ]; then if [ -r $ARC_LOCATION/etc/arc.conf ]; then ARC_CONFIG=$ARC_LOCATION/etc/arc.conf elif [ -r /etc/arc.conf ]; then ARC_CONFIG=/etc/arc.conf fi fi if [ "x$ARC_CONFIG" = "x" ]; then echo "Can't find configuration file." exit 1 fi if [ ! -f "${ARC_CONFIG}" ]; then echo "Can't find configuration file at ${ARC_CONFIG}." exit 1 fi ARCHED="${ARC_LOCATION}/sbin/arched" if [ ! -f "${ARCHED}" ]; then echo "Can't find arched at ${ARCHED}." exit 1 fi LOGFILE=`readconfigvar "$ARC_CONFIG" arex logfile` LOGFILE=${LOGFILE:-/var/log/arc/arex.log} COREDIR=`dirname "${LOGFILE}"`/arccore if [ ! -d "${COREDIR}" ]; then echo "Can't find core collection folder at ${COREDIR}." exit 1 fi backtrace_generated=no for corename in "${COREDIR}"/*; do echo "${corename}" | grep '\.backtrace$' if [ ! "$?" = '0' ]; then backtracename="${corename}.backtrace" echo "--- Processing ${corename} - storing into ${backtracename} ---" gdb --batch --core="${corename}" "${ARCHED}" --eval-command='thread apply all bt full' 1>"${backtracename}" 2>&1 backtrace_generated=yes fi done if [ $backtrace_generated = yes ]; then echo "Please send generated backtrace(s) to support@nordugrid.org or report them on http://bugzilla.nordugrid.org" fi nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/information_collector.cpp0000644000000000000000000000013214152153376026071 xustar000000000000000030 mtime=1638455038.419646185 30 atime=1638455038.506647492 30 ctime=1638455099.414562661 nordugrid-arc-6.14.0/src/services/a-rex/information_collector.cpp0000644000175000002070000003322714152153376026065 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "grid-manager/files/ControlFileHandling.h" #include "job.h" #include "arex.h" namespace ARex { int ARexService::OpenInfoDocument() { int h = infodoc_.OpenDocument(); if (h == -1) { // If information collector has no file assigned (not yet or collector is not running) open it directly h = open(config_.InformationFile().c_str(), O_RDONLY); } return h; } void ARexService::InformationCollector(void) { thread_count_.RegisterThread(); for(;;) { // Run information provider std::string xml_str; int r = -1; { std::string cmd; cmd=Arc::ArcLocation::GetDataDir()+"/CEinfo.pl --splitjobs --config "+config_.ConfigFile(); std::string stdin_str; std::string stderr_str; Arc::Run run(cmd); run.AssignStdin(stdin_str); run.AssignStdout(xml_str, 1024*1024); // can information document become bigger than 1MB? 
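
// --- Illustrative sketch (not part of the original source). The surrounding code runs
// the external information provider through Arc::Run with a 1 MB cap on collected
// stdout. The helper below restates that pattern in isolation, using only the calls
// visible in this file; the function name, the header path <arc/Run.h> and the reuse
// of the same buffer limit are assumptions made for the example.
#if 0
#include <string>
#include <arc/Run.h>   // assumed header for Arc::Run

static bool run_info_provider(const std::string& cmd, std::string& xml, std::string& errors) {
  Arc::Run run(cmd);                  // external process, e.g. ".../CEinfo.pl --splitjobs --config ..."
  std::string empty_stdin;
  run.AssignStdin(empty_stdin);       // the provider reads nothing from stdin
  run.AssignStdout(xml, 1024*1024);   // cap collected stdout, as in the code above
  run.AssignStderr(errors);           // keep stderr so failures can be logged
  if(!run.Start()) return false;      // fork/exec of the provider failed
  if(!run.Wait()) return false;       // provider did not finish
  return (run.Result() == 0);         // non-zero exit status means the provider failed
}
#endif
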
run.AssignStderr(stderr_str); logger_.msg(Arc::DEBUG,"Resource information provider: %s",cmd); if(!run.Start()) { // Failed to fork proces logger_.msg(Arc::DEBUG,"Resource information provider failed to start"); } else { if(!run.Wait()) { logger_.msg(Arc::DEBUG,"Resource information provider failed to run"); } else { r = run.Result(); if (r!=0) { logger_.msg(Arc::WARNING,"Resource information provider failed with exit status: %i\n%s",r,stderr_str); } else { logger_.msg(Arc::DEBUG,"Resource information provider log:\n%s",stderr_str); }; }; }; }; if (r!=0) { logger_.msg(Arc::WARNING,"No new informational document assigned"); } else { logger_.msg(Arc::VERBOSE,"Obtained XML: %s",xml_str.substr(0,100)); // Following code is suboptimal. Most of it should go away // and functionality to be moved to information providers. if(!xml_str.empty()) { // Currently glue states are lost. Counter of all jobs is lost too. infodoc_.Assign(xml_str, config_.InformationFile()); Arc::XMLNode root = infodoc_.Acquire(); Arc::XMLNode all_jobs_count = root["Domains"]["AdminDomain"]["Services"]["ComputingService"]["AllJobs"]; if((bool)all_jobs_count) { Arc::stringto((std::string)all_jobs_count,all_jobs_count_); all_jobs_count.Destroy(); // is not glue2 info }; infodoc_.Release(); } else { logger_.msg(Arc::ERROR,"Informational document is empty"); }; }; if(thread_count_.WaitOrCancel(infoprovider_wakeup_period_*100)) break; }; thread_count_.UnregisterThread(); } std::string ARexService::getID() { return "ARC:AREX"; } class PrefixedFilePayload: public Arc::PayloadRawInterface { private: std::string prefix_; std::string postfix_; int handle_; void* addr_; off_t length_; public: PrefixedFilePayload(const std::string& prefix,const std::string& postfix,int handle) { prefix_ = prefix; postfix_ = postfix; handle_ = handle; addr_ = MAP_FAILED; length_ = 0; if(handle != -1) { struct stat st; if(::fstat(handle,&st) == 0) { if(st.st_size > 0) { length_ = st.st_size; addr_ = ::mmap(NULL,st.st_size,PROT_READ,MAP_PRIVATE,handle,0); if(addr_ == MAP_FAILED) length_=0; }; }; }; }; ~PrefixedFilePayload(void) { if(addr_ != MAP_FAILED) ::munmap(addr_,length_); if(handle_ != -1) ::close(handle_); }; virtual char operator[](Size_t pos) const { char* p = ((PrefixedFilePayload*)this)->Content(pos); if(!p) return 0; return *p; }; virtual char* Content(Size_t pos) { if(pos < prefix_.length()) return (char*)(prefix_.c_str() + pos); pos -= prefix_.length(); if(pos < length_) return ((char*)(addr_) + pos); pos -= length_; if(pos < postfix_.length()) return (char*)(postfix_.c_str() + pos); return NULL; }; virtual Size_t Size(void) const { return (prefix_.length() + length_ + postfix_.length()); }; virtual char* Insert(Size_t /* pos */ = 0,Size_t /* size */ = 0) { return NULL; }; virtual char* Insert(const char* /* s */,Size_t /* pos */ = 0,Size_t /* size */ = -1) { return NULL; }; virtual char* Buffer(unsigned int num = 0) { if(num == 0) return (char*)(prefix_.c_str()); if(addr_ != MAP_FAILED) { if(num == 1) return (char*)addr_; } else { ++num; }; if(num == 2) return (char*)(postfix_.c_str()); return NULL; }; virtual Size_t BufferSize(unsigned int num = 0) const { if(num == 0) return prefix_.length(); if(addr_ != MAP_FAILED) { if(num == 1) return length_; } else { ++num; }; if(num == 2) return postfix_.length(); return 0; }; virtual Size_t BufferPos(unsigned int num = 0) const { if(num == 0) return 0; if(addr_ != MAP_FAILED) { if(num == 1) return prefix_.length(); } else { ++num; }; if(num == 2) return (prefix_.length() + length_); return 
(prefix_.length() + length_ + postfix_.length()); }; virtual bool Truncate(Size_t /* size */) { return false; }; }; OptimizedInformationContainer::OptimizedInformationContainer(bool parse_xml) { handle_=-1; parse_xml_=parse_xml; } OptimizedInformationContainer::~OptimizedInformationContainer(void) { if(handle_ != -1) ::close(handle_); if(!filename_.empty()) ::unlink(filename_.c_str()); } int OptimizedInformationContainer::OpenDocument(void) { int h = -1; olock_.lock(); if(handle_ != -1) h = ::dup(handle_); olock_.unlock(); return h; } void OptimizedInformationContainer::Assign(const std::string& xml, const std::string filename) { std::string tmpfilename; int h = -1; if(filename.empty()) { h = Glib::file_open_tmp(tmpfilename); } else { tmpfilename = filename; tmpfilename += ".tmpXXXXXX"; h = Glib::mkstemp(tmpfilename); }; if(h == -1) { Arc::Logger::getRootLogger().msg(Arc::ERROR,"OptimizedInformationContainer failed to create temporary file"); return; }; Arc::Logger::getRootLogger().msg(Arc::VERBOSE,"OptimizedInformationContainer created temporary file: %s",tmpfilename); for(std::string::size_type p = 0;p 0) expression = expression.Child(0); std::string xpath = (std::string)expression; if(xpath.empty()) { Arc::SOAPFault fault(out.Parent(),Arc::SOAPFault::Sender,""); ESNotValidQueryStatementFault(fault,"Could not extract xpath query from request"); out.Destroy(); return Arc::MCC_Status(Arc::STATUS_OK); } // WARNING. Suboptimal temporary solution. int h = OpenInfoDocument(); if(h == -1) ESFAULT("Failed to open resource information file"); ::lseek(h,0,SEEK_SET); struct stat st; if((::fstat(h,&st) != 0) || (st.st_size == 0)) { ::close(h); ESFAULT("Failed to stat resource information file"); }; char* buf = (char*)::malloc(st.st_size+1); if(!buf) { ::close(h); ESFAULT("Failed to allocate memory for resource information"); }; off_t p = 0; for(;pfirst,-1); } return Arc::MCC_Status(Arc::STATUS_OK); } } nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/arc-arex-start.in0000644000000000000000000000013214152153376024157 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.500647402 30 ctime=1638455099.397562406 nordugrid-arc-6.14.0/src/services/a-rex/arc-arex-start.in0000644000175000002070000002365314152153376024155 0ustar00mockbuildmock00000000000000#!/bin/bash export MALLOC_ARENA_MAX=2 add_library_path() { location="$1" if [ ! "x$location" = "x" ] ; then if [ ! "$location" = "/usr" ] ; then libdir="$location/lib" libdir64="$location/lib64" if [ -d "$libdir64" ] ; then if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH="$libdir64" else LD_LIBRARY_PATH="$libdir64:$LD_LIBRARY_PATH" fi fi if [ -d "$libdir" ] ; then if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH="$libdir" else LD_LIBRARY_PATH="$libdir:$LD_LIBRARY_PATH" fi fi fi fi } prog=arched RUN=yes send_systemd_notify() { # return if no systemd-notify found type systemd-notify >/dev/null 2>&1 || return systemd-notify "$@" } log_failure_msg() { send_systemd_notify --status "Error: $@" echo $@ } # sysconfig files if [ -r /etc/sysconfig/nordugrid ]; then . /etc/sysconfig/nordugrid elif [ -r /etc/default/nordugrid ]; then . /etc/default/nordugrid fi if [ -r /etc/sysconfig/arc-arex ]; then . /etc/sysconfig/arc-arex elif [ -r /etc/default/arc-arex ]; then . /etc/default/arc-arex fi # GLOBUS_LOCATION GLOBUS_LOCATION=${GLOBUS_LOCATION:-@DEFAULT_GLOBUS_LOCATION@} if [ -n "$GLOBUS_LOCATION" ]; then if [ ! 
-d "$GLOBUS_LOCATION" ]; then log_failure_msg "GLOBUS_LOCATION ($GLOBUS_LOCATION) not found" exit 1 fi export GLOBUS_LOCATION fi # ARC_LOCATION ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ ! -d "$ARC_LOCATION" ]; then log_failure_msg "ARC_LOCATION ($ARC_LOCATION) not found" exit 1 fi export ARC_LOCATION # VOMS_LOCATION VOMS_LOCATION=${VOMS_LOCATION:-@DEFAULT_VOMS_LOCATION@} # Prepare environment for executing various tools and main application add_library_path "$VOMS_LOCATION" add_library_path "$GLOBUS_LOCATION" if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH=$ARC_LOCATION/@libsubdir@:$ARC_LOCATION/@libsubdir@64 else LD_LIBRARY_PATH=$ARC_LOCATION/@libsubdir@:$ARC_LOCATION/@libsubdir@64:$LD_LIBRARY_PATH fi export LD_LIBRARY_PATH testconfigblock() { $ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --runconfig "$1" --load -b "$2" 2>/dev/null 1>&2 if [ $? -eq 0 ] ; then echo 'true' else echo 'false' fi } readorigconfigvar() { value=`$ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --config "$1" -b "$3" -o "$2" 2>/dev/null` if [ $? -eq 0 ] ; then echo "$value" exit 0 else exit 1 fi } readconfigvar() { fname="$1" optname="$2" blocks="" while [ ! -z "$3" ] ; do blocks="$blocks -b $3" shift done value=`$ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --runconfig "$fname" --load $blocks -o "$optname" 2>/dev/null` if [ $? -eq 0 ] ; then echo "$value" exit 0 else exit 1 fi } # ARC_CONFIG if [ "x$ARC_CONFIG" = "x" ]; then if [ -r $ARC_LOCATION/etc/arc.conf ]; then ARC_CONFIG=$ARC_LOCATION/etc/arc.conf elif [ -r /etc/arc.conf ]; then ARC_CONFIG=/etc/arc.conf fi fi # PID file PID_FILE=`readorigconfigvar "$ARC_CONFIG" pidfile arex` if [ "x$PID_FILE" = "x" ]; then # Missing default value for pidfile means no service block is present log_failure_msg "ARC configuration is missing [arex] block" exit 1 fi if [ "$1" = "--getpidfile" ] ; then echo $PID_FILE exit 0 fi ARC_RUNTIME_CONFIG=`echo "$PID_FILE" | sed 's#\([^\./]*\)\.[^\./]*$#\1#'` ARC_RUNTIME_CONFIG="${ARC_RUNTIME_CONFIG}.cfg" mkdir_for_user() { dirpath="$1" username="$2" groupname="$3" if [ ! -d "$dirpath" ] ; then mkdir -p "$dirpath" if [ ! -z "$username" ] ; then if [ ! -z "$groupname" ] ; then chown "$username:$groupname" "$dirpath" else chown "$username" "$dirpath" fi fi fi } mkfile_for_user() { filepath="$1" username="$2" groupname="$3" if [ ! -f "$filepath" ] ; then touch "$filepath" fi if [ ! -z "$username" ] ; then if [ ! -z "$groupname" ] ; then chown "$username:$groupname" "$filepath" else chown "$username" "$filepath" fi fi } prepare() { CMD="$ARC_LOCATION/sbin/$prog" if [ ! -x "$CMD" ]; then log_failure_msg "Missing $CMD executable" exit 1 fi if [ ! -r "$ARC_CONFIG" ]; then log_failure_msg "ARC configuration not found (usually /etc/arc.conf)" exit 1 fi # Pre-process configuration $ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --config "$ARC_CONFIG" --runconfig "$ARC_RUNTIME_CONFIG" --save 2>/dev/null if [ $? -ne 0 ] ; then log_failure_msg "ARC configuration processing failed" exit 1 fi # Creating configuration file of arched # Reading following information from config file: # Log file # Debug level # User name # ... 
LOGFILE=`readconfigvar "$ARC_RUNTIME_CONFIG" logfile arex` LOGLEVEL=`readconfigvar "$ARC_RUNTIME_CONFIG" loglevel arex` WATCHDOG=`readconfigvar "$ARC_RUNTIME_CONFIG" watchdog arex` USERNAME=`readconfigvar "$ARC_RUNTIME_CONFIG" user arex` GRIDTMPDIR=`readconfigvar "$ARC_RUNTIME_CONFIG" tmpdir arex` GROUPNAME=`echo "$USERNAME" | sed 's/^[^:]*//;s/^://'` USERNAME=`echo "$USERNAME" | sed 's/:.*//'` X509_USER_CERT=`readconfigvar "$ARC_RUNTIME_CONFIG" x509_host_cert common` X509_USER_KEY=`readconfigvar "$ARC_RUNTIME_CONFIG" x509_host_key common` X509_CERT_DIR=`readconfigvar "$ARC_RUNTIME_CONFIG" x509_cert_dir common` GLOBUS_TCP_PORT_RANGE=`readconfigvar "$ARC_RUNTIME_CONFIG" globus_tcp_port_range arex/data-staging` GLOBUS_UDP_PORT_RANGE=`readconfigvar "$ARC_RUNTIME_CONFIG" globus_udp_port_range arex/data-staging` HOSTNAME=`readconfigvar "$ARC_RUNTIME_CONFIG" hostname common` SERVICEMAIL=`readconfigvar "$ARC_RUNTIME_CONFIG" mail arex` # It is easier to handle root user through empty value. if [ "$USERNAME" = "root" ] ; then USERNAME="" fi if [ "$GROUPNAME" = "root" ] ; then GROUPNAME="" fi # Exporting collected variables export X509_USER_CERT export X509_USER_KEY export X509_CERT_DIR export GLOBUS_TCP_PORT_RANGE export GLOBUS_UDP_PORT_RANGE export HOSTNAME if [ ! -z "$GRIDTMPDIR" ] ; then export TMPDIR="$GRIDTMPDIR" ; fi # Web Service configuration arex_endpoint="" arex_mount_point="" ws_present=`testconfigblock "$ARC_RUNTIME_CONFIG" arex/ws` arex_present=`testconfigblock "$ARC_RUNTIME_CONFIG" arex/ws/jobs` if [ "$ws_present" = 'true' ] ; then WSLOGFILE=`readconfigvar "$ARC_RUNTIME_CONFIG" logfile arex/ws` arex_mount_point=`readconfigvar "$ARC_RUNTIME_CONFIG" wsurl arex/ws` arex_endpoint="$arex_mount_point" fi service_mail="" if [ ! -z "$SERVICEMAIL" ] ; then service_mail="$SERVICEMAIL" fi AREX_CONFIG=`mktemp -t arex.xml.XXXXXX` if [ -z "$AREX_CONFIG" ] ; then log_failure_msg "Failed to create temporary file" exit 1 fi CMD="$CMD -c $AREX_CONFIG" case "$LOGLEVEL" in 0) LOGLEVEL="FATAL" ;; 1) LOGLEVEL="ERROR" ;; 2) LOGLEVEL="WARNING" ;; 3) LOGLEVEL="INFO" ;; 4) LOGLEVEL="VERBOSE" ;; 5) LOGLEVEL="DEBUG" ;; *) LOGLEVEL="INFO" ;; esac mkdir_for_user `dirname "$LOGFILE"` "$USERNAME" "$GROUPNAME" mkfile_for_user "$LOGFILE" "$USERNAME" "$GROUPNAME" if [ "$WATCHDOG" = "yes" ] ; then WATCHDOG="true" else WATCHDOG="false" fi if [ ! -z "$USERNAME" ] ; then CMD="$CMD -u $USERNAME" fi if [ ! -z "$GROUPNAME" ] ; then CMD="$CMD -g $GROUPNAME" fi AREXCFG="\ $PID_FILE $LOGFILE $LOGLEVEL $WATCHDOG $ARC_LOCATION/@pkglibsubdir@/ arex $service_mail $arex_endpoint $ARC_RUNTIME_CONFIG " echo "$AREXCFG" > "$AREX_CONFIG" # setup logfile in case it is not there yet if [ ! -z "$USERNAME" ] ; then if [ ! -z "$GROUPNAME" ] ; then [ -f $AREX_CONFIG ] && chown "$USERNAME:$GROUPNAME" "$AREX_CONFIG" else [ -f $AREX_CONFIG ] && chown "$USERNAME" "$AREX_CONFIG" fi fi # prepare to collect crash information COREDIR=`dirname "${LOGFILE}"`/arccore mkdir_for_user "${COREDIR}" "$USERNAME" "$GROUPNAME" cd "${COREDIR}" ulimit -c unlimited } validate() { CHECK_CMD=$ARC_LOCATION/@pkglibexecsubdir@/arc-config-check if [ ! -x $CHECK_CMD ]; then log_failure_msg "Could not find or execute arc-config-check tool" return 1 fi eval "$CHECK_CMD --config $ARC_CONFIG $@" RETVAL=$? return $RETVAL } if [ "$RUN" != "yes" ] ; then echo "a-rex disabled, please adjust the configuration to your needs " echo "and then set RUN to 'yes' in /etc/default/a-rex to enable it." exit 0 fi prepare echo "Validating A-REX setup..." 
>> "$LOGFILE" validate >> "$LOGFILE" 2>&1 RETVAL=$? if [ $RETVAL != 0 ]; then # Run validator again to print errors to stdout validate --skip-warnings log_failure_msg "Configuration validation failed" exit 1 fi # Raise limit on number of file descriptors to max hlimit=`ulimit -H -n` if [ ! -z "$hlimit" ] ; then ulimit -S -n "$hlimit" 2>/dev/null fi exec $CMD "$@" nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153432023030 xustar000000000000000030 mtime=1638455066.348065822 30 atime=1638455089.538414268 30 ctime=1638455099.394562361 nordugrid-arc-6.14.0/src/services/a-rex/Makefile.in0000644000175000002070000020372714152153432023030 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ noinst_PROGRAMS = test_cache_check$(EXEEXT) subdir = src/services/a-rex DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/arc-arex.in $(srcdir)/arc-arex.service.in \ $(srcdir)/arc-arex-start.in $(srcdir)/arc-arex-ws.in \ $(srcdir)/arc-arex-ws.service.in \ $(srcdir)/arc-arex-ws-start.in \ $(srcdir)/a-rex-backtrace-collect.in \ $(srcdir)/a-rex-backtrace-collect.8.in $(srcdir)/perferator.in \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ 
$(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = arc-arex arc-arex.service arc-arex-start \ arc-arex-ws arc-arex-ws.service arc-arex-ws-start \ a-rex-backtrace-collect a-rex-backtrace-collect.8 perferator CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! 
-r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(pkglibdir)" "$(DESTDIR)$(initddir)" \ "$(DESTDIR)$(pkgdatadir)" "$(DESTDIR)$(pkglibexecdir)" \ "$(DESTDIR)$(sbindir)" "$(DESTDIR)$(man8dir)" \ "$(DESTDIR)$(unitsdir)" LTLIBRARIES = $(pkglib_LTLIBRARIES) libarex_la_DEPENDENCIES = $(GRIDMANAGER_LIBS) \ $(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ $(top_builddir)/src/hed/libs/ws-addressing/libarcwsaddressing.la \ $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(top_builddir)/src/hed/libs/security/libarcsecurity.la \ $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/otokens/libarcotokens.la am_libarex_la_OBJECTS = libarex_la-arex.lo libarex_la-job.lo \ libarex_la-create_activity.lo \ libarex_la-get_activity_statuses.lo \ libarex_la-change_activity_status.lo \ libarex_la-update_credentials.lo libarex_la-faults.lo \ libarex_la-get.lo libarex_la-put.lo libarex_la-PayloadFile.lo \ libarex_la-FileChunks.lo libarex_la-information_collector.lo \ libarex_la-cachecheck.lo libarex_la-tools.lo libarex_la_OBJECTS = $(am_libarex_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libarex_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(libarex_la_CXXFLAGS) \ $(CXXFLAGS) $(libarex_la_LDFLAGS) $(LDFLAGS) -o $@ PROGRAMS = $(noinst_PROGRAMS) am_test_cache_check_OBJECTS = \ test_cache_check-test_cache_check.$(OBJEXT) test_cache_check_OBJECTS = $(am_test_cache_check_OBJECTS) am__DEPENDENCIES_1 = test_cache_check_DEPENDENCIES = $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) test_cache_check_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(test_cache_check_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) \ $(LDFLAGS) -o $@ SCRIPTS = $(initd_SCRIPTS) $(pkgdata_SCRIPTS) $(pkglibexec_SCRIPTS) \ $(sbin_SCRIPTS) AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = 
$(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libarex_la_SOURCES) $(test_cache_check_SOURCES) DIST_SOURCES = $(libarex_la_SOURCES) $(test_cache_check_SOURCES) RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac man8dir = $(mandir)/man8 NROFF = nroff MANS = $(man_MANS) DATA = $(units_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = 
@EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = 
@PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ @INTERNAL_ENABLED_FALSE@INTERNAL = @INTERNAL_ENABLED_TRUE@INTERNAL = internaljobplugin SUBDIRS = delegation grid-manager infoproviders lrms schema $(INTERNAL) rte rest DIST_SUBDIRS = delegation grid-manager infoproviders lrms schema internaljobplugin rte rest pkglib_LTLIBRARIES = libarex.la @PYTHON_LRMS_ENABLED_FALSE@PYTHON_LRMS_PKGLIBEXEC_FILES = 
@PYTHON_LRMS_ENABLED_TRUE@PYTHON_LRMS_PKGLIBEXEC_FILES = arc-sshfs-mount pkglibexec_SCRIPTS = $(PYTHON_LRMS_PKGLIBEXEC_FILES) @SYSV_SCRIPTS_ENABLED_FALSE@AREX_SCRIPT = @SYSV_SCRIPTS_ENABLED_TRUE@AREX_SCRIPT = arc-arex arc-arex-ws initd_SCRIPTS = $(AREX_SCRIPT) @SYSTEMD_UNITS_ENABLED_FALSE@AREX_UNIT = @SYSTEMD_UNITS_ENABLED_TRUE@AREX_UNIT = arc-arex.service arc-arex-ws.service units_DATA = $(AREX_UNIT) pkgdata_SCRIPTS = arc-arex-start arc-arex-ws-start perferator sbin_SCRIPTS = a-rex-backtrace-collect man_MANS = a-rex-backtrace-collect.8 EXTRA_DIST = arc-sshfs-mount arc.zero.conf GRIDMANAGER_LIBS = grid-manager/libgridmanager.la delegation/libdelegation.la rest/libarexrest.la libarex_la_SOURCES = arex.cpp job.cpp \ create_activity.cpp \ get_activity_statuses.cpp \ change_activity_status.cpp \ update_credentials.cpp faults.cpp \ get.cpp put.cpp PayloadFile.cpp FileChunks.cpp \ information_collector.cpp cachecheck.cpp tools.cpp \ arex.h job.h PayloadFile.h FileChunks.h tools.h libarex_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) # Needs real cleaning in respect to dependencies libarex_la_LIBADD = \ $(GRIDMANAGER_LIBS) \ $(top_builddir)/src/hed/libs/infosys/libarcinfosys.la \ $(top_builddir)/src/hed/libs/ws-addressing/libarcwsaddressing.la \ $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(top_builddir)/src/hed/libs/security/libarcsecurity.la \ $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/otokens/libarcotokens.la libarex_la_LDFLAGS = -no-undefined -avoid-version -module $(DBCXX_LIBS) test_cache_check_SOURCES = test_cache_check.cpp test_cache_check_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) test_cache_check_LDADD = \ $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLIBMM_LIBS) $(LIBXML2_LIBS) all: all-recursive .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/a-rex/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/a-rex/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): arc-arex: $(top_builddir)/config.status $(srcdir)/arc-arex.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-arex.service: $(top_builddir)/config.status $(srcdir)/arc-arex.service.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-arex-start: $(top_builddir)/config.status $(srcdir)/arc-arex-start.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-arex-ws: $(top_builddir)/config.status $(srcdir)/arc-arex-ws.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-arex-ws.service: $(top_builddir)/config.status $(srcdir)/arc-arex-ws.service.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-arex-ws-start: $(top_builddir)/config.status $(srcdir)/arc-arex-ws-start.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ a-rex-backtrace-collect: $(top_builddir)/config.status $(srcdir)/a-rex-backtrace-collect.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ a-rex-backtrace-collect.8: $(top_builddir)/config.status $(srcdir)/a-rex-backtrace-collect.8.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ perferator: $(top_builddir)/config.status $(srcdir)/perferator.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-pkglibLTLIBRARIES: $(pkglib_LTLIBRARIES) @$(NORMAL_INSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ list2="$$list2 $$p"; \ else :; fi; \ done; \ test -z "$$list2" || { \ echo " $(MKDIR_P) '$(DESTDIR)$(pkglibdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkglibdir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(pkglibdir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(pkglibdir)"; \ } uninstall-pkglibLTLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ for p in $$list; do \ $(am__strip_dir) \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(pkglibdir)/$$f'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(pkglibdir)/$$f"; \ done clean-pkglibLTLIBRARIES: -test -z "$(pkglib_LTLIBRARIES)" || rm -f $(pkglib_LTLIBRARIES) @list='$(pkglib_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libarex.la: $(libarex_la_OBJECTS) $(libarex_la_DEPENDENCIES) $(EXTRA_libarex_la_DEPENDENCIES) $(AM_V_CXXLD)$(libarex_la_LINK) -rpath $(pkglibdir) $(libarex_la_OBJECTS) $(libarex_la_LIBADD) $(LIBS) clean-noinstPROGRAMS: @list='$(noinst_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ 
list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list test_cache_check$(EXEEXT): $(test_cache_check_OBJECTS) $(test_cache_check_DEPENDENCIES) $(EXTRA_test_cache_check_DEPENDENCIES) @rm -f test_cache_check$(EXEEXT) $(AM_V_CXXLD)$(test_cache_check_LINK) $(test_cache_check_OBJECTS) $(test_cache_check_LDADD) $(LIBS) install-initdSCRIPTS: $(initd_SCRIPTS) @$(NORMAL_INSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(initddir)'"; \ $(MKDIR_P) "$(DESTDIR)$(initddir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(initddir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(initddir)$$dir" || exit $$?; \ } \ ; done uninstall-initdSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(initddir)'; $(am__uninstall_files_from_dir) install-pkgdataSCRIPTS: $(pkgdata_SCRIPTS) @$(NORMAL_INSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgdatadir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgdatadir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(pkgdatadir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(pkgdatadir)$$dir" || exit $$?; \ } \ ; done uninstall-pkgdataSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(pkgdatadir)'; $(am__uninstall_files_from_dir) install-pkglibexecSCRIPTS: $(pkglibexec_SCRIPTS) @$(NORMAL_INSTALL) @list='$(pkglibexec_SCRIPTS)'; test -n "$(pkglibexecdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkglibexecdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkglibexecdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ 
-e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(pkglibexecdir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(pkglibexecdir)$$dir" || exit $$?; \ } \ ; done uninstall-pkglibexecSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(pkglibexec_SCRIPTS)'; test -n "$(pkglibexecdir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(pkglibexecdir)'; $(am__uninstall_files_from_dir) install-sbinSCRIPTS: $(sbin_SCRIPTS) @$(NORMAL_INSTALL) @list='$(sbin_SCRIPTS)'; test -n "$(sbindir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(sbindir)'"; \ $(MKDIR_P) "$(DESTDIR)$(sbindir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(sbindir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(sbindir)$$dir" || exit $$?; \ } \ ; done uninstall-sbinSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(sbin_SCRIPTS)'; test -n "$(sbindir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(sbindir)'; $(am__uninstall_files_from_dir) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-FileChunks.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-PayloadFile.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-arex.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-cachecheck.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-change_activity_status.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-create_activity.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-faults.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-get.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-get_activity_statuses.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-information_collector.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-job.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-put.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarex_la-tools.Plo@am__quote@ @AMDEP_TRUE@@am__include@ 
@am__quote@./$(DEPDIR)/libarex_la-update_credentials.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_cache_check-test_cache_check.Po@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libarex_la-arex.lo: arex.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-arex.lo -MD -MP -MF $(DEPDIR)/libarex_la-arex.Tpo -c -o libarex_la-arex.lo `test -f 'arex.cpp' || echo '$(srcdir)/'`arex.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-arex.Tpo $(DEPDIR)/libarex_la-arex.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arex.cpp' object='libarex_la-arex.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-arex.lo `test -f 'arex.cpp' || echo '$(srcdir)/'`arex.cpp libarex_la-job.lo: job.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-job.lo -MD -MP -MF $(DEPDIR)/libarex_la-job.Tpo -c -o libarex_la-job.lo `test -f 'job.cpp' || echo '$(srcdir)/'`job.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-job.Tpo $(DEPDIR)/libarex_la-job.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='job.cpp' object='libarex_la-job.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-job.lo `test -f 'job.cpp' || echo 
'$(srcdir)/'`job.cpp libarex_la-create_activity.lo: create_activity.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-create_activity.lo -MD -MP -MF $(DEPDIR)/libarex_la-create_activity.Tpo -c -o libarex_la-create_activity.lo `test -f 'create_activity.cpp' || echo '$(srcdir)/'`create_activity.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-create_activity.Tpo $(DEPDIR)/libarex_la-create_activity.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='create_activity.cpp' object='libarex_la-create_activity.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-create_activity.lo `test -f 'create_activity.cpp' || echo '$(srcdir)/'`create_activity.cpp libarex_la-get_activity_statuses.lo: get_activity_statuses.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-get_activity_statuses.lo -MD -MP -MF $(DEPDIR)/libarex_la-get_activity_statuses.Tpo -c -o libarex_la-get_activity_statuses.lo `test -f 'get_activity_statuses.cpp' || echo '$(srcdir)/'`get_activity_statuses.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-get_activity_statuses.Tpo $(DEPDIR)/libarex_la-get_activity_statuses.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='get_activity_statuses.cpp' object='libarex_la-get_activity_statuses.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-get_activity_statuses.lo `test -f 'get_activity_statuses.cpp' || echo '$(srcdir)/'`get_activity_statuses.cpp libarex_la-change_activity_status.lo: change_activity_status.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-change_activity_status.lo -MD -MP -MF $(DEPDIR)/libarex_la-change_activity_status.Tpo -c -o libarex_la-change_activity_status.lo `test -f 'change_activity_status.cpp' || echo '$(srcdir)/'`change_activity_status.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-change_activity_status.Tpo $(DEPDIR)/libarex_la-change_activity_status.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='change_activity_status.cpp' object='libarex_la-change_activity_status.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) 
$(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-change_activity_status.lo `test -f 'change_activity_status.cpp' || echo '$(srcdir)/'`change_activity_status.cpp libarex_la-update_credentials.lo: update_credentials.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-update_credentials.lo -MD -MP -MF $(DEPDIR)/libarex_la-update_credentials.Tpo -c -o libarex_la-update_credentials.lo `test -f 'update_credentials.cpp' || echo '$(srcdir)/'`update_credentials.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-update_credentials.Tpo $(DEPDIR)/libarex_la-update_credentials.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='update_credentials.cpp' object='libarex_la-update_credentials.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-update_credentials.lo `test -f 'update_credentials.cpp' || echo '$(srcdir)/'`update_credentials.cpp libarex_la-faults.lo: faults.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-faults.lo -MD -MP -MF $(DEPDIR)/libarex_la-faults.Tpo -c -o libarex_la-faults.lo `test -f 'faults.cpp' || echo '$(srcdir)/'`faults.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-faults.Tpo $(DEPDIR)/libarex_la-faults.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='faults.cpp' object='libarex_la-faults.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-faults.lo `test -f 'faults.cpp' || echo '$(srcdir)/'`faults.cpp libarex_la-get.lo: get.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-get.lo -MD -MP -MF $(DEPDIR)/libarex_la-get.Tpo -c -o libarex_la-get.lo `test -f 'get.cpp' || echo '$(srcdir)/'`get.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-get.Tpo $(DEPDIR)/libarex_la-get.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='get.cpp' object='libarex_la-get.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-get.lo `test -f 'get.cpp' || echo '$(srcdir)/'`get.cpp libarex_la-put.lo: put.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX 
$(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-put.lo -MD -MP -MF $(DEPDIR)/libarex_la-put.Tpo -c -o libarex_la-put.lo `test -f 'put.cpp' || echo '$(srcdir)/'`put.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-put.Tpo $(DEPDIR)/libarex_la-put.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='put.cpp' object='libarex_la-put.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-put.lo `test -f 'put.cpp' || echo '$(srcdir)/'`put.cpp libarex_la-PayloadFile.lo: PayloadFile.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-PayloadFile.lo -MD -MP -MF $(DEPDIR)/libarex_la-PayloadFile.Tpo -c -o libarex_la-PayloadFile.lo `test -f 'PayloadFile.cpp' || echo '$(srcdir)/'`PayloadFile.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-PayloadFile.Tpo $(DEPDIR)/libarex_la-PayloadFile.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='PayloadFile.cpp' object='libarex_la-PayloadFile.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-PayloadFile.lo `test -f 'PayloadFile.cpp' || echo '$(srcdir)/'`PayloadFile.cpp libarex_la-FileChunks.lo: FileChunks.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-FileChunks.lo -MD -MP -MF $(DEPDIR)/libarex_la-FileChunks.Tpo -c -o libarex_la-FileChunks.lo `test -f 'FileChunks.cpp' || echo '$(srcdir)/'`FileChunks.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-FileChunks.Tpo $(DEPDIR)/libarex_la-FileChunks.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='FileChunks.cpp' object='libarex_la-FileChunks.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-FileChunks.lo `test -f 'FileChunks.cpp' || echo '$(srcdir)/'`FileChunks.cpp libarex_la-information_collector.lo: information_collector.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-information_collector.lo -MD -MP -MF $(DEPDIR)/libarex_la-information_collector.Tpo -c -o 
libarex_la-information_collector.lo `test -f 'information_collector.cpp' || echo '$(srcdir)/'`information_collector.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-information_collector.Tpo $(DEPDIR)/libarex_la-information_collector.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='information_collector.cpp' object='libarex_la-information_collector.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-information_collector.lo `test -f 'information_collector.cpp' || echo '$(srcdir)/'`information_collector.cpp libarex_la-cachecheck.lo: cachecheck.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-cachecheck.lo -MD -MP -MF $(DEPDIR)/libarex_la-cachecheck.Tpo -c -o libarex_la-cachecheck.lo `test -f 'cachecheck.cpp' || echo '$(srcdir)/'`cachecheck.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-cachecheck.Tpo $(DEPDIR)/libarex_la-cachecheck.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='cachecheck.cpp' object='libarex_la-cachecheck.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-cachecheck.lo `test -f 'cachecheck.cpp' || echo '$(srcdir)/'`cachecheck.cpp libarex_la-tools.lo: tools.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -MT libarex_la-tools.lo -MD -MP -MF $(DEPDIR)/libarex_la-tools.Tpo -c -o libarex_la-tools.lo `test -f 'tools.cpp' || echo '$(srcdir)/'`tools.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarex_la-tools.Tpo $(DEPDIR)/libarex_la-tools.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='tools.cpp' object='libarex_la-tools.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarex_la_CXXFLAGS) $(CXXFLAGS) -c -o libarex_la-tools.lo `test -f 'tools.cpp' || echo '$(srcdir)/'`tools.cpp test_cache_check-test_cache_check.o: test_cache_check.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(test_cache_check_CXXFLAGS) $(CXXFLAGS) -MT test_cache_check-test_cache_check.o -MD -MP -MF $(DEPDIR)/test_cache_check-test_cache_check.Tpo -c -o test_cache_check-test_cache_check.o `test -f 'test_cache_check.cpp' || echo '$(srcdir)/'`test_cache_check.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/test_cache_check-test_cache_check.Tpo 
$(DEPDIR)/test_cache_check-test_cache_check.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='test_cache_check.cpp' object='test_cache_check-test_cache_check.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(test_cache_check_CXXFLAGS) $(CXXFLAGS) -c -o test_cache_check-test_cache_check.o `test -f 'test_cache_check.cpp' || echo '$(srcdir)/'`test_cache_check.cpp test_cache_check-test_cache_check.obj: test_cache_check.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(test_cache_check_CXXFLAGS) $(CXXFLAGS) -MT test_cache_check-test_cache_check.obj -MD -MP -MF $(DEPDIR)/test_cache_check-test_cache_check.Tpo -c -o test_cache_check-test_cache_check.obj `if test -f 'test_cache_check.cpp'; then $(CYGPATH_W) 'test_cache_check.cpp'; else $(CYGPATH_W) '$(srcdir)/test_cache_check.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/test_cache_check-test_cache_check.Tpo $(DEPDIR)/test_cache_check-test_cache_check.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='test_cache_check.cpp' object='test_cache_check-test_cache_check.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(test_cache_check_CXXFLAGS) $(CXXFLAGS) -c -o test_cache_check-test_cache_check.obj `if test -f 'test_cache_check.cpp'; then $(CYGPATH_W) 'test_cache_check.cpp'; else $(CYGPATH_W) '$(srcdir)/test_cache_check.cpp'; fi` mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man8: $(man_MANS) @$(NORMAL_INSTALL) @list1=''; \ list2='$(man_MANS)'; \ test -n "$(man8dir)" \ && test -n "`echo $$list1$$list2`" \ || exit 0; \ echo " $(MKDIR_P) '$(DESTDIR)$(man8dir)'"; \ $(MKDIR_P) "$(DESTDIR)$(man8dir)" || exit 1; \ { for i in $$list1; do echo "$$i"; done; \ if test -n "$$list2"; then \ for i in $$list2; do echo "$$i"; done \ | sed -n '/\.8[a-z]*$$/p'; \ fi; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^8][0-9a-z]*$$,8,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man8dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man8dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man8dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man8dir)" || exit $$?; }; \ done; } uninstall-man8: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man8dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.8[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^8][0-9a-z]*$$,8,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ dir='$(DESTDIR)$(man8dir)'; $(am__uninstall_files_from_dir) install-unitsDATA: $(units_DATA) @$(NORMAL_INSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(unitsdir)'"; \ $(MKDIR_P) 
"$(DESTDIR)$(unitsdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(unitsdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(unitsdir)" || exit $$?; \ done uninstall-unitsDATA: @$(NORMAL_UNINSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(unitsdir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(LTLIBRARIES) $(PROGRAMS) $(SCRIPTS) $(MANS) $(DATA) installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(pkglibdir)" "$(DESTDIR)$(initddir)" "$(DESTDIR)$(pkgdatadir)" "$(DESTDIR)$(pkglibexecdir)" "$(DESTDIR)$(sbindir)" "$(DESTDIR)$(man8dir)" "$(DESTDIR)$(unitsdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
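# The comment above notes that 'make' variable values are best changed either in
# 'config.status' or on the 'make' command line rather than by editing the
# generated Makefiles. A minimal illustration of the command-line form (the flag
# values and the staging directory below are hypothetical, not taken from this
# package):
#
#   make CXXFLAGS="-O2 -g"                 # one-off override for this build tree
#   make install DESTDIR=/tmp/arc-staging  # staged install; the install-* rules
#                                          # above prefix each target directory
#                                          # with $(DESTDIR)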
clean: clean-recursive clean-am: clean-generic clean-libtool clean-noinstPROGRAMS \ clean-pkglibLTLIBRARIES mostlyclean-am distclean: distclean-recursive -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-data-local install-initdSCRIPTS install-man \ install-pkgdataSCRIPTS install-unitsDATA install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-pkglibLTLIBRARIES install-pkglibexecSCRIPTS \ install-sbinSCRIPTS install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-man8 install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-initdSCRIPTS uninstall-local uninstall-man \ uninstall-pkgdataSCRIPTS uninstall-pkglibLTLIBRARIES \ uninstall-pkglibexecSCRIPTS uninstall-sbinSCRIPTS \ uninstall-unitsDATA uninstall-man: uninstall-man8 .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool \ clean-noinstPROGRAMS clean-pkglibLTLIBRARIES cscopelist-am \ ctags ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-data-local install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-initdSCRIPTS install-man \ install-man8 install-pdf install-pdf-am install-pkgdataSCRIPTS \ install-pkglibLTLIBRARIES install-pkglibexecSCRIPTS install-ps \ install-ps-am install-sbinSCRIPTS install-strip \ install-unitsDATA installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am uninstall-initdSCRIPTS uninstall-local \ uninstall-man uninstall-man8 uninstall-pkgdataSCRIPTS \ uninstall-pkglibLTLIBRARIES uninstall-pkglibexecSCRIPTS \ uninstall-sbinSCRIPTS uninstall-unitsDATA $(top_builddir)/src/hed/libs/common/libarccommon.la install-data-local: $(MKDIR_P) "$(DESTDIR)$(sysconfdir)" if test ! -e $(DESTDIR)$(sysconfdir)/arc.conf; then $(INSTALL_DATA) $(srcdir)/arc.zero.conf $(DESTDIR)$(sysconfdir)/arc.conf; fi uninstall-local: rm -f $(DESTDIR)$(sysconfdir)/arc.conf # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT:
nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/arc-sshfs-mount0000644000000000000000000000013214152153376023746 xustar000000000000000030 mtime=1638455038.406645989 30 atime=1638455038.500647402 30 ctime=1638455099.422562782
nordugrid-arc-6.14.0/src/services/a-rex/arc-sshfs-mount0000644000175000002070000000657614152153376023741 0ustar00mockbuildmock00000000000000
#!/bin/bash
#
# This script contains functions used to mount/umount session, runtime,
# and cache directories on a remote cluster front-end using SSHFS. It is
# supposed to be sourced from the a-rex init script.
#
mount_sshfs() {
    # Read from arc.conf
    REMOTE_HOST=`readconfigvar "$ARC_CONFIG" lrms/ssh remote_host`
    REMOTE_USER=`readconfigvar "$ARC_CONFIG" lrms/ssh remote_user`
    SSH_TIMEOUT=`readconfigvar "$ARC_CONFIG" lrms/ssh ssh_timeout`
    IDENTITY_FILE=`readconfigvar "$ARC_CONFIG" lrms/ssh private_key`
    GRIDMAP=`readconfigvar "$ARC_CONFIG" common/mapping gridmap`
    # Get user and group id and save to mapfiles
    user=`head -1 $GRIDMAP | sed 's/^"[^"]*" //'`
    IDMAPDIR=`mktemp -d`
    UIDF=${IDMAPDIR}/${user}.uidfile
    GIDF=${IDMAPDIR}/${user}.gidfile
    printf "%s:%s\n" "${user}" "`ssh -i ${IDENTITY_FILE} ${REMOTE_USER}@${REMOTE_HOST} "id -u"`" > ${UIDF}
    printf "%s:%s\n" "`id -g -n ${user}`" "`ssh -i ${IDENTITY_FILE} ${REMOTE_USER}@${REMOTE_HOST} "id -g"`" > ${GIDF}
    for MOUNTDIR in sessiondir runtimedir cachedir; do
        LOCALDIR=`readconfigvar "$ARC_CONFIG" arex ${MOUNTDIR}`
        REMOTEDIR=`readconfigvar "$ARC_CONFIG" lrms/ssh remote_${MOUNTDIR}`
        if [ -z ${REMOTEDIR} ]; then
            log_failure_msg "Directory attribute remote_${MOUNTDIR} not set in ${ARC_CONFIG}"
            RETVAL=1
            break
        fi
        if [ -z ${LOCALDIR} ]; then
            continue
        fi
        # Stat will fail if transport endpoints are not connected
        if [ "$(mount | grep ${LOCALDIR})" ] && [ "$(stat ${LOCALDIR} 2>/dev/null)" ]; then
            log_warning_msg "${LOCALDIR} is already mounted"
            continue
        fi
        # In case of bad disconnect, unmount before remount
        fusermount -u ${LOCALDIR} 2>/dev/null
        if [ "$(ls -A ${LOCALDIR})" ]; then
            log_failure_msg "Directory ${LOCALDIR} not empty."
            RETVAL=1
            break
        fi
        # The reconnect and ServerAliveInterval options should prevent
        # the "transport endpoint not connected" error from occurring
        SSHFS_OPTIONS="allow_other"
        SSHFS_OPTIONS="${SSHFS_OPTIONS},reconnect"
        SSHFS_OPTIONS="${SSHFS_OPTIONS},idmap=file"
        SSHFS_OPTIONS="${SSHFS_OPTIONS},uidfile=${UIDF}"
        SSHFS_OPTIONS="${SSHFS_OPTIONS},gidfile=${GIDF}"
        SSHFS_OPTIONS="${SSHFS_OPTIONS},ConnectTimeout=${SSH_TIMEOUT:-10}"
        SSHFS_OPTIONS="${SSHFS_OPTIONS},ServerAliveInterval=10"
        SSHFS_OPTIONS="${SSHFS_OPTIONS},IdentityFile=${IDENTITY_FILE}"
        sshfs -o ${SSHFS_OPTIONS} ${REMOTE_USER}@${REMOTE_HOST}:${REMOTEDIR} ${LOCALDIR} 2>&1>/dev/null
        RETVAL=$?
        if [ $RETVAL != 0 ]; then
            log_failure_msg "Unable to mount ${REMOTE_HOST}:${REMOTEDIR} (remote_${MOUNTDIR} attribute) directory"
            break
        fi
    done
    rm -rf ${IDMAPDIR}
    return $RETVAL
}
unmount_sshfs() {
    for MOUNTDIR in sessiondir runtimedir cachedir; do
        LOCALDIR=`readconfigvar "$ARC_CONFIG" arex ${MOUNTDIR}`
        REMOTEDIR=`readconfigvar "$ARC_CONFIG" lrms/ssh remote_${MOUNTDIR}`
        if [ ! -z ${REMOTEDIR} ] && [ "$(mount | grep ${LOCALDIR})" ]; then
            timeout 10 fusermount -u ${LOCALDIR} > /dev/null
            if [ $? != 0 ]; then
                log_failure_msg "sshfs ${MOUNTDIR} unmount - timeout"
                RETVAL=1
            fi
        fi
    done
}
nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/rte0000644000000000000000000000013214152153474021506 xustar000000000000000030 mtime=1638455100.417577732 30 atime=1638455103.997631524 30 ctime=1638455100.417577732
nordugrid-arc-6.14.0/src/services/a-rex/rte/0000755000175000002070000000000014152153474021550 5ustar00mockbuildmock00000000000000
nordugrid-arc-6.14.0/src/services/a-rex/rte/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376023620 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455100.417577732
nordugrid-arc-6.14.0/src/services/a-rex/rte/Makefile.am0000644000175000002070000000044014152153376023603 0ustar00mockbuildmock00000000000000
arcrteenvdir = $(pkgdatadir)/rte/ENV
arcrteenv_DATA = ENV/PROXY ENV/RTE ENV/LRMS-SCRATCH ENV/CANDYPOND ENV/SINGULARITY
arcrteenvcondordir = $(pkgdatadir)/rte/ENV/CONDOR
arcrteenvcondor_DATA = ENV/CONDOR/DOCKER
EXTRA_DIST = ENV/RTE ENV/LRMS-SCRATCH ENV/SINGULARITY $(arcrteenvcondor_DATA)
nordugrid-arc-6.14.0/src/services/a-rex/rte/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153434023624 xustar000000000000000030 mtime=1638455068.526098547 30 atime=1638455090.560429624 30 ctime=1638455100.416577717
nordugrid-arc-6.14.0/src/services/a-rex/rte/Makefile.in0000644000175000002070000005362314152153434023622 0ustar00mockbuildmock00000000000000
# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?)
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/a-rex/rte DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p 
in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(arcrteenvdir)" \ "$(DESTDIR)$(arcrteenvcondordir)" DATA = $(arcrteenv_DATA) $(arcrteenvcondor_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = 
@GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ 
PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ arcrteenvdir = $(pkgdatadir)/rte/ENV arcrteenv_DATA = ENV/PROXY ENV/RTE ENV/LRMS-SCRATCH ENV/CANDYPOND ENV/SINGULARITY arcrteenvcondordir = $(pkgdatadir)/rte/ENV/CONDOR arcrteenvcondor_DATA = ENV/CONDOR/DOCKER EXTRA_DIST = ENV/RTE ENV/LRMS-SCRATCH ENV/SINGULARITY $(arcrteenvcondor_DATA) all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case 
'$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/a-rex/rte/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/a-rex/rte/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-arcrteenvDATA: $(arcrteenv_DATA) @$(NORMAL_INSTALL) @list='$(arcrteenv_DATA)'; test -n "$(arcrteenvdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(arcrteenvdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(arcrteenvdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(arcrteenvdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(arcrteenvdir)" || exit $$?; \ done uninstall-arcrteenvDATA: @$(NORMAL_UNINSTALL) @list='$(arcrteenv_DATA)'; test -n "$(arcrteenvdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(arcrteenvdir)'; $(am__uninstall_files_from_dir) install-arcrteenvcondorDATA: $(arcrteenvcondor_DATA) @$(NORMAL_INSTALL) @list='$(arcrteenvcondor_DATA)'; test -n "$(arcrteenvcondordir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(arcrteenvcondordir)'"; \ $(MKDIR_P) "$(DESTDIR)$(arcrteenvcondordir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(arcrteenvcondordir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(arcrteenvcondordir)" || exit $$?; \ done uninstall-arcrteenvcondorDATA: @$(NORMAL_UNINSTALL) @list='$(arcrteenvcondor_DATA)'; test -n "$(arcrteenvcondordir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(arcrteenvcondordir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(DATA) installdirs: for dir in "$(DESTDIR)$(arcrteenvdir)" "$(DESTDIR)$(arcrteenvcondordir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-arcrteenvDATA install-arcrteenvcondorDATA install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-arcrteenvDATA uninstall-arcrteenvcondorDATA .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-arcrteenvDATA \ install-arcrteenvcondorDATA install-data install-data-am \ install-dvi install-dvi-am install-exec install-exec-am \ install-html install-html-am install-info install-info-am \ install-man install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags-am uninstall uninstall-am \ uninstall-arcrteenvDATA uninstall-arcrteenvcondorDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/a-rex/rte/PaxHeaders.30264/ENV0000644000000000000000000000013214152153474022136 xustar000000000000000030 mtime=1638455100.419577762 30 atime=1638455103.997631524 30 ctime=1638455100.419577762 nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/0000755000175000002070000000000014152153474022200 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/PaxHeaders.30264/CANDYPOND.in0000644000000000000000000000013214152153376024063 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455095.726507247 nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/CANDYPOND.in0000644000175000002070000000322114152153376024046 0ustar00mockbuildmock00000000000000# description: makes ARC Candypond client availble on the Worker Nodes and ready to be used # param:CANDYPOND_URL:string:auto:Manually redefine Candypond URL CANDYPOND_URL=${CANDYPOND_URL:-"auto"} ARC_LIBEXEC_DIR="${ARC_LOCATION:-@prefix@}/@pkglibexecsubdir@" ARC_CONFIG_PARSER="$ARC_LIBEXEC_DIR/arcconfig-parser --load -r $ARC_CONFIG" if [ "x$1" = "x0" ]; then # determine Candypond URL if [ "x$CANDYPOND_URL" = "xauto" ]; then if [ ! -r "$ARC_CONFIG" ]; then echo "WARNING: arc.conf is not readable, assumming Candypond is enabled and default URL is used." >&2 CANDYPOND_URL="https://$(hostname -f):443/arex/candypond" else # check Candypond is enabled $ARC_CONFIG_PARSER -b arex/ws/candypond if [ $? -ne 0 ]; then echo "FATAL: Candypond is not enabled. Add [arex/ws/candypond] to the arc.conf." >&2 exit 1 fi # get URL CANDYPOND_URL="$( $ARC_CONFIG_PARSER -b arex/ws -o wsurl )/candypond" fi fi # copy 'arccandypond' tool to sessiondir mkdir -p ${joboption_directory}/arc/bin/ cp $ARC_LIBEXEC_DIR/arccandypond ${joboption_directory}/arc/bin/ # add URL to job environment (find the last and add to bottom) arc_env_idx=0 arc_env_var="joboption_env_${arc_env_idx}" while eval "test -n \"\$$arc_env_var\""; do arc_env_idx=$(( arc_env_idx + 1 )) arc_env_var="joboption_env_${arc_env_idx}" done eval "export ${arc_env_var}=ARC_CANDYPOND_URL='$CANDYPOND_URL'" elif [ "x$1" = "x1" ]; then # add to PATH export PATH="${RUNTIME_JOB_DIR}/arc/bin:$PATH" fi nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/PaxHeaders.30264/CONDOR0000644000000000000000000000013214152153474023122 xustar000000000000000030 mtime=1638455100.420577777 30 atime=1638455103.997631524 30 ctime=1638455100.420577777 nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/CONDOR/0000755000175000002070000000000014152153474023164 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/CONDOR/PaxHeaders.30264/DOCKER0000644000000000000000000000013214152153376024072 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455100.420577777 nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/CONDOR/DOCKER0000644000175000002070000000076714152153376024071 0ustar00mockbuildmock00000000000000# description: enables submission to Docker universe in HTCondor backend # param:DOCKER_IMAGE:string::Docker image to use for submitted jobs by default DOCKER_IMAGE=${DOCKER_IMAGE:-} if [ "x$1" = "x0" ]; then export DOCKER_UNIVERSE=docker # docker image name can be redefined by user as RTE parameter [ -n "$2" ] && DOCKER_IMAGE="$2" export DOCKER_IMAGE # in case Docker image is used on WN add this to accounting information export ACCOUNTING_WN_INSTANCE="${DOCKER_IMAGE}" fi 
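The ENV/CANDYPOND RTE above exports the Candypond URL into the job environment by scanning the numbered joboption_env_* variables for the first unused index and appending a new entry there. The following is a minimal standalone sketch of that append pattern; the two pre-set variables and the URL are made-up illustration values, not anything defined by ARC itself (in a real submission the joboption_env_* slots are filled in by the A-REX job script machinery).

#!/bin/sh
# Sketch of the joboption_env_* append loop from ENV/CANDYPOND (illustrative values).
joboption_env_0="FOO=bar"                                  # made-up existing entry
joboption_env_1="BAZ=qux"                                  # made-up existing entry
CANDYPOND_URL="https://ce.example.org:443/arex/candypond"  # hypothetical URL

# Walk joboption_env_0, joboption_env_1, ... until the first unset index.
arc_env_idx=0
arc_env_var="joboption_env_${arc_env_idx}"
while eval "test -n \"\$$arc_env_var\""; do
    arc_env_idx=$(( arc_env_idx + 1 ))
    arc_env_var="joboption_env_${arc_env_idx}"
done

# Append the new entry at the first free slot (joboption_env_2 here).
eval "export ${arc_env_var}=ARC_CANDYPOND_URL='$CANDYPOND_URL'"
eval "echo \"added: ${arc_env_var}=\$$arc_env_var\""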
nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/PaxHeaders.30264/LRMS-SCRATCH0000644000000000000000000000013214152153376024001 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455100.418577747 nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/LRMS-SCRATCH0000644000175000002070000000134514152153376023771 0ustar00mockbuildmock00000000000000# description: enables the usage of local to WN scratch directory defined by LRMS # param:SCRATCH_VAR:string:WORKDIR:Variable name that holds the path to job-specific WN scratch directory # param:TMPDIR_LOCATION:string::Define the TMPDIR path on WN. Variable names can be used as a part of the path, e.g. '$WORKDIR/tmp' SCRATCH_VAR="${SCRATCH_VAR:-WORKDIR}" TMPDIR_LOCATION="${TMPDIR_LOCATION:-}" if [ "x$1" = "x0" ]; then RUNTIME_LOCAL_SCRATCH_DIR="\${${SCRATCH_VAR}}" elif [ "x$1" = "x1" ]; then if [ -n "${TMPDIR_LOCATION}" ]; then if [ ! -d "${TMPDIR_LOCATION}" ]; then mkdir "${TMPDIR_LOCATION}" chmod 1777 "${TMPDIR_LOCATION}" fi export TMPDIR="${TMPDIR_LOCATION}" fi fi nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/PaxHeaders.30264/PROXY.in0000644000000000000000000000013214152153376023465 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455095.725507232 nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/PROXY.in0000644000175000002070000000333014152153376023451 0ustar00mockbuildmock00000000000000# description: copy proxy certificate to the job session directory # param:COPY_CACERT_DIR:Yes,No:Yes:If set to Yes, CA certificate dir will be copied to the session directory along with proxy certificate # param:USE_DELEGATION_DB:Yes,No:No:If set to Yes RTE will try to extract proxy certificate from A-REX delegation DB (works in limited number of cases) COPY_CACERT_DIR="${COPY_CACERT_DIR:-Yes}" USE_DELEGATION_DB="${USE_DELEGATION_DB:-No}" X509_CERT_DIR="${X509_CERT_DIR:-/etc/grid-security/certificates}" if [ "x$1" = "x0" ]; then if [ "x$COPY_CACERT_DIR" = "xYes" ]; then mkdir -pv ${joboption_directory}/arc/certificates/ cp -rv ${X509_CERT_DIR}/* ${joboption_directory}/arc/certificates/ fi if [ "x$USE_DELEGATION_DB" = "xYes" ]; then GM_JOBS="${ARC_LOCATION:-@prefix@}/@pkglibexecsubdir@/gm-jobs" # try DB export or fall back to proxy file $GM_JOBS -J -S -D ${joboption_gridid} -o "${joboption_directory}/user.proxy" || USE_DELEGATION_DB="No" fi if [ "x$USE_DELEGATION_DB" = "xNo" ]; then cat "${joboption_controldir}/job.${joboption_gridid}.proxy" > "${joboption_directory}/user.proxy" fi chmod 600 "${joboption_directory}/user.proxy" elif [ "x$1" = "x1" ]; then export X509_USER_PROXY="${X509_USER_PROXY:-${RUNTIME_JOB_DIR}/user.proxy}" export X509_USER_CERT="${X509_USER_CERT:-${RUNTIME_JOB_DIR}/user.proxy}" if [ "x$COPY_CACERT_DIR" = "xYes" ]; then export X509_CERT_DIR="${RUNTIME_JOB_DIR}/arc/certificates" else export X509_CERT_DIR="${X509_CERT_DIR:-/etc/grid-security/certificates}" fi elif [ "x$1" = "x2" ]; then if [ "x$COPY_CACERT_DIR" = "xYes" ]; then rm -rf ${RUNTIME_JOB_DIR}/arc/certificates fi fi nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/PaxHeaders.30264/SINGULARITY0000644000000000000000000000013214152153376023751 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455100.419577762 nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/SINGULARITY0000644000175000002070000000324014152153376023735 0ustar00mockbuildmock00000000000000# description: executes the job inside singularity container # param:SINGULARITY_IMAGE:string:NULL:singularity 
image or tree per VO, key:value comma separated, key default if VO not matched # param:SINGULARITY_OPTIONS:string: :singularity options # param:BINARY_PATH:string:/usr/bin/singularity:singularity binary location SINGULARITY_OPTIONS="${SINGULARITY_OPTIONS:-}" SINGULARITY_IMAGE="${SINGULARITY_IMAGE:-}" BINARY_PATH="${BINARY_PATH:-'/usr/bin/singularity'}" BINARY_PATH="${4:-$BINARY_PATH}" DEFAULT_IMAGE="NULL" if [ "x$1" = "x0" ]; then # get VO #vo=`arcproxy -P $joboption_controldir/job.$joboption_gridid.proxy -i vomsVO 2> /dev/null` localfile=${joboption_controldir}/job.${joboption_gridid}.local vo=`grep voms= $localfile | awk -F / '{print $2}'|sort -u` values=`echo $SINGULARITY_IMAGE | sed -e 's/,/ /g'` for i in $values do voname=`echo $i | awk -F : '{print $1}'` voimage=`echo $i | awk -F : '{print $2}'` if [ "xdefault" = "x$voname" ]; then DEFAULT_IMAGE=$voimage fi if [ "x$vo" = "x$voname" ] ; then IMAGE=$voimage fi done IMAGE="${IMAGE:-$DEFAULT_IMAGE}" # explicit image, NULL skips container IMAGE="${2:-$IMAGE}" # unquote IMAGE temp="${IMAGE%\"}" temp="${temp#\"}" IMAGE=$temp # Check if singularity already used/set by another RTE echo $joboption_args |grep -q singularity; sused=$? if [ "x$IMAGE" != "xNULL" ] && [ "x$sused" == "x1" ] ; then joboption_args="$BINARY_PATH exec $SINGULARITY_OPTIONS --home \${RUNTIME_JOB_DIR} $IMAGE $joboption_args" # account singularity image usage export ACCOUNTING_WN_INSTANCE="${IMAGE}" fi fi nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/PaxHeaders.30264/RTE0000644000000000000000000000013214152153376022571 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455100.418577747 nordugrid-arc-6.14.0/src/services/a-rex/rte/ENV/RTE0000644000175000002070000000310414152153376022554 0ustar00mockbuildmock00000000000000# description: copy RunTimeEnvironment scripts to the job session directory if [ "$1" = "0" ] ; then runtimeenv_idx=0 runtimeenv_var="joboption_runtime_${runtimeenv_idx}" eval "runtimeenv_name=\"\${${runtimeenv_var}}\"" while [ -n "${runtimeenv_name}" ]; do # define safe-defaults arcce_runtimeenv_path=/dev/null arcce_runtimeenv_params_path=/dev/null # find RTE location (enabled vs default) if [ -e "${joboption_controldir}/rte/enabled/${runtimeenv_name}" ]; then arcce_runtimeenv_path="${joboption_controldir}/rte/enabled/${runtimeenv_name}" else arcce_runtimeenv_path="${joboption_controldir}/rte/default/${runtimeenv_name}" fi # check RTE have parameters file if [ -e "${joboption_controldir}/rte/params/${runtimeenv_name}" ]; then arcce_runtimeenv_params_path="${joboption_controldir}/rte/params/${runtimeenv_name}" fi # copy RTE script to session directory sessiondir_runtimeenv_path="${joboption_directory}/rte/${runtimeenv_name}" mkdir -p "${sessiondir_runtimeenv_path%/*}" cat "$arcce_runtimeenv_params_path" > "$sessiondir_runtimeenv_path" cat "$arcce_runtimeenv_path" >> "$sessiondir_runtimeenv_path" # next RTE runtimeenv_idx=$((runtimeenv_idx+1)) runtimeenv_var="joboption_runtime_${runtimeenv_idx}" eval "runtimeenv_name=\"\${${runtimeenv_var}}\"" done unset runtimeenv_idx runtimeenv_var sessiondir_runtimeenv_path arcce_runtimeenv_path arcce_runtimeenv_params_path fi true nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/arc-arex-ws-start.in0000644000000000000000000000013214152153376024606 xustar000000000000000030 mtime=1638455038.406645989 30 atime=1638455038.500647402 30 ctime=1638455099.400562451 nordugrid-arc-6.14.0/src/services/a-rex/arc-arex-ws-start.in0000644000175000002070000006013614152153376024601 
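SINGULARITY_IMAGE in the ENV/SINGULARITY RTE above is a comma-separated list of VO:image pairs, with the key 'default' used when the job's VO does not match any entry. The parsing is easy to exercise in isolation; below is a minimal sketch of that selection logic with made-up image paths and a hard-coded VO name (the real RTE derives the VO from the voms= lines of the job's .local control file).

#!/bin/sh
# Sketch of the VO-to-image selection from ENV/SINGULARITY (illustrative values).
SINGULARITY_IMAGE="default:/images/base.sif,atlas:/images/atlas.sif"  # made-up paths
vo="atlas"                                                            # made-up VO name

DEFAULT_IMAGE="NULL"
IMAGE=""
values=`echo $SINGULARITY_IMAGE | sed -e 's/,/ /g'`
for i in $values
do
    voname=`echo $i | awk -F : '{print $1}'`
    voimage=`echo $i | awk -F : '{print $2}'`
    if [ "xdefault" = "x$voname" ]; then
        DEFAULT_IMAGE=$voimage
    fi
    if [ "x$vo" = "x$voname" ]; then
        IMAGE=$voimage
    fi
done
IMAGE="${IMAGE:-$DEFAULT_IMAGE}"   # fall back to the 'default' entry, or NULL
echo "selected image: $IMAGE"      # prints /images/atlas.sif for this input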
0ustar00mockbuildmock00000000000000#!/bin/bash export MALLOC_ARENA_MAX=2 add_library_path() { location="$1" if [ ! "x$location" = "x" ] ; then if [ ! "$location" = "/usr" ] ; then libdir="$location/lib" libdir64="$location/lib64" if [ -d "$libdir64" ] ; then if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH="$libdir64" else LD_LIBRARY_PATH="$libdir64:$LD_LIBRARY_PATH" fi fi if [ -d "$libdir" ] ; then if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH="$libdir" else LD_LIBRARY_PATH="$libdir:$LD_LIBRARY_PATH" fi fi fi fi } prog=arched RUN=yes send_systemd_notify() { # return if no systemd-notify found type systemd-notify >/dev/null 2>&1 || return systemd-notify "$@" } log_failure_msg() { send_systemd_notify --status "Error: $@" echo $@ } # sysconfig files if [ -r /etc/sysconfig/nordugrid ]; then . /etc/sysconfig/nordugrid elif [ -r /etc/default/nordugrid ]; then . /etc/default/nordugrid fi if [ -r /etc/sysconfig/arc-arex-ws ]; then . /etc/sysconfig/arc-arex-ws elif [ -r /etc/default/arc-arex-ws ]; then . /etc/default/arc-arex-ws fi # GLOBUS_LOCATION GLOBUS_LOCATION=${GLOBUS_LOCATION:-@DEFAULT_GLOBUS_LOCATION@} if [ -n "$GLOBUS_LOCATION" ]; then if [ ! -d "$GLOBUS_LOCATION" ]; then log_failure_msg "GLOBUS_LOCATION ($GLOBUS_LOCATION) not found" exit 1 fi export GLOBUS_LOCATION fi # ARC_LOCATION ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ ! -d "$ARC_LOCATION" ]; then log_failure_msg "ARC_LOCATION ($ARC_LOCATION) not found" exit 1 fi export ARC_LOCATION # VOMS_LOCATION VOMS_LOCATION=${VOMS_LOCATION:-@DEFAULT_VOMS_LOCATION@} # Prepare environment for executing various tools and main application add_library_path "$VOMS_LOCATION" add_library_path "$GLOBUS_LOCATION" if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH=$ARC_LOCATION/@libsubdir@:$ARC_LOCATION/@libsubdir@64 else LD_LIBRARY_PATH=$ARC_LOCATION/@libsubdir@:$ARC_LOCATION/@libsubdir@64:$LD_LIBRARY_PATH fi export LD_LIBRARY_PATH testconfigblock() { $ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --runconfig "$1" --load -b "$2" 2>/dev/null 1>&2 if [ $? -eq 0 ] ; then echo 'true' else echo 'false' fi } readorigconfigvar() { value=`$ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --config "$1" -b "$3" -o "$2" 2>/dev/null` if [ $? -eq 0 ] ; then echo "$value" exit 0 else exit 1 fi } readconfigvar() { fname="$1" optname="$2" blocks="" while [ ! -z "$3" ] ; do blocks="$blocks -b $3" shift done value=`$ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --runconfig "$fname" --load $blocks -o "$optname" 2>/dev/null` if [ $? -eq 0 ] ; then echo "$value" exit 0 else exit 1 fi } # ARC_CONFIG if [ "x$ARC_CONFIG" = "x" ]; then if [ -r $ARC_LOCATION/etc/arc.conf ]; then ARC_CONFIG=$ARC_LOCATION/etc/arc.conf elif [ -r /etc/arc.conf ]; then ARC_CONFIG=/etc/arc.conf fi fi # PID file PID_FILE=`readorigconfigvar "$ARC_CONFIG" pidfile arex/ws` if [ "x$PID_FILE" = "x" ]; then # Missing default value for pidfile means no service block is present log_failure_msg "ARC configuration is missing [arex/ws] block" exit 1 fi if [ "$1" = "--getpidfile" ] ; then echo $PID_FILE exit 0 fi ARC_RUNTIME_CONFIG=`echo "$PID_FILE" | sed 's#\([^\./]*\)\.[^\./]*$#\1#'` ARC_RUNTIME_CONFIG="${ARC_RUNTIME_CONFIG}.cfg" mkdir_for_user() { dirpath="$1" username="$2" groupname="$3" if [ ! -d "$dirpath" ] ; then mkdir -p "$dirpath" if [ ! -z "$username" ] ; then if [ ! -z "$groupname" ] ; then chown "$username:$groupname" "$dirpath" else chown "$username" "$dirpath" fi fi fi } mkfile_for_user() { filepath="$1" username="$2" groupname="$3" if [ ! 
-f "$filepath" ] ; then touch "$filepath" fi if [ ! -z "$username" ] ; then if [ ! -z "$groupname" ] ; then chown "$username:$groupname" "$filepath" else chown "$username" "$filepath" fi fi } prepare() { CMD="$ARC_LOCATION/sbin/$prog" if [ ! -x "$CMD" ]; then log_failure_msg "Missing $CMD executable" exit 1 fi if [ ! -r "$ARC_CONFIG" ]; then log_failure_msg "ARC configuration not found (usually /etc/arc.conf)" exit 1 fi # Pre-process configuration $ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --config "$ARC_CONFIG" --runconfig "$ARC_RUNTIME_CONFIG" --save 2>/dev/null if [ $? -ne 0 ] ; then log_failure_msg "ARC configuration processing failed" exit 1 fi # Creating configuration file of arched # Reading following information from config file: # Log file # Debug level # User name # ... LOGFILE=`readconfigvar "$ARC_RUNTIME_CONFIG" logfile arex` LOGLEVEL=`readconfigvar "$ARC_RUNTIME_CONFIG" loglevel arex` WATCHDOG=`readconfigvar "$ARC_RUNTIME_CONFIG" watchdog arex` USERNAME=`readconfigvar "$ARC_RUNTIME_CONFIG" user arex` GRIDTMPDIR=`readconfigvar "$ARC_RUNTIME_CONFIG" tmpdir arex` GROUPNAME=`echo "$USERNAME" | sed 's/^[^:]*//;s/^://'` USERNAME=`echo "$USERNAME" | sed 's/:.*//'` X509_USER_CERT=`readconfigvar "$ARC_RUNTIME_CONFIG" x509_host_cert common` X509_USER_KEY=`readconfigvar "$ARC_RUNTIME_CONFIG" x509_host_key common` X509_CERT_DIR=`readconfigvar "$ARC_RUNTIME_CONFIG" x509_cert_dir common` GLOBUS_TCP_PORT_RANGE=`readconfigvar "$ARC_RUNTIME_CONFIG" globus_tcp_port_range arex/data-staging` GLOBUS_UDP_PORT_RANGE=`readconfigvar "$ARC_RUNTIME_CONFIG" globus_udp_port_range arex/data-staging` VOMS_PROCESSING=`readconfigvar "$ARC_RUNTIME_CONFIG" voms_processing common` mapping_present=`testconfigblock "$ARC_RUNTIME_CONFIG" mapping` authtokens_present=`testconfigblock "$ARC_RUNTIME_CONFIG" authtokens` CIPHERS_STRING=`readconfigvar "$ARC_RUNTIME_CONFIG" tlsciphers arex/ws` PROTOCOLS_STRING=`readconfigvar "$ARC_RUNTIME_CONFIG" tlsprotocols arex/ws` CURVE_STRING=`readconfigvar "$ARC_RUNTIME_CONFIG" tlscurve arex/ws` USERMAP_BLOCK='' if [ "$mapping_present" = 'true' ] ; then USERMAP_BLOCK='mapping' fi HOSTNAME=`readconfigvar "$ARC_RUNTIME_CONFIG" hostname common` SERVICEMAIL=`readconfigvar "$ARC_RUNTIME_CONFIG" mail arex` CONTROLDIR=`readconfigvar "$ARC_RUNTIME_CONFIG" controldir arex` # It is easier to handle root user through empty value. if [ "$USERNAME" = "root" ] ; then USERNAME="" fi if [ "$GROUPNAME" = "root" ] ; then GROUPNAME="" fi DHPARAM_PATH='' if [ ! -z "$CONTROLDIR" ] ; then DHPARAM_PATH="$CONTROLDIR/dhparam.pem" if [ -f "$DHPARAM_PATH" ] ; then openssl dhparam -in "$DHPARAM_PATH" -check -noout &>/dev/null if [ $? -ne 0 ] ; then rm -f "$DHPARAM_PATH" fi fi if [ ! -f "$DHPARAM_PATH" ] ; then /bin/bash -c "umask 0177 ; openssl dhparam -out '$DHPARAM_PATH' 4096" &>/dev/null & disown; fi fi # Exporting collected variables export X509_USER_CERT export X509_USER_KEY export X509_CERT_DIR export GLOBUS_TCP_PORT_RANGE export GLOBUS_UDP_PORT_RANGE export HOSTNAME if [ ! 
-z "$GRIDTMPDIR" ] ; then export TMPDIR="$GRIDTMPDIR" ; fi # Web Service configuration arex_endpoint="" arex_mount_point="" arex_proto="" arex_host="" arex_port="" arex_path="" arex_service_plexer="" ws_present=`testconfigblock "$ARC_RUNTIME_CONFIG" arex/ws` arex_present=`testconfigblock "$ARC_RUNTIME_CONFIG" arex/ws/jobs` if [ "$ws_present" = 'true' ] ; then WSLOGFILE=`readconfigvar "$ARC_RUNTIME_CONFIG" logfile arex/ws` MAX_JOB_CONTROL_REQUESTS=`readconfigvar "$ARC_RUNTIME_CONFIG" max_job_control_requests arex/ws` MAX_INFOSYS_REQUESTS=`readconfigvar "$ARC_RUNTIME_CONFIG" max_infosys_requests arex/ws` MAX_DATA_TRANSFER_REQUESTS=`readconfigvar "$ARC_RUNTIME_CONFIG" max_data_transfer_requests arex/ws` USERAUTH_BLOCK='arex/ws/jobs' arex_mount_point=`readconfigvar "$ARC_RUNTIME_CONFIG" wsurl arex/ws` arex_proto=`echo "$arex_mount_point" | sed 's/^\([^:]*\):\/\/.*/\1/;t;s/.*//'` arex_host=`echo "$arex_mount_point" | sed 's/^[^:]*:\/\/\([^:\/]*\).*/\1/;t;s/.*//'` arex_port=`echo "$arex_mount_point" | sed 's/^[^:]*:\/\/[^:]*:\([^\/]*\)\(.*\)/\1/;t;s/.*//'` arex_path=`echo "$arex_mount_point" | sed 's/^[^:]*:\/\/[^\/]*\/\(.*\)/\1/;t;s/.*//'` if [ "$arex_proto" = "https" ] ; then if [ -z "$arex_port" ] ; then arex_port="443" fi elif [ "$arex_proto" = "http" ] ; then if [ -z "$arex_port" ] ; then arex_port="80" fi else log_failure_msg "Unsupported protocol '$arex_proto' for WS interface URL" exit 1 fi arex_endpoint="$arex_mount_point" arex_path="/$arex_path" mkdir_for_user `dirname "$WSLOGFILE"` "$USERNAME" "$GROUPNAME" mkfile_for_user "$WSLOGFILE" "$USERNAME" "$GROUPNAME" else log_failure_msg "The A-REX/EMIES WS interface must be enabled for this service" exit 1 fi if [ "$arex_present" = 'true' ] ; then if [ "$mapping_present" != 'true' ] ; then log_failure_msg "For A-REX/EMIES WS interface to work mapping must be enabled" exit 1 fi arex_service_plexer="^$arex_path" fi argus_shc="" argus_plugin="" arguspep_endpoint=`readconfigvar "$ARC_RUNTIME_CONFIG" arguspep_endpoint arex/ws/argus` if [ ! -z "$arguspep_endpoint" ]; then argus_plugin="${argus_plugin}arguspepclient" if [ ! -f "$ARC_LOCATION/lib/arc/libarguspepclient.so" ] && [ ! -f "$ARC_LOCATION/lib64/arc/libarguspepclient.so" ]; then log_failure_msg "Plugin arguspepclient(libarguspepclient.so) not found. You may need to install corresponding package" exit 1 fi arguspep_profile=`readconfigvar "$ARC_RUNTIME_CONFIG" arguspep_profile arex/ws/argus` if [ -z "$arguspep_profile" ]; then arguspep_profile="emi"; fi arguspep_usermap=`readconfigvar "$ARC_RUNTIME_CONFIG" arguspep_usermap arex/ws/argus` if [ -z "$arguspep_usermap" ]; then arguspep_usermap="false"; fi if [ "$arguspep_usermap" = "yes" ]; then arguspep_usermap="true"; fi if [ "$arguspep_usermap" = "no" ]; then arguspep_usermap="false"; fi if [ "$mapping_present" != 'true' ]; then if [ "$arguspep_usermap" = 'true' ]; then log_failure_msg "Can't map user identity through Argus PEP because mapping is disabled for the service." exit 1 fi fi argus_shc="${argus_shc} $arguspep_endpoint $arguspep_profile $X509_USER_KEY $X509_USER_CERT $X509_CERT_DIR $arguspep_usermap " fi arguspdp_endpoint=`readconfigvar "$ARC_RUNTIME_CONFIG" arguspdp_endpoint arex/ws/argus` if [ ! -z "$arguspdp_endpoint" ]; then argus_plugin="${argus_plugin}arguspdpclient" if [ ! -f "$ARC_LOCATION/lib/arc/libarguspdpclient.so" ] && [ ! -f "$ARC_LOCATION/lib64/arc/libarguspdpclient.so" ]; then log_failure_msg "Plugin arguspdpclient(libarguspdpclient.so) not found. 
You may need to install corresponding package" exit 1 fi arguspdp_profile=`readconfigvar "$ARC_RUNTIME_CONFIG" arguspdp_profile arex/ws/argus` if [ -z "$arguspdp_profile" ]; then arguspdp_profile="emi"; fi arguspdp_usermap=`readconfigvar "$ARC_RUNTIME_CONFIG" arguspdp_usermap arex/ws/argus` if [ -z "$arguspdp_usermap" ]; then arguspdp_usermap="false"; fi if [ "$arguspdp_usermap" = "yes" ]; then arguspdp_usermap="true"; fi if [ "$arguspdp_usermap" = "no" ]; then arguspdp_usermap="false"; fi if [ "$mapping_present" != 'true' ]; then if [ "$arguspdp_usermap" = 'true' ]; then log_failure_msg "Can't map user identity through Argus PDP because mapping is disabled for the service." exit 1 fi fi arguspdp_acceptnotapplicable=`readconfigvar "$ARC_RUNTIME_CONFIG" arguspdp_acceptnotapplicable arex/ws/argus` if [ -z "$arguspdp_acceptnotapplicable" ]; then arguspdp_acceptnotapplicable="false"; fi if [ "$arguspdp_acceptnotapplicable" = "yes" ]; then arguspdp_acceptnotapplicable="true"; fi if [ "$arguspdp_acceptnotapplicable" = "no" ]; then arguspdp_acceptnotapplicable="false"; fi argus_shc="${argus_shc} $arguspdp_endpoint $arguspdp_profile $X509_USER_KEY $X509_USER_CERT $X509_CERT_DIR $arguspdp_usermap $arguspdp_acceptnotapplicable " fi # candypond candypond_plexer="" candypond="" use_candypond=`testconfigblock "$ARC_RUNTIME_CONFIG" arex/ws/candypond` if [ "$use_candypond" = "true" ]; then if [ "$ws_present" != 'true' ] ; then log_failure_msg "WS interface must be turned on to use candypond" exit 1 fi candypond_plexer="^$arex_path/candypond" candypond_shc=" $ARC_RUNTIME_CONFIG arex/ws/candypond " if [ "$mapping_present" = 'true' ]; then candypond_shc="$candypond_shc $ARC_RUNTIME_CONFIG $USERMAP_BLOCK $ARC_RUNTIME_CONFIG true " fi candypond=" $candypond_shc " fi service_mail="" if [ ! -z "$SERVICEMAIL" ] ; then service_mail="$SERVICEMAIL" fi AREX_CONFIG=`mktemp -t arex.xml.XXXXXX` if [ -z "$AREX_CONFIG" ] ; then log_failure_msg "Failed to create temporary file" exit 1 fi CMD="$CMD -c $AREX_CONFIG" case "$LOGLEVEL" in 0) LOGLEVEL="FATAL" ;; 1) LOGLEVEL="ERROR" ;; 2) LOGLEVEL="WARNING" ;; 3) LOGLEVEL="INFO" ;; 4) LOGLEVEL="VERBOSE" ;; 5) LOGLEVEL="DEBUG" ;; *) LOGLEVEL="INFO" ;; esac if [ "$WATCHDOG" = "yes" ] ; then WATCHDOG="true" else WATCHDOG="false" fi VOMS_PROCESSING=${VOMS_PROCESSING:-standard} if [ ! -z "$USERNAME" ] ; then CMD="$CMD -u $USERNAME" fi if [ ! -z "$GROUPNAME" ] ; then CMD="$CMD -g $GROUPNAME" fi # Authorization and user mapping for A-REX/EMIES emies_legacy_shc="" # emies_legacy_shc=" # # # # # # $ARC_RUNTIME_CONFIG # $USERAUTH_BLOCK # # # #" if [ "$mapping_present" = 'true' ]; then emies_legacy_shc="$emies_legacy_shc $ARC_RUNTIME_CONFIG $USERMAP_BLOCK " fi authtokens_plugin="" authtokens_handler="" if [ "$authtokens_present" = 'true' ]; then authtokens_plugin="arcshcotokens" authtokens_handler=" " fi ciphers_xml="" if [ ! -z "$CIPHERS_STRING" ] ; then ciphers_xml="$CIPHERS_STRING" fi protocols_xml="" if [ ! -z "$PROTOCOLS_STRING" ] ; then protocols_xml="$PROTOCOLS_STRING" fi curve_xml="" if [ ! -z "$CURVE_STRING" ] ; then curve_xml="$CURVE_STRING" fi dhparam_xml="" if [ ! 
-z "$DHPARAM_PATH" ] ; then dhparam_xml="$DHPARAM_PATH" fi # A-Rex with WS interface over HTTP AREXCFGWS="\ $PID_FILE $WSLOGFILE $LOGLEVEL $WATCHDOG $ARC_LOCATION/@pkglibsubdir@/ mcctcp mcctls mcchttp mccsoap arex identitymap arcshc arcshclegacy $authtokens_plugin $argus_plugin $arex_port POST $authtokens_handler true $candypond_plexer $arex_service_plexer $emies_legacy_shc $argus_shc $arex_endpoint $service_mail $ARC_RUNTIME_CONFIG none $MAX_INFOSYS_REQUESTS $MAX_JOB_CONTROL_REQUESTS $MAX_DATA_TRANSFER_REQUESTS $candypond " # A-Rex with WS interface over HTTPS AREXCFGWSS="\ $PID_FILE $WSLOGFILE $LOGLEVEL $WATCHDOG $ARC_LOCATION/@pkglibsubdir@/ mcctcp mcctls mcchttp mccsoap arex identitymap arcshc arcshclegacy $authtokens_plugin $argus_plugin $arex_port $X509_USER_KEY $X509_USER_CERT $X509_CERT_DIR $VOMS_PROCESSING false $ciphers_xml $protocols_xml $curve_xml $dhparam_xml POST $authtokens_handler
    Strict-Transport-Security: max-age=31536000; includeSubDomains
    $ARC_RUNTIME_CONFIG
    true $candypond_plexer $arex_service_plexer $service_mail $emies_legacy_shc $argus_shc $arex_endpoint $ARC_RUNTIME_CONFIG none $MAX_INFOSYS_REQUESTS $MAX_JOB_CONTROL_REQUESTS $MAX_DATA_TRANSFER_REQUESTS $candypond
    " if [ "$arex_proto" = 'http' ] ; then echo "$AREXCFGWS" > "$AREX_CONFIG" else echo "$AREXCFGWSS" > "$AREX_CONFIG" fi # setup logfile in case it is not there yet if [ ! -z "$USERNAME" ] ; then if [ ! -z "$GROUPNAME" ] ; then [ -f $AREX_CONFIG ] && chown "$USERNAME:$GROUPNAME" "$AREX_CONFIG" else [ -f $AREX_CONFIG ] && chown "$USERNAME" "$AREX_CONFIG" fi fi # prepare to collect crash information COREDIR=`dirname "${LOGFILE}"`/arccore mkdir_for_user "${COREDIR}" "$USERNAME" "$GROUPNAME" cd "${COREDIR}" ulimit -c unlimited } validate() { CHECK_CMD=$ARC_LOCATION/@pkglibexecsubdir@/arc-config-check if [ ! -x $CHECK_CMD ]; then log_failure_msg "Could not find or execute arc-config-check tool" return 1 fi eval "$CHECK_CMD --config $ARC_CONFIG $@" RETVAL=$? return $RETVAL } if [ "$RUN" != "yes" ] ; then echo "a-rex-ws disabled, please adjust the configuration to your needs " echo "and then set RUN to 'yes' in /etc/default/arc-arex-ws to enable it." exit 0 fi prepare echo "Validating A-REX setup..." >> "$WSLOGFILE" validate >> "$WSLOGFILE" 2>&1 RETVAL=$? if [ $RETVAL != 0 ]; then # Run validator again to print errors to stdout validate --skip-warnings log_failure_msg "Configuration validation failed" exit 1 fi # Raise limit on number of file descriptors to max hlimit=`ulimit -H -n` if [ ! -z "$hlimit" ] ; then ulimit -S -n "$hlimit" 2>/dev/null fi exec $CMD "$@" nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/internaljobplugin0000644000000000000000000000013214152153474024442 xustar000000000000000030 mtime=1638455100.397577432 30 atime=1638455103.997631524 30 ctime=1638455100.397577432 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/0000755000175000002070000000000014152153474024504 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/JobControllerPluginINTERN0000644000000000000000000000013214152153376031320 xustar000000000000000030 mtime=1638455038.419646185 30 atime=1638455038.506647492 30 ctime=1638455100.393577372 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.h0000644000175000002070000000412014152153376031745 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifndef __ARC_JOBCONTROLLERINTERNAL_H__ #define __ARC_JOBCONTROLLERINTERNAL_H__ #include using namespace Arc; namespace Arc{ class URL; } namespace ARex { class GMConfig; } namespace ARexINTERNAL { class INTERNALClient; class INTERNALClients; class JobStateINTERNAL; class JobControllerPluginINTERNAL : public Arc::JobControllerPlugin { public: JobControllerPluginINTERNAL(const UserConfig& usercfg, PluginArgument* parg) : JobControllerPlugin(usercfg, parg),clients(usercfg) { supportedInterfaces.push_back("org.nordugrid.internal"); } ~JobControllerPluginINTERNAL() {} static Plugin* Instance(PluginArgument *arg) { JobControllerPluginArgument *jcarg = dynamic_cast(arg); return jcarg ? 
new JobControllerPluginINTERNAL(*jcarg, arg) : NULL; } virtual bool isEndpointNotSupported(const std::string& endpoint) const; virtual void UpdateJobs(std::list& jobs, std::list& IDsProcessed, std::list& IDsNotProcessed, bool isGrouped = false) const; virtual bool CleanJobs(const std::list& jobs, std::list& IDsProcessed, std::list& IDsNotProcessed, bool isGrouped = false) const; virtual bool CancelJobs(const std::list& jobs, std::list& IDsProcessed, std::list& IDsNotProcessed, bool isGrouped = false) const; virtual bool RenewJobs(const std::list& jobs, std::list& IDsProcessed, std::list& IDsNotProcessed, bool isGrouped = false) const; virtual bool ResumeJobs(const std::list& jobs, std::list& IDsProcessed, std::list& IDsNotProcessed, bool isGrouped = false) const; virtual bool GetURLToJobResource(const Job& job, Job::ResourceType resource, URL& url) const; virtual bool GetJobDescription(const Job& job, std::string& desc_str) const; private: INTERNALClients clients; static Logger logger; }; } // namespace Arc #endif // __ARC_JOBCONTROLLERINTERNAL_H__ nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376026554 xustar000000000000000030 mtime=1638455038.420646199 30 atime=1638455038.506647492 30 ctime=1638455100.385577251 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/Makefile.am0000644000175000002070000000331214152153376026540 0ustar00mockbuildmock00000000000000pkglib_LTLIBRARIES = libaccINTERNAL.la libaccINTERNAL_la_SOURCES = INTERNALClient.cpp INTERNALClient.h \ JobStateINTERNAL.cpp JobStateINTERNAL.h \ SubmitterPluginINTERNAL.cpp SubmitterPluginINTERNAL.h \ JobControllerPluginINTERNAL.cpp JobControllerPluginINTERNAL.h \ JobListRetrieverPluginINTERNAL.cpp JobListRetrieverPluginINTERNAL.h \ TargetInformationRetrieverPluginINTERNAL.cpp TargetInformationRetrieverPluginINTERNAL.h \ DescriptorsINTERNAL.cpp \ ../job.cpp ../tools.cpp libaccINTERNAL_la_CXXFLAGS = -I$(top_srcdir)/include \ $(LIBXML2_CFLAGS) $(GLIBMM_CFLAGS) $(AM_CXXFLAGS) libaccINTERNAL_la_LIBADD = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/compute/libarccompute.la \ $(top_builddir)/src/hed/libs/security/libarcsecurity.la \ ../grid-manager/conf/libconf.la \ ../grid-manager/jobs/libjobs.la \ ../grid-manager/files/libfiles.la \ ../grid-manager/log/liblog.la \ ../grid-manager/run/librun.la \ ../grid-manager/mail/libmail.la \ ../delegation/libdelegation.la # $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ # $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ # $(top_builddir)/src/hed/libs/compute/libarccompute.la \ # $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ # $(top_builddir)/src/hed/libs/message/libarcmessage.la \ # $(top_builddir)/src/libs/data-staging/libarcdatastaging.la \ # $(top_builddir)/src/services/a-rex/grid-manager/libgridmanager.la \ # $(top_builddir)/src/services/a-rex/delegation/libdelegation.la \ # $(LIBXML2_LIBS) $(GLIBMM_LIBS) $(GLOBUS_JOBPLUGIN_LIBS) libaccINTERNAL_la_LDFLAGS = -no-undefined -avoid-version -module nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/INTERNALClient.h0000644000000000000000000000013214152153376027244 xustar000000000000000030 mtime=1638455038.419646185 30 atime=1638455038.506647492 30 ctime=1638455100.387577281 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/INTERNALClient.h0000644000175000002070000001277714152153376027247 
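The internaljobplugin Makefile.am above builds libaccINTERNAL as a loadable libtool module (-module -avoid-version), so the result is installed into the ARC plugin directory rather than as a regular versioned library. A quick post-install sanity check is simply to look for the module next to the other plugins; the directories below are assumed common defaults, not guaranteed locations, so adjust them to the libdir/prefix of your installation.

#!/bin/sh
# Look for the INTERNAL job plugin module in typical ARC plugin directories.
# /usr/lib64/arc and /usr/lib/arc are assumptions; use your actual ARC_LOCATION.
for dir in /usr/lib64/arc /usr/lib/arc "${ARC_LOCATION:-/usr}/lib64/arc"; do
    if [ -f "$dir/libaccINTERNAL.so" ]; then
        echo "found plugin module: $dir/libaccINTERNAL.so"
        exit 0
    fi
done
echo "libaccINTERNAL.so not found in the default plugin directories" >&2
exit 1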
0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifndef __INTERNAL_CLIENT__ #define __INTERNAL_CLIENT__ #include #include #include #include #include #include #include #include #include "../job.h" #include "../delegation/DelegationStore.h" #include "../delegation/DelegationStores.h" #include "../grid-manager/jobs/GMJob.h" /*Note to self: must check all variables if they should be public or private */ using namespace Arc; namespace ARexINTERNAL { #define DEFAULT_JOB_RSL_MAX_SIZE (5*1024*1024) class INTERNALClient; class INTERNALJob { friend class INTERNALClient; private: std::string id; std::string state; std::string sessiondir; std::string controldir; std::string delegation_id; Arc::URL manager; Arc::URL resource; std::list stagein; std::list session; std::list stageout; public: INTERNALJob& operator=(const Arc::Job& job); void toJob(INTERNALClient* client, INTERNALJob* localjob, Arc::Job& j) const; void toJob(INTERNALClient* client, Arc::Job & job, Arc::Logger& logger) const; //added to be able to convert arexjob to INTERNALJob INTERNALJob(/*const */ARex::ARexJob& _arexjob, const ARex::GMConfig& _config, std::string const& _deleg_id); INTERNALJob(void){}; std::string const GetId() const { return id; } std::list const& GetStagein() const { return stagein; } std::list const& GetSession() const { return session; } std::list const& GetStageout() const { return stageout; } }; //! A client class for the INTERNAL service. /*! This class is a client for the INTERNAL service. It provides methods for selected set of operations on a INTERNAL service: - Job submission - Job status queries - Job termination */ class INTERNALClient { friend class INTERNALJob; public: //! The constructor for the INTERNALClient class. /*! This is the constructor for the INTERNALClient class. It creates an INTERNAL client that corresponds to a specific INTERNAL service. @param url The URL of the INTERNAL service. @param usercfg onfiguration object. */ INTERNALClient(void); INTERNALClient(const Arc::UserConfig& usercfg); INTERNALClient(const Arc::URL& url, const Arc::UserConfig& usercfg); //! The destructor. /*! This is the destructor. It does what destructors usually do, cleans up... */ ~INTERNALClient(); ARex::GMConfig const * GetConfig() const { return config; } const std::string& failure(void) const { return lfailure; } bool CreateDelegation(std::string& deleg_id); bool RenewDelegation(std::string const& deleg_id); //! Submit a job. //TO-DO Fix description /*! This method submits a job to the INTERNAL service corresponding to this client instance. It does not do data staging. @param jobdesc A string containing the job description. @param job The container for attributes identidying submitted job. @param state The current state of submitted job. @return true on success */ bool submit(const std::list& jobdescs, std::list& localjobs_, const std::string delegation_id = ""); bool submit(const Arc::JobDescription& jobdesc, INTERNALJob& localjob, const std::string delegation_id = ""); bool putFiles(INTERNALJob const& localjob, std::list const& sources, std::list const& destinations); bool info(std::list& jobids,std::list& jobids_found); bool info(INTERNALJob& job, Arc::Job& info); bool clean(const std::string& jobid); bool kill(const std::string& jobid); bool restart(const std::string& jobid); bool list(std::list& jobs); //! Request the status of a service. /*! This method requests the INTERNAL service about its status. @param status The XML document representing status of the service. 
@return true on success */ bool sstat(Arc::XMLNode& xmldoc); private: Arc::URL ce; std::string endpoint; Arc::UserConfig usercfg; std::string cfgfile; Arc::User user; std::vector session_dirs; std::vector session_dirs_non_draining; ARex::GMConfig *config; ARex::ARexGMConfig *arexconfig; bool SetAndLoadConfig(); bool SetEndPoint(); //bool SetGMDirs(); bool MapLocalUser(); bool PrepareARexConfig(); //bool PreProcessJob(ARex::JobDescriptionHandler& job_desc_handler, ARex::JobLocalDescription& job_desc); bool readonly; unsigned int job_rsl_max_size; bool fill_local_jobdesc(Arc::XMLNode& descr); std::string error_description;//should maybe be other type, check in jobplugin and relat std::string get_error_description() const; ARex::DelegationStore::DbType deleg_db_type; //! A logger for the A-REX client. /*! This is a logger to which all logging messages from the INTERNAL client are sent. */ static Arc::Logger logger; ARex::DelegationStores deleg_stores; std::list avail_queues; const char* matched_vo; std::string lfailure; }; class INTERNALClients { private: std::multimap clients_; const Arc::UserConfig& usercfg_; public: INTERNALClients(const Arc::UserConfig& usercfg); ~INTERNALClients(void); }; } #endif nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153433026557 xustar000000000000000030 mtime=1638455067.354080937 30 atime=1638455089.847418911 30 ctime=1638455100.384577236 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/Makefile.in0000644000175000002070000012416514152153433026555 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/a-rex/internaljobplugin DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed 
'$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(pkglibdir)" LTLIBRARIES = $(pkglib_LTLIBRARIES) libaccINTERNAL_la_DEPENDENCIES = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/compute/libarccompute.la \ $(top_builddir)/src/hed/libs/security/libarcsecurity.la \ ../grid-manager/conf/libconf.la \ ../grid-manager/jobs/libjobs.la \ ../grid-manager/files/libfiles.la \ ../grid-manager/log/liblog.la ../grid-manager/run/librun.la \ ../grid-manager/mail/libmail.la ../delegation/libdelegation.la am_libaccINTERNAL_la_OBJECTS = libaccINTERNAL_la-INTERNALClient.lo \ libaccINTERNAL_la-JobStateINTERNAL.lo \ libaccINTERNAL_la-SubmitterPluginINTERNAL.lo \ libaccINTERNAL_la-JobControllerPluginINTERNAL.lo \ libaccINTERNAL_la-JobListRetrieverPluginINTERNAL.lo \ libaccINTERNAL_la-TargetInformationRetrieverPluginINTERNAL.lo \ libaccINTERNAL_la-DescriptorsINTERNAL.lo \ libaccINTERNAL_la-job.lo libaccINTERNAL_la-tools.lo libaccINTERNAL_la_OBJECTS = $(am_libaccINTERNAL_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libaccINTERNAL_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) \ $(libaccINTERNAL_la_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libaccINTERNAL_la_SOURCES) 
DIST_SOURCES = $(libaccINTERNAL_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP 
= @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ 
PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ pkglib_LTLIBRARIES = libaccINTERNAL.la libaccINTERNAL_la_SOURCES = INTERNALClient.cpp INTERNALClient.h \ JobStateINTERNAL.cpp JobStateINTERNAL.h \ SubmitterPluginINTERNAL.cpp SubmitterPluginINTERNAL.h \ JobControllerPluginINTERNAL.cpp JobControllerPluginINTERNAL.h \ JobListRetrieverPluginINTERNAL.cpp JobListRetrieverPluginINTERNAL.h \ 
TargetInformationRetrieverPluginINTERNAL.cpp TargetInformationRetrieverPluginINTERNAL.h \ DescriptorsINTERNAL.cpp \ ../job.cpp ../tools.cpp libaccINTERNAL_la_CXXFLAGS = -I$(top_srcdir)/include \ $(LIBXML2_CFLAGS) $(GLIBMM_CFLAGS) $(AM_CXXFLAGS) libaccINTERNAL_la_LIBADD = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/compute/libarccompute.la \ $(top_builddir)/src/hed/libs/security/libarcsecurity.la \ ../grid-manager/conf/libconf.la \ ../grid-manager/jobs/libjobs.la \ ../grid-manager/files/libfiles.la \ ../grid-manager/log/liblog.la \ ../grid-manager/run/librun.la \ ../grid-manager/mail/libmail.la \ ../delegation/libdelegation.la # $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ # $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ # $(top_builddir)/src/hed/libs/compute/libarccompute.la \ # $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ # $(top_builddir)/src/hed/libs/message/libarcmessage.la \ # $(top_builddir)/src/libs/data-staging/libarcdatastaging.la \ # $(top_builddir)/src/services/a-rex/grid-manager/libgridmanager.la \ # $(top_builddir)/src/services/a-rex/delegation/libdelegation.la \ # $(LIBXML2_LIBS) $(GLIBMM_LIBS) $(GLOBUS_JOBPLUGIN_LIBS) libaccINTERNAL_la_LDFLAGS = -no-undefined -avoid-version -module all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/a-rex/internaljobplugin/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/a-rex/internaljobplugin/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): install-pkglibLTLIBRARIES: $(pkglib_LTLIBRARIES) @$(NORMAL_INSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ list2="$$list2 $$p"; \ else :; fi; \ done; \ test -z "$$list2" || { \ echo " $(MKDIR_P) '$(DESTDIR)$(pkglibdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkglibdir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(pkglibdir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(pkglibdir)"; \ } uninstall-pkglibLTLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ for p in $$list; do \ $(am__strip_dir) \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(pkglibdir)/$$f'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(pkglibdir)/$$f"; \ done clean-pkglibLTLIBRARIES: -test -z "$(pkglib_LTLIBRARIES)" || rm -f $(pkglib_LTLIBRARIES) @list='$(pkglib_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libaccINTERNAL.la: $(libaccINTERNAL_la_OBJECTS) $(libaccINTERNAL_la_DEPENDENCIES) $(EXTRA_libaccINTERNAL_la_DEPENDENCIES) $(AM_V_CXXLD)$(libaccINTERNAL_la_LINK) -rpath $(pkglibdir) $(libaccINTERNAL_la_OBJECTS) $(libaccINTERNAL_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccINTERNAL_la-DescriptorsINTERNAL.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccINTERNAL_la-INTERNALClient.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccINTERNAL_la-JobControllerPluginINTERNAL.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccINTERNAL_la-JobListRetrieverPluginINTERNAL.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccINTERNAL_la-JobStateINTERNAL.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccINTERNAL_la-SubmitterPluginINTERNAL.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccINTERNAL_la-TargetInformationRetrieverPluginINTERNAL.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccINTERNAL_la-job.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccINTERNAL_la-tools.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) 
@AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libaccINTERNAL_la-INTERNALClient.lo: INTERNALClient.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -MT libaccINTERNAL_la-INTERNALClient.lo -MD -MP -MF $(DEPDIR)/libaccINTERNAL_la-INTERNALClient.Tpo -c -o libaccINTERNAL_la-INTERNALClient.lo `test -f 'INTERNALClient.cpp' || echo '$(srcdir)/'`INTERNALClient.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccINTERNAL_la-INTERNALClient.Tpo $(DEPDIR)/libaccINTERNAL_la-INTERNALClient.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='INTERNALClient.cpp' object='libaccINTERNAL_la-INTERNALClient.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccINTERNAL_la-INTERNALClient.lo `test -f 'INTERNALClient.cpp' || echo '$(srcdir)/'`INTERNALClient.cpp libaccINTERNAL_la-JobStateINTERNAL.lo: JobStateINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -MT libaccINTERNAL_la-JobStateINTERNAL.lo -MD -MP -MF $(DEPDIR)/libaccINTERNAL_la-JobStateINTERNAL.Tpo -c -o libaccINTERNAL_la-JobStateINTERNAL.lo `test -f 'JobStateINTERNAL.cpp' || echo '$(srcdir)/'`JobStateINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccINTERNAL_la-JobStateINTERNAL.Tpo $(DEPDIR)/libaccINTERNAL_la-JobStateINTERNAL.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='JobStateINTERNAL.cpp' object='libaccINTERNAL_la-JobStateINTERNAL.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccINTERNAL_la-JobStateINTERNAL.lo `test -f 'JobStateINTERNAL.cpp' || echo '$(srcdir)/'`JobStateINTERNAL.cpp libaccINTERNAL_la-SubmitterPluginINTERNAL.lo: 
SubmitterPluginINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -MT libaccINTERNAL_la-SubmitterPluginINTERNAL.lo -MD -MP -MF $(DEPDIR)/libaccINTERNAL_la-SubmitterPluginINTERNAL.Tpo -c -o libaccINTERNAL_la-SubmitterPluginINTERNAL.lo `test -f 'SubmitterPluginINTERNAL.cpp' || echo '$(srcdir)/'`SubmitterPluginINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccINTERNAL_la-SubmitterPluginINTERNAL.Tpo $(DEPDIR)/libaccINTERNAL_la-SubmitterPluginINTERNAL.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='SubmitterPluginINTERNAL.cpp' object='libaccINTERNAL_la-SubmitterPluginINTERNAL.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccINTERNAL_la-SubmitterPluginINTERNAL.lo `test -f 'SubmitterPluginINTERNAL.cpp' || echo '$(srcdir)/'`SubmitterPluginINTERNAL.cpp libaccINTERNAL_la-JobControllerPluginINTERNAL.lo: JobControllerPluginINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -MT libaccINTERNAL_la-JobControllerPluginINTERNAL.lo -MD -MP -MF $(DEPDIR)/libaccINTERNAL_la-JobControllerPluginINTERNAL.Tpo -c -o libaccINTERNAL_la-JobControllerPluginINTERNAL.lo `test -f 'JobControllerPluginINTERNAL.cpp' || echo '$(srcdir)/'`JobControllerPluginINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccINTERNAL_la-JobControllerPluginINTERNAL.Tpo $(DEPDIR)/libaccINTERNAL_la-JobControllerPluginINTERNAL.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='JobControllerPluginINTERNAL.cpp' object='libaccINTERNAL_la-JobControllerPluginINTERNAL.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccINTERNAL_la-JobControllerPluginINTERNAL.lo `test -f 'JobControllerPluginINTERNAL.cpp' || echo '$(srcdir)/'`JobControllerPluginINTERNAL.cpp libaccINTERNAL_la-JobListRetrieverPluginINTERNAL.lo: JobListRetrieverPluginINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -MT libaccINTERNAL_la-JobListRetrieverPluginINTERNAL.lo -MD -MP -MF $(DEPDIR)/libaccINTERNAL_la-JobListRetrieverPluginINTERNAL.Tpo -c -o libaccINTERNAL_la-JobListRetrieverPluginINTERNAL.lo `test -f 'JobListRetrieverPluginINTERNAL.cpp' || echo '$(srcdir)/'`JobListRetrieverPluginINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccINTERNAL_la-JobListRetrieverPluginINTERNAL.Tpo $(DEPDIR)/libaccINTERNAL_la-JobListRetrieverPluginINTERNAL.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ 
$(AM_V_CXX)source='JobListRetrieverPluginINTERNAL.cpp' object='libaccINTERNAL_la-JobListRetrieverPluginINTERNAL.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccINTERNAL_la-JobListRetrieverPluginINTERNAL.lo `test -f 'JobListRetrieverPluginINTERNAL.cpp' || echo '$(srcdir)/'`JobListRetrieverPluginINTERNAL.cpp libaccINTERNAL_la-TargetInformationRetrieverPluginINTERNAL.lo: TargetInformationRetrieverPluginINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -MT libaccINTERNAL_la-TargetInformationRetrieverPluginINTERNAL.lo -MD -MP -MF $(DEPDIR)/libaccINTERNAL_la-TargetInformationRetrieverPluginINTERNAL.Tpo -c -o libaccINTERNAL_la-TargetInformationRetrieverPluginINTERNAL.lo `test -f 'TargetInformationRetrieverPluginINTERNAL.cpp' || echo '$(srcdir)/'`TargetInformationRetrieverPluginINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccINTERNAL_la-TargetInformationRetrieverPluginINTERNAL.Tpo $(DEPDIR)/libaccINTERNAL_la-TargetInformationRetrieverPluginINTERNAL.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='TargetInformationRetrieverPluginINTERNAL.cpp' object='libaccINTERNAL_la-TargetInformationRetrieverPluginINTERNAL.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccINTERNAL_la-TargetInformationRetrieverPluginINTERNAL.lo `test -f 'TargetInformationRetrieverPluginINTERNAL.cpp' || echo '$(srcdir)/'`TargetInformationRetrieverPluginINTERNAL.cpp libaccINTERNAL_la-DescriptorsINTERNAL.lo: DescriptorsINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -MT libaccINTERNAL_la-DescriptorsINTERNAL.lo -MD -MP -MF $(DEPDIR)/libaccINTERNAL_la-DescriptorsINTERNAL.Tpo -c -o libaccINTERNAL_la-DescriptorsINTERNAL.lo `test -f 'DescriptorsINTERNAL.cpp' || echo '$(srcdir)/'`DescriptorsINTERNAL.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccINTERNAL_la-DescriptorsINTERNAL.Tpo $(DEPDIR)/libaccINTERNAL_la-DescriptorsINTERNAL.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DescriptorsINTERNAL.cpp' object='libaccINTERNAL_la-DescriptorsINTERNAL.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccINTERNAL_la-DescriptorsINTERNAL.lo `test -f 'DescriptorsINTERNAL.cpp' || echo '$(srcdir)/'`DescriptorsINTERNAL.cpp 
libaccINTERNAL_la-job.lo: ../job.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -MT libaccINTERNAL_la-job.lo -MD -MP -MF $(DEPDIR)/libaccINTERNAL_la-job.Tpo -c -o libaccINTERNAL_la-job.lo `test -f '../job.cpp' || echo '$(srcdir)/'`../job.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccINTERNAL_la-job.Tpo $(DEPDIR)/libaccINTERNAL_la-job.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='../job.cpp' object='libaccINTERNAL_la-job.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccINTERNAL_la-job.lo `test -f '../job.cpp' || echo '$(srcdir)/'`../job.cpp libaccINTERNAL_la-tools.lo: ../tools.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -MT libaccINTERNAL_la-tools.lo -MD -MP -MF $(DEPDIR)/libaccINTERNAL_la-tools.Tpo -c -o libaccINTERNAL_la-tools.lo `test -f '../tools.cpp' || echo '$(srcdir)/'`../tools.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccINTERNAL_la-tools.Tpo $(DEPDIR)/libaccINTERNAL_la-tools.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='../tools.cpp' object='libaccINTERNAL_la-tools.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccINTERNAL_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccINTERNAL_la-tools.lo `test -f '../tools.cpp' || echo '$(srcdir)/'`../tools.cpp mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo 
"$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: for dir in "$(DESTDIR)$(pkglibdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool clean-pkglibLTLIBRARIES \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-pkglibLTLIBRARIES install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-pkglibLTLIBRARIES .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-pkglibLTLIBRARIES cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-pkglibLTLIBRARIES install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags tags-am uninstall uninstall-am \ uninstall-pkglibLTLIBRARIES # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/JobStateINTERNAL.h0000644000000000000000000000013214152153376027541 xustar000000000000000030 mtime=1638455038.419646185 30 atime=1638455038.506647492 30 ctime=1638455100.389577311 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/JobStateINTERNAL.h0000644000175000002070000000062414152153376027530 0ustar00mockbuildmock00000000000000#ifndef __ARC_JOBSTATEINTERNAL_H__ #define __ARC_JOBSTATEINTERNAL_H__ #include namespace ARexINTERNAL { class JobStateINTERNAL : public Arc::JobState { public: JobStateINTERNAL(const std::string& state) : Arc::JobState(state, &StateMap) {} static JobState::StateType StateMap(const std::string& state); }; } #endif // __ARC_JOBSTATEINTERNAL_H__ nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/TargetInformationRetrieve0000644000000000000000000000031214152153376031605 xustar0000000000000000112 path=nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/TargetInformationRetrieverPluginINTERNAL.cpp 30 mtime=1638455038.420646199 30 atime=1638455038.506647492 30 ctime=1638455100.395577401 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/TargetInformationRetrieverPluginINTERNAL.c0000644000175000002070000001022614152153376034512 0ustar00mockbuildmock00000000000000 // -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include "JobStateINTERNAL.h" #include "INTERNALClient.h" #include "TargetInformationRetrieverPluginINTERNAL.h" using namespace Arc; namespace ARexINTERNAL { //used when the --direct option is not issued with arcsub Logger TargetInformationRetrieverPluginINTERNAL::logger(Logger::getRootLogger(), "TargetInformationRetrieverPlugin.INTERNAL"); bool TargetInformationRetrieverPluginINTERNAL::isEndpointNotSupported(const Endpoint& endpoint) const { const std::string::size_type pos = endpoint.URLString.find("://"); if (pos != std::string::npos) { const std::string proto = lower(endpoint.URLString.substr(0, pos)); return (proto != "file"); } return (endpoint.URLString != "localhost"); // TODO: consider more strict way to allow only file://localhost } static URL CreateURL(std::string service) { std::string::size_type pos1 = service.find("://"); if (pos1 == std::string::npos) { service = "file://" + service; } else { std::string proto = lower(service.substr(0,pos1)); if(proto != "file") return URL(); } return service; } EndpointQueryingStatus TargetInformationRetrieverPluginINTERNAL::Query(const UserConfig& uc, const Endpoint& cie, std::list& csList, const EndpointQueryOptions&) const { EndpointQueryingStatus s(EndpointQueryingStatus::FAILED); //To-decide: should INTERNAL plugin information be visible in info.xml? 
It can not be used outside, so does not seem to make sense to have it added in info.xml URL url(CreateURL(cie.URLString)); if (!url) { return s; } //To get hold of general service information INTERNALClient ac(url, uc); XMLNode servicesQueryResponse; if (!ac.sstat(servicesQueryResponse)) { return s; } GLUE2::ParseExecutionTargets(servicesQueryResponse, csList); if(!csList.empty()){ if(csList.front().AdminDomain->Name.empty()) csList.front().AdminDomain->Name = url.Host(); csList.front()->InformationOriginEndpoint = cie; //Add the INTERNAL computingendpointtype ComputingEndpointType newCe; newCe->ID = url.Host(); newCe->URLString = url.str(); newCe->InterfaceName = "org.nordugrid.internal"; newCe->HealthState = "ok"; newCe->QualityLevel = "testing";//testing for now, production when in production newCe->Technology = "direct"; newCe->Capability.insert("executionmanagement.jobcreation"); newCe->Capability.insert("executionmanagement.jobdescription"); newCe->Capability.insert("executionmanagement.jobmanagement"); newCe->Capability.insert("information.discovery.job"); newCe->Capability.insert("information.discovery.resource"); newCe->Capability.insert("information.lookup.job"); newCe->Capability.insert("security.delegation"); // std::string ID; // std::string HealthStateInfo; // std::list InterfaceVersion; // std::list InterfaceExtension; // std::list SupportedProfile; // std::string Implementor; // Software Implementation; // std::string ServingState; // std::string IssuerCA; // std::list TrustedCA; // Time DowntimeEnds; // std::string Staging; // int TotalJobs; // int RunningJobs; // int WaitingJobs; // int StagingJobs; // int SuspendedJobs; // int PreLRMSWaitingJobs; //To-DO Assuming there is only one computingservice ComputingServiceType cs = csList.front(); std::map ce = cs.ComputingEndpoint; csList.front().ComputingEndpoint.insert(std::pair(ce.size(), newCe)); } if (!csList.empty()) s = EndpointQueryingStatus::SUCCESSFUL; return s; } } // namespace Arc nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/SubmitterPluginINTERNAL.h0000644000000000000000000000013214152153376031163 xustar000000000000000030 mtime=1638455038.420646199 30 atime=1638455038.506647492 30 ctime=1638455100.391577341 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.h0000644000175000002070000000307714152153376031157 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifndef __ARC_SUBMITTERPLUGININTERNAL_H__ #define __ARC_SUBMITTERPLUGININTERNAL_H__ #include #include #include #include #include #include "INTERNALClient.h" using namespace Arc; namespace ARexINTERNAL{ //class JobStateINTERNAL; class SubmissionStatus; class SubmitterPluginINTERNAL : public SubmitterPlugin { public: SubmitterPluginINTERNAL(const UserConfig& usercfg, PluginArgument* parg) : SubmitterPlugin(usercfg, parg),clients(usercfg) { supportedInterfaces.push_back("org.nordugrid.internal"); } ~SubmitterPluginINTERNAL() { /*deleteAllClients();*/ } static Plugin* Instance(PluginArgument *arg) { SubmitterPluginArgument *subarg = dynamic_cast(arg); return subarg ? 
new SubmitterPluginINTERNAL(*subarg, arg) : NULL; } virtual bool isEndpointNotSupported(const std::string& endpoint) const; virtual Arc::SubmissionStatus Submit(const std::list& jobdescs, const std::string& endpoint, EntityConsumer& jc, std::list& notSubmitted); virtual Arc::SubmissionStatus Submit(const std::list& jobdescs, const ExecutionTarget& et, EntityConsumer& jc, std::list& notSubmitted); private: INTERNALClients clients; bool getDelegationID(const URL& durl, std::string& delegation_id); }; } // namespace ARexINTERNAL #endif // __ARC_SUBMITTERPLUGININTERNAL_H__ nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/SubmitterPluginINTERNAL.c0000644000000000000000000000013214152153376031156 xustar000000000000000030 mtime=1638455038.420646199 30 atime=1638455038.506647492 30 ctime=1638455100.390577326 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/SubmitterPluginINTERNAL.cpp0000644000175000002070000001200114152153376031475 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include //#include "JobStateINTERNAL.h" #include "SubmitterPluginINTERNAL.h" using namespace Arc; namespace ARexINTERNAL { bool SubmitterPluginINTERNAL::isEndpointNotSupported(const std::string& endpoint) const { const std::string::size_type pos = endpoint.find("://"); return pos != std::string::npos && lower(endpoint.substr(0, pos)) != "file"; } bool SubmitterPluginINTERNAL::getDelegationID(const URL& durl, std::string& delegation_id) { if(!durl) { logger.msg(INFO, "Failed to delegate credentials to server - no delegation interface found"); return false; } INTERNALClient ac(durl,*usercfg); if(!ac.CreateDelegation(delegation_id)) { logger.msg(INFO, "Failed to delegate credentials to server - %s",ac.failure()); return false; } return true; } Arc::SubmissionStatus SubmitterPluginINTERNAL::Submit(const std::list& jobdescs, const ExecutionTarget& et, EntityConsumer& jc, std::list& notSubmitted){ Arc::SubmissionStatus retval; std::string endpoint = et.ComputingEndpoint->URLString; retval = Submit(jobdescs, endpoint, jc, notSubmitted); return retval; } Arc::SubmissionStatus SubmitterPluginINTERNAL::Submit(const std::list& jobdescs, const std::string& endpoint, EntityConsumer& jc, std::list& notSubmitted) { //jobdescs as passed down from the client // TODO: this is multi step process. So having retries would be nice. // TODO: If delegation interface is not on same endpoint as submission interface this method is faulty. URL url((endpoint.find("://") == std::string::npos ? "file://" : "") + endpoint, false); /*for accessing jobs*/ /*Preparation of jobdescription*/ Arc::SubmissionStatus retval; std::string delegation_id; INTERNALClient ac(url,*usercfg); for (std::list::const_iterator itJ = jobdescs.begin(); itJ != jobdescs.end(); ++itJ) { //Calls JobDescription.Prepare, which Check for identical file names. and if executable and input is contained in the file list. 
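    // Illustrative note (comments only): Prepare() works on the description
    // object itself, which is presumably why each iteration takes a copy of
    // *itJ instead of touching the caller's const list. In isolation the
    // pattern is:
    //
    //   Arc::JobDescription desc(original);   // copy, leave the original intact
    //   if (!desc.Prepare()) {
    //     // reject: duplicate file names, or executable/input missing from
    //     // the file list (see the comment above)
    //   }
    //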
JobDescription preparedjobdesc(*itJ); if (!preparedjobdesc.Prepare()) { logger.msg(INFO, "Failed preparing job description"); notSubmitted.push_back(&*itJ); retval |= Arc::SubmissionStatus::DESCRIPTION_NOT_SUBMITTED; continue; } bool need_delegation = false; std::list upload_sources; std::list upload_destinations; /*Preparation of input files and outputfiles */ for(std::list::const_iterator itIF = preparedjobdesc.DataStaging.InputFiles.begin(); itIF != preparedjobdesc.DataStaging.InputFiles.end(); ++itIF) { if(!itIF->Sources.empty()) { if(itIF->Sources.front().Protocol() == "file") { upload_sources.push_back(itIF->Sources.front().Path()); upload_destinations.push_back(itIF->Name); } else { need_delegation = true; } } } for(std::list::const_iterator itOF = itJ->DataStaging.OutputFiles.begin(); itOF != itJ->DataStaging.OutputFiles.end() && !need_delegation; ++itOF) { if((!itOF->Targets.empty()) || (itOF->Name[0] == '@')) { // ARC specific - dynamic list of output files need_delegation = true; } } /*end preparation of input and output files */ if (need_delegation && delegation_id.empty()) { if (!getDelegationID(url, delegation_id)) { notSubmitted.push_back(&*itJ); retval |= Arc::SubmissionStatus::DESCRIPTION_NOT_SUBMITTED; continue; } } std::list localjobs; std::list preparedjobdescs; preparedjobdescs.push_back(preparedjobdesc); if((!ac.submit(preparedjobdescs, localjobs, delegation_id)) || (localjobs.empty())) { logger.msg(INFO, "Failed submitting job description"); notSubmitted.push_back(&*itJ); retval |= Arc::SubmissionStatus::DESCRIPTION_NOT_SUBMITTED; continue; } if(!upload_sources.empty()) { if(!ac.putFiles(localjobs.front(), upload_sources, upload_destinations)) { notSubmitted.push_back(&*itJ); retval |= Arc::SubmissionStatus::DESCRIPTION_NOT_SUBMITTED; continue; } } Arc::Job job; localjobs.front().toJob(&ac,&(localjobs.front()),job); AddJobDetails(preparedjobdesc, job); jc.addEntity(job); }//end loop over jobdescriptions return retval; } } // namespace ARexINTERNAL nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/JobStateINTERNAL.cpp0000644000000000000000000000013214152153376030074 xustar000000000000000030 mtime=1638455038.419646185 30 atime=1638455038.506647492 30 ctime=1638455100.388577296 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/JobStateINTERNAL.cpp0000644000175000002070000000631014152153376030061 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include "JobStateINTERNAL.h" /* Maps/translates a INTERNAL state - which is a state corresponding to the ARexJob state, hence GM-job state, to an ARC:JobState */ namespace ARexINTERNAL { Arc::JobState::StateType JobStateINTERNAL::StateMap(const std::string& state) { std::string state_ = Arc::lower(state); /* Infosys states (mapped from GM states): ACCEPTING ACCEPTED PREPARED SUBMITTING INLRMS: * KILLING EXECUTED KILLED FAILED GM states (either not mapped or somehow obtained directly): ACCEPTED PREPARING SUBMIT INLRMS CANCELING FINISHING FINISHED DELETED PENDING:* */ /// \mapname GM Grid Manager /// \mapnote Prefix "PENDING:" and spaces are ignored when mapping states. 
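    // Illustrative walk-through (comments only, not part of the mapping
    // logic): a raw A-REX state such as "PENDING:INLRMS: R" is lower-cased
    // above, the "pending:" prefix is stripped and spaces are removed below,
    // leaving "inlrms:r", which the chain of comparisons maps to
    // Arc::JobState::RUNNING. A hypothetical use of this class is therefore:
    //
    //   ARexINTERNAL::JobStateINTERNAL st("PENDING:INLRMS: R");
    //   // st now reports the generic RUNNING state to the compute library
    //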
if (state_.substr(0,8) == "pending:") state_.erase(0,8); // remove spaces because sometimes we may have 'INLRMS: *' std::string::size_type p = 0; while((p = state_.find(' ',p)) != std::string::npos) state_.erase(p,1); /// \mapattr ACCEPTED -> ACCEPTED /// \mapattr ACCEPTING -> ACCEPTED if ((state_ == "accepted") || (state_ == "accepting")) return JobState::ACCEPTED; /// \mapattr PREPARING -> PREPARING /// \mapattr PREPARED -> PREPARING else if ((state_ == "preparing") || (state_ == "prepared")) return JobState::PREPARING; /// \mapattr SUBMIT -> SUBMITTING /// \mapattr SUBMITTING -> SUBMITTING else if ((state_ == "submit") || (state_ == "submitting")) return JobState::SUBMITTING; /// \mapattr INLRMS:Q -> QUEUING else if (state_ == "inlrms:q") return JobState::QUEUING; /// \mapattr INLRMS:R -> RUNNING else if (state_ == "inlrms:r") return JobState::RUNNING; /// \mapattr INLRMS:H -> HOLD else if (state_ == "inlrms:h") return JobState::HOLD; /// \mapattr INLRMS:S -> HOLD else if (state_ == "inlrms:s") return JobState::HOLD; /// \mapattr INLRMS:E -> FINISHING else if (state_ == "inlrms:e") return JobState::FINISHING; /// \mapattr INLRMS:O -> HOLD else if (state_ == "inlrms:o") return JobState::HOLD; /// \mapattr INLRMS* -> QUEUING else if (state_.substr(0,6) == "inlrms") return JobState::QUEUING; // expect worst ? /// \mapattr FINISHING -> FINISHING /// \mapattr KILLING -> FINISHING /// \mapattr CANCELING -> FINISHING /// \mapattr EXECUTED -> FINISHING else if ((state_ == "finishing") || (state_ == "killing") || (state_ == "canceling") || (state_ == "executed")) return JobState::FINISHING; /// \mapattr FINISHED -> FINISHED else if (state_ == "finished") return JobState::FINISHED; /// \mapattr KILLED -> KILLED else if (state_ == "killed") return JobState::KILLED; /// \mapattr FAILED -> FAILED else if (state_ == "failed") return JobState::FAILED; /// \mapattr DELETED -> DELETED else if (state_ == "deleted") return JobState::DELETED; /// \mapattr "" -> UNDEFINED else if (state_ == "") return JobState::UNDEFINED; /// \mapattr Any other state -> OTHER else return JobState::OTHER; } } nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/DescriptorsINTERNAL.cpp0000644000000000000000000000013214152153376030662 xustar000000000000000030 mtime=1638455038.419646185 30 atime=1638455038.506647492 30 ctime=1638455100.397577432 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/DescriptorsINTERNAL.cpp0000644000175000002070000000245214152153376030652 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include "SubmitterPluginINTERNAL.h" #include "JobControllerPluginINTERNAL.h" #include "JobListRetrieverPluginINTERNAL.h" #include "TargetInformationRetrieverPluginINTERNAL.h" extern Arc::PluginDescriptor const ARC_PLUGINS_TABLE_NAME[] = { { "INTERNAL", "HED:SubmitterPlugin", "INTERNAL execution service", 0, &ARexINTERNAL::SubmitterPluginINTERNAL::Instance }, { "INTERNAL", "HED:JobControllerPlugin", "INTERNAL execution service", 0, &ARexINTERNAL::JobControllerPluginINTERNAL::Instance }, { "INTERNAL", "HED:TargetInformationRetrieverPlugin", "INTERNAL execution service", 0, &ARexINTERNAL::TargetInformationRetrieverPluginINTERNAL::Instance }, { "INTERNAL", "HED:JobListRetrieverPlugin", "INTERNAL execution service", 0, &ARexINTERNAL::JobListRetrieverPluginINTERNAL::Instance }, { NULL, NULL, NULL, 0, NULL } }; // Bug #3775 reports issues related to unloading this module. The source of the issue is not yet clear. 
// But taking into account complexity of linked libraries it is better to disable loading. extern "C" { void ARC_MODULE_CONSTRUCTOR_NAME(Glib::Module* module, Arc::ModuleManager* manager) { if(manager && module) { manager->makePersistent(module); }; } } nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/TargetInformationRetrieve0000644000000000000000000000013214152153376031605 xustar000000000000000030 mtime=1638455038.420646199 30 atime=1638455038.506647492 30 ctime=1638455100.396577417 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/TargetInformationRetrieverPluginINTERNAL.h0000644000175000002070000000234614152153376034523 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifndef __ARC_TARGETINFORMATIONRETRIEVERINTERNAL_H__ #define __ARC_TARGETINFORMATIONRETRIEVERINTERNAL_H__ #include #include using namespace Arc; namespace Arc{ class Logger; class EndpointQueryingStatus; class ExecutionTarget; class URL; class UserConfig; class XMLNode; } namespace ARexINTERNAL { class INTERNALClient; class JobStateINTERNAL; class TargetInformationRetrieverPluginINTERNAL: public TargetInformationRetrieverPlugin { public: TargetInformationRetrieverPluginINTERNAL(PluginArgument* parg): TargetInformationRetrieverPlugin(parg) { supportedInterfaces.push_back("org.nordugrid.internal"); }; ~TargetInformationRetrieverPluginINTERNAL() {}; static Plugin* Instance(PluginArgument *arg) { return new TargetInformationRetrieverPluginINTERNAL(arg); }; virtual EndpointQueryingStatus Query(const UserConfig&, const Endpoint&, std::list&, const EndpointQueryOptions&) const; virtual bool isEndpointNotSupported(const Endpoint&) const; private: static Logger logger; }; } // namespace Arc #endif // __ARC_TARGETINFORMATIONRETRIEVERINTERNAL_H__ nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/INTERNALClient.cpp0000644000000000000000000000013214152153376027577 xustar000000000000000030 mtime=1638455038.419646185 30 atime=1638455038.506647492 30 ctime=1638455100.386577266 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/INTERNALClient.cpp0000644000175000002070000006074414152153376027577 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include "../grid-manager/jobs/CommFIFO.h" #include "../grid-manager/jobs/JobDescriptionHandler.h" #include "../grid-manager/conf/GMConfig.h" #include "../grid-manager/files/ControlFileHandling.h" #include "JobStateINTERNAL.h" #include "INTERNALClient.h" //#include "../job.cpp" using namespace Arc; namespace ARexINTERNAL { Arc::Logger INTERNALClient::logger(Arc::Logger::rootLogger, "INTERNAL Client"); INTERNALClient::INTERNALClient(void) : config(NULL), arexconfig(NULL) { logger.msg(Arc::DEBUG,"Default INTERNAL client contructor"); if(!SetAndLoadConfig()){ logger.msg(Arc::ERROR,"Failed to load grid-manager configfile"); return; } if(!SetEndPoint()){ logger.msg(Arc::ERROR,"Failed to set INTERNAL endpoint"); return; } MapLocalUser(); PrepareARexConfig(); }; INTERNALClient::INTERNALClient(const Arc::UserConfig& usercfg) :usercfg(usercfg), config(NULL), arexconfig(NULL) { if(!SetAndLoadConfig()){ logger.msg(Arc::ERROR,"Failed to load grid-manager configfile"); return; } if(!SetEndPoint()){ logger.msg(Arc::ERROR,"Failed to set INTERNAL endpoint"); return; } MapLocalUser(); PrepareARexConfig(); }; //using this one from submitterpluginlocal INTERNALClient::INTERNALClient(const Arc::URL& url, const 
Arc::UserConfig& usercfg) :ce(url), usercfg(usercfg), config(NULL), arexconfig(NULL) { if(!SetAndLoadConfig()){ logger.msg(Arc::ERROR,"Failed to load grid-manager configfile"); return; } if(!SetEndPoint()){ logger.msg(Arc::ERROR,"Failed to set INTERNAL endpoint"); return; } MapLocalUser(); PrepareARexConfig(); }; INTERNALClient::~INTERNALClient() { delete config; delete arexconfig; } INTERNALJob::INTERNALJob(/*const */ARex::ARexJob& _arexjob, const ARex::GMConfig& config, std::string const& _deleg_id) :id(_arexjob.ID()), state((std::string)_arexjob.State()), sessiondir(_arexjob.SessionDir()), controldir(config.ControlDir()), delegation_id(_deleg_id) { stageout.push_back(_arexjob.SessionDir()); stagein.push_back(_arexjob.SessionDir()); } bool INTERNALClient::SetEndPoint(){ endpoint = config->ControlDir(); return true; } bool INTERNALClient::SetAndLoadConfig(){ cfgfile = ARex::GMConfig::GuessConfigFile(); if (cfgfile.empty()) { logger.msg(Arc::ERROR,"Failed to identify grid-manager config file"); return false; } // Push configuration through pre-parser in order to setup default values. // We are only interested in pidfile location because this is where // fully pre-processed configuration file resides. std::list parser_args; parser_args.push_back(Arc::ArcLocation::GetToolsDir() + "/arcconfig-parser"); parser_args.push_back("--config"); parser_args.push_back(cfgfile); parser_args.push_back("-b"); parser_args.push_back("arex"); parser_args.push_back("-o"); parser_args.push_back("pidfile"); Arc::Run parser(parser_args); std::string pidfile; parser.AssignStdout(pidfile); if((!parser.Start()) || (!parser.Wait())) { logger.msg(Arc::ERROR,"Failed to run configuration parser at %s.", parser_args.front()); return false; } if(parser.Result() != 0) { logger.msg(Arc::ERROR,"Parser failed with error code %i.", (int)parser.Result()); return false; } pidfile = Arc::trim(pidfile, "\r\n"); // parser adds EOLs struct stat st; if(!FileStat(pidfile, &st, true)) { logger.msg(Arc::ERROR,"No pid file is found at '%s'. Probably A-REX is not running.", pidfile); return false; } // Actual config file location cfgfile = pidfile; std::string::size_type dot_pos = cfgfile.find_last_of("./"); if((dot_pos != std::string::npos) && (cfgfile[dot_pos] == '.')) cfgfile.resize(dot_pos); cfgfile += ".cfg"; config = new ARex::GMConfig(cfgfile); config->SetDelegations(&deleg_stores); if(!config->Load()){ logger.msg(Arc::ERROR,"Failed to load grid-manager config file from %s", cfgfile); return false; } ARex::DelegationStore::DbType deleg_db_type = ARex::DelegationStore::DbBerkeley; switch(config->DelegationDBType()) { case ARex::GMConfig::deleg_db_bdb: deleg_db_type = ARex::DelegationStore::DbBerkeley; break; case ARex::GMConfig::deleg_db_sqlite: deleg_db_type = ARex::DelegationStore::DbSQLite; break; }; deleg_stores.SetDbType(deleg_db_type); config->Print(); return true; } // Security attribute simulating information pulled from TLS layer. 
class TLSSecAttr: public SecAttr { public: TLSSecAttr(Arc::UserConfig& usercfg) { Arc::Credential cred(usercfg); identity_ = cred.GetIdentityName(); Arc::VOMSTrustList trust_list; trust_list.AddRegex("^.*$"); std::vector voms; if(parseVOMSAC(cred, usercfg.CACertificatesDirectory(), usercfg.CACertificatePath(), usercfg.VOMSESPath()/*?*/, trust_list, voms, true, true)) { for(std::vector::const_iterator v = voms.begin(); v != voms.end();++v) { if(!(v->status & VOMSACInfo::Error)) { for(std::vector::const_iterator a = v->attributes.begin(); a != v->attributes.end();++a) { voms_.push_back(VOMSFQANToFull(v->voname,*a)); }; }; }; }; } virtual ~TLSSecAttr(void) { } virtual operator bool(void) const { return true; } virtual bool Export(SecAttrFormat format,XMLNode &val) const { return false; } virtual std::string get(const std::string& id) const { if(id == "IDENTITY") return identity_; std::list items = getAll(id); if(!items.empty()) return *items.begin(); return ""; } virtual std::list getAll(const std::string& id) const { if(id == "VOMS") { return voms_; }; return SecAttr::getAll(id); } std::string const& Identity() const { return identity_; } protected: std::string identity_; // Subject of last non-proxy certificate std::list voms_; // VOMS attributes from the VOMS extension of proxy virtual bool equal(const SecAttr &b) const { return false; } }; bool INTERNALClient::MapLocalUser(){ if(!arexconfig) { logger.msg(Arc::ERROR, "INTERNALClient is not initialized"); return false; } Arc::Credential cred(usercfg); // Here we need to simulate message going though chain of plugins. // Luckily we only need these SecHandler plugins: legacy.handler and legacy.map. // And as source of information "TLS" Security Attribute must be supplied following // information items: IDENTITY (user subject) and VOMS (VOMS FQANs). 
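    // (Illustrative summary, comments only.) The simulated message is then
    // run through three SecHandler plugins, configured below, and the mapped
    // local account comes back as a message attribute:
    //
    //   identity.map      - maps the subject using /etc/grid-security/grid-mapfile
    //   arclegacy.handler - evaluates the legacy authorization rules from the
    //                       A-REX configuration file
    //   arclegacy.map     - applies the "mapping" block of the same configuration
    //   // on success msg.Attributes()->get("SEC:LOCALID") holds the UNIX
    //   // account name, which is wrapped into Arc::User further down
    //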
// Load plugins Config factory_cfg; //(""); MCCLoader loader(factory_cfg); //factory_cfg.NewChild("ModuleManager").NewChild("Path") = Arc::ArcLocation::Get()+"/lib/arc"; //factory_cfg.NewChild("Plugins").NewChild("Name") = "arcshclegacy"; //PluginsFactory factory(factory_cfg); ChainContext& context(*static_cast(loader)); PluginsFactory& factory(*static_cast(context)); factory.load("arcshc"); factory.load("arcshclegacy"); factory.load("identitymap"); //Arc::ChainContext context(MCCLoader& loader); ArcSec::SecHandler* gridmapper(NULL); ArcSec::SecHandler* handler(NULL); ArcSec::SecHandler* mapper(NULL); { ArcSec::SecHandlerConfig xcfg("identity.map", "incoming"); Config cfg(xcfg /*, cfg.getFileName()*/); XMLNode pdp1 = cfg.NewChild("PDP"); pdp1.NewAttribute("name") = "allow.pdp"; pdp1.NewChild("LocalList") = "/etc/grid-security/grid-mapfile"; //XMLNode pdp2 = cfg.NewChild("PDP"); //pdp2.NewAttribute("allow.pdp"); //pdp2.NewChild("LocalName") = "nobody"; ArcSec::SecHandlerPluginArgument arg(&cfg, &context); Plugin* plugin = factory.get_instance(SecHandlerPluginKind, "identity.map", &arg); gridmapper = plugin?dynamic_cast(plugin):NULL; } { ArcSec::SecHandlerConfig xcfg("arclegacy.handler", "incoming"); Config cfg(xcfg /*, cfg.getFileName()*/); cfg.NewChild("ConfigFile") = config->ConfigFile(); ArcSec::SecHandlerPluginArgument arg(&cfg, &context); Plugin* plugin = factory.get_instance(SecHandlerPluginKind, "arclegacy.handler", &arg); handler = plugin?dynamic_cast(plugin):NULL; }; { ArcSec::SecHandlerConfig xcfg("arclegacy.map", "incoming"); Config cfg(xcfg /*, cfg.getFileName()*/); XMLNode block = cfg.NewChild("ConfigBlock"); block.NewChild("ConfigFile") = config->ConfigFile(); block.NewChild("BlockName") = "mapping"; ArcSec::SecHandlerPluginArgument arg(&cfg, &context); Plugin* plugin = factory.get_instance(SecHandlerPluginKind, "arclegacy.map", &arg); mapper = plugin?dynamic_cast(plugin):NULL; }; bool result = false; if(gridmapper && handler && mapper) { // Prepare information source TLSSecAttr* sec_attr = new TLSSecAttr(usercfg); // Setup fake mesage to be used as container for information being processed Arc::Message msg; msg.Auth()->set("TLS", sec_attr); // Message takes ownership of the sec_attr // Some plugins fetch user DN from message attributes msg.Attributes()->set("TLS:IDENTITYDN", sec_attr->Identity()); // Process collected information if((gridmapper->Handle(&msg)) && (handler->Handle(&msg)) && (mapper->Handle(&msg))) { // Result of mapping is stored in message attribute - fetch it std::string uname = msg.Attributes()->get("SEC:LOCALID"); if(!uname.empty()) { user = Arc::User(uname); result = true; } } } delete gridmapper; delete handler; delete mapper; return result; } bool INTERNALClient::PrepareARexConfig(){ Arc::Credential cred(usercfg); std::string gridname = cred.GetIdentityName(); arexconfig = new ARex::ARexGMConfig(*config,user.Name(),gridname,endpoint); return true; } bool INTERNALClient::CreateDelegation(std::string& deleg_id){ if(!arexconfig) { logger.msg(Arc::ERROR, "INTERNALClient is not initialized"); return false; } // Create new delegation slot in delegation store and // generate or apply delegation id. 
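    // (Illustrative usage, comments only.) The submitter side drives this via
    // getDelegationID(); a stripped-down version of that call sequence is:
    //
    //   INTERNALClient ac(url, usercfg);
    //   std::string delegation_id;               // empty: a new id is generated
    //   if (!ac.CreateDelegation(delegation_id)) {
    //     // report ac.failure() and mark the description as not submitted
    //   }
    //   // delegation_id is then passed on to ac.submit(...)
    //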
Arc::Credential cred(usercfg); std::string gridname = cred.GetIdentityName(); std::string proxy_data; std::string proxy_part1; std::string proxy_part2; std::string proxy_part3; cred.OutputCertificate(proxy_part1); cred.OutputPrivatekey(proxy_part2); cred.OutputCertificateChain(proxy_part3); proxy_data = proxy_part1 + proxy_part2 + proxy_part3; ARex::DelegationStore& deleg = deleg_stores[config->DelegationDir()]; if(!deleg.AddCred(deleg_id, gridname, proxy_data)) { error_description="Failed to store delegation."; logger.msg(Arc::ERROR, "%s", error_description); return false; } return true; } bool INTERNALClient::RenewDelegation(std::string const& deleg_id) { if(!arexconfig) { logger.msg(Arc::ERROR, "INTERNALClient is not initialized"); return false; } // Create new delegation in already assigned slot if(deleg_id.empty()) return false; Arc::Credential cred(usercfg); std::string gridname = cred.GetIdentityName(); std::string proxy_data; //std::string proxy_key; //cred.OutputCertificateChain(proxy_data); //cred.OutputPrivatekey(proxy_key); //proxy_data = proxy_key + proxy_data; //usercfg.CredentialString(proxy_data); std::string proxy_part1; std::string proxy_part2; std::string proxy_part3; cred.OutputCertificate(proxy_part1); cred.OutputPrivatekey(proxy_part2); cred.OutputCertificateChain(proxy_part3); proxy_data = proxy_part1 + proxy_part2 + proxy_part3; ARex::DelegationStore& deleg = deleg_stores[config->DelegationDir()]; if(!deleg.PutCred(deleg_id, gridname, proxy_data)) { error_description="Failed to store delegation."; logger.msg(Arc::ERROR, "%s", error_description); return false; } return true; } std::string INTERNALClient::get_error_description() const { if (!error_description.empty()) return error_description; return ""; } bool INTERNALClient::submit(const std::list& jobdescs,std::list& localjobs, const std::string delegation_id) { if(!arexconfig) { logger.msg(Arc::ERROR, "INTERNALClient is not initialized"); return false; } //called by SubmitterPluginINTERNAL ac->submit(..) logger.msg(Arc::VERBOSE, "Submitting job "); bool noFailures = true; int limit = 1000000; // 1 M - Safety std::list::const_iterator itSubmit = jobdescs.begin(), itLastProcessedEnd = jobdescs.begin(); while (itSubmit != jobdescs.end() && limit > 0) { for (int i = 0; itSubmit != jobdescs.end() && i < limit; ++itSubmit, ++i) { INTERNALJob localjob; //set some additional parameters if(config->DefaultQueue().empty() && (config->Queues().size() == 1)) { config->SetDefaultQueue(*(config->Queues().begin())); } ARex::JobDescriptionHandler job_desc_handler(*config); ARex::JobLocalDescription job_desc; std::string jobdesc_str; Arc::JobDescriptionResult ures = (*itSubmit).UnParse(jobdesc_str,"emies:adl"); Arc::XMLNode adl(jobdesc_str); ARex::JobIDGeneratorINTERNAL idgenerator(endpoint); const std::string dummy = ""; ARex::ARexJob arexjob(adl,*arexconfig,delegation_id,dummy,logger,idgenerator); if(!arexjob){ logger.msg(Arc::ERROR, "%s",arexjob.Failure()); return false; } else{ //make localjob for internal handling INTERNALJob localjob(arexjob,*config,delegation_id); localjobs.push_back(localjob); } } itLastProcessedEnd = itSubmit; } return noFailures; } bool INTERNALClient::putFiles(INTERNALJob const& localjob, std::list const& sources, std::list const& destinations) { if(!arexconfig) { logger.msg(Arc::ERROR, "INTERNALClient is not initialized"); return false; } ARex::GMJob gmjob(localjob.id, user, localjob.sessiondir, ARex::JOB_STATE_ACCEPTED); //Fix-me removed cbegin and cend from sources and destination. 
Either fix compiler, or rewrite to be const. for(std::list::const_iterator source = sources.begin(), destination = destinations.begin(); source != sources.end() && destination != destinations.end(); ++source, ++destination) { std::string path = localjob.sessiondir + "/" + *destination; std::string fn = "/" + *destination; // TODO: direct copy will not work if session is on NFS if(!FileCopy(*source, path)) { logger.msg(Arc::ERROR, "Failed to copy input file: %s to path: %s",path); return false; } if((!ARex::fix_file_permissions(path,false)) || // executable flags is handled by A-Rex (!ARex::fix_file_owner(path,gmjob))) { logger.msg(Arc::ERROR, "Failed to set permissions on: %s",path); //clean job here? At the moment job is left in limbo in control and sessiondir clean(localjob.id); return false; } ARex::job_input_status_add_file(gmjob,*config,fn); } (void)ARex::CommFIFO::Signal(config->ControlDir(), localjob.id); return true; } bool INTERNALClient::submit(const Arc::JobDescription& jobdesc, INTERNALJob& localjob, const std::string delegation_id) { std::list jobdescs; std::list localjobs; jobdescs.push_back(jobdesc); if(!submit(jobdescs, localjobs, delegation_id)) return false; if(localjobs.empty()) return false; localjob = localjobs.back(); return true; } bool INTERNALClient::info(std::list& jobs, std::list& jobids_found){ if(!arexconfig) { logger.msg(Arc::ERROR, "INTERNALClient is not initialized"); return false; } //at the moment called by JobListretrieverPluginINTERNAL Query for(std::list::iterator job = jobs.begin(); job!= jobs.end(); job++){ ARex::ARexJob arexjob(job->id,*arexconfig,logger); std::string state = arexjob.State(); if (state != "UNDEFINED") jobids_found.push_back(*job); } return true; } bool INTERNALClient::info(INTERNALJob& localjob, Arc::Job& arcjob){ if(!arexconfig) { logger.msg(Arc::ERROR, "INTERNALClient is not initialized"); return false; } //Called from (at least) JobControllerPluginINTERNAL //Called for stagein/out/sessionodir if url of either is not known //Extracts information about current arcjob from arexjob and job.jobid.description file and updates/populates the localjob and arcjob with this info, and fills a localjob with the information std::vector tokens; Arc::tokenize(arcjob.JobID, tokens, "/"); if(tokens.empty()) return false; //NB! Add control that the arcjob.jobID is in correct format localjob.id = tokens.back(); ARex::JobId gm_job_id = localjob.id; ARex::ARexJob arexjob(gm_job_id,*arexconfig,logger); arcjob.State = JobStateINTERNAL((std::string)arexjob.State()); if(!localjob.delegation_id.empty()) arcjob.DelegationID.push_back(localjob.delegation_id); //Get other relevant info from the .info file ARex::JobLocalDescription job_desc; if(!ARex::job_local_read_file(gm_job_id,*config,job_desc)) { error_description="Job is probably corrupted: can't read internal information."; logger.msg(Arc::ERROR, "%s", error_description); return false; }; //JobControllerPluginINTERNAL needs this, so make sure it is set. if(localjob.session.empty()){ localjob.session.push_back((std::string)job_desc.sessiondir); } if(localjob.stagein.empty()){ //assume that it is sessiondir localjob.stagein.push_back((std::string)job_desc.sessiondir); } if(localjob.stageout.empty()){ //assume that it is sessiondir localjob.stageout.push_back((std::string)job_desc.sessiondir); } return true; } bool INTERNALClient::sstat(Arc::XMLNode& xmldoc) { if(!arexconfig) { logger.msg(Arc::ERROR, "INTERNALClient is not initialized"); return false; } //TO-DO Need to lock info.xml during reading? 
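  // Note on the TO-DO above: the read below goes through Arc::FileRead() without any
  // lock, so a concurrent rewrite of info.xml could in principle be observed
  // half-written. A minimal sketch of one possible way to serialize readers with a
  // cooperating writer - POSIX advisory locking, purely illustrative and not part of
  // the current code path (would need <fcntl.h>, <sys/file.h>, <unistd.h>):
#if 0
  int fd = ::open(config->InformationFile().c_str(), O_RDONLY);
  if(fd != -1) {
    (void)::flock(fd, LOCK_SH);   // shared lock: many readers, excludes a writer holding LOCK_EX
    // ... read the whole file here instead of calling Arc::FileRead() ...
    (void)::flock(fd, LOCK_UN);
    ::close(fd);
  }
#endif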
std::string fname = config->InformationFile(); std::string xmlstring; (void)Arc::FileRead(fname, xmlstring); if(xmlstring.empty()){ error_description="Failed to obtain resource information."; logger.msg(Arc::ERROR, "%s", error_description); return false; } XMLNode tmp(xmlstring); XMLNode services = tmp["Domains"]["AdminDomain"]["Services"]; if(!services) { lfailure = "Missing Services in response"; return false; } services.Move(xmldoc); return true; } bool INTERNALClient::kill(const std::string& jobid){ if(!arexconfig) { logger.msg(Arc::ERROR, "INTERNALClient is not initialized"); return false; } //jobid is full url std::vector tokens; Arc::tokenize(jobid, tokens, "/"); if(tokens.empty()) return false; std::string thisid = tokens.back(); ARex::ARexJob arexjob(thisid,*arexconfig,logger); arexjob.Cancel(); return true; } bool INTERNALClient::clean(const std::string& jobid){ if(!arexconfig) { logger.msg(Arc::ERROR, "INTERNALClient is not initialized"); return false; } //jobid is full url std::vector tokens; Arc::tokenize(jobid, tokens, "/"); if(tokens.empty()) return false; std::string thisid = tokens.back(); ARex::ARexJob arexjob(thisid,*arexconfig,logger); arexjob.Clean(); return true; } bool INTERNALClient::restart(const std::string& jobid){ if(!arexconfig) { logger.msg(Arc::ERROR, "INTERNALClient is not initialized"); return false; } //jobid is full url std::vector tokens; Arc::tokenize(jobid, tokens, "/"); if(tokens.empty()) return false; std::string thisid = tokens.back(); ARex::ARexJob arexjob(thisid,*arexconfig,logger); arexjob.Resume(); return true; } bool INTERNALClient::list(std::list& jobs){ //Populates localjobs containing only jobid //how do I want to search for jobs in system? std::string cdir=config->ControlDir(); Glib::Dir dir(cdir); std::string file_name; while ((file_name = dir.read_name()) != "") { std::vector tokens; Arc::tokenize(file_name, tokens, "."); // look for job.id.local if (tokens.size() == 3 && tokens[0] == "job" && tokens[2] == "local") { INTERNALJob job; job.id = (std::string)tokens[1]; jobs.push_back(job); }; } dir.close(); return true; } INTERNALJob& INTERNALJob::operator=(const Arc::Job& job) { //Set localjob attributes from the ARC job //Called from JobControllerPlugin stagein.clear(); session.clear(); stageout.clear(); if (job.StageInDir) stagein.push_back(job.StageInDir); if (job.StageOutDir) stageout.push_back(job.StageOutDir); if (job.SessionDir) session.push_back(job.SessionDir); id = job.JobID; manager = job.JobManagementURL; resource = job.ServiceInformationURL; delegation_id = job.DelegationID.empty()?std::string(""):*job.DelegationID.begin(); // State information is not transfered from Job object. Currently not needed. 
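  // (The state is instead obtained when the job is queried: INTERNALJob::toJob()
  // below asks ARexJob for the current state and converts it via JobStateINTERNAL.)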
    return *this;
  }

  void INTERNALJob::toJob(INTERNALClient* client, INTERNALJob* localjob, Arc::Job& j) const {
    //fills an arcjob from localjob
    //j.JobID = (client->ce).str() + "/" + localjob->id;
    j.JobID = "file://" + sessiondir;
    j.ServiceInformationURL = client->ce;
    j.ServiceInformationInterfaceName = "org.nordugrid.internal";
    j.JobStatusURL = client->ce;
    j.JobStatusInterfaceName = "org.nordugrid.internal";
    j.JobManagementURL = client->ce;
    j.JobManagementInterfaceName = "org.nordugrid.internal";
    j.IDFromEndpoint = id;
    if (!stagein.empty()) j.StageInDir = stagein.front();
    else j.StageInDir = sessiondir;
    if (!stageout.empty()) j.StageOutDir = stageout.front();
    else j.StageOutDir = sessiondir;
    if (!session.empty()) j.SessionDir = session.front();
    else j.SessionDir = sessiondir;
    j.DelegationID.clear();
    if(!(localjob->delegation_id).empty()) j.DelegationID.push_back(localjob->delegation_id);
  }

  void INTERNALJob::toJob(INTERNALClient* client, Arc::Job& arcjob, Arc::Logger& logger) const {
    //called from UpdateJobs in JobControllerPluginINTERNAL
    if (!stagein.empty()) arcjob.StageInDir = stagein.front();
    else arcjob.StageInDir = sessiondir;
    if (!stageout.empty()) arcjob.StageOutDir = stageout.front();
    else arcjob.StageOutDir = sessiondir;
    if (!session.empty()) arcjob.SessionDir = session.front();
    else arcjob.SessionDir = sessiondir;
    //extract info from arexjob
    //extract jobid from arcjob, which is the full jobid url
    std::vector<std::string> tokens;
    Arc::tokenize(arcjob.JobID, tokens, "/");
    if(!tokens.empty()) {
      //NB! Add control that the arcjob.jobID is in correct format
      ARex::JobId gm_job_id = tokens.back();
      if(client && client->arexconfig) {
        ARex::ARexJob arexjob(gm_job_id,*(client->arexconfig),client->logger);
        std::string state = arexjob.State();
        arcjob.State = JobStateINTERNAL(state);
      }
    }
  }

  // -----------------------------------------------------------------------------
  // TODO: does it need locking?
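  // Illustrative answer to the TODO above, not part of the current code: if an
  // INTERNALClients instance is ever shared between threads, accesses to the
  // clients_ container could be guarded by a mutex member (hypothetical - the
  // member would also have to be added to the class declaration, needs <mutex>):
#if 0
  std::lock_guard<std::mutex> guard(clients_lock_);  // hypothetical std::mutex member
  // ... insert into / erase from clients_ here ...
#endif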
INTERNALClients::INTERNALClients(const Arc::UserConfig& usercfg):usercfg_(usercfg) { } INTERNALClients::~INTERNALClients(void) { std::multimap::iterator it; for (it = clients_.begin(); it != clients_.end(); it = clients_.begin()) { delete it->second; } } } nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/JobControllerPluginINTERN0000644000000000000000000000013214152153376031320 xustar000000000000000030 mtime=1638455038.419646185 30 atime=1638455038.506647492 30 ctime=1638455100.392577356 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/JobControllerPluginINTERNAL.cpp0000644000175000002070000002453314152153376032312 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include "../grid-manager/conf/GMConfig.h" #include "../grid-manager/files/ControlFileHandling.h" #include "JobStateINTERNAL.h" #include "INTERNALClient.h" #include "JobControllerPluginINTERNAL.h" using namespace Arc; namespace ARexINTERNAL { Logger JobControllerPluginINTERNAL::logger(Logger::getRootLogger(), "JobControllerPlugin.INTERNAL"); bool JobControllerPluginINTERNAL::isEndpointNotSupported(const std::string& endpoint) const { const std::string::size_type pos = endpoint.find("://"); return pos != std::string::npos && lower(endpoint.substr(0, pos)) != "file" ; } void JobControllerPluginINTERNAL::UpdateJobs(std::list& jobs, std::list& IDsProcessed, std::list& IDsNotProcessed, bool isGrouped) const { if (jobs.empty()) return; INTERNALClient ac; ARex::GMConfig const *config = ac.GetConfig(); if(!config){ logger.msg(Arc::ERROR,"Failed to load grid-manager config file"); return; } //Is this method doing what it is supposed to do? I think the main purpose is to get hold of jobids for existing jobs in the system. for(std::list::iterator itJ = jobs.begin(); itJ != jobs.end(); itJ++){ //stat the .description file to check whether job is still in the system //(*itJ).JobID is now the global id, tokenize and get hold of just the local jobid std::vector tokens; Arc::tokenize((**itJ).JobID, tokens, "/"); std::string localid = tokens[tokens.size()-1]; std::string rsl; if(!ARex::job_description_read_file(localid, *config, rsl)){ continue; } //the job exists, so add it INTERNALJob localjob; //toJob calls info(job) and populates the arcjob with basic information (id and state). 
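      //This toJob() overload (defined in INTERNALClient.cpp) takes the last path
      //component of the global JobID as the A-REX job id, opens the corresponding
      //ARexJob and refreshes (*itJ)->State from the control directory.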
localjob.toJob(&ac,**itJ,logger); if (itJ != jobs.end()) { IDsProcessed.push_back((**itJ).JobID); } else{ IDsNotProcessed.push_back((**itJ).JobID); } } } bool JobControllerPluginINTERNAL::CleanJobs(const std::list& jobs, std::list& IDsProcessed, std::list& IDsNotProcessed, bool isGrouped) const { INTERNALClient ac(*usercfg); ARex::GMConfig const * config = ac.GetConfig(); if(!config){ logger.msg(Arc::ERROR,"Failed to load grid-manager config file"); return false; } bool ok = true; for (std::list::const_iterator it = jobs.begin(); it != jobs.end(); ++it) { //Job& job = **it; if (!ac.clean((*it)->JobID)) { ok = false; IDsNotProcessed.push_back((*it)->JobID); continue; } IDsProcessed.push_back((*it)->JobID); } return ok; } bool JobControllerPluginINTERNAL::CancelJobs(const std::list& jobs, std::list& IDsProcessed, std::list& IDsNotProcessed, bool isGrouped) const { bool ok = true; for (std::list::const_iterator it = jobs.begin(); it != jobs.end(); ++it) { INTERNALClient ac(*usercfg); ARex::GMConfig const * config = ac.GetConfig(); if(!config){ logger.msg(Arc::ERROR,"Failed to load grid-manager config file"); return false; } if(!ac.kill((*it)->JobID)) { ok = false; IDsNotProcessed.push_back((*it)->JobID); continue; } (*it)->State = JobStateINTERNAL((std::string)"killed"); IDsProcessed.push_back((*it)->JobID); } return ok; } bool JobControllerPluginINTERNAL::RenewJobs(const std::list& jobs, std::list& IDsProcessed, std::list& IDsNotProcessed, bool isGrouped) const { for (std::list::const_iterator it = jobs.begin(); it != jobs.end(); ++it) { // 1. Fetch/find delegation ids for each job INTERNALClient ac; ARex::GMConfig const * config = ac.GetConfig(); if(!config){ logger.msg(Arc::ERROR,"Failed to load grid-manager config file"); return false; } if((*it)->DelegationID.empty()) { logger.msg(INFO, "Job %s has no delegation associated. Can't renew such job.", (*it)->JobID); IDsNotProcessed.push_back((*it)->JobID); continue; } // 2. Leave only unique IDs - not needed yet because current code uses // different delegations for each job. // 3. 
Renew credentials for every ID std::list::const_iterator did = (*it)->DelegationID.begin(); for(;did != (*it)->DelegationID.end();++did) { if(!ac.RenewDelegation(*did)) { logger.msg(INFO, "Job %s failed to renew delegation %s.", (*it)->JobID/*, *did, ac->failure()*/); break; } } if(did != (*it)->DelegationID.end()) { IDsNotProcessed.push_back((*it)->JobID); continue; } IDsProcessed.push_back((*it)->JobID); } return false; } bool JobControllerPluginINTERNAL::ResumeJobs(const std::list& jobs, std::list& IDsProcessed, std::list& IDsNotProcessed, bool isGrouped) const { bool ok = true; for (std::list::const_iterator it = jobs.begin(); it != jobs.end(); ++it) { INTERNALClient ac; ARex::GMConfig const * config = ac.GetConfig(); if(!config){ logger.msg(Arc::ERROR,"Failed to load grid-manager config file"); return false; } Job& job = **it; if (!job.RestartState) { logger.msg(INFO, "Job %s does not report a resumable state", job.JobID); ok = false; IDsNotProcessed.push_back(job.JobID); continue; } logger.msg(VERBOSE, "Resuming job: %s at state: %s (%s)", job.JobID, job.RestartState.GetGeneralState(), job.RestartState()); if(!ac.restart((*it)->JobID)) { ok = false; IDsNotProcessed.push_back((*it)->JobID); continue; } IDsProcessed.push_back((*it)->JobID); logger.msg(VERBOSE, "Job resuming successful"); } return ok; } bool JobControllerPluginINTERNAL::GetURLToJobResource(const Job& job, Job::ResourceType resource, URL& url) const { if (resource == Job::JOBDESCRIPTION) { return false; } // Obtain information about staging urls INTERNALJob ljob; ljob = job; URL stagein; URL stageout; URL session; // TODO: currently using first valid URL. Need support for multiple. for(std::list::const_iterator s = ljob.GetStagein().begin();s!=ljob.GetStagein().end();++s) { if(*s) { stagein = *s; break; } } for(std::list::const_iterator s = ljob.GetStageout().begin();s!=ljob.GetStageout().end();++s) { if(*s) { stageout = *s; break; } } for(std::list::const_iterator s = ljob.GetSession().begin();s!=ljob.GetSession().end();++s) { if(*s) { session = *s; break; } } if ((resource != Job::STAGEINDIR || !stagein) && (resource != Job::STAGEOUTDIR || !stageout) && (resource != Job::SESSIONDIR || !session)) { // If there is no needed URL provided try to fetch it from server Job tjob; tjob.JobID = job.JobID; INTERNALClient ac; ARex::GMConfig const * config = ac.GetConfig(); if(!config){ logger.msg(Arc::ERROR,"Failed to load grid-manager config file"); return false; } if (!ac.info(ljob, tjob)) { logger.msg(INFO, "Failed retrieving information for job: %s", job.JobID); return false; } for(std::list::const_iterator s = ljob.GetStagein().begin();s!=ljob.GetStagein().end();++s) { if(*s) { stagein = *s; break; } } for(std::list::const_iterator s = ljob.GetStageout().begin();s!=ljob.GetStageout().end();++s) { if(*s) { stageout = *s; break; } } for(std::list::const_iterator s = ljob.GetSession().begin();s!=ljob.GetSession().end();++s) { if(*s) { session = *s; break; } } // Choose url by state // TODO: For INTERNAL submission plugin the url is the same for all, although not reflected here // TODO: maybe this method should somehow know what is purpose of URL // TODO: state attributes would be more suitable // TODO: library need to be etended to allow for multiple URLs if((tjob.State == JobState::ACCEPTED) || (tjob.State == JobState::PREPARING)) { url = stagein; } else if((tjob.State == JobState::DELETED) || (tjob.State == JobState::FAILED) || (tjob.State == JobState::KILLED) || (tjob.State == JobState::FINISHED) || (tjob.State == 
JobState::FINISHING)) { url = stageout; } else { url = session; } // If no url found by state still try to get something if(!url) { if(session) url = session; if(stagein) url = stagein; if(stageout) url = stageout; } } switch (resource) { case Job::STDIN: url.ChangePath(url.Path() + '/' + job.StdIn); break; case Job::STDOUT: url.ChangePath(url.Path() + '/' + job.StdOut); break; case Job::STDERR: url.ChangePath(url.Path() + '/' + job.StdErr); break; case Job::JOBLOG: url.ChangePath(url.Path() + "/" + job.LogDir + "/errors"); break; case Job::STAGEINDIR: if(stagein) url = stagein; break; case Job::STAGEOUTDIR: if(stageout) url = stageout; break; case Job::SESSIONDIR: if(session) url = session; break; default: break; } if(url && ((url.Protocol() == "file"))) { //To-do - is this relevant for INTERNAL plugin? url.AddOption("threads=2",false); url.AddOption("encryption=optional",false); // url.AddOption("httpputpartial=yes",false); - TODO: use for A-REX } return true; } bool JobControllerPluginINTERNAL::GetJobDescription(const Job& /* job */, std::string& /* desc_str */) const { logger.msg(INFO, "Retrieving job description of INTERNAL jobs is not supported"); return false; } } // namespace Arc nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/JobListRetrieverPluginINT0000644000000000000000000000013214152153376031433 xustar000000000000000030 mtime=1638455038.419646185 30 atime=1638455038.506647492 30 ctime=1638455100.394577386 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.h0000644000175000002070000000206714152153376032435 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifndef __ARC_JOBLISTRETRIEVERPLUGININTERNAL_H__ #define __ARC_JOBLISTRETRIEVERPLUGININTERNAL_H__ #include #include using namespace Arc; namespace Arc{ class Logger; } namespace ARexINTERNAL { class JobLocalDescription; class INTERNALClient; class INTERNALClients; class JobListRetrieverPluginINTERNAL : public Arc::JobListRetrieverPlugin { public: JobListRetrieverPluginINTERNAL(PluginArgument* parg): JobListRetrieverPlugin(parg) { supportedInterfaces.push_back("org.nordugrid.internal"); } virtual ~JobListRetrieverPluginINTERNAL() {} static Plugin* Instance(PluginArgument *arg) { return new JobListRetrieverPluginINTERNAL(arg); } virtual EndpointQueryingStatus Query(const UserConfig&, const Endpoint&, std::list&, const EndpointQueryOptions&) const; virtual bool isEndpointNotSupported(const Endpoint&) const; private: static Logger logger; }; } // namespace Arc #endif // __ARC_JOBLISTRETRIEVERPLUGININTERNAL_H__ nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/JobListRetrieverPluginINT0000644000000000000000000000013214152153376031433 xustar000000000000000030 mtime=1638455038.419646185 30 atime=1638455038.506647492 30 ctime=1638455100.393577372 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/JobListRetrieverPluginINTERNAL.cpp0000644000175000002070000000545014152153376032767 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include "../grid-manager/files/ControlFileContent.h" #include "../grid-manager/files/ControlFileHandling.h" #include "JobStateINTERNAL.h" #include "INTERNALClient.h" #include "JobListRetrieverPluginINTERNAL.h" using namespace Arc; namespace ARexINTERNAL { Logger JobListRetrieverPluginINTERNAL::logger(Logger::getRootLogger(), "JobListRetrieverPlugin.INTERNAL"); bool 
JobListRetrieverPluginINTERNAL::isEndpointNotSupported(const Endpoint& endpoint) const { const std::string::size_type pos = endpoint.URLString.find("://"); if (pos != std::string::npos) { const std::string proto = lower(endpoint.URLString.substr(0, pos)); return ((proto != "file")); } return (endpoint.URLString != "localhost"); } static URL CreateURL(std::string service) { std::string::size_type pos1 = service.find("://"); if (pos1 == std::string::npos) { service = "file://" + service; } else { std::string proto = lower(service.substr(0,pos1)); if(proto != "file") return URL(); } return service; } EndpointQueryingStatus JobListRetrieverPluginINTERNAL::Query(const UserConfig& uc, const Endpoint& endpoint, std::list& jobs, const EndpointQueryOptions&) const { EndpointQueryingStatus s(EndpointQueryingStatus::FAILED); //this can all be simplified I think - TO-DO URL url(CreateURL(endpoint.URLString)); if (!url) { return s; } INTERNALClient ac(uc); if (!ac.GetConfig()) { return s; } std::list localjobs; if (!ac.list(localjobs)) { return s; } logger.msg(DEBUG, "Listing localjobs succeeded, %d localjobs found", localjobs.size()); //checks that the job is in state other than undefined std::list jobids_found; ac.info(localjobs,jobids_found); std::list::iterator itID = jobids_found.begin(); for(; itID != jobids_found.end(); ++itID) { //read job description to get hold of submission-interface ARex::JobLocalDescription job_desc; ARex::JobId jobid((*itID).GetId()); ARex::job_local_read_file(jobid, *ac.GetConfig(), job_desc); std::string submittedVia = job_desc.interface; if (submittedVia != "org.nordugrid.internal") { logger.msg(DEBUG, "Skipping retrieved job (%s) because it was submitted via another interface (%s).", url.fullstr() + "/" + itID->GetId(), submittedVia); continue; } INTERNALJob localjob; Job j; itID->toJob(&ac, &localjob, j); jobs.push_back(j); }; s = EndpointQueryingStatus::SUCCESSFUL; return s; } } // namespace Arc nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/PaxHeaders.30264/README0000644000000000000000000000013214152153376025400 xustar000000000000000030 mtime=1638455038.420646199 30 atime=1638455038.506647492 30 ctime=1638455100.386577266 nordugrid-arc-6.14.0/src/services/a-rex/internaljobplugin/README0000644000175000002070000000025714152153376025371 0ustar00mockbuildmock00000000000000Arc Client Component (ACC) plugins for supporting ARC lightweight (INTERNAL) Implements the following specialized classes: o JobControllerPluginINTERNAL o SubmitterINTERNAL nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/cachecheck.cpp0000644000000000000000000000013214152153376023537 xustar000000000000000030 mtime=1638455038.406645989 30 atime=1638455038.500647402 30 ctime=1638455099.415562676 nordugrid-arc-6.14.0/src/services/a-rex/cachecheck.cpp0000644000175000002070000000616214152153376023531 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include "job.h" #include "arex.h" #define CACHE_CHECK_SESSION_DIR_ID "9999999999999999999999999999999" namespace ARex { Arc::MCC_Status ARexService::CacheCheck(ARexGMConfig& config,Arc::XMLNode in,Arc::XMLNode out) { std::vector caches; std::vector draining_caches; std::vector readonly_caches; // use cache dir(s) from conf file try { CacheConfig cache_config(config.GmConfig().CacheParams()); cache_config.substitute(config.GmConfig(), config.User()); caches = cache_config.getCacheDirs(); readonly_caches = 
cache_config.getReadOnlyCacheDirs(); } catch (CacheConfigException& e) { logger.msg(Arc::ERROR, "Error with cache configuration: %s", e.what()); Arc::SOAPFault fault(out.Parent(),Arc::SOAPFault::Sender,"Error with cache configuration"); fault.Detail(true).NewChild("CacheConfigurationFault"); out.Destroy(); return Arc::MCC_Status(); } if (caches.empty()) { Arc::SOAPFault fault(out.Parent(),Arc::SOAPFault::Sender,"Cache is disabled"); fault.Detail(true).NewChild("CacheDisabledFault"); out.Destroy(); return Arc::MCC_Status(); } Arc::FileCache cache(caches, draining_caches, readonly_caches, CACHE_CHECK_SESSION_DIR_ID ,config.User().get_uid(), config.User().get_gid()); if (!cache) { logger.msg(Arc::ERROR, "Error with cache configuration"); Arc::SOAPFault fault(out.Parent(),Arc::SOAPFault::Sender,"Error with cache configuration"); fault.Detail(true).NewChild("CacheConfigurationFault"); out.Destroy(); return Arc::MCC_Status(); } bool fileexist; Arc::XMLNode resp = out.NewChild("CacheCheckResponse"); Arc::XMLNode results = resp.NewChild("CacheCheckResult"); for(int n = 0;;++n) { Arc::XMLNode id = in["CacheCheck"]["TheseFilesNeedToCheck"]["FileURL"][n]; if (!id) break; std::string fileurl = (std::string)in["CacheCheck"]["TheseFilesNeedToCheck"]["FileURL"][n]; Arc::XMLNode resultelement = results.NewChild("Result"); fileexist = false; std::string file_lfn; Arc::initializeCredentialsType cred_type(Arc::initializeCredentialsType::SkipCredentials); Arc::UserConfig usercfg(cred_type); Arc::URL url(fileurl); Arc::DataHandle d(url, usercfg); logger.msg(Arc::INFO, "Looking up URL %s", d->str()); file_lfn = cache.File(d->str()); logger.msg(Arc::INFO, "Cache file is %s", file_lfn); struct stat fileStat; fileexist = (stat(file_lfn.c_str(), &fileStat) == 0) ? true : false; resultelement.NewChild("FileURL") = fileurl; resultelement.NewChild("ExistInTheCache") = (fileexist ? 
"true": "false"); if (fileexist) resultelement.NewChild("FileSize") = Arc::tostring(fileStat.st_size); else resultelement.NewChild("FileSize") = "0"; } return Arc::MCC_Status(Arc::STATUS_OK); } } // namespace nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/put.cpp0000644000000000000000000000013114152153376022305 xustar000000000000000029 mtime=1638455038.42864632 30 atime=1638455038.509647537 30 ctime=1638455099.412562631 nordugrid-arc-6.14.0/src/services/a-rex/put.cpp0000644000175000002070000002070514152153376022277 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include "PayloadFile.h" #include "job.h" #include "arex.h" #define MAX_CHUNK_SIZE (10*1024*1024) namespace ARex { static bool write_file(Arc::FileAccess& h,char* buf,size_t size) { for(;size>0;) { ssize_t l = h.fa_write(buf,size); if(l == -1) return false; size-=l; buf+=l; }; return true; } Arc::MCC_Status ARexService::PutInfo(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; return make_http_fault(outmsg,501,"Not Implemented"); } Arc::MCC_Status ARexService::DeleteInfo(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; return make_http_fault(outmsg,501,"Not Implemented"); } Arc::MCC_Status ARexService::PutCache(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; return make_http_fault(outmsg,501,"Not Implemented"); } Arc::MCC_Status ARexService::DeleteCache(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; return make_http_fault(outmsg,501,"Not Implemented"); } static Arc::MCC_Status PutJobFile(Arc::Message& outmsg, Arc::FileAccess& file, std::string& errstr, Arc::PayloadStreamInterface& stream, FileChunks& fc, bool& complete) { complete = false; // TODO: Use memory mapped file to minimize number of in memory copies const int bufsize = 1024*1024; if(!fc.Size()) fc.Size(stream.Size()); off_t pos = stream.Pos(); if(file.fa_lseek(pos,SEEK_SET) != pos) { std::string err = Arc::StrError(); errstr = "failed to set position of file to "+Arc::tostring(pos)+" - "+err; return ARexService::make_http_fault(outmsg, 500, "Error seeking to specified position in file"); }; char* buf = new char[bufsize]; if(!buf) { errstr = "failed to allocate memory"; return ARexService::make_http_fault(outmsg, 500, "Error allocating memory"); }; bool got_something = false; for(;;) { int size = bufsize; if(!stream.Get(buf,size)) break; if(size > 0) got_something = true; if(!write_file(file,buf,size)) { std::string err = Arc::StrError(); delete[] buf; errstr = "failed to write to file - "+err; return ARexService::make_http_fault(outmsg, 500, "Error writing to file"); }; if(size) fc.Add(pos,size); pos+=size; }; delete[] buf; // Due to limitation of PayloadStreamInterface it is not possible to // directly distingush between zero sized file and file with undefined // size. But by applying some dynamic heuristics it is possible. // TODO: extend/modify PayloadStreamInterface. 
if((stream.Size() == 0) && (stream.Pos() == 0) && (!got_something)) { complete = true; } return ARexService::make_empty_response(outmsg); } static Arc::MCC_Status PutJobFile(Arc::Message& outmsg, Arc::FileAccess& file, std::string& errstr, Arc::PayloadRawInterface& buf, FileChunks& fc, bool& complete) { complete = false; bool got_something = false; if(!fc.Size()) fc.Size(buf.Size()); for(int n = 0;;++n) { char* sbuf = buf.Buffer(n); if(sbuf == NULL) break; off_t offset = buf.BufferPos(n); off_t size = buf.BufferSize(n); if(size > 0) { got_something = true; off_t o = file.fa_lseek(offset,SEEK_SET); if(o != offset) { std::string err = Arc::StrError(); errstr = "failed to set position of file to "+Arc::tostring(offset)+" - "+err; return ARexService::make_http_fault(outmsg, 500, "Error seeking to specified position"); }; if(!write_file(file,sbuf,size)) { std::string err = Arc::StrError(); errstr = "failed to write to file - "+err; return ARexService::make_http_fault(outmsg, 500, "Error writing file"); }; if(size) fc.Add(offset,size); }; }; if((buf.Size() == 0) && (!got_something)) { complete = true; } return ARexService::make_empty_response(outmsg); } Arc::MCC_Status ARexService::PutJob(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; // Nothing can be put into root endpoint if(id.empty()) return make_http_fault(outmsg, 500, "No job specified"); // Check for proper payload Arc::MessagePayload* payload = inmsg.Payload(); if(!payload) { logger_.msg(Arc::ERROR, "%s: put file %s: there is no payload", id, subpath); return make_http_fault(outmsg, 500, "Missing payload"); }; Arc::PayloadStreamInterface* stream = dynamic_cast(payload); Arc::PayloadRawInterface* buf = dynamic_cast(payload); if((!stream) && (!buf)) { logger_.msg(Arc::ERROR, "%s: put file %s: unrecognized payload", id, subpath); return make_http_fault(outmsg, 500, "Error processing payload"); } // Acquire job ARexJob job(id,config,logger_); if(!job) { // There is no such job logger_.msg(Arc::ERROR, "%s: there is no such job: %s", job.ID(), job.Failure()); return make_http_fault(outmsg, 500, "Job does not exist"); }; // Prepare access to file FileChunksRef fc(files_chunks_.Get(job.ID()+"/"+subpath)); Arc::FileAccess* file = job.CreateFile(subpath.c_str()); if(file == NULL) { // TODO: report something logger_.msg(Arc::ERROR, "%s: put file %s: failed to create file: %s", job.ID(), subpath, job.Failure()); return make_http_fault(outmsg, 500, "Error creating file"); }; Arc::MCC_Status r; std::string err; bool complete(false); if(stream) { r = PutJobFile(outmsg,*file,err,*stream,*fc,complete); } else { r = PutJobFile(outmsg,*file,err,*buf,*fc,complete); } file->fa_close(); Arc::FileAccess::Release(file); if(r) { if(complete || fc->Complete()) job.ReportFileComplete(subpath); } else { logger_.msg(Arc::ERROR, "%s: put file %s: %s", job.ID(), subpath, err); } return r; } Arc::MCC_Status ARexService::DeleteJob(Arc::Message& inmsg,Arc::Message& outmsg,ARexGMConfig& config,std::string const& id,std::string const& subpath) { if(!&config) { return make_http_fault(outmsg, HTTP_ERR_FORBIDDEN, "User is not identified"); }; // Nothing can be removed in root endpoint if(id.empty()) return make_http_fault(outmsg, 500, "No job specified"); // Ignoring payload // Acquire job ARexJob job(id,config,logger_); if(!job) { // There is no such job logger_.msg(Arc::ERROR, "%s: there is no such job: %s", 
job.ID(), job.Failure()); return make_http_fault(outmsg, 500, "Job does not exist"); }; std::string full_path = job.GetFilePath(subpath.c_str()); if(full_path.empty()) { logger_.msg(Arc::ERROR, "%s: delete file %s: failed to obtain file path: %s", job.ID(), subpath, job.Failure()); return make_http_fault(outmsg, 500, "Error deleting file"); }; bool is_file = true; Arc::FileAccess* fs = job.OpenFile(subpath.c_str(), false, true); if(fs == NULL) { is_file = false; fs = job.OpenDir(subpath.c_str()); } if(fs == NULL) { // TODO: report something logger_.msg(Arc::ERROR, "%s: delete file %s: failed to open file/dir: %s", job.ID(), subpath, job.Failure()); return make_http_fault(outmsg, 500, "Error deleting file"); }; bool unlink_result = is_file ? fs->fa_unlink(full_path.c_str()) : fs->fa_rmdir(full_path.c_str());; int unlink_err = fs->geterrno(); is_file ? fs->fa_close() : fs->fa_closedir(); Arc::FileAccess::Release(fs); if(!unlink_result) { if((unlink_err == ENOTDIR) || (unlink_err == ENOENT)) { return make_http_fault(outmsg, 404, "File not found"); } else { return make_http_fault(outmsg, 500, "Error deleting file"); }; }; return ARexService::make_empty_response(outmsg); } } // namespace ARex nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/test_cache_check.cpp0000644000000000000000000000013214152153376024735 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455099.421562766 nordugrid-arc-6.14.0/src/services/a-rex/test_cache_check.cpp0000644000175000002070000000247114152153376024726 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include int main(void) { Arc::Logger logger(Arc::Logger::rootLogger, "Test"); Arc::LogStream logcerr(std::cerr); Arc::Logger::rootLogger.addDestination(logcerr); logger.msg(Arc::INFO, "Creating client side chain"); std::string id; std::string url("https://localhost/arex"); Arc::NS ns("a-rex", "http://www.nordugrid.org/schemas/a-rex"); Arc::MCCConfig cfg; Arc::UserConfig uc; uc.ApplyToConfig(cfg); Arc::ClientSOAP client(cfg, url, 60); std::string faultstring; Arc::PayloadSOAP request(ns); Arc::XMLNode req = request.NewChild("a-rex:CacheCheck").NewChild("a-rex:TheseFilesNeedToCheck"); req.NewChild("a-rex:FileURL") = "http://example.org/test.txt"; Arc::PayloadSOAP* response; Arc::MCC_Status status = client.process(&request, &response); if (!status) { std::cerr << "Request failed" << std::endl; } std::string str; response->GetDoc(str, true); std::cout << str << std::endl; return 0; } nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/arc.zero.conf0000644000000000000000000000013214152153376023364 xustar000000000000000030 mtime=1638455038.406645989 30 atime=1638455038.500647402 30 ctime=1638455099.423562797 nordugrid-arc-6.14.0/src/services/a-rex/arc.zero.conf0000644000175000002070000000076614152153376023362 0ustar00mockbuildmock00000000000000# # ARC Computing Element zero configuration # Consult ARC Installation Guide to modify this configuration for production use-cases # [common] x509_host_key = /etc/grid-security/testCA-hostkey.pem x509_host_cert = /etc/grid-security/testCA-hostcert.pem [authgroup:zero] file = /etc/grid-security/testCA.allowed-subjects [mapping] map_to_user = zero nobody:nobody [lrms] lrms = fork [arex] [arex/ws] [arex/ws/jobs] allowaccess = zero [infosys] [infosys/glue2] [infosys/cluster] [queue:fork] 
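# Summary of the zero configuration above: certificate subjects listed in
# /etc/grid-security/testCA.allowed-subjects form the 'zero' authgroup, which is
# granted access to the WS job interface (allowaccess under [arex/ws/jobs]) and
# mapped to the local account nobody:nobody; jobs are executed through the fork LRMS.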
nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/lrms0000644000000000000000000000013214152153474021671 xustar000000000000000030 mtime=1638455100.263575418 30 atime=1638455103.997631524 30 ctime=1638455100.263575418 nordugrid-arc-6.14.0/src/services/a-rex/lrms/0000755000175000002070000000000014152153474021733 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/a-rex/lrms/PaxHeaders.30264/arc0000644000000000000000000000013214152153474022436 xustar000000000000000030 mtime=1638455100.286575764 30 atime=1638455103.997631524 30 ctime=1638455100.286575764 nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/0000755000175000002070000000000014152153474022500 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376024550 xustar000000000000000030 mtime=1638455038.420646199 30 atime=1638455038.506647492 30 ctime=1638455100.281575689 nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/Makefile.am0000644000175000002070000000053614152153376024541 0ustar00mockbuildmock00000000000000pkgpythondir = $(PYTHON_SITE_ARCH)/arc pkgpython_PYTHON = __init__.py all-local: if test ! -e "$(abs_top_builddir)/python/python/arc/lrms"; then ln -s $(abs_top_srcdir)/src/services/a-rex/lrms/arc/lrms $(abs_top_builddir)/python/python/arc/lrms; fi CLEANFILES = $(abs_top_builddir)/python/python/arc/lrms SUBDIRS = lrms DIST_SUBDIRS = $(SUBDIRS) nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/PaxHeaders.30264/Makefile.in0000644000000000000000000000013114152153433024552 xustar000000000000000029 mtime=1638455067.46408259 30 atime=1638455089.958420579 30 ctime=1638455100.281575689 nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/Makefile.in0000644000175000002070000007034714152153433024553 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/a-rex/lrms/arc DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(pkgpython_PYTHON) $(top_srcdir)/py-compile ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac 
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__py_compile = PYTHON=$(PYTHON) $(SHELL) $(py_compile) am__installdirs = "$(DESTDIR)$(pkgpythondir)" am__pep3147_tweak = \ sed -e 's|\.py$$||' -e 's|[^/]*$$|__pycache__/&.*.py|' py_compile = $(top_srcdir)/py-compile RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = 
@EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = 
@PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ pkgpythondir = $(PYTHON_SITE_ARCH)/arc pkgpython_PYTHON = __init__.py CLEANFILES = $(abs_top_builddir)/python/python/arc/lrms SUBDIRS = lrms DIST_SUBDIRS = $(SUBDIRS) all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) 
$(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/a-rex/lrms/arc/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/a-rex/lrms/arc/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-pkgpythonPYTHON: $(pkgpython_PYTHON) @$(NORMAL_INSTALL) @list='$(pkgpython_PYTHON)'; dlist=; list2=; test -n "$(pkgpythondir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgpythondir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgpythondir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then b=; else b="$(srcdir)/"; fi; \ if test -f $$b$$p; then \ $(am__strip_dir) \ dlist="$$dlist $$f"; \ list2="$$list2 $$b$$p"; \ else :; fi; \ done; \ for file in $$list2; do echo $$file; done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pkgpythondir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(pkgpythondir)" || exit $$?; \ done || exit $$?; \ if test -n "$$dlist"; then \ $(am__py_compile) --destdir "$(DESTDIR)" \ --basedir "$(pkgpythondir)" $$dlist; \ else :; fi uninstall-pkgpythonPYTHON: @$(NORMAL_UNINSTALL) @list='$(pkgpython_PYTHON)'; test -n "$(pkgpythondir)" || list=; \ py_files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ test -n "$$py_files" || exit 0; \ dir='$(DESTDIR)$(pkgpythondir)'; \ pyc_files=`echo "$$py_files" | sed 's|$$|c|'`; \ pyo_files=`echo "$$py_files" | sed 's|$$|o|'`; \ py_files_pep3147=`echo "$$py_files" | $(am__pep3147_tweak)`; \ echo "$$py_files_pep3147";\ pyc_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|c|'`; \ pyo_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|o|'`; \ st=0; \ for files in \ "$$py_files" \ "$$pyc_files" \ "$$pyo_files" \ "$$pyc_files_pep3147" \ "$$pyo_files_pep3147" \ ; do \ $(am__uninstall_files_from_dir) || st=$$?; \ done; \ exit $$st # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. 
$(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! -f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile all-local installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(pkgpythondir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES) distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-pkgpythonPYTHON install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-pkgpythonPYTHON .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am all-local \ check check-am clean clean-generic clean-libtool cscopelist-am \ ctags ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-pkgpythonPYTHON install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am uninstall-pkgpythonPYTHON all-local: if test ! -e "$(abs_top_builddir)/python/python/arc/lrms"; then ln -s $(abs_top_srcdir)/src/services/a-rex/lrms/arc/lrms $(abs_top_builddir)/python/python/arc/lrms; fi # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/PaxHeaders.30264/__init__.py0000644000000000000000000000013214152153376024625 xustar000000000000000030 mtime=1638455038.420646199 30 atime=1638455038.506647492 30 ctime=1638455100.282575703 nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/__init__.py0000644000175000002070000000014614152153376024613 0ustar00mockbuildmock00000000000000""" The ``arc`` package contains extra ARC modules, currently only LRMS specific ones (arc.lrms). 
""" nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/PaxHeaders.30264/lrms0000644000000000000000000000013214152153474023413 xustar000000000000000030 mtime=1638455100.311576139 30 atime=1638455103.997631524 30 ctime=1638455100.311576139 nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/lrms/0000755000175000002070000000000014152153474023455 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/lrms/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376025525 xustar000000000000000030 mtime=1638455038.420646199 30 atime=1638455038.506647492 30 ctime=1638455100.304576034 nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/lrms/Makefile.am0000644000175000002070000000025414152153376025513 0ustar00mockbuildmock00000000000000pkgpythondir = $(PYTHON_SITE_ARCH)/arc/lrms/ pkgpython_PYTHON = \ __init__.py slurm.py \ pyCancel.py pySubmit.py pyScanner.py SUBDIRS = common DIST_SUBDIRS = $(SUBDIRS) nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/lrms/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153433025530 xustar000000000000000030 mtime=1638455067.519083417 30 atime=1638455089.970420759 30 ctime=1638455100.304576034 nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/lrms/Makefile.in0000644000175000002070000007000114152153433025513 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/a-rex/lrms/arc/lrms DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(pkgpython_PYTHON) $(top_srcdir)/py-compile ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ 
esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__py_compile = PYTHON=$(PYTHON) $(SHELL) $(py_compile) am__installdirs = "$(DESTDIR)$(pkgpythondir)" am__pep3147_tweak = \ sed -e 's|\.py$$||' -e 's|[^/]*$$|__pycache__/&.*.py|' py_compile = $(top_srcdir)/py-compile RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = 
@EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = 
@PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ pkgpythondir = $(PYTHON_SITE_ARCH)/arc/lrms/ pkgpython_PYTHON = \ __init__.py slurm.py \ pyCancel.py pySubmit.py pyScanner.py SUBDIRS = common DIST_SUBDIRS = $(SUBDIRS) all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) 
$(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/a-rex/lrms/arc/lrms/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/a-rex/lrms/arc/lrms/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-pkgpythonPYTHON: $(pkgpython_PYTHON) @$(NORMAL_INSTALL) @list='$(pkgpython_PYTHON)'; dlist=; list2=; test -n "$(pkgpythondir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgpythondir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgpythondir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then b=; else b="$(srcdir)/"; fi; \ if test -f $$b$$p; then \ $(am__strip_dir) \ dlist="$$dlist $$f"; \ list2="$$list2 $$b$$p"; \ else :; fi; \ done; \ for file in $$list2; do echo $$file; done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pkgpythondir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(pkgpythondir)" || exit $$?; \ done || exit $$?; \ if test -n "$$dlist"; then \ $(am__py_compile) --destdir "$(DESTDIR)" \ --basedir "$(pkgpythondir)" $$dlist; \ else :; fi uninstall-pkgpythonPYTHON: @$(NORMAL_UNINSTALL) @list='$(pkgpython_PYTHON)'; test -n "$(pkgpythondir)" || list=; \ py_files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ test -n "$$py_files" || exit 0; \ dir='$(DESTDIR)$(pkgpythondir)'; \ pyc_files=`echo "$$py_files" | sed 's|$$|c|'`; \ pyo_files=`echo "$$py_files" | sed 's|$$|o|'`; \ py_files_pep3147=`echo "$$py_files" | $(am__pep3147_tweak)`; \ echo "$$py_files_pep3147";\ pyc_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|c|'`; \ pyo_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|o|'`; \ st=0; \ for files in \ "$$py_files" \ "$$pyc_files" \ "$$pyo_files" \ "$$pyc_files_pep3147" \ "$$pyo_files_pep3147" \ ; do \ $(am__uninstall_files_from_dir) || st=$$?; \ done; \ exit $$st # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. 
$(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! -f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(pkgpythondir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-pkgpythonPYTHON install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-pkgpythonPYTHON .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-pkgpythonPYTHON install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am uninstall-pkgpythonPYTHON # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/lrms/PaxHeaders.30264/slurm.py0000644000000000000000000000013214152153376025205 xustar000000000000000030 mtime=1638455038.421646215 30 atime=1638455038.507647507 30 ctime=1638455100.306576064 nordugrid-arc-6.14.0/src/services/a-rex/lrms/arc/lrms/slurm.py0000644000175000002070000011334014152153376025174 0ustar00mockbuildmock00000000000000""" SLURM batch system interface module. """ # TODO: Check if there are any bugfixes to the bash SLURM back-end scripts which has not ported to this SLURM python module. from __future__ import absolute_import import os, sys, time, re import arc from .common.cancel import cancel from .common import config as cconfig from .common.config import configure, is_conf_setter from .common.proc import execute_local, execute_remote from .common.log import debug, verbose, info, warn, error, ArcError from .common.lrmsinfo import LRMSInfo from .common.scan import * from .common.ssh import ssh_connect from .common.submit import * @is_conf_setter def set_slurm(cfg): """ Set SLURM specific :py:data:`~lrms.common.Config` attributes. :param cfg: parsed arc.conf :type cfg: :py:class:`ConfigParser.ConfigParser` """ cconfig.Config.slurm_bin_path = str(cfg.get('lrms', 'slurm_bin_path')).strip('"') if \ cfg.has_option('lrms', 'slurm_bin_path') else '/usr/bin' cconfig.Config.slurm_wakeupperiod = int(cfg.get('lrms', 'slurm_wakeupperiod').strip('"')) if \ cfg.has_option('lrms', 'slurm_wakeupperiod') else 30 #--------------------- # Submit methods #--------------------- def Submit(config, jobdesc): """ Submits a job to the SLURM queue specified in arc.conf. 
This method executes the required RunTimeEnvironment scripts and assembles the bash job script. The job script is written to file and submitted with ``sbatch``. :param str config: path to arc.conf :param jobdesc: job description object :type jobdesc: :py:class:`arc.JobDescription` :return: local job ID if successfully submitted, else ``None`` :rtype: :py:obj:`str` """ configure(config, set_slurm) validate_attributes(jobdesc) if cconfig.Config.remote_host: ssh_connect(cconfig.Config.remote_host, cconfig.Config.remote_user, cconfig.Config.private_key) # Run RTE stage0 debug('----- starting slurmSubmitter.py -----', 'slurm.Submit') RTE_stage0(jobdesc, 'SLURM', SBATCH_ACCOUNT = 'OtherAttributes.SBATCH_ACCOUNT') set_grid_global_jobid(jobdesc) # Create script file and write job script jobscript = get_job_script(jobdesc) script_file = write_script_file(jobscript) debug('Created file %s' % script_file, 'slurm.Submit') debug('SLURM jobname: %s' % jobdesc.Identification.JobName, 'slurm.Submit') debug('SLURM job script built', 'slurm.Submit') debug('----------------- BEGIN job script -----', 'slurm.Submit') emptylines = 0 for line in jobscript.split('\n'): if not line: emptylines += 1 else: debug(emptylines*'\n' + line.replace("%", "%%"), 'slurm.Submit') emptylines = 0 if emptylines > 1: debug((emptylines-1)*'\n', 'slurm.Submit') debug('----------------- END job script -----', 'slurm.Submit') if 'ONLY_WRITE_JOBSCRIPT' in os.environ and os.environ['ONLY_WRITE_JOBSCRIPT'] == 'yes': return "-1" ####################################### # Submit the job ###################################### execute = execute_local if not cconfig.Config.remote_host else execute_remote directory = jobdesc.OtherAttributes['joboption;directory'] debug('Session directory: %s' % directory, 'slurm.Submit') SLURM_TRIES = 0 handle = None while SLURM_TRIES < 10: args = '%s/sbatch %s' % (cconfig.Config.slurm_bin_path, script_file) verbose('Executing \'%s\' on %s' % (args, cconfig.Config.remote_host if cconfig.Config.remote_host else 'localhost'), 'slurm.Submit') handle = execute(args) if handle.returncode == 0: break if handle.returncode == 198 or wait_for_queue(handle): debug('Waiting for queue to decrease', 'slurm.Submit') time.sleep(60) SLURM_TRIES += 1 continue break # Other error than full queue if handle.returncode == 0: # TODO: Test what happens when the jobqueue is full or when the slurm # ctld is not responding. SLURM 1.x and 2.2.x outputs the jobid into # STDERR and STDOUT respectively. Concat them, and let sed sort it out. # From the exit code we know that the job was submitted, so this # is safe. Ulf Tigerstedt 1.5.2011 localid = get_job_id(handle) if localid: debug('Job submitted successfully!', 'slurm.Submit') debug('Local job id: ' + localid, 'slurm.Submit') debug('----- exiting submitSubmitter.py -----', 'slurm.Submit') return localid debug('job *NOT* submitted successfully!', 'slurm.Submit') debug('got error code from sbatch: %d !' % handle.returncode, 'slurm.Submit') debug('Output is:\n' + ''.join(handle.stdout), 'slurm.Submit') debug('Error output is:\n' + ''.join(handle.stderr), 'slurm.Submit') debug('----- exiting slurmSubmitter.py -----', 'slurm.Submit') def wait_for_queue(handle): """ Read from ``sbatch`` output whether the queue is full. 
:param object handle: sbatch handle :return: ``True`` if queue is full, else ``False`` :rtype: :py:obj:`bool` """ for f in (handle.stdout, handle.stderr): for line in f: if ("maximum number of jobs" in line or # A rare SLURM error, but may cause chaos in the # information/accounting system "unable to accept job" in line): return True return False def get_job_id(handle): """ Read local job ID from ``sbatch`` output. :param object handle: sbatch handle :return: local job ID if found, else ``None`` :rtype: :py:obj:`str` """ for f in (handle.stdout, handle.stderr): for line in f: match = re.search(r'Submitted batch job (\d+)', line) if match: return match.group(1) error('Job ID not found in stdout or stderr', 'slurm.Submit') def get_job_script(jobdesc): """ Assemble bash job script for a SLURM host. :param jobdesc: job description object :type jobdesc: :py:class:`arc.JobDescription` :return: job script :rtype: :py:obj:`str` """ set_req_mem(jobdesc) # TODO: Maybe change way in which JobDescriptionParserSLURM is loaded. jobscript = JobscriptAssemblerSLURM(jobdesc).assemble() if not jobscript: raise ArcError('Unable to assemble SLURM job option', 'slurm.Submit') return jobscript #--------------------- # Cancel methods #--------------------- def Cancel(config, jobid): """ Cancel a job running at a SLURM host with ``scancel``. :param str config: path to arc.conf :param str jobid: local job ID :return: ``True`` if successfully cancelled, else ``False`` :rtype: :py:obj:`bool` """ verify_job_id(jobid) configure(config, set_slurm) cmd = '%s/%s' % (cconfig.Config.slurm_bin_path, 'scancel') return cancel([cmd, jobid], jobid) def verify_job_id(jobid): """ Verify that the job ID is an integer else raise :py:class:`~lrms.common.common.ArcError`. :param str jobid: local job ID """ try: int(jobid) except: raise ArcError('Job ID is not set, or it contains non-numeric characters (%s)' % jobid, 'slurm.Cancel') #--------------------- # Scan methods #--------------------- def Scan(config, ctr_dirs): """ Query the SLURM host for all jobs in /[controldir]/processing with ``squeue``. If the job has stopped running, more detailed information is fetched with ``scontrol``, and the diagnostics and comments files are updated. Finally ``gm-kick`` is executed on all jobs with an exit code. :param str config: path to arc.conf :param ctr_dirs: list of paths to control directories :type ctr_dirs: :py:obj:`list` [ :py:obj:`str` ... 
] """ configure(config, set_slurm) if cconfig.Config.scanscriptlog: scanlogfile = arc.common.LogFile(cconfig.Config.scanscriptlog) arc.common.Logger_getRootLogger().addDestination(scanlogfile) arc.common.Logger_getRootLogger().setThreshold(cconfig.Config.log_threshold) jobs = get_jobs(ctr_dirs) if not jobs: return if cconfig.Config.remote_host: # NOTE: Assuming 256 B of TCP window needed for each job (squeue) ssh_connect(cconfig.Config.remote_host, cconfig.Config.remote_user, cconfig.Config.private_key, (2 << 7)*len(jobs)) execute = execute_local if not cconfig.Config.remote_host else execute_remote args = cconfig.Config.slurm_bin_path + '/squeue -a -h -o %i:%T -t all -j ' + ','.join(jobs.keys()) if '__SLURM_TEST' in os.environ: handle = execute(args, env=dict(os.environ)) else: handle = execute(args) if handle.returncode != 0: debug('Got error code %i from squeue' % handle.returncode, 'slurm.Scan') debug('Error output is:\n' + ''.join(handle.stderr), 'slurm.Scan') # Slurm can report StartTime and EndTime in at least these two formats: # 2010-02-15T15:30:29 (MDS) # 02/15-15:25:15 # Python does not support duplicate named groups. # Have to use separate regex if we want to use named groups. date_MDS = re.compile(r'^(?P\d\d\d\d)-(?P\d\d)-(?P
    \d\d)T(?P\d\d):(?P\d\d):(?P\d\d)$') date_2 = re.compile(r'^(?P\d\d)/(?P
    \d\d)-(?P\d\d):(?P\d\d):(?P\d\d)$') for line in handle.stdout: try: localid, state = line.strip().split(':', 1) except: if line: warn('Failed to parse squeue line: ' + line, 'slurm.Scan') continue job = jobs[localid] job.state = state if job.state in ['PENDING','RUNNING','SUSPENDED','COMPLETING']: continue if not job.state: set_exit_code_from_diag(job) job.message = MESSAGES.get(job.state, '') args = cconfig.Config.slurm_bin_path + '/scontrol -o show job %s' % localid scontrol_handle = execute(args) if scontrol_handle.returncode != 0: debug('Got error code %i from scontrol' % scontrol_handle.returncode, 'slurm.Scan') debug('Error output is:\n' + ''.join(scontrol_handle.stderr), 'slurm.Scan') try: scontrol_dict = dict(item.split('=', 1) for item in re.split(' (?=[^ =]+=)', scontrol_handle.stdout[0])) job = jobs[scontrol_dict['JobId']] except: warn('Failed to parse scontrol line: ' + line, 'slurm.Scan') continue if 'ExitCode' in scontrol_dict: ec1, ec2 = scontrol_dict['ExitCode'].split(':') job.exitcode = int(ec2) + 256 if int(ec2) != 0 else int(ec1) else: job.exitcode = 0 if state == 'COMPLETED' else -1 if (state == 'NODE_FAIL' or state == 'CANCELLED') and ('ExitCode' not in scontrol_dict or job.exitcode == 0): job.exitcode = 15 job.message = 'Job was cancelled by SLURM' if 'StartTime' in scontrol_dict: match = date_MDS.match(scontrol_dict['StartTime']) or date_2.match(scontrol_dict['StartTime']) scontrol_dict['StartTime'] = get_MDS(match.groupdict()) job.LRMSStartTime = arc.common.Time(scontrol_dict['StartTime']) if 'EndTime' in scontrol_dict: match = date_MDS.match(scontrol_dict['EndTime']) or date_2.match(scontrol_dict['EndTime']) scontrol_dict['EndTime'] = get_MDS(match.groupdict()) job.LRMSEndTime = arc.common.Time(scontrol_dict['EndTime']) if 'StartTime' in scontrol_dict and 'EndTime' in scontrol_dict: job.WallTime = job.LRMSEndTime - job.LRMSStartTime if 'NumCPUs' in scontrol_dict: job.Processors = scontrol_dict['NumCPUs'] with open(job.lrms_done_file, 'w') as f: f.write('%d %s\n' % (job.exitcode, job.message)) write_comments(job) update_diag(job) kicklist = [job for job in jobs.values() if job.state not in ['PENDING','RUNNING','SUSPENDED','COMPLETING']] kicklist.extend([job for job in jobs.values() if job.state == 'CANCELLED']) # kick twice gm_kick(kicklist) def get_lrms_options_schema(): return LRMSInfo.get_lrms_options_schema(slurm_bin_path = '*') def get_lrms_info(options): if sys.version_info[0] >= 3: # Perl::Inline::Python passes text input as bytes objects in Python 3 # Convert them to str objects since this is what ARC is using def convert(input): if isinstance(input, dict): return dict((convert(key), convert(value)) for key, value in input.items()) elif isinstance(input, list): return [convert(element) for element in input] elif isinstance(input, bytes): return input.decode() else: return input options = convert(options) si = SLURMInfo(options) si.read_config() si.read_partitions() si.read_jobs() si.read_nodes() si.read_cpuinfo() si.cluster_info() for qkey, qval in options['queues'].items(): if si.queue_info(qkey): si.users_info(qkey, qval['users']) si.jobs_info(options['jobs']) si.nodes_info() return si.lrms_info class SLURMInfo(LRMSInfo, object): def __init__(self, options): super(SLURMInfo, self).__init__(options) self._path = options['slurm_bin_path'] if 'slurm_bin_path' in options else '/usr/bin' def read_config(self): self.config = {} execute = execute_local if not self._ssh else execute_remote handle = execute('%s/scontrol show config| grep 
"MaxJobCount\\|SLURM_VERSION"' % (self._path)) if handle.returncode: raise ArcError('scontrol error: %s' % '\n'.join(handle.stderr), 'SLURMInfo') for line in handle.stdout: try: conf = line.strip().split(' = ', 1) self.config[conf[0].rstrip()] = conf[1] except IndexError: # Couldn't split: blank line, header etc .. continue def read_partitions(self): self.partitions = {} execute = execute_local if not self._ssh else execute_remote handle = execute('%s/sinfo -a -h -o \'PartitionName=%%P TotalCPUs=%%C ' 'TotalNodes=%%D MaxTime=%%l\'' % (self._path)) if handle.returncode: raise ArcError('sinfo error: %s' % '\n'.join(handle.stderr), 'SLURMInfo') for line in handle.stdout: try: part = dict(item.split('=', 1) for item in LRMSInfo.split(line.strip())) part['PartitionName'] = part['PartitionName'].rstrip('*') part['MaxTime'] = SLURMInfo.as_period(part['MaxTime']) # Format of '%C' is: Number of CPUs by state in the format 'allocated/idle/other/total' part['AllocatedCPUs'], part['IdleCPUs'], part['OtherCPUs'], part['TotalCPUs'] = \ map(SLURMInfo.parse_number, part['TotalCPUs'].split('/')) part['TotalNodes'] = SLURMInfo.parse_number(part['TotalNodes']) self.partitions[part['PartitionName']] = part; except ValueError: # Couldn't split: blank line, header etc .. continue def read_jobs(self): self.jobs = {} execute = execute_local if not self._ssh else execute_remote handle = execute('%s/squeue -a -h -t all -o \'JobId=%%i TimeUsed=%%M Partition=%%P JobState=%%T ' 'ReqNodes=%%D ReqCPUs=%%C TimeLimit=%%l Name=%%j NodeList=%%N\'' % (self._path)) if handle.returncode: raise ArcError('squeue error: %s' % '\n'.join(handle.stderr), 'SLURMInfo') for line in handle.stdout: try: job = dict(item.split('=', 1) for item in LRMSInfo.split(line.strip())) if 'TimeUsed' in job: job['TimeUsed'] = SLURMInfo.as_period(job['TimeUsed']) if 'TimeLimit' in job: job['TimeLimit'] = SLURMInfo.as_period(job['TimeLimit']) self.jobs[job['JobId']] = job except ValueError: # Couldn't split: blank line, header etc .. continue def read_nodes(self): self.nodes = {} execute = execute_local if not self._ssh else execute_remote handle = execute('%s/scontrol show node --oneliner' % (self._path)) if handle.returncode: raise ArcError('scontrol error: %s' % '\n'.join(handle.stderr), 'SLURMInfo') for line in handle.stdout: try: _ = dict(item.split('=', 1) for item in LRMSInfo.split(line.strip())) record = dict((k, _[k]) for k in ('NodeName', 'CPUTot', 'RealMemory', 'State', 'Sockets', 'OS', 'Arch')) # Node status can be followed by different symbols # according to it being unresponsive, powersaving, etc. # Get rid of them record['State'] = record['State'].rstrip('*~#+') self.nodes[record['NodeName']] = record except KeyError: # Node is probably down if attributes are missing, just skip it continue except ValueError: # Couldn't split: blank line, header etc .. 
continue def read_cpuinfo(self): self.cpuinfo = {} execute = execute_local if not self._ssh else execute_remote handle = execute('%s/sinfo -a -h -o \'%%C\'' % (self._path)) if handle.returncode: raise ArcError('sinfo error: %s' % '\n'.join(handle.stderr), 'SLURMInfo') for line in handle.stdout: try: self.cpuinfo = dict(zip(('AllocatedCPUs', 'IdleCPUs', 'OtherCPUs', 'TotalCPUs'), map(SLURMInfo.parse_number, line.strip().split('/')))) break except IndexError: # Probably blank line continue def cluster_info(self): cluster = {} cluster['lrms_type'] = 'SLURM' cluster['lrms_version'] = self.config['SLURM_VERSION'] cluster['totalcpus'] = sum(map(int, (node['CPUTot'] for node in self.nodes.values()))) cluster['queuedcpus'] = sum(map(int, (job['ReqCPUs'] for job in self.jobs.values() if job['JobState'] == 'PENDING'))) cluster['usedcpus'] = self.cpuinfo['AllocatedCPUs'] cluster['queuedjobs'], cluster['runningjobs'] = self.get_jobs() # NOTE: should be on the form '8cpu:800 2cpu:40' cpudist = {} for node in self.nodes.values(): cpudist[node['CPUTot']] = cpudist[node['CPUTot']] + 1 if node['CPUTot'] in cpudist else 1 cluster['cpudistribution'] = ' '.join('%scpu:%i' % (key, val) for key, val in cpudist.items()) self.lrms_info['cluster'] = cluster def get_jobs(self, queue = ''): queuedjobs = runningjobs = 0 for job in self.jobs.values(): if queue and queue != job['Partition']: continue if job['JobState'] == 'PENDING': queuedjobs += 1 elif job['JobState'] in ('RUNNING', 'COMPLETING'): runningjobs += 1 return queuedjobs, runningjobs def queue_info(self, qname): if not qname in self.partitions: return False queue = {} queue['status'] = queue['maxrunning'] = queue['maxqueuable'] = queue['maxuserrun'] = self.config['MaxJobCount'] time = self.partitions[qname]['MaxTime'].GetPeriod() queue['maxcputime'] = queue['defaultcput'] = queue['maxwalltime'] = queue['defaultwallt'] = time if time > 0 else (2**31)-1 queue['mincputime'] = queue['minwalltime'] = 0 queue['queued'], queue['running'] = self.get_jobs(qname) queue['totalcpus'] = self.partitions[qname]['TotalCPUs'] queue['freeslots'] = self.partitions[qname]['IdleCPUs'] self.lrms_info['queues'][qname] = queue return True def users_info(self, queue, accts): queue = self.lrms_info['queues'][queue] queue['users'] = {} for u in accts: queue['users'][u] = {} queue['users'][u]['freecpus'] = { str(self.cpuinfo['IdleCPUs']) : 0 } queue['users'][u]['queuelength'] = 0 def jobs_info(self, jids): jobs = {} # Jobs can't have overlapping ID between queues in SLURM statemap = {'RUNNING' : 'R', 'COMPLETED' : 'E', 'CANCELLED' : 'O', 'FAILED' : 'O', 'PENDING' : 'Q', 'TIMEOUT' : 'O' } for jid in jids: if jid not in self.jobs: # Lost job or invalid job id! jobs[jid] = { 'status' : 'O' } continue _job = self.jobs[jid] job = {} job['status'] = statemap.get(_job['JobState'], 'O') # TODO: calculate rank? Probably not possible. job['rank'] = 0 job['cpus'] = _job['ReqCPUs'] # TODO: This gets the memory from the first node in a job # allocation which will not be correct on a heterogenous # cluster job['nodes'] = self.expand_nodes(_job['NodeList']) node = job['nodes'][0] if job['nodes'] else None # Only jobs that got the nodes can report the memory of # their nodes if node: job['mem'] = self.nodes[node]['RealMemory'] walltime = int(_job['TimeUsed'].GetPeriod()) reqwalltime = int(_job['TimeLimit'].GetPeriod()) count = int(_job['ReqCPUs']) # TODO: multiply walltime by number of cores to get cputime? 
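            # Illustrative arithmetic only (hypothetical numbers): for a job with
            # TimeUsed=30:00 and ReqCPUs=4, the values published below would be
            #   walltime = 1800 s,  cputime = 1800 * 4 = 7200 s
            # and the requested limits are scaled by the core count in the same way.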
job['walltime'] = walltime job['cputime'] = walltime * count job['reqwalltime'] = reqwalltime job['reqcputime'] = reqwalltime * count job['comment'] = [_job['Name']] jobs[jid] = job self.lrms_info['jobs'] = jobs def expand_nodes(self, nodes_expr): # Translates a list like n[1-2,5],n23,n[54-55] to n1,n2,n5,n23,n54,n55 if not nodes_expr: return [] nodes = [] for node_expr in re.split(',(?=[a-zA-Z])', nodes_expr): # Lookahead for letter try: name, expr = node_expr[:-1].split('[') for num in expr.split(','): if num.isdigit(): nodes.append(name + num) else: start, end = map(int, num.split('-')) # TODO: Preserve leading zeroes in sequence, # if needed #enodes += sprintf('%s%0*d,', name, l, i) nodes += [name + str(n) for n in range(start, end+1)] except: nodes.append(node_expr) return nodes def nodes_info(self): unavailable = ('DOWN', 'DRAIN', 'FAIL', 'MAINT', 'UNK') free = ('IDLE', 'MIXED') nodes = {} for key, _node in self.nodes.items(): node = {'isfree' : int(_node['State'] in free), 'isavailable' : int(_node['State'] not in unavailable)} node['lcpus'] = node['slots'] = int(_node['CPUTot']) node['pmem'] = int(_node['RealMemory']) node['pcpus'] = int(_node['Sockets']) node['sysname'] = _node['OS'] node['machine'] = _node['Arch'] nodes[key] = node self.lrms_info['nodes'] = nodes @staticmethod def as_period(time): # SLURM can report periods as "infinite" or "UNLIMITED" if time.lower() == "infinite" or time.lower() == "unlimited": # Max number allowed by ldap return arc.common.Period(2**31-1) if time.lower() == "invalid": return arc.common.Period(0) time = time.replace('-', ':').split(':') return arc.common.Period('P%sDT%sH%sM%sS' % tuple(['0']*(4 - len(time)) + time)) class JobscriptAssemblerSLURM(JobscriptAssembler): def __init__(self, jobdesc): super(JobscriptAssemblerSLURM, self).__init__(jobdesc) def assemble(self): script = JobscriptAssemblerSLURM.assemble_SBATCH(self.jobdesc) if not script: return script += self.get_stub('umask_and_sourcewithargs') script += self.get_stub('user_env') script += self.get_stub('runtime_env') script += self.get_stub('move_files_to_node') script += "\nRESULT=0\n\n" script += "if [ \"$RESULT\" = '0' ] ; then\n" script += self.get_stub('rte_stage1') script += '''if [ ! "X$SLURM_NODEFILE" = 'X' ] ; then if [ -r "$SLURM_NODEFILE" ] ; then cat "$SLURM_NODEFILE" | sed 's/\\(.*\\)/nodename=\\1/' >> "$RUNTIME_JOB_DIAG" NODENAME_WRITTEN="1" else SLURM_NODEFILE= fi fi ''' script += "if [ \"$RESULT\" = '0' ] ; then\n" script += self.get_stub('cd_and_run') script += "fi\nfi\n" script += self.get_stub('rte_stage2') script += self.get_stub('clean_scratchdir') script += self.get_stub('move_files_to_frontend') return script @staticmethod def assemble_SBATCH(j, language = "", dialect = ""): # TODO: What about adjusting working directory, # diagnostics, and uploading output files. These should # probably be handled by submission script. # First check if the job description is valid. #~ if not j.Application.Executable.Path: #~ logger.msg(arc.DEBUG, "Missing executable") #~ return (False, "") product = "#!/bin/bash -l\n" product += "# SLURM batch job script built by arex\n" # << TODO # TODO: Make configurable # rerun is handled by GM, do not let SLURM requeue jobs itself. product += "#SBATCH --no-requeue\n" # TODO: Description is missing # TODO: Maybe output and error file paths should be passed using # a separate map, instead of using the session_directory key.
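        # Illustrative only, with hypothetical job values (session directory
        # /var/spool/arc/grid/ABC123, queue "main", no job name or priority set,
        # 4 requested slots): the preamble assembled below would look roughly like
        #   #!/bin/bash -l
        #   # SLURM batch job script built by arex
        #   #SBATCH --no-requeue
        #   #SBATCH -e /var/spool/arc/grid/ABC123.comment
        #   #SBATCH -o /var/spool/arc/grid/ABC123.comment
        #   #SBATCH -p main
        #   #SBATCH --nice=50
        #   #SBATCH -J 'gridjob'
        #   #SBATCH --get-user-env=10L
        #   #SBATCH -n 4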
# write SLURM output to 'comment' file if "joboption;directory" in j.OtherAttributes: product += "#SBATCH -e " + j.OtherAttributes["joboption;directory"] + ".comment\n" product += "#SBATCH -o " + j.OtherAttributes["joboption;directory"] + ".comment\n" product += "\n" ### Choose queue ### # SLURM v2.3 option description: -p, --partition= # Request a specific partition for the resource allocation. If not # specified, the default behavior is to allow the slurm controller to # select the default partition as designated by the system # administrator. If the job can use more than one partition, specify # their names in a comma separate list and the one offering earliest # initiation will be used. ### \mapattr --partition <- QueueName if j.Resources.QueueName: product += "#SBATCH -p " + j.Resources.QueueName + "\n" ### Set priority ### # SLURM V2.6.5 option description: --nice[=adjustment] # Run the job with an adjusted scheduling priority within SLURM. # With no adjustment value the scheduling priority is decreased by # 100. The adjustment range is from -10000 (highest priority) to # 10000 (lowest priority). Only privileged users can specify a # negative adjustment. NOTE: This option is presently ignored if # SchedulerType=sched/wiki or SchedulerType=sched/wiki2. if j.Application.Priority > -1: # Default is 0, and only superusers can assign priorities # less than 0. # We set the priority as '100 - ARC priority'. # This will have the desired effect for all grid jobs # Local jobs will unfortunatly have a default priority equal # to ARC priority 100, but there is no way around that. product += "#SBATCH --nice=%d\n" % (100-j.Application.Priority) else: # If priority is not set we should set it to # 50 to match the default in the XRSL documentation product += "#SBATCH --nice=%d\n" % (50) ### Project name for accounting ### # SLURM v2.4 option description: -A, --account= # Charge resources used by this job to specified account. The account is # an arbitrary string. The account name may be changed after job # submission using the scontrol command. # # The short option was renamed from '-U' to '-A' in SLURM v2.1. ### \mapattr --account <- OtherAttributes if "joboption;rsl_project" in j.OtherAttributes: product += "#SBATCH -A " + j.OtherAttributes["joboption;rsl_project"] + "\n" elif 'SBATCH_ACCOUNT' in j.OtherAttributes: product += "#SBATCH -A " + j.OtherAttributes['SBATCH_ACCOUNT'] + "\n" ### Job name for convenience ### # SLURM v2.3 option description: -J, --job-name= # Specify a name for the job allocation. The specified name will appear # along with the job id number when querying running jobs on the system. # The default is the name of the batch script, or just "sbatch" if the # script is read on sbatch's standard input. ### \mapattr --job-name <- JobName if j.Identification.JobName: # TODO: is this necessary? do parts of the infosys need these # limitations? # 's/^\([^[:alpha:]]\)/N\1/' # Prefix with 'N' if # starting with non-alpha character. 's/[^[:alnum:]]/_/g' # Replace all non-letters and numbers with '_'. # 's/\(...............\).*/\1/' # Limit to 15 characters. prefix = 'N' if not j.Identification.JobName[0].isalpha() else '' product += "#SBATCH -J '" + prefix + re.sub(r'\W', '_', j.Identification.JobName)[:15-len(prefix)] + "'\n" else: product += "#SBATCH -J 'gridjob'\n" # Set up the user's environment on the compute node where the script is # executed. SLURM v2.3 option description: # --get-user-env[=timeout][mode]. 
This option will tell sbatch to # retrieve the login environment variables for the user specified in the # --uid option. The environment variables are retrieved by running # something of this sort: "su - -c /usr/bin/env" and parsing # the output. Be aware that any environment variables already # set in sbatch's environment will take precedence over any environment # variables in the user's login environment. Clear any environment # variables before calling sbatch that you do not want propagated to the # spawned program. The optional timeout value is in seconds. Default # value is 8 seconds. The optional mode value control the "su" options. # With a mode value of "S", "su" is executed without the "-" option. # With a mode value of "L", "su" is executed with the "-" option, # replicating the login environment. If mode not specified, the mode # established at SLURM build time is used. Example of use include # "--get-user-env", "--get-user-env=10", "--get-user-env=10L", and # "--get-user-env=S". This option was originally created for use by # Moab. product += "#SBATCH --get-user-env=10L\n" ### (non-)parallel jobs ### # SLURM v2.3 option description: -n, --ntasks= # sbatch does not launch tasks, it requests an allocation of resources # and submits a batch script. This option advises the SLURM controller # that job steps run within the allocation will launch a maximum of # number tasks and to provide for sufficient resources. The default is # one task per node, but note that the --cpus-per-task option will # change this default. ### \mapattr --ntasks <- NumberOfSlots nslots = j.Resources.SlotRequirement.NumberOfSlots \ if j.Resources.SlotRequirement.NumberOfSlots > 1 else 1 product += "#SBATCH -n " + str(nslots) + "\n" ### SLURM v2.3 option description: --ntasks-per-node= # Request the maximum ntasks be invoked on each node. Meant to be used # with the --nodes option. This is related to --cpus-per-task=ncpus, # but does not require knowledge of the actual number of cpus on each # node. In some cases, it is more convenient to be able to request that # no more than a specific number of tasks be invoked on each node. # Examples of this include submitting a hybrid MPI/OpenMP app where only # one MPI "task/rank" should be assigned to each node while allowing the # OpenMP portion to utilize all of the parallelism present in the node, # or submitting a single setup/cleanup/monitoring job to each node of a # pre-existing allocation as one step in a larger job script. ### \mapattr --ntasks-per-node <- SlotsPerHost if j.Resources.SlotRequirement.SlotsPerHost > 1: product += "#SBATCH --ntasks-per-node " + \ str(j.Resources.SlotRequirement.SlotsPerHost) + "\n" # Node properties: Set by e.g. RTE script in stage 0. if "joboption;nodeproperty" in j.OtherAttributes: product += "#SBATCH " + j.OtherAttributes["joboption;nodeproperty"] product += "\n" # SLURM v2.3 option description: --exclusive # The job allocation can not share nodes with other running jobs. This # is the opposite of --share, whichever option is seen last on the # command line will be used. The default shared/exclusive behavior # depends on system configuration and the partition's Shared option take # s precedence over the job's option. ### \mapattr --exclusive <- ExclusiveExecution if j.Resources.SlotRequirement.ExclusiveExecution: product += "#SBATCH --exclusive\n" ### Execution times (minutes) ### # SLURM v2.4 option description: -t, --time=
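To make the option mapping above concrete, here is a small sketch, with hypothetical inputs, of the #SBATCH lines generated for the job name, priority and slot requirements discussed in the surrounding assemble_SBATCH code.

import re

def sbatch_job_name(job_name):
    # Prefix with 'N' if the name does not start with a letter, map any
    # non-alphanumeric character to '_' and keep at most 15 characters.
    if not job_name:
        return "#SBATCH -J 'gridjob'"
    prefix = 'N' if not job_name[0].isalpha() else ''
    return "#SBATCH -J '%s%s'" % (prefix, re.sub(r'\W', '_', job_name)[:15 - len(prefix)])

def sbatch_nice(arc_priority):
    # ARC priority (0-100) maps to --nice as 100 - priority; if unset,
    # nice=50 is used to match the default priority in the xRSL documentation.
    return "#SBATCH --nice=%d" % ((100 - arc_priority) if arc_priority > -1 else 50)

def sbatch_slots(number_of_slots, slots_per_host, exclusive):
    lines = ["#SBATCH -n %d" % (number_of_slots if number_of_slots > 1 else 1)]
    if slots_per_host > 1:
        lines.append("#SBATCH --ntasks-per-node %d" % slots_per_host)
    if exclusive:
        lines.append("#SBATCH --exclusive")
    return lines

print(sbatch_job_name('3d-render #42'))    # #SBATCH -J 'N3d_render__42'
print(sbatch_nice(-1))                     # #SBATCH --nice=50
print('\n'.join(sbatch_slots(8, 4, True)))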
    font_title."> $value 
    "; for(int n = 0; ; ++n) { XMLNode child = xml.Child(n); if (!child) break; output += ""; } output += "
    "; output += child.Name(); output += ""; RenderToHtml(child, output, depth+1); output += "
    "; } if(depth == 0) { output += ""; } } static void RenderToXml(Arc::XMLNode xml, std::string& output, int depth = 0) { xml.GetXML(output, "utf-8"); } static char const * SkipWS(char const * input) { while(*input) { if(!std::isspace(*input)) break; ++input; } return input; } static char const * SkipTo(char const * input, char tag) { while(*input) { if(*input == tag) break; ++input; } return input; } static char const * SkipToEscaped(char const * input, char tag) { while(*input) { if(*input == '\\') { ++input; if(!*input) break; } else if(*input == tag) { break; } ++input; } return input; } static char const * ParseFromJson(Arc::XMLNode& xml, char const * input, int depth = 0) { input = SkipWS(input); if(!*input) return input; if(*input == '{') { // complex item ++input; char const * nameStart = SkipWS(input); if(*nameStart != '}') while(true) { if(*nameStart != '"') return NULL; ++nameStart; char const * nameEnd = SkipToEscaped(nameStart, '"'); if(*nameEnd != '"') return NULL; char const * sep = SkipWS(nameEnd+1); if(*sep != ':') return NULL; XMLNode item = xml.NewChild(json_unencode(std::string(nameStart, nameEnd-nameStart))); input = sep+1; input = ParseFromJson(item,input,depth+1); if(!input) return NULL; input = SkipWS(input); if(*input == ',') { // next element ++input; } else if(*input == '}') { // last element break; } else { return NULL; }; }; ++input; } else if(*input == '[') { ++input; // array char const * nameStart = SkipWS(input); XMLNode item = xml; if(*nameStart != ']') while(true) { input = ParseFromJson(item,input,depth+1); if(!input) return NULL; input = SkipWS(input); if(*input == ',') { // next element ++input; item = xml.Parent().NewChild(item.Name()); } else if(*input == ']') { // last element item = xml.Parent().NewChild(item.Name()); // It will be deleted outside loop break; } else { return NULL; }; }; item.Destroy(); ++input; } else if(*input == '"') { ++input; // string char const * strStart = input; input = SkipToEscaped(strStart, '"'); if(*input != '"') return NULL; xml = json_unencode(std::string(strStart, input-strStart)); ++input; // } else if((*input >= '0') && (*input <= '9')) { // } else if(*input == 't') { // } else if(*input == 'f') { // } else if(*input == 'n') { } else { ++input; // true, false, null, number char const * strStart = input; while(*input) { if((*input == ',') || (*input == '}') || (*input == ']') || (std::isspace(*input))) break; ++input; } xml = std::string(strStart, input-strStart); }; return input; } static void RenderResponse(Arc::XMLNode xml, ResponseFormat format, std::string& output) { switch(format) { case ResponseFormatXml: RenderToXml(xml, output); break; case ResponseFormatHtml: RenderToHtml(xml, output); break; case ResponseFormatJson: RenderToJson(xml, output); break; default: break; } } static void ExtractRange(Arc::Message& inmsg, off_t& range_start, off_t& range_end) { range_start = 0; range_end = (off_t)(-1); { std::string val; val=inmsg.Attributes()->get("HTTP:RANGESTART"); if(!val.empty()) { // Negative ranges not supported if(!Arc::stringto(val,range_start)) { range_start=0; } else { val=inmsg.Attributes()->get("HTTP:RANGEEND"); if(!val.empty()) { if(!Arc::stringto(val,range_end)) { range_end=(off_t)(-1); } else { // Rest of code here treats end of range as exclusive // While HTTP ranges are inclusive ++range_end; }; }; }; }; }; } std::string ARexRest::ProcessingContext::operator[](char const * key) const { if(!key) return ""; std::multimap::const_iterator it = query.find(key); if(it == query.end()) return ""; 
return it->second; } static Arc::MCC_Status extract_content(Arc::Message& inmsg,std::string& content,uint32_t size_limit) { // Identify payload Arc::MessagePayload* payload = inmsg.Payload(); if(!payload) { return Arc::MCC_Status(Arc::GENERIC_ERROR,"","Missing payload"); }; Arc::PayloadStreamInterface* stream = dynamic_cast(payload); Arc::PayloadRawInterface* buf = dynamic_cast(payload); if((!stream) && (!buf)) { return Arc::MCC_Status(Arc::GENERIC_ERROR,"","Error processing payload"); } // Fetch content content.clear(); if(stream) { std::string add_str; while(stream->Get(add_str)) { content.append(add_str); if((size_limit != 0) && (content.size() >= size_limit)) break; } } else { for(unsigned int n = 0;buf->Buffer(n);++n) { content.append(buf->Buffer(n),buf->BufferSize(n)); if((size_limit != 0) && (content.size() >= size_limit)) break; }; }; return Arc::MCC_Status(Arc::STATUS_OK); } // Strip first token from path delimited by /. static bool GetPathToken(std::string& subpath, std::string& token) { std::string::size_type token_start = 0; while(subpath[token_start] == '/') ++token_start; std::string::size_type token_end = token_start; while((token_end < subpath.length()) && (subpath[token_end] != '/')) ++token_end; if (token_start == token_end) return false; token = subpath.substr(token_start, token_end-token_start); while(subpath[token_end] == '/') ++token_end; subpath.erase(0, token_end); return true; } static std::string StripNewLine(char const * str) { std::string res(str); for(std::string::size_type pos = res.find_first_of("\r\n"); pos != std::string::npos; pos = res.find_first_of("\r\n",pos)) { res[pos] = ' '; } return res; } // Insert generic (error) HTTP response into outmsg. static Arc::MCC_Status HTTPFault(Arc::Message& inmsg, Arc::Message& outmsg,int code,const char* resp) { Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); delete outmsg.Payload(outpayload); outmsg.Attributes()->set("HTTP:CODE",Arc::tostring(code)); if(resp) outmsg.Attributes()->set("HTTP:REASON",StripNewLine(resp)); return Arc::MCC_Status(Arc::STATUS_OK); } static Arc::MCC_Status HTTPResponse(Arc::Message& inmsg, Arc::Message& outmsg) { Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); delete outmsg.Payload(outpayload); outmsg.Attributes()->set("HTTP:CODE","200"); outmsg.Attributes()->set("HTTP:REASON","OK"); return Arc::MCC_Status(Arc::STATUS_OK); } static Arc::MCC_Status HTTPResponse(Arc::Message& inmsg, Arc::Message& outmsg, std::string const & content, std::string const& mime) { if(inmsg.Attributes()->get("HTTP:METHOD") == "HEAD") { Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); if(outpayload) outpayload->Truncate(content.length()); delete outmsg.Payload(outpayload); } else { Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); if(outpayload) outpayload->Insert(content.c_str(),0,content.length()); delete outmsg.Payload(outpayload); } outmsg.Attributes()->set("HTTP:CODE","200"); outmsg.Attributes()->set("HTTP:REASON","OK"); outmsg.Attributes()->set("HTTP:content-type",mime); return Arc::MCC_Status(Arc::STATUS_OK); } static Arc::MCC_Status HTTPResponseFile(Arc::Message& inmsg, Arc::Message& outmsg, int& fileHandle, std::string const& mime) { if(inmsg.Attributes()->get("HTTP:METHOD") == "HEAD") { Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); struct stat st; if(outpayload && (::fstat(fileHandle,&st) == 0)) outpayload->Truncate(st.st_size); delete outmsg.Payload(outpayload); } else { off_t range_start = 0; off_t range_end = 0; ExtractRange(inmsg, range_start, range_end); Arc::MessagePayload* 
outpayload = newFileRead(fileHandle,range_start,range_end); delete outmsg.Payload(outpayload); fileHandle = -1; } outmsg.Attributes()->set("HTTP:CODE","200"); outmsg.Attributes()->set("HTTP:REASON","OK"); outmsg.Attributes()->set("HTTP:content-type",mime); return Arc::MCC_Status(Arc::STATUS_OK); } static Arc::MCC_Status HTTPResponseFile(Arc::Message& inmsg, Arc::Message& outmsg, Arc::FileAccess*& fileHandle, std::string const& mime) { if(inmsg.Attributes()->get("HTTP:METHOD") == "HEAD") { Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); struct stat st; if(outpayload && fileHandle->fa_fstat(st)) outpayload->Truncate(st.st_size); delete outmsg.Payload(outpayload); } else { off_t range_start; off_t range_end; ExtractRange(inmsg, range_start, range_end); Arc::MessagePayload* outpayload = newFileRead(fileHandle,range_start,range_end); delete outmsg.Payload(outpayload); fileHandle = NULL; } outmsg.Attributes()->set("HTTP:CODE","200"); outmsg.Attributes()->set("HTTP:REASON","OK"); outmsg.Attributes()->set("HTTP:content-type",mime); return Arc::MCC_Status(Arc::STATUS_OK); } static Arc::MCC_Status HTTPDELETEResponse(Arc::Message& inmsg, Arc::Message& outmsg, bool queued = false) { Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); delete outmsg.Payload(outpayload); if(queued) { outmsg.Attributes()->set("HTTP:CODE","204"); outmsg.Attributes()->set("HTTP:REASON","No Content"); } else { outmsg.Attributes()->set("HTTP:CODE","202"); outmsg.Attributes()->set("HTTP:REASON","Accepted"); } return Arc::MCC_Status(Arc::STATUS_OK); } static Arc::MCC_Status HTTPPOSTDelayedResponse(Arc::Message& inmsg, Arc::Message& outmsg) { Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); delete outmsg.Payload(outpayload); outmsg.Attributes()->set("HTTP:CODE","202"); outmsg.Attributes()->set("HTTP:REASON","Queued"); return Arc::MCC_Status(Arc::STATUS_OK); } static Arc::MCC_Status HTTPPOSTResponse(Arc::Message& inmsg, Arc::Message& outmsg, std::string const & content, std::string const& mime, std::string const & redir = "") { Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); if(outpayload) outpayload->Insert(content.c_str(),0,content.length()); delete outmsg.Payload(outpayload); outmsg.Attributes()->set("HTTP:CODE","201"); outmsg.Attributes()->set("HTTP:REASON","Created"); outmsg.Attributes()->set("HTTP:content-type",mime); if(!redir.empty()) outmsg.Attributes()->set("HTTP:location",redir); return Arc::MCC_Status(Arc::STATUS_OK); } static ResponseFormat ProcessAcceptedFormat(Arc::Message& inmsg, Arc::Message& outmsg) { // text/html, application/xhtml+xml, application/xml;q=0.9, image/webp, */*;q=0.8 std::list accepts; for(Arc::AttributeIterator attrIt = inmsg.Attributes()->getAll("HTTP:accept"); attrIt.hasMore(); ++attrIt) tokenize(*attrIt, accepts, ","); for(std::list::iterator acc = accepts.begin(); acc != accepts.end(); ++acc) { *acc = Arc::trim(*acc, " "); std::string::size_type pos = acc->find_first_of(';'); if(pos != std::string::npos) acc->erase(pos); } ResponseFormat outFormat = ResponseFormatHtml; for(std::list::iterator acc = accepts.begin(); acc != accepts.end(); ++acc) { if(*acc == "application/json") { outFormat = ResponseFormatJson; outmsg.Attributes()->set("HTTP:content-type","application/json"); break; } else if((*acc == "text/xml") || (*acc == "application/xml")) { outFormat = ResponseFormatXml; outmsg.Attributes()->set("HTTP:content-type","application/xml"); break; } else if(*acc == "text/html") { outFormat = ResponseFormatHtml; outmsg.Attributes()->set("HTTP:content-type","text/html"); break; } } 
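A compact sketch of the Accept-header negotiation performed by ProcessAcceptedFormat above: media types are split on commas, quality parameters are ignored, and the first recognised type wins, with HTML as the fallback.

def pick_response_format(accept_header):
    # e.g. 'text/html, application/xml;q=0.9, */*;q=0.8'
    for item in accept_header.split(','):
        media = item.strip().split(';')[0].strip()
        if media == 'application/json':
            return 'json'
        if media in ('text/xml', 'application/xml'):
            return 'xml'
        if media == 'text/html':
            return 'html'
    return 'html'

print(pick_response_format('text/html, application/xml;q=0.9, */*;q=0.8'))  # html
print(pick_response_format('application/json'))                             # json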
return outFormat; } // Insert structured positive response into outmsg. static Arc::MCC_Status HTTPResponse(Arc::Message& inmsg, Arc::Message& outmsg, Arc::XMLNode& resp) { ResponseFormat outFormat = ProcessAcceptedFormat(inmsg,outmsg); std::string respStr; RenderResponse(resp, outFormat, respStr); if(inmsg.Attributes()->get("HTTP:METHOD") == "HEAD") { Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); if(outpayload) outpayload->Truncate(respStr.length()); delete outmsg.Payload(outpayload); } else { Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); if(outpayload) outpayload->Insert(respStr.c_str(),0,respStr.length()); delete outmsg.Payload(outpayload); } outmsg.Attributes()->set("HTTP:CODE","200"); outmsg.Attributes()->set("HTTP:REASON","OK"); return Arc::MCC_Status(Arc::STATUS_OK); } static Arc::MCC_Status HTTPPOSTResponse(Arc::Message& inmsg, Arc::Message& outmsg, Arc::XMLNode& resp, std::string const & redir = "") { ResponseFormat outFormat = ProcessAcceptedFormat(inmsg,outmsg); std::string respStr; RenderResponse(resp, outFormat, respStr); Arc::PayloadRaw* outpayload = new Arc::PayloadRaw(); if(outpayload) outpayload->Insert(respStr.c_str(),0,respStr.length()); delete outmsg.Payload(outpayload); outmsg.Attributes()->set("HTTP:CODE","201"); outmsg.Attributes()->set("HTTP:REASON","Created"); if(!redir.empty()) outmsg.Attributes()->set("HTTP:location",redir); return Arc::MCC_Status(Arc::STATUS_OK); } static std::string GetPath(Arc::Message &inmsg,std::string &base,std::multimap& query) { base = inmsg.Attributes()->get("HTTP:ENDPOINT"); Arc::AttributeIterator iterator = inmsg.Attributes()->getAll("PLEXER:EXTENSION"); std::string path; if(iterator.hasMore()) { // Service is behind plexer path = *iterator; if(base.length() > path.length()) base.resize(base.length()-path.length()); } else { // Standalone service path=Arc::URL(base).Path(); base.resize(0); }; std::string::size_type queryPos = path.find('?'); if(queryPos == std::string::npos) queryPos = path.find(';'); if(queryPos != std::string::npos) { std::list queryItems; Arc::tokenize(path.substr(queryPos+1), queryItems, "&"); for(std::list::iterator queryItem = queryItems.begin(); queryItem != queryItems.end(); ++queryItem) { std::string::size_type valuePos = queryItem->find('='); std::string value; if(valuePos != std::string::npos) { value = queryItem->substr(valuePos+1); queryItem->resize(valuePos); }; query.insert(std::pair(Arc::uri_unencode(*queryItem),Arc::uri_unencode(value))); }; path.resize(queryPos); }; // Path is encoded in HTTP URLs too path = Arc::uri_unencode(path); return path; } static void ParseIds(std::multimap const & query, std::list& ids) { typedef std::multimap::const_iterator iter; std::pair range = query.equal_range("id"); for(iter id = range.first; id != range.second; ++id) { ids.push_back(id->second); }; } static void ParseJobIds(Arc::Message& inmsg, Arc::Message& outmsg, std::list& ids) { std::string content; Arc::MCC_Status status = extract_content(inmsg,content,1024*1024); std::string contentType = inmsg.Attributes()->get("HTTP:content-type"); Arc::XMLNode listXml; if(contentType == "application/json") { Arc::XMLNode("").Move(listXml); (void)ParseFromJson(listXml, content.c_str()); } else if((contentType == "application/xml") || contentType.empty()) { Arc::XMLNode(content).Move(listXml); } // jobs // job // id for(Arc::XMLNode jobXml = listXml["job"];(bool)jobXml;++jobXml) { std::string id = jobXml["id"]; if(!id.empty()) ids.push_back(id); } } // REST State A-REX State // * ACCEPTING ACCEPTED // * ACCEPTED 
PENDING:ACCEPTED // * PREPARING PREPARING // * PREPARED PENDING:PREPARING // * SUBMITTING SUBMIT // - QUEUING INLRMS + LRMS queued // - RUNNING INLRMS + LRMS running // - HELD INLRMS + LRMS on hold // - EXITINGLRMS INLRMS + LRMS finished // - OTHER INLRMS + LRMS other // * EXECUTED PENDING:INLRMS // * FINISHING FINISHING // * KILLING CANCELLING | PREPARING + DTR cancel | FINISHING + DTR cancel // * FINISHED FINISHED + no errors & no cancel // * FAILED FINISHED + errors // * KILLED FINISHED + cancel // * WIPED DELETED static void convertActivityStatusREST(const std::string& gm_state,std::string& rest_state, bool failed,bool pending,const std::string& /*failedstate*/,const std::string& failedcause) { rest_state.clear(); if(gm_state == "ACCEPTED") { if(!pending) rest_state="ACCEPTING"; else rest_state="ACCEPTED"; } else if(gm_state == "PREPARING") { if(!pending) rest_state="PREPARING"; else rest_state="PREPARED"; } else if(gm_state == "SUBMIT") { rest_state="SUBMITTING"; } else if(gm_state == "INLRMS") { if(!pending) { // Talking to LRMS would be too heavy. Just choose something innocent enough. rest_state="RUNNING"; } else { rest_state="EXECUTED"; } } else if(gm_state == "FINISHING") { rest_state="FINISHING"; } else if(gm_state == "CANCELING") { rest_state="KILLING"; } else if(gm_state == "FINISHED") { if(!pending) { if(failed) { // TODO: hack if(failedcause.find("Job is canceled by external request") != std::string::npos) { rest_state = "KILLED"; } else { rest_state = "FAILED"; } } else { rest_state="FINISHED"; } } else { rest_state="EXECUTED"; } } else if(gm_state == "DELETED") { rest_state="WIPED"; } else { rest_state="None"; } } ARexRest::ARexRest(Arc::Config *cfg, Arc::PluginArgument *parg, GMConfig& config, ARex::DelegationStores& delegation_stores,unsigned int& all_jobs_count): logger_(Arc::Logger::rootLogger, "A-REX REST"), config_(config),delegation_stores_(delegation_stores),all_jobs_count_(all_jobs_count) { endpoint_=(std::string)((*cfg)["endpoint"]); uname_=(std::string)((*cfg)["usermap"]["defaultLocalName"]); } ARexRest::~ARexRest(void) { } // Main request processor of REST interface Arc::MCC_Status ARexRest::process(Arc::Message& inmsg,Arc::Message& outmsg) { // Split request path into parts: service, jobs, files, etc. 
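The table and conversion routine above boil down to the following mapping; the pending/failed flags and the cancel-cause substring check are simplified here into plain arguments.

def rest_state(gm_state, pending=False, failed=False, cancelled=False):
    if gm_state == 'ACCEPTED':
        return 'ACCEPTED' if pending else 'ACCEPTING'
    if gm_state == 'PREPARING':
        return 'PREPARED' if pending else 'PREPARING'
    if gm_state == 'SUBMIT':
        return 'SUBMITTING'
    if gm_state == 'INLRMS':
        # The service does not query the LRMS here, so it reports RUNNING.
        return 'EXECUTED' if pending else 'RUNNING'
    if gm_state == 'FINISHING':
        return 'FINISHING'
    if gm_state == 'CANCELING':
        return 'KILLING'
    if gm_state == 'FINISHED':
        if pending:
            return 'EXECUTED'
        if failed:
            return 'KILLED' if cancelled else 'FAILED'
        return 'FINISHED'
    if gm_state == 'DELETED':
        return 'WIPED'
    return 'None'

print(rest_state('INLRMS'))                                   # RUNNING
print(rest_state('FINISHED', failed=True, cancelled=True))    # KILLED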
// TODO: make it HTTP independent std::string endpoint; ProcessingContext context; context.method = inmsg.Attributes()->get("HTTP:METHOD"); std::string clientid = (inmsg.Attributes()->get("TCP:REMOTEHOST"))+":"+(inmsg.Attributes()->get("TCP:REMOTEPORT")); logger_.msg(Arc::INFO, "Connection from %s: %s", inmsg.Attributes()->get("TCP:REMOTEHOST"), inmsg.Attributes()->get("TLS:IDENTITYDN")); context.subpath = GetPath(inmsg,endpoint,context.query); context.processed = "/"; if((inmsg.Attributes()->get("PLEXER:PATTERN").empty()) && context.subpath.empty()) context.subpath=endpoint; logger_.msg(Arc::VERBOSE, "process: method: %s", context.method); logger_.msg(Arc::VERBOSE, "process: endpoint: %s", endpoint); // {/rest}// logger_.msg(Arc::VERBOSE, "REST: process %s at %s",context.method,context.subpath); std::string apiVersion; GetPathToken(context.subpath, apiVersion); // drop /rest if ((!GetPathToken(context.subpath, apiVersion)) || apiVersion.empty()) { // {/rest return processVersions(inmsg, outmsg, context); } context.processed += apiVersion; context.processed += "/"; if (apiVersion != "1.0") { return HTTPFault(inmsg,outmsg,404,"Version Not Supported"); } std::string functionality; if(!GetPathToken(context.subpath, functionality) || functionality.empty()) { // {/rest}/ return processGeneral(inmsg, outmsg, context); } context.processed += functionality; context.processed += "/"; if (functionality == "info") { // {/rest}//info[?schema=glue2] return processInfo(inmsg, outmsg, context); } else if (functionality == "delegations") { // {/rest}/delegations/[[?action=get,renew,delete]] return processDelegations(inmsg, outmsg, context); } else if (functionality == "jobs") { // {/rest}//jobs[?state=[&state=[...]]] // {/rest}//jobs?action={new|info|status|kill|clean|restart} return processJobs(inmsg, outmsg, context); } return HTTPFault(inmsg,outmsg,404,"Functionality Not Supported"); } // ---------------------------- GENERAL INFO --------------------------------- Arc::MCC_Status ARexRest::processVersions(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context) { if((context.method == "GET") || (context.method == "HEAD")) { XMLNode versions("1.0"); // only supported version is 1.0 return HTTPResponse(inmsg, outmsg, versions); } logger_.msg(Arc::VERBOSE, "process: method %s is not supported for subpath %s",context.method,context.processed); return HTTPFault(inmsg,outmsg,501,"Not Implemented"); } Arc::MCC_Status ARexRest::processGeneral(Arc::Message& inmsg,Arc::Message& outmsg, ProcessingContext& context) { return HTTPFault(inmsg,outmsg,404,"Not Found"); } Arc::MCC_Status ARexRest::processInfo(Arc::Message& inmsg,Arc::Message& outmsg, ProcessingContext& context) { if(!context.subpath.empty()) return HTTPFault(inmsg,outmsg,404,"Not Found"); // GET /info[?schema=glue2] - retrieve generic information about cluster. // HEAD - supported. // PUT,POST,DELETE - not supported. 
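A hypothetical client call for the /info endpoint dispatched above; the base URL, proxy-certificate path and CA directory are illustrative assumptions, and any HTTP client could be used instead of requests.

import requests  # third-party HTTP client

base = 'https://arc.example.org:443/arex/rest/1.0'   # hypothetical endpoint
proxy = '/tmp/x509up_u1000'                          # hypothetical client proxy
ca = '/etc/grid-security/certificates'               # hypothetical CA directory

r = requests.get(base + '/info', params={'schema': 'glue2'},
                 headers={'Accept': 'application/json'},
                 cert=proxy, verify=ca)
r.raise_for_status()
print(r.json())          # GLUE2 resource description served from the info file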
if((context.method != "GET") && (context.method != "HEAD")) { logger_.msg(Arc::VERBOSE, "process: method %s is not supported for subpath %s",context.method,context.processed); return HTTPFault(inmsg,outmsg,501,"Not Implemented"); } std::string schema = context["schema"]; if (!schema.empty() && (schema != "glue2")) { logger_.msg(Arc::VERBOSE, "process: schema %s is not supported for subpath %s",schema,context.processed); return HTTPFault(inmsg,outmsg,501,"Schema not implemented"); } std::string infoStr; Arc::FileRead(config_.InformationFile(), infoStr); XMLNode infoXml(infoStr); return HTTPResponse(inmsg, outmsg, infoXml); } // ---------------------------- DELEGATIONS --------------------------------- Arc::MCC_Status ARexRest::processDelegations(Arc::Message& inmsg,Arc::Message& outmsg, ProcessingContext& context) { // GET /delegations - retrieves list of delegations belonging to authenticated user // HEAD - supported. // POST /delegations?action=new starts a new delegation process (1st step). // PUT /delegations/ stores public part (2nd step). // POST /delegations/?action=get,renew,delete used to manage delegation. std::string delegationId; if(GetPathToken(context.subpath, delegationId)) { context.processed += delegationId; context.processed += "/"; return processDelegation(inmsg,outmsg,context,delegationId); } ARexConfigContext* config = ARexConfigContext::GetRutimeConfiguration(inmsg,config_,uname_,endpoint_); if(!config) { return HTTPFault(inmsg,outmsg,500,"User can't be assigned configuration"); } if((context.method == "GET") || (context.method == "HEAD")) { XMLNode listXml(""); std::list ids = delegation_stores_[config_.DelegationDir()].ListCredIDs(config->GridName()); for(std::list::iterator itId = ids.begin(); itId != ids.end(); ++itId) { listXml.NewChild("delegation").NewChild("id") = *itId; } return HTTPResponse(inmsg, outmsg, listXml); } else if(context.method == "POST") { std::string action = context["action"]; if(action != "new") return HTTPFault(inmsg,outmsg,501,"Action not implemented"); std::string delegationId; std::string delegationRequest; if(!delegation_stores_.GetRequest(config_.DelegationDir(),delegationId,config->GridName(),delegationRequest)) { return HTTPFault(inmsg,outmsg,500,"Failed generating delegation request"); } Arc::URL base(inmsg.Attributes()->get("HTTP:ENDPOINT")); return HTTPPOSTResponse(inmsg,outmsg,delegationRequest,"application/x-pem-file",base.Path()+"/"+delegationId); } logger_.msg(Arc::VERBOSE, "process: method %s is not supported for subpath %s",context.method,context.processed); return HTTPFault(inmsg,outmsg,501,"Not Implemented"); } void UpdateProxyFile(ARex::DelegationStores& delegation_stores, ARexConfigContext& config, std::string const& id) { #if 1 // In case of update for compatibility during intermediate period store delegations in // per-job proxy file too. 
DelegationStore& delegation_store(delegation_stores[config.GmConfig().DelegationDir()]); std::list job_ids; if(delegation_store.GetLocks(id,config.GridName(),job_ids)) { for(std::list::iterator job_id = job_ids.begin(); job_id != job_ids.end(); ++job_id) { // check if that is main delegation for this job std::string delegationid; if(job_local_read_delegationid(*job_id,config.GmConfig(),delegationid)) { if(id == delegationid) { std::string credentials; if(delegation_store.GetCred(id,config.GridName(),credentials)) { if(!credentials.empty()) { GMJob job(*job_id,Arc::User(config.User().get_uid())); (void)job_proxy_write_file(job,config.GmConfig(),credentials); }; }; }; }; }; }; #endif } Arc::MCC_Status ARexRest::processDelegation(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context,std::string const & id) { // GET,HEAD,DELETE - not supported. // PUT /delegations/ stores public part (2nd step) to finish delegation procedure or to re-new delegation. // POST /delegations/?action=get,renew,delete used to manage delegation. if(!context.subpath.empty()) return HTTPFault(inmsg,outmsg,404,"Not Found"); // no more sub-resources ARexConfigContext* config = ARexConfigContext::GetRutimeConfiguration(inmsg,config_,uname_,endpoint_); if(!config) return HTTPFault(inmsg,outmsg,500,"User can't be assigned configuration"); // POST - manages delegation. if(context.method == "PUT") { // Fetch HTTP content to pass it as delegation std::string content; Arc::MCC_Status res = extract_content(inmsg,content,1024*1024); // 1mb size limit is sane enough if(!res) return HTTPFault(inmsg,outmsg,500,res.getExplanation().c_str()); if(content.empty()) return HTTPFault(inmsg,outmsg,500,"Missing payload"); if(!delegation_stores_.PutDeleg(config_.DelegationDir(),id,config->GridName(),content)) return HTTPFault(inmsg,outmsg,500,"Failed accepting delegation"); UpdateProxyFile(delegation_stores_, *config, id); return HTTPResponse(inmsg,outmsg); } else if(context.method == "POST") { std::string action = context["action"]; if(action == "get") { std::string credentials; if(!delegation_stores_[config_.DelegationDir()].GetDeleg(id, config->GridName(), credentials)) { return HTTPFault(inmsg,outmsg,404,"No delegation found"); } return HTTPResponse(inmsg, outmsg, credentials, "application/x-pem-file"); // ?? } else if(action == "renew") { std::string delegationId = id; std::string delegationRequest; if(!delegation_stores_.GetRequest(config_.DelegationDir(),delegationId,config->GridName(),delegationRequest)) return HTTPFault(inmsg,outmsg,500,"Failed generating delegation request"); return HTTPPOSTResponse(inmsg,outmsg,delegationRequest,"application/x-pem-file",""); } else if(action == "delete") { Arc::DelegationConsumerSOAP* deleg = delegation_stores_[config_.DelegationDir()].FindConsumer(id, config->GridName()); if(!deleg) return HTTPFault(inmsg,outmsg,404,"No such delegation"); if(!(delegation_stores_[config_.DelegationDir()].RemoveConsumer(deleg))) return HTTPFault(inmsg,outmsg,500,"Failed deleting delegation"); return HTTPDELETEResponse(inmsg, outmsg); // ?? 
} logger_.msg(Arc::VERBOSE, "process: action %s is not supported for subpath %s",action,context.processed); return HTTPFault(inmsg,outmsg,501,"Action not implemented"); } logger_.msg(Arc::VERBOSE, "process: method %s is not supported for subpath %s",context.method,context.processed); return HTTPFault(inmsg,outmsg,501,"Not Implemented"); } // ---------------------------- JOBS --------------------------------- static bool processJobInfo(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml); static bool processJobStatus(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml); static bool processJobKill(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml); static bool processJobClean(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml); static bool processJobRestart(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml); static bool processJobDelegations(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml, ARex::DelegationStores& delegation_stores); Arc::MCC_Status ARexRest::processJobs(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context) { // GET /jobs[?state=] // HEAD - supported. // POST /jobs?action=new initiates creation of a new job instance or multiple jobs. // POST /jobs?action={info|status|kill|clean|restart|delegations} - job management operations supporting arrays of jobs. // PUT - not supported. std::string jobId; if(GetPathToken(context.subpath, jobId)) { // /jobs//... context.processed += jobId; context.processed += "/"; return processJob(inmsg,outmsg,context,jobId); } ARexConfigContext* config = ARexConfigContext::GetRutimeConfiguration(inmsg,config_,uname_,endpoint_); if(!config) { return HTTPFault(inmsg,outmsg,500,"User can't be assigned configuration"); } if((context.method == "GET") || (context.method == "HEAD")) { std::list states; tokenize(context["state"], states, ","); XMLNode listXml(""); std::list ids = ARexJob::Jobs(*config,logger_); for(std::list::iterator itId = ids.begin(); itId != ids.end(); ++itId) { std::string rest_state; if(!states.empty()) { ARexJob job(*itId,*config,logger_); if(!job) continue; // There is no such job bool job_pending = false; std::string gm_state = job.State(job_pending); bool job_failed = job.Failed(); std::string failed_cause; std::string failed_state = job.FailedState(failed_cause); convertActivityStatusREST(gm_state,rest_state,job_failed,job_pending,failed_state,failed_cause); bool state_found = false; for(std::list::iterator itState = states.begin(); itState != states.end(); ++itState) { if(rest_state == *itState) { state_found = true; break; } } if(!state_found) continue; } // states filter XMLNode jobXml = listXml.NewChild("job"); jobXml.NewChild("id") = *itId; if(!rest_state.empty()) jobXml.NewChild("state") = rest_state; } return HTTPResponse(inmsg, outmsg, listXml); } else if(context.method == "POST") { std::string action = context["action"]; if(action == "new") { if((config->GmConfig().MaxTotal() > 0) && (all_jobs_count_ >= config->GmConfig().MaxTotal())) return HTTPFault(inmsg,outmsg,500,"No more jobs allowed"); // Fetch HTTP content to pass it as job description std::string desc_str; Arc::MCC_Status res = extract_content(inmsg,desc_str,100*1024*1024); if(!res) return 
HTTPFault(inmsg,outmsg,500,res.getExplanation().c_str()); if(desc_str.empty()) return HTTPFault(inmsg,outmsg,500,"Missing payload"); JobIDGeneratorREST idgenerator(config->Endpoint()); std::string clientid = (inmsg.Attributes()->get("TCP:REMOTEHOST"))+":"+(inmsg.Attributes()->get("TCP:REMOTEPORT")); // TODO: Make ARexJob accept JobDescription directly to avoid reparsing jobs and use Arc::JobDescription::Parse here. // Quck and dirty check for job type std::string::size_type start_pos = desc_str.find_first_not_of(" \t\r\n"); if(start_pos == std::string::npos) return HTTPFault(inmsg,outmsg,500,"Payload is empty"); XMLNode listXml(""); // TODO: Split to separate functions switch(desc_str[start_pos]) { case '<': { // XML (multi- or single-ADL) Arc::XMLNode jobs_desc_xml(desc_str); if (jobs_desc_xml.Name() == "ActivityDescriptions") { // multi for(int idx = 0;;++idx) { Arc::XMLNode job_desc_xml = jobs_desc_xml.Child(idx); if(!job_desc_xml) break; XMLNode jobXml = listXml.NewChild("job"); ARexJob job(job_desc_xml,*config,"",clientid,logger_,idgenerator); if(!job) { jobXml.NewChild("status-code") = "500"; jobXml.NewChild("reason") = job.Failure(); } else { jobXml.NewChild("status-code") = "201"; jobXml.NewChild("reason") = "Created"; jobXml.NewChild("id") = job.ID(); jobXml.NewChild("state") = "ACCEPTING"; } } } else { // maybe single XMLNode jobXml = listXml.NewChild("job"); ARexJob job(jobs_desc_xml,*config,"",clientid,logger_,idgenerator); if(!job) { jobXml.NewChild("status-code") = "500"; jobXml.NewChild("reason") = job.Failure(); } else { jobXml.NewChild("status-code") = "201"; jobXml.NewChild("reason") = "Created"; jobXml.NewChild("id") = job.ID(); jobXml.NewChild("state") = "ACCEPTING"; } } }; break; case '&': { // single-xRSL XMLNode jobXml = listXml.NewChild("job"); ARexJob job(desc_str,*config,"",clientid,logger_,idgenerator); if(!job) { jobXml.NewChild("status-code") = "500"; jobXml.NewChild("reason") = job.Failure(); } else { jobXml.NewChild("status-code") = "201"; jobXml.NewChild("reason") = "Created"; jobXml.NewChild("id") = job.ID(); jobXml.NewChild("state") = "ACCEPTING"; } }; break; case '+': { // multi-xRSL std::list jobdescs; Arc::JobDescriptionResult result = Arc::JobDescription::Parse(desc_str, jobdescs, "nordugrid:xrsl", "GRIDMANAGER"); if (!result) { return HTTPFault(inmsg,outmsg,500,result.str().c_str()); } else { for(std::list::iterator jobdesc = jobdescs.begin(); jobdesc != jobdescs.end(); ++jobdesc) { XMLNode jobXml = listXml.NewChild("job"); std::string jobdesc_str; result = jobdesc->UnParse(jobdesc_str, "nordugrid:xrsl", "GRIDMANAGER"); if (!result) { jobXml.NewChild("status-code") = "500"; jobXml.NewChild("reason") = result.str(); } else { ARexJob job(jobdesc_str,*config,"",clientid,logger_,idgenerator); if(!job) { jobXml.NewChild("status-code") = "500"; jobXml.NewChild("reason") = job.Failure(); } else { jobXml.NewChild("status-code") = "201"; jobXml.NewChild("reason") = "Created"; jobXml.NewChild("id") = job.ID(); jobXml.NewChild("state") = "ACCEPTING"; } } } } }; break; default: return HTTPFault(inmsg,outmsg,500,"Payload is not recognized"); break; } return HTTPPOSTResponse(inmsg, outmsg, listXml); } else if(action == "info") { std::list ids; ParseJobIds(inmsg,outmsg,ids); XMLNode listXml(""); for(std::list::iterator id = ids.begin(); id != ids.end(); ++id) { XMLNode jobXml = listXml.NewChild("job"); (void)processJobInfo(inmsg,*config,logger_,*id,jobXml); } return HTTPPOSTResponse(inmsg, outmsg, listXml); } else if(action == "status") { std::list ids; 
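A hypothetical submission and listing session against the /jobs collection handled above, using a minimal xRSL description (its leading '&' is what selects the xRSL branch). Endpoint, credentials and the JSON layout of the responses are assumptions.

import requests  # third-party HTTP client

base = 'https://arc.example.org:443/arex/rest/1.0'   # hypothetical endpoint
proxy = '/tmp/x509up_u1000'                          # hypothetical client proxy
ca = '/etc/grid-security/certificates'               # hypothetical CA directory

# Submit a single xRSL description through ?action=new.
xrsl = '&(executable="/bin/echo")(arguments="hello")(jobname="resttest")'
r = requests.post(base + '/jobs', params={'action': 'new'}, data=xrsl,
                  headers={'Accept': 'application/json'}, cert=proxy, verify=ca)
jobs = r.json().get('job', [])
if isinstance(jobs, dict):        # a single entry may come back as an object
    jobs = [jobs]
for job in jobs:
    print(job.get('status-code'), job.get('id'), job.get('state'))

# List jobs, filtered by REST state as in the GET branch above.
r = requests.get(base + '/jobs', params={'state': 'RUNNING,FINISHED'},
                 headers={'Accept': 'application/json'}, cert=proxy, verify=ca)
print(r.json())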
ParseJobIds(inmsg,outmsg,ids); XMLNode listXml(""); for(std::list::iterator id = ids.begin(); id != ids.end(); ++id) { XMLNode jobXml = listXml.NewChild("job"); (void)processJobStatus(inmsg,*config,logger_,*id,jobXml); } return HTTPPOSTResponse(inmsg, outmsg, listXml); } else if(action == "kill") { std::list ids; ParseJobIds(inmsg,outmsg,ids); XMLNode listXml(""); for(std::list::iterator id = ids.begin(); id != ids.end(); ++id) { XMLNode jobXml = listXml.NewChild("job"); (void)processJobKill(inmsg,*config,logger_,*id,jobXml); } return HTTPPOSTResponse(inmsg, outmsg, listXml); } else if(action == "clean") { std::list ids; ParseJobIds(inmsg,outmsg,ids); XMLNode listXml(""); for(std::list::iterator id = ids.begin(); id != ids.end(); ++id) { XMLNode jobXml = listXml.NewChild("job"); (void)processJobClean(inmsg,*config,logger_,*id,jobXml); } return HTTPPOSTResponse(inmsg, outmsg, listXml); } else if(action == "restart") { std::list ids; ParseJobIds(inmsg,outmsg,ids); XMLNode listXml(""); for(std::list::iterator id = ids.begin(); id != ids.end(); ++id) { XMLNode jobXml = listXml.NewChild("job"); (void)processJobRestart(inmsg,*config,logger_,*id,jobXml); } return HTTPPOSTResponse(inmsg, outmsg, listXml); } else if(action == "delegations") { std::list ids; ParseJobIds(inmsg,outmsg,ids); XMLNode listXml(""); for(std::list::iterator id = ids.begin(); id != ids.end(); ++id) { XMLNode jobXml = listXml.NewChild("job"); (void)processJobDelegations(inmsg,*config,logger_,*id,jobXml,delegation_stores_); } return HTTPPOSTResponse(inmsg, outmsg, listXml); } logger_.msg(Arc::VERBOSE, "process: action %s is not supported for subpath %s",action,context.processed); return HTTPFault(inmsg,outmsg,501,"Action not implemented"); } logger_.msg(Arc::VERBOSE, "process: method %s is not supported for subpath %s",context.method,context.processed); return HTTPFault(inmsg,outmsg,501,"Not Implemented"); } static bool processJobInfo(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml) { ARexJob job(id,config,logger); if(!job) { // There is no such job std::string failure = job.Failure(); logger.msg(Arc::ERROR, "REST:GET job %s - %s", id, failure); jobXml.NewChild("status-code") = "404"; jobXml.NewChild("reason") = (!failure.empty()) ? 
failure : "Job not found"; jobXml.NewChild("id") = id; jobXml.NewChild("info_document"); return false; } std::string glue_s; Arc::XMLNode glue_xml(job_xml_read_file(id,config.GmConfig(),glue_s)?glue_s:""); if(!glue_xml) { // Fallback: create something minimal static const char* job_xml_template = "\n" " \n" " SubmittedVia=org.ogf.glue.emies.activitycreation\n" " single\n" " \n" " emies:adl\n" " \n" " \n" " \n" " \n" " \n" ""; Arc::XMLNode(job_xml_template).New(glue_xml); Arc::URL headnode(config.GmConfig().HeadNode()); glue_xml["ID"] = std::string("urn:caid:")+headnode.Host()+":org.ogf.glue.emies.activitycreation:"+id; glue_xml["IDFromEndpoint"] = "urn:idfe:"+id; { // Collecting job state bool job_pending = false; std::string gm_state = job.State(job_pending); bool job_failed = job.Failed(); std::string failed_cause; std::string failed_state = job.FailedState(failed_cause); std::string primary_state; std::list state_attributes; convertActivityStatusES(gm_state,primary_state,state_attributes, job_failed,job_pending,failed_state,failed_cause); glue_xml["State"] = "emies:"+primary_state; std::string prefix = glue_xml["State"].Prefix(); for(std::list::iterator attr = state_attributes.begin(); attr != state_attributes.end(); ++attr) { glue_xml.NewChild(prefix+":State") = "emiesattr:"+(*attr); }; std::string rest_state; convertActivityStatusREST(gm_state,rest_state, job_failed,job_pending,failed_state,failed_cause); glue_xml["State"] = "arcrest:"+rest_state; }; glue_xml["Owner"] = config.GridName(); glue_xml.Attribute("CreationTime") = job.Created().str(Arc::ISOTime); }; // Delegation ids? jobXml.NewChild("status-code") = "200"; jobXml.NewChild("reason") = "OK"; jobXml.NewChild("id") = id; jobXml.NewChild("info_document").NewChild("ComputingActivity").Exchange(glue_xml); return true; } static bool processJobStatus(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml) { ARexJob job(id,config,logger); if(!job) { // There is no such job std::string failure = job.Failure(); logger.msg(Arc::ERROR, "REST:GET job %s - %s", id, failure); jobXml.NewChild("status-code") = "404"; jobXml.NewChild("reason") = (!failure.empty()) ? 
failure : "Job not found"; jobXml.NewChild("id") = id; jobXml.NewChild("State") = "None"; return false; } // Collecting job state // Most detailed state is obtianable from XML info std::string rest_state; { std::string glue_s; if(job_xml_read_file(id,config.GmConfig(),glue_s)) { Arc::XMLNode glue_xml(glue_s); if((bool)glue_xml) { for(Arc::XMLNode snode = glue_xml["State"]; (bool)snode ; ++snode) { std::string state_str = snode; if(state_str.compare(0, 8, "arcrest:") == 0) { rest_state = state_str.substr(8); break; } } } } } if (rest_state.empty()) { // Faster but less detailed state can be computed from GM state bool job_pending = false; std::string gm_state = job.State(job_pending); bool job_failed = job.Failed(); std::string failed_cause; std::string failed_state = job.FailedState(failed_cause); convertActivityStatusREST(gm_state,rest_state, job_failed,job_pending,failed_state,failed_cause); } jobXml.NewChild("status-code") = "200"; jobXml.NewChild("reason") = "OK"; jobXml.NewChild("id") = id; jobXml.NewChild("state") = rest_state; return true; } static bool processJobKill(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml) { ARexJob job(id,config,logger); if(!job) { // There is no such job std::string failure = job.Failure(); logger.msg(Arc::ERROR, "REST:KILL job %s - %s", id, failure); jobXml.NewChild("status-code") = "404"; jobXml.NewChild("reason") = (!failure.empty()) ? failure : "Job not found"; jobXml.NewChild("id") = id; return false; } if(!job.Cancel()) { std::string failure = job.Failure(); logger.msg(Arc::ERROR, "REST:KILL job %s - %s", id, failure); jobXml.NewChild("status-code") = "505"; jobXml.NewChild("reason") = (!failure.empty()) ? failure : "Job could not be canceled"; jobXml.NewChild("id") = id; return false; } jobXml.NewChild("status-code") = "202"; jobXml.NewChild("reason") = "Queued for killing"; jobXml.NewChild("id") = id; return true; } static bool processJobClean(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml) { ARexJob job(id,config,logger); if(!job) { // There is no such job std::string failure = job.Failure(); logger.msg(Arc::ERROR, "REST:CLEAN job %s - %s", id, failure); jobXml.NewChild("status-code") = "404"; jobXml.NewChild("reason") = (!failure.empty()) ? failure : "Job not found"; jobXml.NewChild("id") = id; return false; } if(!job.Clean()) { std::string failure = job.Failure(); logger.msg(Arc::ERROR, "REST:CLEAN job %s - %s", id, failure); jobXml.NewChild("status-code") = "505"; jobXml.NewChild("reason") = (!failure.empty()) ? failure : "Job could not be cleaned"; jobXml.NewChild("id") = id; return false; } jobXml.NewChild("status-code") = "202"; jobXml.NewChild("reason") = "Queued for cleaning"; jobXml.NewChild("id") = id; return true; } static bool processJobRestart(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml) { ARexJob job(id,config,logger); if(!job) { // There is no such job std::string failure = job.Failure(); logger.msg(Arc::ERROR, "REST:RESTART job %s - %s", id, failure); jobXml.NewChild("status-code") = "404"; jobXml.NewChild("reason") = (!failure.empty()) ? failure : "Job not found"; jobXml.NewChild("id") = id; return false; } if(!job.Resume()) { std::string failure = job.Failure(); logger.msg(Arc::ERROR, "REST:RESTART job %s - %s", id, failure); jobXml.NewChild("status-code") = "505"; jobXml.NewChild("reason") = (!failure.empty()) ? 
failure : "Job could not be resumed"; jobXml.NewChild("id") = id; return false; } jobXml.NewChild("status-code") = "202"; jobXml.NewChild("reason") = "Queued for restarting"; jobXml.NewChild("id") = id; return true; } static bool processJobDelegations(Arc::Message& inmsg,ARexConfigContext& config, Arc::Logger& logger, std::string const & id, XMLNode jobXml, ARex::DelegationStores& delegation_stores) { ARexJob job(id,config,logger); if(!job) { // There is no such job std::string failure = job.Failure(); logger.msg(Arc::ERROR, "REST:RESTART job %s - %s", id, failure); jobXml.NewChild("status-code") = "404"; jobXml.NewChild("reason") = (!failure.empty()) ? failure : "Job not found"; jobXml.NewChild("id") = id; return false; } jobXml.NewChild("status-code") = "200"; jobXml.NewChild("reason") = "OK"; jobXml.NewChild("id") = id; std::list ids = delegation_stores[config.GmConfig().DelegationDir()].ListLockedCredIDs(id,config.GridName()); for(std::list::iterator itId = ids.begin(); itId != ids.end(); ++itId) { jobXml.NewChild("delegation_id") = *itId; } return true; } Arc::MCC_Status ARexRest::processJob(Arc::Message& inmsg,Arc::Message& outmsg, ProcessingContext& context,std::string const & id) { std::string subResource; if(GetPathToken(context.subpath, subResource)) { context.processed += subResource; context.processed += "/"; if(subResource == "session") { return processJobSessionDir(inmsg,outmsg,context,id); } else if(subResource == "diagnose") { return processJobControlDir(inmsg,outmsg,context,id); } return HTTPFault(inmsg,outmsg,404,"Wrong job sub-resource requested"); } return HTTPFault(inmsg,outmsg,404,"Missing job sub-resource"); } // ------------------------------- PER-JOB SESSION DIR ------------------------------------- static bool write_file(Arc::FileAccess& h,char* buf,size_t size) { for(;size>0;) { ssize_t l = h.fa_write(buf,size); if(l == -1) return false; size-=l; buf+=l; }; return true; } static Arc::MCC_Status PutJobFile(Arc::Message& inmsg, Arc::Message& outmsg, Arc::FileAccess& file, std::string& errstr, Arc::PayloadStreamInterface& stream, FileChunks& fc, bool& complete) { complete = false; // TODO: Use memory mapped file to minimize number of in memory copies const int bufsize = 1024*1024; if(!fc.Size()) fc.Size(stream.Size()); off_t pos = stream.Pos(); if(file.fa_lseek(pos,SEEK_SET) != pos) { std::string err = Arc::StrError(); errstr = "failed to set position of file to "+Arc::tostring(pos)+" - "+err; return HTTPFault(inmsg, outmsg, 500, "Error seeking to specified position in file"); }; char* buf = new char[bufsize]; if(!buf) { errstr = "failed to allocate memory"; return HTTPFault(inmsg, outmsg, 500, "Error allocating memory"); }; bool got_something = false; for(;;) { int size = bufsize; if(!stream.Get(buf,size)) break; if(size > 0) got_something = true; if(!write_file(file,buf,size)) { std::string err = Arc::StrError(); delete[] buf; errstr = "failed to write to file - "+err; return HTTPFault(inmsg, outmsg, 500, "Error writing to file"); }; if(size) fc.Add(pos,size); pos+=size; }; delete[] buf; // Due to limitation of PayloadStreamInterface it is not possible to // directly distingush between zero sized file and file with undefined // size. But by applying some dynamic heuristics it is possible. // TODO: extend/modify PayloadStreamInterface. 
if((stream.Size() == 0) && (stream.Pos() == 0) && (!got_something)) { complete = true; } return HTTPResponse(inmsg,outmsg); } static Arc::MCC_Status PutJobFile(Arc::Message& inmsg, Arc::Message& outmsg, Arc::FileAccess& file, std::string& errstr, Arc::PayloadRawInterface& buf, FileChunks& fc, bool& complete) { complete = false; bool got_something = false; if(!fc.Size()) fc.Size(buf.Size()); for(int n = 0;;++n) { char* sbuf = buf.Buffer(n); if(sbuf == NULL) break; off_t offset = buf.BufferPos(n); off_t size = buf.BufferSize(n); if(size > 0) { got_something = true; off_t o = file.fa_lseek(offset,SEEK_SET); if(o != offset) { std::string err = Arc::StrError(); errstr = "failed to set position of file to "+Arc::tostring(offset)+" - "+err; return HTTPFault(inmsg, outmsg, 500, "Error seeking to specified position"); }; if(!write_file(file,sbuf,size)) { std::string err = Arc::StrError(); errstr = "failed to write to file - "+err; return HTTPFault(inmsg, outmsg, 500, "Error writing file"); }; if(size) fc.Add(offset,size); }; }; if((buf.Size() == 0) && (!got_something)) { complete = true; } return HTTPResponse(inmsg,outmsg); } static void STATtoPROP(std::string const& name, struct stat& st, std::list requestProps, XMLNode& response) { XMLNode propstat = response.NewChild("d:propstat"); XMLNode prop = propstat.NewChild("d:prop"); propstat.NewChild("d:status") = "HTTP/1.1 200 OK"; prop.NewChild("d:displayname") = name; if(S_ISDIR(st.st_mode)) { prop.NewChild("d:resourcetype").NewChild("d:collection"); } else { prop.NewChild("d:resourcetype"); prop.NewChild("d:getcontentlength") = Arc::tostring(st.st_size); }; prop.NewChild("d:getlastmodified") = Arc::Time(st.st_mtime).str(Arc::ISOTime); prop.NewChild("d:creationdate") = Arc::Time(st.st_ctime).str(Arc::ISOTime); } static void ProcessPROPFIND(Arc::FileAccess* fa, Arc::XMLNode& multistatus,URL const& url,std::string const& path,uid_t uid,gid_t gid,int depth) { std::string name; std::size_t pos = path.rfind('/'); if(pos == std::string::npos) name = path; else name = path.substr(pos+1); XMLNode response = multistatus.NewChild("d:response"); std::string hrefStr = url.fullstr(); struct stat st; if(!fa->fa_stat(path,st)) { // Not found response.NewChild("d:href") = hrefStr; response.NewChild("d:status") = "HTTP/1.1 404 Not Found"; } else if(S_ISREG(st.st_mode)) { while(!hrefStr.empty() && hrefStr[hrefStr.length()-1] == '/') hrefStr.resize(hrefStr.length()-1); response.NewChild("d:href") = hrefStr; STATtoPROP(name, st, std::list(), response); } else if(S_ISDIR(st.st_mode)) { if(!hrefStr.empty() && hrefStr[hrefStr.length()-1] != '/') hrefStr += '/'; response.NewChild("d:href") = hrefStr; STATtoPROP(name, st, std::list(), response); if(depth > 0) { if (fa->fa_opendir(path)) { std::list names; std::string name; while(fa->fa_readdir(name)) { if(name == ".") continue; if(name == "..") continue; names.push_back(name); } fa->fa_closedir(); for(std::list::iterator name = names.begin(); name != names.end(); ++name) { URL subUrl(url); subUrl.ChangePath(subUrl.Path() + "/" + *name); std::string subPath = path + "/" + *name; ProcessPROPFIND(fa,multistatus,subUrl,subPath,uid,gid,depth-1); } } } } else { // Not for this interface response.NewChild("d:href") = hrefStr; response.NewChild("d:status") = "HTTP/1.1 404 Not Found"; } } Arc::MCC_Status ARexRest::processJobSessionDir(Arc::Message& inmsg,Arc::Message& outmsg, ProcessingContext& context,std::string const & id) { class FileAccessRef { public: FileAccessRef(Arc::FileAccess* obj):obj_(obj) { } ~FileAccessRef() { if(obj_) 
{ obj_->fa_close(); obj_->fa_closedir(); Arc::FileAccess::Release(obj_); } } operator bool() const { return (obj_ == NULL); } bool operator !() const { return (obj_ == NULL); } Arc::FileAccess& operator*() { return *obj_; } Arc::FileAccess* operator->() { return obj_; } operator Arc::FileAccess*() { return obj_; } Arc::FileAccess*& get() { return obj_; } protected: Arc::FileAccess* obj_; }; // GET,HEAD,PUT,DELETE - supported for files stored in job's session directory and perform usual actions. // GET,HEAD - for directories retrieves list of stored files (consider WebDAV for format). // DELETE - for directories removes whole directory. // PUT - for directory not supported. // POST - not supported. // PATCH - for files modifies part of files (body format need to be defined, all files treated as binary, currently support non-standard PUT with ranges). // PROPFIND - list diectories, stat files. ARexConfigContext* config = ARexConfigContext::GetRutimeConfiguration(inmsg,config_,uname_,endpoint_); if(!config) { return HTTPFault(inmsg,outmsg,500,"User can't be assigned configuration"); } ARexJob job(id,*config,logger_); if(!job) { // There is no such job logger_.msg(Arc::ERROR, "REST:GET job %s - %s", id, job.Failure()); return HTTPFault(inmsg,outmsg,404,job.Failure().c_str()); } // Make sure path is correct while working with files if(!CanonicalDir(context.subpath, false, false)) return HTTPFault(inmsg,outmsg,404,"Wrong path"); if((context.method == "GET") || (context.method == "HEAD")) { // File or folder FileAccessRef dir(job.OpenDir(context.subpath)); if(dir) { XMLNode listXml(""); std::string dirpath = job.GetFilePath(context.subpath); for(;;) { std::string fileName; if(!dir->fa_readdir(fileName)) break; if(fileName == ".") continue; if(fileName == "..") continue; std::string fpath = dirpath+"/"+fileName; struct stat st; if(dir->fa_lstat(fpath.c_str(),st)) { if(S_ISREG(st.st_mode)) { XMLNode itemXml = listXml.NewChild("file"); itemXml = fileName; itemXml.NewAttribute("size") = Arc::tostring(st.st_size); } else if(S_ISDIR(st.st_mode)) { XMLNode itemXml = listXml.NewChild("dir"); itemXml = fileName; }; }; }; return HTTPResponse(inmsg,outmsg,listXml); }; FileAccessRef file(job.OpenFile(context.subpath,true,false)); if(file) { // File or similar Arc::MCC_Status r = HTTPResponseFile(inmsg,outmsg,file.get(),"application/octet-stream"); return r; } return HTTPFault(inmsg,outmsg,404,"Not found"); } else if(context.method == "PUT") { // Check for proper payload Arc::MessagePayload* payload = inmsg.Payload(); Arc::PayloadStreamInterface* stream = dynamic_cast(payload); Arc::PayloadRawInterface* buf = dynamic_cast(payload); if((!stream) && (!buf)) { logger_.msg(Arc::ERROR, "REST:PUT job %s: file %s: there is no payload", id, context.subpath); return HTTPFault(inmsg, outmsg, 500, "Missing payload"); }; // Prepare access to file FileAccessRef file(job.CreateFile(context.subpath)); if(!file) { // TODO: report something logger_.msg(Arc::ERROR, "%s: put file %s: failed to create file: %s", job.ID(), context.subpath, job.Failure()); return HTTPFault(inmsg, outmsg, 500, "Error creating file"); }; FileChunksRef fc(files_chunks_.Get(job.GetFilePath(context.subpath))); Arc::MCC_Status r; std::string err; bool complete(false); if(stream) { r = PutJobFile(inmsg,outmsg,*file,err,*stream,*fc,complete); } else { r = PutJobFile(inmsg,outmsg,*file,err,*buf,*fc,complete); } if(!r) { logger_.msg(Arc::ERROR, "HTTP:PUT %s: put file %s: %s", job.ID(), context.subpath, err); } else { if(complete || fc->Complete()) 
job.ReportFileComplete(context.subpath); } return r; } else if(context.method == "DELETE") { std::string fpath = job.GetFilePath(context.subpath); if(!fpath.empty()) { if((!FileDelete(fpath,job.UID(),job.GID())) && (!DirDelete(fpath,true,job.UID(),job.GID()))) { return HTTPFault(inmsg,outmsg,500,"Failed to delete"); } } return HTTPDELETEResponse(inmsg,outmsg); } else if(context.method == "PROPFIND") { int depth = 10; // infinite with common sense std::string depthStr = inmsg.Attributes()->get("HTTP:depth"); if(depthStr == "0") depth = 0; else if(depthStr == "1") depth = 1; std::string fpath = job.GetFilePath(context.subpath); URL url(inmsg.Attributes()->get("HTTP:ENDPOINT")); Arc::XMLNode multistatus(""); FileAccessRef fa(Arc::FileAccess::Acquire()); if(fa) ProcessPROPFIND(fa,multistatus,url,fpath,job.UID(),job.GID(),depth); std::string payload; multistatus.GetDoc(payload); return HTTPResponse(inmsg,outmsg,payload,"application/xml"); }; return HTTPFault(inmsg,outmsg,501,"Not Implemented"); } // ------------------------------- PER-JOB CNTROL DIR ---------------------------- Arc::MCC_Status ARexRest::processJobControlDir(Arc::Message& inmsg,Arc::Message& outmsg, ProcessingContext& context,std::string const & id) { // GET - return the content of file in A-REX control directory for requested jobID // HEAD - supported. // PUT, POST, DELETE - not supported. char const * const mimeText = "text/plain"; char const * const mimeXml = "application/xml"; struct resourceDef { char const * const name; char const * const mime; }; resourceDef const allowedSubResources[] = { { "failed", mimeText }, { "local", mimeText }, { "errors", mimeText }, { "description", mimeText }, { "diag", mimeText }, { "comment", mimeText }, { "status", mimeText }, { "acl", mimeText }, { "xml", mimeXml }, { "input", mimeText }, { "output", mimeText }, { "input_status", mimeText }, { "output_status", mimeText }, { "statistics", mimeText }, { NULL, NULL } }; std::string subResource = context.subpath; resourceDef const * allowedSubResource = allowedSubResources; for(; allowedSubResource->name; ++allowedSubResource) { if(subResource == allowedSubResource->name) break; } if(!(allowedSubResource->name)) return HTTPFault(inmsg,outmsg,404,"Diagnostic item not found"); if((context.method == "GET") || (context.method == "HEAD")) { ARexConfigContext* config = ARexConfigContext::GetRutimeConfiguration(inmsg,config_,uname_,endpoint_); if(!config) { return HTTPFault(inmsg,outmsg,500,"User can't be assigned configuration"); } ARexJob job(id,*config,logger_); if(!job) { // There is no such job logger_.msg(Arc::ERROR, "REST:GET job %s - %s", id, job.Failure()); return HTTPFault(inmsg,outmsg,404,job.Failure().c_str()); } int file = job.OpenLogFile(subResource); if(file == -1) return HTTPFault(inmsg,outmsg,404,"Not found"); Arc::MCC_Status r = HTTPResponseFile(inmsg,outmsg,file,allowedSubResource->mime); if(file != -1) ::close(file); return r; } logger_.msg(Arc::VERBOSE, "process: method %s is not supported for subpath %s",context.method,context.processed); return HTTPFault(inmsg,outmsg,501,"Not Implemented"); } /* Arc::MCC_Status ARexRest::processJobDelegations(Arc::Message& inmsg,Arc::Message& outmsg, ProcessingContext& context,std::string const & id) { std::string delegationId; if(GetPathToken(context.subpath, delegationId)) { context.processed += delegationId; context.processed += "/"; return processJobDelegation(inmsg,outmsg,context,id,delegationId); } ARexConfigContext* config = 
ARexConfigContext::GetRutimeConfiguration(inmsg,config_,uname_,endpoint_); if(!config) { return HTTPFault(inmsg,outmsg,500,"User can't be assigned configuration"); } ARexJob job(id,*config,logger_); if(!job) { // There is no such job logger_.msg(Arc::ERROR, "REST:GET job %s - %s", id, job.Failure()); return HTTPFault(inmsg,outmsg,404,job.Failure().c_str()); } // GET - retrieves list of delegations belonging to specified job // HEAD - supported. if((context.method == "GET") || (context.method == "HEAD")) { XMLNode listXml(""); std::list ids = delegation_stores_[config_.DelegationDir()].ListLockedCredIDs(job.ID(),config->GridName()); for(std::list::iterator itId = ids.begin(); itId != ids.end(); ++itId) { listXml.NewChild("delegation").NewChild("id") = *itId; } return HTTPResponse(inmsg, outmsg, listXml); } logger_.msg(Arc::VERBOSE, "process: method %s is not supported for subpath %s",context.method,context.processed); return HTTPFault(inmsg,outmsg,501,"Not Implemented"); } Arc::MCC_Status ARexRest::processJobDelegation(Arc::Message& inmsg,Arc::Message& outmsg, ProcessingContext& context,std::string const & jobId,std::string const & delegId) { if(!context.subpath.empty()) return HTTPFault(inmsg,outmsg,404,"Not Found"); // no more sub-resources ARexConfigContext* config = ARexConfigContext::GetRutimeConfiguration(inmsg,config_,uname_,endpoint_); if(!config) { return HTTPFault(inmsg,outmsg,500,"User can't be assigned configuration"); } // GET - returns public part of the stored delegation as application/x-pem-file. // HEAD - supported. if((context.method == "GET") || (context.method == "HEAD")) { std::string credentials; if(!delegation_stores_[config_.DelegationDir()].GetDeleg(delegId, config->GridName(), credentials)) { return HTTPFault(inmsg,outmsg,404,"No delegation found"); } return HTTPResponse(inmsg, outmsg, credentials, "application/x-pem-file"); } logger_.msg(Arc::VERBOSE, "process: method %s is not supported for subpath %s",context.method,context.processed); return HTTPFault(inmsg,outmsg,501,"Not Implemented"); } */ nordugrid-arc-6.14.0/src/services/a-rex/rest/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153434024007 xustar000000000000000030 mtime=1638455068.477097811 30 atime=1638455089.695416627 30 ctime=1638455100.440578078 nordugrid-arc-6.14.0/src/services/a-rex/rest/Makefile.in0000644000175000002070000007455314152153434024012 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/a-rex/rest DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = LTLIBRARIES = $(noinst_LTLIBRARIES) libarexrest_la_DEPENDENCIES = \ $(top_builddir)/src/hed/libs/common/libarccommon.la am_libarexrest_la_OBJECTS = libarexrest_la-rest.lo libarexrest_la_OBJECTS = $(am_libarexrest_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libarexrest_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(libarexrest_la_CXXFLAGS) $(CXXFLAGS) \ $(libarexrest_la_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) 
$(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libarexrest_la_SOURCES) DIST_SOURCES = $(libarexrest_la_SOURCES) RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = 
@EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = 
@PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ SUBDIRS = DIST_SUBDIRS = noinst_LTLIBRARIES = libarexrest.la libarexrest_la_SOURCES = rest.cpp rest.h libarexrest_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) libarexrest_la_LIBADD = \ $(top_builddir)/src/hed/libs/common/libarccommon.la libarexrest_la_LDFLAGS = -no-undefined 
-avoid-version -module all: all-recursive .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/a-rex/rest/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/a-rex/rest/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstLTLIBRARIES: -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) @list='$(noinst_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libarexrest.la: $(libarexrest_la_OBJECTS) $(libarexrest_la_DEPENDENCIES) $(EXTRA_libarexrest_la_DEPENDENCIES) $(AM_V_CXXLD)$(libarexrest_la_LINK) $(libarexrest_la_OBJECTS) $(libarexrest_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libarexrest_la-rest.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libarexrest_la-rest.lo: rest.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarexrest_la_CXXFLAGS) 
$(CXXFLAGS) -MT libarexrest_la-rest.lo -MD -MP -MF $(DEPDIR)/libarexrest_la-rest.Tpo -c -o libarexrest_la-rest.lo `test -f 'rest.cpp' || echo '$(srcdir)/'`rest.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libarexrest_la-rest.Tpo $(DEPDIR)/libarexrest_la-rest.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='rest.cpp' object='libarexrest_la-rest.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libarexrest_la_CXXFLAGS) $(CXXFLAGS) -c -o libarexrest_la-rest.lo `test -f 'rest.cpp' || echo '$(srcdir)/'`rest.cpp mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(LTLIBRARIES) installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \ mostlyclean-am distclean: distclean-recursive -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool \ clean-noinstLTLIBRARIES cscopelist-am ctags ctags-am distclean \ distclean-compile distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/a-rex/rest/PaxHeaders.30264/rest.h0000644000000000000000000000013214152153376023075 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455100.443578123 nordugrid-arc-6.14.0/src/services/a-rex/rest/rest.h0000644000175000002070000000547314152153376023073 0ustar00mockbuildmock00000000000000#ifndef __ARC_AREX_REST_H__ #define __ARC_AREX_REST_H__ #include #include #include #include #include "../grid-manager/conf/GMConfig.h" namespace ARex { class ARexRest { public: ARexRest(Arc::Config *cfg, Arc::PluginArgument *parg, GMConfig& config, ARex::DelegationStores& delegation_stores, unsigned int& all_jobs_count); virtual ~ARexRest(void); Arc::MCC_Status process(Arc::Message& inmsg,Arc::Message& outmsg); private: class ProcessingContext { public: std::string subpath; std::string method; std::string processed; std::multimap query; std::string operator[](char const * key) const; }; Arc::Logger logger_; std::string uname_; std::string endpoint_; FileChunksList files_chunks_; ARex::GMConfig& config_; ARex::DelegationStores& delegation_stores_; unsigned int& all_jobs_count_; Arc::MCC_Status processVersions(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context); Arc::MCC_Status processGeneral(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context); Arc::MCC_Status processInfo(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context); Arc::MCC_Status processJobs(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context); Arc::MCC_Status processDelegations(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context); Arc::MCC_Status processDelegation(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context, std::string const & id); Arc::MCC_Status processJob(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context, std::string const & id); Arc::MCC_Status processJobSessionDir(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context, std::string const & id); Arc::MCC_Status processJobControlDir(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context, std::string const & id); Arc::MCC_Status processJobSub(Arc::Message& inmsg,Arc::Message& outmsg,ProcessingContext& context, std::string const & id, std::string const & subResource); Arc::MCC_Status processJobSession(Arc::Message& inmsg,Arc::Message& outmsg, ProcessingContext& context,std::string const & id); //Arc::MCC_Status processJobDelegations(Arc::Message& inmsg,Arc::Message& outmsg, // ProcessingContext& context,std::string const & id); //Arc::MCC_Status processJobDelegation(Arc::Message& inmsg,Arc::Message& outmsg, // ProcessingContext& context,std::string const & jobId,std::string const & delegId); }; } // namespace ARex #endif nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/SQLhelpers.h0000644000000000000000000000013214152153376023165 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.500647402 30 ctime=1638455099.456563292 nordugrid-arc-6.14.0/src/services/a-rex/SQLhelpers.h0000644000175000002070000000264714152153376023163 0ustar00mockbuildmock00000000000000#ifndef __AREX_SQL_COMMON_HELPERS_H__ #define __AREX_SQL_COMMON_HELPERS_H__ #include #include #include namespace ARex { static const std::string sql_special_chars("'#\r\n\b\0",6); static const char sql_escape_char('%'); static const Arc::escape_type sql_escape_type(Arc::escape_hex); // Returns SQL-escaped string representation of argumnet inline static std::string sql_escape(const 
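// Illustrative note (not part of the original source): a minimal sketch of how the helpers in this
// header are meant to be combined when composing SQLite statements elsewhere in A-REX. Values are
// escaped on the way in and unescaped when read back; the statement text and column names below are
// hypothetical, only sql_escape()/sql_unescape() come from this header.
//   std::string dn = "O=Grid/CN=John O'Doe";
//   std::string sql = "INSERT INTO jobs(owner) VALUES ('" + sql_escape(dn) + "')";
//   ...
//   std::string stored;                          // value read back from the database
//   std::string dn_back = sql_unescape(stored);  // restores the original string
// The single quote and the other characters in sql_special_chars are replaced by '%'-prefixed hex
// sequences, so the escaped value is safe to embed between single quotes.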
std::string& str) { return Arc::escape_chars(str, sql_special_chars, sql_escape_char, false, sql_escape_type); } inline static std::string sql_escape(int num) { return Arc::tostring(num); } inline static std::string sql_escape(const Arc::Time& val) { if(val.GetTime() == -1) return ""; return Arc::escape_chars((std::string)val, sql_special_chars, sql_escape_char, false, sql_escape_type); } // Unescape SQLite returned values inline static std::string sql_unescape(const std::string& str) { return Arc::unescape_chars(str, sql_escape_char,sql_escape_type); } inline static void sql_unescape(const std::string& str, int& val) { (void)Arc::stringto(Arc::unescape_chars(str, sql_escape_char,sql_escape_type), val); } inline static void sql_unescape(const std::string& str, Arc::Time& val) { if(str.empty()) { val = Arc::Time(); return; } val = Arc::Time(Arc::unescape_chars(str, sql_escape_char,sql_escape_type)); } } // namespace ARex #endif // __AREX_SQL_COMMON_HELPERS_H__ nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/job.cpp0000644000000000000000000000013214152153376022250 xustar000000000000000030 mtime=1638455038.420646199 30 atime=1638455038.506647492 30 ctime=1638455099.405562526 nordugrid-arc-6.14.0/src/services/a-rex/job.cpp0000644000175000002070000013440514152153376022244 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include // NOTE: On Solaris errno is not working properly if cerrno is included first #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "grid-manager/conf/GMConfig.h" #include "grid-manager/jobs/GMJob.h" #include "grid-manager/jobs/ContinuationPlugins.h" #include "grid-manager/jobs/JobDescriptionHandler.h" #include "grid-manager/jobs/CommFIFO.h" #include "grid-manager/jobs/JobsList.h" #include "grid-manager/files/ControlFileHandling.h" #include "delegation/DelegationStores.h" #include "delegation/DelegationStore.h" #include "job.h" using namespace ARex; Arc::Logger ARexGMConfig::logger(Arc::Logger::getRootLogger(), "ARexGMConfig"); static std::string GetPath(std::string url){ std::string::size_type ds, ps; ds=url.find("//"); if (ds==std::string::npos) { ps=url.find("/"); } else { ps=url.find("/", ds+2); } if (ps==std::string::npos) return ""; return url.substr(ps); } ARexConfigContext* ARexConfigContext::GetRutimeConfiguration(Arc::Message& inmsg, GMConfig& gmconfig, std::string const & default_uname, std::string const & default_endpoint) { ARexConfigContext* config = NULL; Arc::MessageContextElement* mcontext = (*inmsg.Context())["arex.gmconfig"]; if(mcontext) { try { config = dynamic_cast(mcontext); logger.msg(Arc::DEBUG,"Using cached local account '%s'", config->User().Name()); } catch(std::exception& e) { }; }; if(config) return config; // TODO: do configuration detection // TODO: do mapping to local unix name std::string uname; uname=inmsg.Attributes()->get("SEC:LOCALID"); if(uname.empty()) uname=default_uname; if(uname.empty()) { if(getuid() == 0) { logger.msg(Arc::ERROR, "Will not map to 'root' account by default"); return NULL; }; struct passwd pwbuf; char buf[4096]; struct passwd* pw; if(getpwuid_r(getuid(),&pwbuf,buf,sizeof(buf),&pw) == 0) { if(pw && pw->pw_name) { uname = pw->pw_name; }; }; }; if(uname.empty()) { logger.msg(Arc::ERROR, "No local account name specified"); return NULL; }; logger.msg(Arc::DEBUG,"Using local account '%s'",uname); std::string grid_name = 
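// Illustrative note (not part of the original source): GetPath() defined above simply strips the
// scheme and authority part from a URL-like string, for example (hypothetical values):
//   GetPath("https://arex.example.org:443/arex/rest/1.0/jobs")  -> "/arex/rest/1.0/jobs"
//   GetPath("arex.example.org/arex")                            -> "/arex"
//   GetPath("https://arex.example.org")                         -> ""   (no path component)
// GetRutimeConfiguration() uses it below to rebuild the service endpoint from the TCP endpoint plus
// the path part of the HTTP endpoint when no default endpoint is configured.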
inmsg.Attributes()->get("TLS:IDENTITYDN"); if(grid_name.empty()) { // Try tokens if TLS has no information about user identity logger.msg(Arc::ERROR, "TLS provides no identity, going for OTokens"); grid_name = inmsg.Attributes()->get("OTOKENS:IDENTITYDN"); /* Below is an example on how obtained token can be exchanged. Arc::SecAttr* sattr = inmsg.Auth()->get("OTOKENS"); if(!sattr) sattr = inmsg.AuthContext()->get("OTOKENS"); if(sattr) { std::string token = sattr->get(""); if(!token.empty()) { Arc::OpenIDMetadata tokenMetadata; Arc::OpenIDMetadataFetcher metaFetcher(sattr->get("iss").c_str()); if(metaFetcher.Fetch(tokenMetadata)) { char const * tokenEndpointUrl = tokenMetadata.TokenEndpoint(); if(tokenEndpointUrl) { Arc::OpenIDTokenFetcher tokenFetcher(tokenEndpointUrl, "c85e84e8-c9ea-4ecc-8123-070df2c10e0e", "dRnakcoaT-9YA6T1LzeLAqeEu7jLBxeTWFyQMbJ6BWZonjEcE060-dn8EWAfpZmPq3x7oTjUnu6mamYylBaNhw"); std::list scopes; scopes.push_back("storage.read:/"); scopes.push_back("storage.create:/"); std::list audiences; audiences.push_back("se1.example"); audiences.push_back("se2.example"); Arc::OpenIDTokenFetcher::TokenList tokens; if(tokenFetcher.Fetch("urn:ietf:params:oauth:grant-type:token-exchange", token, scopes, audiences, tokens)) { for(auto const & token : tokens) { logger_.msg(Arc::ERROR, "Token response: %s : %s", token.first, token.second); }; } else logger_.msg(Arc::ERROR, "Failed to fetch token"); } else logger_.msg(Arc::ERROR, "Token metadata contains no token endpoint");; } else logger_.msg(Arc::ERROR, "Failed to fetch token metadata"); } else logger_.msg(Arc::ERROR, "There is no token in sec attr"); } else logger_.msg(Arc::ERROR, "There is no otoken sec attr"); */ }; std::string endpoint = default_endpoint; if(endpoint.empty()) { std::string http_endpoint = inmsg.Attributes()->get("HTTP:ENDPOINT"); std::string tcp_endpoint = inmsg.Attributes()->get("TCP:ENDPOINT"); bool https_proto = ((inmsg.Auth() && (inmsg.Auth()->get("TLS"))) || (inmsg.AuthContext() && (inmsg.AuthContext()->get("TLS")))); endpoint = tcp_endpoint; if(https_proto) { endpoint="https"+endpoint; } else { endpoint="http"+endpoint; }; endpoint+=GetPath(http_endpoint); }; config=new ARexConfigContext(gmconfig,uname,grid_name,endpoint); if(config) { if(*config) { inmsg.Context()->Add("arex.gmconfig",config); } else { delete config; config=NULL; logger.msg(Arc::ERROR, "Failed to acquire A-REX's configuration"); }; }; return config; } static bool match_lists(const std::list& list1, const std::list& list2, std::string& matched) { for(std::list::const_iterator l1 = list1.begin(); l1 != list1.end(); ++l1) { for(std::list::const_iterator l2 = list2.begin(); l2 != list2.end(); ++l2) { if((*l1) == (*l2)) { matched = *l1; return true; }; }; }; return false; } static bool match_lists(const std::list >& list1, const std::list& list2, std::string& matched) { for(std::list >::const_iterator l1 = list1.begin(); l1 != list1.end(); ++l1) { for(std::list::const_iterator l2 = list2.begin(); l2 != list2.end(); ++l2) { if((l1->second) == (*l2)) { matched = l1->second; return l1->first; }; }; }; return false; } static bool match_groups(std::list const & groups, ARexGMConfig& config) { std::string matched_group; if(!groups.empty()) { for(std::list::iterator a = config.beginAuth();a!=config.endAuth();++a) { if(*a) { // This security attribute collected information about user's authorization groups Arc::SecAttr* sattr = (*a)->get("ARCLEGACY"); if(sattr) { if(match_lists(groups, sattr->getAll("GROUP"), matched_group)) { return true; }; }; }; }; }; 
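// Illustrative note (not part of the original source): match_lists()/match_groups() above implement
// a plain intersection test between the authorization groups configured for the service and the
// groups the ARCLEGACY security attribute collected for the client. For example (hypothetical group
// names), with configured groups {"admins","atlas"} and client groups {"atlas"}, match_lists() sets
// matched to "atlas" and returns true, so match_groups() grants access; with no common element the
// function falls through to the "return false" just below.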
return false; } static bool match_groups(std::list > const & groups, ARexGMConfig& config) { std::string matched_group; if(!groups.empty()) { for(std::list::iterator a = config.beginAuth();a!=config.endAuth();++a) { if(*a) { // This security attribute collected information about user's authorization groups Arc::SecAttr* sattr = (*a)->get("ARCLEGACY"); if(sattr) { if(match_lists(groups, sattr->getAll("GROUP"), matched_group)) { return true; }; }; }; }; }; return false; } ARexGMConfig::ARexGMConfig(const GMConfig& config,const std::string& uname,const std::string& grid_name,const std::string& service_endpoint): config_(config),user_(uname),readonly_(false),grid_name_(grid_name),service_endpoint_(service_endpoint) { //if(!InitEnvironment(configfile)) return; // const char* uname = user_s.get_uname(); //if((bool)job_map) uname=job_map.unix_name(); if(!user_) { logger.msg(Arc::WARNING, "Cannot handle local user %s", uname); return; } // Do substitutions on session dirs session_roots_ = config_.SessionRoots(); for (std::vector::iterator session = session_roots_.begin(); session != session_roots_.end(); ++session) { config_.Substitute(*session, user_); } session_roots_non_draining_ = config_.SessionRootsNonDraining(); for (std::vector::iterator session = session_roots_non_draining_.begin(); session != session_roots_non_draining_.end(); ++session) { config_.Substitute(*session, user_); } if(!config_.AREXEndpoint().empty()) service_endpoint_ = config_.AREXEndpoint(); } static ARexJobFailure setfail(JobReqResult res) { switch(res.result_type) { case JobReqSuccess: return ARexJobNoError; case JobReqInternalFailure: return ARexJobInternalError; case JobReqSyntaxFailure: return ARexJobDescriptionSyntaxError; case JobReqUnsupportedFailure: return ARexJobDescriptionUnsupportedError; case JobReqMissingFailure: return ARexJobDescriptionMissingError; case JobReqLogicalFailure: return ARexJobDescriptionLogicalError; }; return ARexJobInternalError; } bool ARexJob::is_allowed(bool fast) { allowed_to_see_=false; allowed_to_maintain_=false; // Checking user's grid name against owner if(config_.GridName() == job_.DN) { allowed_to_see_=true; allowed_to_maintain_=true; return true; }; if(fast) return true; // Do fine-grained authorization requested by job's owner if(config_.beginAuth() == config_.endAuth()) return true; std::string acl; if(!job_acl_read_file(id_,config_.GmConfig(),acl)) return true; // safe to ignore if(acl.empty()) return true; // No policy defiled - only owner allowed // Identify and parse policy ArcSec::EvaluatorLoader eval_loader; Arc::AutoPointer policy(eval_loader.getPolicy(ArcSec::Source(acl))); if(!policy) { logger_.msg(Arc::VERBOSE, "%s: Failed to parse user policy", id_); return true; }; Arc::AutoPointer eval(eval_loader.getEvaluator(policy.Ptr())); if(!eval) { logger_.msg(Arc::VERBOSE, "%s: Failed to load evaluator for user policy ", id_); return true; }; std::string policyname = policy->getName(); if((policyname.length() > 7) && (policyname.substr(policyname.length()-7) == ".policy")) { policyname.resize(policyname.length()-7); }; if(policyname == "arc") { // Creating request - directly with XML // Creating top of request document Arc::NS ns; ns["ra"]="http://www.nordugrid.org/schemas/request-arc"; Arc::XMLNode request(ns,"ra:Request"); // Collect all security attributes for(std::list::iterator a = config_.beginAuth();a!=config_.endAuth();++a) { if(*a) (*a)->Export(Arc::SecAttr::ARCAuth,request); }; // Leave only client identities for(Arc::XMLNode item = 
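// Illustrative note (not part of the original source): is_allowed() above derives Read/Modify rights
// from a per-job ACL stored with the job. For the "gacl" policy type a minimal document granting
// read-only access would look roughly like (identity elements are defined by the GACL format and not
// by this file, so they are only hinted at here):
//   <gacl> <entry> [client identity entries] <allow><read/></allow> </entry> </gacl>
// The evaluation below builds a request from the exported client identities and rewrites each
// <entry> twice, first asking for <read> (mapped to allowed_to_see_) and then for <write> (mapped to
// allowed_to_maintain_).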
request["RequestItem"];(bool)item;++item) { for(Arc::XMLNode a = item["Action"];(bool)a;a=item["Action"]) a.Destroy(); for(Arc::XMLNode r = item["Resource"];(bool)r;r=item["Resource"]) r.Destroy(); }; // Fix namespace request.Namespaces(ns); // Create A-Rex specific action // TODO: make helper classes for such operations Arc::XMLNode item = request["ra:RequestItem"]; if(!item) item=request.NewChild("ra:RequestItem"); // Possible operations are Modify and Read Arc::XMLNode action; action=item.NewChild("ra:Action"); action=JOB_POLICY_OPERATION_READ; action.NewAttribute("Type")="string"; action.NewAttribute("AttributeId")=JOB_POLICY_OPERATION_URN; action=item.NewChild("ra:Action"); action=JOB_POLICY_OPERATION_MODIFY; action.NewAttribute("Type")="string"; action.NewAttribute("AttributeId")=JOB_POLICY_OPERATION_URN; // Evaluating policy ArcSec::Response *resp = eval->evaluate(request,policy.Ptr()); // Analyzing response in order to understand which operations are allowed if(!resp) return true; // Not authorized // Following should be somehow made easier ArcSec::ResponseList& rlist = resp->getResponseItems(); for(int n = 0; nres != ArcSec::DECISION_PERMIT) continue; if(!(ritem->reqtp)) continue; for(ArcSec::Action::iterator a = ritem->reqtp->act.begin();a!=ritem->reqtp->act.end();++a) { ArcSec::RequestAttribute* attr = *a; if(!attr) continue; ArcSec::AttributeValue* value = attr->getAttributeValue(); if(!value) continue; std::string action = value->encode(); if(action == "Read") allowed_to_see_=true; if(action == "Modify") allowed_to_maintain_=true; }; }; } else if(policyname == "gacl") { // Creating request - directly with XML Arc::NS ns; Arc::XMLNode request(ns,"gacl"); // Collect all security attributes for(std::list::iterator a = config_.beginAuth();a!=config_.endAuth();++a) { if(*a) (*a)->Export(Arc::SecAttr::GACL,request); }; // Leave only client identities int entries = 0; for(Arc::XMLNode entry = request["entry"];(bool)entry;++entry) { for(Arc::XMLNode a = entry["allow"];(bool)a;a=entry["allow"]) a.Destroy(); for(Arc::XMLNode a = entry["deny"];(bool)a;a=entry["deny"]) a.Destroy(); ++entries; }; if(!entries) request.NewChild("entry"); // Evaluate every action separately for(Arc::XMLNode entry = request["entry"];(bool)entry;++entry) { entry.NewChild("allow").NewChild("read"); }; ArcSec::Response *resp; resp=eval->evaluate(request,policy.Ptr()); if(resp) { ArcSec::ResponseList& rlist = resp->getResponseItems(); for(int n = 0; nres != ArcSec::DECISION_PERMIT) continue; allowed_to_see_=true; break; }; }; for(Arc::XMLNode entry = request["entry"];(bool)entry;++entry) { entry["allow"].Destroy(); entry.NewChild("allow").NewChild("write"); }; resp=eval->evaluate(request,policy.Ptr()); if(resp) { ArcSec::ResponseList& rlist = resp->getResponseItems(); for(int n = 0; nres != ArcSec::DECISION_PERMIT) continue; allowed_to_maintain_=true; break; }; }; // TODO: , } else { logger_.msg(Arc::VERBOSE, "%s: Unknown user policy '%s'", id_, policyname); }; return true; } ARexJob::ARexJob(const std::string& id,ARexGMConfig& config,Arc::Logger& logger,bool fast_auth_check):id_(id),logger_(logger),config_(config),uid_(0),gid_(0) { if(id_.empty()) return; if(!config_) { id_.clear(); return; }; // Reading essential information about job if(!job_local_read_file(id_,config_.GmConfig(),job_)) { id_.clear(); return; }; // Checking if user is allowed to do anything with that job if(!is_allowed(fast_auth_check)) { id_.clear(); return; }; if(!(allowed_to_see_ || allowed_to_maintain_)) { id_.clear(); return; }; // 
Checking for presence of session dir and identifying local user id. struct stat st; if(job_.sessiondir.empty()) { id_.clear(); return; }; if(stat(job_.sessiondir.c_str(),&st) != 0) { id_.clear(); return; }; uid_ = st.st_uid; gid_ = st.st_gid; } ARexJob::ARexJob(Arc::XMLNode xmljobdesc,ARexGMConfig& config,const std::string& delegid,const std::string& clientid, Arc::Logger& logger, JobIDGenerator& idgenerator, Arc::XMLNode migration):id_(""),logger_(logger),config_(config) { std::string job_desc_str; // Make full XML doc out of subtree { Arc::XMLNode doc; xmljobdesc.New(doc); doc.GetDoc(job_desc_str); }; make_new_job(job_desc_str,delegid,clientid,idgenerator,migration); } ARexJob::ARexJob(std::string const& job_desc_str,ARexGMConfig& config,const std::string& delegid,const std::string& clientid, Arc::Logger& logger, JobIDGenerator& idgenerator, Arc::XMLNode migration):id_(""),logger_(logger),config_(config) { make_new_job(job_desc_str,delegid,clientid,idgenerator,migration); } void ARexJob::make_new_job(std::string const& job_desc_str,const std::string& delegid,const std::string& clientid,JobIDGenerator& idgenerator,Arc::XMLNode migration) { if(!config_) return; uid_ = config_.User().get_uid(); gid_ = config_.User().get_gid(); if(!config_.GmConfig().AllowNew()) { std::list const & groups = config_.GmConfig().AllowSubmit(); if(!match_groups(groups, config_)) { failure_="New job submission is not allowed"; failure_type_=ARexJobConfigurationError; return; }; }; DelegationStores* delegs = config_.GmConfig().GetDelegations(); if(!delegs) { failure_="Failed to find delegation store"; failure_type_=ARexJobInternalError; return; } DelegationStore& deleg = delegs->operator[](config_.GmConfig().DelegationDir()); // New job is created here // First get and acquire new id if(!make_job_id()) return; if((config_.GmConfig().MaxJobDescSize() > 0) && (job_desc_str.size() > config_.GmConfig().MaxJobDescSize())) { delete_job_id(); failure_="Job description is too big"; failure_type_=ARexJobConfigurationError; return; }; // Choose session directory std::string sessiondir; if (!ChooseSessionDir(id_, sessiondir)) { delete_job_id(); failure_="Failed to find valid session directory"; failure_type_=ARexJobInternalError; return; }; job_.sessiondir = sessiondir+"/"+id_; GMJob job(id_,Arc::User(uid_),job_.sessiondir,JOB_STATE_ACCEPTED); // Store description if(!job_description_write_file(job,config_.GmConfig(),job_desc_str)) { delete_job_id(); failure_="Failed to store job description"; failure_type_=ARexJobInternalError; return; }; // Analyze job description (checking, substituting, etc) JobDescriptionHandler job_desc_handler(config_.GmConfig()); Arc::JobDescription desc; JobReqResult parse_result = job_desc_handler.parse_job_req(id_,job_,desc,true); if((failure_type_=setfail(parse_result)) != ARexJobNoError) { failure_ = parse_result.failure; if(failure_.empty()) { failure_="Failed to parse job description"; failure_type_=ARexJobInternalError; }; delete_job_id(); return; }; std::string acl(parse_result.acl); if((!job_.action.empty()) && (job_.action != "request")) { failure_="Wrong action in job request: "+job_.action; failure_type_=ARexJobInternalError; delete_job_id(); return; }; // Check for proper LRMS name in request. If there is no LRMS name // in user configuration that means service is opaque frontend and // accepts any LRMS in request. 
if((!job_.lrms.empty()) && (!config_.GmConfig().DefaultLRMS().empty())) { if(job_.lrms != config_.GmConfig().DefaultLRMS()) { failure_="Requested LRMS is not supported by this service"; failure_type_=ARexJobInternalError; //failure_type_=ARexJobDescriptionLogicalError; delete_job_id(); return; }; }; if(job_.lrms.empty()) job_.lrms=config_.GmConfig().DefaultLRMS(); // Handle queue in request. // if (queue in xrsl) submit to that queue w/o modification; // elseif (no queue in xrsl and exists default queue in arc.conf) substitute default queue into xrsl and check authorisation; // elseif (no queue in xrsl and no default queue in arc.conf and VO is authorised in one of the arc.conf queues*) substitute // into xrsl the first; queue where VO is authorised in arc.conf; // else (reject); if(job_.queue.empty()) // queue in job description? job_.queue=config_.GmConfig().DefaultQueue(); // default queue in configuration bool queue_authorized = false; bool queue_matched = false; for(std::list::const_iterator q = config_.GmConfig().Queues().begin(); q != config_.GmConfig().Queues().end(); ++q) { if(!job_.queue.empty()) { if(*q != job_.queue) continue; // skip non-matcing queue }; queue_matched = true; // Check for allowed authorization group std::list > const & groups = config_.GmConfig().MatchingGroups(q->c_str()); if(groups.empty()) { queue_authorized = true; // No authorized groups assigned - all allowed } else { if(match_groups(groups, config_)) { queue_authorized = true; }; }; if(queue_authorized) { if(job_.queue.empty()) job_.queue = *q; // no queue requested - assign first authorized break; }; }; if(!queue_authorized) { // Different error messages for different job requests if(job_.queue.empty()) { failure_="Request has no queue defined and none is allowed for this user"; failure_type_=ARexJobConfigurationError; } else { if(queue_matched) { failure_="Requested queue "+job_.queue+" does not match any of available queues"; failure_type_=ARexJobInternalError; } else { failure_="Requested queue "+job_.queue+" is not allowed for this user"; failure_type_=ARexJobConfigurationError; }; }; delete_job_id(); return; }; // Check for various unsupported features if(!job_.preexecs.empty()) { failure_="Pre-executables are not supported by this service"; failure_type_=ARexJobDescriptionUnsupportedError; delete_job_id(); return; }; if(!job_.postexecs.empty()) { failure_="Post-executables are not supported by this service"; failure_type_=ARexJobDescriptionUnsupportedError; delete_job_id(); return; }; for(std::list::iterator f = desc.DataStaging.OutputFiles.begin();f != desc.DataStaging.OutputFiles.end();++f) { for(std::list::iterator t = f->Targets.begin();t != f->Targets.end();++t) { switch(t->CreationFlag) { case Arc::TargetType::CFE_DEFAULT: case Arc::TargetType::CFE_OVERWRITE: case Arc::TargetType::CFE_DONTOVERWRITE: break; default: failure_="Unsupported creation mode for Target"; failure_type_=ARexJobDescriptionUnsupportedError; delete_job_id(); return; }; }; }; // TODO: Rerun; // TODO: ExpiryTime; // TODO: ProcessingStartTime; // TODO: Priority; // TODO: Notification; // TODO: CredentialService; // TODO: AccessControl; // TODO: DryRun; // TODO: RemoteLogging // TODO: OperatingSystem; // TODO: Platform; // TODO: NetworkInfo; // TODO: IndividualPhysicalMemory; // TODO: IndividualVirtualMemory; // TODO: DiskSpaceRequirement; // TODO: SessionLifeTime; // TODO: SessionDirectoryAccess; // TODO: IndividualCPUTime; // TODO: TotalCPUTime; // TODO: IndividualWallTime; // TODO: TotalWallTime; // TODO: NodeAccess; // 
TODO: CEType; // Check that the SlotRequirements make sense. // I.e. that SlotsPerHost do not exceed total Slots // and that SlotsPerHost is a divisor of total Slots if((desc.Resources.SlotRequirement.SlotsPerHost > desc.Resources.SlotRequirement.NumberOfSlots) || (desc.Resources.SlotRequirement.NumberOfSlots % desc.Resources.SlotRequirement.SlotsPerHost != 0)) { failure_="SlotsPerHost exceeding NumberOfSlots is not supported"; failure_type_=ARexJobDescriptionUnsupportedError; delete_job_id(); return; }; if(!desc.Resources.Coprocessor.v.empty()) { failure_="Coprocessor is not supported yet."; failure_type_=ARexJobDescriptionUnsupportedError; delete_job_id(); return; }; // There may be 3 sources of delegated credentials: // 1. If job comes through EMI-ES it has delegations assigned only per file // through source and target. But ARC has extension to pass global // delegation for whole DataStaging // 2. In ARC BES extension credentials delegated as part of job creation request. // Those are provided in credentials variable // 3. If neither works and special dynamic output files @list which // have no targets and no delegations are present then any of // per file delegations is used bool need_delegation = false; // not for sure, but most probably needed std::list deleg_ids; // collection of all delegations if(!desc.DataStaging.DelegationID.empty()) { job_.delegationid = desc.DataStaging.DelegationID; // remember that special delegation deleg_ids.push_back(desc.DataStaging.DelegationID); // and store in list of all delegations } else if(!delegid.empty()) { // Have per job credentials - remember and refer by id later job_.delegationid = delegid; // remember that ad-hoc delegation deleg_ids.push_back(delegid); // and store in list of all delegations } else { // No per job delegation provided. // Check if generic delegation is needed at all. for(std::list::iterator f = desc.DataStaging.OutputFiles.begin(); f != desc.DataStaging.OutputFiles.end();++f) { if(f->Name[0] == '@') { // Dynamic file - possibly we need delegation. But we can't know till job finished. // Try to use any of provided delegations. need_delegation = true; break; }; }; }; // Collect other delegations // Delegation ids can be found in parsed job description for(std::list::iterator f = desc.DataStaging.InputFiles.begin();f != desc.DataStaging.InputFiles.end();++f) { for(std::list::iterator s = f->Sources.begin();s != f->Sources.end();++s) { if(!s->DelegationID.empty()) deleg_ids.push_back(s->DelegationID); }; }; for(std::list::iterator f = desc.DataStaging.OutputFiles.begin();f != desc.DataStaging.OutputFiles.end();++f) { for(std::list::iterator t = f->Targets.begin();t != f->Targets.end();++t) { if(!t->DelegationID.empty()) deleg_ids.push_back(t->DelegationID); }; }; if(need_delegation && job_.delegationid.empty()) { // Still need generic per job delegation if(deleg_ids.size() > 0) { // Pick up first delegation as generic one job_.delegationid = *deleg_ids.begin(); } else { // Missing most probably required delegation - play safely failure_="Dynamic output files and no delegation assigned to job are incompatible."; failure_type_=ARexJobDescriptionUnsupportedError; delete_job_id(); return; }; }; // Start local file (some local attributes are already defined at this point) /* !!!!! some parameters are unchecked here - rerun,diskspace !!!!! 
*/ job_.jobid=id_; job_.starttime=Arc::Time(); job_.DN=config_.GridName(); job_.clientname=clientid; job_.migrateactivityid=(std::string)migration["ActivityIdentifier"]; job_.forcemigration=(migration["ForceMigration"]=="true"); // BES ActivityIdentifier is global job ID idgenerator.SetLocalID(id_); job_.globalid = idgenerator.GetGlobalID(); job_.headnode = idgenerator.GetManagerURL(); job_.headhost = idgenerator.GetHostname(); job_.globalurl = idgenerator.GetJobURL(); job_.interface = idgenerator.GetInterface(); std::string certificates; job_.expiretime = time(NULL); #if 1 // For compatibility reasons during transitional period store full proxy if possible if(!job_.delegationid.empty()) { (void)deleg.GetCred(job_.delegationid, config_.GridName(), certificates); } if(!certificates.empty()) { if(!job_proxy_write_file(job,config_.GmConfig(),certificates)) { delete_job_id(); failure_="Failed to write job proxy file"; failure_type_=ARexJobInternalError; return; }; try { Arc::Credential cred(certificates,"","","","",false); job_.expiretime = cred.GetEndTime(); logger_.msg(Arc::VERBOSE, "Credential expires at %s", job_.expiretime.str()); } catch(std::exception const& e) { logger_.msg(Arc::WARNING, "Credential handling exception: %s", e.what()); }; } else #endif // Create user credentials (former "proxy") { for(std::list::iterator a = config_.beginAuth();a!=config_.endAuth();++a) { if(*a) { Arc::SecAttr* sattr = (*a)->get("TLS"); if(sattr) { certificates = sattr->get("CERTIFICATE"); if(!certificates.empty()) { certificates += sattr->get("CERTIFICATECHAIN"); if(!job_proxy_write_file(job,config_.GmConfig(),certificates)) { delete_job_id(); failure_="Failed to write job proxy file"; failure_type_=ARexJobInternalError; return; }; try { Arc::Credential cred(certificates,"","","","",false); job_.expiretime = cred.GetEndTime(); logger_.msg(Arc::VERBOSE, "Credential expires at %s", job_.expiretime.str()); } catch(std::exception const& e) { logger_.msg(Arc::WARNING, "Credential handling exception: %s", e.what()); }; break; }; }; }; }; }; // Collect authorized VOMS/VO - so far only source is ARCLEGACYPDP for(std::list::iterator a = config_.beginAuth();a!=config_.endAuth();++a) { if(*a) { Arc::SecAttr* sattr = (*a)->get("ARCLEGACYPDP"); if(sattr) { std::list voms = sattr->getAll("VOMS"); job_.voms.insert(job_.voms.end(),voms.begin(),voms.end()); std::list vo = sattr->getAll("VO"); job_.localvo.insert(job_.localvo.end(),vo.begin(),vo.end()); }; }; }; // If no authorized VOMS was identified just report those from credentials (TLS source) if(job_.voms.empty()) { for(std::list::iterator a = config_.beginAuth();a!=config_.endAuth();++a) { if(*a) { Arc::SecAttr* sattr = (*a)->get("TLS"); if(sattr) { std::list voms = sattr->getAll("VOMS"); // These attributes are in different format and need to be converted // into ordinary VOMS FQANs. 
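// (Editorial note: a minimal sketch of that conversion as a reusable helper, assuming -
// as the loop below does - that Arc::VOMSFQANFromFull() yields an empty string for
// entries it cannot convert, so empty results are simply skipped. The helper name
// fqans_from_full is hypothetical; the #if 0 guard keeps the sketch out of the build and
// it reuses the declarations already included by this file.)
#if 0
static std::list<std::string> fqans_from_full(const std::list<std::string>& full_attrs) {
  std::list<std::string> fqans;
  for(std::list<std::string>::const_iterator v = full_attrs.begin();
      v != full_attrs.end(); ++v) {
    std::string fqan = Arc::VOMSFQANFromFull(*v);
    if(!fqan.empty()) fqans.push_back(fqan); // unconvertible entries are dropped
  }
  return fqans;
}
#endif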
for(std::list::iterator v = voms.begin();v!=voms.end();++v) { std::string fqan = Arc::VOMSFQANFromFull(*v); if(!fqan.empty()) { job_.voms.insert(job_.voms.end(),fqan); }; }; }; }; }; }; // If still no VOMS information is available take forced one from configuration if(job_.voms.empty()) { std::string forced_voms = config_.GmConfig().ForcedVOMS(job_.queue.c_str()); if(forced_voms.empty()) forced_voms = config_.GmConfig().ForcedVOMS(); if(!forced_voms.empty()) { job_.voms.push_back(forced_voms); }; }; // Write local file if(!job_local_write_file(job,config_.GmConfig(),job_)) { delete_job_id(); failure_="Failed to store internal job description"; failure_type_=ARexJobInternalError; return; }; // Write grami file if(!job_desc_handler.write_grami(desc,job,NULL)) { delete_job_id(); failure_="Failed to create grami file"; failure_type_=ARexJobInternalError; return; }; // Write ACL file if(!acl.empty()) { if(!job_acl_write_file(id_,config_.GmConfig(),acl)) { delete_job_id(); failure_="Failed to process/store job ACL"; failure_type_=ARexJobInternalError; return; }; }; // Call authentication/authorization plugin/exec { // talk to external plugin to ask if we can proceed std::list results; ContinuationPlugins* plugins = config_.GmConfig().GetContPlugins(); if(plugins) plugins->run(job,config_.GmConfig(),results); std::list::iterator result = results.begin(); while(result != results.end()) { // analyze results if(result->action == ContinuationPlugins::act_fail) { delete_job_id(); failure_="Job is not allowed by external plugin: "+result->response; failure_type_=ARexJobInternalError; return; } else if(result->action == ContinuationPlugins::act_log) { // Scream but go ahead logger_.msg(Arc::WARNING, "Failed to run external plugin: %s", result->response); } else if(result->action == ContinuationPlugins::act_pass) { // Just continue if(result->response.length()) { logger_.msg(Arc::INFO, "Plugin response: %s", result->response); }; } else { delete_job_id(); failure_="Failed to pass external plugin: "+result->response; failure_type_=ARexJobInternalError; return; }; ++result; }; }; /*@ // Make access to filesystem on behalf of local user if(cred_plugin && (*cred_plugin)) { job_subst_t subst_arg; subst_arg.user=user; subst_arg.job=&job_id; subst_arg.reason="new"; // run external plugin to acquire non-unix local credentials if(!cred_plugin->run(job_subst,&subst_arg)) { olog << "Failed to run plugin" << std::endl; delete_job_id(); failure_type_=ARexJobInternalError; error_description="Failed to obtain external credentials"; return 1; }; if(cred_plugin->result() != 0) { olog << "Plugin failed: " << cred_plugin->result() << std::endl; delete_job_id(); error_description="Failed to obtain external credentials"; failure_type_=ARexJobInternalError; return 1; }; }; */ // Create session directory if(!config_.GmConfig().CreateSessionDirectory(job.SessionDir(), job.get_user())) { delete_job_id(); failure_="Failed to create session directory"; failure_type_=ARexJobInternalError; return; }; // Create input status file to tell downloader we // are handling input in clever way. job_input_status_add_file(job,config_.GmConfig()); // Create status file (do it last so GM picks job up here) if(!job_state_write_file(job,config_.GmConfig(),JOB_STATE_ACCEPTED,false)) { delete_job_id(); failure_="Failed registering job in A-REX"; failure_type_=ARexJobInternalError; return; }; // Put lock on all delegated credentials of this job. 
// Because same delegation id can be used multiple times remove // duplicates to avoid adding multiple identical locking records. deleg_ids.sort(); deleg_ids.unique(); deleg.LockCred(id_,deleg_ids,config_.GridName()); // Tell main loop new job has arrived CommFIFO::Signal(config_.GmConfig().ControlDir(),id_); return; } bool ARexJob::GetDescription(Arc::XMLNode& xmljobdesc) { if(id_.empty()) return false; std::string sdesc; if(!job_description_read_file(id_,config_.GmConfig(),sdesc)) return false; Arc::XMLNode xdesc(sdesc); if(!xdesc) return false; xmljobdesc.Replace(xdesc); return true; } bool ARexJob::Cancel(void) { if(id_.empty()) return false; GMJob job(id_,Arc::User(uid_)); if(!job_cancel_mark_put(job,config_.GmConfig())) return false; CommFIFO::Signal(config_.GmConfig().ControlDir(),id_); return true; } bool ARexJob::Clean(void) { if(id_.empty()) return false; GMJob job(id_,Arc::User(uid_)); if(!job_clean_mark_put(job,config_.GmConfig())) return false; CommFIFO::Signal(config_.GmConfig().ControlDir(),id_); return true; } bool ARexJob::Resume(void) { if(id_.empty()) return false; if(job_.failedstate.length() == 0) { // Job can't be restarted. failure_="Job has not failed"; failure_type_=ARexJobDescriptionLogicalError; return false; }; if(job_.reruns <= 0) { // Job run out of number of allowed retries. failure_="No more restarts allowed"; failure_type_=ARexJobDescriptionLogicalError; return false; }; if(!job_restart_mark_put(GMJob(id_,Arc::User(uid_)),config_.GmConfig())) { // Failed to report restart request. failure_="Failed to report internal restart request"; failure_type_=ARexJobInternalError; return false; }; CommFIFO::Signal(config_.GmConfig().ControlDir(),id_); return true; } std::string ARexJob::State(void) { bool job_pending; return State(job_pending); } std::string ARexJob::State(bool& job_pending) { if(id_.empty()) return ""; job_state_t state = job_state_read_file(id_,config_.GmConfig(),job_pending); return GMJob::get_state_name(state); } bool ARexJob::Failed(void) { if(id_.empty()) return false; return job_failed_mark_check(id_,config_.GmConfig()); } std::string ARexJob::FailedState(std::string& cause) { std::string state; job_local_read_failed(id_,config_.GmConfig(),state,cause); return state; } Arc::Time ARexJob::Created(void) { time_t t = job_description_time(id_,config_.GmConfig()); if(t == 0) return Arc::Time(); // ??? return Arc::Time(t); } Arc::Time ARexJob::Modified(void) { time_t t = job_state_time(id_,config_.GmConfig()); if(t == 0) return Arc::Time(); // ??? 
return Arc::Time(t); } bool ARexJob::UpdateCredentials(const std::string& credentials) { if(id_.empty()) return false; if(!update_credentials(credentials)) return false; GMJob job(id_,Arc::User(uid_), job_.sessiondir,JOB_STATE_ACCEPTED); if(!job_local_write_file(job,config_.GmConfig(),job_)) return false; return true; } bool ARexJob::update_credentials(const std::string& credentials) { if(credentials.empty()) return true; // Per job credentials update - renew generic credentials assigned to this job if(job_.delegationid.empty()) return false; DelegationStores* delegs = config_.GmConfig().GetDelegations(); if(!delegs) return false; DelegationStore& deleg = delegs->operator[](config_.GmConfig().DelegationDir()); if(!deleg.PutCred(job_.delegationid, config_.GridName(), credentials)) return false; Arc::Credential cred(credentials,"","","","",false); job_.expiretime = cred.GetEndTime(); GMJob job(id_,Arc::User(uid_), job_.sessiondir,JOB_STATE_ACCEPTED); #if 0 std::string cred_public; cred.OutputCertificate(cred_public); cred.OutputCertificateChain(cred_public); (void)job_proxy_write_file(job,config_.GmConfig(),cred_public); #else // For compatibility reasons during transitional period store full proxy if possible (void)job_proxy_write_file(job,config_.GmConfig(),credentials); #endif // TODO: should job.#.proxy be updated too? return true; } bool ARexJob::make_job_id(void) { if(!config_) return false; int i; //@ delete_job_id(); for(i=0;i<100;i++) { //id_=Arc::tostring((unsigned int)getpid())+ // Arc::tostring((unsigned int)time(NULL))+ // Arc::tostring(rand(),1); Arc::GUID(id_); std::string fname=config_.GmConfig().ControlDir()+"/job."+id_+".description"; struct stat st; if(stat(fname.c_str(),&st) == 0) continue; int h = ::open(fname.c_str(),O_RDWR | O_CREAT | O_EXCL,0600); // So far assume control directory is on local fs. 
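// (Editorial note: a minimal sketch of the claim-by-exclusive-create pattern used here,
// assuming a local filesystem where O_CREAT|O_EXCL is atomic. The function name
// try_claim_id is hypothetical and the #if 0 guard keeps it out of the build; it uses
// only ::open/::close and errno, which this file already pulls in.)
#if 0
// Returns true if 'path' did not exist and was created by this call; returns false if
// another process created it first (errno == EEXIST) or on any other error - the caller
// is expected to inspect errno and either retry with a new GUID or give up.
static bool try_claim_id(const std::string& path) {
  int h = ::open(path.c_str(), O_RDWR | O_CREAT | O_EXCL, 0600);
  if(h == -1) return false;  // EEXIST => id already taken, retry with a new GUID
  ::close(h);
  return true;
}
#endif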
// TODO: add locks or links for NFS int err = errno; if(h == -1) { if(err == EEXIST) continue; logger_.msg(Arc::ERROR, "Failed to create file in %s", config_.GmConfig().ControlDir()); id_=""; return false; }; fix_file_owner(fname,config_.User()); close(h); return true; }; logger_.msg(Arc::ERROR, "Out of tries while allocating new job ID in %s", config_.GmConfig().ControlDir()); id_=""; return false; } bool ARexJob::delete_job_id(void) { if(!config_) return true; if(!id_.empty()) { if(!job_.sessiondir.empty()) // check if session dir was already defined job_clean_final(GMJob(id_,Arc::User(uid_), job_.sessiondir),config_.GmConfig()); id_=""; }; return true; } int ARexJob::TotalJobs(ARexGMConfig& config,Arc::Logger& /* logger */) { return JobsList::CountAllJobs(config.GmConfig()); } // TODO: optimize std::list ARexJob::Jobs(ARexGMConfig& config,Arc::Logger& logger) { std::list jlist; JobsList::GetAllJobIds(config.GmConfig(),jlist); std::list::iterator i = jlist.begin(); while(i!=jlist.end()) { ARexJob job(*i,config,logger,true); if(job) { ++i; } else { i = jlist.erase(i); }; }; return jlist; } std::string ARexJob::SessionDir(void) { if(id_.empty()) return ""; return job_.sessiondir; } std::string ARexJob::LogDir(void) { return job_.stdlog; } static bool normalize_filename(std::string& filename) { std::string::size_type p = 0; if(filename[0] != G_DIR_SEPARATOR) filename.insert(0,G_DIR_SEPARATOR_S); for(;p != std::string::npos;) { if((filename[p+1] == '.') && (filename[p+2] == '.') && ((filename[p+3] == 0) || (filename[p+3] == G_DIR_SEPARATOR)) ) { std::string::size_type pr = std::string::npos; if(p > 0) pr = filename.rfind(G_DIR_SEPARATOR,p-1); if(pr == std::string::npos) return false; filename.erase(pr,p-pr+3); p=pr; } else if((filename[p+1] == '.') && (filename[p+2] == G_DIR_SEPARATOR)) { filename.erase(p,2); } else if(filename[p+1] == G_DIR_SEPARATOR) { filename.erase(p,1); }; p = filename.find(G_DIR_SEPARATOR,p+1); }; if(!filename.empty()) filename.erase(0,1); // removing leading separator return true; } Arc::FileAccess* ARexJob::CreateFile(const std::string& filename) { if(id_.empty()) return NULL; std::string fname = filename; if((!normalize_filename(fname)) || (fname.empty())) { failure_="File name is not acceptable"; failure_type_=ARexJobInternalError; return NULL; }; int lname = fname.length(); fname = job_.sessiondir+"/"+fname; // First try to create/open file Arc::FileAccess* fa = Arc::FileAccess::Acquire(); if(!*fa) { delete fa; return NULL; }; if(!fa->fa_setuid(uid_,gid_)) { Arc::FileAccess::Release(fa); return NULL; }; if(!fa->fa_open(fname,O_WRONLY | O_CREAT,S_IRUSR | S_IWUSR)) { if(fa->geterrno() != ENOENT) { Arc::FileAccess::Release(fa); return NULL; }; std::string::size_type n = fname.rfind('/'); if((n == std::string::npos) || (n < (fname.length()-lname))) { Arc::FileAccess::Release(fa); return NULL; }; if(!fa->fa_mkdirp(fname.substr(0,n),S_IRUSR | S_IWUSR | S_IXUSR)) { if(fa->geterrno() != EEXIST) { Arc::FileAccess::Release(fa); return NULL; }; }; if(!fa->fa_open(fname,O_WRONLY | O_CREAT,S_IRUSR | S_IWUSR)) { Arc::FileAccess::Release(fa); return NULL; }; }; return fa; } Arc::FileAccess* ARexJob::OpenFile(const std::string& filename,bool for_read,bool for_write) { if(id_.empty()) return NULL; std::string fname = filename; if((!normalize_filename(fname)) || (fname.empty())) { failure_="File name is not acceptable"; failure_type_=ARexJobInternalError; return NULL; }; fname = job_.sessiondir+"/"+fname; int flags = 0; if(for_read && for_write) { flags=O_RDWR; } else if(for_read) 
{ flags=O_RDONLY; } else if(for_write) { flags=O_WRONLY; } //return Arc::FileOpen(fname,flags,uid_,gid_,0); Arc::FileAccess* fa = Arc::FileAccess::Acquire(); if(*fa) { if(fa->fa_setuid(uid_,gid_)) { if(fa->fa_open(fname,flags,0)) { return fa; }; }; }; failure_="Failed opening file - "+Arc::StrError(fa->geterrno()); failure_type_=ARexJobInternalError; Arc::FileAccess::Release(fa); return NULL; } Arc::FileAccess* ARexJob::OpenDir(const std::string& dirname) { if(id_.empty()) return NULL; std::string dname = dirname; if(!normalize_filename(dname)) { failure_="Directory name is not acceptable"; failure_type_=ARexJobInternalError; return NULL; }; //if(dname.empty()) return NULL; dname = job_.sessiondir+"/"+dname; Arc::FileAccess* fa = Arc::FileAccess::Acquire(); if(*fa) { if(fa->fa_setuid(uid_,gid_)) { if(fa->fa_opendir(dname)) { return fa; }; }; }; failure_="Failed opening directory - "+Arc::StrError(fa->geterrno()); failure_type_=ARexJobInternalError; Arc::FileAccess::Release(fa); return NULL; } int ARexJob::OpenLogFile(const std::string& name) { if(id_.empty()) return -1; if(strchr(name.c_str(),'/')) return -1; std::string fname = config_.GmConfig().ControlDir() + "/job." + id_ + "." + name; int h = ::open(fname.c_str(),O_RDONLY); if(name == "status") { if(h != -1) return h; fname = config_.GmConfig().ControlDir() + "/" + subdir_cur + "/job." + id_ + "." + name; h = ::open(fname.c_str(),O_RDONLY); if(h != -1) return h; fname = config_.GmConfig().ControlDir() + "/" + subdir_new + "/job." + id_ + "." + name; h = ::open(fname.c_str(),O_RDONLY); if(h != -1) return h; fname = config_.GmConfig().ControlDir() + "/" + subdir_rew + "/job." + id_ + "." + name; h = ::open(fname.c_str(),O_RDONLY); if(h != -1) return h; fname = config_.GmConfig().ControlDir() + "/" + subdir_old + "/job." + id_ + "." + name; h = ::open(fname.c_str(),O_RDONLY); }; return h; } std::list ARexJob::LogFiles(void) { std::list logs; if(id_.empty()) return logs; std::string dname = config_.GmConfig().ControlDir(); std::string prefix = "job." + id_ + "."; // TODO: scanning is performance bottleneck. Use matching instead. Glib::Dir* dir = new Glib::Dir(dname); if(!dir) return logs; for(;;) { std::string name = dir->read_name(); if(name.empty()) break; if(strncmp(prefix.c_str(),name.c_str(),prefix.length()) != 0) continue; logs.push_back(name.substr(prefix.length())); }; delete dir; // Add always present status logs.push_back("status"); return logs; } std::string ARexJob::GetFilePath(const std::string& filename) { if(id_.empty()) return ""; std::string fname = filename; if(!normalize_filename(fname)) return ""; if(fname.empty()) return job_.sessiondir; return job_.sessiondir+"/"+fname; } bool ARexJob::ReportFileComplete(const std::string& filename) { if(id_.empty()) return false; std::string fname = filename; if(!normalize_filename(fname)) return false; if(!job_input_status_add_file(GMJob(id_,Arc::User(uid_)),config_.GmConfig(),"/"+fname)) return false; CommFIFO::Signal(config_.GmConfig().ControlDir(),id_); return true; } bool ARexJob::ReportFilesComplete(void) { if(id_.empty()) return false; if(!job_input_status_add_file(GMJob(id_,Arc::User(uid_)),config_.GmConfig(),"/")) return false; CommFIFO::Signal(config_.GmConfig().ControlDir(),id_); return true; } std::string ARexJob::GetLogFilePath(const std::string& name) { if(id_.empty()) return ""; return config_.GmConfig().ControlDir() + "/job." + id_ + "." 
+ name; } bool ARexJob::ChooseSessionDir(const std::string& /* jobid */, std::string& sessiondir) { if (config_.SessionRootsNonDraining().size() == 0) { // no active session dirs available logger_.msg(Arc::ERROR, "No non-draining session dirs available"); return false; } // choose randomly from non-draining session dirs sessiondir = config_.SessionRootsNonDraining().at(rand() % config_.SessionRootsNonDraining().size()); return true; } nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/a-rex-backtrace-collect.8.in0000644000000000000000000000013214152153376026044 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.500647402 30 ctime=1638455099.402562481 nordugrid-arc-6.14.0/src/services/a-rex/a-rex-backtrace-collect.8.in0000644000175000002070000000133514152153376026033 0ustar00mockbuildmock00000000000000.TH a-rex-backtrace-collect 8 "2016-08-16" "NorduGrid @VERSION@" "NorduGrid Toolkit" .SH NAME a-rex-backtrace-collect \- processes core file(s) generated by arched and produces backtrace(s). .SH DESCRIPTION .B a-rex-backtrace-collect processes core file(s) collected in ARC_LOGS_DIR/arccore folder and produces their backtraces. The backtrace(s) are stored in files .backtrace. The ARC installation location can be adjusted using ARC_LOCATION environment variable. The location of configuration file can be specified using ARC_CONFIG environment variable. .SH SYNOPSIS a-rex-backtrace-collect .SH OPTIONS No options are supported. .SH AUTHOR Aleksandr Konstantinov nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/faults.cpp0000644000000000000000000000013214152153376022774 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.501647417 30 ctime=1638455099.410562601 nordugrid-arc-6.14.0/src/services/a-rex/faults.cpp0000644000175000002070000001171414152153376022765 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include "arex.h" #include "tools.h" namespace ARex { /* UnknownActivityIdentifierFault Message (string) InvalidRequestMessageFaultType InvalidElement (string,unbounded) Message (string) */ // A-REX faults static const std::string BES_FACTORY_FAULT_URL("http://schemas.ggf.org/bes/2006/08/bes-factory/BESFactoryPortType/Fault"); static void SetFaultResponse(Arc::SOAPFault& fault) { // Fetch top element of SOAP message - should be better way Arc::XMLNode fault_node = fault; Arc::SOAPEnvelope res(fault_node.Parent().Parent()); // Fault->Body->Envelope Arc::WSAHeader(res).Action(BES_FACTORY_FAULT_URL); } void ARexService::UnknownActivityIdentifierFault(Arc::XMLNode fault,const std::string& message) { fault.Name("bes-factory:UnknownActivityIdentifierFault"); fault.NewChild("bes-factory:Message")=message; return; } void ARexService::UnknownActivityIdentifierFault(Arc::SOAPFault& fault,const std::string& message) { UnknownActivityIdentifierFault(fault.Detail(true).NewChild("dummy"),message); SetFaultResponse(fault); } void ARexService::InvalidRequestMessageFault(Arc::XMLNode fault,const std::string& element,const std::string& message) { fault.Name("bes-factory:InvalidRequestMessageFaultType"); if(!element.empty()) fault.NewChild("bes-factory:InvalidElement")=element; fault.NewChild("bes-factory:Message")=message; return; } void ARexService::InvalidRequestMessageFault(Arc::SOAPFault& fault,const std::string& element,const std::string& message) { InvalidRequestMessageFault(fault.Detail(true).NewChild("dummy"),element,message); SetFaultResponse(fault); } // EMI ES faults // InternalBaseFault // Message // Timestamp 
(dateTime) 0-1 // Description 0-1 // FailureCode (int) 0-1 void ARexService::ESInternalBaseFault(Arc::XMLNode fault,const std::string& message,const std::string& desc) { fault.Name("estypes:InternalBaseFault"); fault.NewChild("estypes:Message") = message; fault.NewChild("estypes:Timestamp") = Arc::Time().str(Arc::ISOTime); if(!desc.empty()) fault.NewChild("estypes:Description") = desc; //fault.NewChild("estypes:FailureCode") = "0"; } void ARexService::ESInternalBaseFault(Arc::SOAPFault& fault,const std::string& message,const std::string& desc) { ESInternalBaseFault(fault.Detail(true).NewChild("dummy"),message,desc); } void ARexService::ESVectorLimitExceededFault(Arc::XMLNode fault,unsigned long limit,const std::string& message,const std::string& desc) { ESInternalBaseFault(fault,message.empty()?"Limit of parallel requests exceeded":message,desc); fault.NewChild("estypes:ServerLimit") = Arc::tostring(limit); fault.Name("estypes:VectorLimitExceededFault"); } void ARexService::ESVectorLimitExceededFault(Arc::SOAPFault& fault,unsigned long limit,const std::string& message,const std::string& desc) { ESVectorLimitExceededFault(fault.Detail(true).NewChild("dummy"),limit,message,desc); } #define ES_SIMPLE_FAULT(FAULTNAME,NAMESPACE,MESSAGE) \ void ARexService::ES##FAULTNAME(Arc::XMLNode fault, \ const std::string& message,const std::string& desc) { \ ESInternalBaseFault(fault,message.empty()?(MESSAGE):message,desc); \ fault.Name(#NAMESPACE ":" #FAULTNAME); \ } \ \ void ARexService::ES##FAULTNAME(Arc::SOAPFault& fault, \ const std::string& message,const std::string& desc) { \ ES##FAULTNAME(fault.Detail(true).NewChild("dummy"),message,desc); \ } ES_SIMPLE_FAULT(AccessControlFault,estypes,"Access denied") ES_SIMPLE_FAULT(UnsupportedCapabilityFault,escreate,"Unsupported capability") ES_SIMPLE_FAULT(InvalidActivityDescriptionSemanticFault,escreate,"Invalid activity description semantics") ES_SIMPLE_FAULT(InvalidActivityDescriptionFault,escreate,"Invalid activity description") ES_SIMPLE_FAULT(NotSupportedQueryDialectFault,esrinfo,"Query language not supported") ES_SIMPLE_FAULT(NotValidQueryStatementFault,esrinfo,"Query is not valid for specified language") ES_SIMPLE_FAULT(UnknownQueryFault,esrinfo,"Query is not recognized") ES_SIMPLE_FAULT(InternalResourceInfoFault,esrinfo,"Internal failure retrieving resource information") ES_SIMPLE_FAULT(ResourceInfoNotFoundFault,esrinfo,"Resource has no requested information") ES_SIMPLE_FAULT(UnableToRetrieveStatusFault,esainfo,"Activity status is missing") ES_SIMPLE_FAULT(UnknownAttributeFault,esainfo,"Activity has no such attribute") ES_SIMPLE_FAULT(OperationNotAllowedFault,esainfo,"Requested operation not allowed") ES_SIMPLE_FAULT(ActivityNotFoundFault,esainfo,"Activity with specified ID not found") ES_SIMPLE_FAULT(InternalNotificationFault,esainfo,"Notofication fault") ES_SIMPLE_FAULT(OperationNotPossibleFault,esainfo,"Can't perform this operation") ES_SIMPLE_FAULT(InvalidActivityStateFault,esainfo,"Invalid activity state") ES_SIMPLE_FAULT(InvalidActivityLimitFault,esainfo,"Invalid activity limit") ES_SIMPLE_FAULT(InvalidParameterFault,esainfo,"Invalid parameter") } nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/schema0000644000000000000000000000013214152153474022154 xustar000000000000000030 mtime=1638455100.363576921 30 atime=1638455103.998631539 30 ctime=1638455100.363576921 nordugrid-arc-6.14.0/src/services/a-rex/schema/0000755000175000002070000000000014152153474022216 
5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/a-rex/schema/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376024266 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455100.361576891 nordugrid-arc-6.14.0/src/services/a-rex/schema/Makefile.am0000644000175000002070000000020514152153376024250 0ustar00mockbuildmock00000000000000arcschemadir = $(pkgdatadir)/schema arcschema_DATA = a-rex.xsd a-rex_infoprovider.xsd a-rex_lrms.xsd EXTRA_DIST = $(arcschema_DATA) nordugrid-arc-6.14.0/src/services/a-rex/schema/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153434024272 xustar000000000000000030 mtime=1638455068.574099269 30 atime=1638455090.596430165 30 ctime=1638455100.360576876 nordugrid-arc-6.14.0/src/services/a-rex/schema/Makefile.in0000644000175000002070000005126614152153434024271 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/a-rex/schema DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ 
$(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! 
-r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(arcschemadir)" DATA = $(arcschema_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ 
GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 
= @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ arcschemadir = $(pkgdatadir)/schema arcschema_DATA = a-rex.xsd a-rex_infoprovider.xsd a-rex_lrms.xsd EXTRA_DIST = $(arcschema_DATA) all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/a-rex/schema/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/a-rex/schema/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-arcschemaDATA: $(arcschema_DATA) @$(NORMAL_INSTALL) @list='$(arcschema_DATA)'; test -n "$(arcschemadir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(arcschemadir)'"; \ $(MKDIR_P) "$(DESTDIR)$(arcschemadir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(arcschemadir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(arcschemadir)" || exit $$?; \ done uninstall-arcschemaDATA: @$(NORMAL_UNINSTALL) @list='$(arcschema_DATA)'; test -n "$(arcschemadir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(arcschemadir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(DATA) installdirs: for dir in "$(DESTDIR)$(arcschemadir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-arcschemaDATA install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-arcschemaDATA .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-arcschemaDATA install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags-am uninstall \ uninstall-am uninstall-arcschemaDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/a-rex/schema/PaxHeaders.30264/a-rex_lrms.xsd0000644000000000000000000000013214152153376025023 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455100.363576921 nordugrid-arc-6.14.0/src/services/a-rex/schema/a-rex_lrms.xsd0000644000175000002070000001154114152153376025012 0ustar00mockbuildmock00000000000000 The path to qstat, pbsnodes, qmgr etc PBS binaries. No need to set unless PBS is used. The path of the PBS server logfiles which are used by the GM to determine whether a PBS job is completed. If not specified, GM will use qstat for that. nordugrid-arc-6.14.0/src/services/a-rex/schema/PaxHeaders.30264/a-rex_infoprovider.xsd0000644000000000000000000000013214152153376026554 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455100.362576905 nordugrid-arc-6.14.0/src/services/a-rex/schema/a-rex_infoprovider.xsd0000644000175000002070000002423014152153376026542 0ustar00mockbuildmock00000000000000 This element can be used to specify benchmark results on the ExecutionEnvironment level. It should contain the name of the benchmark and the benchmark score separated by a space. Please use one of standard benchmark names given below if applicable: bogomips - BogoMips cfp2006 - SPEC CFP 2006 floating point benchmark cint2006 - SPEC CINT 2006 integer benchmark linpack - LINPACK benchmark specfp2000 - SPECfp2000 floating point benchmark specint2000 - SPECint2000 integer benchmark This element is used by the infoprovider to determine which nodes are included in an ExecutionEnvironment. 
This element represents a group of identical compute nodes in the cluster. nordugrid-arc-6.14.0/src/services/a-rex/schema/PaxHeaders.30264/a-rex.xsd0000644000000000000000000000013214152153376023766 xustar000000000000000030 mtime=1638455038.429646335 30 atime=1638455038.509647537 30 ctime=1638455100.362576905 nordugrid-arc-6.14.0/src/services/a-rex/schema/a-rex.xsd0000644000175000002070000007305114152153376023761 0ustar00mockbuildmock00000000000000 This element defines URL of A-REX service as seen from outside. This element defines path to arc0 Grid Manager configuration file. If present values in that file will overwrite those defined as siblings of this element. If string is empty its value is /etc/arc.conf. This element defines how grid-manager part of A-REX is run. * internal - as a thread inside service container. * none - no grid-manager is run. * external - as a separate executable (not supported anymore). Default is 'internal'. Defines parameters for mapping Grid user identity to local account. Currently only default account name can be specified. Defines parameters for controlling LRMS specific and LRMS related functionality. * type - name of LRMS/batch system * defaultShare - optional name of default ComputingShare * sharedFilesystem - either session directory is shared with computing node * sharedScratch - the path where the frontend can access cross-mounted scratch directories of nodes, if applicable * GNUTimeUtility - location and name of GNU time executable * any accommodates numerous LRMS configuration parameters * pbs_bin_path="/usr/bin" * pbs_log_path="/var/spool/pbs/server_logs" * condor_bin_path="/opt/condor/bin" * condor_config="/opt/condor/etc/condor_config" * condor_rank="(1-LoadAvg/2)*(1-LoadAvg/2)*Memory/1000*KFlops/1000000" * slurm_bin_path="/usr/bin" * sge_bin_path="/opt/n1ge6/bin/lx24-x86" * sge_root="/opt/n1ge6" * sge_cell="default" * sge_execd_port="537" * lsf_bin_path="/usr/local/lsf/bin/" * lsf_profile_path="/usr/share/lsf/conf" * ll_bin_path="/opt/ibmll/LoadL/full/bin" * ll_consumable_resources="yes" Defines parameters influencing load imposed on gateway computer. Unless specified missing element means do not limit. * maxJobsTracked - jobs which are not in FINISHED state (jobs tracked in RAM) * maxJobsRun - jobs being run (SUBMITTING, INLRMS states) * maxJobsTotal - jobs in any state * maxJobsPerDN - maximum jobs in the system per user DN * maxJobsTransferred - jobs being processed on frontend (PREPARING, FINISHING states) * maxJobsTransferredAdditional - additional reserved number of jobs being processed on frontend * maxFilesTransferred - number of files being transferred simultaneously by jobs in PREPARING and FINISHING states. Value is per job. * maxLoadShare - sharing mechanism for data transfer - the maximum number of processes that can run per transfer share * loadShareType - sharing mechanism for data transfer - the scheme used to assign jobs to transfer shares. Possible values are "dn", "voms:vo", "voms:role" and "voms:group" * shareLimit - specifies a transfer share that has a limit different from the default value in maxLoadShare * name - the name of the share. Examples for different sharing mechanisms: - dn : /O=Grid/O=NorduGrid/OU=domainname.com/CN=Jane Doe - voms:vo : voname - voms:role : voname:rolename - voms:group : /voname/groupname * limit - the maximum number of processes that can run for this particular share * wakeupPeriod - specifies how often cheks for new jobs arrived, job state change requests, etc are done. 
That is resposivity of the service. The period is in seconds. Default is 3 minutes. Usually this element is not needed. Parameters related to cache functionality. Multiple caches may be specified. Cached data will be distributed evenly over the caches. If none such element is present caching is disabled. * location - path to a directory to store cached data. Multiple cache directories may be specified by specifying multiple location elements. Cached data will be distributed evenly over the caches. * remotelocation - path to a cache which is managed by another grid-manager. * link - optional path at which the location is accessible on computing nodes, if it is different from the path on the service host. * highWatermark, lowWatermark - specify high and low watermarks for space used by cache, as a percentage of the space on the file system on which the cache directory is located. When the max is exceeded, files will be deleted to bring the used space down to the min level. It is a good idea to have the cache on its own separate file system. To turn off this feature those elements must be absent. * cacheLogFile - the file where messages from cache cleaning are logged. * cacheLogLevel - the log level used by the cache cleaning script. * cacheLifetime - the lifetime of cache files Parameters for new data staging framework: * maxDelivery: maximum number of files in physical transfer * maxProcessor: maximum number of files in each pre or post transfer stage * maxEmergency: maximum number of files which can use emergency slots when regular slots are full * maxPrepared: maximum number of files in prepared state * shareType: transfer shares type * definedShare: share with a defined priority * name: share name * priority: share priority * deliveryService: remote data delivery service endpoint * localDelivery: whether to use local delivery as well as remote * remoteSizeLimit: Lower limit on file size (in bytes) under which transfers always use local delivery * useHostCert: whether to use host certificate for communication with remote delivery services * dtrLog: path to location where DTR state is periodically dumped Specifies how service prepares its control and session directories at startup. yes - directories are created and they ownership and permissions adjusted missing - directories are created and only for those which are created ownership and permission are adjusted no - nothing is created and adjusted This optional parameter can be used to enable publishing of additional information to ISIS. The default is not to publish ("no"). The information, which is considered in some degree to be static, includes HealthState, OSFamily, Platform, PhysicalCPUs, CPUMultiplicity, CPUModel and ApplicationEnvironment. This optional parameter can be used to disable ARC (BES based) job management interface. By default it is enabled. This optional parameter can be used to enable EMI ES job management interface. By default it is disabled. CommonName attribute of bes-factory. LongDescription attribute of bes-factory. Name of Local Resource Management System. Name of Operating System. The values are based on the OSType field of the CIM_OperatingSystem model: http://www.dmtf.org/standards/cim/cim_schema_v29 Some examples of valid choices: LINUX, MACOS, Solaris, Windows 2000 The GLUE2 infoprovider wake up period time in second The information interface (LIDI) max number of simultaneous clients. Default is 10. The max number of simultaneous clients performing job management operations (extended BES). Default is 100. 
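As an aside on the cache options documented above: highWatermark and lowWatermark are percentages of the file system holding the cache directory, and when usage rises above the high mark cached files are removed until usage falls back to the low mark. The following C++ sketch only illustrates that kind of check; it is not the A-REX cache cleaning script, and the cache path and the 80/70 watermark values are assumptions made for the example.

#include <sys/statvfs.h>
#include <iostream>
#include <string>

// Illustrative sketch only -- report the percentage of used space on the
// file system that holds a cache directory and compare it against high/low
// watermarks given as percentages, as described for highWatermark/lowWatermark.
static bool used_percentage(const std::string& path, double& used) {
  struct statvfs st;
  if (statvfs(path.c_str(), &st) != 0) return false;
  const double total = static_cast<double>(st.f_blocks) * st.f_frsize;
  const double avail = static_cast<double>(st.f_bavail) * st.f_frsize;
  if (total <= 0.0) return false;
  used = 100.0 * (total - avail) / total;
  return true;
}

int main() {
  const std::string cache_dir = "/var/cache/arc";  // hypothetical location
  const double high_watermark = 80.0;              // example value (percent)
  const double low_watermark = 70.0;               // example value (percent)
  double used = 0.0;
  if (!used_percentage(cache_dir, used)) {
    std::cerr << "cannot stat file system of " << cache_dir << std::endl;
    return 1;
  }
  if (used > high_watermark)
    std::cout << "above high watermark: cleaning would start" << std::endl;
  else if (used <= low_watermark)
    std::cout << "at or below low watermark: cleaning would stop" << std::endl;
  else
    std::cout << "between watermarks: no action needed" << std::endl;
  return 0;
}
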
The max number of simultaneous clients performing HTTP PUT and GET operations. Default is 100. Options for the A-REX information provider nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/delegation0000644000000000000000000000013214152153473023026 xustar000000000000000030 mtime=1638455099.455563277 30 atime=1638455103.998631539 30 ctime=1638455099.455563277 nordugrid-arc-6.14.0/src/services/a-rex/delegation/0000755000175000002070000000000014152153473023070 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376025141 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.445563127 nordugrid-arc-6.14.0/src/services/a-rex/delegation/Makefile.am0000644000175000002070000000104214152153376025123 0ustar00mockbuildmock00000000000000noinst_LTLIBRARIES = libdelegation.la libdelegation_la_SOURCES = \ uid.cpp FileRecord.cpp FileRecordBDB.cpp FileRecordSQLite.cpp DelegationStore.cpp DelegationStores.cpp \ uid.h FileRecord.h FileRecordBDB.h FileRecordSQLite.h DelegationStore.h DelegationStores.h \ ../SQLhelpers.h libdelegation_la_CXXFLAGS = -I$(top_srcdir)/include \ $(LIBXML2_CFLAGS) $(GLIBMM_CFLAGS) $(DBCXX_CPPFLAGS) $(SQLITE_CFLAGS) $(AM_CXXFLAGS) libdelegation_la_LIBADD = $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(DBCXX_LIBS) $(SQLITE_LIBS) nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/FileRecordBDB.cpp0000644000000000000000000000013214152153376026137 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.448563172 nordugrid-arc-6.14.0/src/services/a-rex/delegation/FileRecordBDB.cpp0000644000175000002070000004673314152153376026141 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include "uid.h" #include "FileRecordBDB.h" namespace ARex { #define FR_DB_NAME "list" static void db_env_clean(const std::string& base) { try { Glib::Dir dir(base); std::string name; while ((name = dir.read_name()) != "") { std::string fullpath(base); fullpath += G_DIR_SEPARATOR_S + name; struct stat st; if (::lstat(fullpath.c_str(), &st) == 0) { if(!S_ISDIR(st.st_mode)) { if(name != FR_DB_NAME) { Arc::FileDelete(fullpath.c_str()); }; }; }; }; } catch(Glib::FileError& e) { }; } bool FileRecordBDB::dberr(const char* s, int err) { if(err == 0) return true; error_num_ = err; error_str_ = std::string(s)+": "+DbEnv::strerror(err); return false; } FileRecordBDB::FileRecordBDB(const std::string& base, bool create): FileRecord(base, create), db_rec_(NULL), db_lock_(NULL), db_locked_(NULL), db_link_(NULL) { valid_ = open(create); } bool FileRecordBDB::verify(void) { // Performing various kinds of verifications std::string dbpath = basepath_ + G_DIR_SEPARATOR_S + FR_DB_NAME; { Db db_test(NULL,DB_CXX_NO_EXCEPTIONS); if(!dberr("Error verifying databases", db_test.verify(dbpath.c_str(),NULL,NULL,DB_NOORDERCHK))) { if(error_num_ != ENOENT) return false; }; }; { Db db_test(NULL,DB_CXX_NO_EXCEPTIONS); if(!dberr("Error verifying database 'meta'", db_test.verify(dbpath.c_str(),"meta",NULL,DB_ORDERCHKONLY))) { if(error_num_ != ENOENT) return false; }; }; // Skip 'link' - it is not of btree kind // Skip 'lock' - for unknown reason it returns DB_NOTFOUND // Skip 'locked' - for unknown reason it returns DB_NOTFOUND return true; } FileRecordBDB::~FileRecordBDB(void) { close(); } bool 
FileRecordBDB::open(bool create) { int oflags = 0; int eflags = DB_INIT_CDB | DB_INIT_MPOOL; if(create) { oflags |= DB_CREATE; eflags |= DB_CREATE; }; int mode = S_IRUSR|S_IWUSR; db_env_ = new DbEnv(DB_CXX_NO_EXCEPTIONS); if(!dberr("Error setting database environment flags", db_env_->set_flags(DB_CDB_ALLDB,1))) { delete db_env_; db_env_ = NULL; return false; } // If process is master (create = true) we should check by caling // failchk() and discard environment in case something is wrong. // But sicne we are allowed to discard environment we can do // that just in case. if(create) { db_env_clean(basepath_); }; if(!dberr("Error opening database environment", db_env_->open(basepath_.c_str(),eflags,mode))) { delete db_env_; db_env_ = NULL; return false; }; std::string dbpath = FR_DB_NAME; if(create) { // If creation is allowed that means we are master and can try verifying if(!verify()) return false; }; // db_link // |---db_lock // \---db_locked db_rec_ = new Db(db_env_,DB_CXX_NO_EXCEPTIONS); db_lock_ = new Db(db_env_,DB_CXX_NO_EXCEPTIONS); db_locked_ = new Db(db_env_,DB_CXX_NO_EXCEPTIONS); db_link_ = new Db(db_env_,DB_CXX_NO_EXCEPTIONS); if(!dberr("Error setting flag DB_DUPSORT",db_lock_->set_flags(DB_DUPSORT))) return false; if(!dberr("Error setting flag DB_DUPSORT",db_locked_->set_flags(DB_DUPSORT))) return false; if(!dberr("Error associating databases",db_link_->associate(NULL,db_lock_,&lock_callback,0))) return false; if(!dberr("Error associating databases",db_link_->associate(NULL,db_locked_,&locked_callback,0))) return false; if(!dberr("Error opening database 'meta'", db_rec_->open(NULL,dbpath.c_str(), "meta", DB_BTREE,oflags,mode))) return false; if(!dberr("Error opening database 'link'", db_link_->open(NULL,dbpath.c_str(), "link", DB_RECNO,oflags,mode))) return false; if(!dberr("Error opening database 'lock'", db_lock_->open(NULL,dbpath.c_str(), "lock", DB_BTREE,oflags,mode))) return false; if(!dberr("Error opening database 'locked'", db_locked_->open(NULL,dbpath.c_str(),"locked",DB_BTREE,oflags,mode))) return false; return true; } void FileRecordBDB::close(void) { valid_ = false; if(db_locked_) db_locked_->close(0); if(db_lock_) db_lock_->close(0); if(db_link_) db_link_->close(0); if(db_rec_) db_rec_->close(0); if(db_env_) db_env_->close(0); delete db_locked_; db_locked_ = NULL; delete db_lock_; db_lock_ = NULL; delete db_link_; db_link_ = NULL; delete db_env_; db_env_ = NULL; } static void* store_string(const std::string& str, void* buf) { uint32_t l = str.length(); unsigned char* p = (unsigned char*)buf; *p = (unsigned char)l; l >>= 8; ++p; *p = (unsigned char)l; l >>= 8; ++p; *p = (unsigned char)l; l >>= 8; ++p; *p = (unsigned char)l; l >>= 8; ++p; ::memcpy(p,str.c_str(),str.length()); p += str.length(); return (void*)p; } static void* parse_string(std::string& str, const void* buf, uint32_t& size) { uint32_t l = 0; const unsigned char* p = (unsigned char*)buf; if(size < 4) { p += size; size = 0; return (void*)p; }; l |= ((uint32_t)(*p)) << 0; ++p; --size; l |= ((uint32_t)(*p)) << 8; ++p; --size; l |= ((uint32_t)(*p)) << 16; ++p; --size; l |= ((uint32_t)(*p)) << 24; ++p; --size; if(l > size) l = size; // TODO: sanity check str.assign((const char*)p,l); p += l; size -= l; return (void*)p; } static void make_string(const std::string& str, Dbt& rec) { rec.set_data(NULL); rec.set_size(0); uint32_t l = 4 + str.length(); void* d = (void*)::malloc(l); if(!d) return; rec.set_data(d); rec.set_size(l); d = store_string(str,d); } static void make_link(const std::string& lock_id,const 
std::string& id, const std::string& owner, Dbt& rec) { rec.set_data(NULL); rec.set_size(0); uint32_t l = 4 + lock_id.length() + 4 + id.length() + 4 + owner.length(); void* d = (void*)::malloc(l); if(!d) return; rec.set_data(d); rec.set_size(l); d = store_string(lock_id,d); d = store_string(id,d); d = store_string(owner,d); } static void make_key(const std::string& id, const std::string& owner, Dbt& key) { key.set_data(NULL); key.set_size(0); uint32_t l = 4 + id.length() + 4 + owner.length(); void* d = (void*)::malloc(l); if(!d) return; key.set_data(d); key.set_size(l); d = store_string(id,d); d = store_string(owner,d); } static void make_record(const std::string& uid, const std::string& id, const std::string& owner, const std::list& meta, Dbt& key, Dbt& data) { key.set_data(NULL); key.set_size(0); data.set_data(NULL); data.set_size(0); uint32_t l = 4 + uid.length(); for(std::list::const_iterator m = meta.begin(); m != meta.end(); ++m) { l += 4 + m->length(); }; make_key(id,owner,key); void* d = (void*)::malloc(l); if(!d) { ::free(key.get_data()); key.set_data(NULL); key.set_size(0); return; }; data.set_data(d); data.set_size(l); d = store_string(uid,d); for(std::list::const_iterator m = meta.begin(); m != meta.end(); ++m) { d = store_string(*m,d); }; } static void parse_record(std::string& uid, std::string& id, std::string& owner, std::list& meta, const Dbt& key, const Dbt& data) { uint32_t size = 0; void* d = NULL; d = (void*)key.get_data(); size = (uint32_t)key.get_size(); d = parse_string(id,d,size); d = parse_string(owner,d,size); d = (void*)data.get_data(); size = (uint32_t)data.get_size(); d = parse_string(uid,d,size); for(;size;) { std::string s; d = parse_string(s,d,size); meta.push_back(s); }; } int FileRecordBDB::locked_callback(Db * secondary, const Dbt * key, const Dbt * data, Dbt * result) { const void* p = data->get_data(); uint32_t size = data->get_size(); std::string str; p = parse_string(str,p,size); result->set_data((void*)p); result->set_size(size); return 0; } bool FileRecordBDB::Recover(void) { Glib::Mutex::Lock lock(lock_); // Real recovery not implemented yet. 
close(); error_num_ = -1; error_str_ = "Recovery not implemented yet."; return false; } int FileRecordBDB::lock_callback(Db * secondary, const Dbt * key, const Dbt * data, Dbt * result) { const void* p = data->get_data(); uint32_t size = data->get_size(); uint32_t rest = size; std::string str; parse_string(str,p,rest); result->set_data((void*)p); result->set_size(size-rest); return 0; } std::string FileRecordBDB::Add(std::string& id, const std::string& owner, const std::list& meta) { if(!valid_) return ""; int uidtries = 10; // some sane number std::string uid; while(true) { if(!(uidtries--)) return ""; Glib::Mutex::Lock lock(lock_); Dbt key; Dbt data; uid = rand_uid64().substr(4); make_record(uid,(id.empty())?uid:id,owner,meta,key,data); void* pkey = key.get_data(); void* pdata = data.get_data(); int dbres = db_rec_->put(NULL,&key,&data,DB_NOOVERWRITE); if(dbres == DB_KEYEXIST) { ::free(pkey); ::free(pdata); uid.resize(0); continue; }; if(!dberr("Failed to add record to database",dbres)) { ::free(pkey); ::free(pdata); return ""; }; db_rec_->sync(0); ::free(pkey); ::free(pdata); break; }; if(id.empty()) id = uid; make_file(uid); return uid_to_path(uid); } bool FileRecordBDB::Add(const std::string& uid, const std::string& id, const std::string& owner, const std::list& meta) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); Dbt key; Dbt data; make_record(uid,(id.empty())?uid:id,owner,meta,key,data); void* pkey = key.get_data(); void* pdata = data.get_data(); int dbres = db_rec_->put(NULL,&key,&data,DB_NOOVERWRITE); if(!dberr("Failed to add record to database",dbres)) { ::free(pkey); ::free(pdata); return false; }; db_rec_->sync(0); ::free(pkey); ::free(pdata); return true; } std::string FileRecordBDB::Find(const std::string& id, const std::string& owner, std::list& meta) { if(!valid_) return ""; Glib::Mutex::Lock lock(lock_); Dbt key; Dbt data; make_key(id,owner,key); void* pkey = key.get_data(); if(!dberr("Failed to retrieve record from database",db_rec_->get(NULL,&key,&data,0))) { ::free(pkey); return ""; }; std::string uid; std::string id_tmp; std::string owner_tmp; parse_record(uid,id_tmp,owner_tmp,meta,key,data); ::free(pkey); return uid_to_path(uid); } bool FileRecordBDB::Modify(const std::string& id, const std::string& owner, const std::list& meta) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); Dbt key; Dbt data; make_key(id,owner,key); void* pkey = key.get_data(); if(!dberr("Failed to retrieve record from database",db_rec_->get(NULL,&key,&data,0))) { ::free(pkey); return false; }; std::string uid; std::string id_tmp; std::string owner_tmp; std::list meta_tmp; parse_record(uid,id_tmp,owner_tmp,meta_tmp,key,data); ::free(pkey); make_record(uid,id,owner,meta,key,data); if(!dberr("Failed to store record to database",db_rec_->put(NULL,&key,&data,0))) { ::free(key.get_data()); ::free(data.get_data()); return false; }; db_rec_->sync(0); ::free(key.get_data()); ::free(data.get_data()); return true; } bool FileRecordBDB::Remove(const std::string& id, const std::string& owner) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); Dbt key; Dbt data; make_key(id,owner,key); void* pkey = key.get_data(); if(dberr("",db_locked_->get(NULL,&key,&data,0))) { ::free(pkey); error_str_ = "Record has active locks"; return false; // have locks }; if(!dberr("Failed to retrieve record from database",db_rec_->get(NULL,&key,&data,0))) { ::free(pkey); return false; // No such record? 
}; std::string uid; std::string id_tmp; std::string owner_tmp; std::list meta; parse_record(uid,id_tmp,owner_tmp,meta,key,data); if(!dberr("Failed to delete record from database",db_rec_->del(NULL,&key,0))) { // TODO: handle error ::free(pkey); return false; }; db_rec_->sync(0); ::free(pkey); remove_file(uid); return true; } bool FileRecordBDB::AddLock(const std::string& lock_id, const std::list& ids, const std::string& owner) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); Dbt key; Dbt data; for(std::list::const_iterator id = ids.begin(); id != ids.end(); ++id) { make_link(lock_id,*id,owner,data); void* pdata = data.get_data(); if(!dberr("addlock:put",db_link_->put(NULL,&key,&data,DB_APPEND))) { ::free(pdata); return false; }; ::free(pdata); }; db_link_->sync(0); return true; } bool FileRecordBDB::RemoveLock(const std::string& lock_id) { std::list > ids; return RemoveLock(lock_id,ids); } bool FileRecordBDB::RemoveLock(const std::string& lock_id, std::list >& ids) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); Dbc* cur = NULL; if(!dberr("removelock:cursor",db_lock_->cursor(NULL,&cur,DB_WRITECURSOR))) return false; Dbt key; Dbt data; make_string(lock_id,key); void* pkey = key.get_data(); if(!dberr("removelock:get1",cur->get(&key,&data,DB_SET))) { // TODO: handle errors ::free(pkey); cur->close(); return false; }; for(;;) { std::string id; std::string owner; uint32_t size = data.get_size(); void* buf = data.get_data(); buf = parse_string(id,buf,size); // lock_id - skip buf = parse_string(id,buf,size); buf = parse_string(owner,buf,size); ids.push_back(std::pair(id,owner)); if(!dberr("removelock:del",cur->del(0))) { ::free(pkey); cur->close(); return false; }; if(!dberr("removelock:get2",cur->get(&key,&data,DB_NEXT_DUP))) break; }; db_lock_->sync(0); ::free(pkey); cur->close(); return true; } bool FileRecordBDB::ListLocked(const std::string& lock_id, std::list >& ids) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); Dbc* cur = NULL; if(!dberr("listlocked:cursor",db_lock_->cursor(NULL,&cur,0))) return false; Dbt key; Dbt data; make_string(lock_id,key); void* pkey = key.get_data(); if(!dberr("listlocked:get1",cur->get(&key,&data,DB_SET))) { // TODO: handle errors ::free(pkey); cur->close(); return false; }; for(;;) { std::string id; std::string owner; uint32_t size = data.get_size(); void* buf = data.get_data(); buf = parse_string(id,buf,size); // lock_id - skip buf = parse_string(id,buf,size); buf = parse_string(owner,buf,size); ids.push_back(std::pair(id,owner)); if(cur->get(&key,&data,DB_NEXT_DUP) != 0) break; }; ::free(pkey); cur->close(); return true; } bool FileRecordBDB::ListLocks(std::list& locks) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); Dbc* cur = NULL; if(db_lock_->cursor(NULL,&cur,0)) return false; for(;;) { Dbt key; Dbt data; if(cur->get(&key,&data,DB_NEXT_NODUP) != 0) break; // TODO: handle errors std::string str; uint32_t size = key.get_size(); parse_string(str,key.get_data(),size); locks.push_back(str); }; cur->close(); return true; } bool FileRecordBDB::ListLocks(const std::string& id, const std::string& owner, std::list& locks) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); Dbc* cur = NULL; if(db_lock_->cursor(NULL,&cur,0)) return false; for(;;) { Dbt key; Dbt data; if(cur->get(&key,&data,DB_NEXT_NODUP) != 0) break; // TODO: handle errors std::string str; uint32_t size = key.get_size(); parse_string(str,key.get_data(),size); { std::string id_tmp; std::string owner_tmp; uint32_t size = data.get_size(); void* buf = 
data.get_data(); buf = parse_string(id_tmp,buf,size); // lock_id - skip buf = parse_string(id_tmp,buf,size); buf = parse_string(owner_tmp,buf,size); if((id_tmp != id) || (owner_tmp != owner)) continue; }; locks.push_back(str); }; cur->close(); return true; } FileRecordBDB::Iterator::Iterator(FileRecordBDB& frec):FileRecord::Iterator(frec),cur_(NULL) { Glib::Mutex::Lock lock(frec.lock_); if(!frec.dberr("Iterator:cursor",frec.db_rec_->cursor(NULL,&cur_,0))) { if(cur_) { cur_->close(); cur_=NULL; }; return; }; Dbt key; Dbt data; if(!frec.dberr("Iterator:first",cur_->get(&key,&data,DB_FIRST))) { cur_->close(); cur_=NULL; return; }; parse_record(uid_,id_,owner_,meta_,key,data); } FileRecordBDB::Iterator::~Iterator(void) { FileRecordBDB& frec((FileRecordBDB&)frec_); Glib::Mutex::Lock lock(frec.lock_); if(cur_) { cur_->close(); cur_=NULL; }; } FileRecordBDB::Iterator& FileRecordBDB::Iterator::operator++(void) { if(!cur_) return *this; FileRecordBDB& frec((FileRecordBDB&)frec_); Glib::Mutex::Lock lock(frec.lock_); Dbt key; Dbt data; if(!frec.dberr("Iterator:first",cur_->get(&key,&data,DB_NEXT))) { cur_->close(); cur_=NULL; return *this; }; parse_record(uid_,id_,owner_,meta_,key,data); return *this; } FileRecordBDB::Iterator& FileRecordBDB::Iterator::operator--(void) { if(!cur_) return *this; FileRecordBDB& frec((FileRecordBDB&)frec_); Glib::Mutex::Lock lock(frec.lock_); Dbt key; Dbt data; if(!frec.dberr("Iterator:first",cur_->get(&key,&data,DB_PREV))) { cur_->close(); cur_=NULL; return *this; }; parse_record(uid_,id_,owner_,meta_,key,data); return *this; } void FileRecordBDB::Iterator::suspend(void) { FileRecordBDB& frec((FileRecordBDB&)frec_); Glib::Mutex::Lock lock(frec.lock_); if(cur_) { cur_->close(); cur_=NULL; } } bool FileRecordBDB::Iterator::resume(void) { FileRecordBDB& frec((FileRecordBDB&)frec_); Glib::Mutex::Lock lock(frec.lock_); if(!cur_) { if(id_.empty()) return false; if(!frec.dberr("Iterator:cursor",frec.db_rec_->cursor(NULL,&cur_,0))) { if(cur_) { cur_->close(); cur_=NULL; }; return false; }; Dbt key; Dbt data; make_key(id_,owner_,key); void* pkey = key.get_data(); if(!frec.dberr("Iterator:first",cur_->get(&key,&data,DB_SET))) { ::free(pkey); cur_->close(); cur_=NULL; return false; }; parse_record(uid_,id_,owner_,meta_,key,data); ::free(pkey); }; return true; } } // namespace ARex nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/FileRecord.cpp0000644000000000000000000000013214152153376025627 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.447563157 nordugrid-arc-6.14.0/src/services/a-rex/delegation/FileRecord.cpp0000644000175000002070000000246714152153376025625 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include "FileRecord.h" namespace ARex { std::string FileRecord::uid_to_path(const std::string& uid) { std::string path = basepath_; std::string::size_type p = 0; for(;uid.length() > (p+4);) { path = path + G_DIR_SEPARATOR_S + uid.substr(p,3); p += 3; }; return path + G_DIR_SEPARATOR_S + uid.substr(p); } bool FileRecord::make_file(const std::string& uid) { std::string path = uid_to_path(uid); std::string::size_type p = path.rfind(G_DIR_SEPARATOR_S); if((p != std::string::npos) && (p != 0)) { (void)Arc::DirCreate(path.substr(0,p),0,0,S_IXUSR|S_IRUSR|S_IWUSR,true); } return Arc::FileCreate(uid_to_path(uid),"",0,0,S_IRUSR|S_IWUSR); } bool FileRecord::remove_file(const std::string& uid) { std::string path = uid_to_path(uid); 
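// 'path' is the nested location derived from the uid by uid_to_path() above,
// which splits the uid into 3-character sub-directories under basepath_.
// After the file itself is deleted, the loop below walks back up that
// directory chain, removing each parent directory that has become empty,
// and stops at basepath_ or at the first directory that cannot be removed.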
if(Arc::FileDelete(path)) { while(true) { std::string::size_type p = path.rfind(G_DIR_SEPARATOR_S); if((p == std::string::npos) || (p == 0)) break; if(p <= basepath_.length()) break; path.resize(p); if(!Arc::DirDelete(path,false)) break; }; return true; }; return false; } } // namespace ARex nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153432025143 xustar000000000000000030 mtime=1638455066.424066964 30 atime=1638455089.707416807 30 ctime=1638455099.444563112 nordugrid-arc-6.14.0/src/services/a-rex/delegation/Makefile.in0000644000175000002070000010204714152153432025134 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/a-rex/delegation DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ 
$(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = LTLIBRARIES = $(noinst_LTLIBRARIES) am__DEPENDENCIES_1 = libdelegation_la_DEPENDENCIES = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) am_libdelegation_la_OBJECTS = libdelegation_la-uid.lo \ libdelegation_la-FileRecord.lo \ libdelegation_la-FileRecordBDB.lo \ libdelegation_la-FileRecordSQLite.lo \ libdelegation_la-DelegationStore.lo \ libdelegation_la-DelegationStores.lo libdelegation_la_OBJECTS = $(am_libdelegation_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libdelegation_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) \ $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libdelegation_la_SOURCES) DIST_SOURCES = $(libdelegation_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without 
duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = 
@GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE 
= @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ noinst_LTLIBRARIES = libdelegation.la libdelegation_la_SOURCES = \ uid.cpp FileRecord.cpp FileRecordBDB.cpp FileRecordSQLite.cpp DelegationStore.cpp DelegationStores.cpp \ uid.h FileRecord.h FileRecordBDB.h FileRecordSQLite.h DelegationStore.h DelegationStores.h \ ../SQLhelpers.h libdelegation_la_CXXFLAGS = -I$(top_srcdir)/include \ $(LIBXML2_CFLAGS) $(GLIBMM_CFLAGS) $(DBCXX_CPPFLAGS) $(SQLITE_CFLAGS) $(AM_CXXFLAGS) libdelegation_la_LIBADD = $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(DBCXX_LIBS) $(SQLITE_LIBS) all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case 
'$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/a-rex/delegation/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/a-rex/delegation/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstLTLIBRARIES: -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) @list='$(noinst_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libdelegation.la: $(libdelegation_la_OBJECTS) $(libdelegation_la_DEPENDENCIES) $(EXTRA_libdelegation_la_DEPENDENCIES) $(AM_V_CXXLD)$(libdelegation_la_LINK) $(libdelegation_la_OBJECTS) $(libdelegation_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libdelegation_la-DelegationStore.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libdelegation_la-DelegationStores.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libdelegation_la-FileRecord.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libdelegation_la-FileRecordBDB.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libdelegation_la-FileRecordSQLite.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libdelegation_la-uid.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) 
@AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libdelegation_la-uid.lo: uid.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -MT libdelegation_la-uid.lo -MD -MP -MF $(DEPDIR)/libdelegation_la-uid.Tpo -c -o libdelegation_la-uid.lo `test -f 'uid.cpp' || echo '$(srcdir)/'`uid.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libdelegation_la-uid.Tpo $(DEPDIR)/libdelegation_la-uid.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='uid.cpp' object='libdelegation_la-uid.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -c -o libdelegation_la-uid.lo `test -f 'uid.cpp' || echo '$(srcdir)/'`uid.cpp libdelegation_la-FileRecord.lo: FileRecord.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -MT libdelegation_la-FileRecord.lo -MD -MP -MF $(DEPDIR)/libdelegation_la-FileRecord.Tpo -c -o libdelegation_la-FileRecord.lo `test -f 'FileRecord.cpp' || echo '$(srcdir)/'`FileRecord.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libdelegation_la-FileRecord.Tpo $(DEPDIR)/libdelegation_la-FileRecord.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='FileRecord.cpp' object='libdelegation_la-FileRecord.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -c -o libdelegation_la-FileRecord.lo `test -f 'FileRecord.cpp' || echo '$(srcdir)/'`FileRecord.cpp libdelegation_la-FileRecordBDB.lo: FileRecordBDB.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -MT libdelegation_la-FileRecordBDB.lo -MD -MP -MF $(DEPDIR)/libdelegation_la-FileRecordBDB.Tpo -c -o libdelegation_la-FileRecordBDB.lo `test -f 'FileRecordBDB.cpp' || echo '$(srcdir)/'`FileRecordBDB.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libdelegation_la-FileRecordBDB.Tpo $(DEPDIR)/libdelegation_la-FileRecordBDB.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='FileRecordBDB.cpp' object='libdelegation_la-FileRecordBDB.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -c -o libdelegation_la-FileRecordBDB.lo `test -f 'FileRecordBDB.cpp' || echo '$(srcdir)/'`FileRecordBDB.cpp 
libdelegation_la-FileRecordSQLite.lo: FileRecordSQLite.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -MT libdelegation_la-FileRecordSQLite.lo -MD -MP -MF $(DEPDIR)/libdelegation_la-FileRecordSQLite.Tpo -c -o libdelegation_la-FileRecordSQLite.lo `test -f 'FileRecordSQLite.cpp' || echo '$(srcdir)/'`FileRecordSQLite.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libdelegation_la-FileRecordSQLite.Tpo $(DEPDIR)/libdelegation_la-FileRecordSQLite.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='FileRecordSQLite.cpp' object='libdelegation_la-FileRecordSQLite.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -c -o libdelegation_la-FileRecordSQLite.lo `test -f 'FileRecordSQLite.cpp' || echo '$(srcdir)/'`FileRecordSQLite.cpp libdelegation_la-DelegationStore.lo: DelegationStore.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -MT libdelegation_la-DelegationStore.lo -MD -MP -MF $(DEPDIR)/libdelegation_la-DelegationStore.Tpo -c -o libdelegation_la-DelegationStore.lo `test -f 'DelegationStore.cpp' || echo '$(srcdir)/'`DelegationStore.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libdelegation_la-DelegationStore.Tpo $(DEPDIR)/libdelegation_la-DelegationStore.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DelegationStore.cpp' object='libdelegation_la-DelegationStore.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -c -o libdelegation_la-DelegationStore.lo `test -f 'DelegationStore.cpp' || echo '$(srcdir)/'`DelegationStore.cpp libdelegation_la-DelegationStores.lo: DelegationStores.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -MT libdelegation_la-DelegationStores.lo -MD -MP -MF $(DEPDIR)/libdelegation_la-DelegationStores.Tpo -c -o libdelegation_la-DelegationStores.lo `test -f 'DelegationStores.cpp' || echo '$(srcdir)/'`DelegationStores.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libdelegation_la-DelegationStores.Tpo $(DEPDIR)/libdelegation_la-DelegationStores.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='DelegationStores.cpp' object='libdelegation_la-DelegationStores.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) 
$(AM_CPPFLAGS) $(CPPFLAGS) $(libdelegation_la_CXXFLAGS) $(CXXFLAGS) -c -o libdelegation_la-DelegationStores.lo `test -f 'DelegationStores.cpp' || echo '$(srcdir)/'`DelegationStores.cpp mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-noinstLTLIBRARIES cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/uid.cpp0000644000000000000000000000013214152153376024372 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.446563142 nordugrid-arc-6.14.0/src/services/a-rex/delegation/uid.cpp0000644000175000002070000000110314152153376024352 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #ifdef HAVE_STDINT_H #include #endif #include #include "uid.h" namespace ARex { std::string rand_uid64(void) { static unsigned int cnt; struct timeval t; gettimeofday(&t,NULL); uint64_t id = (((uint64_t)((cnt++) & 0xffff)) << 48) | (((uint64_t)(t.tv_sec & 0xffff)) << 32) | (((uint64_t)(t.tv_usec & 0xffff)) << 16) | (((uint64_t)(rand() & 0xffff)) << 0); return Arc::inttostr(id,16,16); } } // namespace ARex nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/FileRecordSQLite.h0000644000000000000000000000013214152153376026356 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.454563262 nordugrid-arc-6.14.0/src/services/a-rex/delegation/FileRecordSQLite.h0000644000175000002070000000552014152153376026345 0ustar00mockbuildmock00000000000000#ifndef __ARC_DELEGATION_FILERECORDSQLITE_H__ #define __ARC_DELEGATION_FILERECORDSQLITE_H__ #include #include #include #include #include "FileRecord.h" namespace ARex { class FileRecordSQLite: public FileRecord { private: Glib::Mutex lock_; // TODO: use DB locking sqlite3* db_; int sqlite3_exec_nobusy(const char *sql, int (*callback)(void*,int,char**,char**), void *arg, char **errmsg); bool dberr(const char* s, int err); bool open(bool create); void close(void); bool verify(void); public: class Iterator: public FileRecord::Iterator { friend class FileRecordSQLite; private: Iterator(const Iterator&); // disabled constructor Iterator(FileRecordSQLite& frec); sqlite3_int64 rowid_; public: ~Iterator(void); virtual Iterator& operator++(void); virtual Iterator& operator--(void); virtual void suspend(void); virtual bool resume(void); virtual operator bool(void) { return (rowid_ != -1); }; virtual bool operator!(void) { return (rowid_ == -1); }; }; friend class FileRecordSQLite::Iterator; FileRecordSQLite(const std::string& base, bool create = true); virtual ~FileRecordSQLite(void); virtual Iterator* NewIterator(void) { return new Iterator(*this); }; virtual bool Recover(void); virtual std::string Add(std::string& id, const std::string& owner, const std::list& meta); virtual bool Add(const std::string& uid, const std::string& id, const std::string& owner, const std::list& meta); virtual std::string Find(const std::string& id, const std::string& owner, std::list& meta); virtual bool Modify(const std::string& id, const std::string& owner, const std::list& meta); virtual bool Remove(const std::string& id, const std::string& owner); // Assign specified credential ids specified lock lock_id virtual bool AddLock(const std::string& lock_id, const std::list& ids, const std::string& owner); // Reomove lock lock_id from all associated credentials virtual bool RemoveLock(const std::string& lock_id); // Reomove lock lock_id from all associated credentials and store // identifiers of associated credentials into ids virtual bool RemoveLock(const std::string& lock_id, std::list >& ids); // Fills locks with all known lock ids. 
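/* Added summary (drawn from FileRecord and DelegationStore usage elsewhere in this directory): a lock id groups one or more credential ids via AddLock(); Remove() refuses to delete a credential while any lock still references it; DelegationStore::ReleaseCred() later drops the lock and may optionally touch or remove the credentials it covered. */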
virtual bool ListLocks(std::list& locks); // Fills locks with all lock ids associated with specified credential id virtual bool ListLocks(const std::string& id, const std::string& owner, std::list& locks); // Fills ids with identifiers of credentials locked by specified lock_id lock virtual bool ListLocked(const std::string& lock_id, std::list >& ids); }; } // namespace ARex #endif // __ARC_DELEGATION_FiLERECORDSQLITE_H__ nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/DelegationStores.cpp0000644000000000000000000000013214152153376027064 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.450563202 nordugrid-arc-6.14.0/src/services/a-rex/delegation/DelegationStores.cpp0000644000175000002070000000324214152153376027052 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include "DelegationStore.h" #include "DelegationStores.h" namespace ARex { DelegationStores::DelegationStores(DelegationStore::DbType db_type):db_type_(db_type) { } DelegationStores::~DelegationStores(void) { Glib::Mutex::Lock lock(lock_); for(std::map::iterator i = stores_.begin(); i != stores_.end(); ++i) { delete i->second; } } DelegationStore& DelegationStores::operator[](const std::string& path) { Glib::Mutex::Lock lock(lock_); std::map::iterator i = stores_.find(path); if(i != stores_.end()) return *(i->second); DelegationStore* store = new DelegationStore(path,db_type_); stores_.insert(std::pair(path,store)); return *store; } bool DelegationStores::MatchNamespace(const Arc::SOAPEnvelope& in) { return Arc::DelegationContainerSOAP().MatchNamespace(in); } bool DelegationStores::Process(const std::string& path,const Arc::SOAPEnvelope& in,Arc::SOAPEnvelope& out,const std::string& client,std::string& credentials) { return operator[](path).Process(credentials,in,out,client); } bool DelegationStores::GetRequest(const std::string& path,std::string& id,const std::string& client,std::string& request) { return operator[](path).GetRequest(id,client,request); } bool DelegationStores::PutDeleg(const std::string& path,const std::string& id,const std::string& client,const std::string& credentials) { return operator[](path).PutDeleg(id,client,credentials); } } // namespace ARex nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/DelegationStores.h0000644000000000000000000000013214152153376026531 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.455563277 nordugrid-arc-6.14.0/src/services/a-rex/delegation/DelegationStores.h0000644000175000002070000000400714152153376026517 0ustar00mockbuildmock00000000000000#ifndef __ARC_DELEGATION_STORES_H__ #define __ARC_DELEGATION_STORES_H__ #include #include #include #include #include #include "DelegationStore.h" namespace ARex { /// Set of service storing delegated credentials class DelegationStores { private: Glib::Mutex lock_; std::map stores_; DelegationStore::DbType db_type_; DelegationStores(const DelegationStores&) { }; public: DelegationStores(DelegationStore::DbType db_type = DelegationStore::DbSQLite); ~DelegationStores(void); void SetDbType(DelegationStore::DbType db_type) { db_type_ = db_type; }; /// Returns or creates delegation storage associated with 'path'. DelegationStore& operator[](const std::string& path); /// Check if SOAP request 'in' can be handled by this implementation. bool MatchNamespace(const Arc::SOAPEnvelope& in); /// Processes SOAP request 'in' using delegation storage associated with 'path'. 
/// Response is filled into 'out'. The 'client' is identifier of requestor /// used by service internally to recognize owner of stored credentials. /// If operation produces credentials token it is returned in 'credentials'. /// If operation is successful returns true. bool Process(const std::string& path,const Arc::SOAPEnvelope& in,Arc::SOAPEnvelope& out,const std::string& client,std::string& credentials); /// Provides delegation request from storage 'path' for cpecified 'id' and 'client'. If 'id' is empty /// then new storage slot is created and its identifier stored in 'id'. bool GetRequest(const std::string& path,std::string& id,const std::string& client,std::string& request); /// Stores delegated credentials corresponding to delegation request obtained by call to GetRequest(). /// Only public part is expected in 'credentials'. bool PutDeleg(const std::string& path,const std::string& id,const std::string& client,const std::string& credentials); }; } // namespace ARex #endif // __ARC_DELEGATION_STORE_H__ nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/DelegationStore.h0000644000000000000000000000013214152153376026346 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.455563277 nordugrid-arc-6.14.0/src/services/a-rex/delegation/DelegationStore.h0000644000175000002070000001264714152153376026345 0ustar00mockbuildmock00000000000000#ifndef __ARC_DELEGATION_STORE_H__ #define __ARC_DELEGATION_STORE_H__ #include #include #include #include #include #include "FileRecord.h" namespace ARex { class DelegationStore: public Arc::DelegationContainerSOAP { private: class Consumer { public: std::string id; std::string client; std::string path; Consumer(const std::string& id_, const std::string& client_, const std::string& path_): id(id_),client(client_),path(path_) { }; }; Glib::Mutex lock_; Glib::Mutex check_lock_; FileRecord* fstore_; std::map acquired_; unsigned int expiration_; unsigned int maxrecords_; unsigned int mtimeout_; FileRecord::Iterator* mrec_; Arc::Logger logger_; public: enum DbType { DbBerkeley, DbSQLite }; DelegationStore(const std::string& base, DbType db, bool allow_recover = true); ~DelegationStore(void); operator bool(void) { return ((bool)fstore_ && (bool)*fstore_); }; bool operator!(void) { return !((bool)fstore_ && (bool)*fstore_); }; /** Returns description of last error */ std::string Error(void) { return fstore_?fstore_->Error():std::string(""); }; /** Sets expiration time for unlocked credentials */ void Expiration(unsigned int v = 0) { expiration_ = v; }; /** Sets max number of credentials to store */ void MaxRecords(unsigned int v = 0) { maxrecords_ = v; }; void CheckTimeout(unsigned int v = 0) { mtimeout_ = v; }; /** Create a slot for credential storing and return associated delegation consumer. The consumer object must be release with ReleaseConsumer/RemoveConsumer */ virtual Arc::DelegationConsumerSOAP* AddConsumer(std::string& id,const std::string& client); /** Find existing delegation slot and create delegation consumer for it. 
The consumer object must be released with ReleaseConsumer/RemoveConsumer */ virtual Arc::DelegationConsumerSOAP* FindConsumer(const std::string& id,const std::string& client); /** Store credentials into slot associated with specified consumer object */ virtual bool TouchConsumer(Arc::DelegationConsumerSOAP* c,const std::string& credentials); /** Read credentials stored in slot associated with specified consumer object */ virtual bool QueryConsumer(Arc::DelegationConsumerSOAP* c,std::string& credentials); /** Release consumer object but keep credentials store slot */ virtual void ReleaseConsumer(Arc::DelegationConsumerSOAP* c); /** Release consumer object and delete associated credentials store slot */ virtual bool RemoveConsumer(Arc::DelegationConsumerSOAP* c); virtual void CheckConsumers(void); void PeriodicCheckConsumers(void); /** Store new credentials associated with client and assign id to it */ bool AddCred(std::string& id, const std::string& client, const std::string& credentials); /** Store/update credentials with specified id and associated with client */ bool PutCred(const std::string& id, const std::string& client, const std::string& credentials); /** Returns path to file containing credential with specified id and client */ std::string FindCred(const std::string& id,const std::string& client); /** Retrieves credentials with specified id and associated with client */ bool GetCred(const std::string& id, const std::string& client, std::string& credentials); /** Retrieves locks associated with specified id and client */ bool GetLocks(const std::string& id, const std::string& client, std::list<std::string>& lock_ids); /** Retrieves all known locks */ bool GetLocks(std::list<std::string>& lock_ids); /** Returns credential ids associated with specific client */ bool std::list<std::string> ListCredIDs(const std::string& client); /** Returns all credential ids (1st) along with their client ids (2nd) */ std::list<std::pair<std::string,std::string> > ListCredIDs(void); /** Locks credentials, also associating them with specific lock identifier */ bool LockCred(const std::string& lock_id, const std::list<std::string>& ids,const std::string& client); /** Releases lock set by previous call to LockCred, identified by the associated lock id. Optionally it can update the credentials usage timestamp and force removal of the credentials from storage if they are not locked anymore. */ bool ReleaseCred(const std::string& lock_id, bool touch = false, bool remove = false); /** Returns credential ids locked by specific lock id and associated with specified client */ std::list<std::string> ListLockedCredIDs(const std::string& lock_id, const std::string& client); /** Returns credential ids locked by specific lock id */ std::list<std::pair<std::string,std::string> > ListLockedCredIDs(const std::string& lock_id); /** Provides delegation request for specified 'id' and 'client'. If 'id' is empty then new storage slot is created and its identifier stored in 'id'. */ bool GetRequest(std::string& id,const std::string& client,std::string& request); /** Stores delegated credentials corresponding to delegation request obtained by call to GetRequest(). Only public part is expected in 'credentials'. 
*/ bool PutDeleg(const std::string& id,const std::string& client,const std::string& credentials); /** Retrieves public part of credentials with specified id and associated with client */ bool GetDeleg(const std::string& id, const std::string& client, std::string& credentials); }; } // namespace ARex #endif // __ARC_DELEGATION_STORE_H__ nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/uid.h0000644000000000000000000000013214152153376024037 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.451563217 nordugrid-arc-6.14.0/src/services/a-rex/delegation/uid.h0000644000175000002070000000013114152153376024017 0ustar00mockbuildmock00000000000000#include namespace ARex { std::string rand_uid64(void); } // namespace ARex nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/FileRecord.h0000644000000000000000000000013214152153376025274 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.452563232 nordugrid-arc-6.14.0/src/services/a-rex/delegation/FileRecord.h0000644000175000002070000000737214152153376025272 0ustar00mockbuildmock00000000000000#ifndef __ARC_DELEGATION_FILERECORD_H__ #define __ARC_DELEGATION_FILERECORD_H__ #include #include namespace ARex { class FileRecord { protected: std::string basepath_; int error_num_; std::string error_str_; bool valid_; std::string uid_to_path(const std::string& uid); bool make_file(const std::string& uid); bool remove_file(const std::string& uid); public: class Iterator { private: Iterator(const Iterator&); // disabled copy constructor protected: Iterator(FileRecord& frec):frec_(frec) {}; FileRecord& frec_; std::string uid_; std::string id_; std::string owner_; std::list meta_; public: virtual ~Iterator(void) {}; virtual Iterator& operator++(void) = 0; virtual Iterator& operator--(void) = 0; virtual void suspend(void) = 0; virtual bool resume(void) = 0; virtual operator bool(void) = 0; virtual bool operator!(void) = 0; const std::string& uid(void) const { return uid_; }; const std::string& id(void) const { return id_; }; const std::string& owner(void) const { return owner_; }; const std::list& meta(void) const { return meta_; }; const std::string path(void) const { return frec_.uid_to_path(uid_); }; }; friend class FileRecord::Iterator; FileRecord(const std::string& base, bool create = true): basepath_(base), error_num_(0), valid_(false) {}; virtual ~FileRecord(void) {}; operator bool(void) { return valid_; }; bool operator!(void) { return !valid_; }; /// Returns textual description of last error. std::string Error(void) { return error_str_; }; /// Obtain an iterator for walking through existing credentials slots. virtual Iterator* NewIterator(void) = 0; virtual bool Recover(void) = 0; /// Adds new slot for storing credentials including generation of uid, /// assignment of id (if empty) and creation of file for storing credentials. virtual std::string Add(std::string& id, const std::string& owner, const std::list& meta) = 0; /// Adds only record in database (to be used for database management only). virtual bool Add(const std::string& uid, const std::string& id, const std::string& owner, const std::list& meta) = 0; /// Obtains path to stored credentials. virtual std::string Find(const std::string& id, const std::string& owner, std::list& meta) = 0; /// Modifies existing entry in database with new meta values. 
virtual bool Modify(const std::string& id, const std::string& owner, const std::list& meta) = 0; /// Fully removes credentials slot including file which stores credentials. virtual bool Remove(const std::string& id, const std::string& owner) = 0; // Assign specified credential ids specified lock lock_id virtual bool AddLock(const std::string& lock_id, const std::list& ids, const std::string& owner) = 0; // Remove lock lock_id from all associated credentials virtual bool RemoveLock(const std::string& lock_id) = 0; // Reomve lock lock_id from all associated credentials and store // identifiers of associated credentials into ids virtual bool RemoveLock(const std::string& lock_id, std::list >& ids) = 0; // Fills locks with all known lock ids. virtual bool ListLocks(std::list& locks) = 0; // Fills locks with all lock ids associated with specified credential id virtual bool ListLocks(const std::string& id, const std::string& owner, std::list& locks) = 0; // Fills ids with identifiers of credentials locked by specified lock_id lock virtual bool ListLocked(const std::string& lock_id, std::list >& ids) = 0; }; } // namespace ARex #endif // __ARC_DELEGATION_FiLERECORD_H__ nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/DelegationStore.cpp0000644000000000000000000000013214152153376026701 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.450563202 nordugrid-arc-6.14.0/src/services/a-rex/delegation/DelegationStore.cpp0000644000175000002070000004002514152153376026667 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #define DELEGATION_USES_SQLITE 1 #include "FileRecordSQLite.h" #include "FileRecordBDB.h" #include "DelegationStore.h" namespace ARex { DelegationStore::DelegationStore(const std::string& base, DbType db, bool allow_recover): logger_(Arc::Logger::rootLogger, "Delegation Storage") { fstore_ = NULL; expiration_ = 0; maxrecords_ = 0; mtimeout_ = 0; mrec_ = NULL; switch(db) { case DbBerkeley: fstore_ = new FileRecordBDB(base, allow_recover); break; case DbSQLite: fstore_ = new FileRecordSQLite(base, allow_recover); break; default: failure_ = "Unsupported database type requested for delegation storage."; logger_.msg(Arc::ERROR,"%s",failure_); return; }; if(!*fstore_) { failure_ = "Failed to initialize storage. " + fstore_->Error(); logger_.msg(Arc::WARNING,"%s",failure_); if(allow_recover) { // Database creation failed. Try recovery. if(!fstore_->Recover()) { failure_ = "Failed to recover storage. " + fstore_->Error(); logger_.msg(Arc::WARNING,"%s",failure_); logger_.msg(Arc::WARNING,"Wiping and re-creating whole storage"); delete fstore_; fstore_ = NULL; // Full recreation of database. Delete everything. Glib::Dir dir(base); std::string name; while ((name = dir.read_name()) != "") { std::string fullpath(base); fullpath += G_DIR_SEPARATOR_S + name; struct stat st; if (::lstat(fullpath.c_str(), &st) == 0) { if(S_ISDIR(st.st_mode)) { Arc::DirDelete(fullpath.c_str()); } else { Arc::FileDelete(fullpath.c_str()); }; }; }; switch(db) { case DbBerkeley: fstore_ = new FileRecordBDB(base); break; case DbSQLite: fstore_ = new FileRecordSQLite(base); break; default: // Must not happen - already sorted out above. return; }; if(!*fstore_) { // Failure failure_ = "Failed to re-create storage. " + fstore_->Error(); logger_.msg(Arc::WARNING,"%s",failure_); } else { // Database recreated. 
}; }; } else { logger_.msg(Arc::ERROR,"%s",failure_); }; }; // TODO: Do some cleaning on startup } DelegationStore::~DelegationStore(void) { // BDB objects must be destroyed because // somehow BDB does not understand that process // already died and keeps locks forewer. delete mrec_; delete fstore_; /* Following code is not executed because there must be no active consumers when store being destroyed. It is probably safer to leave hanging consumers than to destroy them. Anyway by design this destructor is supposed to be called only when applications exits. while(acquired_.size() > 0) { std::map::iterator i = acquired_.begin(); delete i->first; acquired_.erase(i); }; */ } Arc::DelegationConsumerSOAP* DelegationStore::AddConsumer(std::string& id,const std::string& client) { std::string path = fstore_->Add(id,client,std::list()); if(path.empty()) { failure_ = "Local error - failed to create slot for delegation. "+fstore_->Error(); return NULL; } Arc::DelegationConsumerSOAP* cs = new Arc::DelegationConsumerSOAP(); std::string key; cs->Backup(key); if(!key.empty()) { if(!Arc::FileCreate(path,key,0,0,S_IRUSR|S_IWUSR)) { fstore_->Remove(id,client); delete cs; cs = NULL; failure_ = "Local error - failed to store credentials"; return NULL; }; }; Glib::Mutex::Lock lock(lock_); acquired_.insert(std::pair(cs,Consumer(id,client,path))); return cs; } static const char* key_start_tag("-----BEGIN RSA PRIVATE KEY-----"); static const char* key_end_tag("-----END RSA PRIVATE KEY-----"); static std::string extract_key(const std::string& proxy) { std::string key; std::string::size_type start = proxy.find(key_start_tag); if(start != std::string::npos) { std::string::size_type end = proxy.find(key_end_tag,start+strlen(key_start_tag)); if(end != std::string::npos) { return proxy.substr(start,end-start+strlen(key_end_tag)); }; }; return ""; } static void remove_key(std::string& proxy) { while(true) { std::string::size_type start = proxy.find(key_start_tag); if(start == std::string::npos) break; std::string::size_type end = proxy.find(key_end_tag,start+strlen(key_start_tag)); if(end == std::string::npos) end = proxy.length(); proxy.erase(start,end-start+strlen(key_end_tag)); }; } static bool compare_no_newline(const std::string& str1, const std::string& str2) { std::string::size_type p1 = 0; std::string::size_type p2 = 0; for(;;) { if((p1 < str1.length()) && ((str1[p1] == '\r') || (str1[p1] == '\n'))) { ++p1; continue; }; if((p2 < str2.length()) && ((str2[p2] == '\r') || (str2[p2] == '\n'))) { ++p2; continue; }; if(p1 >= str1.length()) break; if(p2 >= str2.length()) break; if(str1[p1] != str2[p2]) break; ++p1; ++p2; }; return ((p1 >= str1.length()) && (p2 >= str2.length())); } Arc::DelegationConsumerSOAP* DelegationStore::FindConsumer(const std::string& id,const std::string& client) { std::list meta; std::string path = fstore_->Find(id,client,meta); if(path.empty()) { failure_ = "Identifier not found for client. 
"+fstore_->Error(); return NULL; }; std::string content; if(!Arc::FileRead(path,content)) { failure_ = "Local error - failed to read credentials"; return NULL; }; Arc::DelegationConsumerSOAP* cs = new Arc::DelegationConsumerSOAP(); if(!content.empty()) { std::string key = extract_key(content); if(!key.empty()) { cs->Restore(key); }; }; Glib::Mutex::Lock lock(lock_); acquired_.insert(std::pair(cs,Consumer(id,client,path))); return cs; } bool DelegationStore::TouchConsumer(Arc::DelegationConsumerSOAP* c,const std::string& credentials) { if(!c) return false; Glib::Mutex::Lock lock(lock_); std::map::iterator i = acquired_.find(c); if(i == acquired_.end()) { failure_ = "Delegation not found"; return false; }; if(!credentials.empty()) { if(!Arc::FileCreate(i->second.path,credentials,0,0,S_IRUSR|S_IWUSR)) { failure_ = "Local error - failed to create storage for delegation"; logger_.msg(Arc::WARNING,"DelegationStore: TouchConsumer failed to create file %s",i->second.path); return false; }; }; return true; } bool DelegationStore::QueryConsumer(Arc::DelegationConsumerSOAP* c,std::string& credentials) { if(!c) return false; Glib::Mutex::Lock lock(lock_); std::map::iterator i = acquired_.find(c); if(i == acquired_.end()) { failure_ = "Delegation not found"; return false; }; Arc::FileRead(i->second.path,credentials); return true; } void DelegationStore::ReleaseConsumer(Arc::DelegationConsumerSOAP* c) { if(!c) return; Glib::Mutex::Lock lock(lock_); std::map::iterator i = acquired_.find(c); if(i == acquired_.end()) return; // ???? // Check if key changed. If yes then store only key. // TODO: optimize std::string newkey; i->first->Backup(newkey); if(!newkey.empty()) { std::string oldkey; std::string content; Arc::FileRead(i->second.path,content); if(!content.empty()) oldkey = extract_key(content); if(!compare_no_newline(newkey,oldkey)) { Arc::FileCreate(i->second.path,newkey,0,0,S_IRUSR|S_IWUSR); }; }; delete i->first; acquired_.erase(i); } bool DelegationStore::RemoveConsumer(Arc::DelegationConsumerSOAP* c) { if(!c) return false; Glib::Mutex::Lock lock(lock_); std::map::iterator i = acquired_.find(c); if(i == acquired_.end()) return false; // ???? bool r = fstore_->Remove(i->second.id,i->second.client); // TODO: Handle failure delete i->first; acquired_.erase(i); return r; } void DelegationStore::CheckConsumers(void) { // Not doing any cleaning ocasionally to avoid delegation response delay. // Instead PeriodicCheckConsumers() is called to do periodic cleaning. } void DelegationStore::PeriodicCheckConsumers(void) { // Go through stored credentials // Remove outdated records (those with locks won't be removed) if(expiration_) { time_t start = ::time(NULL); Glib::Mutex::Lock check_lock(lock_); if(mrec_ != NULL) { if(!mrec_->resume()) { logger_.msg(Arc::WARNING,"DelegationStore: PeriodicCheckConsumers failed to resume iterator"); delete mrec_; mrec_ = NULL; }; }; if(mrec_ == NULL) { mrec_ = fstore_->NewIterator(); }; for(;(bool)(*mrec_);++(*mrec_)) { if(mtimeout_ && (((unsigned int)(::time(NULL) - start)) > mtimeout_)) { mrec_->suspend(); return; } struct stat st; if(::stat(mrec_->path().c_str(),&st) == 0) { if(((unsigned int)(::time(NULL) - st.st_mtime)) > expiration_) { if(fstore_->Remove(mrec_->id(),mrec_->owner())) { } else { // It is ok to fail here because Remove checks for delegation locks. // So reporting only for debuging purposes. 
logger_.msg(Arc::DEBUG,"DelegationStore: PeriodicCheckConsumers failed to remove old delegation %s - %s", mrec_->uid(), fstore_->Error()); }; }; }; }; delete mrec_; mrec_ = NULL; }; // TODO: Remove records over threshold return; } bool DelegationStore::AddCred(std::string& id, const std::string& client, const std::string& credentials) { std::string path = fstore_->Add(id,client,std::list()); if(path.empty()) { failure_ = "Local error - failed to create slot for delegation. "+fstore_->Error(); return false; } if(!Arc::FileCreate(path,credentials,0,0,S_IRUSR|S_IWUSR)) { fstore_->Remove(id,client); failure_ = "Local error - failed to create storage for delegation"; logger_.msg(Arc::WARNING,"DelegationStore: TouchConsumer failed to create file %s",path); return false; }; return true; } bool DelegationStore::PutCred(const std::string& id, const std::string& client, const std::string& credentials) { std::list meta; std::string path = fstore_->Find(id,client,meta); if(path.empty()) { failure_ = "Local error - failed to find specified credentials. "+fstore_->Error(); return false; } if(!Arc::FileCreate(path,credentials,0,0,S_IRUSR|S_IWUSR)) { failure_ = "Local error - failed to store delegation"; return false; }; return true; } std::string DelegationStore::FindCred(const std::string& id,const std::string& client) { std::list meta; return fstore_->Find(id,client,meta); } bool DelegationStore::GetCred(const std::string& id, const std::string& client, std::string& credentials) { std::list meta; std::string path = fstore_->Find(id,client,meta); if(path.empty()) { failure_ = "Local error - failed to find specified credentials. "+fstore_->Error(); return false; } std::string content; if(!Arc::FileRead(path,credentials)) { failure_ = "Local error - failed to read credentials"; return false; }; return true; } bool DelegationStore::GetLocks(const std::string& id, const std::string& client, std::list& lock_ids) { return fstore_->ListLocks(id, client, lock_ids); } bool DelegationStore::GetLocks(std::list& lock_ids) { return fstore_->ListLocks(lock_ids); } std::list DelegationStore::ListCredIDs(const std::string& client) { std::list res; FileRecord::Iterator& rec = *(fstore_->NewIterator()); for(;(bool)rec;++rec) { if(rec.owner() == client) res.push_back(rec.id()); }; delete &rec; return res; } std::list > DelegationStore::ListLockedCredIDs(const std::string& lock_id) { std::list > ids; (void)fstore_->ListLocked(lock_id, ids); return ids; } std::list DelegationStore::ListLockedCredIDs(const std::string& lock_id, const std::string& client) { std::list res; std::list > ids; if(!fstore_->ListLocked(lock_id, ids)) return res; for(std::list >::iterator id = ids.begin(); id != ids.end();++id) { if(id->second == client) res.push_back(id->first); } return res; } std::list > DelegationStore::ListCredIDs(void) { std::list > res; FileRecord::Iterator& rec = *(fstore_->NewIterator()); for(;(bool)rec;++rec) { res.push_back(std::pair(rec.id(),rec.owner())); }; delete &rec; return res; } bool DelegationStore::LockCred(const std::string& lock_id, const std::list& ids,const std::string& client) { if(!fstore_->AddLock(lock_id,ids,client)) { failure_ = "Local error - failed set lock for delegation. 
"+fstore_->Error(); return false; }; return true; } bool DelegationStore::ReleaseCred(const std::string& lock_id, bool touch, bool remove) { if((!touch) && (!remove)) return fstore_->RemoveLock(lock_id); std::list > ids; if(!fstore_->RemoveLock(lock_id,ids)) return false; for(std::list >::iterator i = ids.begin(); i != ids.end(); ++i) { if(touch) { std::list meta; std::string path = fstore_->Find(i->first,i->second,meta); // TODO: in a future use meta for storing times if(!path.empty()) ::utime(path.c_str(),NULL); }; if(remove) fstore_->Remove(i->first,i->second); }; return true; } bool DelegationStore::GetRequest(std::string& id,const std::string& client,std::string& request) { Arc::DelegationConsumerSOAP* consumer = NULL; if(!id.empty()) { consumer = FindConsumer(id,client); }; if(consumer == NULL) { consumer = AddConsumer(id,client); }; if(consumer == NULL) return false; if(id.empty()) { ReleaseConsumer(consumer); return false; }; bool result = consumer->Request(request); ReleaseConsumer(consumer); return result; } bool DelegationStore::PutDeleg(const std::string& id,const std::string& client,const std::string& credentials) { Arc::DelegationConsumerSOAP* consumer = FindConsumer(id,client); if(consumer == NULL) return false; std::string content(credentials); if(!consumer->Acquire(content)) { ReleaseConsumer(consumer); return false; }; if(!TouchConsumer(consumer,content)) { ReleaseConsumer(consumer); return false; }; ReleaseConsumer(consumer); return true; } bool DelegationStore::GetDeleg(const std::string& id, const std::string& client, std::string& credentials) { std::string creds; if(!GetCred(id, client, credentials)) return false; remove_key(credentials); return true; } } // namespace ARex nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/FileRecordSQLite.cpp0000644000000000000000000000013214152153376026711 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.449563187 nordugrid-arc-6.14.0/src/services/a-rex/delegation/FileRecordSQLite.cpp0000644000175000002070000004652014152153376026705 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "uid.h" #include "../SQLhelpers.h" #include "FileRecordSQLite.h" namespace ARex { #define FR_DB_NAME "list" bool FileRecordSQLite::dberr(const char* s, int err) { if(err == SQLITE_OK) return true; error_num_ = err; #ifdef HAVE_SQLITE3_ERRSTR error_str_ = std::string(s)+": "+sqlite3_errstr(err); #else error_str_ = std::string(s)+": error code "+Arc::tostring(err); #endif return false; } FileRecordSQLite::FileRecordSQLite(const std::string& base, bool create): FileRecord(base, create), db_(NULL) { valid_ = open(create); } bool FileRecordSQLite::verify(void) { // Not implemented and probably not needed return true; } FileRecordSQLite::~FileRecordSQLite(void) { close(); } int FileRecordSQLite::sqlite3_exec_nobusy(const char *sql, int (*callback)(void*,int,char**,char**), void *arg, char **errmsg) { int err; while((err = sqlite3_exec(db_, sql, callback, arg, errmsg)) == SQLITE_BUSY) { // Access to database is designed in such way that it should not block for long time. // So it should be safe to simply wait for lock to be released without any timeout. 
struct timespec delay = { 0, 10000000 }; // 0.01s - should be enough for most cases (void)::nanosleep(&delay, NULL); }; return err; } bool FileRecordSQLite::open(bool create) { std::string dbpath = basepath_ + G_DIR_SEPARATOR_S + FR_DB_NAME; if(db_ != NULL) return true; // already open int flags = SQLITE_OPEN_READWRITE; // it will open read-only if access is protected if(create) { flags |= SQLITE_OPEN_CREATE; }; int err; while((err = sqlite3_open_v2(dbpath.c_str(), &db_, flags, NULL)) == SQLITE_BUSY) { // In case something prevents the database from opening right now - retry if(db_) (void)sqlite3_close(db_); db_ = NULL; struct timespec delay = { 0, 10000000 }; // 0.01s - should be enough for most cases (void)::nanosleep(&delay, NULL); }; if(!dberr("Error opening database", err)) { if(db_) (void)sqlite3_close(db_); db_ = NULL; return false; }; if(create) { if(!dberr("Error creating table rec", sqlite3_exec_nobusy("CREATE TABLE IF NOT EXISTS rec(id, owner, uid, meta, UNIQUE(id, owner), UNIQUE(uid))", NULL, NULL, NULL))) { (void)sqlite3_close(db_); // todo: handle error db_ = NULL; return false; }; if(!dberr("Error creating table lock", sqlite3_exec_nobusy("CREATE TABLE IF NOT EXISTS lock(lockid, uid)", NULL, NULL, NULL))) { (void)sqlite3_close(db_); // todo: handle error db_ = NULL; return false; }; if(!dberr("Error creating index lockid", sqlite3_exec_nobusy("CREATE INDEX IF NOT EXISTS lockid ON lock (lockid)", NULL, NULL, NULL))) { (void)sqlite3_close(db_); // todo: handle error db_ = NULL; return false; }; if(!dberr("Error creating index uid", sqlite3_exec_nobusy("CREATE INDEX IF NOT EXISTS uid ON lock (uid)", NULL, NULL, NULL))) { (void)sqlite3_close(db_); // todo: handle error db_ = NULL; return false; }; } else { // SQLite opens the database in a lazy way. But we still want to know if it is a good database. if(!dberr("Error checking database", sqlite3_exec_nobusy("PRAGMA schema_version;", NULL, NULL, NULL))) { (void)sqlite3_close(db_); // todo: handle error db_ = NULL; return false; }; }; return true; } void FileRecordSQLite::close(void) { valid_ = false; if(db_) { (void)sqlite3_close(db_); // todo: handle error db_ = NULL; }; } /* Serializes meta values as a '#'-terminated sequence so that parse_strings() can recover every element. */ void store_strings(const std::list<std::string>& strs, std::string& buf) { for(std::list<std::string>::const_iterator str = strs.begin(); str != strs.end(); ++str) { buf += sql_escape(*str); buf += '#'; }; } static void parse_strings(std::list<std::string>& strs, const char* buf) { if(!buf || (*buf == '\0')) return; const char* sep = std::strchr(buf, '#'); while(sep) { strs.push_back(sql_unescape(std::string(buf,sep-buf))); buf = sep+1; sep = std::strchr(buf, '#'); }; } bool FileRecordSQLite::Recover(void) { Glib::Mutex::Lock lock(lock_); // Real recovery not implemented yet. 
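/* Note: since real recovery is not implemented, this method simply closes the database and flags the store as invalid; the caller (the DelegationStore constructor above) then falls back to wiping the storage directory and re-creating it from scratch. */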
close(); error_num_ = -1; error_str_ = "Recovery not implemented yet."; return false; } struct FindCallbackRecArg { sqlite3_int64 rowid; std::string id; std::string owner; std::string uid; std::list meta; FindCallbackRecArg(): rowid(-1) {}; }; static int FindCallbackRec(void* arg, int colnum, char** texts, char** names) { for(int n = 0; n < colnum; ++n) { if(names[n] && texts[n]) { if((strcmp(names[n], "rowid") == 0) || (strcmp(names[n], "_rowid_") == 0)) { (void)Arc::stringto(texts[n], ((FindCallbackRecArg*)arg)->rowid); } else if(strcmp(names[n], "uid") == 0) { ((FindCallbackRecArg*)arg)->uid = texts[n]; } else if(strcmp(names[n], "id") == 0) { ((FindCallbackRecArg*)arg)->id = sql_unescape(texts[n]); } else if(strcmp(names[n], "owner") == 0) { ((FindCallbackRecArg*)arg)->owner = sql_unescape(texts[n]); } else if(strcmp(names[n], "meta") == 0) { parse_strings(((FindCallbackRecArg*)arg)->meta, texts[n]); }; }; }; return 0; } struct FindCallbackUidMetaArg { std::string& uid; std::list& meta; FindCallbackUidMetaArg(std::string& uid, std::list& meta): uid(uid), meta(meta) {}; }; static int FindCallbackUidMeta(void* arg, int colnum, char** texts, char** names) { for(int n = 0; n < colnum; ++n) { if(names[n] && texts[n]) { if(strcmp(names[n], "uid") == 0) { ((FindCallbackUidMetaArg*)arg)->uid = texts[n]; } else if(strcmp(names[n], "meta") == 0) { parse_strings(((FindCallbackUidMetaArg*)arg)->meta, texts[n]); }; }; }; return 0; } struct FindCallbackUidArg { std::string& uid; FindCallbackUidArg(std::string& uid): uid(uid) {}; }; static int FindCallbackUid(void* arg, int colnum, char** texts, char** names) { for(int n = 0; n < colnum; ++n) { if(names[n] && texts[n]) { if(strcmp(names[n], "uid") == 0) { ((FindCallbackUidMetaArg*)arg)->uid = texts[n]; }; }; }; return 0; } struct FindCallbackCountArg { int count; FindCallbackCountArg():count(0) {}; }; static int FindCallbackCount(void* arg, int colnum, char** texts, char** names) { ((FindCallbackCountArg*)arg)->count += 1; return 0; } struct FindCallbackIdOwnerArg { std::list< std::pair >& records; FindCallbackIdOwnerArg(std::list< std::pair >& recs): records(recs) {}; }; static int FindCallbackIdOwner(void* arg, int colnum, char** texts, char** names) { std::pair rec; for(int n = 0; n < colnum; ++n) { if(names[n] && texts[n]) { if(strcmp(names[n], "id") == 0) { rec.first = sql_unescape(texts[n]); } else if(strcmp(names[n], "owner") == 0) { rec.second = sql_unescape(texts[n]); }; }; }; if(!rec.first.empty()) ((FindCallbackIdOwnerArg*)arg)->records.push_back(rec); return 0; } struct FindCallbackLockArg { std::list< std::string >& records; FindCallbackLockArg(std::list< std::string >& recs): records(recs) {}; }; static int FindCallbackLock(void* arg, int colnum, char** texts, char** names) { for(int n = 0; n < colnum; ++n) { if(names[n] && texts[n]) { if(strcmp(names[n], "lockid") == 0) { std::string rec = sql_unescape(texts[n]); if(!rec.empty()) ((FindCallbackLockArg*)arg)->records.push_back(rec); }; }; }; return 0; } std::string FileRecordSQLite::Add(std::string& id, const std::string& owner, const std::list& meta) { if(!valid_) return ""; int uidtries = 10; // some sane number std::string uid; while(true) { if(!(uidtries--)) { error_str_ = "Out of tries adding record to database"; return ""; }; Glib::Mutex::Lock lock(lock_); uid = rand_uid64().substr(4); std::string metas; store_strings(meta, metas); std::string sqlcmd = "INSERT INTO rec(id, owner, uid, meta) VALUES ('"+ sql_escape(id.empty()?uid:id)+"', '"+ sql_escape(owner)+"', '"+uid+"', 
'"+metas+"')"; int dbres = sqlite3_exec_nobusy(sqlcmd.c_str(), NULL, NULL, NULL); if(dbres == SQLITE_CONSTRAINT) { // retry due to non-unique id uid.resize(0); continue; }; if(!dberr("Failed to add record to database", dbres)) { return ""; }; if(sqlite3_changes(db_) != 1) { error_str_ = "Failed to add record to database"; return ""; }; break; }; if(id.empty()) id = uid; make_file(uid); return uid_to_path(uid); } bool FileRecordSQLite::Add(const std::string& uid, const std::string& id, const std::string& owner, const std::list& meta) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); std::string metas; store_strings(meta, metas); std::string sqlcmd = "INSERT INTO rec(id, owner, uid, meta) VALUES ('"+ sql_escape(id.empty()?uid:id)+"', '"+ sql_escape(owner)+"', '"+uid+"', '"+metas+"')"; int dbres = sqlite3_exec_nobusy(sqlcmd.c_str(), NULL, NULL, NULL); if(!dberr("Failed to add record to database", dbres)) { return false; }; if(sqlite3_changes(db_) != 1) { error_str_ = "Failed to add record to database"; return false; }; return true; } std::string FileRecordSQLite::Find(const std::string& id, const std::string& owner, std::list& meta) { if(!valid_) return ""; Glib::Mutex::Lock lock(lock_); std::string sqlcmd = "SELECT uid, meta FROM rec WHERE ((id = '"+sql_escape(id)+"') AND (owner = '"+sql_escape(owner)+"'))"; std::string uid; FindCallbackUidMetaArg arg(uid, meta); if(!dberr("Failed to retrieve record from database",sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackUidMeta, &arg, NULL))) { return ""; }; if(uid.empty()) { error_str_ = "Failed to retrieve record from database"; return ""; }; return uid_to_path(uid); } bool FileRecordSQLite::Modify(const std::string& id, const std::string& owner, const std::list& meta) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); std::string metas; store_strings(meta, metas); std::string sqlcmd = "UPDATE rec SET meta = '"+metas+"' WHERE ((id = '"+sql_escape(id)+"') AND (owner = '"+sql_escape(owner)+"'))"; if(!dberr("Failed to update record in database",sqlite3_exec_nobusy(sqlcmd.c_str(), NULL, NULL, NULL))) { return false; }; if(sqlite3_changes(db_) < 1) { error_str_ = "Failed to find record in database"; return false; }; return true; } bool FileRecordSQLite::Remove(const std::string& id, const std::string& owner) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); std::string uid; { std::string sqlcmd = "SELECT uid FROM rec WHERE ((id = '"+sql_escape(id)+"') AND (owner = '"+sql_escape(owner)+"'))"; FindCallbackUidArg arg(uid); if(!dberr("Failed to retrieve record from database",sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackUid, &arg, NULL))) { return false; // No such record? 
}; }; if(uid.empty()) { error_str_ = "Record not found"; return false; // No such record }; { std::string sqlcmd = "SELECT uid FROM lock WHERE (uid = '"+uid+"')"; FindCallbackCountArg arg; if(!dberr("Failed to find locks in database",sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackCount, &arg, NULL))) { return false; }; if(arg.count > 0) { error_str_ = "Record has active locks"; return false; // have locks }; }; { std::string sqlcmd = "DELETE FROM rec WHERE (uid = '"+uid+"')"; if(!dberr("Failed to delete record in database",sqlite3_exec_nobusy(sqlcmd.c_str(), NULL, NULL, NULL))) { return false; }; if(sqlite3_changes(db_) < 1) { error_str_ = "Failed to delete record in database"; return false; // no such record }; }; remove_file(uid); return true; } bool FileRecordSQLite::AddLock(const std::string& lock_id, const std::list& ids, const std::string& owner) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); for(std::list::const_iterator id = ids.begin(); id != ids.end(); ++id) { std::string uid; { std::string sqlcmd = "SELECT uid FROM rec WHERE ((id = '"+sql_escape(*id)+"') AND (owner = '"+sql_escape(owner)+"'))"; FindCallbackUidArg arg(uid); if(!dberr("Failed to retrieve record from database",sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackUid, &arg, NULL))) { return false; // No such record? }; }; if(uid.empty()) { // No such record continue; }; std::string sqlcmd = "INSERT INTO lock(lockid, uid) VALUES ('"+sql_escape(lock_id)+"','"+uid+"')"; if(!dberr("addlock:put",sqlite3_exec_nobusy(sqlcmd.c_str(), NULL, NULL, NULL))) { return false; }; }; return true; } bool FileRecordSQLite::RemoveLock(const std::string& lock_id) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); // map lock to id,owner { std::string sqlcmd = "DELETE FROM lock WHERE (lockid = '"+sql_escape(lock_id)+"')"; if(!dberr("removelock:del",sqlite3_exec_nobusy(sqlcmd.c_str(), NULL, NULL, NULL))) { return false; }; if(sqlite3_changes(db_) < 1) { error_str_ = ""; return false; }; }; return true; } bool FileRecordSQLite::RemoveLock(const std::string& lock_id, std::list >& ids) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); // map lock to id,owner { std::string sqlcmd = "SELECT id,owner FROM rec WHERE uid IN (SELECT uid FROM lock WHERE (lockid = '"+sql_escape(lock_id)+"'))"; FindCallbackIdOwnerArg arg(ids); if(!dberr("removelock:get",sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackIdOwner, &arg, NULL))) { //return false; }; }; { std::string sqlcmd = "DELETE FROM lock WHERE (lockid = '"+sql_escape(lock_id)+"')"; if(!dberr("removelock:del",sqlite3_exec_nobusy(sqlcmd.c_str(), NULL, NULL, NULL))) { return false; }; if(sqlite3_changes(db_) < 1) { error_str_ = ""; return false; }; }; return true; } bool FileRecordSQLite::ListLocked(const std::string& lock_id, std::list >& ids) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); // map lock to id,owner { std::string sqlcmd = "SELECT id,owner FROM rec WHERE uid IN (SELECT uid FROM lock WHERE (lockid = '"+sql_escape(lock_id)+"'))"; FindCallbackIdOwnerArg arg(ids); if(!dberr("listlocked:get",sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackIdOwner, &arg, NULL))) { return false; }; }; //if(ids.empty()) return false; return true; } bool FileRecordSQLite::ListLocks(std::list& locks) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); { std::string sqlcmd = "SELECT lockid FROM lock"; FindCallbackLockArg arg(locks); if(!dberr("listlocks:get",sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackLock, &arg, NULL))) { return false; }; }; return true; } bool 
FileRecordSQLite::ListLocks(const std::string& id, const std::string& owner, std::list& locks) { if(!valid_) return false; Glib::Mutex::Lock lock(lock_); std::string uid; { std::string sqlcmd = "SELECT uid FROM rec WHERE ((id = '"+sql_escape(id)+"') AND (owner = '"+sql_escape(owner)+"'))"; FindCallbackUidArg arg(uid); if(!dberr("Failed to retrieve record from database",sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackUid, &arg, NULL))) { return false; // No such record? }; }; if(uid.empty()) { error_str_ = "Record not found"; return false; // No such record }; { std::string sqlcmd = "SELECT lockid FROM lock WHERE (uid = '"+uid+"')"; FindCallbackLockArg arg(locks); if(!dberr("listlocks:get",sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackLock, &arg, NULL))) { return false; }; }; return true; } FileRecordSQLite::Iterator::Iterator(FileRecordSQLite& frec):FileRecord::Iterator(frec) { rowid_ = -1; Glib::Mutex::Lock lock(frec.lock_); { std::string sqlcmd = "SELECT _rowid_,id,owner,uid,meta FROM rec ORDER BY _rowid_ LIMIT 1"; FindCallbackRecArg arg; if(!frec.dberr("listlocks:get",frec.sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackRec, &arg, NULL))) { return; }; if(arg.uid.empty()) { return; }; uid_ = arg.uid; id_ = arg.id; owner_ = arg.owner; meta_ = arg.meta; rowid_ = arg.rowid; }; } FileRecordSQLite::Iterator::~Iterator(void) { } FileRecordSQLite::Iterator& FileRecordSQLite::Iterator::operator++(void) { if(rowid_ == -1) return *this; FileRecordSQLite& frec((FileRecordSQLite&)frec_); Glib::Mutex::Lock lock(frec.lock_); { std::string sqlcmd = "SELECT _rowid_,id,owner,uid,meta FROM rec WHERE (_rowid_ > " + Arc::tostring(rowid_) + ") ORDER BY _rowid_ ASC LIMIT 1"; FindCallbackRecArg arg; if(!frec.dberr("listlocks:get",frec.sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackRec, &arg, NULL))) { rowid_ = -1; return *this; }; if(arg.uid.empty()) { rowid_ = -1; return *this; }; uid_ = arg.uid; id_ = arg.id; owner_ = arg.owner; meta_ = arg.meta; rowid_ = arg.rowid; }; return *this; } FileRecordSQLite::Iterator& FileRecordSQLite::Iterator::operator--(void) { if(rowid_ == -1) return *this; FileRecordSQLite& frec((FileRecordSQLite&)frec_); Glib::Mutex::Lock lock(frec.lock_); { std::string sqlcmd = "SELECT _rowid_,id,owner,uid,meta FROM rec WHERE (_rowid_ < " + Arc::tostring(rowid_) + ") ORDER BY _rowid_ DESC LIMIT 1"; FindCallbackRecArg arg; if(!frec.dberr("listlocks:get",frec.sqlite3_exec_nobusy(sqlcmd.c_str(), &FindCallbackRec, &arg, NULL))) { rowid_ = -1; return *this; }; if(arg.uid.empty()) { rowid_ = -1; return *this; }; uid_ = arg.uid; id_ = arg.id; owner_ = arg.owner; meta_ = arg.meta; rowid_ = arg.rowid; }; return *this; } void FileRecordSQLite::Iterator::suspend(void) { } bool FileRecordSQLite::Iterator::resume(void) { return true; } } // namespace ARex nordugrid-arc-6.14.0/src/services/a-rex/delegation/PaxHeaders.30264/FileRecordBDB.h0000644000000000000000000000013214152153376025604 xustar000000000000000030 mtime=1638455038.407646004 30 atime=1638455038.500647402 30 ctime=1638455099.453563247 nordugrid-arc-6.14.0/src/services/a-rex/delegation/FileRecordBDB.h0000644000175000002070000000563214152153376025577 0ustar00mockbuildmock00000000000000#ifndef __ARC_DELEGATION_FILERECORDBDB_H__ #define __ARC_DELEGATION_FILERECORDBDB_H__ #include #include #include #include #include "FileRecord.h" namespace ARex { class FileRecordBDB: public FileRecord { private: Glib::Mutex lock_; // TODO: use DB locking DbEnv* db_env_; Db* db_rec_; Db* db_lock_; Db* db_locked_; Db* db_link_; static int locked_callback(Db 
*, const Dbt *, const Dbt *, Dbt * result); static int lock_callback(Db *, const Dbt *, const Dbt *, Dbt * result); bool dberr(const char* s, int err); bool open(bool create); void close(void); bool verify(void); public: class Iterator: public FileRecord::Iterator { friend class FileRecordBDB; private: Dbc* cur_; Iterator(const Iterator&); // disabled constructor Iterator(FileRecordBDB& frec); public: ~Iterator(void); virtual Iterator& operator++(void); virtual Iterator& operator--(void); virtual void suspend(void); virtual bool resume(void); virtual operator bool(void) { return (cur_!=NULL); }; virtual bool operator!(void) { return (cur_==NULL); }; }; friend class FileRecordBDB::Iterator; FileRecordBDB(const std::string& base, bool create = true); virtual ~FileRecordBDB(void); virtual Iterator* NewIterator(void) { return new Iterator(*this); }; virtual bool Recover(void); virtual std::string Add(std::string& id, const std::string& owner, const std::list& meta); virtual bool Add(const std::string& uid, const std::string& id, const std::string& owner, const std::list& meta); virtual std::string Find(const std::string& id, const std::string& owner, std::list& meta); virtual bool Modify(const std::string& id, const std::string& owner, const std::list& meta); virtual bool Remove(const std::string& id, const std::string& owner); // Assign specified credential ids specified lock lock_id virtual bool AddLock(const std::string& lock_id, const std::list& ids, const std::string& owner); // Reomove lock lock_id from all associated credentials virtual bool RemoveLock(const std::string& lock_id); // Reomove lock lock_id from all associated credentials and store // identifiers of associated credentials into ids virtual bool RemoveLock(const std::string& lock_id, std::list >& ids); // Fills locks with all known lock ids. virtual bool ListLocks(std::list& locks); // Fills locks with all lock ids associated with specified credential id virtual bool ListLocks(const std::string& id, const std::string& owner, std::list& locks); // Fills ids with identifiers of credentials locked by specified lock_id lock virtual bool ListLocked(const std::string& lock_id, std::list >& ids); }; } // namespace ARex #endif // __ARC_DELEGATION_FiLERECORDBDB_H__ nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/README0000644000000000000000000000013214152153376021652 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.500647402 30 ctime=1638455099.403562496 nordugrid-arc-6.14.0/src/services/a-rex/README0000644000175000002070000000004514152153376021636 0ustar00mockbuildmock00000000000000ARC job management service - A-REX. 
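/* Illustrative usage sketch (not a file from this archive; added for clarity). It shows how a caller
   might drive the ARex::DelegationStore API declared in DelegationStore.h above for one delegation
   exchange. The storage path, client identity and the step where the client signs the request are
   hypothetical placeholders. */
#include <iostream>
#include <string>
#include "DelegationStore.h"

static bool delegate_once(const std::string& storage_dir, const std::string& client_dn) {
  // Open (or create) the credentials store backed by SQLite.
  ARex::DelegationStore store(storage_dir, ARex::DelegationStore::DbSQLite);
  if(!store) { std::cerr << store.Error() << std::endl; return false; }
  std::string id;      // left empty so that GetRequest() creates a new slot and fills the id
  std::string request; // certificate request which the client is expected to sign
  if(!store.GetRequest(id, client_dn, request)) return false;
  // ... the signed public certificate chain is obtained from the client here ...
  std::string signed_chain; // assumed to be provided by the client side
  if(!store.PutDeleg(id, client_dn, signed_chain)) return false;
  // The delegated credentials can later be located on disk by id and client.
  std::cout << "delegation stored at " << store.FindCred(id, client_dn) << std::endl;
  return true;
}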
nordugrid-arc-6.14.0/src/services/a-rex/PaxHeaders.30264/PayloadFile.cpp0000644000000000000000000000013214152153376023667 xustar000000000000000030 mtime=1638455038.405645974 30 atime=1638455038.500647402 30 ctime=1638455099.412562631 nordugrid-arc-6.14.0/src/services/a-rex/PayloadFile.cpp0000644000175000002070000001425214152153376023660 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include "PayloadFile.h" namespace ARex { PayloadBigFile::Size_t PayloadBigFile::threshold_ = 1024*1024*10; // 10MB by default PayloadFile::PayloadFile(const char* filename,Size_t start,Size_t end) { handle_=::open(filename,O_RDONLY); SetRead(handle_,start,end); } PayloadFile::PayloadFile(int h,Size_t start,Size_t end) { SetRead(h,start,end); } void PayloadFile::SetRead(int h,Size_t start,Size_t end) { handle_=h; start_=start; end_=end; addr_=(char*)MAP_FAILED; size_=0; if(handle_ == -1) return; struct stat st; if(fstat(handle_,&st) != 0) goto error; size_=st.st_size; if((end_ == ((off_t)-1)) || (end_ > size_)) { end_=size_; } if(start_ >= size_) { start_=size_; end_=start_; return; } if(size_ > 0) { addr_=(char*)mmap(NULL,size_,PROT_READ,MAP_SHARED,handle_,0); if(addr_ == (char*)MAP_FAILED) goto error; } return; error: perror("PayloadFile"); if(handle_ != -1) ::close(handle_); handle_=-1; size_=0; addr_=(char*)MAP_FAILED; return; } PayloadFile::~PayloadFile(void) { if(addr_ != (char*)MAP_FAILED) munmap(addr_,size_); if(handle_ != -1) ::close(handle_); handle_=-1; size_=0; addr_=(char*)MAP_FAILED; return; } char* PayloadFile::Content(Size_t pos) { if(handle_ == -1) return NULL; if(addr_ == (char*)MAP_FAILED) return NULL; if(pos >= end_) return NULL; if(pos < start_) return NULL; return (addr_+pos); } char PayloadFile::operator[](Size_t pos) const { if(handle_ == -1) return 0; if(addr_ == (char*)MAP_FAILED) return 0; if(pos >= end_) return 0; if(pos < start_) return 0; return addr_[pos]; } PayloadFile::Size_t PayloadFile::Size(void) const { return size_; } char* PayloadFile::Insert(Size_t /*pos*/,Size_t /*size*/) { // Not supported return NULL; } char* PayloadFile::Insert(const char*,Size_t /*pos*/,Size_t /*size*/) { // Not supported return NULL; } char* PayloadFile::Buffer(unsigned int num) { if(handle_ == -1) return NULL; if(num>0) return NULL; if(addr_ == (char*)MAP_FAILED) return NULL; return addr_+start_; } PayloadFile::Size_t PayloadFile::BufferSize(unsigned int num) const { if(handle_ == -1) return 0; if(num>0) return 0; return (end_-start_); } PayloadFile::Size_t PayloadFile::BufferPos(unsigned int num) const { if(num == 0) return start_; return end_; } bool PayloadFile::Truncate(Size_t /*size*/) { // Not supported return false; } static int open_file_read(const char* filename) { return ::open(filename,O_RDONLY); } //static int open_file_write(const char* filename) { // return ::open(filename,O_WRONLY | O_CREAT,S_IRUSR | S_IWUSR); //} PayloadBigFile::PayloadBigFile(int h,Size_t start,Size_t end): PayloadStream(h) { seekable_ = false; if(handle_ == -1) return; ::lseek(handle_,start,SEEK_SET); limit_ = end; } PayloadBigFile::PayloadBigFile(const char* filename,Size_t start,Size_t end): PayloadStream(open_file_read(filename)) { seekable_ = false; if(handle_ == -1) return; ::lseek(handle_,start,SEEK_SET); limit_ = end; } //PayloadBigFile::PayloadBigFile(const char* filename,Size_t size): // PayloadStream(open_file_write(filename)){ // seekable_ = false; //} PayloadBigFile::~PayloadBigFile(void) { if(handle_ != -1) 
::close(handle_); } Arc::PayloadStream::Size_t PayloadBigFile::Pos(void) const { if(handle_ == -1) return 0; return ::lseek(handle_,0,SEEK_CUR); } Arc::PayloadStream::Size_t PayloadBigFile::Size(void) const { if(handle_ == -1) return 0; struct stat st; if(fstat(handle_,&st) != 0) return 0; return st.st_size; } Arc::PayloadStream::Size_t PayloadBigFile::Limit(void) const { Size_t s = Size(); if((limit_ == (off_t)(-1)) || (limit_ > s)) return s; return limit_; } bool PayloadBigFile::Get(char* buf,int& size) { if(handle_ == -1) return false; if(limit_ == (off_t)(-1)) return PayloadStream::Get(buf,size); Size_t cpos = Pos(); if(cpos >= limit_) { size=0; return false; } if((cpos+size) > limit_) size=limit_-cpos; return PayloadStream::Get(buf,size); } PayloadFAFile::PayloadFAFile(Arc::FileAccess* h,Size_t start,Size_t end) { handle_ = h; if(handle_ == NULL) return; handle_->fa_lseek(start,SEEK_SET); limit_ = end; } PayloadFAFile::~PayloadFAFile(void) { if(handle_ != NULL) { handle_->fa_close(); Arc::FileAccess::Release(handle_); }; } Arc::PayloadStream::Size_t PayloadFAFile::Pos(void) const { if(handle_ == NULL) return 0; return handle_->fa_lseek(0,SEEK_CUR); } Arc::PayloadStream::Size_t PayloadFAFile::Size(void) const { if(handle_ == NULL) return 0; struct stat st; if(!handle_->fa_fstat(st)) return 0; return st.st_size; } Arc::PayloadStream::Size_t PayloadFAFile::Limit(void) const { Size_t s = Size(); if((limit_ == (off_t)(-1)) || (limit_ > s)) return s; return limit_; } bool PayloadFAFile::Get(char* buf,int& size) { if(handle_ == NULL) return false; if(limit_ != (off_t)(-1)) { Size_t cpos = Pos(); if(cpos >= limit_) { size=0; return false; } if((cpos+size) > limit_) size=limit_-cpos; }; ssize_t l = handle_->fa_read(buf,size); if(l <= 0) { size=0; return false; } size = (int)l; return true; } Arc::MessagePayload* newFileRead(const char* filename,Arc::PayloadRawInterface::Size_t start,Arc::PayloadRawInterface::Size_t end) { int h = open_file_read(filename); return newFileRead(h,start,end); } Arc::MessagePayload* newFileRead(int h,Arc::PayloadRawInterface::Size_t start,Arc::PayloadRawInterface::Size_t end) { struct stat st; if(fstat(h,&st) != 0) return NULL; if(st.st_size > PayloadBigFile::Threshold()) { PayloadBigFile* f = new PayloadBigFile(h,start,end); if(!*f) { delete f; return NULL; }; return f; } PayloadFile* f = new PayloadFile(h,start,end); if(!*f) { delete f; return NULL; }; return f; } Arc::MessagePayload* newFileRead(Arc::FileAccess* h,Arc::PayloadRawInterface::Size_t start,Arc::PayloadRawInterface::Size_t end) { PayloadFAFile* f = new PayloadFAFile(h,start,end); return f; } } // namespace ARex nordugrid-arc-6.14.0/src/services/PaxHeaders.30264/gridftpd0000644000000000000000000000013014152153473021500 xustar000000000000000029 mtime=1638455099.34856167 30 atime=1638455103.998631539 29 ctime=1638455099.34856167 nordugrid-arc-6.14.0/src/services/gridftpd/0000755000175000002070000000000014152153473021544 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376023615 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.510647552 30 ctime=1638455099.206559536 nordugrid-arc-6.14.0/src/services/gridftpd/Makefile.am0000644000175000002070000000264314152153376023607 0ustar00mockbuildmock00000000000000sbin_PROGRAMS = gridftpd noinst_LTLIBRARIES = libgridftpd.la if SYSV_SCRIPTS_ENABLED GRIDFTPD_SCRIPT = arc-gridftpd else GRIDFTPD_SCRIPT = endif initd_SCRIPTS = $(GRIDFTPD_SCRIPT) if 
SYSTEMD_UNITS_ENABLED GRIDFTPD_UNIT = arc-gridftpd.service else GRIDFTPD_UNIT = endif units_DATA = $(GRIDFTPD_UNIT) pkgdata_SCRIPTS = arc-gridftpd-start libgridftpd_la_SOURCES = userspec.cpp names.cpp misc.cpp libgridftpd_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLOBUS_FTP_CLIENT_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) gridftpd_SOURCES = commands.cpp config.cpp fileroot.cpp listener.cpp \ dataread.cpp datawrite.cpp datalist.cpp fileroot_config.cpp \ commands.h conf.h fileroot.h misc.h names.h userspec.h gridftpd_CXXFLAGS = -I$(top_srcdir)/include \ $(GLOBUS_FTP_CLIENT_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) gridftpd_LDADD = libgridftpd.la conf/libconf.la run/librun.la \ misc/libmisc.la auth/libauth.la auth/libmap.la \ $(top_builddir)/src/hed/libs/globusutils/libarcglobusutils.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLOBUS_FTP_CONTROL_LIBS) $(GLOBUS_GSS_ASSIST_LIBS) \ $(GLOBUS_GSSAPI_GSI_LIBS) $(GLOBUS_IO_LIBS) \ $(GLOBUS_GSI_CERT_UTILS_LIBS) $(GLOBUS_GSI_CREDENTIAL_LIBS) \ $(GLOBUS_OPENSSL_MODULE_LIBS) $(GLOBUS_COMMON_LIBS) gridftpd_LDFLAGS = -rdynamic SUBDIRS = misc conf run auth . fileplugin DIST_SUBDIRS = misc conf run auth . fileplugin man_MANS = gridftpd.8 nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/dataread.cpp0000644000000000000000000000013214152153376024032 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.218559716 nordugrid-arc-6.14.0/src/services/gridftpd/dataread.cpp0000644000175000002070000001600514152153376024021 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include "fileroot.h" #include "names.h" #include "commands.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"GridFTP_Commands"); /* file retrieve callbacks */ void GridFTP_Commands::data_connect_retrieve_callback(void* arg,globus_ftp_control_handle_t*,unsigned int /* stripendx */,globus_bool_t /* reused */,globus_object_t *error) { GridFTP_Commands *it = (GridFTP_Commands*)arg; logger.msg(Arc::VERBOSE, "data_connect_retrieve_callback"); globus_thread_blocking_will_block(); globus_mutex_lock(&(it->data_lock)); it->time_spent_disc=0; it->time_spent_network=0; it->last_action_time=time(NULL); logger.msg(Arc::VERBOSE, "Data channel connected (retrieve)"); if(it->check_abort(error)) { it->froot.close(false); globus_mutex_unlock(&(it->data_lock)); return; }; it->data_eof = false; /* make buffers */ logger.msg(Arc::VERBOSE, "data_connect_retrieve_callback: allocate_data_buffer"); it->compute_data_buffer(); if(!(it->allocate_data_buffer())) { logger.msg(Arc::ERROR, "data_connect_retrieve_callback: allocate_data_buffer failed"); it->froot.close(false); it->force_abort(); globus_mutex_unlock(&(it->data_lock)); return; }; /* fill and register all available buffers */ it->data_callbacks=0; it->data_offset=0; for(unsigned int i = 0;idata_buffer_num;i++) { logger.msg(Arc::VERBOSE, "data_connect_retrieve_callback: check for buffer %u", i); if(!((it->data_buffer)[i].data)) continue; /* read data from file */ unsigned long long size = it->data_buffer_size; if(it->virt_restrict) { if((it->data_offset + size) > it->virt_size) size=it->virt_size-it->data_offset; }; struct timezone tz; gettimeofday(&(it->data_buffer[i].time_last),&tz); int fres=it->froot.read(it->data_buffer[i].data, (it->virt_offset)+(it->data_offset),&size); if(fres != 0) { logger.msg(Arc::ERROR, "Closing channel (retrieve) due to local read error: %s", it->froot.error); 
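/* Local read failed: abort the FTP transfer, release the shared data buffers and close the source file before dropping data_lock. */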
it->force_abort(); it->free_data_buffer();it->froot.close(false); globus_mutex_unlock(&(it->data_lock)); return; }; if(size == 0) it->data_eof=GLOBUS_TRUE; /* register buffer */ globus_result_t res; res=globus_ftp_control_data_write(&(it->handle), (globus_byte_t*)(it->data_buffer[i].data), size,it->data_offset,it->data_eof, &data_retrieve_callback,it); it->data_offset+=size; if(res != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Buffer registration failed"); logger.msg(Arc::ERROR, "Globus error: %s", Arc::GlobusResult(res).str()); it->force_abort(); if(it->data_callbacks==0){it->free_data_buffer();it->froot.close(false);}; globus_mutex_unlock(&(it->data_lock)); return; }; it->data_callbacks++; if(it->data_eof == GLOBUS_TRUE) break; }; globus_mutex_unlock(&(it->data_lock)); return; } void GridFTP_Commands::data_retrieve_callback(void* arg,globus_ftp_control_handle_t*,globus_object_t *error,globus_byte_t *buffer,globus_size_t length,globus_off_t offset,globus_bool_t eof) { logger.msg(Arc::VERBOSE, "data_retrieve_callback"); globus_thread_blocking_will_block(); GridFTP_Commands *it = (GridFTP_Commands*)arg; struct timezone tz; struct timeval tv; gettimeofday(&tv,&tz); globus_mutex_lock(&(it->data_lock)); it->last_action_time=time(NULL); logger.msg(Arc::VERBOSE, "Data channel (retrieve) %i %i %i", (int)offset, (int)length, (int)eof); it->data_callbacks--; if(it->check_abort(error)) { if(it->data_callbacks==0){it->free_data_buffer();it->froot.close(false);}; globus_mutex_unlock(&(it->data_lock)); return; }; if(it->data_eof) { if(it->data_callbacks==0) { logger.msg(Arc::VERBOSE, "Closing channel (retrieve)"); it->free_data_buffer(); it->virt_offset=0; it->virt_restrict=false; it->transfer_mode=false; it->froot.close(); logger.msg(Arc::VERBOSE, "Time spent waiting for network: %.3f ms", (float)(it->time_spent_network/1000.0)); logger.msg(Arc::VERBOSE, "Time spent waiting for disc: %.3f ms", (float)(it->time_spent_disc/1000.0)); it->send_response("226 Requested file transfer completed\r\n"); }; globus_mutex_unlock(&(it->data_lock)); return; }; /* find this buffer */ unsigned int i; for(i = 0;idata_buffer_num;i++) { if((it->data_buffer)[i].data == (unsigned char*)buffer) break; }; if(i >= it->data_buffer_num) { /* lost buffer - probably memory corruption */ logger.msg(Arc::ERROR, "data_retrieve_callback: lost buffer"); it->force_abort(); if(it->data_callbacks==0){it->free_data_buffer();it->froot.close(false);}; globus_mutex_unlock(&(it->data_lock)); return; }; unsigned long long int time_diff = (tv.tv_sec-(it->data_buffer[i].time_last.tv_sec))*1000000+ (tv.tv_usec-(it->data_buffer[i].time_last.tv_usec)); it->time_spent_network+=time_diff; /* read data from file */ unsigned long long size = it->data_buffer_size; if(it->virt_restrict) { if((it->data_offset + size) > it->virt_size) size=it->virt_size-it->data_offset; }; #ifdef __USE_PARALLEL_FILE_ACCESS__ it->data_callbacks++; /* Unlock while reading file, so to allow others to read in parallel. This can speed up read if on striped device/filesystem. 
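Note that data_callbacks is incremented before the lock is dropped, so concurrent completion callbacks still see an outstanding buffer and will not free the shared buffers or close the file while this read is in progress.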
*/ globus_mutex_unlock(&(it->data_lock)); #endif /* NOTE: it->data_lock is not unlocked here because it->froot.write is not thread safe */ struct timeval tv_last; gettimeofday(&tv_last,&tz); int fres=it->froot.read(it->data_buffer[i].data, (it->virt_offset)+(it->data_offset),&size); #ifdef __USE_PARALLEL_FILE_ACCESS__ globus_mutex_lock(&(it->data_lock)); it->data_callbacks--; #endif gettimeofday(&tv,&tz); time_diff=(tv.tv_sec-tv_last.tv_sec)*1000000+(tv.tv_usec-tv_last.tv_usec); it->time_spent_disc+=time_diff; if((fres != 0) || (!it->transfer_mode) || (it->transfer_abort)) { logger.msg(Arc::ERROR, "Closing channel (retrieve) due to local read error: %s", it->froot.error); it->force_abort(); if(it->data_callbacks==0){it->free_data_buffer();it->froot.close(false);}; globus_mutex_unlock(&(it->data_lock)); return; }; if(size == 0) it->data_eof=true; /* register buffer */ globus_result_t res; res=globus_ftp_control_data_write(&(it->handle), (globus_byte_t*)(it->data_buffer[i].data), size,it->data_offset,it->data_eof, &data_retrieve_callback,it); it->data_offset+=size; if(res != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Buffer registration failed"); logger.msg(Arc::ERROR, "Globus error: %s", Arc::GlobusResult(res).str()); it->force_abort(); if(it->data_callbacks==0){it->free_data_buffer();it->froot.close(false);}; globus_mutex_unlock(&(it->data_lock)); return; }; it->data_callbacks++; globus_mutex_unlock(&(it->data_lock)); return; } nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/auth0000644000000000000000000000013214152153473022443 xustar000000000000000030 mtime=1638455099.347561655 30 atime=1638455103.999631554 30 ctime=1638455099.347561655 nordugrid-arc-6.14.0/src/services/gridftpd/auth/0000755000175000002070000000000014152153473022505 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376024556 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.511647567 30 ctime=1638455099.323561294 nordugrid-arc-6.14.0/src/services/gridftpd/auth/Makefile.am0000644000175000002070000000220214152153376024537 0ustar00mockbuildmock00000000000000noinst_LTLIBRARIES = libauth.la libaccess.la libmap.la libauth_la_SOURCES = auth.h auth.cpp auth_subject.cpp auth_file.cpp \ auth_voms.cpp auth_plugin.cpp libauth_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(GLOBUS_IO_CFLAGS) \ $(LCMAPS_CFLAGS) $(AM_CXXFLAGS) libauth_la_LIBADD = ../misc/libmisc.la ../run/librun.la ../conf/libconf.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la libaccess_la_SOURCES = identity.cpp identity.h \ identity_dn.cpp identity_dn.h identity_voms.cpp identity_voms.h \ permission.cpp permission.h \ object_access.cpp object_access.h libaccess_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(GLOBUS_IO_CFLAGS) \ $(LCMAPS_CFLAGS) $(AM_CXXFLAGS) libaccess_la_LIBADD = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(XML_LIBS) libmap_la_SOURCES = unixmap.h unixmap.cpp unixmap_lcmaps.cpp \ simplemap.h simplemap.cpp libmap_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(GLOBUS_IO_CFLAGS) \ $(LCMAPS_CFLAGS) $(AM_CXXFLAGS) libmap_la_LIBADD = libauth.la $(top_builddir)/src/hed/libs/common/libarccommon.la nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/simplemap.h0000644000000000000000000000013114152153376024661 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 
ctime=1638455099.346561639 nordugrid-arc-6.14.0/src/services/gridftpd/auth/simplemap.h0000644000175000002070000000064414152153376024653 0ustar00mockbuildmock00000000000000#include #define SELFUNMAP_TIME (10*24*60*60) class SimpleMap { private: std::string dir_; int pool_handle_; unsigned int selfunmap_time_; public: SimpleMap(const char* dir); ~SimpleMap(void); std::string map(const char* subject); bool unmap(const char* subject); operator bool(void) const { return (pool_handle_ != -1); }; bool operator!(void) const { return (pool_handle_ == -1); }; }; nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/identity_dn.cpp0000644000000000000000000000013114152153376025537 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.328561369 nordugrid-arc-6.14.0/src/services/gridftpd/auth/identity_dn.cpp0000644000175000002070000000132114152153376025522 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include "identity_dn.h" IdentityItemDN::IdentityItemDN(const char* dn):dn_(dn) { type_="dn"; } IdentityItemDN::~IdentityItemDN(void) { } Identity::Item* IdentityItemDN::duplicate(void) const { return new IdentityItemDN(dn_.c_str()); } const std::string& IdentityItemDN::name(unsigned int n) { if(n>0) return empty_; return type_; } const std::string& IdentityItemDN::value(unsigned int n) { if(n>0) return empty_; return dn_; } const std::string& IdentityItemDN::value(const char* name,unsigned int /* n */) { std::string name_s = name; if(name_s != "dn") return empty_; return dn_; } std::string IdentityItemDN::str(void) { return dn_; } nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435024563 xustar000000000000000030 mtime=1638455069.361111094 30 atime=1638455090.886434522 30 ctime=1638455099.322561279 nordugrid-arc-6.14.0/src/services/gridftpd/auth/Makefile.in0000644000175000002070000012466114152153435024562 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/gridftpd/auth DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = LTLIBRARIES = $(noinst_LTLIBRARIES) libaccess_la_DEPENDENCIES = \ $(top_builddir)/src/hed/libs/common/libarccommon.la am_libaccess_la_OBJECTS = libaccess_la-identity.lo \ libaccess_la-identity_dn.lo libaccess_la-identity_voms.lo \ libaccess_la-permission.lo libaccess_la-object_access.lo libaccess_la_OBJECTS = $(am_libaccess_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libaccess_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(libaccess_la_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ libauth_la_DEPENDENCIES = ../misc/libmisc.la ../run/librun.la \ ../conf/libconf.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la am_libauth_la_OBJECTS = libauth_la-auth.lo libauth_la-auth_subject.lo \ libauth_la-auth_file.lo libauth_la-auth_voms.lo \ libauth_la-auth_plugin.lo 
libauth_la_OBJECTS = $(am_libauth_la_OBJECTS) libauth_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(libauth_la_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ libmap_la_DEPENDENCIES = libauth.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la am_libmap_la_OBJECTS = libmap_la-unixmap.lo \ libmap_la-unixmap_lcmaps.lo libmap_la-simplemap.lo libmap_la_OBJECTS = $(am_libmap_la_OBJECTS) libmap_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(libmap_la_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libaccess_la_SOURCES) $(libauth_la_SOURCES) \ $(libmap_la_SOURCES) DIST_SOURCES = $(libaccess_la_SOURCES) $(libauth_la_SOURCES) \ $(libmap_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = 
@GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ 
XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ noinst_LTLIBRARIES = libauth.la libaccess.la libmap.la libauth_la_SOURCES = auth.h auth.cpp auth_subject.cpp auth_file.cpp \ auth_voms.cpp auth_plugin.cpp libauth_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(GLOBUS_IO_CFLAGS) \ $(LCMAPS_CFLAGS) $(AM_CXXFLAGS) libauth_la_LIBADD = ../misc/libmisc.la ../run/librun.la ../conf/libconf.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la libaccess_la_SOURCES = identity.cpp identity.h \ identity_dn.cpp identity_dn.h identity_voms.cpp identity_voms.h \ permission.cpp permission.h \ object_access.cpp object_access.h libaccess_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(GLOBUS_IO_CFLAGS) \ $(LCMAPS_CFLAGS) $(AM_CXXFLAGS) libaccess_la_LIBADD = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(XML_LIBS) libmap_la_SOURCES = unixmap.h unixmap.cpp unixmap_lcmaps.cpp \ simplemap.h simplemap.cpp libmap_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(GLOBUS_IO_CFLAGS) \ $(LCMAPS_CFLAGS) $(AM_CXXFLAGS) 
libmap_la_LIBADD = libauth.la $(top_builddir)/src/hed/libs/common/libarccommon.la all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/gridftpd/auth/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/gridftpd/auth/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstLTLIBRARIES: -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) @list='$(noinst_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libaccess.la: $(libaccess_la_OBJECTS) $(libaccess_la_DEPENDENCIES) $(EXTRA_libaccess_la_DEPENDENCIES) $(AM_V_CXXLD)$(libaccess_la_LINK) $(libaccess_la_OBJECTS) $(libaccess_la_LIBADD) $(LIBS) libauth.la: $(libauth_la_OBJECTS) $(libauth_la_DEPENDENCIES) $(EXTRA_libauth_la_DEPENDENCIES) $(AM_V_CXXLD)$(libauth_la_LINK) $(libauth_la_OBJECTS) $(libauth_la_LIBADD) $(LIBS) libmap.la: $(libmap_la_OBJECTS) $(libmap_la_DEPENDENCIES) $(EXTRA_libmap_la_DEPENDENCIES) $(AM_V_CXXLD)$(libmap_la_LINK) $(libmap_la_OBJECTS) $(libmap_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccess_la-identity.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccess_la-identity_dn.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccess_la-identity_voms.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccess_la-object_access.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libaccess_la-permission.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libauth_la-auth.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libauth_la-auth_file.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libauth_la-auth_plugin.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libauth_la-auth_subject.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libauth_la-auth_voms.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libmap_la-simplemap.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libmap_la-unixmap.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libmap_la-unixmap_lcmaps.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po 
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libaccess_la-identity.lo: identity.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccess_la_CXXFLAGS) $(CXXFLAGS) -MT libaccess_la-identity.lo -MD -MP -MF $(DEPDIR)/libaccess_la-identity.Tpo -c -o libaccess_la-identity.lo `test -f 'identity.cpp' || echo '$(srcdir)/'`identity.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccess_la-identity.Tpo $(DEPDIR)/libaccess_la-identity.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='identity.cpp' object='libaccess_la-identity.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccess_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccess_la-identity.lo `test -f 'identity.cpp' || echo '$(srcdir)/'`identity.cpp libaccess_la-identity_dn.lo: identity_dn.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccess_la_CXXFLAGS) $(CXXFLAGS) -MT libaccess_la-identity_dn.lo -MD -MP -MF $(DEPDIR)/libaccess_la-identity_dn.Tpo -c -o libaccess_la-identity_dn.lo `test -f 'identity_dn.cpp' || echo '$(srcdir)/'`identity_dn.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccess_la-identity_dn.Tpo $(DEPDIR)/libaccess_la-identity_dn.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='identity_dn.cpp' object='libaccess_la-identity_dn.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccess_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccess_la-identity_dn.lo `test -f 'identity_dn.cpp' || echo '$(srcdir)/'`identity_dn.cpp libaccess_la-identity_voms.lo: identity_voms.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX 
$(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccess_la_CXXFLAGS) $(CXXFLAGS) -MT libaccess_la-identity_voms.lo -MD -MP -MF $(DEPDIR)/libaccess_la-identity_voms.Tpo -c -o libaccess_la-identity_voms.lo `test -f 'identity_voms.cpp' || echo '$(srcdir)/'`identity_voms.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccess_la-identity_voms.Tpo $(DEPDIR)/libaccess_la-identity_voms.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='identity_voms.cpp' object='libaccess_la-identity_voms.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccess_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccess_la-identity_voms.lo `test -f 'identity_voms.cpp' || echo '$(srcdir)/'`identity_voms.cpp libaccess_la-permission.lo: permission.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccess_la_CXXFLAGS) $(CXXFLAGS) -MT libaccess_la-permission.lo -MD -MP -MF $(DEPDIR)/libaccess_la-permission.Tpo -c -o libaccess_la-permission.lo `test -f 'permission.cpp' || echo '$(srcdir)/'`permission.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccess_la-permission.Tpo $(DEPDIR)/libaccess_la-permission.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='permission.cpp' object='libaccess_la-permission.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccess_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccess_la-permission.lo `test -f 'permission.cpp' || echo '$(srcdir)/'`permission.cpp libaccess_la-object_access.lo: object_access.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccess_la_CXXFLAGS) $(CXXFLAGS) -MT libaccess_la-object_access.lo -MD -MP -MF $(DEPDIR)/libaccess_la-object_access.Tpo -c -o libaccess_la-object_access.lo `test -f 'object_access.cpp' || echo '$(srcdir)/'`object_access.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libaccess_la-object_access.Tpo $(DEPDIR)/libaccess_la-object_access.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='object_access.cpp' object='libaccess_la-object_access.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libaccess_la_CXXFLAGS) $(CXXFLAGS) -c -o libaccess_la-object_access.lo `test -f 'object_access.cpp' || echo '$(srcdir)/'`object_access.cpp libauth_la-auth.lo: auth.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) 
$(CPPFLAGS) $(libauth_la_CXXFLAGS) $(CXXFLAGS) -MT libauth_la-auth.lo -MD -MP -MF $(DEPDIR)/libauth_la-auth.Tpo -c -o libauth_la-auth.lo `test -f 'auth.cpp' || echo '$(srcdir)/'`auth.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libauth_la-auth.Tpo $(DEPDIR)/libauth_la-auth.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='auth.cpp' object='libauth_la-auth.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libauth_la_CXXFLAGS) $(CXXFLAGS) -c -o libauth_la-auth.lo `test -f 'auth.cpp' || echo '$(srcdir)/'`auth.cpp libauth_la-auth_subject.lo: auth_subject.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libauth_la_CXXFLAGS) $(CXXFLAGS) -MT libauth_la-auth_subject.lo -MD -MP -MF $(DEPDIR)/libauth_la-auth_subject.Tpo -c -o libauth_la-auth_subject.lo `test -f 'auth_subject.cpp' || echo '$(srcdir)/'`auth_subject.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libauth_la-auth_subject.Tpo $(DEPDIR)/libauth_la-auth_subject.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='auth_subject.cpp' object='libauth_la-auth_subject.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libauth_la_CXXFLAGS) $(CXXFLAGS) -c -o libauth_la-auth_subject.lo `test -f 'auth_subject.cpp' || echo '$(srcdir)/'`auth_subject.cpp libauth_la-auth_file.lo: auth_file.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libauth_la_CXXFLAGS) $(CXXFLAGS) -MT libauth_la-auth_file.lo -MD -MP -MF $(DEPDIR)/libauth_la-auth_file.Tpo -c -o libauth_la-auth_file.lo `test -f 'auth_file.cpp' || echo '$(srcdir)/'`auth_file.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libauth_la-auth_file.Tpo $(DEPDIR)/libauth_la-auth_file.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='auth_file.cpp' object='libauth_la-auth_file.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libauth_la_CXXFLAGS) $(CXXFLAGS) -c -o libauth_la-auth_file.lo `test -f 'auth_file.cpp' || echo '$(srcdir)/'`auth_file.cpp libauth_la-auth_voms.lo: auth_voms.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libauth_la_CXXFLAGS) $(CXXFLAGS) -MT libauth_la-auth_voms.lo -MD -MP -MF $(DEPDIR)/libauth_la-auth_voms.Tpo -c -o libauth_la-auth_voms.lo `test -f 'auth_voms.cpp' || echo '$(srcdir)/'`auth_voms.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libauth_la-auth_voms.Tpo 
$(DEPDIR)/libauth_la-auth_voms.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='auth_voms.cpp' object='libauth_la-auth_voms.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libauth_la_CXXFLAGS) $(CXXFLAGS) -c -o libauth_la-auth_voms.lo `test -f 'auth_voms.cpp' || echo '$(srcdir)/'`auth_voms.cpp libauth_la-auth_plugin.lo: auth_plugin.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libauth_la_CXXFLAGS) $(CXXFLAGS) -MT libauth_la-auth_plugin.lo -MD -MP -MF $(DEPDIR)/libauth_la-auth_plugin.Tpo -c -o libauth_la-auth_plugin.lo `test -f 'auth_plugin.cpp' || echo '$(srcdir)/'`auth_plugin.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libauth_la-auth_plugin.Tpo $(DEPDIR)/libauth_la-auth_plugin.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='auth_plugin.cpp' object='libauth_la-auth_plugin.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libauth_la_CXXFLAGS) $(CXXFLAGS) -c -o libauth_la-auth_plugin.lo `test -f 'auth_plugin.cpp' || echo '$(srcdir)/'`auth_plugin.cpp libmap_la-unixmap.lo: unixmap.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libmap_la_CXXFLAGS) $(CXXFLAGS) -MT libmap_la-unixmap.lo -MD -MP -MF $(DEPDIR)/libmap_la-unixmap.Tpo -c -o libmap_la-unixmap.lo `test -f 'unixmap.cpp' || echo '$(srcdir)/'`unixmap.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libmap_la-unixmap.Tpo $(DEPDIR)/libmap_la-unixmap.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='unixmap.cpp' object='libmap_la-unixmap.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libmap_la_CXXFLAGS) $(CXXFLAGS) -c -o libmap_la-unixmap.lo `test -f 'unixmap.cpp' || echo '$(srcdir)/'`unixmap.cpp libmap_la-unixmap_lcmaps.lo: unixmap_lcmaps.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libmap_la_CXXFLAGS) $(CXXFLAGS) -MT libmap_la-unixmap_lcmaps.lo -MD -MP -MF $(DEPDIR)/libmap_la-unixmap_lcmaps.Tpo -c -o libmap_la-unixmap_lcmaps.lo `test -f 'unixmap_lcmaps.cpp' || echo '$(srcdir)/'`unixmap_lcmaps.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libmap_la-unixmap_lcmaps.Tpo $(DEPDIR)/libmap_la-unixmap_lcmaps.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='unixmap_lcmaps.cpp' object='libmap_la-unixmap_lcmaps.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) 
@AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libmap_la_CXXFLAGS) $(CXXFLAGS) -c -o libmap_la-unixmap_lcmaps.lo `test -f 'unixmap_lcmaps.cpp' || echo '$(srcdir)/'`unixmap_lcmaps.cpp libmap_la-simplemap.lo: simplemap.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libmap_la_CXXFLAGS) $(CXXFLAGS) -MT libmap_la-simplemap.lo -MD -MP -MF $(DEPDIR)/libmap_la-simplemap.Tpo -c -o libmap_la-simplemap.lo `test -f 'simplemap.cpp' || echo '$(srcdir)/'`simplemap.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libmap_la-simplemap.Tpo $(DEPDIR)/libmap_la-simplemap.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='simplemap.cpp' object='libmap_la-simplemap.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libmap_la_CXXFLAGS) $(CXXFLAGS) -c -o libmap_la-simplemap.lo `test -f 'simplemap.cpp' || echo '$(srcdir)/'`simplemap.cpp mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-noinstLTLIBRARIES cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/object_access.cpp0000644000000000000000000000013114152153376026014 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.334561459 nordugrid-arc-6.14.0/src/services/gridftpd/auth/object_access.cpp0000644000175000002070000000347114152153376026007 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include "object_access.h" ObjectAccess::Item ObjectAccess::empty_(NULL,NULL); ObjectAccess::ObjectAccess(void) { } ObjectAccess::ObjectAccess(const ObjectAccess& o) { for(std::list::const_iterator i = o.items_.begin();i!=o.items_.end();++i) { Item* item = (Item*)(&(*i)); Identity* id = item->id(); Permission* perm = item->permission(); if(id && perm) { id = id->duplicate(); perm = perm->duplicate(); if(id && perm) { items_.insert(items_.end(),Item(id,perm)); } else { if(id) delete id; if(perm) delete perm; }; }; }; } ObjectAccess::~ObjectAccess(void) { for(std::list::iterator i = items_.begin();i!=items_.end();++i) { if(i->id()) delete i->id(); if(i->permission()) delete i->permission(); }; } ObjectAccess::Item* ObjectAccess::use(Identity* id,Permission* perm) { if(!id) return NULL; if(!perm) return NULL; return &(*items_.insert(items_.end(),Item(id,perm))); } ObjectAccess::Item* ObjectAccess::add(Identity* id,Permission* perm) { if(!id) return NULL; if(!perm) return NULL; Identity* id_ = id->duplicate(); Permission* perm_ = perm->duplicate(); return use(id_,perm_); } ObjectAccess::Item* ObjectAccess::operator[](unsigned int n) { if(n >= items_.size()) return NULL; std::list::iterator i = items_.begin(); for(;n && (i!=items_.end());--n,++i){}; if(i == items_.end()) return NULL; return &(*i); } ObjectAccess::Item* ObjectAccess::find(Identity* id) { if(id == NULL) return NULL; std::list::iterator i = items_.begin(); for(;i!=items_.end();++i) { Identity* id_ = i->id(); if(id_ == NULL) continue; if((*id_) == (*id)) return &(*i); }; return NULL; } int ObjectAccess::size(void) { return items_.size(); } nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/identity_voms.h0000644000000000000000000000013114152153376025567 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.331561414 nordugrid-arc-6.14.0/src/services/gridftpd/auth/identity_voms.h0000644000175000002070000000140714152153376025557 0ustar00mockbuildmock00000000000000#include #include "identity.h" class IdentityItemVOMS: public Identity::Item { std::string vo_; std::string voms_; std::string group_; std::string role_; std::string cap_; static std::string vo_name_; static std::string voms_name_; static std::string group_name_; static std::string role_name_; static std::string cap_name_; public: IdentityItemVOMS(const char* vo,const char* voms,const char* group,const char* role,const char* cap); IdentityItemVOMS(const IdentityItemVOMS& v); virtual ~IdentityItemVOMS(void); virtual Identity::Item* duplicate(void) const; virtual const std::string& name(unsigned int n); virtual const std::string& value(unsigned int n); virtual const std::string& value(const char* name,unsigned int n); }; nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/auth.h0000644000000000000000000000013214152153376023634 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.511647567 30 ctime=1638455099.336561489 nordugrid-arc-6.14.0/src/services/gridftpd/auth/auth.h0000644000175000002070000001576514152153376023637 
0ustar00mockbuildmock00000000000000#ifndef __GM_AUTH_H__ #define __GM_AUTH_H__ #include #include #include #include #include enum AuthResult { AAA_POSITIVE_MATCH = 1, AAA_NEGATIVE_MATCH = -1, AAA_NO_MATCH = 0, AAA_FAILURE = 2 }; class AuthVO; /** VOMS FQAN split into elements */ struct voms_fqan_t { std::string group; // including root group which is always same as VO std::string role; // role associated to group - for each role there is one voms_fqan_t std::string capability; // deprecated but must keep itt void str(std::string& str) const; // convert to string (minimal variation) }; /** VOMS data */ struct voms_t { std::string server; /*!< The VOMS server hostname */ std::string voname; /*!< The name of the VO to which the VOMS belongs */ std::vector fqans; /*!< Processed FQANs of user */ }; class AuthUser { private: typedef AuthResult (AuthUser:: * match_func_t)(const char* line); typedef struct { const char* cmd; match_func_t func; } source_t; class group_t { public: std::string name; // const char* vo; // VO name matched when authorizing this group struct voms_t voms; // VOMS attributes matched when authorizing this group group_t(const char* name_, const char* vo_, const struct voms_t& voms_): name(name_?name_:""),vo(vo_?vo_:""),voms(voms_) { }; }; // VOMS attributes which matched last athorization rule. Also affected by matching group. struct voms_t default_voms_; // Last matched VO name from those defined in [vo]. const char* default_vo_; // Last matched group including groupcfg processing. const char* default_group_; std::string subject; // SN of certificate std::string from; // Remote hostname std::string filename; // Delegated proxy stored in this file bool proxy_file_was_created; // If proxy file was created by this object bool has_delegation; // If proxy contains delegation static source_t sources[]; // Supported evaluation sources AuthResult match_all(const char* line); AuthResult match_group(const char* line); AuthResult match_subject(const char* line); AuthResult match_file(const char* line); AuthResult match_ldap(const char* line); AuthResult match_voms(const char* line); AuthResult match_vo(const char* line); AuthResult match_lcas(const char *); AuthResult match_plugin(const char* line); AuthResult match_authtokens(const char* line); AuthResult process_voms(void); std::vector voms_data; // VOMS information extracted from proxy bool voms_extracted; std::list groups; // Groups which user matched (internal names) std::list vos; // VOs to which user belongs (external names) bool valid; const group_t* find_group(const char* grp) const { if(grp == NULL) return NULL; for(std::list::const_iterator i=groups.begin();i!=groups.end();++i) { if(i->name == grp) return &(*i); }; return NULL; }; const group_t* find_group(const std::string& grp) const { return find_group(grp.c_str());}; public: AuthUser(const AuthUser&); // Constructor // subject - subject/DN of user // filename - file with (delegated) credentials AuthUser(const char* subject = NULL,const char* filename = NULL); ~AuthUser(void); AuthUser& operator=(const AuthUser&); bool operator!(void) { return !valid; }; operator bool(void) { return valid; }; void set(const char* subject,const char* hostname = NULL); void set(const char* subject,gss_ctx_id_t ctx,gss_cred_id_t cred,const char* hostname = NULL); void set(const char* s,STACK_OF(X509)* cred,const char* hostname = NULL); // Evaluate authentication rules AuthResult evaluate(const char* line); const char* DN(void) const { return subject.c_str(); }; const char* proxy(void) const 
{ return filename.c_str(); }; bool is_proxy(void) const { return has_delegation; }; const char* hostname(void) const { return from.c_str(); }; // Remember this user belongs to group 'grp' void add_group(const char* grp) { groups.push_back(group_t(grp,default_vo_,default_voms_)); }; void add_group(const std::string& grp) { add_group(grp.c_str()); }; // Mark this user as belonging to no groups void clear_groups(void) { groups.clear(); default_group_=NULL; }; // Returns true if user belongs to specified group 'grp' bool check_group(const char* grp) const; bool check_group(const std::string& grp) const { return check_group(grp.c_str());}; bool select_group(const char* grp); bool select_group(const std::string& grp) { return select_group(grp.c_str());}; void add_vo(const char* vo) { vos.push_back(std::string(vo)); }; void add_vo(const std::string& vo) { vos.push_back(vo); }; bool add_vo(const char* vo,const char* filename); bool add_vo(const std::string& vo,const std::string& filename); bool add_vo(const AuthVO& vo); bool add_vo(const std::list& vos); void clear_vos(void) { vos.clear(); }; bool check_vo(const char* vo) const { for(std::list::const_iterator i=vos.begin();i!=vos.end();++i) { if(strcmp(i->c_str(),vo) == 0) return true; }; return false; }; bool check_vo(const std::string& vo) const { return check_vo(vo.c_str());}; const struct voms_t& default_voms(void) const { return default_voms_; }; const char* default_vo(void) const { return default_vo_; }; const char* default_group(void) const { return default_group_; }; const struct voms_t* default_group_voms(void) const { const group_t* group = find_group(default_group_); return (group == NULL)?NULL:&(group->voms); }; const char* default_group_vo(void) const { const group_t* group = find_group(default_group_); return (group == NULL)?NULL:group->vo; }; const char* default_subject(void) const { return subject.c_str(); }; // Returns all VOMS attributes associated with user const std::vector& voms(void) const; // Returns all internal (locally configured) VOs associated with user const std::list& VOs(void) const; // convert ARC VOMS attribute list into voms structure static struct voms_t arc_to_voms(const std::string& vo,const std::vector& attributes); static std::string err_to_string(int err); }; class AuthEvaluator { private: std::list l; std::string name; public: AuthEvaluator(void); AuthEvaluator(const char* name); ~AuthEvaluator(void); void add(const char*); AuthResult evaluate(AuthUser &) const; bool operator==(const char* s) { return (strcmp(name.c_str(),s)==0); }; bool operator==(const std::string& s) const { return (name == s); }; const char* get_name() const { return name.c_str(); }; }; void AuthUserSubst(std::string& str,AuthUser& it); class AuthVO { friend class AuthUser; private: std::string name; std::string file; public: AuthVO(const char* vo,const char* filename):name(vo),file(filename) { }; AuthVO(const std::string& vo,const std::string& filename):name(vo.c_str()),file(filename.c_str()) { }; ~AuthVO(void) { }; }; #endif nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/unixmap.h0000644000000000000000000000013114152153376024353 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.343561594 nordugrid-arc-6.14.0/src/services/gridftpd/auth/unixmap.h0000644000175000002070000000405014152153376024340 0ustar00mockbuildmock00000000000000#ifndef __GM_UNIXMAP_H__ #define __GM_UNIXMAP_H__ #include #include "auth.h" class UnixMap { private: class unix_user_t { public: std::string 
name; std::string group; unix_user_t(void) { }; }; typedef AuthResult (UnixMap:: * map_func_t)(const AuthUser& user,unix_user_t& unix_user,const char* line); typedef struct { const char* cmd; map_func_t map; } source_t; typedef enum { MAPPING_CONTINUE, MAPPING_STOP } map_action_t; typedef struct { map_action_t nogroup; map_action_t nomap; map_action_t map; } map_events_t; static source_t sources[]; // Supported evaluation sources // Unix user obtained after mapping unix_user_t unix_user_; // Associated user AuthUser& user_; // Identity of mapping request. std::string map_id_; // Mapping processing policy map_events_t map_policy_; // Mapping was done bool mapped_; AuthResult map_mapfile(const AuthUser& user,unix_user_t& unix_user,const char* line); AuthResult map_simplepool(const AuthUser& user,unix_user_t& unix_user,const char* line); AuthResult map_unixuser(const AuthUser& user,unix_user_t& unix_user,const char* line); AuthResult map_lcmaps(const AuthUser& user,unix_user_t& unix_user,const char* line); AuthResult map_mapplugin(const AuthUser& user,unix_user_t& unix_user,const char* line); public: // Constructor - links to grid user UnixMap(AuthUser& user,const std::string& id = ""); ~UnixMap(void); // Properties const char* id(void) const { return map_id_.c_str(); }; operator bool(void) const { return mapped_; }; bool operator!(void) const { return !mapped_; }; const char* unix_name(void) const { return unix_user_.name.c_str(); }; const char* unix_group(void) const { return unix_user_.group.c_str(); }; AuthUser& user(void) { return user_; }; // Map AuthResult mapgroup(const char* rule, const char* line); AuthResult setunixuser(const char* name, const char* group); // Stack processing policy bool set_map_policy(const char* rule, const char* line); }; #endif // __GM_UNIXMAP_H__ nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/auth.cpp0000644000000000000000000000013214152153376024167 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.511647567 30 ctime=1638455099.337561504 nordugrid-arc-6.14.0/src/services/gridftpd/auth/auth.cpp0000644000175000002070000002733314152153376024164 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include "../misc/proxy.h" #include "auth.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"AuthUser"); void voms_fqan_t::str(std::string& str) const { str = group; if(!role.empty()) str += "/Role="+role; if(!capability.empty()) str += "/Capability="+capability; } AuthResult AuthUser::match_all(const char* line) { std::string token = Arc::trim(line); if(token == "yes") { default_voms_=voms_t(); default_vo_=NULL; default_group_=NULL; return AAA_POSITIVE_MATCH; } if(token == "no") { return AAA_NO_MATCH; } logger.msg(Arc::ERROR,"Unexpected argument for 'all' rule - %s",token); return AAA_FAILURE; } AuthResult AuthUser::match_authtokens(const char* line) { // No tokens can be derived in this environment. Hence always no match. 
return AAA_NO_MATCH; } AuthResult AuthUser::match_group(const char* line) { for(;;) { std::string s(""); int n = Arc::ConfigIni::NextArg(line,s); if(n == 0) break; line+=n; for(std::list::iterator i = groups.begin();i!=groups.end();++i) { if(s == i->name) { default_voms_=i->voms; default_vo_=i->vo; default_group_=i->name.c_str(); return AAA_POSITIVE_MATCH; }; }; }; return AAA_NO_MATCH; } AuthResult AuthUser::match_vo(const char* line) { for(;;) { std::string s(""); int n = Arc::ConfigIni::NextArg(line,s); if(n == 0) break; line+=n; for(std::list::iterator i = vos.begin();i!=vos.end();++i) { if(s == *i) { default_voms_=voms_t(); default_vo_=i->c_str(); default_group_=NULL; return AAA_POSITIVE_MATCH; }; }; }; return AAA_NO_MATCH; } AuthUser::source_t AuthUser::sources[] = { { "all", &AuthUser::match_all }, { "authgroup", &AuthUser::match_group }, { "subject", &AuthUser::match_subject }, { "file", &AuthUser::match_file }, { "voms", &AuthUser::match_voms }, { "userlist", &AuthUser::match_vo }, { "plugin", &AuthUser::match_plugin }, { "authtokens", &AuthUser::match_authtokens }, { NULL, NULL } }; AuthUser::AuthUser(const char* s,const char* f):subject(""),filename("") { valid = true; if(s) { Arc::ConfigIni::NextArg(s,subject,'\0','\0'); }; struct stat fileStat; if(f && stat(f, &fileStat) == 0) filename=f; proxy_file_was_created=false; voms_extracted=false; has_delegation=false; // ???? default_voms_=voms_t(); default_vo_=NULL; default_group_=NULL; if(process_voms() == AAA_FAILURE) valid=false; } AuthUser::AuthUser(const AuthUser& a) { valid=a.valid; subject=a.subject; filename=a.filename; has_delegation=a.has_delegation; proxy_file_was_created=false; voms_extracted=false; default_voms_=voms_t(); default_vo_=NULL; default_group_=NULL; if(process_voms() == AAA_FAILURE) valid=false; } AuthUser& AuthUser::operator=(const AuthUser& a) { valid=a.valid; subject=a.subject; filename=a.filename; has_delegation=a.has_delegation; voms_data.clear(); voms_extracted=false; proxy_file_was_created=false; default_voms_=voms_t(); default_vo_=NULL; default_group_=NULL; if(process_voms() == AAA_FAILURE) valid=false; return *this; } void AuthUser::set(const char* s,gss_ctx_id_t ctx,gss_cred_id_t cred,const char* hostname) { valid=true; if(hostname) from=hostname; voms_data.clear(); voms_extracted=false; proxy_file_was_created=false; filename=""; has_delegation=false; filename=""; subject=""; char* p = gridftpd::write_proxy(cred); if(p) { filename=p; free(p); has_delegation=true; proxy_file_was_created=true; } else { p=gridftpd::write_cert_chain(ctx); if(p) { filename=p; free(p); proxy_file_was_created=true; }; }; if(s == NULL) { // Obtain subject from credentials or context if(filename.length()) { globus_gsi_cred_handle_t h; if(globus_gsi_cred_handle_init(&h,GLOBUS_NULL) == GLOBUS_SUCCESS) { if(globus_gsi_cred_read_proxy(h,(char*)(filename.c_str())) == GLOBUS_SUCCESS) { char* sname = NULL; if(globus_gsi_cred_get_subject_name(h,&sname) == GLOBUS_SUCCESS) { Arc::ConfigIni::NextArg(sname,subject,'\0','\0'); free(sname); }; }; globus_gsi_cred_handle_destroy(h); }; }; } else { subject=s; }; if(process_voms() == AAA_FAILURE) valid=false; } void AuthUser::set(const char* s,STACK_OF(X509)* cred,const char* hostname) { valid=true; if(hostname) from=hostname; voms_data.clear(); voms_extracted=false; proxy_file_was_created=false; filename=""; has_delegation=false; int chain_size = 0; if(cred) chain_size=sk_X509_num(cred); if((s == NULL) && (chain_size <= 0)) return; if(s == NULL) { X509* cert=sk_X509_value(cred,0); if(cert) { 
X509_NAME *name = X509_get_subject_name(cert); if(name) { if(globus_gsi_cert_utils_get_base_name(name,cred) == GLOBUS_SUCCESS) { char* buf = X509_NAME_oneline(X509_get_subject_name(cert),NULL,0); if(buf) { subject=buf; OPENSSL_free(buf); }; }; }; }; if(subject.length() == 0) return; } else { subject=s; }; if(chain_size > 0) { std::string tempname = Glib::build_filename(Glib::get_tmp_dir(), "x509.XXXXXX"); if(!Arc::TmpFileCreate(tempname, "")) return; filename = tempname; BIO* bio; if((bio=BIO_new_file(filename.c_str(), "w")) == NULL) return; for(int chain_index = 0;chain_index& attributes) { struct voms_t voms_item; voms_item.voname = vo; // Collect fqans with parsed groups, roles and capabilties. for(std::vector::const_iterator v = attributes.begin(); v != attributes.end(); ++v) { std::list elements; Arc::tokenize(*v, elements, "/"); // /rootgroup(=VO)/mygroup/mysubgroup/Role=myrole std::list::iterator i = elements.begin(); // Check root group agains VO and skip wrong ones if (i == elements.end()) continue; // too short if (*i != voms_item.voname) { // Check if that is VO to hostname association (special ARC FQAN) if(*i == (std::string("voname=")+voms_item.voname)) { ++i; if (*i != voms_item.voname) { std::vector keyvalue; Arc::tokenize(*i, keyvalue, "="); if (keyvalue.size() == 2) { if (keyvalue[0] == "hostname") { voms_item.server = keyvalue[1]; }; }; }; }; continue; // ignore attribute with wrong root group }; voms_fqan_t fqan; fqan.group = "/"+(*i); ++i; for (; i != elements.end(); ++i) { std::vector keyvalue; Arc::tokenize(*i, keyvalue, "="); if (keyvalue.size() == 1) { // part of group fqan.group += "/"+(*i); } else if (keyvalue.size() == 2) { if (keyvalue[0] == "Role") { fqan.role = keyvalue[1]; } else if (keyvalue[0] == "Capability") { fqan.capability = keyvalue[1]; } } } voms_item.fqans.push_back(fqan); } return voms_item; } AuthUser::~AuthUser(void) { if(proxy_file_was_created && filename.length()) unlink(filename.c_str()); } AuthResult AuthUser::evaluate(const char* line) { if(!valid) return AAA_FAILURE; bool invert = false; bool no_match = false; const char* command = "subject"; size_t command_len = 7; if(subject.length()==0) return AAA_NO_MATCH; // ?? 
if(!line) return AAA_NO_MATCH; for(;*line;line++) if(!isspace(*line)) break; if(*line == 0) return AAA_NO_MATCH; if(*line == '#') return AAA_NO_MATCH; if(*line == '-') { line++; invert=true; } else if(*line == '+') { line++; }; if(*line == '!') { no_match=true; line++; }; if((*line != '/') && (*line != '"')) { command=line; for(;*line;line++) if(isspace(*line)) break; command_len=line-command; for(;*line;line++) if(!isspace(*line)) break; }; for(source_t* s = sources;s->cmd;s++) { if((strncmp(s->cmd,command,command_len) == 0) && (strlen(s->cmd) == command_len)) { AuthResult res=(this->*(s->func))(line); if(res == AAA_FAILURE) return res; if(no_match) { if(res==AAA_NO_MATCH) { res=AAA_POSITIVE_MATCH; } else { res=AAA_NO_MATCH; }; }; if(invert) { switch(res) { case AAA_POSITIVE_MATCH: res = AAA_NEGATIVE_MATCH; break; case AAA_NEGATIVE_MATCH: res = AAA_POSITIVE_MATCH; break; case AAA_NO_MATCH: case AAA_FAILURE: default: break; }; }; return res; }; }; logger.msg(Arc::ERROR, "Unknown authorization command %s", command); return AAA_FAILURE; } const std::vector& AuthUser::voms(void) const { if(!voms_extracted) { const char* line = "* * * *"; // Little hack to avoid mess with const in legacy code const_cast(this)->match_voms(line); }; return voms_data; } const std::list& AuthUser::VOs(void) const { return vos; } bool AuthUser::add_vo(const char* vo,const char* filename) { if((!filename) || (!filename[0])) { logger.msg(Arc::WARNING,"The [vo] section labeled '%s' has no file associated and can't be used for matching", vo); return false; } if(match_file(filename) == AAA_POSITIVE_MATCH) { add_vo(vo); return true; }; return false; } bool AuthUser::add_vo(const std::string& vo,const std::string& filename) { return add_vo(vo.c_str(),filename.c_str()); } bool AuthUser::add_vo(const AuthVO& vo) { return add_vo(vo.name,vo.file); } bool AuthUser::add_vo(const std::list& vos) { bool r = true; for(std::list::const_iterator vo = vos.begin();vo!=vos.end();++vo) { r&=add_vo(*vo); }; return r; } std::string AuthUser::err_to_string(int err) { if(err == AAA_POSITIVE_MATCH) return "positive"; if(err == AAA_NEGATIVE_MATCH) return "negative"; if(err == AAA_NO_MATCH) return "no match"; if(err == AAA_FAILURE) return "failure"; return ""; } bool AuthUser::check_group(const char* grp) const { if(grp == NULL) return false; for(std::list::const_iterator i=groups.begin();i!=groups.end();++i) { if(i->name == grp) return true; }; return false; }; bool AuthUser::select_group(const char* grp) { default_group_ = NULL; if(grp == NULL) return false; for(std::list::const_iterator i=groups.begin();i!=groups.end();++i) { if(i->name == grp) { default_group_ = i->name.c_str(); return true; }; }; return false; } AuthEvaluator::AuthEvaluator(void):name("") { } AuthEvaluator::AuthEvaluator(const char* s):name(s) { } AuthEvaluator::~AuthEvaluator(void) { } void AuthEvaluator::add(const char* line) { l.push_back(line); } AuthResult AuthEvaluator::evaluate(AuthUser &u) const { for(std::list::const_iterator i = l.begin();i!=l.end();++i) { AuthResult r = u.evaluate(i->c_str()); if(r != AAA_NO_MATCH) return r; }; return AAA_NO_MATCH; } nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/auth_subject.cpp0000644000000000000000000000013214152153376025706 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.511647567 30 ctime=1638455099.338561519 nordugrid-arc-6.14.0/src/services/gridftpd/auth/auth_subject.cpp0000644000175000002070000000045714152153376025701 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H 
#include #endif #include #include #include "auth.h" AuthResult AuthUser::match_subject(const char* line) { std::string s(line); if(strcmp(subject.c_str(),s.c_str()) == 0) { return AAA_POSITIVE_MATCH; }; return AAA_NO_MATCH; } nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/auth_voms.cpp0000644000000000000000000000013214152153376025233 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.511647567 30 ctime=1638455099.340561549 nordugrid-arc-6.14.0/src/services/gridftpd/auth/auth_voms.cpp0000644000175000002070000001361714152153376025230 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include "auth.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"AuthUserVOMS"); static AuthResult process_vomsproxy(const char* filename,std::vector &data,bool auto_cert = false); AuthResult AuthUser::process_voms(void) { if(!voms_extracted) { if(filename.length() > 0) { AuthResult err = process_vomsproxy(filename.c_str(),voms_data); voms_extracted=true; logger.msg(Arc::DEBUG, "VOMS proxy processing returns: %i - %s", err, err_to_string(err)); if(err != AAA_POSITIVE_MATCH) return err; }; }; return AAA_POSITIVE_MATCH; } AuthResult AuthUser::match_voms(const char* line) { // parse line std::string vo(""); std::string group(""); std::string role(""); std::string capabilities(""); std::string auto_c(""); int n; n=Arc::ConfigIni::NextArg(line,vo,' ','"'); if(n == 0) { logger.msg(Arc::ERROR, "Missing VO in configuration"); return AAA_FAILURE; }; line+=n; n=Arc::ConfigIni::NextArg(line,group,' ','"'); if(n == 0) { logger.msg(Arc::ERROR, "Missing group in configuration"); return AAA_FAILURE; }; line+=n; n=Arc::ConfigIni::NextArg(line,role,' ','"'); if(n == 0) { logger.msg(Arc::ERROR, "Missing role in configuration"); return AAA_FAILURE; }; line+=n; n=Arc::ConfigIni::NextArg(line,capabilities,' ','"'); if(n == 0) { logger.msg(Arc::ERROR, "Missing capabilities in configuration"); return AAA_FAILURE; }; n=Arc::ConfigIni::NextArg(line,auto_c,' ','"'); logger.msg(Arc::VERBOSE, "Rule: vo: %s", vo); logger.msg(Arc::VERBOSE, "Rule: group: %s", group); logger.msg(Arc::VERBOSE, "Rule: role: %s", role); logger.msg(Arc::VERBOSE, "Rule: capabilities: %s", capabilities); // extract info from voms proxy // if(voms_data->size() == 0) { if(process_voms() != AAA_POSITIVE_MATCH) return AAA_FAILURE; if(voms_data.empty()) return AAA_NO_MATCH; // analyse permissions for(std::vector::iterator v = voms_data.begin();v!=voms_data.end();++v) { logger.msg(Arc::DEBUG, "Match vo: %s", v->voname); if((vo == "*") || (vo == v->voname)) { bool matched = false; for(std::vector::iterator f = v->fqans.begin(); f != v->fqans.end(); ++f) { if(((group == "*") || (group == f->group)) && ((role == "*") || (role == f->role)) && ((capabilities == "*") || (capabilities == f->capability))) { if(!matched) { default_voms_ = voms_t(); default_voms_.voname = v->voname; default_voms_.server = v->server; matched = true; }; default_voms_.fqans.push_back(*f); }; }; if(matched) { return AAA_POSITIVE_MATCH; }; }; }; logger.msg(Arc::VERBOSE, "Matched nothing"); return AAA_NO_MATCH; } static AuthResult process_vomsproxy(const char* filename,std::vector &data,bool /* auto_cert */) { std::string voms_dir = "/etc/grid-security/vomsdir"; std::string cert_dir = "/etc/grid-security/certificates"; { std::string v; if(!(v = Arc::GetEnv("X509_VOMS_DIR")).empty()) voms_dir = v; if(!(v = Arc::GetEnv("X509_CERT_DIR")).empty()) cert_dir = v; 
}; std::string voms_processing = Arc::GetEnv("VOMS_PROCESSING"); Arc::Credential c(filename, filename, cert_dir, ""); std::vector output; std::string emptystring = ""; /* Arc::VOMSTrustList emptylist; emptylist.AddRegex(".*"); */ std::string voms_trust_chains = Arc::GetEnv("VOMS_TRUST_CHAINS"); logger.msg(Arc::VERBOSE, "VOMS trust chains: %s", voms_trust_chains); std::vector vomstrustlist; std::vector vomstrustchains; Arc::tokenize(voms_trust_chains, vomstrustchains, "\n"); for(size_t i=0; i vomstrust_dns; std::string trust_chain = vomstrustchains[i]; std::string::size_type p1, p2=0; while(1) { p1 = trust_chain.find("\"", p2); if(p1!=std::string::npos) { p2 = trust_chain.find("\"", p1+1); if(p2!=std::string::npos) { std::string str = trust_chain.substr(p1+1, p2-p1-1); vomstrust_dns.push_back(str); p2++; if(trust_chain[p2] == '\n') break; } } if((p1==std::string::npos) || (p2==std::string::npos)) break; } if(!vomstrust_dns.empty()) { if(vomstrustlist.empty()) vomstrustlist.insert(vomstrustlist.begin(), vomstrust_dns.begin(), vomstrust_dns.end()); else { vomstrustlist.push_back("----NEXT CHAIN---"); vomstrustlist.insert(vomstrustlist.end(), vomstrust_dns.begin(), vomstrust_dns.end()); } } } Arc::VOMSTrustList voms_trust_list(vomstrustlist); parseVOMSAC(c, cert_dir, emptystring, voms_dir, voms_trust_list, output, true, true); for(size_t n=0;n #endif #include "identity_voms.h" std::string IdentityItemVOMS::vo_name_("vo"); std::string IdentityItemVOMS::voms_name_("voms"); std::string IdentityItemVOMS::group_name_("group"); std::string IdentityItemVOMS::role_name_("role"); std::string IdentityItemVOMS::cap_name_("capability"); IdentityItemVOMS::IdentityItemVOMS(const IdentityItemVOMS& v) { vo_=v.vo_; voms_=v.voms_; group_=v.group_; role_=v.role_; cap_=v.cap_; } IdentityItemVOMS::~IdentityItemVOMS(void) { } Identity::Item* IdentityItemVOMS::duplicate(void) const { return new IdentityItemVOMS(*this); } const std::string& IdentityItemVOMS::name(unsigned int n) { switch(n) { case 0: return vo_name_; case 1: return voms_name_; case 2: return group_name_; case 3: return role_name_; case 4: return cap_name_; }; return empty_; } const std::string& IdentityItemVOMS::value(unsigned int n) { switch(n) { case 0: return vo_; case 1: return voms_; case 2: return group_; case 3: return role_; case 4: return cap_; }; return empty_; } const std::string& IdentityItemVOMS::value(const char* name,unsigned int /* n */) { if(vo_name_ == name) return vo_; if(voms_name_ == name) return voms_; if(group_name_ == name) return group_; if(role_name_ == name) return role_; if(cap_name_ == name) return cap_; return empty_; } IdentityItemVOMS::IdentityItemVOMS(const char* vo,const char* voms,const char* group,const char* role,const char* cap) { vo_=vo; voms_=voms; group_=group; role_=role; cap_=cap; } nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/permission.h0000644000000000000000000000013114152153376025062 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.333561444 nordugrid-arc-6.14.0/src/services/gridftpd/auth/permission.h0000644000175000002070000000136714152153376025057 0ustar00mockbuildmock00000000000000#ifndef __ARC_PERMISSION_H__ #define __ARC_PERMISSION_H__ class Permission { public: typedef enum { object = 0, metadata = 1, permissions = 2 } Object; typedef enum { create = 0, read = 1, write = 2, extend = 3, reduce = 4, remove = 5, info = 6 } Action; typedef enum { undefined = 0, allow = 1, deny = 2 } Perm; private: Perm perms_[3][7]; public: 
Permission(void); Permission(const Permission& p); virtual ~Permission(void); bool set(Object o,Action a,Perm p); bool set_conditional(Object o,Action a,Perm p); bool get(Object o,Action a,Perm p); bool get_conditional(Object o,Action a,Perm p); virtual Permission* duplicate(void) const; }; #endif // __ARC_PERMISSION_H__ nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/auth_plugin.cpp0000644000000000000000000000013214152153376025545 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.511647567 30 ctime=1638455099.341561564 nordugrid-arc-6.14.0/src/services/gridftpd/auth/auth_plugin.cpp0000644000175000002070000000366614152153376025545 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include "../run/run_plugin.h" #include "auth.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"AuthUserPlugin"); void AuthUserSubst(std::string& str,AuthUser& it) { int l = str.length(); for(int i=0;i #endif #include "identity.h" // ------ Identity --------------- Identity::Item Identity::empty_; Identity::Identity(void) { } Identity::Identity(const Identity& t) { for(std::list::const_iterator i = t.items_.begin();i!=t.items_.end();++i) { add(*i); }; } Identity::~Identity(void) { for(std::list::iterator i = items_.begin();i!=items_.end();++i) { if(*i) delete *i; }; } Identity* Identity::duplicate(void) const { return new Identity(*this); } Identity::Item* Identity::add(const Identity::Item* t) { if(!t) return NULL; return *(items_.insert(items_.end(),t->duplicate())); } Identity::Item* Identity::use(Identity::Item* t) { if(!t) return NULL; return *(items_.insert(items_.end(),t)); } Identity::Item* Identity::operator[](unsigned int n) { if(n>=items_.size()) return NULL; std::list::iterator i = items_.begin(); for(;n && (i!=items_.end());--n,++i){}; if(i==items_.end()) return NULL; return *i; } bool Identity::operator==(Identity& id) { for(std::list::iterator i = items_.begin(); i!=items_.end();++i) { if(*i == NULL) continue; for(std::list::iterator i_ = id.items_.begin(); i_!=id.items_.end();++i_) { if(*i_ == NULL) continue; if(((*i)->str()) == ((*i_)->str())) return true; }; }; return false; } // ------ Identity::Item --------------- std::string Identity::Item::empty_(""); Identity::Item::Item(void):type_("") { } Identity::Item::~Item(void) { } Identity::Item* Identity::Item::duplicate(void) const { return new Identity::Item; } const std::string& Identity::Item::name(unsigned int /* n */) { return empty_; } const std::string& Identity::Item::value(unsigned int /* n */) { return empty_; } const std::string& Identity::Item::value(const char* /* name */,unsigned int /* n */) { return empty_; } std::string Identity::Item::str(void) { std::string v; for(int n = 0;;n++) { const std::string& s = name(n); if(s.length() == 0) break; v+="/"+s+"="+value(n); }; return v; } nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/unixmap_lcmaps.cpp0000644000000000000000000000013114152153376026245 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.345561625 nordugrid-arc-6.14.0/src/services/gridftpd/auth/unixmap_lcmaps.cpp0000644000175000002070000000155114152153376026235 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include "../misc/proxy.h" #include "../run/run_plugin.h" #include "unixmap.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"UnixMap"); AuthResult UnixMap::map_lcmaps(const 
AuthUser& user,unix_user_t& unix_user,const char* line) { // TODO: escape // TODO: hardcoded 300s timeout for lcmaps std::string lcmaps_plugin = "300 \""+ Arc::ArcLocation::Get()+G_DIR_SEPARATOR_S+PKGLIBEXECSUBDIR+ G_DIR_SEPARATOR_S+"arc-lcmaps\" "; lcmaps_plugin+=std::string("\"")+user_.DN()+"\" "; lcmaps_plugin+=std::string("\"")+user_.proxy()+"\" "; lcmaps_plugin+=line; AuthResult res = map_mapplugin(user,unix_user,lcmaps_plugin.c_str()); return res; } nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/identity.h0000644000000000000000000000013214152153376024524 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.511647567 30 ctime=1638455099.327561354 nordugrid-arc-6.14.0/src/services/gridftpd/auth/identity.h0000644000175000002070000000162414152153376024514 0ustar00mockbuildmock00000000000000#ifndef __ARC_IDENTITY_H__ #define __ARC_IDENTITY_H__ #include #include class Identity { public: class Item { protected: std::string type_; static std::string empty_; public: Item(void); virtual ~Item(void); const std::string& type(void) const { return type_; }; virtual Item* duplicate(void) const; virtual const std::string& name(unsigned int n); virtual const std::string& value(unsigned int n); virtual const std::string& value(const char* name,unsigned int n); virtual std::string str(void); }; protected: std::list items_; static Item empty_; public: Identity(void); Identity(const Identity&); virtual ~Identity(void); Item* add(const Item* t); Item* use(Item* t); Item* operator[](unsigned int n); virtual Identity* duplicate(void) const; virtual bool operator==(Identity& id); }; #endif // __ARC_IDENTITY_H__ nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/permission.cpp0000644000000000000000000000013114152153376025415 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.332561429 nordugrid-arc-6.14.0/src/services/gridftpd/auth/permission.cpp0000644000175000002070000000354314152153376025410 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include "permission.h" Permission::Permission(void) { for(int a = 0;a<7;++a) { for(int o = 0;o<3;++o) { perms_[o][a]=undefined; }; }; } Permission::Permission(const Permission& p) { for(int a = 0;a<7;++a) { for(int o = 0;o<3;++o) { perms_[o][a]=p.perms_[o][a]; }; }; } Permission::~Permission(void) { } Permission* Permission::duplicate(void) const { return new Permission(*this); } bool Permission::set(Object o,Action a,Perm p) { if((o<0) || (o>=3)) return false; if((a<0) || (a>=7)) return false; perms_[o][a]=p; return true; } bool Permission::set_conditional(Object o,Action a,Perm p) { if((o<0) || (o>=3)) return false; if((a<0) || (a>=7)) return false; if((perms_[permissions][info] == allow) && (perms_[o][a] == p)) return true; switch(p) { case undefined: { if((perms_[permissions][reduce] == allow) || (perms_[permissions][write] == allow)) { perms_[o][a]=p; return true; }; }; break; case allow: { if(((perms_[permissions][extend] == allow) && (perms_[o][a] == undefined)) || (perms_[permissions][write] == allow)) { perms_[o][a]=p; return true; }; }; break; case deny: { if(((perms_[permissions][extend] == allow) && (perms_[o][a] == undefined)) || (perms_[permissions][write] == allow)) { perms_[o][a]=p; return true; }; }; break; }; return false; } bool Permission::get(Object o,Action a,Perm p) { if((o<0) || (o>=3)) return false; if((a<0) || (a>=7)) return false; if(perms_[permissions][info] != allow) return false; return (perms_[o][a] == p); } bool 
Permission::get_conditional(Object o,Action a,Perm p) { if((o<0) || (o>=3)) return false; if((a<0) || (a>=7)) return false; return (perms_[o][a] == p); } nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/simplemap.cpp0000644000000000000000000000013114152153376025214 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.347561655 nordugrid-arc-6.14.0/src/services/gridftpd/auth/simplemap.cpp0000644000175000002070000001412414152153376025204 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "simplemap.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"SimpleMap"); class FileLock { private: int h_; struct flock l_; public: FileLock(int h):h_(h) { if(h_ == -1) return; l_.l_type=F_WRLCK; l_.l_whence=SEEK_SET; l_.l_start=0; l_.l_len=0; for(;;) { if(fcntl(h_,F_SETLKW,&l_) == 0) break; if(errno != EINTR) { h_=-1; return; }; }; }; ~FileLock(void) { if(h_ == -1) return; l_.l_type=F_UNLCK; fcntl(h_,F_SETLKW,&l_); }; operator bool(void) const { return (h_ != -1); }; bool operator!(void) const { return (h_ == -1); }; }; SimpleMap::SimpleMap(const char* dir):dir_(dir) { if((dir_.length() == 0) || (dir_[dir_.length()-1] != '/')) dir_+="/"; if(dir_[0] != '/') dir_=Glib::get_current_dir()+"/"+dir_; pool_handle_=open((dir_+"pool").c_str(),O_RDWR); selfunmap_time_ = SELFUNMAP_TIME; std::ifstream config((dir_+"config").c_str()); while(config.good()) { std::string str; getline(config, str); std::string::size_type sep = str.find('='); if(sep != std::string::npos) { // So far only one command is supported if(str.substr(0,sep) == "timeout") { unsigned int n; if(Arc::stringto(str.substr(sep+1), n)) { selfunmap_time_ = n*24*60*60; logger.msg(Arc::VERBOSE, "SimpleMap: acquired new unmap time of %u seconds", selfunmap_time_); } else { logger.msg(Arc::ERROR, "SimpleMap: wrong number in unmaptime command", str.substr(sep+1)); } } } } } SimpleMap::~SimpleMap(void) { if(pool_handle_ != -1) close(pool_handle_); pool_handle_=-1; } #define failure(S) { \ logger.msg(Arc::ERROR, "SimpleMap: %s", (S)); \ return ""; \ } #define info(S) { \ logger.msg(Arc::INFO, "SimpleMap: %s", (S)); \ } std::string SimpleMap::map(const char* subject) { if(pool_handle_ == -1) failure("not initialized"); if(!subject) failure("missing subject"); std::string filename(subject); for(std::string::size_type i = filename.find('/');i!=std::string::npos; i=filename.find('/',i+1)) filename[i]='_'; filename=dir_+filename; FileLock lock(pool_handle_); if(!lock) failure("failed to lock pool file"); // Check for existing mapping struct stat st; if(stat(filename.c_str(),&st) == 0) { if(!S_ISREG(st.st_mode)) failure("mapping is not a regular file"); std::ifstream f(filename.c_str()); if(!f.is_open()) failure("can't open mapping file"); std::string buf; getline(f,buf); utime(filename.c_str(),NULL); return buf; }; // Look for unused names // Get full list first. std::list names; { std::ifstream f((dir_+"pool").c_str()); if(!f.is_open()) failure("can't open pool file") std::string buf; while(getline(f,buf)) { if(buf.empty()) continue; names.push_back(buf); }; }; if(names.empty()) failure("pool is empty"); // Remove all used names from list. Also find oldest maping. 
time_t oldmap_time = 0; std::string oldmap_name; std::string oldmap_subject; { struct dirent file_; struct dirent *file; DIR *dir=opendir(dir_.c_str()); if(dir == NULL) failure("can't list pool directory"); for(;;) { readdir_r(dir,&file_,&file); if(file == NULL) break; if(std::string(file->d_name) == ".") continue; if(std::string(file->d_name) == "..") continue; if(std::string(file->d_name) == "pool") continue; std::string filename = dir_+file->d_name; struct stat st; if(stat(filename.c_str(),&st) != 0) continue; if(!S_ISREG(st.st_mode)) continue; std::ifstream f(filename.c_str()); if(!f.is_open()) { // trash in directory closedir(dir); failure("can't open one of mapping files"); }; std::string buf; getline(f,buf); // find this name in list std::list::iterator i = names.begin(); for(;i!=names.end();++i) if(*i == buf) break; if(i == names.end()) { // Always try to destroy old mappings without corresponding // entry in the pool file if((selfunmap_time_ > 0) && (((unsigned int)(time(NULL) - st.st_mtime)) >= selfunmap_time_)) { unlink(filename.c_str()); }; } else { names.erase(i); if( (oldmap_name.length() == 0) || (((int)(oldmap_time - st.st_mtime)) > 0) ) { oldmap_name=buf; oldmap_subject=file->d_name; oldmap_time=st.st_mtime; }; }; }; closedir(dir); }; if(!names.empty()) { // Claim one of unused names std::ofstream f(filename.c_str()); if(!f.is_open()) failure("can't create mapping file"); f<<*(names.begin())< #include "identity.h" class IdentityItemDN: public Identity::Item { std::string dn_; public: IdentityItemDN(const char* dn); virtual ~IdentityItemDN(void); virtual Identity::Item* duplicate(void) const; virtual const std::string& name(unsigned int n); virtual const std::string& value(unsigned int n); virtual const std::string& value(const char* name,unsigned int n); virtual std::string str(void); }; nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/object_access.h0000644000000000000000000000013114152153376025461 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.335561474 nordugrid-arc-6.14.0/src/services/gridftpd/auth/object_access.h0000644000175000002070000000156314152153376025454 0ustar00mockbuildmock00000000000000#ifndef __ARC_OBJECT_ACCESS_H__ #define __ARC_OBJECT_ACCESS_H__ #include #include "identity.h" #include "permission.h" class ObjectAccess { public: class Item: public Identity::Item { protected: Identity* id_; Permission* perm_; public: Item(Identity* id,Permission* perm):id_(id),perm_(perm) { }; //~Item(void) { if(id_) delete id_; if(perm_) delete perm_; }; ~Item(void) { }; Identity* id(void) { return id_; }; Permission* permission(void) { return perm_; }; }; protected: static Item empty_; std::list items_; public: ObjectAccess(void); ObjectAccess(const ObjectAccess& o); virtual ~ObjectAccess(void); Item* use(Identity* id,Permission* perm); Item* add(Identity* id,Permission* perm); Item* operator[](unsigned int n); Item* find(Identity* id); int size(void); }; #endif // __ARC_OBJECT_ACCESS_H__ nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/auth_file.cpp0000644000000000000000000000013214152153376025166 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.511647567 30 ctime=1638455099.339561534 nordugrid-arc-6.14.0/src/services/gridftpd/auth/auth_file.cpp0000644000175000002070000000235314152153376025156 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include "auth.h" static Arc::Logger 
logger(Arc::Logger::getRootLogger(),"AuthUserFile"); AuthResult AuthUser::match_file(const char* line) { std::string s = Arc::trim(line); if(!s.empty()) { std::ifstream f(s.c_str()); if(!f.is_open()) { logger.msg(Arc::ERROR, "Failed to read file %s", s); return AAA_FAILURE; }; for(;f.good();) { std::string buf; getline(f,buf); //buf = Arc::trim(buf); std::string::size_type p = 0; for(;p=buf.length()) continue; if(buf[p] == '#') continue; std::string subj; p = Arc::get_token(subj,buf,p," ","\"","\""); if(subj.empty()) continue; // can't match empty subject - it is dangerous if(subject != subj) continue; f.close(); return AAA_POSITIVE_MATCH; }; f.close(); }; return AAA_NO_MATCH; } nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/unixmap.cpp0000644000000000000000000000013014152153376024705 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 29 ctime=1638455099.34456161 nordugrid-arc-6.14.0/src/services/gridftpd/auth/unixmap.cpp0000644000175000002070000002141114152153376024673 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include "../run/run_plugin.h" #include "simplemap.h" #include "unixmap.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"UnixMap"); UnixMap::source_t UnixMap::sources[] = { { "map_with_file", &UnixMap::map_mapfile }, { "map_to_pool", &UnixMap::map_simplepool }, { "map_to_user", &UnixMap::map_unixuser }, { "lcmaps", &UnixMap::map_lcmaps }, { "map_with_plugin", &UnixMap::map_mapplugin }, { NULL, NULL } }; UnixMap::UnixMap(AuthUser& user,const std::string& id): user_(user),map_id_(id), map_policy_({MAPPING_CONTINUE,MAPPING_STOP,MAPPING_STOP}),mapped_(false) { } UnixMap::~UnixMap(void) { } void split_unixname(std::string& unixname,std::string& unixgroup) { std::string::size_type p = unixname.find(':'); if(p != std::string::npos) { unixgroup=unixname.c_str()+p+1; unixname.resize(p); }; if(unixname[0] == '*') unixname.resize(0); if(unixgroup[0] == '*') unixgroup.resize(0); } static inline void skip_spaces(char const * & line) { for(;*line;line++) if(!isspace(*line)) break; // skip spaces } static inline void skip_till_space(char const * & line) { for(;*line;line++) if(isspace(*line)) break; } static char const action_continue_str[] = { "continue" }; static char const action_stop_str[] = { "stop" }; static char const option_nogroup_str[] = { "policy_on_nogroup" }; static char const option_nomap_str[] = { "policy_on_nomap" }; static char const option_map_str[] = { "policy_on_map" }; // Set mapping rules stack processing policy options bool UnixMap::set_map_policy(const char* rule, const char* line) { if(!line) { logger.msg(Arc::ERROR,"Mapping policy option has empty value"); return false; }; skip_spaces(line); if(*line == 0) { logger.msg(Arc::ERROR,"Mapping policy option has empty value"); return false; }; // parse event action map_action_t action; if(strcmp(line, action_continue_str) == 0) { action = MAPPING_CONTINUE; } else if(strcmp(line, action_stop_str) == 0) { action = MAPPING_STOP; } else { logger.msg(Arc::ERROR,"Unsupported mapping policy action: %s",line); return false; } // parse policy event type if(strcmp(rule, option_nogroup_str) == 0) { map_policy_.nogroup = action; } else if(strcmp(rule, option_nomap_str) == 0) { map_policy_.nomap = action; } else if(strcmp(rule, option_map_str) == 0) { map_policy_.map = action; } else { logger.msg(Arc::ERROR,"Unsupported mapping policy option: %s",rule); return false; } return true; } // Mapping options processing 
AuthResult UnixMap::mapgroup(const char* rule, const char* line) { // now: rule = options authgroup args mapped_=false; if(!line) { logger.msg(Arc::ERROR,"User name mapping command is empty"); return AAA_FAILURE; }; skip_spaces(line); if(*line == 0) { logger.msg(Arc::ERROR,"User name mapping command is empty"); return AAA_FAILURE; }; // identify common parts of mapping commands const char* groupname = line; skip_till_space(line); int groupname_len = line-groupname; if(groupname_len == 0) { logger.msg(Arc::ERROR,"User name mapping has empty authgroup: %s", groupname); return AAA_FAILURE; }; // match specified authgroup if(!user_.check_group(std::string(groupname,groupname_len))) { // nogroup event mapped_= (map_policy_.nogroup == MAPPING_STOP); return AAA_NO_MATCH; }; unix_user_.name.resize(0); unix_user_.group.resize(0); skip_spaces(line); if(!rule || (*rule == 0)) { logger.msg(Arc::ERROR,"User name mapping has empty command"); return AAA_FAILURE; }; for(source_t* s = sources;s->cmd;s++) { if(strcmp(s->cmd,rule) == 0) { AuthResult res=(this->*(s->map))(user_,unix_user_,line); if(res == AAA_POSITIVE_MATCH) { // map event mapped_= (map_policy_.map == MAPPING_STOP); return AAA_POSITIVE_MATCH; }; if(res == AAA_FAILURE) { // Processing failure, cause immediate error return AAA_FAILURE; }; // Paranoid about negative match // nomap event mapped_= (map_policy_.nomap == MAPPING_STOP); return AAA_NO_MATCH; }; }; logger.msg(Arc::ERROR,"Unknown user name mapping rule %s",rule); return AAA_FAILURE; } AuthResult UnixMap::setunixuser(const char* unixname, const char* unixgroup) { mapped_=false; if((!unixname) || (*unixname == 0)) { logger.msg(Arc::ERROR,"User name mapping has empty name: %s", unixname); return AAA_FAILURE; }; unix_user_.name.assign(unixname); if(unixgroup) unix_user_.group.assign(unixgroup); mapped_=true; return AAA_POSITIVE_MATCH; } // ----------------------------------------------------------- static void subst_arg(std::string& str,void* arg) { AuthUser* it = (AuthUser*)arg; if(!it) return; AuthUserSubst(str,*it); } AuthResult UnixMap::map_mapplugin(const AuthUser& /* user */ ,unix_user_t& unix_user,const char* line) { // ... timeout path arg ... 
if(!line) { logger.msg(Arc::ERROR,"Plugin (user mapping) command is empty"); return AAA_FAILURE; }; skip_spaces(line); if(*line == 0) { logger.msg(Arc::ERROR,"Plugin (user mapping) command is empty"); return AAA_FAILURE; }; char* p; long int to = strtol(line,&p,0); if(p == line) { logger.msg(Arc::ERROR,"Plugin (user mapping) timeout is not a number: %s", line); return AAA_FAILURE; }; if(to < 0) { logger.msg(Arc::ERROR,"Plugin (user mapping) timeout is wrong number: %s", line); return AAA_FAILURE; }; line=p; skip_spaces(line); if(*line == 0) { logger.msg(Arc::ERROR,"Plugin (user mapping) command is empty"); return AAA_FAILURE; }; std::string s = line; gridftpd::RunPlugin run(line); run.timeout(to); if(run.run(subst_arg,&user_)) { if(run.result() == 0) { if(run.stdout_channel().length() <= 512) { // sane name // Plugin should print user[:group] at stdout or nothing if no suitable mapping found unix_user.name = run.stdout_channel(); split_unixname(unix_user.name,unix_user.group); if(unix_user.name.empty()) return AAA_NO_MATCH; // success but no match return AAA_POSITIVE_MATCH; } else { logger.msg(Arc::ERROR,"Plugin %s returned too much: %s",run.cmd(),run.stdout_channel()); }; } else { logger.msg(Arc::ERROR,"Plugin %s returned: %u",run.cmd(),(unsigned int)run.result()); }; } else { logger.msg(Arc::ERROR,"Plugin %s failed to run",run.cmd()); }; logger.msg(Arc::INFO,"Plugin %s printed: %u",run.cmd(),run.stdout_channel()); logger.msg(Arc::ERROR,"Plugin %s error: %u",run.cmd(),run.stderr_channel()); return AAA_FAILURE; } AuthResult UnixMap::map_mapfile(const AuthUser& user,unix_user_t& unix_user,const char* line) { // ... file // This is just grid-mapfile std::ifstream f(line); if(user.DN()[0] == 0) return AAA_FAILURE; if(!f.is_open() ) { logger.msg(Arc::ERROR, "Mapfile at %s can't be opened.", line); return AAA_FAILURE; }; for(;f.good();) { std::string buf; //char buf[512]; // must be enough for DN + name getline(f,buf); char* p = &buf[0]; for(;*p;p++) if(((*p) != ' ') && ((*p) != '\t')) break; if((*p) == '#') continue; if((*p) == 0) continue; std::string val; int n = Arc::ConfigIni::NextArg(p,val,' ','"'); if(strcmp(val.c_str(),user.DN()) != 0) continue; p+=n; Arc::ConfigIni::NextArg(p,unix_user.name,' ','"'); f.close(); return AAA_POSITIVE_MATCH; }; f.close(); return AAA_NO_MATCH; } AuthResult UnixMap::map_simplepool(const AuthUser& user,unix_user_t& unix_user,const char* line) { // ... dir if(user.DN()[0] == 0) { logger.msg(Arc::ERROR, "User pool mapping is missing user subject."); return AAA_NO_MATCH; }; SimpleMap pool(line); if(!pool) { logger.msg(Arc::ERROR, "User pool at %s can't be opened.", line); return AAA_FAILURE; }; unix_user.name=pool.map(user.DN()); if(unix_user.name.empty()) { logger.msg(Arc::ERROR, "User pool at %s failed to perform user mapping.", line); return AAA_FAILURE; }; split_unixname(unix_user.name,unix_user.group); return AAA_POSITIVE_MATCH; } AuthResult UnixMap::map_unixuser(const AuthUser& /* user */,unix_user_t& unix_user,const char* line) { // ... 
unixname[:unixgroup] // Maping is always positive - just fill specified username std::string unixname(line); std::string unixgroup; std::string::size_type p = unixname.find(':'); if(p != std::string::npos) { unixgroup=unixname.c_str()+p+1; unixname.resize(p); }; if(unixname.empty()) { logger.msg(Arc::ERROR, "User name direct mapping is missing user name: %s.", line); return AAA_FAILURE; }; unix_user.name=unixname; unix_user.group=unixgroup; return AAA_POSITIVE_MATCH; } nordugrid-arc-6.14.0/src/services/gridftpd/auth/PaxHeaders.30264/README0000644000000000000000000000013214152153376023402 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.511647567 30 ctime=1638455099.325561324 nordugrid-arc-6.14.0/src/services/gridftpd/auth/README0000644000175000002070000000005614152153376023370 0ustar00mockbuildmock00000000000000Authorisation handling for the GridFTP server nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/conf0000644000000000000000000000013214152153473022427 xustar000000000000000030 mtime=1638455099.277560603 30 atime=1638455103.999631554 30 ctime=1638455099.277560603 nordugrid-arc-6.14.0/src/services/gridftpd/conf/0000755000175000002070000000000014152153473022471 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/gridftpd/conf/PaxHeaders.30264/conf_vo.cpp0000644000000000000000000000013114152153376024642 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.274560558 nordugrid-arc-6.14.0/src/services/gridftpd/conf/conf_vo.cpp0000644000175000002070000000360314152153376024632 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include "conf_vo.h" namespace gridftpd { int config_vo(AuthUser& user,Arc::ConfigIni& sect,std::string& cmd,std::string& rest,Arc::Logger* logger) { if(strcmp(sect.SectionMatch(),"userlist") != 0) return 1; if(sect.SubSection()[0] != '\0') return 1; if(cmd.length() == 0) return 1; std::string voname = sect.SectionIdentifier(); std::string vofile; for(;;) { if(cmd == "outfile") { vofile=rest; }; sect.ReadNext(cmd,rest); if(sect.SectionNew() || (cmd.length() == 0)) { if(voname.empty()) { logger->msg(Arc::WARNING, "Configuration section [userlist] is missing name."); } else { user.add_vo(voname,vofile); }; if(cmd.length() == 0) return 1; if(strcmp(sect.SectionMatch(),"userlist") != 0) return 1; if(sect.SubSection()[0] != '\0') return 1; voname=""; vofile=""; }; }; return 0; } int config_vo(std::list& vos,Arc::ConfigIni& sect,std::string& cmd,std::string& rest,Arc::Logger* logger) { if(strcmp(sect.SectionMatch(),"userlist") != 0) return 1; if(sect.SubSection()[0] != '\0') return 1; if(cmd.length() == 0) return 1; std::string voname = sect.SectionIdentifier(); std::string vofile; for(;;) { if(cmd == "outfile") { vofile=rest; }; sect.ReadNext(cmd,rest); if(sect.SectionNew() || (cmd.length() == 0)) { if(voname.empty()) { logger->msg(Arc::WARNING, "Configuration section [userlist] is missing name."); } else { vos.push_back(AuthVO(voname,vofile)); }; if(cmd.length() == 0) return 1; if(strcmp(sect.SectionMatch(),"userlist") != 0) return 1; if(sect.SubSection()[0] != '\0') return 1; voname=""; vofile=""; }; }; return 0; } } // namespace gridftpd nordugrid-arc-6.14.0/src/services/gridftpd/conf/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376024541 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.273560543 
nordugrid-arc-6.14.0/src/services/gridftpd/conf/Makefile.am0000644000175000002070000000033714152153376024532 0ustar00mockbuildmock00000000000000noinst_LTLIBRARIES = libconf.la libconf_la_SOURCES = \ conf_vo.cpp daemon.cpp \ conf_vo.h daemon.h libconf_la_CXXFLAGS = -I$(top_srcdir)/include \ $(LIBXML2_CFLAGS) $(GLIBMM_CFLAGS) $(GLOBUS_IO_CFLAGS) $(AM_CXXFLAGS) nordugrid-arc-6.14.0/src/services/gridftpd/conf/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435024547 xustar000000000000000030 mtime=1638455069.427112086 30 atime=1638455090.899434718 30 ctime=1638455099.272560527 nordugrid-arc-6.14.0/src/services/gridftpd/conf/Makefile.in0000644000175000002070000006600014152153435024536 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/gridftpd/conf DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ 
$(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = LTLIBRARIES = $(noinst_LTLIBRARIES) libconf_la_LIBADD = am_libconf_la_OBJECTS = libconf_la-conf_vo.lo libconf_la-daemon.lo libconf_la_OBJECTS = $(am_libconf_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libconf_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(libconf_la_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libconf_la_SOURCES) DIST_SOURCES = $(libconf_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. 
This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ 
GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ 
TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ noinst_LTLIBRARIES = libconf.la libconf_la_SOURCES = \ conf_vo.cpp daemon.cpp \ conf_vo.h daemon.h libconf_la_CXXFLAGS = -I$(top_srcdir)/include \ $(LIBXML2_CFLAGS) $(GLIBMM_CFLAGS) $(GLOBUS_IO_CFLAGS) $(AM_CXXFLAGS) all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/gridftpd/conf/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/gridftpd/conf/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstLTLIBRARIES: -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) @list='$(noinst_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libconf.la: $(libconf_la_OBJECTS) $(libconf_la_DEPENDENCIES) $(EXTRA_libconf_la_DEPENDENCIES) $(AM_V_CXXLD)$(libconf_la_LINK) $(libconf_la_OBJECTS) $(libconf_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libconf_la-conf_vo.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libconf_la-daemon.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libconf_la-conf_vo.lo: conf_vo.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libconf_la_CXXFLAGS) $(CXXFLAGS) -MT libconf_la-conf_vo.lo -MD -MP -MF $(DEPDIR)/libconf_la-conf_vo.Tpo -c -o libconf_la-conf_vo.lo `test -f 'conf_vo.cpp' || echo '$(srcdir)/'`conf_vo.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libconf_la-conf_vo.Tpo $(DEPDIR)/libconf_la-conf_vo.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='conf_vo.cpp' object='libconf_la-conf_vo.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX 
$(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libconf_la_CXXFLAGS) $(CXXFLAGS) -c -o libconf_la-conf_vo.lo `test -f 'conf_vo.cpp' || echo '$(srcdir)/'`conf_vo.cpp libconf_la-daemon.lo: daemon.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libconf_la_CXXFLAGS) $(CXXFLAGS) -MT libconf_la-daemon.lo -MD -MP -MF $(DEPDIR)/libconf_la-daemon.Tpo -c -o libconf_la-daemon.lo `test -f 'daemon.cpp' || echo '$(srcdir)/'`daemon.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libconf_la-daemon.Tpo $(DEPDIR)/libconf_la-daemon.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='daemon.cpp' object='libconf_la-daemon.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libconf_la_CXXFLAGS) $(CXXFLAGS) -c -o libconf_la-daemon.lo `test -f 'daemon.cpp' || echo '$(srcdir)/'`daemon.cpp mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-noinstLTLIBRARIES cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/gridftpd/conf/PaxHeaders.30264/daemon.cpp0000644000000000000000000000013214152153376024455 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.275560573 nordugrid-arc-6.14.0/src/services/gridftpd/conf/daemon.cpp0000644000175000002070000002302314152153376024442 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include "daemon.h" namespace gridftpd { static Arc::Logger logger(Arc::Logger::getRootLogger(),"Daemon"); static Arc::LogFile* sighup_dest = NULL; static void sighup_handler(int) { if(!sighup_dest) return; sighup_dest->setReopen(true); sighup_dest->setReopen(false); } Daemon::Daemon(void):logfile_(""),logsize_(0),lognum_(5),logreopen_(false),uid_((uid_t)(-1)),gid_((gid_t)(-1)),daemon_(true),pidfile_(""),debug_(-1) { } Daemon::~Daemon(void) { } int Daemon::arg(char c) { switch(c) { case 'F': { daemon_=false; }; break; case 'L': { logfile_=optarg; }; break; case 'U': { std::string username(optarg); std::string groupname(""); std::string::size_type n = username.find(':'); if(n != std::string::npos) { groupname=optarg+n+1; username.resize(n); }; if(username.length() == 0) { uid_=0; gid_=0; } else { struct passwd pw_; struct passwd *pw; char buf[BUFSIZ]; getpwnam_r(username.c_str(),&pw_,buf,BUFSIZ,&pw); if(pw == NULL) { logger.msg(Arc::ERROR, "No such user: %s", username); uid_=0; gid_=0; return -1; }; uid_=pw->pw_uid; gid_=pw->pw_gid; }; if(groupname.length() != 0) { struct group gr_; struct group *gr; char buf[BUFSIZ]; getgrnam_r(groupname.c_str(),&gr_,buf,BUFSIZ,&gr); if(gr == NULL) { logger.msg(Arc::ERROR, "No such group: %s", groupname); gid_=0; return -1; }; gid_=gr->gr_gid; }; }; break; case 'P': { pidfile_=optarg; }; break; case 'd': { char* p; debug_ = strtol(optarg,&p,10); if(((*p) != 0) || (debug_<0)) { logger.msg(Arc::ERROR, "Improper debug level '%s'", optarg); return 1; }; }; break; default: return 1; }; return 0; } int Daemon::config(const std::string& section, const std::string& cmd,std::string& rest) { if(section == "common") { if(cmd == "hostname") { Arc::SetEnv("GLOBUS_HOSTNAME",rest.c_str()); return 0; } else if(cmd == "x509_host_key") { Arc::SetEnv("X509_USER_KEY",rest.c_str()); return 0; } else if(cmd == "x509_host_cert") { Arc::SetEnv("X509_USER_CERT",rest.c_str()); return 0; } else if(cmd == "x509_cert_dir") { Arc::SetEnv("X509_CERT_DIR",rest.c_str()); return 0; } else if(cmd == "x509_voms_dir") { Arc::SetEnv("X509_VOMS_DIR",rest.c_str()); return 0; } else if(cmd == "voms_processing") { Arc::SetEnv("VOMS_PROCESSING",rest.c_str()); return 0; } else if(cmd == "http_proxy") { Arc::SetEnv("ARC_HTTP_PROXY",rest.c_str()); return 0; } else { return 1; // not processed command }; } else if(section == "mapping") { } else if(section == "gridftpd") { // [gridftpd] section if(cmd == "logfile") { if(logfile_.length() == 0) logfile_=rest; } else if(cmd == "logreopen") { std::string arg = Arc::ConfigIni::NextArg(rest); if(arg=="") { logger.msg(Arc::ERROR, "Missing option for command logreopen"); return -1; }; if(strcasecmp("yes",arg.c_str()) == 0) { logreopen_=true; } else if(strcasecmp("no",arg.c_str()) == 0) { logreopen_=false; } else { logger.msg(Arc::ERROR, "Wrong option in logreopen"); return -1; }; } else if(cmd == "user") { if(uid_ == (uid_t)(-1)) { std::string username = rest; std::string groupname(""); std::string::size_type n = username.find(':'); if(n != 
std::string::npos) { groupname=username.c_str()+n+1; username.resize(n); }; if(username.length() == 0) { uid_=0; gid_=0; } else { struct passwd pw_; struct passwd *pw; char buf[BUFSIZ]; getpwnam_r(username.c_str(),&pw_,buf,BUFSIZ,&pw); if(pw == NULL) { logger.msg(Arc::ERROR, "No such user: %s", username); uid_=0; gid_=0; return -1; }; uid_=pw->pw_uid; gid_=pw->pw_gid; }; if(groupname.length() != 0) { struct group gr_; struct group *gr; char buf[BUFSIZ]; getgrnam_r(groupname.c_str(),&gr_,buf,BUFSIZ,&gr); if(gr == NULL) { logger.msg(Arc::ERROR, "No such group: %s", groupname); gid_=0; return -1; }; gid_=gr->gr_gid; }; }; } else if(cmd == "pidfile") { if(pidfile_.length() == 0) pidfile_=rest; } else if(cmd == "loglevel") { if(debug_ == -1) { char* p; debug_ = strtol(rest.c_str(),&p,10); if(((*p) != 0) || (debug_<0)) { logger.msg(Arc::ERROR, "Improper debug level '%s'", rest); return -1; }; }; } else if(cmd == "x509_host_key") { Arc::SetEnv("X509_USER_KEY",rest.c_str()); return 0; } else if(cmd == "x509_host_cert") { Arc::SetEnv("X509_USER_CERT",rest.c_str()); return 0; } else if(cmd == "x509_cert_dir") { Arc::SetEnv("X509_CERT_DIR",rest.c_str()); return 0; } else if(cmd == "globus_tcp_port_range") { Arc::SetEnv("GLOBUS_TCP_PORT_RANGE",rest.c_str()); return 0; } else if(cmd == "globus_udp_port_range") { Arc::SetEnv("GLOBUS_UDP_PORT_RANGE",rest.c_str()); return 0; } else { return 1; // not processed command }; }; return 0; // proccessed command } int Daemon::getopt(int argc, char * const argv[],const char *optstring) { int n; std::string opts(optstring); opts+=DAEMON_OPTS; while((n=::getopt(argc,argv,opts.c_str())) != -1) { switch(n) { case 'F': case 'L': case 'U': case 'P': case 'd': { if(arg(n) != 0) return '.'; }; break; default: return n; }; }; return -1; } int Daemon::daemon(bool close_fds) { // set up logging // this must be a pointer which is not deleted because log destinations // are added by reference... 
Arc::LogFile* logger_file = new Arc::LogFile(logfile_); if (!logger_file || !(*logger_file)) { logger.msg(Arc::ERROR, "Failed to open log file %s", logfile_); return 1; } if (logsize_ > 0) logger_file->setMaxSize(logsize_); if (lognum_ > 0) logger_file->setBackups(lognum_); logger_file->setReopen(logreopen_); if (debug_ >= 0) { Arc::Logger::getRootLogger().setThreshold(Arc::old_level_to_level((unsigned int)debug_)); }; Arc::Logger::getRootLogger().removeDestinations(); Arc::Logger::getRootLogger().addDestination(*logger_file); if(!logreopen_) { sighup_dest = logger_file; signal(SIGHUP,&sighup_handler); }; if(close_fds) { struct rlimit lim; unsigned long long int max_files; if(getrlimit(RLIMIT_NOFILE,&lim) == 0) { max_files=lim.rlim_cur; } else { max_files=4096; }; if(max_files == RLIM_INFINITY) max_files=4096; for(int i=3;i #include #include #include "../auth/auth.h" namespace gridftpd { int config_vo(AuthUser& user,Arc::ConfigIni& sect,std::string& cmd,std::string& rest,Arc::Logger* logger = NULL); int config_vo(std::list& vos,Arc::ConfigIni& sect,std::string& cmd,std::string& rest,Arc::Logger* logger = NULL); } // namespace gridftpd #endif // __GRIDFTPD_CONFIG_VO_H__ nordugrid-arc-6.14.0/src/services/gridftpd/conf/PaxHeaders.30264/daemon.h0000644000000000000000000000013214152153376024122 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.277560603 nordugrid-arc-6.14.0/src/services/gridftpd/conf/daemon.h0000644000175000002070000000140514152153376024107 0ustar00mockbuildmock00000000000000#ifndef __GRIDFTPD_DAEMON_H__ #define __GRIDFTPD_DAEMON_H__ #include #define DAEMON_OPTS "ZzFL:U:P:d:" namespace gridftpd { class Daemon { private: std::string logfile_; int logsize_; int lognum_; bool logreopen_; uid_t uid_; gid_t gid_; bool daemon_; std::string pidfile_; int debug_; public: Daemon(void); ~Daemon(void); int arg(char c); int config(const std::string& section, const std::string& cmd,std::string& rest); int getopt(int argc, char * const argv[],const char *optstring); int daemon(bool close_fds = false); const char* short_help(void); void logfile(const char* path); void pidfile(const char* path); }; } // namespace gridftpd #endif // __GRIDFTPD_DAEMON_H__ nordugrid-arc-6.14.0/src/services/gridftpd/conf/PaxHeaders.30264/README0000644000000000000000000000013114152153376023365 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.274560558 nordugrid-arc-6.14.0/src/services/gridftpd/conf/README0000644000175000002070000000003214152153376023346 0ustar00mockbuildmock00000000000000configuration processing. 
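
The conf/ sources above provide the gridftpd::Daemon helper declared in daemon.h: Daemon::getopt() consumes the generic daemon options from DAEMON_OPTS (-F foreground, -L logfile, -U user[:group], -P pidfile, -d debug level) and hands any remaining options back to the caller, Daemon::config() accepts (section, command, value) triples for the [common] and [gridftpd] sections and only applies a value when the corresponding command-line option was not given, and Daemon::daemon() sets up the log destination and optionally closes inherited file descriptors before the service starts. A minimal usage sketch follows; it is not part of the ARC sources, and the extra option string "hc:" and the example logfile path are illustrative assumptions only (the real gridftpd feeds config() with commands parsed from arc.conf via Arc::ConfigIni).

// Minimal usage sketch (not from the ARC sources): driving gridftpd::Daemon
// as declared in daemon.h above.  "hc:" and the logfile value are assumptions.
#include <iostream>
#include <string>
#include "daemon.h"

int main(int argc, char* const argv[]) {
  gridftpd::Daemon daemon;

  // Daemon::getopt() appends DAEMON_OPTS ("ZzFL:U:P:d:") to the caller's option
  // string, handles those options itself and returns the remaining ones to the
  // caller; it returns '.' if it failed to parse one of its own options.
  int opt;
  while ((opt = daemon.getopt(argc, argv, "hc:")) != -1) {
    switch (opt) {
      case 'h': std::cout << "usage: example [-h] [-c conf] ..." << std::endl; return 0;
      case 'c': /* remember an alternative configuration file path */ break;
      case '.': return 1;  // Daemon reported a bad -L/-U/-P/-d argument
      default:  break;
    }
  }

  // Command-line options take precedence, so configuration commands are fed in
  // afterwards; config() returns 0 when the command was consumed, 1 when it is
  // not a Daemon command, and a negative value on error.
  std::string value = "/var/log/arc/gridftpd.log";  // assumed example path
  if (daemon.config("gridftpd", "logfile", value) < 0) return 1;

  // Open the log and optionally close inherited descriptors; with -F the
  // process stays in the foreground.
  if (daemon.daemon(true) != 0) return 1;

  // ... service main loop would run here ...
  return 0;
}

The gridftpd binary built from the Makefile.in below follows this pattern: its Makefile links the conf/libconf.la convenience library (which contains daemon.cpp and conf_vo.cpp) into the gridftpd listener via gridftpd_LDADD.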
nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/arc-gridftpd.service.in0000644000000000000000000000013214152153376026116 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.511647567 30 ctime=1638455099.208559566 nordugrid-arc-6.14.0/src/services/gridftpd/arc-gridftpd.service.in0000644000175000002070000000036614152153376026110 0ustar00mockbuildmock00000000000000[Unit] Description=ARC gridftpd Documentation=man:gridftpd(8) After=local_fs.target remote_fs.target [Service] ExecStart=@prefix@/@pkgdatasubdir@/arc-gridftpd-start -F SuccessExitStatus=255 NotifyAccess=all [Install] WantedBy=multi-user.target nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435023622 xustar000000000000000030 mtime=1638455069.268109696 30 atime=1638455090.824433591 30 ctime=1638455099.205559521 nordugrid-arc-6.14.0/src/services/gridftpd/Makefile.in0000644000175000002070000017067614152153435023630 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ sbin_PROGRAMS = gridftpd$(EXEEXT) subdir = src/services/gridftpd DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/arc-gridftpd.in $(srcdir)/arc-gridftpd.service.in \ $(srcdir)/arc-gridftpd-start.in $(srcdir)/gridftpd.8.in \ $(top_srcdir)/depcomp README ACLOCAL_M4 
= $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = arc-gridftpd arc-gridftpd.service \ arc-gridftpd-start gridftpd.8 CONFIG_CLEAN_VPATH_FILES = LTLIBRARIES = $(noinst_LTLIBRARIES) libgridftpd_la_LIBADD = am_libgridftpd_la_OBJECTS = libgridftpd_la-userspec.lo \ libgridftpd_la-names.lo libgridftpd_la-misc.lo libgridftpd_la_OBJECTS = $(am_libgridftpd_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libgridftpd_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(libgridftpd_la_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) \ $(LDFLAGS) -o $@ am__installdirs = "$(DESTDIR)$(sbindir)" "$(DESTDIR)$(initddir)" \ "$(DESTDIR)$(pkgdatadir)" "$(DESTDIR)$(man8dir)" \ "$(DESTDIR)$(unitsdir)" PROGRAMS = $(sbin_PROGRAMS) am_gridftpd_OBJECTS = gridftpd-commands.$(OBJEXT) \ gridftpd-config.$(OBJEXT) gridftpd-fileroot.$(OBJEXT) \ gridftpd-listener.$(OBJEXT) gridftpd-dataread.$(OBJEXT) \ gridftpd-datawrite.$(OBJEXT) gridftpd-datalist.$(OBJEXT) \ gridftpd-fileroot_config.$(OBJEXT) gridftpd_OBJECTS = $(am_gridftpd_OBJECTS) am__DEPENDENCIES_1 = gridftpd_DEPENDENCIES = libgridftpd.la conf/libconf.la run/librun.la \ misc/libmisc.la auth/libauth.la auth/libmap.la \ $(top_builddir)/src/hed/libs/globusutils/libarcglobusutils.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) gridftpd_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(gridftpd_CXXFLAGS) \ $(CXXFLAGS) $(gridftpd_LDFLAGS) $(LDFLAGS) -o $@ am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' 
am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } SCRIPTS = $(initd_SCRIPTS) $(pkgdata_SCRIPTS) AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libgridftpd_la_SOURCES) $(gridftpd_SOURCES) DIST_SOURCES = $(libgridftpd_la_SOURCES) $(gridftpd_SOURCES) RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac man8dir = $(mandir)/man8 NROFF = nroff MANS = $(man_MANS) DATA = $(units_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. 
am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = 
@DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = 
@PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ noinst_LTLIBRARIES = libgridftpd.la 
@SYSV_SCRIPTS_ENABLED_FALSE@GRIDFTPD_SCRIPT = @SYSV_SCRIPTS_ENABLED_TRUE@GRIDFTPD_SCRIPT = arc-gridftpd initd_SCRIPTS = $(GRIDFTPD_SCRIPT) @SYSTEMD_UNITS_ENABLED_FALSE@GRIDFTPD_UNIT = @SYSTEMD_UNITS_ENABLED_TRUE@GRIDFTPD_UNIT = arc-gridftpd.service units_DATA = $(GRIDFTPD_UNIT) pkgdata_SCRIPTS = arc-gridftpd-start libgridftpd_la_SOURCES = userspec.cpp names.cpp misc.cpp libgridftpd_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLOBUS_FTP_CLIENT_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) gridftpd_SOURCES = commands.cpp config.cpp fileroot.cpp listener.cpp \ dataread.cpp datawrite.cpp datalist.cpp fileroot_config.cpp \ commands.h conf.h fileroot.h misc.h names.h userspec.h gridftpd_CXXFLAGS = -I$(top_srcdir)/include \ $(GLOBUS_FTP_CLIENT_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) gridftpd_LDADD = libgridftpd.la conf/libconf.la run/librun.la \ misc/libmisc.la auth/libauth.la auth/libmap.la \ $(top_builddir)/src/hed/libs/globusutils/libarcglobusutils.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLOBUS_FTP_CONTROL_LIBS) $(GLOBUS_GSS_ASSIST_LIBS) \ $(GLOBUS_GSSAPI_GSI_LIBS) $(GLOBUS_IO_LIBS) \ $(GLOBUS_GSI_CERT_UTILS_LIBS) $(GLOBUS_GSI_CREDENTIAL_LIBS) \ $(GLOBUS_OPENSSL_MODULE_LIBS) $(GLOBUS_COMMON_LIBS) gridftpd_LDFLAGS = -rdynamic SUBDIRS = misc conf run auth . fileplugin DIST_SUBDIRS = misc conf run auth . fileplugin man_MANS = gridftpd.8 all: all-recursive .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/gridftpd/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/gridftpd/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): arc-gridftpd: $(top_builddir)/config.status $(srcdir)/arc-gridftpd.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-gridftpd.service: $(top_builddir)/config.status $(srcdir)/arc-gridftpd.service.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-gridftpd-start: $(top_builddir)/config.status $(srcdir)/arc-gridftpd-start.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ gridftpd.8: $(top_builddir)/config.status $(srcdir)/gridftpd.8.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ clean-noinstLTLIBRARIES: -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) @list='$(noinst_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libgridftpd.la: $(libgridftpd_la_OBJECTS) $(libgridftpd_la_DEPENDENCIES) $(EXTRA_libgridftpd_la_DEPENDENCIES) $(AM_V_CXXLD)$(libgridftpd_la_LINK) $(libgridftpd_la_OBJECTS) $(libgridftpd_la_LIBADD) $(LIBS) install-sbinPROGRAMS: $(sbin_PROGRAMS) @$(NORMAL_INSTALL) @list='$(sbin_PROGRAMS)'; test -n "$(sbindir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(sbindir)'"; \ $(MKDIR_P) "$(DESTDIR)$(sbindir)" || exit 1; \ fi; \ for p in $$list; do echo "$$p $$p"; done | \ sed 's/$(EXEEXT)$$//' | \ while read p p1; do if test -f $$p \ || test -f $$p1 \ ; then echo "$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n;h' \ -e 's|.*|.|' \ -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) files[d] = files[d] " " $$1; \ else { print "f", $$3 "/" $$4, $$1; } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(sbindir)$$dir'"; \ $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(sbindir)$$dir" || exit $$?; \ } \ ; done uninstall-sbinPROGRAMS: @$(NORMAL_UNINSTALL) @list='$(sbin_PROGRAMS)'; test -n "$(sbindir)" || list=; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ -e 's/$$/$(EXEEXT)/' \ `; \ test -n "$$list" || exit 0; \ echo " ( cd '$(DESTDIR)$(sbindir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(sbindir)" && rm -f $$files clean-sbinPROGRAMS: @list='$(sbin_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm 
-f $$list gridftpd$(EXEEXT): $(gridftpd_OBJECTS) $(gridftpd_DEPENDENCIES) $(EXTRA_gridftpd_DEPENDENCIES) @rm -f gridftpd$(EXEEXT) $(AM_V_CXXLD)$(gridftpd_LINK) $(gridftpd_OBJECTS) $(gridftpd_LDADD) $(LIBS) install-initdSCRIPTS: $(initd_SCRIPTS) @$(NORMAL_INSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(initddir)'"; \ $(MKDIR_P) "$(DESTDIR)$(initddir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(initddir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(initddir)$$dir" || exit $$?; \ } \ ; done uninstall-initdSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(initddir)'; $(am__uninstall_files_from_dir) install-pkgdataSCRIPTS: $(pkgdata_SCRIPTS) @$(NORMAL_INSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgdatadir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgdatadir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(pkgdatadir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(pkgdatadir)$$dir" || exit $$?; \ } \ ; done uninstall-pkgdataSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(pkgdatadir)'; $(am__uninstall_files_from_dir) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/gridftpd-commands.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/gridftpd-config.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/gridftpd-datalist.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/gridftpd-dataread.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/gridftpd-datawrite.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/gridftpd-fileroot.Po@am__quote@ @AMDEP_TRUE@@am__include@ 
@am__quote@./$(DEPDIR)/gridftpd-fileroot_config.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/gridftpd-listener.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libgridftpd_la-misc.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libgridftpd_la-names.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libgridftpd_la-userspec.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libgridftpd_la-userspec.lo: userspec.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgridftpd_la_CXXFLAGS) $(CXXFLAGS) -MT libgridftpd_la-userspec.lo -MD -MP -MF $(DEPDIR)/libgridftpd_la-userspec.Tpo -c -o libgridftpd_la-userspec.lo `test -f 'userspec.cpp' || echo '$(srcdir)/'`userspec.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libgridftpd_la-userspec.Tpo $(DEPDIR)/libgridftpd_la-userspec.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='userspec.cpp' object='libgridftpd_la-userspec.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgridftpd_la_CXXFLAGS) $(CXXFLAGS) -c -o libgridftpd_la-userspec.lo `test -f 'userspec.cpp' || echo '$(srcdir)/'`userspec.cpp libgridftpd_la-names.lo: names.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgridftpd_la_CXXFLAGS) $(CXXFLAGS) -MT libgridftpd_la-names.lo -MD -MP -MF $(DEPDIR)/libgridftpd_la-names.Tpo -c -o libgridftpd_la-names.lo `test -f 'names.cpp' || echo '$(srcdir)/'`names.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libgridftpd_la-names.Tpo $(DEPDIR)/libgridftpd_la-names.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='names.cpp' object='libgridftpd_la-names.lo' libtool=yes @AMDEPBACKSLASH@ 
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgridftpd_la_CXXFLAGS) $(CXXFLAGS) -c -o libgridftpd_la-names.lo `test -f 'names.cpp' || echo '$(srcdir)/'`names.cpp libgridftpd_la-misc.lo: misc.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgridftpd_la_CXXFLAGS) $(CXXFLAGS) -MT libgridftpd_la-misc.lo -MD -MP -MF $(DEPDIR)/libgridftpd_la-misc.Tpo -c -o libgridftpd_la-misc.lo `test -f 'misc.cpp' || echo '$(srcdir)/'`misc.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libgridftpd_la-misc.Tpo $(DEPDIR)/libgridftpd_la-misc.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='misc.cpp' object='libgridftpd_la-misc.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libgridftpd_la_CXXFLAGS) $(CXXFLAGS) -c -o libgridftpd_la-misc.lo `test -f 'misc.cpp' || echo '$(srcdir)/'`misc.cpp gridftpd-commands.o: commands.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-commands.o -MD -MP -MF $(DEPDIR)/gridftpd-commands.Tpo -c -o gridftpd-commands.o `test -f 'commands.cpp' || echo '$(srcdir)/'`commands.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-commands.Tpo $(DEPDIR)/gridftpd-commands.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='commands.cpp' object='gridftpd-commands.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-commands.o `test -f 'commands.cpp' || echo '$(srcdir)/'`commands.cpp gridftpd-commands.obj: commands.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-commands.obj -MD -MP -MF $(DEPDIR)/gridftpd-commands.Tpo -c -o gridftpd-commands.obj `if test -f 'commands.cpp'; then $(CYGPATH_W) 'commands.cpp'; else $(CYGPATH_W) '$(srcdir)/commands.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-commands.Tpo $(DEPDIR)/gridftpd-commands.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='commands.cpp' object='gridftpd-commands.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-commands.obj `if test -f 'commands.cpp'; then $(CYGPATH_W) 'commands.cpp'; else $(CYGPATH_W) '$(srcdir)/commands.cpp'; fi` gridftpd-config.o: config.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-config.o 
-MD -MP -MF $(DEPDIR)/gridftpd-config.Tpo -c -o gridftpd-config.o `test -f 'config.cpp' || echo '$(srcdir)/'`config.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-config.Tpo $(DEPDIR)/gridftpd-config.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='config.cpp' object='gridftpd-config.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-config.o `test -f 'config.cpp' || echo '$(srcdir)/'`config.cpp gridftpd-config.obj: config.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-config.obj -MD -MP -MF $(DEPDIR)/gridftpd-config.Tpo -c -o gridftpd-config.obj `if test -f 'config.cpp'; then $(CYGPATH_W) 'config.cpp'; else $(CYGPATH_W) '$(srcdir)/config.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-config.Tpo $(DEPDIR)/gridftpd-config.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='config.cpp' object='gridftpd-config.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-config.obj `if test -f 'config.cpp'; then $(CYGPATH_W) 'config.cpp'; else $(CYGPATH_W) '$(srcdir)/config.cpp'; fi` gridftpd-fileroot.o: fileroot.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-fileroot.o -MD -MP -MF $(DEPDIR)/gridftpd-fileroot.Tpo -c -o gridftpd-fileroot.o `test -f 'fileroot.cpp' || echo '$(srcdir)/'`fileroot.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-fileroot.Tpo $(DEPDIR)/gridftpd-fileroot.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='fileroot.cpp' object='gridftpd-fileroot.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-fileroot.o `test -f 'fileroot.cpp' || echo '$(srcdir)/'`fileroot.cpp gridftpd-fileroot.obj: fileroot.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-fileroot.obj -MD -MP -MF $(DEPDIR)/gridftpd-fileroot.Tpo -c -o gridftpd-fileroot.obj `if test -f 'fileroot.cpp'; then $(CYGPATH_W) 'fileroot.cpp'; else $(CYGPATH_W) '$(srcdir)/fileroot.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-fileroot.Tpo $(DEPDIR)/gridftpd-fileroot.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='fileroot.cpp' object='gridftpd-fileroot.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-fileroot.obj `if test -f 'fileroot.cpp'; then $(CYGPATH_W) 'fileroot.cpp'; else $(CYGPATH_W) '$(srcdir)/fileroot.cpp'; fi` gridftpd-listener.o: listener.cpp 
@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-listener.o -MD -MP -MF $(DEPDIR)/gridftpd-listener.Tpo -c -o gridftpd-listener.o `test -f 'listener.cpp' || echo '$(srcdir)/'`listener.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-listener.Tpo $(DEPDIR)/gridftpd-listener.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='listener.cpp' object='gridftpd-listener.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-listener.o `test -f 'listener.cpp' || echo '$(srcdir)/'`listener.cpp gridftpd-listener.obj: listener.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-listener.obj -MD -MP -MF $(DEPDIR)/gridftpd-listener.Tpo -c -o gridftpd-listener.obj `if test -f 'listener.cpp'; then $(CYGPATH_W) 'listener.cpp'; else $(CYGPATH_W) '$(srcdir)/listener.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-listener.Tpo $(DEPDIR)/gridftpd-listener.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='listener.cpp' object='gridftpd-listener.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-listener.obj `if test -f 'listener.cpp'; then $(CYGPATH_W) 'listener.cpp'; else $(CYGPATH_W) '$(srcdir)/listener.cpp'; fi` gridftpd-dataread.o: dataread.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-dataread.o -MD -MP -MF $(DEPDIR)/gridftpd-dataread.Tpo -c -o gridftpd-dataread.o `test -f 'dataread.cpp' || echo '$(srcdir)/'`dataread.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-dataread.Tpo $(DEPDIR)/gridftpd-dataread.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='dataread.cpp' object='gridftpd-dataread.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-dataread.o `test -f 'dataread.cpp' || echo '$(srcdir)/'`dataread.cpp gridftpd-dataread.obj: dataread.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-dataread.obj -MD -MP -MF $(DEPDIR)/gridftpd-dataread.Tpo -c -o gridftpd-dataread.obj `if test -f 'dataread.cpp'; then $(CYGPATH_W) 'dataread.cpp'; else $(CYGPATH_W) '$(srcdir)/dataread.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-dataread.Tpo $(DEPDIR)/gridftpd-dataread.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='dataread.cpp' object='gridftpd-dataread.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) 
$(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-dataread.obj `if test -f 'dataread.cpp'; then $(CYGPATH_W) 'dataread.cpp'; else $(CYGPATH_W) '$(srcdir)/dataread.cpp'; fi` gridftpd-datawrite.o: datawrite.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-datawrite.o -MD -MP -MF $(DEPDIR)/gridftpd-datawrite.Tpo -c -o gridftpd-datawrite.o `test -f 'datawrite.cpp' || echo '$(srcdir)/'`datawrite.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-datawrite.Tpo $(DEPDIR)/gridftpd-datawrite.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='datawrite.cpp' object='gridftpd-datawrite.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-datawrite.o `test -f 'datawrite.cpp' || echo '$(srcdir)/'`datawrite.cpp gridftpd-datawrite.obj: datawrite.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-datawrite.obj -MD -MP -MF $(DEPDIR)/gridftpd-datawrite.Tpo -c -o gridftpd-datawrite.obj `if test -f 'datawrite.cpp'; then $(CYGPATH_W) 'datawrite.cpp'; else $(CYGPATH_W) '$(srcdir)/datawrite.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-datawrite.Tpo $(DEPDIR)/gridftpd-datawrite.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='datawrite.cpp' object='gridftpd-datawrite.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-datawrite.obj `if test -f 'datawrite.cpp'; then $(CYGPATH_W) 'datawrite.cpp'; else $(CYGPATH_W) '$(srcdir)/datawrite.cpp'; fi` gridftpd-datalist.o: datalist.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-datalist.o -MD -MP -MF $(DEPDIR)/gridftpd-datalist.Tpo -c -o gridftpd-datalist.o `test -f 'datalist.cpp' || echo '$(srcdir)/'`datalist.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-datalist.Tpo $(DEPDIR)/gridftpd-datalist.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='datalist.cpp' object='gridftpd-datalist.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-datalist.o `test -f 'datalist.cpp' || echo '$(srcdir)/'`datalist.cpp gridftpd-datalist.obj: datalist.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-datalist.obj -MD -MP -MF $(DEPDIR)/gridftpd-datalist.Tpo -c -o gridftpd-datalist.obj `if test -f 'datalist.cpp'; then $(CYGPATH_W) 'datalist.cpp'; else $(CYGPATH_W) '$(srcdir)/datalist.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-datalist.Tpo $(DEPDIR)/gridftpd-datalist.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='datalist.cpp' 
object='gridftpd-datalist.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-datalist.obj `if test -f 'datalist.cpp'; then $(CYGPATH_W) 'datalist.cpp'; else $(CYGPATH_W) '$(srcdir)/datalist.cpp'; fi` gridftpd-fileroot_config.o: fileroot_config.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-fileroot_config.o -MD -MP -MF $(DEPDIR)/gridftpd-fileroot_config.Tpo -c -o gridftpd-fileroot_config.o `test -f 'fileroot_config.cpp' || echo '$(srcdir)/'`fileroot_config.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-fileroot_config.Tpo $(DEPDIR)/gridftpd-fileroot_config.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='fileroot_config.cpp' object='gridftpd-fileroot_config.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-fileroot_config.o `test -f 'fileroot_config.cpp' || echo '$(srcdir)/'`fileroot_config.cpp gridftpd-fileroot_config.obj: fileroot_config.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -MT gridftpd-fileroot_config.obj -MD -MP -MF $(DEPDIR)/gridftpd-fileroot_config.Tpo -c -o gridftpd-fileroot_config.obj `if test -f 'fileroot_config.cpp'; then $(CYGPATH_W) 'fileroot_config.cpp'; else $(CYGPATH_W) '$(srcdir)/fileroot_config.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/gridftpd-fileroot_config.Tpo $(DEPDIR)/gridftpd-fileroot_config.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='fileroot_config.cpp' object='gridftpd-fileroot_config.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(gridftpd_CXXFLAGS) $(CXXFLAGS) -c -o gridftpd-fileroot_config.obj `if test -f 'fileroot_config.cpp'; then $(CYGPATH_W) 'fileroot_config.cpp'; else $(CYGPATH_W) '$(srcdir)/fileroot_config.cpp'; fi` mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man8: $(man_MANS) @$(NORMAL_INSTALL) @list1=''; \ list2='$(man_MANS)'; \ test -n "$(man8dir)" \ && test -n "`echo $$list1$$list2`" \ || exit 0; \ echo " $(MKDIR_P) '$(DESTDIR)$(man8dir)'"; \ $(MKDIR_P) "$(DESTDIR)$(man8dir)" || exit 1; \ { for i in $$list1; do echo "$$i"; done; \ if test -n "$$list2"; then \ for i in $$list2; do echo "$$i"; done \ | sed -n '/\.8[a-z]*$$/p'; \ fi; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^8][0-9a-z]*$$,8,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man8dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man8dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) 
| \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man8dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man8dir)" || exit $$?; }; \ done; } uninstall-man8: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man8dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.8[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^8][0-9a-z]*$$,8,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ dir='$(DESTDIR)$(man8dir)'; $(am__uninstall_files_from_dir) install-unitsDATA: $(units_DATA) @$(NORMAL_INSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(unitsdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(unitsdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(unitsdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(unitsdir)" || exit $$?; \ done uninstall-unitsDATA: @$(NORMAL_UNINSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(unitsdir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(LTLIBRARIES) $(PROGRAMS) $(SCRIPTS) $(MANS) $(DATA) installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(sbindir)" "$(DESTDIR)$(initddir)" "$(DESTDIR)$(pkgdatadir)" "$(DESTDIR)$(man8dir)" "$(DESTDIR)$(unitsdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
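# Editorial note (not part of the generated file): the targets above are
# standard automake boilerplate, so build settings are meant to be overridden
# on the make command line rather than by editing this Makefile.in.  A typical
# staged installation of the gridftpd service might look like this
# (illustrative invocation; the staging path is hypothetical):
#   make install DESTDIR=/tmp/arc-stage
#   make install-strip DESTDIR=/tmp/arc-stage   # same, but binaries pass through $(STRIP)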
clean: clean-recursive clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \ clean-sbinPROGRAMS mostlyclean-am distclean: distclean-recursive -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-initdSCRIPTS install-man \ install-pkgdataSCRIPTS install-unitsDATA install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-sbinPROGRAMS install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-man8 install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-initdSCRIPTS uninstall-man \ uninstall-pkgdataSCRIPTS uninstall-sbinPROGRAMS \ uninstall-unitsDATA uninstall-man: uninstall-man8 .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool \ clean-noinstLTLIBRARIES clean-sbinPROGRAMS cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-initdSCRIPTS install-man install-man8 \ install-pdf install-pdf-am install-pkgdataSCRIPTS install-ps \ install-ps-am install-sbinPROGRAMS install-strip \ install-unitsDATA installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am uninstall-initdSCRIPTS uninstall-man \ uninstall-man8 uninstall-pkgdataSCRIPTS uninstall-sbinPROGRAMS \ uninstall-unitsDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/names.h0000644000000000000000000000013114152153376023034 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455099.224559806 nordugrid-arc-6.14.0/src/services/gridftpd/names.h0000644000175000002070000000041114152153376023016 0ustar00mockbuildmock00000000000000#include bool remove_last_name(std::string &name); bool keep_last_name(std::string &name); char* remove_head_dir_c(const char* name,int dir_len); std::string remove_head_dir_s(std::string &name,int dir_len); const char* get_last_name(const char* name); nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/fileplugin0000644000000000000000000000013214152153473023640 xustar000000000000000030 mtime=1638455099.373562045 30 atime=1638455103.999631554 30 ctime=1638455099.373562045 nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/0000755000175000002070000000000014152153473023702 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/PaxHeaders.30264/fileplugin.cpp0000644000000000000000000000013114152153376026560 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 29 ctime=1638455099.37256203 nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/fileplugin.cpp0000644000175000002070000006355214152153376026561 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #define GRIDFTP_PLUGIN #include #if HAVE_SYS_STATFS_H #include #endif #if HAVE_SYS_MOUNT_H #include #endif #if HAVE_SYS_VFS_H #include #endif #include #include #include "fileplugin.h" #include "../userspec.h" #include "../names.h" #include "../misc.h" #ifndef HAVE_STRERROR_R int strerror_r (int errnum, char * buf, size_t buflen) { char * estring = strerror (errnum); strncpy (buf, estring, buflen); buf[buflen-1] = '\0'; return 0; } #endif static Arc::Logger logger(Arc::Logger::getRootLogger(),"DirectFilePlugin"); static bool parse_id(std::string s,int &id,int base = 10) { if((s.length()==1) && (s[0] == '*')) { id=-1; return true; } else { char* end; id=strtoul(s.c_str(),&end,base); if(*end) { return false; }; }; return true; } static bool parse_owner_rights(std::string &rest,int &uid,int &gid,int &orbits,int &andbits) { struct passwd pw_; struct group gr_; struct passwd *pw; struct group *gr; char buf[BUFSIZ]; std::string owner = Arc::ConfigIni::NextArg(rest); std::string acc_rights = Arc::ConfigIni::NextArg(rest); if(acc_rights.length() == 0) { logger.msg(Arc::WARNING, "Can't parse access rights in configuration line"); return false; }; std::string::size_type n; n=owner.find(':'); if(n == std::string::npos) { logger.msg(Arc::WARNING, "Can't parse user:group in configuration line"); return false; }; if(!parse_id(owner.substr(0,n),uid)) { /* not number, must be name */ getpwnam_r(owner.substr(0,n).c_str(),&pw_,buf,BUFSIZ,&pw); if(pw == NULL) { logger.msg(Arc::WARNING, "Can't recognize user in configuration line"); return false; }; uid=pw->pw_uid; }; if(!parse_id(owner.substr(n+1),gid)) { /* not number, must be name */ getgrnam_r(owner.substr(n+1).c_str(),&gr_,buf,BUFSIZ,&gr); if(gr == NULL) { logger.msg(Arc::WARNING, "Can't recognize group in configuration line"); return false; }; gid=gr->gr_gid; }; n=acc_rights.find(':'); if(n == std::string::npos) { logger.msg(Arc::WARNING, "Can't parse or:and in configuration line"); return false; }; if((!parse_id(acc_rights.substr(0,n),orbits,8)) || (!parse_id(acc_rights.substr(0,n),andbits,8))) { logger.msg(Arc::WARNING, "Can't parse or:and in 
configuration line"); return false; }; return true; } DirectFilePlugin::DirectFilePlugin(std::istream &cfile,userspec_t const &user) { data_file=-1; uid=user.get_uid(); gid=user.get_gid(); /* read configuration */ for(;;) { std::string rest=Arc::ConfigFile::read_line(cfile); std::string command=Arc::ConfigIni::NextArg(rest); if(command.length() == 0) break; /* end of file - should not be here */ if(command == "dir") { DirectAccess::diraccess_t laccess; /* filling default access */ laccess.read=false; laccess.dirlist=false; laccess.cd=false; laccess.creat=false; laccess.overwrite=false; laccess.append=false; laccess.del=false; laccess.mkdir=false; laccess.access=DirectAccess::local_unix_access; bool parsed_line = false; rest=subst_user_spec(rest,&user); std::string dir = Arc::ConfigIni::NextArg(rest); if(dir.length() == 0) { logger.msg(Arc::WARNING, "Can't parse configuration line"); continue; }; if(!Arc::CanonicalDir(dir,false)) { logger.msg(Arc::WARNING, "Bad directory name: %s", dir); continue; }; for(;;) { std::string subcommand = Arc::ConfigIni::NextArg(rest); if(subcommand.length() == 0) { parsed_line=true; break; }; if(subcommand == "read") { laccess.read=true; } else if(subcommand == "delete") { laccess.del=true; } else if(subcommand == "append") { laccess.append=true; } else if(subcommand == "overwrite") { laccess.overwrite=true; } else if(subcommand == "cd") { laccess.cd=true; } else if(subcommand == "dirlist") { laccess.dirlist=true; } else if(subcommand == "create") { laccess.creat=true; if(!parse_owner_rights(rest, laccess.creat_uid,laccess.creat_gid, laccess.creat_perm_or,laccess.creat_perm_and)) { logger.msg(Arc::WARNING, "Can't parse create arguments in configuration line"); break; }; } else if(subcommand == "mkdir") { laccess.mkdir=true; if(!parse_owner_rights(rest, laccess.mkdir_uid,laccess.mkdir_gid, laccess.mkdir_perm_or,laccess.mkdir_perm_and)) { logger.msg(Arc::WARNING, "Can't parse mkdir arguments in configuration line"); break; }; } else if(subcommand == "owner") { laccess.access=DirectAccess::local_user_access; } else if(subcommand == "group") { laccess.access=DirectAccess::local_group_access; } else if(subcommand == "other") { laccess.access=DirectAccess::local_other_access; } else if(subcommand == "nouser") { laccess.access=DirectAccess::local_none_access; } else { logger.msg(Arc::WARNING, "Bad subcommand in configuration line: %s", subcommand); continue; }; }; if(parsed_line) { access.push_back(DirectAccess(dir,laccess)); }; } else if(command == "mount") { rest=subst_user_spec(rest,&user); mount=rest; if((mount.length() == 0) || (!Arc::CanonicalDir(mount,false))) { logger.msg(Arc::WARNING, "Bad mount directory specified"); }; logger.msg(Arc::INFO, "Mount point %s", mount); } else if(command == "endpoint") { endpoint=rest; } else if(command == "end") { break; /* end of section */ } else { logger.msg(Arc::WARNING, "Unsupported configuration command: %s", command); }; }; access.sort(DirectAccess::comp); file_mode=file_access_none; } /* name must be absolute path */ /* make directories out of scope of mount dir */ int makedirs(std::string &name) { /* to make it faster - just check if it exists */ struct stat st; if(stat(name.c_str(),&st) == 0) { if(S_ISDIR(st.st_mode)) return 0; return 1; }; std::string::size_type n=1; for(;;) { if(n >= name.length()) break; n=name.find('/',n); if(n==std::string::npos) n=name.length(); std::string dname=name.substr(0,n); n++; if(stat(dname.c_str(),&st) == 0) { /* have intermediate object */ if(S_ISDIR(st.st_mode)) continue; /* 
already have - ok */ return 1; /* can't make directory based on file - not in unix */ }; /* no such object - create */ if(mkdir(dname.c_str(),S_IRWXU | S_IRWXG | S_IRWXO) == 0) continue; char errmgsbuf[256] = ""; (void)strerror_r(errno,errmgsbuf,sizeof(errmgsbuf)); logger.msg(Arc::ERROR, "mkdir failed: %s", errmgsbuf); return 1; /* directory creation failed */ }; return 0; } /* make all directories */ int DirectFilePlugin::makedir(std::string &dname) { /* first check for mount point */ std::string mname='/'+mount; if(makedirs(mname) != 0) { /* can't make mount point */ logger.msg(Arc::WARNING, "Warning: mount point %s creation failed.", mname); return 1; }; /* now go through rest of directories */ std::string::size_type n = 0; std::string pdname(""); std::list::iterator i=control_dir(pdname,false); if(i==access.end()) return 1; /* no root ? - strange */ pdname=real_name(pdname); int ur=i->unix_rights(pdname,uid,gid); if(ur & S_IFREG) return 1; if(!(ur & S_IFDIR)) return 1; for(;;) { if(n >= dname.length()) break; n=dname.find('/',n); if(n==std::string::npos) n=dname.length(); std::string fdname=dname.substr(0,n); n++; /* remember if parrent directory allows mkdir */ bool allow_mkdir = i->access.mkdir; i=control_dir(fdname,false); if(i==access.end()) return 1; /* first check maybe it already exists */ fdname=real_name(fdname); int pur = ur; ur=i->unix_rights(fdname,uid,gid); if(ur & S_IFDIR) continue; /* already exists */ if(ur & S_IFREG) return 1; /* can't make directory with same name as file */ /* check if parrent directory allows mkdir */ if(!allow_mkdir) return -1; if(!(pur & S_IWUSR)) return 1; /* create directory with proper rights */ if(i->unix_set(uid,gid) == 0) { if(::mkdir(fdname.c_str(), i->access.mkdir_perm_or & i->access.mkdir_perm_and) == 0) { chmod(fdname.c_str(), i->access.mkdir_perm_or & i->access.mkdir_perm_and); i->unix_reset(); uid_t u = i->access.mkdir_uid; gid_t g = i->access.mkdir_gid; if(u == ((uid_t)(-1))) u=uid; if(g == ((gid_t)(-1))) g=gid; if(chown(fdname.c_str(),u,g) != 0) {} continue; } else { i->unix_reset(); }; }; char errmgsbuf[256] = ""; (void)strerror_r(errno,errmgsbuf,sizeof(errmgsbuf)); logger.msg(Arc::ERROR, "mkdir failed: %s", errmgsbuf); return 1; /* directory creation failed */ }; return 0; } int DirectFilePlugin::removefile(std::string &name) { std::list::iterator i=control_dir(name,true); if(i==access.end()) return 1; if(!(i->access.del)) return 1; std::string fname=real_name(name); int ur=i->unix_rights(fname,uid,gid); if(ur == 0 && errno > 0) { // stat failed error_description = Arc::StrError(errno); return 1; } if(ur & S_IFDIR) { error_description = "Is a directory"; return 1; } if(!(ur & S_IFREG)) return 1; if(i->unix_set(uid,gid) != 0) return 1; if(::remove(fname.c_str()) != 0) { error_description = Arc::StrError(errno); i->unix_reset(); return 1; }; i->unix_reset(); return 0; } int DirectFilePlugin::removedir(std::string &dname) { std::list::iterator i=control_dir(dname,true); if(i==access.end()) return 1; if(!(i->access.del)) return 1; std::string fdname=real_name(dname); int ur=i->unix_rights(fdname,uid,gid); if(ur == 0 && errno > 0) { // stat failed error_description = Arc::StrError(errno); return 1; } if(!(ur & S_IFDIR)) { error_description = "Not a directory"; return 1; } if(i->unix_set(uid,gid) != 0) return 1; if(::remove(fdname.c_str()) != 0) { error_description = Arc::StrError(errno); i->unix_reset(); return 1; }; i->unix_reset(); return 0; } int DirectFilePlugin::open(const char* name,open_modes mode,unsigned long long int size) { 
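/* Editorial note (illustrative, not part of the original source):
 * open() maps a GridFTP transfer request onto a local file.  For
 * GRIDFTP_OPEN_RETRIEVE the matching "dir ... read" rule and unix read
 * permission are required; for GRIDFTP_OPEN_STORE an existing file needs the
 * "overwrite" option while a missing file needs "create", and in both store
 * cases statfs() is consulted to check that enough free space exists for the
 * advertised size.  A minimal sketch of how a caller could drive the plugin
 * (the plugin instance and the path are hypothetical):
 *
 *   DirectFilePlugin plugin(cfile, user);              // configured elsewhere
 *   if(plugin.open("jobs/out.dat", GRIDFTP_OPEN_RETRIEVE, 0) == 0) {
 *     unsigned char buf[65536];
 *     unsigned long long int size = sizeof(buf);
 *     plugin.read(buf, 0, &size);                      // first block; size is updated
 *     plugin.close(true);                              // eof=true keeps the file
 *   }
 */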
logger.msg(Arc::VERBOSE, "plugin: open: %s", name); std::string fname = real_name(name); if( mode == GRIDFTP_OPEN_RETRIEVE ) { /* open for reading */ std::list::iterator i=control_dir(name,true); if(i==access.end()) return 1; /* error ? */ if(i->access.read) { int ur=(*i).unix_rights(fname,uid,gid); if(ur == 0 && errno > 0) { // stat failed error_description = Arc::StrError(errno); return 1; } if((ur & S_IFREG) && (ur & S_IRUSR)) { /* so open it */ if(i->unix_set(uid,gid) != 0) return 1; logger.msg(Arc::INFO, "Retrieving file %s", fname); data_file=::open(fname.c_str(),O_RDONLY); i->unix_reset(); if(data_file == -1) return 1; file_mode=file_access_read; file_name=fname; return 0; }; }; return 1; } else if( mode == GRIDFTP_OPEN_STORE ) { /* open for writing - overwrite */ std::string dname=name; if(!remove_last_name(dname)) { return 1; }; std::list::iterator i=control_dir(name,true); if(i==access.end()) return 1; /* first check if file exists */ int ur=i->unix_rights(fname,uid,gid); if(ur & S_IFREG) { if(i->access.overwrite) { /* can overwrite */ if(ur & S_IWUSR) { /* really can ? */ if(size > 0) { struct statfs dst; #ifndef sun if(statfs((char*)(fname.c_str()),&dst) == 0) { #else if(statfs((char*)(fname.c_str()),&dst,0,0) == 0) { #endif uid_t uid_; gid_t gid_; unsigned long long size_ = 0; time_t changed_,modified_; bool is_file_; i->unix_info(fname,uid_,gid_,size_,changed_,modified_,is_file_); if(size > ((dst.f_bfree*dst.f_bsize) + size_)) { logger.msg(Arc::ERROR, "Not enough space to store file"); return 1; }; }; }; if(i->unix_set(uid,gid) != 0) return 1; logger.msg(Arc::INFO, "Storing file %s", fname); data_file=::open(fname.c_str(),O_WRONLY); i->unix_reset(); if(data_file == -1) return 1; file_mode=file_access_overwrite; file_name=fname; if(truncate(file_name.c_str(),0) != 0) {} return 0; }; }; error_description="File exists, overwrite not allowed"; return 1; } else if(ur & S_IFDIR) { /* it's a directory */ return 1; } else { /* no such object in filesystem */ if(i->access.creat) { /* allowed to create new file */ std::string fdname = real_name(dname); /* make sure we have directory to store file */ if(makedir(dname) != 0) return 1; /* problems with underlaying dir */ int ur=i->unix_rights(fdname,uid,gid); if((ur & S_IWUSR) && (ur & S_IFDIR)) { if(size > 0) { struct statfs dst; #ifndef sun if(statfs((char*)(fname.c_str()),&dst) == 0) { #else if(statfs((char*)(fname.c_str()),&dst,0,0) == 0) { #endif if(size > (dst.f_bfree*dst.f_bsize)) { logger.msg(Arc::ERROR, "Not enough space to store file"); return 1; }; }; }; if(i->unix_set(uid,gid) != 0) return 1; logger.msg(Arc::INFO, "Storing file %s", fname); data_file=::open(fname.c_str(),O_WRONLY | O_CREAT | O_EXCL, i->access.creat_perm_or & i->access.creat_perm_and); i->unix_reset(); if(data_file == -1) return 1; uid_t u = i->access.creat_uid; gid_t g = i->access.creat_gid; if(u == ((uid_t)(-1))) u=uid; if(g == ((gid_t)(-1))) g=gid; logger.msg(Arc::VERBOSE, "open: changing owner for %s, %i, %i", fname, u, gid); if(chown(fname.c_str(),u,g) != 0) {} /* adjust permissions because open uses umask */ chmod(fname.c_str(), i->access.creat_perm_or & i->access.creat_perm_and); struct stat st; stat(fname.c_str(),&st); logger.msg(Arc::VERBOSE, "open: owner: %i %i", st.st_uid, st.st_gid); file_mode=file_access_create; file_name=fname; return 0; }; }; }; return 1; } logger.msg(Arc::WARNING, "Unknown open mode %s", mode); return 1; } int DirectFilePlugin::close(bool eof) { logger.msg(Arc::VERBOSE, "plugin: close"); if(data_file != -1) { if(eof) { 
::close(data_file); } else { /* file was not transferred properly */ if((file_mode==file_access_create) || (file_mode==file_access_overwrite)) { /* destroy file */ ::close(data_file); ::unlink(file_name.c_str()); }; }; }; return 0; } int DirectFilePlugin::open_direct(const char* name,open_modes mode) { std::string fname = name; if( mode == GRIDFTP_OPEN_RETRIEVE ) { /* open for reading */ data_file=::open(fname.c_str(),O_RDONLY); if(data_file == -1) return 1; file_mode=file_access_read; file_name=fname; return 0; } else if( mode == GRIDFTP_OPEN_STORE ) { /* open for writing - overwrite */ data_file=::open(fname.c_str(),O_WRONLY | O_CREAT,S_IRUSR | S_IWUSR); if(data_file == -1) return 1; file_mode=file_access_create; file_name=fname; if(truncate(file_name.c_str(),0) != 0) {} if(chown(fname.c_str(),uid,gid) != 0) {} chmod(fname.c_str(),S_IRUSR | S_IWUSR); return 0; } logger.msg(Arc::WARNING, "Unknown open mode %s", mode); return 1; } int DirectFilePlugin::read(unsigned char *buf,unsigned long long int offset,unsigned long long int *size) { ssize_t l; logger.msg(Arc::VERBOSE, "plugin: read"); if(data_file == -1) return 1; if(lseek(data_file,offset,SEEK_SET) != offset) { (*size)=0; return 0; /* can't read anymore */ }; if((l=::read(data_file,buf,(*size))) == -1) { logger.msg(Arc::WARNING, "Error while reading file"); (*size)=0; return 1; }; (*size)=l; return 0; } int DirectFilePlugin::write(unsigned char *buf,unsigned long long int offset,unsigned long long int size) { ssize_t l; size_t ll; logger.msg(Arc::VERBOSE, "plugin: write"); if(data_file == -1) return 1; if(lseek(data_file,offset,SEEK_SET) != offset) { perror("lseek"); return 1; /* can't write at that position */ }; for(ll=0;ll::iterator DirectFilePlugin::control_dir(const std::string &name,bool indir) { return control_dir(name.c_str(),indir); } std::list::iterator DirectFilePlugin::control_dir(const char* name,bool indir) { std::list::iterator i; for(i=access.begin();i!=access.end();++i) { if(i->belongs(name,indir)) break; }; return i; } std::string DirectFilePlugin::real_name(char* name) { return real_name(std::string(name)); } std::string DirectFilePlugin::real_name(std::string name) { std::string fname = ""; if(mount.length() != 0) { fname+='/'+mount; }; if(name.length() != 0) { fname+='/'+name; }; return fname; } bool DirectFilePlugin::fill_object_info(DirEntry &dent,std::string dirname,int ur, std::list::iterator i, DirEntry::object_info_level mode) { bool is_manageable = true; if(mode != DirEntry::minimal_object_info) { std::string ffname = dirname; if(dent.name.length() != 0) ffname+="/"+dent.name; if(i->unix_set(uid,gid) != 0) { is_manageable=false; } else { if(i->unix_info(ffname, dent.uid,dent.gid,dent.size, dent.changed,dent.modified,dent.is_file) != 0) { is_manageable=false; }; i->unix_reset(); }; if(is_manageable) { if(mode != DirEntry::basic_object_info) { int fur=i->unix_rights(ffname,uid,gid); if(S_IFDIR & fur) { dent.is_file=false; } else if(S_IFREG & fur) { dent.is_file=true; } else { is_manageable=false; }; // TODO: treat special files (not regular) properly. (how?) if(is_manageable) { if(dent.is_file) { if(i->access.del && (ur & S_IWUSR)) dent.may_delete=true; if(i->access.overwrite &&(fur & S_IWUSR)) dent.may_write=true; if(i->access.append && (fur & S_IWUSR)) dent.may_append=true; if(i->access.read && (fur & S_IRUSR)) dent.may_read=true; } else { // TODO: this directory can have different rules than i !!!!!! 
if(i->access.del && (ur & S_IWUSR)) dent.may_delete=true; if(i->access.creat && (fur & S_IWUSR)) dent.may_create=true; if(i->access.mkdir && (fur & S_IWUSR)) dent.may_mkdir=true; if(i->access.cd && (fur & S_IXUSR)) dent.may_chdir=true; if(i->access.dirlist &&(fur & S_IRUSR)) dent.may_dirlist=true; if(i->access.del && (fur & S_IWUSR)) dent.may_purge=true; }; }; }; }; }; return is_manageable; } int DirectFilePlugin::readdir(const char* name,std::list &dir_list,DirEntry::object_info_level mode) { /* first check if allowed to read this directory */ std::list::iterator i=control_dir(name,false); if(i==access.end()) return 1; /* error ? */ std::string fname = real_name(name); if(i->access.dirlist) { int ur=i->unix_rights(fname,uid,gid); if(ur == 0 && errno > 0) { // stat failed error_description = Arc::StrError(errno); return 1; } if((ur & S_IFDIR) && (ur & S_IRUSR) && (ur & S_IXUSR)) { /* allowed to list in configuration and by unix rights */ /* following Linux semantics - need r-x for dirlist */ /* now get real listing */ if(i->unix_set(uid,gid) != 0) return 1; DIR* d=::opendir(fname.c_str()); if(d == NULL) { return 1; }; /* maybe return ? */ struct dirent *de; for(;;) { de=::readdir(d); if(de == NULL) break; if((!strcmp(de->d_name,".")) || (!strcmp(de->d_name,".."))) continue; DirEntry dent(true,de->d_name); // treat it as file by default i->unix_reset(); bool is_manageable = fill_object_info(dent,fname,ur,i,mode); i->unix_set(uid,gid); if(is_manageable) { dir_list.push_back(dent); }; }; ::closedir(d); i->unix_reset(); return 0; } else if(ur & S_IFREG) { DirEntry dent(true,""); bool is_manageable = fill_object_info(dent,fname,ur,i,mode); if(is_manageable) { dir_list.push_back(dent); return -1; }; }; } return 1; } /* checkdir is allowed to change dirname to show actual target of cd */ int DirectFilePlugin::checkdir(std::string &dirname) { logger.msg(Arc::VERBOSE, "plugin: checkdir: %s", dirname); std::list::iterator i=control_dir(dirname,false); if(i==access.end()) return 0; /* error ? */ logger.msg(Arc::VERBOSE, "plugin: checkdir: access: %s", (*i).name); std::string fname = real_name(dirname); if(i->access.cd) { int ur=(*i).unix_rights(fname,uid,gid); if(ur == 0 && errno > 0) { // stat failed error_description = Arc::StrError(errno); return 1; } if((ur & S_IXUSR) && (ur & S_IFDIR)) { logger.msg(Arc::VERBOSE, "plugin: checkdir: access: allowed: %s", fname); return 0; }; }; return 1; } int DirectFilePlugin::checkfile(std::string &name,DirEntry &info,DirEntry::object_info_level mode) { std::list::iterator i=control_dir(name,true); if(i==access.end()) return 1; /* error ? */ /* TODO check permissions of higher level directory */ std::string dname=name; if(!remove_last_name(dname)) { /* information about top directory was requested. Since this directory is declared it should exist. 
At least virtually */ info.uid=getuid(); info.gid=getgid(); info.is_file=false; info.name=""; return 0; }; if(!(i->access.dirlist)) { return 1; }; std::string fdname = real_name(dname); int ur=i->unix_rights(fdname,uid,gid); if(ur == 0 && errno > 0) { // stat failed error_description = Arc::StrError(errno); return 1; } if(!((ur & S_IXUSR) && (ur & S_IFDIR))) { return 1; }; std::string fname = real_name(name); DirEntry dent(true,get_last_name(fname.c_str())); bool is_manageable = fill_object_info(dent,fdname,ur,i,mode); if(!is_manageable) { if (errno > 0) error_description = Arc::StrError(errno); return 1; }; info=dent; return 0; } bool DirectAccess::belongs(std::string &name,bool indir) { return belongs(name.c_str(),indir); } bool DirectAccess::belongs(const char* name,bool indir) { int pl=this->name.length(); if(pl == 0) return true; /* root dir */ int l=strlen(name); if (pl > l) return false; if(strncmp(this->name.c_str(),name,pl)) return false; if(!indir) if(pl == l) return true; if(name[pl] == '/') return true; return false; } nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376025753 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.369561985 nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/Makefile.am0000644000175000002070000000134114152153376025737 0ustar00mockbuildmock00000000000000GLOBUS_FILEPLUGIN_LIBS = $(GLOBUS_RSL_LIBS) $(GLOBUS_IO_LIBS) \ $(GLOBUS_GSI_CREDENTIAL_LIBS) $(GLOBUS_GSI_CERT_UTILS_LIBS) \ $(GLOBUS_GSSAPI_GSI_LIBS) $(GLOBUS_COMMON_LIBS) pkglib_LTLIBRARIES = filedirplugin.la filedirplugin_la_SOURCES = init.cpp fileplugin.cpp fileplugin.h filedirplugin_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLOBUS_IO_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) filedirplugin_la_LDFLAGS = -no-undefined -avoid-version -module filedirplugin_la_LIBADD = \ ../libgridftpd_la-misc.lo ../libgridftpd_la-userspec.lo \ ../libgridftpd_la-names.lo ../auth/libmap.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(GLOBUS_FILEPLUGIN_LIBS) nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/PaxHeaders.30264/Makefile.in0000644000000000000000000000013114152153435025757 xustar000000000000000030 mtime=1638455069.497113137 30 atime=1638455090.938435304 29 ctime=1638455099.36856197 nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/Makefile.in0000644000175000002070000007517114152153435025760 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/gridftpd/fileplugin DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed 
'$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(pkglibdir)" LTLIBRARIES = $(pkglib_LTLIBRARIES) am__DEPENDENCIES_1 = am__DEPENDENCIES_2 = $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) filedirplugin_la_DEPENDENCIES = ../libgridftpd_la-misc.lo \ ../libgridftpd_la-userspec.lo ../libgridftpd_la-names.lo \ ../auth/libmap.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(am__DEPENDENCIES_2) am_filedirplugin_la_OBJECTS = filedirplugin_la-init.lo \ filedirplugin_la-fileplugin.lo filedirplugin_la_OBJECTS = $(am_filedirplugin_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = filedirplugin_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(filedirplugin_la_CXXFLAGS) $(CXXFLAGS) \ $(filedirplugin_la_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(filedirplugin_la_SOURCES) DIST_SOURCES = $(filedirplugin_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. 
am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = 
@GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ 
SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ GLOBUS_FILEPLUGIN_LIBS = $(GLOBUS_RSL_LIBS) $(GLOBUS_IO_LIBS) \ $(GLOBUS_GSI_CREDENTIAL_LIBS) $(GLOBUS_GSI_CERT_UTILS_LIBS) \ $(GLOBUS_GSSAPI_GSI_LIBS) $(GLOBUS_COMMON_LIBS) pkglib_LTLIBRARIES = filedirplugin.la filedirplugin_la_SOURCES = init.cpp fileplugin.cpp fileplugin.h filedirplugin_la_CXXFLAGS = -I$(top_srcdir)/include \ $(GLOBUS_IO_CFLAGS) $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) filedirplugin_la_LDFLAGS = -no-undefined -avoid-version -module filedirplugin_la_LIBADD = \ ../libgridftpd_la-misc.lo ../libgridftpd_la-userspec.lo \ ../libgridftpd_la-names.lo ../auth/libmap.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(GLOBUS_FILEPLUGIN_LIBS) all: 
all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/gridftpd/fileplugin/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/gridftpd/fileplugin/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): install-pkglibLTLIBRARIES: $(pkglib_LTLIBRARIES) @$(NORMAL_INSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ list2="$$list2 $$p"; \ else :; fi; \ done; \ test -z "$$list2" || { \ echo " $(MKDIR_P) '$(DESTDIR)$(pkglibdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkglibdir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(pkglibdir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(pkglibdir)"; \ } uninstall-pkglibLTLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(pkglib_LTLIBRARIES)'; test -n "$(pkglibdir)" || list=; \ for p in $$list; do \ $(am__strip_dir) \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(pkglibdir)/$$f'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(pkglibdir)/$$f"; \ done clean-pkglibLTLIBRARIES: -test -z "$(pkglib_LTLIBRARIES)" || rm -f $(pkglib_LTLIBRARIES) @list='$(pkglib_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } filedirplugin.la: $(filedirplugin_la_OBJECTS) $(filedirplugin_la_DEPENDENCIES) $(EXTRA_filedirplugin_la_DEPENDENCIES) $(AM_V_CXXLD)$(filedirplugin_la_LINK) -rpath $(pkglibdir) $(filedirplugin_la_OBJECTS) $(filedirplugin_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/filedirplugin_la-fileplugin.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/filedirplugin_la-init.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP 
-MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< filedirplugin_la-init.lo: init.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(filedirplugin_la_CXXFLAGS) $(CXXFLAGS) -MT filedirplugin_la-init.lo -MD -MP -MF $(DEPDIR)/filedirplugin_la-init.Tpo -c -o filedirplugin_la-init.lo `test -f 'init.cpp' || echo '$(srcdir)/'`init.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/filedirplugin_la-init.Tpo $(DEPDIR)/filedirplugin_la-init.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='init.cpp' object='filedirplugin_la-init.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(filedirplugin_la_CXXFLAGS) $(CXXFLAGS) -c -o filedirplugin_la-init.lo `test -f 'init.cpp' || echo '$(srcdir)/'`init.cpp filedirplugin_la-fileplugin.lo: fileplugin.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(filedirplugin_la_CXXFLAGS) $(CXXFLAGS) -MT filedirplugin_la-fileplugin.lo -MD -MP -MF $(DEPDIR)/filedirplugin_la-fileplugin.Tpo -c -o filedirplugin_la-fileplugin.lo `test -f 'fileplugin.cpp' || echo '$(srcdir)/'`fileplugin.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/filedirplugin_la-fileplugin.Tpo $(DEPDIR)/filedirplugin_la-fileplugin.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='fileplugin.cpp' object='filedirplugin_la-fileplugin.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(filedirplugin_la_CXXFLAGS) $(CXXFLAGS) -c -o filedirplugin_la-fileplugin.lo `test -f 'fileplugin.cpp' || echo '$(srcdir)/'`fileplugin.cpp mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) 
$(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: for dir in "$(DESTDIR)$(pkglibdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool clean-pkglibLTLIBRARIES \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-pkglibLTLIBRARIES install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-pkglibLTLIBRARIES .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-pkglibLTLIBRARIES cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-pkglibLTLIBRARIES install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags tags-am uninstall uninstall-am \ uninstall-pkglibLTLIBRARIES # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/PaxHeaders.30264/fileplugin.h0000644000000000000000000000013214152153376026226 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.373562045 nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/fileplugin.h0000644000175000002070000000743514152153376026224 0ustar00mockbuildmock00000000000000#ifndef GRID_SERVER_FILE_PLUGIN_H #define GRID_SERVER_FILE_PLUGIN_H #include #include #include #include "../fileroot.h" #include "../userspec.h" /* DirectAccess is used to store information about access control */ class DirectAccess { public: typedef enum { local_none_access, local_user_access, local_group_access, local_other_access, local_unix_access } local_access_t; typedef struct { bool read; bool creat; int creat_uid; int creat_gid; int creat_perm_or; int creat_perm_and; bool overwrite; bool append; bool del; bool mkdir; int mkdir_uid; int mkdir_gid; int mkdir_perm_or; int mkdir_perm_and; local_access_t access; bool cd; bool dirlist; } diraccess_t; diraccess_t access; std::string name; DirectAccess(void) { /* dumb constructor, object is for copying to only */ name=""; access.read=true; access.dirlist=true; access.cd=true; access.creat=false; access.overwrite=false; access.append=false; access.del=false; access.mkdir=false; access.access=local_unix_access; }; DirectAccess(const DirectAccess &dir) { /* copy constructor */ name=dir.name; access=dir.access; }; DirectAccess& operator= (const DirectAccess &dir) { name=dir.name; access=dir.access; return (*this); }; DirectAccess(std::string &dirname,diraccess_t &diraccess) { /* real constructor */ name=dirname; access=diraccess; }; static bool comp(DirectAccess &left,DirectAccess &right) { return (left.name.length() > right.name.length()); }; bool belongs(std::string &name,bool indir = false); bool belongs(const char* name,bool indir = false); bool can_read(std::string &name); bool can_write(std::string &name); bool can_append(std::string &name); bool can_mkdir(std::string &name); int unix_rights(std::string &name,int uid,int gid); int unix_info(std::string &name,uid_t &uid,gid_t &gid,unsigned long long &size,time_t &created,time_t &modified,bool &is_file); int unix_set(int uid,int gid); void unix_reset(void); }; /* this class is used to communicate with network layer - must be derived from FilePlugin */ class DirectFilePlugin: public FilePlugin { private: typedef enum { file_access_none, file_access_read, file_access_create, file_access_overwrite } file_access_mode_t; file_access_mode_t file_mode; std::string file_name; bool fill_object_info(DirEntry &dent,std::string dirname,int ur, std::list::iterator i, DirEntry::object_info_level mode); std::string real_name(std::string name); std::string real_name(char* name); std::list::iterator control_dir(const std::string &name,bool indir=false); std::list::iterator control_dir(const char* name,bool indir=false); public: int uid; int gid; std::list access; int data_file; std::string mount; DirectFilePlugin(std::istream &cfile,userspec_t const &user); ~DirectFilePlugin(void) { }; virtual int open(const char* name,open_modes mode,unsigned long long int size = 0); int open_direct(const char* name,open_modes mode); virtual int close(bool eof = true); virtual int read(unsigned char *buf,unsigned long long int offset,unsigned long long int *size); virtual int write(unsigned char *buf,unsigned long long int offset,unsigned long long int size); virtual int readdir(const char* name,std::list 
&dir_list,DirEntry::object_info_level mode); virtual int checkdir(std::string &dirname); virtual int checkfile(std::string &name,DirEntry &file,DirEntry::object_info_level mode); virtual int makedir(std::string &dirname); virtual int removefile(std::string &name); virtual int removedir(std::string &dirname); }; #endif nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/PaxHeaders.30264/init.cpp0000644000000000000000000000013214152153376025366 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.371562015 nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/init.cpp0000644000175000002070000000060714152153376025356 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #define GRIDFTP_PLUGIN #include "fileplugin.h" static FilePlugin* init_cpp(std::istream &cfile,userspec_t &user) { DirectFilePlugin* arg = new DirectFilePlugin(cfile,user); return arg; } extern "C" { FilePlugin* init(std::istream &cfile,userspec_t &user,FileNode &node) { return init_cpp(cfile,user); } } nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/PaxHeaders.30264/README0000644000000000000000000000012714152153376024603 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 27 ctime=1638455099.370562 nordugrid-arc-6.14.0/src/services/gridftpd/fileplugin/README0000644000175000002070000000015414152153376024564 0ustar00mockbuildmock00000000000000filedirplugin for GridFTP server. Allows exposure of a file-system hierarchy through the GridFTP interface. nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/misc.cpp0000644000000000000000000000013114152153376023217 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.511647567 30 ctime=1638455099.213559641 nordugrid-arc-6.14.0/src/services/gridftpd/misc.cpp0000644000175000002070000000125714152153376023212 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include "misc.h" std::string timetostring(time_t t) { int l; char buf[32]; buf[0]=0; ctime_r(&t,buf); l=strlen(buf); if(l > 0) buf[l-1]=0; return std::string(buf); } std::string dirstring(bool dir,long long unsigned int s,time_t t,const char *name) { std::string str; if(dir) { str="d--------- 1 user group " + timetostring(t) + \ " " + Arc::tostring(s,16) + " " + std::string(name)+"\r\n"; } else { str="---------- 1 user group " + timetostring(t) + \ " " + Arc::tostring(s,16) + " " + std::string(name)+"\r\n"; }; return str; } nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/listener.cpp0000644000000000000000000000013114152153376024111 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.511647567 30 ctime=1638455099.217559701 nordugrid-arc-6.14.0/src/services/gridftpd/listener.cpp0000644000175000002070000004172014152153376024103 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include "fileroot.h" #include "commands.h" #include "conf.h" #define DEFAULT_MAX_BUFFER_SIZE (10*65536) #define DEFAULT_BUFFER_SIZE (65536) #define DEFAULT_MAX_CONECTIONS (100) #define DEFAULT_GRIDFTP_PORT 2811 #define DEFAULT_LOG_FILE "/var/log/arc/gridftpd.log" #define DEFAULT_PID_FILE "/run/gridftpd.pid" GridFTP_Commands *client; static int max_connections = 0; static volatile int started_connections = 0; static volatile int finished_connections = 0; unsigned long long int max_data_buffer_size = 0; unsigned long long int 
default_data_buffer_size = 0; unsigned int firewall_interface[4] = { 0, 0, 0, 0 }; static Arc::Logger logger(Arc::Logger::getRootLogger(), "gridftpd"); #define PROTO_NAME(ADDR) ((ADDR->ai_family==AF_INET6)?"IPv6":"IPv4") /* new connection */ #ifndef __DONT_USE_FORK__ void new_conn_callback(int sock) { /* initiate random number generator */ srand(getpid() + getppid() + time(NULL)); #ifdef HAVE_GLOBUS_THREAD_SET_MODEL globus_thread_set_model("pthread"); #endif if((globus_module_activate(GLOBUS_COMMON_MODULE) != GLOBUS_SUCCESS) || (globus_module_activate(GLOBUS_FTP_CONTROL_MODULE) != GLOBUS_SUCCESS) || (globus_module_activate(GLOBUS_GSI_CREDENTIAL_MODULE) != GLOBUS_SUCCESS) || (globus_module_activate(GLOBUS_GSI_GSS_ASSIST_MODULE) != GLOBUS_SUCCESS) || (globus_module_activate(GLOBUS_OPENSSL_MODULE) != GLOBUS_SUCCESS)) { logger.msg(Arc::ERROR, "Activation failed"); globus_module_deactivate_all(); close(sock); exit(1); }; client = new GridFTP_Commands(getpid(),firewall_interface); client->new_connection_callback((void*)client,sock); close(sock); logger.msg(Arc::INFO, "Child exited"); _exit(0); globus_module_deactivate(GLOBUS_OPENSSL_MODULE); globus_module_deactivate(GLOBUS_GSI_GSS_ASSIST_MODULE); globus_module_deactivate(GLOBUS_GSI_CREDENTIAL_MODULE); globus_module_deactivate(GLOBUS_FTP_CONTROL_MODULE); globus_module_deactivate(GLOBUS_COMMON_MODULE); exit(0); } #else void new_conn_callback(void* arg,globus_ftp_control_server_t *handle,globus_object_t *error) { if(error != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Globus connection error"); return; }; logger.msg(Arc::INFO, "New connection"); client = new GridFTP_Commands(cur_connections,firewall_interface); client->new_connection_callback((void*)client,handle,error); } #endif void serv_stop_callback(void* /* arg */,globus_ftp_control_server_t* /* handle */,globus_object_t* /* error */) { logger.msg(Arc::INFO, "Server stopped"); } static volatile int server_done = 0; static void (*sig_old_chld)(int) = SIG_ERR; void sig_chld(int /* signum */) { int old_errno = errno; int status; for(;;) { int id=waitpid(-1,&status,WNOHANG); if((id == 0) || (id == -1)) break; ++finished_connections; }; errno = old_errno; } #ifdef __USE_RESURECTION__ void sig_term(int signum) { int old_errno = errno; if(chid == -1) return; if(chid == 0) { server_done = 1; globus_cond_signal(&server_cond); if(sig_old_term == SIG_ERR) return; if(sig_old_term == SIG_IGN) return; if(sig_old_term == SIG_DFL) return; (*sig_old_term)(signum); } else { kill(chid,SIGTERM); }; errno = old_errno; }; int main_internal(int argc,char** argv); int main(int argc,char** argv) { char const * log_time_format = ::getenv("ARC_LOGGER_TIME_FORMAT"); if(log_time_format) { if(strcmp(log_time_format,"USER") == 0) { Arc::Time::SetFormat(Arc::UserTime); } else if(strcmp(log_time_format,"USEREXT") == 0) { Arc::Time::SetFormat(Arc::UserExtTime); } else if(strcmp(log_time_format,"ELASTIC") == 0) { Arc::Time::SetFormat(Arc::ElasticTime); } else if(strcmp(log_time_format,"MDS") == 0) { Arc::Time::SetFormat(Arc::MDSTime); } else if(strcmp(log_time_format,"ASC") == 0) { Arc::Time::SetFormat(Arc::ASCTime); } else if(strcmp(log_time_format,"ISO") == 0) { Arc::Time::SetFormat(Arc::ISOTime); } else if(strcmp(log_time_format,"UTC") == 0) { Arc::Time::SetFormat(Arc::UTCTime); } else if(strcmp(log_time_format,"RFC1123") == 0) { Arc::Time::SetFormat(Arc::RFC1123Time); } else if(strcmp(log_time_format,"EPOCH") == 0) { Arc::Time::SetFormat(Arc::EpochTime); }; }; // temporary stderr destination until configuration is read and 
used in daemon.daemon() Arc::LogStream logcerr(std::cerr); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::INFO); globus_module_deactivate_all(); setpgid(0,0); sig_old_term=signal(SIGTERM,&sig_term); if(sig_old_term == SIG_ERR) { perror(""); logger.msg(Arc::ERROR, "Error: failed to set handler for SIGTERM"); return -1; }; for(;;) { logger.msg(Arc::INFO, "Starting controlled process"); if((chid=fork()) != 0) { if(chid == -1) { logger.msg(Arc::ERROR, "fork failed"); return -1; }; int status; if(wait(&status) == -1) { logger.msg(Arc::ERROR, "wait failed - killing child"); kill(chid,SIGKILL); return -1; }; logger.msg(Arc::INFO, "Child exited"); if(WIFSIGNALED(status)) { logger.msg(Arc::INFO, "Killed with signal: "<<(int)(WTERMSIG(status))); if(WTERMSIG(status) == SIGSEGV) { logger.msg(Arc::INFO, "Restarting after segmentation violation."); logger.msg(Arc::INFO, "Waiting 1 minute"); sleep(60); continue; }; }; return WEXITSTATUS(status); }; break; }; return main_internal(argc,argv); } int main_internal(int argc,char** argv) { #else void sig_term_fork(int /* signum */) { int old_errno = errno; int static passed = 0; if(passed) _exit(-1); server_done=1; passed=1; kill(0,SIGTERM); errno = old_errno; } int main(int argc,char** argv) { #ifndef __DONT_USE_FORK__ globus_module_deactivate_all(); #endif setpgid(0,0); #endif char const * log_time_format = ::getenv("ARC_LOGGER_TIME_FORMAT"); if(log_time_format) { if(strcmp(log_time_format,"USER") == 0) { Arc::Time::SetFormat(Arc::UserTime); } else if(strcmp(log_time_format,"USEREXT") == 0) { Arc::Time::SetFormat(Arc::UserExtTime); } else if(strcmp(log_time_format,"ELASTIC") == 0) { Arc::Time::SetFormat(Arc::ElasticTime); } else if(strcmp(log_time_format,"MDS") == 0) { Arc::Time::SetFormat(Arc::MDSTime); } else if(strcmp(log_time_format,"ASC") == 0) { Arc::Time::SetFormat(Arc::ASCTime); } else if(strcmp(log_time_format,"ISO") == 0) { Arc::Time::SetFormat(Arc::ISOTime); } else if(strcmp(log_time_format,"UTC") == 0) { Arc::Time::SetFormat(Arc::UTCTime); } else if(strcmp(log_time_format,"RFC1123") == 0) { Arc::Time::SetFormat(Arc::RFC1123Time); } else if(strcmp(log_time_format,"EPOCH") == 0) { Arc::Time::SetFormat(Arc::EpochTime); }; }; // temporary stderr destination until configuration is read and used in daemon.daemon() Arc::LogStream logcerr(std::cerr); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::INFO); #ifndef __DONT_USE_FORK__ signal(SIGTERM,&sig_term_fork); sig_old_chld=signal(SIGCHLD,&sig_chld); if(sig_old_chld == SIG_ERR) { perror(""); logger.msg(Arc::ERROR, "Error: failed to set handler for SIGCHLD"); return -1; }; std::list handles; #else globus_ftp_control_server_t handle; /* initiate random number generator */ srand(getpid() + getppid() + time(NULL)); #endif unsigned short server_port=0; int n; gridftpd::Daemon daemon; while((n=daemon.getopt(argc,argv,"hp:c:n:b:B:")) != -1) { switch(n) { case '.': { return 1; }; case ':': { logger.msg(Arc::ERROR, "Missing argument"); return 1; }; case '?': { logger.msg(Arc::ERROR, "Unknown option"); return 1; }; case 'h': { fprintf(stdout,"gridftpd [-p port_to_listen] [-c config_file] [-n maximal_connections] [-b default_buffer_size] [-B maximal_buffer_size] %s.\n",daemon.short_help()); return 0; }; case 'p': { if(sscanf(optarg,"%hu",&server_port) != 1) { logger.msg(Arc::ERROR, "Wrong port number"); return 1; }; }; break; case 'c': { config_file=optarg; }; break; case 'n': { 
if((sscanf(optarg,"%i",&max_connections) != 1) || (max_connections < 0)) { logger.msg(Arc::ERROR, "Wrong number of connections"); return 1; }; }; break; case 'b': { if((sscanf(optarg,"%Lu",&default_data_buffer_size) != 1) || (default_data_buffer_size < 1)) { logger.msg(Arc::ERROR, "Wrong buffer size"); return 1; }; }; break; case 'B': { if((sscanf(optarg,"%Lu",&max_data_buffer_size) != 1) || (max_data_buffer_size < 1)) { logger.msg(Arc::ERROR, "Wrong maximal buffer size"); return 1; }; }; break; default: break; }; }; //if(config_file) nordugrid_config_loc=config_file; // Read configuration (for daemon commands and port) FileRoot::ServerParams params; if(FileRoot::config(daemon,&params) != 0) { logger.msg(Arc::ERROR, "Failed reading configuration"); return 1; }; if(server_port == 0) server_port=params.port; if(server_port == 0) server_port=DEFAULT_GRIDFTP_PORT; if(max_connections == 0) max_connections=params.max_connections; if(max_connections == 0) max_connections=DEFAULT_MAX_CONECTIONS; if(max_data_buffer_size == 0) max_data_buffer_size=params.max_buffer; if(max_data_buffer_size == 0) max_data_buffer_size=DEFAULT_MAX_BUFFER_SIZE; if(default_data_buffer_size == 0) default_data_buffer_size=params.default_buffer; if(default_data_buffer_size == 0) default_data_buffer_size=DEFAULT_BUFFER_SIZE; firewall_interface[0]=params.firewall[0]; firewall_interface[1]=params.firewall[1]; firewall_interface[2]=params.firewall[2]; firewall_interface[3]=params.firewall[3]; #ifndef __DONT_USE_FORK__ unsigned int addrs_num = 0; { struct addrinfo hint; struct addrinfo *info = NULL; memset(&hint, 0, sizeof(hint)); hint.ai_socktype = SOCK_STREAM; hint.ai_protocol = IPPROTO_TCP; // ? hint.ai_flags = AI_PASSIVE; // looking for bindable addresses // hint.ai_family = AF_INET; // hint.ai_family = AF_INET6; int ret = getaddrinfo(NULL, Arc::tostring(server_port).c_str(), &hint, &info); if (ret != 0) { std::string err_str = gai_strerror(ret); logger.msg(Arc::ERROR, "Failed to obtain local address: %s",err_str); exit(-1); }; for(struct addrinfo *info_ = info;info_;info_=info_->ai_next) { ++addrs_num; int s = socket(info_->ai_family,info_->ai_socktype,info_->ai_protocol); if(s == -1) { std::string e = Arc::StrError(errno); logger.msg(Arc::WARNING, "Failed to create socket(%s): %s",PROTO_NAME(info_),e); }; { int on = 1; setsockopt(s,SOL_SOCKET,SO_REUSEADDR,(void*)(&on),sizeof(on)); }; #ifdef IPV6_V6ONLY if(info_->ai_family == AF_INET6) { int v = 1; // Some systems (Linux for example) make v6 support v4 too // by default. Some don't. Make it same for everyone - // separate sockets for v4 and v6. if(setsockopt(s,IPPROTO_IPV6,IPV6_V6ONLY,&v,sizeof(v)) != 0) { std::string e = Arc::StrError(errno); logger.msg(Arc::WARNING, "Failed to limit socket to IPv6: %s",e); close(s); continue; }; }; #endif if(bind(s,info_->ai_addr,info_->ai_addrlen) == -1) { std::string e = Arc::StrError(errno); logger.msg(Arc::WARNING, "Failed to bind socket(%s): %s",PROTO_NAME(info_),e); close(s); continue; }; if(listen(s,128) == -1) { std::string e = Arc::StrError(errno); logger.msg(Arc::WARNING, "Failed to listen on socket(%s): %s",PROTO_NAME(info_),e); close(s); continue; }; handles.push_back(s); }; }; if(handles.empty()) { logger.msg(Arc::ERROR, "Not listening to anything"); exit(-1); }; if(handles.size() < addrs_num) { logger.msg(Arc::WARNING, "Some addresses failed. 
Listening on %u of %u.",(unsigned int)handles.size(),addrs_num); }; daemon.logfile(DEFAULT_LOG_FILE); daemon.pidfile(DEFAULT_PID_FILE); if(daemon.daemon(false) != 0) { perror("daemonization failed"); return 1; }; logger.msg(Arc::INFO, "Listen started"); for(;;) { fd_set ifds; fd_set efds; FD_ZERO(&ifds); FD_ZERO(&efds); int maxfd = -1; for(std::list::iterator handle = handles.begin();handle != handles.end();++handle) { FD_SET(*handle,&ifds); FD_SET(*handle,&efds); if(*handle > maxfd) maxfd = *handle; }; if(maxfd < 0) { if(!server_done) logger.msg(Arc::ERROR, "No valid handles left for listening"); break; }; int r = select(maxfd+1,&ifds,NULL,&efds,NULL); if(r == -1) { if(errno == EINTR) continue; if(!server_done) logger.msg(Arc::ERROR, "Select failed: %s", Arc::StrError(errno)); break; }; std::list::iterator handle = handles.begin(); for(;handle != handles.end();++handle) { if(FD_ISSET(*handle,&ifds) || FD_ISSET(*handle,&efds)) break; }; if(handle == handles.end()) { // ??? continue; }; struct sockaddr_in addr; socklen_t addrlen = sizeof(addr); int sock = accept(*handle,(sockaddr*)&addr,&addrlen); if(sock == -1) { if(!server_done) logger.msg(Arc::ERROR, "Accept failed: %s", Arc::StrError(errno)); if(errno == EBADF) { // handle becomes bad close(*handle); handles.erase(handle); }; }; int curr_connections = started_connections - finished_connections; logger.msg(Arc::INFO, "Have connections: %i, max: %i", curr_connections, max_connections); if((curr_connections < max_connections) || (max_connections == 0)) { logger.msg(Arc::INFO, "New connection"); switch (fork()) { case -1: { logger.msg(Arc::ERROR, "Fork failed: %s", Arc::StrError(errno)); }; break; case 0: { /* child */ for(std::list::iterator handle = handles.begin();handle != handles.end();++handle) { close(*handle); }; handles.clear(); Arc::Run::AfterFork(); new_conn_callback(sock); }; break; default: { /* parent */ ++started_connections; }; break; }; } else { /* it is probably better to close connection immediately */ logger.msg(Arc::ERROR, "Refusing connection: Connection limit exceeded"); }; close(sock); }; for(std::list::iterator handle = handles.begin();handle != handles.end();++handle) { close(*handle); }; handles.clear(); #else if(daemon.daemon() != 0) { perror("daemonization failed"); return 1; }; #ifdef HAVE_GLOBUS_THREAD_SET_MODEL globus_thread_set_model("pthread"); #endif if((globus_module_activate(GLOBUS_COMMON_MODULE) != GLOBUS_SUCCESS) || (globus_module_activate(GLOBUS_FTP_CONTROL_MODULE) != GLOBUS_SUCCESS) || (globus_module_activate(GLOBUS_GSI_CREDENTIAL_MODULE) != GLOBUS_SUCCESS) || (globus_module_activate(GLOBUS_GSI_GSS_ASSIST_MODULE) != GLOBUS_SUCCESS) || (globus_module_activate(GLOBUS_OPENSSL_MODULE) != GLOBUS_SUCCESS)) { logger.msg(Arc::ERROR, "Activation failed"); globus_module_deactivate_all(); goto exit; }; if(globus_ftp_control_server_handle_init(&handle) != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Init failed"); goto exit_active; }; if(globus_ftp_control_server_listen(&handle,&server_port,&new_conn_callback,NULL) != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Listen failed"); goto exit_inited; }; logger.msg(Arc::INFO, "Listen started"); globus_mutex_init(&server_lock,GLOBUS_NULL); globus_cond_init(&server_cond,GLOBUS_NULL); server_done=0; globus_mutex_lock(&(server_lock)); while(!(server_done)) { globus_cond_wait(&(server_cond),&(server_lock)); }; globus_mutex_unlock(&(server_lock)); logger.msg(Arc::INFO, "Listen finished"); globus_mutex_destroy(&server_lock); globus_cond_destroy(&server_cond); logger.msg(Arc::INFO, 
"Stopping server"); globus_ftp_control_server_stop(&handle,&serv_stop_callback,NULL); exit_inited: logger.msg(Arc::INFO, "Destroying handle"); globus_ftp_control_server_handle_destroy(&handle); exit_active: logger.msg(Arc::INFO, "Deactivating modules"); globus_module_deactivate(GLOBUS_OPENSSL_MODULE); globus_module_deactivate(GLOBUS_GSI_GSS_ASSIST_MODULE); globus_module_deactivate(GLOBUS_GSI_CREDENTIAL_MODULE); globus_module_deactivate(GLOBUS_FTP_CONTROL_MODULE); globus_module_deactivate(GLOBUS_COMMON_MODULE); exit: #endif logger.msg(Arc::INFO, "Exiting"); return 0; } nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/commands.cpp0000644000000000000000000000013114152153376024065 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.214559656 nordugrid-arc-6.14.0/src/services/gridftpd/commands.cpp0000644000175000002070000015067014152153376024064 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include "fileroot.h" #include "names.h" #include "commands.h" #include "misc/proxy.h" /* timeout if nothing happened during 10 minutes */ #define FTP_TIMEOUT 600 static Arc::Logger logger(Arc::Logger::getRootLogger(),"GridFTP_Commands"); extern unsigned long long int max_data_buffer_size; extern unsigned long long int default_data_buffer_size; #ifndef __DONT_USE_FORK__ static int fork_done; static globus_mutex_t fork_lock; static globus_cond_t fork_cond; #endif GridFTP_Commands_timeout* timeouter = NULL; extern int make_list_string(const DirEntry &entr,GridFTP_Commands::list_mode_t mode,unsigned char* buf,int size,const char *prefix); int GridFTP_Commands::send_response(const char* response) { globus_result_t res; response_done=0; { std::string s = response; for(std::string::size_type n=0;;) if((n=s.find('\r'))==std::string::npos) {break;} else {s[n]='\\';}; for(std::string::size_type n=0;;) if((n=s.find('\n'))==std::string::npos) {break;} else {s[n]='\\';}; logger.msg(Arc::VERBOSE, "response: %s", s); }; res = globus_ftp_control_send_response(&handle,response,&response_callback,this); if(res != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Send response failed: %s", Arc::GlobusResult(res).str()); globus_mutex_lock(&response_lock); response_done=2; globus_cond_signal(&response_cond); globus_mutex_unlock(&response_lock); return 1; }; return 0; } int GridFTP_Commands::wait_response(void) { int res = 0; // What is the right deafault here? 
globus_abstime_t timeout; last_action_time=time(NULL); GlobusTimeAbstimeSet(timeout,0,100000); globus_mutex_lock(&response_lock); while(!response_done) { globus_cond_timedwait(&response_cond,&response_lock,&timeout); res=(response_done != 1); last_action_time=time(NULL); }; response_done=0; globus_mutex_unlock(&response_lock); return res; } void GridFTP_Commands::response_callback(void* arg,globus_ftp_control_handle_t*,globus_object_t *error) { GridFTP_Commands *it = (GridFTP_Commands*)arg; globus_mutex_lock(&(it->response_lock)); if(error != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Response sending error"); it->response_done=2; } else { it->response_done=1; }; globus_cond_signal(&(it->response_cond)); globus_mutex_unlock(&(it->response_lock)); } void GridFTP_Commands::close_callback(void *arg,globus_ftp_control_handle_t*,globus_object_t* /* error */, globus_ftp_control_response_t* /* ftp_response */) { GridFTP_Commands *it = (GridFTP_Commands*)arg; if(it) { logger.msg(Arc::INFO, "Closed connection"); delete it; }; // GridFTP_Commands::response_callback(arg,handle,error); } #if GLOBUS_IO_VERSION>=5 static void io_close_cb(void* /* callback_arg */,globus_io_handle_t*, globus_result_t /* result */) { } #endif #ifndef __DONT_USE_FORK__ GridFTP_Commands::close_semaphor_t::close_semaphor_t(void) { } GridFTP_Commands::close_semaphor_t::~close_semaphor_t(void) { globus_mutex_lock(&fork_lock); fork_done=1; globus_cond_signal(&fork_cond); globus_mutex_unlock(&fork_lock); } int GridFTP_Commands::new_connection_callback(void* arg,int sock) { GridFTP_Commands *it = (GridFTP_Commands*)arg; globus_mutex_init(&fork_lock,GLOBUS_NULL); globus_cond_init(&fork_cond,GLOBUS_NULL); // Convert the socket to a globus IO structure. globus_result_t res; globus_io_attr_t attr; globus_io_tcpattr_init(&attr); globus_io_attr_set_socket_oobinline(&attr, GLOBUS_TRUE); globus_io_attr_set_tcp_nodelay(&attr, GLOBUS_TRUE); res = globus_io_tcp_posix_convert(sock,&attr,&(it->handle.cc_handle.io_handle)); if(res != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Socket conversion failed: %s", Arc::GlobusResult(res).str()); return -1; }; it->handle.cc_handle.cc_state=GLOBUS_FTP_CONTROL_CONNECTED; fork_done = 0; int count = 0; res = globus_io_tcp_get_local_address_ex(&(it->handle.cc_handle.io_handle), it->local_host,&count,&(it->local_port)); if(res != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Failed to obtain own address: %s", Arc::GlobusResult(res).str()); return -1; }; if(count == sizeof(in_addr)) { it->local_is_ipv6 = false; } else if(count == sizeof(in6_addr)) { it->local_is_ipv6 = true; } else { logger.msg(Arc::ERROR, "Failed to recognize own address type (IPv4 or IPv6) - %u",count); return -1; }; if(it->local_is_ipv6) { char str[8*5]; snprintf(str,sizeof(str),"%04x:%04x:%04x:%04x:%04x:%04x:%04x:%04x", it->local_host[0]<<8 | it->local_host[1], it->local_host[2]<<8 | it->local_host[3], it->local_host[4]<<8 | it->local_host[5], it->local_host[6]<<8 | it->local_host[7], it->local_host[8]<<8 | it->local_host[9], it->local_host[10]<<8 | it->local_host[11], it->local_host[12]<<8 | it->local_host[13],it->local_host[14]<<8 | it->local_host[15]); logger.msg(Arc::INFO, "Accepted connection on [%s]:%u",str,it->local_port); } else { logger.msg(Arc::INFO, "Accepted connection on %u.%u.%u.%u:%u",it->local_host[0],it->local_host[1],it->local_host[2],it->local_host[3],it->local_port); }; globus_ftp_control_local_prot(&(it->handle),GLOBUS_FTP_CONTROL_PROTECTION_PRIVATE); // 
globus_ftp_control_local_prot(&(it->handle),GLOBUS_FTP_CONTROL_PROTECTION_CLEAR); it->data_dcau.mode=GLOBUS_FTP_CONTROL_DCAU_SELF; it->data_dcau.subject.subject=NULL; globus_ftp_control_local_dcau(&(it->handle),&(it->data_dcau),GSS_C_NO_CREDENTIAL); globus_ftp_control_local_mode(&(it->handle),GLOBUS_FTP_CONTROL_MODE_STREAM); globus_ftp_control_local_type(&(it->handle),GLOBUS_FTP_CONTROL_TYPE_IMAGE,0); // Call accept callback as if Globus called it accepted_callback(it, &(it->handle), GLOBUS_SUCCESS); globus_mutex_lock(&fork_lock); while(!fork_done) { globus_cond_wait(&fork_cond,&fork_lock); }; globus_mutex_unlock(&fork_lock); globus_cond_destroy(&fork_cond); globus_mutex_destroy(&fork_lock); return 0; } #else void GridFTP_Commands::new_connection_callback(void* arg,globus_ftp_control_server_t *server_handle,globus_object_t *error) { GridFTP_Commands *it = (GridFTP_Commands*)arg; globus_ftp_control_local_prot(&(it->handle),GLOBUS_FTP_CONTROL_PROTECTION_PRIVATE); // globus_ftp_control_local_prot(&(it->handle),GLOBUS_FTP_CONTROL_PROTECTION_CLEAR); it->data_dcau.mode=GLOBUS_FTP_CONTROL_DCAU_SELF; it->data_dcau.subject.subject=NULL; globus_ftp_control_local_dcau(&(it->handle),&(it->data_dcau),GSS_C_NO_CREDENTIAL); globus_ftp_control_local_mode(&(it->handle),GLOBUS_FTP_CONTROL_MODE_STREAM); globus_ftp_control_local_type(&(it->handle),GLOBUS_FTP_CONTROL_TYPE_IMAGE); if(globus_ftp_control_server_accept(server_handle,&(it->handle),&accepted_callback,it) != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Accept failed"); }; } #endif void GridFTP_Commands::accepted_callback(void* arg,globus_ftp_control_handle_t *handle,globus_object_t *error) { GridFTP_Commands *it = (GridFTP_Commands*)arg; if(error != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Accept failed: %s", Arc::globus_object_to_string(error)); delete it; return; }; int remote_host[16] = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; unsigned short remote_port = 0; int count = 0; globus_io_tcp_get_remote_address_ex(&(handle->cc_handle.io_handle),remote_host,&count,&remote_port); if(count == sizeof(in6_addr)) { char str[8*5]; snprintf(str,sizeof(str),"%04x:%04x:%04x:%04x:%04x:%04x:%04x:%04x", remote_host[0]<<8 | remote_host[1], remote_host[2]<<8 | remote_host[3], remote_host[4]<<8 | remote_host[5], remote_host[6]<<8 | remote_host[7], remote_host[8]<<8 | remote_host[9], remote_host[10]<<8 | remote_host[11], remote_host[12]<<8 | remote_host[13],remote_host[14]<<8 | remote_host[15]); logger.msg(Arc::INFO, "Accepted connection from [%s]:%u",str,remote_port); } else { logger.msg(Arc::INFO, "Accepted connection from %u.%u.%u.%u:%u", (unsigned int)(remote_host[0]), (unsigned int)(remote_host[1]), (unsigned int)(remote_host[2]), (unsigned int)(remote_host[3]), remote_port); }; it->send_response("220 Server ready\r\n"); if(globus_ftp_control_server_authenticate(&(it->handle),GLOBUS_FTP_CONTROL_AUTH_REQ_GSSAPI,&authenticate_callback,it) != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Authenticate in commands failed"); delete it; return; }; } void GridFTP_Commands::authenticate_callback(void* arg,globus_ftp_control_handle_t *handle,globus_object_t *error,globus_ftp_control_auth_info_t *result) { GridFTP_Commands *it = (GridFTP_Commands*)arg; if((result == GLOBUS_NULL) || (error != GLOBUS_SUCCESS)) { logger.msg(Arc::ERROR, "Authentication failure"); logger.msg(Arc::ERROR, Arc::globus_object_to_string(error)); if(it->send_response("535 Authentication failed\r\n") == 0) { it->wait_response(); }; delete it; return; }; logger.msg(Arc::INFO, "User subject: %s", 
result->auth_gssapi_subject); logger.msg(Arc::INFO, "Encrypted: %s", (result->encrypt ? "true" : "false")); it->delegated_cred=result->delegated_credential_handle; // //const char* fname = write_cert_chain(result->auth_gssapi_context); // if(it->froot.config(result,handle) != 0) { logger.msg(Arc::ERROR, "User has no proper configuration associated"); if(it->send_response("535 Not allowed\r\n") == 0) { it->wait_response(); }; delete it; return; }; if(it->froot.nodes.size() == 0) { logger.msg(Arc::ERROR, "User has empty virtual directory tree.\nEither user has no authorised plugins or there are no plugins configured at all."); if(it->send_response("535 Nothing to serve\r\n") == 0) { it->wait_response(); }; delete it; return; }; it->send_response("235 Authentication successful\r\n"); /* Set defaults */ // globus_ftp_control_local_prot(&(it->handle),GLOBUS_FTP_CONTROL_PROTECTION_PRIVATE); // globus_ftp_control_local_prot(&(it->handle),GLOBUS_FTP_CONTROL_PROTECTION_CLEAR); it->data_dcau.mode=GLOBUS_FTP_CONTROL_DCAU_SELF; it->data_dcau.subject.subject=NULL; globus_ftp_control_local_dcau(&(it->handle),&(it->data_dcau),it->delegated_cred); globus_ftp_control_local_mode(&(it->handle),GLOBUS_FTP_CONTROL_MODE_STREAM); globus_ftp_control_local_type(&(it->handle),GLOBUS_FTP_CONTROL_TYPE_IMAGE,0); if(globus_ftp_control_read_commands(&(it->handle),&commands_callback,it) != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Read commands in authenticate failed"); delete it; return; }; } static int parse_integers(char* string,int args[],int margs) { char* cp = string; char* np; int n=0; if((*cp)==0) return n; for(;;) { np=cp; cp=strchr(np,','); if(cp!=NULL) { (*cp)=0; cp++; }; if(ntransfer_mode) { \ it->send_response("421 Service not available\r\n"); break; \ }; \ /* Globus data handle may still be in CLOSING state because \ it takes some time to close socket through globus_io. \ Check for such situation and give Globus 10 sec. to recover. 
*/ \ time_t s_time = time(NULL); \ while(true) { \ if(handle->dc_handle.state != GLOBUS_FTP_DATA_STATE_CLOSING) break; \ if(((unsigned int)(time(NULL)-s_time)) > 10) break; \ sleep(1); \ }; \ if(handle->dc_handle.state == GLOBUS_FTP_DATA_STATE_CLOSING) { \ it->send_response("421 Timeout waiting for service to become available\r\n"); break; \ }; \ } static int parse_eport(char* str, globus_ftp_control_host_port_t* host_port) { memset(host_port,0,sizeof(globus_ftp_control_host_port_t)); for(;isblank(*str);++str) { }; if((*str < 33) || (*str > 126)) return -1; // bad delimiter char delim = *str; const char* protocol_s = ++str; for(;*str != delim;++str) { if(!*str) return -1; }; // missing delimiter *str = 0; const char* addr_s = ++str; for(;*str != delim;++str) { if(!*str) return -1; }; // missing delimiter *str = 0; const char* port_s = ++str; for(;*str != delim;++str) { if(!*str) return -1; }; // missing delimiter *str = 0; char* port_e = NULL; unsigned short port = strtoul(port_s,&port_e,10); if(!port) return -1; // wrong port number host_port->port = port; char* protocol_e = NULL; unsigned short protocol = strtoul(protocol_s,&protocol_e,10); if(protocol == 1) { // IPv4 struct in_addr addr; if(inet_pton(AF_INET,addr_s,&addr) != 1) return -1; // wrong address if(sizeof(addr) > sizeof(host_port->host)) return -1; memcpy(&(host_port->host),&addr,sizeof(addr)); host_port->hostlen = sizeof(addr); } else if(protocol == 2) { // IPv6 struct in6_addr addr; if(inet_pton(AF_INET6,addr_s,&addr) != 1) return -1; // wrong address if(sizeof(addr) > sizeof(host_port->host)) return -1; memcpy(&(host_port->host),&addr,sizeof(addr)); host_port->hostlen = sizeof(addr); } else { return -1; // wrong protocol }; return 0; } /* main procedure */ void GridFTP_Commands::commands_callback(void* arg,globus_ftp_control_handle_t *handle,globus_object_t *error,union globus_ftp_control_command_u *command) { GridFTP_Commands *it = (GridFTP_Commands*)arg; it->last_action_time=time(NULL); if(command == GLOBUS_NULL) { logger.msg(Arc::INFO, "Control connection (probably) closed"); if(error) { logger.msg(Arc::ERROR, Arc::globus_object_to_string(error)); }; it->make_abort(); delete it; return; } #ifndef HAVE_FTP_COMMAND_MLSD #define GLOBUS_FTP_CONTROL_COMMAND_MLSD \ ((globus_ftp_control_command_code_t)(GLOBUS_FTP_CONTROL_COMMAND_UNKNOWN+1)) #define GLOBUS_FTP_CONTROL_COMMAND_MLST \ ((globus_ftp_control_command_code_t)(GLOBUS_FTP_CONTROL_COMMAND_UNKNOWN+2)) #define GLOBUS_FTP_CONTROL_COMMAND_EPRT \ ((globus_ftp_control_command_code_t)(GLOBUS_FTP_CONTROL_COMMAND_UNKNOWN+3)) #define GLOBUS_FTP_CONTROL_COMMAND_EPSV \ ((globus_ftp_control_command_code_t)(GLOBUS_FTP_CONTROL_COMMAND_UNKNOWN+4)) if(command->code == GLOBUS_FTP_CONTROL_COMMAND_UNKNOWN) { if(!strncasecmp("MLSD",command->base.raw_command,4)) { command->code=GLOBUS_FTP_CONTROL_COMMAND_MLSD; const char* arg = get_arg(command->base.raw_command); if(arg == NULL) { arg=""; }; command->list.string_arg=(char*)arg; } else if(!strncasecmp("MLST",command->base.raw_command,4)) { command->code=GLOBUS_FTP_CONTROL_COMMAND_MLST; const char* arg = get_arg(command->base.raw_command); if(arg == NULL) { arg=""; }; command->list.string_arg=(char*)arg; } else if(!strncasecmp("EPRT",command->base.raw_command,4)) { command->code=GLOBUS_FTP_CONTROL_COMMAND_EPRT; char* arg = get_arg(command->base.raw_command); if(parse_eport(arg,&(command->port.host_port)) != 0) { logger.msg(Arc::VERBOSE, "Command EPRT"); logger.msg(Arc::ERROR, "Failed to parse remote address %s",arg); it->send_response("553 
Failed to parse port for data transfer\r\n"); return; } } else if(!strncasecmp("EPSV",command->base.raw_command,4)) { command->code=GLOBUS_FTP_CONTROL_COMMAND_EPSV; const char* arg = get_arg(command->base.raw_command); if(arg == NULL) { arg=""; }; command->pasv.string_arg=(char*)arg; }; }; #endif switch((int)command->code) { case GLOBUS_FTP_CONTROL_COMMAND_AUTH: { it->send_response("534 Reauthentication is not supported\r\n"); }; break; case GLOBUS_FTP_CONTROL_COMMAND_FEAT: { it->send_response("211- Features supported\r\n FEAT\r\n AUTH\r\n ERET\r\n SBUF\r\n DCAU\r\n SPAS\r\n SPOR\r\n SIZE\r\n MDTM\r\n MLSD\r\n MLST\r\nEPRT\r\nEPSV\r\n211 End\r\n"); }; break; case GLOBUS_FTP_CONTROL_COMMAND_USER: { fix_string_arg(command->user.string_arg); logger.msg(Arc::VERBOSE, "Command USER %s", command->user.string_arg); it->send_response("230 No need for username\r\n"); }; break; case GLOBUS_FTP_CONTROL_COMMAND_PASS: { it->send_response("230 No need for password\r\n"); }; break; case GLOBUS_FTP_CONTROL_COMMAND_CDUP: { logger.msg(Arc::VERBOSE, "Command CDUP"); command->code=GLOBUS_FTP_CONTROL_COMMAND_CWD; command->cwd.string_arg=(char*)".."; }; case GLOBUS_FTP_CONTROL_COMMAND_CWD: { fix_string_arg(command->cwd.string_arg); logger.msg(Arc::VERBOSE, "Command CWD %s", command->cwd.string_arg); std::string pwd = command->cwd.string_arg; if(it->froot.cwd(pwd) == 0) { pwd = "250 \""+pwd+"\" is current directory\r\n"; it->send_response(pwd.c_str()); } else { if(it->froot.error.length()) { it->send_response("550 "+it->froot.error+"\r\n"); } else { it->send_response("550 Can't change to this directory.\r\n"); }; }; }; break; case GLOBUS_FTP_CONTROL_COMMAND_MKD: { fix_string_arg(command->mkd.string_arg); logger.msg(Arc::VERBOSE, "Command MKD %s", command->mkd.string_arg); std::string pwd = command->mkd.string_arg; if(it->froot.mkd(pwd) == 0) { it->send_response("250 MKD command ok.\r\n"); } else { if(it->froot.error.length()) { it->send_response("550 "+it->froot.error+"\r\n"); } else { it->send_response("550 Can't make this directory.\r\n"); }; }; }; break; case GLOBUS_FTP_CONTROL_COMMAND_PWD: { std::string pwd; pwd = "257 \""+(it->froot.cwd())+"\" is current directory\r\n"; it->send_response(pwd.c_str()); }; break; case GLOBUS_FTP_CONTROL_COMMAND_SIZE: { fix_string_arg(command->size.string_arg); logger.msg(Arc::VERBOSE, "Command SIZE %s", command->size.string_arg); unsigned long long size; if(it->froot.size(command->size.string_arg,&size) != 0) { if(it->froot.error.length()) { it->send_response("550 "+it->froot.error+"\r\n"); } else { it->send_response("550 Size for object not available.\r\n"); }; break; }; char buf[200]; sprintf(buf,"213 %llu\r\n",size); it->send_response(buf); }; break; case GLOBUS_FTP_CONTROL_COMMAND_SBUF: { CHECK_TRANSFER; logger.msg(Arc::VERBOSE, "Command SBUF: %i", command->sbuf.buffer_size); // Because Globus wants SBUF to apply for all following data // connections, there is no way to reset to system defaults. 
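// For example, a client wanting larger TCP windows for the following data
// connections sends something like "SBUF 262144" (the argument is a byte count;
// the value here is purely illustrative).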
// Let's make a little extension globus_ftp_control_tcpbuffer_t tcpbuf; if(command->sbuf.buffer_size == 0) { tcpbuf.mode=GLOBUS_FTP_CONTROL_TCPBUFFER_DEFAULT; } else if(command->sbuf.buffer_size < 0) { it->send_response("501 Wrong argument for SBUF\r\n"); break; } else { tcpbuf.mode=GLOBUS_FTP_CONTROL_TCPBUFFER_FIXED; tcpbuf.fixed.size=command->sbuf.buffer_size; }; if(globus_ftp_control_local_tcp_buffer(&(it->handle),&tcpbuf) != GLOBUS_SUCCESS) { it->send_response("501 SBUF argument can't be accepted\r\n"); break; }; it->send_response("200 Accepted TCP buffer size\r\n"); }; break; case GLOBUS_FTP_CONTROL_COMMAND_MLST: { fix_string_arg(command->list.string_arg); logger.msg(Arc::VERBOSE, "Command MLST %s", command->list.string_arg); DirEntry info; if(it->froot.checkfile(command->list.string_arg,info,DirEntry::full_object_info) != 0) { if(it->froot.error.length()) { it->send_response("550 "+it->froot.error+"\r\n"); } else { it->send_response("550 Information for object not available\r\n"); }; break; }; char buf[1024]; const char* str1="250-Information follows\r\n "; const char* str2="250 Information finished\r\n"; int str1l=strlen(str1); int str2l=strlen(str2); strcpy(buf,str1); make_list_string(info,list_mlsd_mode,(unsigned char*)(buf+str1l), 1024-str1l-str2l,""); strcat(buf,str2); it->send_response(buf); }; break; case GLOBUS_FTP_CONTROL_COMMAND_DELE: { fix_string_arg(command->dele.string_arg); logger.msg(Arc::VERBOSE, "Command DELE %s", command->dele.string_arg); std::string file = command->dele.string_arg; if(it->froot.rm(file) == 0) { it->send_response("250 File deleted.\r\n"); } else { if(it->froot.error.length()) { it->send_response("550 "+it->froot.error+"\r\n"); } else { it->send_response("550 Can't delete this file.\r\n"); }; }; }; break; case GLOBUS_FTP_CONTROL_COMMAND_RMD: { fix_string_arg(command->rmd.string_arg); logger.msg(Arc::VERBOSE, "Command RMD %s", command->rmd.string_arg); std::string dfile = command->rmd.string_arg; if(it->froot.rmd(dfile) == 0) { it->send_response("250 Directory deleted.\r\n"); } else { if(it->froot.error.length()) { it->send_response("550 "+it->froot.error+"\r\n"); } else { it->send_response("550 Can't delete this directory.\r\n"); }; }; }; break; case GLOBUS_FTP_CONTROL_COMMAND_TYPE: { logger.msg(Arc::VERBOSE, "Command TYPE %c", (char)(command->type.type)); CHECK_TRANSFER; if(command->type.type==GLOBUS_FTP_CONTROL_TYPE_NONE) { it->send_response("504 Unsupported type\r\n"); } else { globus_ftp_control_local_type(&(it->handle),command->type.type,0); it->send_response("200 Type accepted\r\n"); }; }; break; case GLOBUS_FTP_CONTROL_COMMAND_MODE: { logger.msg(Arc::VERBOSE, "Command MODE %c", (char)(command->mode.mode)); CHECK_TRANSFER; if((command->mode.mode!=GLOBUS_FTP_CONTROL_MODE_STREAM) && (command->mode.mode!=GLOBUS_FTP_CONTROL_MODE_EXTENDED_BLOCK)) { it->send_response("504 Unsupported mode\r\n"); } else { globus_ftp_control_local_mode(&(it->handle),command->mode.mode); it->send_response("200 Mode accepted\r\n"); }; }; break; case GLOBUS_FTP_CONTROL_COMMAND_ABOR: { logger.msg(Arc::VERBOSE, "Command ABOR"); globus_mutex_lock(&(it->abort_lock)); if(!(it->transfer_mode)) { globus_mutex_unlock(&(it->abort_lock)); it->send_response("226 Abort not needed\r\n"); } else { globus_mutex_unlock(&(it->abort_lock)); it->make_abort(); }; }; break; case GLOBUS_FTP_CONTROL_COMMAND_REST: { /* for the beginning stream mode only */ fix_string_arg(command->rest.string_arg); logger.msg(Arc::VERBOSE, "Command REST %s", command->rest.string_arg); CHECK_TRANSFER; 
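/* Illustrative stream-mode restart exchange (the file name is made up):
 *   C: REST 2048
 *   S: 350 Restore pointer accepted
 *   C: RETR /somedir/output.dat
 * The decimal offset parsed below is stored in virt_offset and applied to the
 * next transfer. */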
it->virt_restrict=false; if(sscanf(command->rest.string_arg,"%llu",&(it->virt_offset)) != 1) { it->virt_offset=0; it->send_response("501 Wrong parameter\r\n"); break; }; it->send_response("350 Restore pointer accepted\r\n"); }; break; case GLOBUS_FTP_CONTROL_COMMAND_EPSV: case GLOBUS_FTP_CONTROL_COMMAND_SPAS: case GLOBUS_FTP_CONTROL_COMMAND_PASV: { if(command->code == GLOBUS_FTP_CONTROL_COMMAND_EPSV) { logger.msg(Arc::VERBOSE, "Command EPSV %s",command->pasv.string_arg); } else if(command->code == GLOBUS_FTP_CONTROL_COMMAND_SPAS) { logger.msg(Arc::VERBOSE, "Command SPAS"); } else { logger.msg(Arc::VERBOSE, "Command PASV"); }; CHECK_TRANSFER; globus_ftp_control_host_port_t node; memset(&node,0,sizeof(node)); char buf[200]; if((command->code == GLOBUS_FTP_CONTROL_COMMAND_EPSV) && it->local_is_ipv6) { // EPSV requires data and control to be of same interface // Hopefully globus opens port both for IPv4 and IPv6 globus_ftp_control_ipv6_allow(&(it->handle),GLOBUS_TRUE); // But it does not. It also ignores address passed to it // in 'node'. Looks like only option to control which // socket is created is to directly set attribute in // internal member. globus_io_attr_set_tcp_interface(&(it->handle.dc_handle.io_attr),"0:0:0:0:0:0:0:0"); } if((command->code == GLOBUS_FTP_CONTROL_COMMAND_PASV) || (command->code == GLOBUS_FTP_CONTROL_COMMAND_EPSV)) { globus_result_t res_tmp; if((res_tmp=globus_ftp_control_local_pasv(&(it->handle),&node)) !=GLOBUS_SUCCESS){ logger.msg(Arc::ERROR, "local_pasv failed"); logger.msg(Arc::ERROR, Arc::GlobusResult(res_tmp).str()); it->send_response("553 Failed to allocate port for data transfer\r\n"); break; }; if(it->firewall[0] && (node.hostlen == 4)) { // replace address // todo: we need separate firewall for IPv4 and IPv6 node.host[0]=it->firewall[0]; node.host[1]=it->firewall[1]; node.host[2]=it->firewall[2]; node.host[3]=it->firewall[3]; }; if(command->code == GLOBUS_FTP_CONTROL_COMMAND_PASV) { sprintf(buf,"227 Entering Passive Mode (%i,%i,%i,%i,%i,%i)\r\n", node.host[0], node.host[1], node.host[2], node.host[3], (node.port & 0x0FF00) >> 8, node.port & 0x000FF); } else { // EPSV sprintf(buf,"229 Entering Extended Passive Mode (|||%u|)\r\n",node.port); }; } else { // SPAS globus_result_t res_tmp; if((res_tmp=globus_ftp_control_local_spas(&(it->handle),&node,1)) !=GLOBUS_SUCCESS){ logger.msg(Arc::ERROR, "local_spas failed"); logger.msg(Arc::ERROR, Arc::GlobusResult(res_tmp).str()); it->send_response("553 Failed to allocate port for data transfer\r\n"); break; }; if(it->firewall[0]) { // replace address node.host[0]=it->firewall[0]; node.host[1]=it->firewall[1]; node.host[2]=it->firewall[2]; node.host[3]=it->firewall[3]; }; sprintf(buf,"229-Entering Passive Mode\r\n %i,%i,%i,%i,%i,%i\r\n229 End\r\n", node.host[0], node.host[1], node.host[2], node.host[3], (node.port & 0x0FF00) >> 8, node.port & 0x000FF); }; it->data_conn_type=GRIDFTP_CONNECT_PASV; it->send_response(buf); }; break; case GLOBUS_FTP_CONTROL_COMMAND_EPRT: case GLOBUS_FTP_CONTROL_COMMAND_PORT: { if(command->code == GLOBUS_FTP_CONTROL_COMMAND_EPRT) { logger.msg(Arc::VERBOSE, "Command EPRT"); } else { logger.msg(Arc::VERBOSE, "Command PORT"); }; if(!it->froot.active_data) { logger.msg(Arc::ERROR, "active_data is disabled"); it->send_response("553 Active data transfer is disabled\r\n"); break; } CHECK_TRANSFER; globus_ftp_control_host_port_t node; node=command->port.host_port; globus_result_t res_tmp = globus_ftp_control_local_port(&(it->handle),&node); if(res_tmp != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, 
"local_port failed"); logger.msg(Arc::ERROR, Arc::GlobusResult(res_tmp).str()); it->send_response("553 Failed to accept port for data transfer\r\n"); break; }; it->data_conn_type=GRIDFTP_CONNECT_PORT; it->send_response("200 Node accepted\r\n"); }; break; case GLOBUS_FTP_CONTROL_COMMAND_MLSD: case GLOBUS_FTP_CONTROL_COMMAND_NLST: case GLOBUS_FTP_CONTROL_COMMAND_LIST: { fix_string_arg(command->list.string_arg); if(command->code == GLOBUS_FTP_CONTROL_COMMAND_MLSD) { logger.msg(Arc::VERBOSE, "Command MLSD %s", command->list.string_arg); } else if(command->code == GLOBUS_FTP_CONTROL_COMMAND_NLST) { logger.msg(Arc::VERBOSE, "Command NLST %s", command->list.string_arg); } else { logger.msg(Arc::VERBOSE, "Command LIST %s", command->list.string_arg); }; CHECK_TRANSFER; DirEntry::object_info_level mode; if(command->code == GLOBUS_FTP_CONTROL_COMMAND_LIST) { it->list_mode=list_list_mode; mode=DirEntry::basic_object_info; } else if(command->code == GLOBUS_FTP_CONTROL_COMMAND_NLST) { it->list_mode=list_nlst_mode; mode=DirEntry::minimal_object_info; } else { it->list_mode=list_mlsd_mode; mode=DirEntry::full_object_info; }; it->dir_list.clear(); it->list_name_prefix=""; int res = it->froot.readdir(command->list.string_arg,it->dir_list,mode); // 1 - error if(res == 1) { // error, most probably no such dir if(it->froot.error.length()) { it->send_response("450 "+it->froot.error+"\r\n"); break; } else { it->send_response("450 Object unavailable.\r\n"); break; }; }; // -1 - file if((res == -1) && ( (it->list_mode == list_mlsd_mode) || (it->list_mode == list_nlst_mode) ) ) { // MLSD and NLST are for directories only it->send_response("501 Object is not a directory.\r\n"); break; }; // 0 - directory if(it->data_conn_type == GRIDFTP_CONNECT_PORT) { it->send_response("150 Opening connection for list.\r\n"); it->transfer_mode=true; it->transfer_abort=false; globus_ftp_control_data_connect_write(&(it->handle),&list_connect_retrieve_callback,it); } else if(it->data_conn_type == GRIDFTP_CONNECT_PASV) { it->send_response("150 Opening connection for list.\r\n"); it->transfer_mode=true; it->transfer_abort=false; globus_ftp_control_data_connect_write(&(it->handle),&list_connect_retrieve_callback,it); } else { it->send_response("501 PORT or PASV command needed\r\n"); }; }; break; case GLOBUS_FTP_CONTROL_COMMAND_ERET: { fix_string_arg(command->eret.string_arg); logger.msg(Arc::VERBOSE, "Command ERET %s", command->eret.string_arg); char* args[4]; if(parse_args(command->eret.raw_command,args,4)<4) { it->send_response("500 parsing failed\r\n"); break; }; if(strcmp(args[0],"P")) { it->send_response("500 mark parsing failed\r\n"); break; }; char* ep; it->virt_restrict=true; it->virt_offset=strtoull(args[1],&ep,10); if((*ep) != 0) { it->send_response("500 offset parsing failed\r\n"); break; }; it->virt_size=strtoull(args[2],&ep,10); if((*ep) != 0) { it->send_response("500 size parsing failed\r\n"); break; }; if(it->froot.open(args[3],GRIDFTP_OPEN_RETRIEVE)!=0) { /* failed to open */ if(it->froot.error.length()) { it->send_response("550 "+it->froot.error+"\r\n"); } else { it->send_response("550 File unavailable.\r\n"); }; break; }; }; case GLOBUS_FTP_CONTROL_COMMAND_RETR: { if(command->code == GLOBUS_FTP_CONTROL_COMMAND_RETR) { fix_string_arg(command->retr.string_arg); logger.msg(Arc::VERBOSE, "Command RETR %s", command->retr.string_arg); CHECK_TRANSFER; /* try to open file */ if(it->froot.open(command->retr.string_arg,GRIDFTP_OPEN_RETRIEVE)!=0) { /* failed to open */ if(it->froot.error.length()) { it->send_response("550 
"+it->froot.error+"\r\n"); break; } else { it->send_response("550 File unavailable.\r\n"); break; }; break; }; it->virt_restrict=false; }; if(it->data_conn_type == GRIDFTP_CONNECT_PORT) { it->send_response("150 Opening connection.\r\n"); it->transfer_abort=false; it->transfer_mode=true; globus_ftp_control_data_connect_write(&(it->handle),&data_connect_retrieve_callback,it); } else if(it->data_conn_type == GRIDFTP_CONNECT_PASV) { it->send_response("150 Opening connection.\r\n"); it->transfer_abort=false; it->transfer_mode=true; globus_ftp_control_data_connect_write(&(it->handle),&data_connect_retrieve_callback,it); } else { it->send_response("502 PORT or PASV command needed\r\n"); }; }; break; case GLOBUS_FTP_CONTROL_COMMAND_STOR: { fix_string_arg(command->stor.string_arg); logger.msg(Arc::VERBOSE, "Command STOR %s", command->stor.string_arg); CHECK_TRANSFER; /* try to open file */ if(it->froot.open(command->stor.string_arg,GRIDFTP_OPEN_STORE) != 0) { it->file_size=0; /* failed to open */ if(it->froot.error.length()) { it->send_response("553 "+it->froot.error+"\r\n"); } else { it->send_response("553 File not allowed.\r\n"); }; break; }; it->file_size=0; if(it->data_conn_type == GRIDFTP_CONNECT_PORT) { it->send_response("150 Opening connection.\r\n"); it->transfer_abort=false; it->transfer_mode=true; globus_ftp_control_data_connect_read(&(it->handle),&data_connect_store_callback,it); } else if(it->data_conn_type == GRIDFTP_CONNECT_PASV) { it->send_response("150 Opening connection.\r\n"); it->transfer_abort=false; it->transfer_mode=true; globus_ftp_control_data_connect_read(&(it->handle),&data_connect_store_callback,it); } else { it->send_response("501 PORT or PASV command needed\r\n"); }; }; break; case GLOBUS_FTP_CONTROL_COMMAND_ALLO: { logger.msg(Arc::VERBOSE, "Command ALLO %i", command->allo.size); it->file_size=0; char* args[4]; int n = parse_args(command->allo.raw_command,args,4); if( (n==0) || (n==4) || (n==2) || ((n==3) && (strcmp(args[1],"R"))) ) { it->send_response("500 parsing failed\r\n"); break; }; char *e; it->file_size=strtoull(args[0],&e,10); if((*e) != 0) { it->file_size=0; it->send_response("500 parsing failed\r\n"); break; }; if(n == 3) { it->file_size=strtoull(args[2],&e,10); if((*e) != 0) { it->file_size=0; it->send_response("500 parsing failed\r\n"); break; }; }; it->send_response("200 Size accepted\r\n"); }; break; case GLOBUS_FTP_CONTROL_COMMAND_OPTS: { logger.msg(Arc::VERBOSE, "Command OPTS"); CHECK_TRANSFER; if(!strncasecmp(command->opts.cmd_name,"RETR",4)) { logger.msg(Arc::VERBOSE, "Command OPTS RETR"); char* args[3]; char* val; int v; int i; int n=parse_semicolon(command->opts.cmd_opts,args,3); if(n>3) n=3; globus_ftp_control_parallelism_t dp; dp.mode=GLOBUS_FTP_CONTROL_PARALLELISM_NONE; for(i=0;isend_response("500 Syntax failure\r\n"); break; }; (*val++)=0; if(!strcasecmp(args[i],"parallelism")) { int argn[3]; if((v=parse_integers(val,argn,3)) != 3) { it->send_response("500 parsing failed\r\n"); break; }; if(argn[0]<=0) { it->send_response("500 bad value\r\n"); break; }; if(argn[0]>50) { it->send_response("500 too big value\r\n"); break; }; dp.mode=GLOBUS_FTP_CONTROL_PARALLELISM_FIXED; dp.fixed.size=argn[0]; continue; } else { it->send_response("501 Sorry, option not supported\r\n"); break; }; }; if(ihandle),&dp); /* it->data_buffer_num=dp.fixed.size*2+1; it->data_buffer_size=default_data_buffer_size; if(it->data_buffer_num > 41) it->data_buffer_num=41; if(it->data_buffer_num < 3) it->data_buffer_num=3; if((it->data_buffer_num * 
it->data_buffer_size)>max_data_buffer_size) { it->data_buffer_size=max_data_buffer_size/it->data_buffer_num; }; if(it->data_buffer_size=0) { it->data_buffer_size=1; }; */ it->send_response("200 New options are valid\r\n"); } else { it->send_response("501 OPTS for command is not supported\r\n"); break; }; }; break; case GLOBUS_FTP_CONTROL_COMMAND_NOOP: { logger.msg(Arc::VERBOSE, "Command NOOP"); it->send_response("200 Doing nothing.\r\n"); }; break; case GLOBUS_FTP_CONTROL_COMMAND_QUIT: { logger.msg(Arc::VERBOSE, "Command QUIT"); it->make_abort(); if(it->send_response("221 Quitting.\r\n") == 0) { it->wait_response(); }; //globus_ftp_control_force_close(&(it->handle),&close_callback,it); //// delete it; logger.msg(Arc::INFO, "Closing connection"); if(globus_ftp_control_force_close(&(it->handle),&close_callback,it) != GLOBUS_SUCCESS) { logger.msg(Arc::WARNING, "Failed to close, deleting client"); delete it; //} else { // it->wait_response(); }; //delete it; return; }; break; case GLOBUS_FTP_CONTROL_COMMAND_UNKNOWN: default: { fix_string_arg(command->base.raw_command); if(!strncasecmp("DCAU",command->base.raw_command,4)) { char* args[2]; int n = parse_args(command->base.raw_command,args,2); logger.msg(Arc::VERBOSE, "Command DCAU: %i '%s'", n, args[0]); if((n < 1) || (n > 2) || (strlen(args[0]) != 1)) { it->send_response("500 Wrong number of arguments\r\n"); break; }; if(args[0][0] == 'T') args[0][0]='A'; if((args[0][0] == GLOBUS_FTP_CONTROL_DCAU_NONE) || (args[0][0] == GLOBUS_FTP_CONTROL_DCAU_SELF)) { if(n != 1) { it->send_response("500 Do not need a subject\r\n"); break; } } else if(args[0][0] == GLOBUS_FTP_CONTROL_DCAU_SUBJECT) { if(n != 2) { it->send_response("500 Need an argument\r\n"); break; } } else { it->send_response("504 Unsupported authentication type\r\n"); break; }; it->data_dcau.mode=(globus_ftp_control_dcau_mode_t)(args[0][0]); if(n>1) { if(it->data_dcau.subject.subject) free(it->data_dcau.subject.subject); it->data_dcau.subject.subject = strdup(args[1]); }; globus_ftp_control_local_dcau(&(it->handle),&(it->data_dcau),it->delegated_cred); it->send_response("200 Authentication type accepted\r\n"); } else if(!strncasecmp("PBSZ",command->base.raw_command,4)) { CHECK_TRANSFER; char* args[1]; int n = parse_args(command->base.raw_command,args,1); logger.msg(Arc::VERBOSE, "Command PBZS: %s", args[0]); if(n > 1) { it->send_response("501 Need only one argument\r\n"); break; }; unsigned long pbsz; unsigned long npbsz; pbsz=atoi(args[0]); if((n <= 0) || (n>1000000)) { /* let's not support TOO BIG buffers */ it->send_response("501 Wrong number\r\n"); break; }; logger.msg(Arc::VERBOSE, "Setting pbsz to %lu", pbsz); globus_ftp_control_local_pbsz(&(it->handle),pbsz); globus_ftp_control_get_pbsz(&(it->handle),&npbsz); if(pbsz == npbsz) { it->send_response("200 Accepted buffer size\r\n"); } else { char buf[200]; sprintf(buf,"200 PBSZ=%lu\r\n",npbsz); it->send_response(buf); }; /* it->data_buffer_size=npbsz; */ } else if(!strncasecmp("PROT",command->base.raw_command,4)) { CHECK_TRANSFER; char* args[1]; int n = parse_args(command->base.raw_command,args,1); logger.msg(Arc::VERBOSE, "Command PROT: %s", args[0]); if(n > 1) { it->send_response("501 Need only one argument\r\n"); break; }; if(strlen(args[0]) != 1) { it->send_response("504 Protection level is not supported\r\n"); break; }; bool allow_protection = true; switch(args[0][0]) { case GLOBUS_FTP_CONTROL_PROTECTION_PRIVATE: if(!it->froot.heavy_encryption) allow_protection=false; case GLOBUS_FTP_CONTROL_PROTECTION_SAFE: case 
GLOBUS_FTP_CONTROL_PROTECTION_CONFIDENTIAL: case GLOBUS_FTP_CONTROL_PROTECTION_CLEAR: { if(allow_protection) { it->send_response("200 Protection mode accepted\r\n"); globus_ftp_control_local_prot(&(it->handle), (globus_ftp_control_protection_t)args[0][0]); } else { it->send_response("504 Protection level is not allowed\r\n"); }; }; break; default: { it->send_response("504 Protection level is not supported\r\n"); }; }; } else if(!strncasecmp("MDTM",command->base.raw_command,4)) { char* arg = get_arg(command->base.raw_command); logger.msg(Arc::VERBOSE, "Command MDTM %s", (arg?arg:"")); if(arg == NULL) { it->send_response("501 Need name\r\n"); break; }; time_t t; struct tm tt; struct tm *tp; if(it->froot.time(arg,&t) != 0) { if(it->froot.error.length()) { it->send_response("550 "+it->froot.error+"\r\n"); } else { it->send_response("550 Time for object not available.\r\n"); }; break; }; tp=gmtime_r(&t,&tt); if(tp==NULL) { it->send_response("550 Time for object not available\r\n"); break; }; char buf[200]; sprintf(buf,"213 %04u%02u%02u%02u%02u%02u\r\n",tp->tm_year+1900,tp->tm_mon+1,tp->tm_mday,tp->tm_hour,tp->tm_min,tp->tm_sec); it->send_response(buf); } else { logger.msg(Arc::VERBOSE, "Raw command: %s", command->base.raw_command); it->send_response("500 Do not understand\r\n"); }; } break; }; } void GridFTP_Commands::free_data_buffer(void) { if(data_buffer == NULL) return; for(unsigned int i = 0;i 41) data_buffer_num=41; if(data_buffer_num < 3) data_buffer_num=3; if((data_buffer_num * data_buffer_size) > max_data_buffer_size) { data_buffer_size=max_data_buffer_size/data_buffer_num; }; if(data_buffer_size==0) { data_buffer_size=1; }; return; } bool GridFTP_Commands::allocate_data_buffer(void) { free_data_buffer(); data_buffer=(data_buffer_t*)malloc(sizeof(data_buffer_t)*data_buffer_num); if(data_buffer == NULL) return false; unsigned int i; for(i = 0;iabort_lock)); if(error != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "abort_callback: Globus error: %s", Arc::globus_object_to_string(error)); }; /* check for flag just in case */ if(it->transfer_abort) { it->send_response("226 Abort finished\r\n"); }; it->transfer_abort=false; it->transfer_mode=false; globus_cond_broadcast(&(it->abort_cond)); /* many threads can be waiting */ globus_mutex_unlock(&(it->abort_lock)); } /* perform data transfer abort */ void GridFTP_Commands::make_abort(bool already_locked,bool wait_abort) { logger.msg(Arc::VERBOSE, "make_abort: start"); if(!already_locked) globus_mutex_lock(&abort_lock); if(!transfer_mode) { /* leave if not transfering */ globus_mutex_unlock(&abort_lock); return; }; bool t = transfer_mode; if(!transfer_abort) { /* not aborting yet */ if(globus_ftp_control_data_force_close(&handle,abort_callback,this) == GLOBUS_SUCCESS) { transfer_abort=true; } else { logger.msg(Arc::ERROR, "Failed to abort data connection - ignoring and recovering"); globus_mutex_unlock(&abort_lock); abort_callback(this,&handle,GLOBUS_SUCCESS); globus_mutex_lock(&abort_lock); }; }; last_action_time=time(NULL); if(wait_abort) while(transfer_abort) { logger.msg(Arc::INFO, "make_abort: wait for abort flag to be reset"); globus_cond_wait(&abort_cond,&abort_lock); }; if(t) { /* transfer_mode=false; */ /* close (if) opened files */ froot.close(false); virt_offset=0; virt_restrict=false; }; logger.msg(Arc::VERBOSE, "make_abort: leaving"); globus_mutex_unlock(&abort_lock); } /* check for globus error, print it and abort connection if necessary */ /* This function should always be called from data transfer callbacks with data_lock locked */ 
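/* Returns true when the caller must stop registering data buffers: either an
   abort is already in progress (or transfer mode is off), or the Globus error
   passed in has just triggered one, in which case a 426 reply is sent and
   make_abort() is invoked. */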
bool GridFTP_Commands::check_abort(globus_object_t *error) { globus_mutex_lock(&abort_lock); if(transfer_abort || (!transfer_mode)) { /* abort in progress or not transfering anymore */ globus_mutex_unlock(&abort_lock); return true; /* just leave telling to stop registering buffers */ }; if((error != GLOBUS_SUCCESS)) { logger.msg(Arc::ERROR, "check_abort: have Globus error"); logger.msg(Arc::ERROR, "Abort request caused by transfer error"); logger.msg(Arc::ERROR, "Globus error: %s", Arc::globus_object_to_string(error)); /* TODO !!!!!!!!!!! should be only one 426 !!!!!!!!!! */ logger.msg(Arc::INFO, "check_abort: sending 426"); send_response("426 Transfer terminated.\r\n"); globus_mutex_unlock(&data_lock); /* release other waiting threads */ make_abort(true,false); globus_mutex_lock(&data_lock); return true; }; globus_mutex_unlock(&abort_lock); return false; } /* same as make_abort, but is called from data transfer callbacks */ /* This function should always be called from data transfer callbacks with data_lock locked */ void GridFTP_Commands::force_abort(void) { globus_mutex_lock(&abort_lock); if(transfer_abort || (!transfer_mode)) { /* abort in progress or not transfering anymore*/ globus_mutex_unlock(&abort_lock); return; }; logger.msg(Arc::INFO, "Abort request caused by error in transfer function"); /* TODO !!!!!!!!!!! should be only one 426 !!!!!!!!!! */ if(froot.error.length()) { send_response("426 "+froot.error+"\r\n"); } else { send_response("426 Transfer terminated.\r\n"); }; globus_mutex_unlock(&data_lock); /* release other waiting threads */ make_abort(true,false); globus_mutex_lock(&data_lock); return; } GridFTP_Commands::GridFTP_Commands(int n,unsigned int* f) { log_id=n; firewall[0]=0; firewall[1]=0; firewall[2]=0; firewall[3]=0; if(f) memcpy(firewall,f,sizeof(firewall)); globus_mutex_init(&response_lock,GLOBUS_NULL); globus_cond_init(&response_cond,GLOBUS_NULL); response_done=0; globus_mutex_init(&abort_lock,GLOBUS_NULL); globus_cond_init(&abort_cond,GLOBUS_NULL); data_done=0; globus_mutex_init(&data_lock,GLOBUS_NULL); data_buffer=NULL; data_buffer_size=default_data_buffer_size; data_buffer_num=3; data_buf_count=0; data_callbacks=0; data_offset=0; globus_ftp_control_handle_init(&handle); data_dcau.mode=GLOBUS_FTP_CONTROL_DCAU_DEFAULT; data_dcau.subject.subject=NULL; data_conn_type=GRIDFTP_CONNECT_NONE; virt_offset=0; virt_size=0; virt_restrict=false; time_spent_disc=0; time_spent_network=0; transfer_mode=false; transfer_abort=false; data_eof=false; delegated_cred=NULL; file_size=0; last_action_time=time(NULL); list_offset=0; list_mode=list_mlsd_mode; /* harmless race condition here */ if(!timeouter) { GridFTP_Commands_timeout* t = new GridFTP_Commands_timeout; if(!timeouter) { timeouter=t; } else { delete t; }; }; timeouter->add(*this); } GridFTP_Commands::~GridFTP_Commands(void) { /* here all connections should be closed and all callbacks unregistered */ globus_mutex_destroy(&response_lock); globus_cond_destroy(&response_cond); globus_mutex_destroy(&abort_lock); globus_cond_destroy(&abort_cond); globus_ftp_control_handle_destroy(&handle); timeouter->remove(*this); #ifndef __DONT_USE_FORK__ delete timeouter; /* globus_mutex_lock(&fork_lock); fork_done=1; globus_cond_signal(&fork_cond); globus_mutex_unlock(&fork_lock); */ #endif } GridFTP_Commands_timeout::GridFTP_Commands_timeout(void) { exit_cond_flag=false; cond_flag=false; globus_mutex_init(&lock,GLOBUS_NULL); globus_cond_init(&cond,GLOBUS_NULL); globus_cond_init(&exit_cond,GLOBUS_NULL); 
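// Start the watchdog thread: timer_func() below periodically walks the list of
// registered connections and force-closes any whose last_action_time is older
// than FTP_TIMEOUT.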
if(globus_thread_create(&timer_thread,NULL,&timer_func,(void*)this)!=0){ logger.msg(Arc::ERROR, "Failed to start timer thread - timeout won't work"); globus_mutex_destroy(&lock); globus_cond_destroy(&cond); globus_cond_destroy(&exit_cond); exit_cond_flag=true; }; } GridFTP_Commands_timeout::~GridFTP_Commands_timeout(void) { if(exit_cond_flag) return; cond_flag=true; globus_mutex_lock(&lock); globus_cond_signal(&cond); while(!exit_cond_flag) { globus_cond_wait(&exit_cond,&lock); }; globus_mutex_unlock(&lock); globus_mutex_destroy(&lock); globus_cond_destroy(&cond); globus_cond_destroy(&exit_cond); } void GridFTP_Commands_timeout::remove(const GridFTP_Commands& cmd) { if(exit_cond_flag) return; globus_mutex_lock(&lock); for(std::list::iterator i=cmds.begin();i!=cmds.end();) { if(&cmd == (*i)) { i=cmds.erase(i); } else { ++i; }; }; globus_mutex_unlock(&lock); } void GridFTP_Commands_timeout::add(GridFTP_Commands& cmd) { if(exit_cond_flag) return; globus_mutex_lock(&lock); cmds.push_back(&cmd); globus_mutex_unlock(&lock); } void* GridFTP_Commands_timeout::timer_func(void* arg) { GridFTP_Commands_timeout* it = (GridFTP_Commands_timeout*)arg; globus_mutex_lock(&(it->lock)); for(;;) { if(it->cond_flag) { /* destructor */ break; }; time_t curr_time = time(NULL); time_t next_wakeup = curr_time + FTP_TIMEOUT; for(std::list::iterator i=it->cmds.begin(); i!=it->cmds.end();++i) { if((*i)->last_action_time != (time_t)(-1)) { time_t time_passed = curr_time - (*i)->last_action_time; if(time_passed >= FTP_TIMEOUT) { /* cancel connection */ logger.msg(Arc::ERROR, "Killing connection due to timeout"); #if GLOBUS_IO_VERSION<5 shutdown((*i)->handle.cc_handle.io_handle.fd,2); #else globus_io_register_close(&((*i)->handle.cc_handle.io_handle),&io_close_cb,NULL); #endif (*i)->last_action_time=(time_t)(-1); } else { time_passed = curr_time + (FTP_TIMEOUT - time_passed); if(time_passed < next_wakeup) next_wakeup = time_passed; }; }; }; curr_time = time(NULL); if(next_wakeup < curr_time) { next_wakeup=curr_time; }; globus_abstime_t timeout; GlobusTimeAbstimeSet(timeout,next_wakeup-curr_time,0); globus_cond_timedwait(&(it->cond),&(it->lock),&timeout); }; it->exit_cond_flag=true; globus_cond_signal(&(it->exit_cond)); globus_mutex_unlock(&(it->lock)); return NULL; } nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/datawrite.cpp0000644000000000000000000000013214152153376024251 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.218559716 nordugrid-arc-6.14.0/src/services/gridftpd/datawrite.cpp0000644000175000002070000001323514152153376024242 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include "fileroot.h" #include "names.h" #include "commands.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"GridFTP_Commands"); /* file store callbacks */ void GridFTP_Commands::data_connect_store_callback(void* arg,globus_ftp_control_handle_t*,unsigned int /* stripendx */,globus_bool_t /* reused */,globus_object_t *error) { GridFTP_Commands *it = (GridFTP_Commands*)arg; logger.msg(Arc::VERBOSE, "data_connect_store_callback"); globus_thread_blocking_will_block(); globus_mutex_lock(&(it->data_lock)); it->time_spent_disc=0; it->time_spent_network=0; it->last_action_time=time(NULL); logger.msg(Arc::VERBOSE, "Data channel connected (store)"); if(it->check_abort(error)) { it->froot.close(false); globus_mutex_unlock(&(it->data_lock)); return; }; it->data_eof = false; /* make buffers */ 
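/* compute_data_buffer() picks the buffer count and size (kept between 3 and 41
   buffers and capped by max_data_buffer_size) and allocate_data_buffer()
   reserves them; each buffer is then handed to globus_ftp_control_data_read()
   below. */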
it->compute_data_buffer(); if(!(it->allocate_data_buffer())) { it->froot.close(false); it->force_abort(); globus_mutex_unlock(&(it->data_lock)); return; }; /* register all available buffers */ it->data_callbacks=0; globus_result_t res = GLOBUS_SUCCESS; for(unsigned int i = 0;idata_buffer_num;i++) { if(!((it->data_buffer)[i].data)) continue; struct timezone tz; gettimeofday(&(it->data_buffer[i].time_last),&tz); res=globus_ftp_control_data_read(&(it->handle), (globus_byte_t*)(it->data_buffer[i].data), it->data_buffer_size, &data_store_callback,it); if(res==GLOBUS_SUCCESS) { it->data_callbacks++; } else { break; }; }; if(it->data_callbacks==0) { logger.msg(Arc::ERROR, "Failed to register any buffer"); if(res != GLOBUS_SUCCESS) { logger.msg(Arc::ERROR, "Globus error: %s", Arc::GlobusResult(res).str()); }; it->froot.close(false); it->free_data_buffer(); it->force_abort(); globus_mutex_unlock(&(it->data_lock)); return; }; globus_mutex_unlock(&(it->data_lock)); return; } void GridFTP_Commands::data_store_callback(void* arg,globus_ftp_control_handle_t*,globus_object_t *error,globus_byte_t *buffer,globus_size_t length,globus_off_t offset,globus_bool_t eof) { globus_thread_blocking_will_block(); GridFTP_Commands *it = (GridFTP_Commands*)arg; struct timezone tz; struct timeval tv; gettimeofday(&tv,&tz); globus_mutex_lock(&(it->data_lock)); it->last_action_time=time(NULL); logger.msg(Arc::VERBOSE, "Data channel (store) %i %i %i", (int)offset, (int)length, (int)eof); it->data_callbacks--; if(it->check_abort(error)) { if(it->data_callbacks==0){it->free_data_buffer();it->froot.close(false);}; globus_mutex_unlock(&(it->data_lock)); return; }; if(eof) it->data_eof=true; /* find this buffer */ unsigned int i; for(i = 0;idata_buffer_num;i++) { if((it->data_buffer)[i].data == (unsigned char*)buffer) break; }; if(i >= it->data_buffer_num) { /* lost buffer - probably memory corruption */ logger.msg(Arc::ERROR, "data_store_callback: lost buffer"); it->force_abort(); if(it->data_callbacks==0){it->free_data_buffer();it->froot.close(false);}; globus_mutex_unlock(&(it->data_lock)); return; }; unsigned long long int time_diff = (tv.tv_sec-(it->data_buffer[i].time_last.tv_sec))*1000000+ (tv.tv_usec-(it->data_buffer[i].time_last.tv_usec)); it->time_spent_network+=time_diff; /* write data to file NOTE: it->data_lock is not unlocked here because it->froot.write is not thread safe */ struct timeval tv_last; gettimeofday(&tv_last,&tz); if(it->froot.write(it->data_buffer[i].data, (it->virt_offset)+offset,length) != 0) { logger.msg(Arc::ERROR, "Closing channel (store) due to error: %s", it->froot.error); it->force_abort(); if(it->data_callbacks==0){it->free_data_buffer();it->froot.close(false);}; globus_mutex_unlock(&(it->data_lock)); return; }; gettimeofday(&tv,&tz); time_diff=(tv.tv_sec-tv_last.tv_sec)*1000000+(tv.tv_usec-tv_last.tv_usec); it->time_spent_disc+=time_diff; if(it->data_eof) { if(it->data_callbacks==0) { logger.msg(Arc::VERBOSE, "Closing channel (store)"); it->free_data_buffer(); it->virt_offset=0; it->virt_restrict=false; it->transfer_mode=false; if(it->froot.close() != 0) { if(it->froot.error.length()) { it->send_response("451 "+it->froot.error+"\r\n"); } else { it->send_response("451 Local error\r\n"); }; } else { logger.msg(Arc::VERBOSE, "Time spent waiting for network: %.3f ms", (float)(it->time_spent_network/1000.0)); logger.msg(Arc::VERBOSE, "Time spent waiting for disc: %.3f ms", (float)(it->time_spent_disc/1000.0)); it->send_response("226 Requested file transfer completed\r\n"); }; }; 
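// EOF branch: this buffer is not re-registered; the 226 reply above is sent
// only once the last outstanding read callback has completed
// (data_callbacks == 0).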
globus_mutex_unlock(&(it->data_lock)); return; }; /* register buffer */ globus_result_t res; gettimeofday(&(it->data_buffer[i].time_last),&tz); res=globus_ftp_control_data_read(&(it->handle), (globus_byte_t*)(it->data_buffer[i].data), it->data_buffer_size, &data_store_callback,it); if(res != GLOBUS_SUCCESS) { /* Because this error can be caused by EOF, abort should not be called unless this is last buffer */ if(it->data_callbacks==0) { logger.msg(Arc::ERROR, "Globus error: %s", Arc::GlobusResult(res).str()); it->force_abort(); it->free_data_buffer();it->froot.close(false); }; globus_mutex_unlock(&(it->data_lock)); return; }; it->data_callbacks++; globus_mutex_unlock(&(it->data_lock)); return; } nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/names.cpp0000644000000000000000000000013114152153376023367 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455099.212559626 nordugrid-arc-6.14.0/src/services/gridftpd/names.cpp0000644000175000002070000000155014152153376023356 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include bool remove_last_name(std::string &name) { int n=name.rfind('/'); if(n==-1) { if(name.length() == 0) return false; name=""; return true; }; name=name.substr(0,n); return true; } bool keep_last_name(std::string &name) { int n=name.rfind('/'); if(n==-1) return false; name=name.substr(n+1); return true; } /* only good names can come here - not checking */ char* remove_head_dir_c(const char* name,int dir_len) { char* s = (char*)name+dir_len; if((*s) == '/') s++; return s; } std::string remove_head_dir_s(std::string &name,int dir_len) { if(name[dir_len]=='/') dir_len++; return name.substr(dir_len); } const char* get_last_name(const char* name) { const char* p = strrchr(name,'/'); if(p==NULL) { p=name; } else { p++; }; return p; } nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/misc0000644000000000000000000000013214152153473022435 xustar000000000000000030 mtime=1638455099.252560227 30 atime=1638455103.999631554 30 ctime=1638455099.252560227 nordugrid-arc-6.14.0/src/services/gridftpd/misc/0000755000175000002070000000000014152153473022477 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/gridftpd/misc/PaxHeaders.30264/ldapquery.cpp0000644000000000000000000000013114152153376025225 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455099.251560212 nordugrid-arc-6.14.0/src/services/gridftpd/misc/ldapquery.cpp0000644000175000002070000004343314152153376025222 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include "config.h" #endif #include #ifdef HAVE_SASL_H #include #include #endif #ifdef HAVE_SASL_SASL_H #include #include #endif #include #include #include #include #include #include #include #include #include "ldapquery.h" #ifdef HAVE_LIBINTL_H #include #define _(A) dgettext("arclib", (A)) #else #define _(A) (A) #endif namespace gridftpd { static Arc::Logger logger(Arc::Logger::getRootLogger(),"LdapQuery"); class sigpipe_ingore { public: sigpipe_ingore(); }; struct ldap_bind_arg { LDAP *connection; Arc::SimpleCondition cond; bool anonymous; std::string usersn; bool valid; }; static void* ldap_bind_with_timeout(void* arg_); #if defined(HAVE_SASL_H) || defined(HAVE_SASL_SASL_H) class sasl_defaults { public: sasl_defaults (ldap *ld, const std::string & mech, const std::string & realm, const std::string & authcid, const std::string & authzid, const std::string & passwd); ~sasl_defaults() {}; private: std::string 
p_mech; std::string p_realm; std::string p_authcid; std::string p_authzid; std::string p_passwd; friend int my_sasl_interact(ldap *ld, unsigned int flags, void * defaults_, void * interact_); }; static sigpipe_ingore sigpipe_ingore; sigpipe_ingore::sigpipe_ingore() { signal(SIGPIPE,SIG_IGN); } sasl_defaults::sasl_defaults (ldap *ld, const std::string & mech, const std::string & realm, const std::string & authcid, const std::string & authzid, const std::string & passwd) : p_mech (mech), p_realm (realm), p_authcid (authcid), p_authzid (authzid), p_passwd (passwd) { if (p_mech.empty()) { char * temp; ldap_get_option (ld, LDAP_OPT_X_SASL_MECH, &temp); if (temp) { p_mech = temp; free (temp); } } if (p_realm.empty()) { char * temp; ldap_get_option (ld, LDAP_OPT_X_SASL_REALM, &temp); if (temp) { p_realm = temp; free (temp); } } if (p_authcid.empty()) { char * temp; ldap_get_option (ld, LDAP_OPT_X_SASL_AUTHCID, &temp); if (temp) { p_authcid = temp; free (temp); } } if (p_authzid.empty()) { char * temp; ldap_get_option (ld, LDAP_OPT_X_SASL_AUTHZID, &temp); if (temp) { p_authzid = temp; free (temp); } } } int my_sasl_interact(ldap* /* ld */, unsigned int flags, void * defaults_, void * interact_) { sasl_interact_t * interact = (sasl_interact_t *) interact_; sasl_defaults * defaults = (sasl_defaults *) defaults_; if (flags == LDAP_SASL_INTERACTIVE) { logger.msg(Arc::VERBOSE, _("SASL Interaction")); } while (interact->id != SASL_CB_LIST_END) { bool noecho = false; bool challenge = false; bool use_default = false; switch (interact->id) { case SASL_CB_GETREALM: if (defaults && !defaults->p_realm.empty()) interact->defresult = strdup (defaults->p_realm.c_str()); break; case SASL_CB_AUTHNAME: if (defaults && !defaults->p_authcid.empty()) interact->defresult = strdup (defaults->p_authcid.c_str()); break; case SASL_CB_USER: if (defaults && !defaults->p_authzid.empty()) interact->defresult = strdup (defaults->p_authzid.c_str()); break; case SASL_CB_PASS: if (defaults && !defaults->p_passwd.empty()) interact->defresult = strdup (defaults->p_passwd.c_str()); noecho = true; break; case SASL_CB_NOECHOPROMPT: noecho = true; challenge = true; break; case SASL_CB_ECHOPROMPT: challenge = true; break; } if (flags != LDAP_SASL_INTERACTIVE && (interact->defresult || interact->id == SASL_CB_USER)) { use_default = true; } else { if (flags == LDAP_SASL_QUIET) return 1; if (challenge && interact->challenge) logger.msg(Arc::VERBOSE, "%s: %s", _("Challenge"), interact->challenge); if (interact->defresult) logger.msg(Arc::VERBOSE, "%s: %s", _("Default"), interact->defresult); std::string prompt; std::string input; prompt = interact->prompt ? std::string (interact->prompt) + ": " : "Interact: "; if (noecho) { input = getpass (prompt.c_str()); } else { std::cout << prompt; std::cin >> input; } if (input.empty()) use_default = true; else { interact->result = strdup (input.c_str()); interact->len = input.length(); } } if (use_default) { interact->result = strdup (interact->defresult ? 
interact->defresult : ""); interact->len = strlen ((char *) interact->result); } if (defaults && interact->id == SASL_CB_PASS) { // clear default password after first use defaults->p_passwd = ""; } interact++; } return 0; } #endif LdapQuery::LdapQuery(const std::string& ldaphost, int ldapport, bool anonymous, const std::string& usersn, int timeout) : host(ldaphost), port(ldapport), anonymous(anonymous), usersn(usersn), timeout(timeout), connection(NULL), messageid(0) {} LdapQuery::~LdapQuery() { if (connection) { ldap_unbind_ext (connection, NULL, NULL); connection = NULL; } } void LdapQuery::Connect() { const int version = LDAP_VERSION3; logger.msg(Arc::VERBOSE, "%s: %s:%i", _("LdapQuery: Initializing connection to"), host, port); if (connection) throw LdapQueryError( _("Ldap connection already open to") + (" " + host)); ldap_initialize(&connection, ("ldap://" + host + ':' + Arc::tostring(port)).c_str()); if (!connection) throw LdapQueryError( _("Could not open ldap connection to") + (" " + host)); try { SetConnectionOptions(version); } catch (LdapQueryError& e) { // Clean up and re-throw exception ldap_unbind_ext (connection, NULL, NULL); connection = NULL; throw; } ldap_bind_arg arg; arg.connection = connection; arg.anonymous = anonymous; arg.usersn = usersn; arg.valid = false; pthread_t thr; if (pthread_create(&thr, NULL, &ldap_bind_with_timeout, &arg) != 0) { ldap_unbind_ext (connection, NULL, NULL); connection = NULL; throw LdapQueryError( _("Failed to create ldap bind thread") + (" (" + host + ")")); } if (!arg.cond.wait(1000 * (timeout + 1))) { pthread_cancel (thr); pthread_detach (thr); // if bind fails unbind will fail too - so don't call it connection = NULL; throw LdapQueryError( _("Ldap bind timeout") + (" (" + host + ")")); } pthread_join (thr, NULL); if (!arg.valid) { ldap_unbind_ext (connection, NULL, NULL); connection = NULL; throw LdapQueryError( _("Failed to bind to ldap server") + (" (" + host + ")")); }; } void LdapQuery::SetConnectionOptions(int version) { timeval tout; tout.tv_sec = timeout; tout.tv_usec = 0; if (ldap_set_option (connection, LDAP_OPT_NETWORK_TIMEOUT, &tout) != LDAP_OPT_SUCCESS) throw LdapQueryError( _("Could not set ldap network timeout") + (" (" + host + ")")); if (ldap_set_option (connection, LDAP_OPT_TIMELIMIT, &timeout) != LDAP_OPT_SUCCESS) throw LdapQueryError( _("Could not set ldap timelimit") + (" (" + host + ")")); if (ldap_set_option (connection, LDAP_OPT_PROTOCOL_VERSION, &version) != LDAP_OPT_SUCCESS) throw LdapQueryError( _("Could not set ldap protocol version") + (" (" + host + ")")); } static void* ldap_bind_with_timeout(void* arg_) { ldap_bind_arg* arg = (ldap_bind_arg* ) arg_; int ldresult = 0; if (arg->anonymous) { BerValue cred = { 0, (char*)"" }; ldresult = ldap_sasl_bind_s (arg->connection, NULL, LDAP_SASL_SIMPLE, &cred, NULL, NULL, NULL); } else { #if defined(HAVE_SASL_H) || defined(HAVE_SASL_SASL_H) int ldapflag = LDAP_SASL_QUIET; if (logger.getThreshold() <= Arc::VERBOSE) ldapflag = LDAP_SASL_AUTOMATIC; sasl_defaults defaults = sasl_defaults (arg->connection, SASLMECH, "", "", arg->usersn, ""); ldresult = ldap_sasl_interactive_bind_s (arg->connection, NULL, SASLMECH, NULL, NULL, ldapflag, my_sasl_interact, &defaults); #else BerValue cred = { 0, (char*)"" }; ldresult = ldap_sasl_bind_s (arg->connection, NULL, LDAP_SASL_SIMPLE, &cred, NULL, NULL, NULL); #endif } if (ldresult != LDAP_SUCCESS) { arg->valid = false; arg->cond.signal(); } else { arg->valid = true; arg->cond.signal(); } return NULL; } void LdapQuery::Query(const 
std::string& base, const std::string& filter, const std::vector & attributes, Scope scope) { Connect(); logger.msg(Arc::VERBOSE, "%s %s", _("LdapQuery: Querying"), host); logger.msg(Arc::VERBOSE, "%s: %s", _("base dn"), base); if (!filter.empty()) logger.msg(Arc::VERBOSE, " %s: %s", _("filter"), filter); if (!attributes.empty()) { logger.msg(Arc::VERBOSE, " %s:", _("attributes")); for (std::vector::const_iterator vs = attributes.begin(); vs != attributes.end(); vs++) logger.msg(Arc::VERBOSE, " %s", *vs); } timeval tout; tout.tv_sec = timeout; tout.tv_usec = 0; char *filt = (char *) filter.c_str(); char ** attrs; if (attributes.empty()) attrs = NULL; else { attrs = new char * [attributes.size() + 1]; int i = 0; for (std::vector::const_iterator vs = attributes.begin(); vs != attributes.end(); vs++, i++) attrs [i] = (char *) vs->c_str(); attrs [i] = NULL; } int ldresult = ldap_search_ext (connection, base.c_str(), scope, filt, attrs, 0, NULL, NULL, &tout, 0, &messageid); if (attrs) delete[] attrs; if (ldresult != LDAP_SUCCESS) { std::string error_msg(ldap_err2string (ldresult)); error_msg += " (" + host + ")"; ldap_unbind_ext (connection, NULL, NULL); connection = NULL; throw LdapQueryError(error_msg); } } void LdapQuery::Result(ldap_callback callback, void* ref) { try { HandleResult(callback, ref); } catch (LdapQueryError& e) { // Clean up and re-throw exception ldap_unbind_ext (connection, NULL, NULL); connection = NULL; messageid = 0; throw; } // Since C++ doesnt have finally(), here we are again ldap_unbind_ext (connection, NULL, NULL); connection = NULL; messageid = 0; return; } void LdapQuery::HandleResult(ldap_callback callback, void* ref) { logger.msg(Arc::VERBOSE, "%s %s", _("LdapQuery: Getting results from"), host); if (!messageid) throw LdapQueryError( _("Error: no ldap query started to") + (" " + host)); timeval tout; tout.tv_sec = timeout; tout.tv_usec = 0; bool done = false; int ldresult = 0; LDAPMessage * res = NULL; while (!done && (ldresult = ldap_result (connection, messageid, LDAP_MSG_ONE, &tout, &res)) > 0) { for (LDAPMessage * msg = ldap_first_message (connection, res); msg; msg = ldap_next_message (connection, msg)) { switch (ldap_msgtype(msg)) { case LDAP_RES_SEARCH_ENTRY: HandleSearchEntry(msg, callback, ref); break; case LDAP_RES_SEARCH_RESULT: done = true; break; } // switch } // for ldap_msgfree (res); } if (ldresult == 0) throw LdapQueryError(_("Ldap query timed out") + (": " + host)); if (ldresult == -1) { std::string error_msg(ldap_err2string (ldresult)); error_msg += " (" + host + ")"; throw LdapQueryError(error_msg); } return; } void LdapQuery::HandleSearchEntry(LDAPMessage* msg, ldap_callback callback, void* ref) { char *dn = ldap_get_dn(connection, msg); callback("dn", dn, ref); if (dn) ldap_memfree(dn); BerElement *ber = NULL; for (char *attr = ldap_first_attribute (connection, msg, &ber); attr; attr = ldap_next_attribute (connection, msg, ber)) { BerValue **bval; if ((bval = ldap_get_values_len (connection, msg, attr))) { for (int i = 0; bval[i]; i++) { callback (attr, (bval[i]->bv_val ? 
bval[i]->bv_val : ""), ref); } ber_bvecfree(bval); } ldap_memfree(attr); } if (ber) ber_free(ber, 0); } std::string LdapQuery::Host() { return host; } ParallelLdapQueries::ParallelLdapQueries(std::list clusters, std::string filter, std::vector attrs, ldap_callback callback, void* object, LdapQuery::Scope scope, const std::string& usersn, bool anonymous, int timeout) : clusters(clusters), filter(filter), attrs(attrs), callback(callback), object(object), scope(scope), usersn(usersn), anonymous(anonymous), timeout(timeout) { urlit = this->clusters.begin(); pthread_mutex_init(&lock, NULL); } ParallelLdapQueries::~ParallelLdapQueries() { pthread_mutex_destroy(&lock); } void ParallelLdapQueries::Query() { const int numqueries = clusters.size(); pthread_t* threads = new pthread_t[numqueries]; int res; for (unsigned int i = 0; ilock); Arc::URL qurl = *(plq->urlit); plq->urlit++; pthread_mutex_unlock(&plq->lock); LdapQuery ldapq(qurl.Host(), qurl.Port(), plq->anonymous, plq->usersn, plq->timeout); try { pthread_mutex_lock(&plq->lock); ldapq.Query(qurl.Path(), plq->filter, plq->attrs, plq->scope); /* is Path() correct here to replace BaseDN() ?? */ pthread_mutex_unlock(&plq->lock); } catch (LdapQueryError& e) { pthread_mutex_unlock(&plq->lock); logger.msg(Arc::VERBOSE, "%s: %s", _("Warning"), e.what()); pthread_exit(NULL); } pthread_mutex_lock(&plq->lock); try { ldapq.Result(plq->callback, plq->object); } catch (LdapQueryError& e) { logger.msg(Arc::VERBOSE, "%s: %s", _("Warning"), e.what()); } pthread_mutex_unlock(&plq->lock); pthread_exit(NULL); } } // namespace gridftpd nordugrid-arc-6.14.0/src/services/gridftpd/misc/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376024547 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.511647567 30 ctime=1638455099.247560152 nordugrid-arc-6.14.0/src/services/gridftpd/misc/Makefile.am0000644000175000002070000000103714152153376024536 0ustar00mockbuildmock00000000000000noinst_LTLIBRARIES = libmisc.la if LDAP_ENABLED libmisc_la_SOURCES = \ ldapquery.cpp proxy.cpp \ ldapquery.h proxy.h else libmisc_la_SOURCES = \ proxy.cpp \ proxy.h endif libmisc_la_CXXFLAGS = -I$(top_srcdir)/include \ $(LIBXML2_CFLAGS) $(GLIBMM_CFLAGS) $(GLOBUS_GSSAPI_GSI_CFLAGS) $(AM_CXXFLAGS) libmisc_la_LIBADD = \ $(top_builddir)/src/hed/libs/credentialstore/libarccredentialstore.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(LDAP_LIBS) -lpthread nordugrid-arc-6.14.0/src/services/gridftpd/misc/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435024555 xustar000000000000000030 mtime=1638455069.565114159 30 atime=1638455090.912434913 30 ctime=1638455099.246560137 nordugrid-arc-6.14.0/src/services/gridftpd/misc/Makefile.in0000644000175000002070000006753214152153435024557 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
@SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/gridftpd/misc DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = LTLIBRARIES = $(noinst_LTLIBRARIES) am__DEPENDENCIES_1 = libmisc_la_DEPENDENCIES = $(top_builddir)/src/hed/libs/credentialstore/libarccredentialstore.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(am__DEPENDENCIES_1) am__libmisc_la_SOURCES_DIST = proxy.cpp proxy.h ldapquery.cpp \ ldapquery.h @LDAP_ENABLED_FALSE@am_libmisc_la_OBJECTS = libmisc_la-proxy.lo @LDAP_ENABLED_TRUE@am_libmisc_la_OBJECTS = libmisc_la-ldapquery.lo \ @LDAP_ENABLED_TRUE@ libmisc_la-proxy.lo libmisc_la_OBJECTS = $(am_libmisc_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = libmisc_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX 
$(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(libmisc_la_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libmisc_la_SOURCES) DIST_SOURCES = $(am__libmisc_la_SOURCES_DIST) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = 
@GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ 
XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ noinst_LTLIBRARIES = libmisc.la @LDAP_ENABLED_FALSE@libmisc_la_SOURCES = \ @LDAP_ENABLED_FALSE@ proxy.cpp \ @LDAP_ENABLED_FALSE@ proxy.h @LDAP_ENABLED_TRUE@libmisc_la_SOURCES = \ @LDAP_ENABLED_TRUE@ ldapquery.cpp proxy.cpp \ @LDAP_ENABLED_TRUE@ ldapquery.h proxy.h libmisc_la_CXXFLAGS = -I$(top_srcdir)/include \ $(LIBXML2_CFLAGS) $(GLIBMM_CFLAGS) $(GLOBUS_GSSAPI_GSI_CFLAGS) $(AM_CXXFLAGS) libmisc_la_LIBADD = \ $(top_builddir)/src/hed/libs/credentialstore/libarccredentialstore.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(LDAP_LIBS) -lpthread all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/gridftpd/misc/Makefile'; \ $(am__cd) 
$(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/gridftpd/misc/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstLTLIBRARIES: -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) @list='$(noinst_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libmisc.la: $(libmisc_la_OBJECTS) $(libmisc_la_DEPENDENCIES) $(EXTRA_libmisc_la_DEPENDENCIES) $(AM_V_CXXLD)$(libmisc_la_LINK) $(libmisc_la_OBJECTS) $(libmisc_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libmisc_la-ldapquery.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libmisc_la-proxy.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< libmisc_la-proxy.lo: proxy.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libmisc_la_CXXFLAGS) $(CXXFLAGS) -MT libmisc_la-proxy.lo -MD -MP -MF $(DEPDIR)/libmisc_la-proxy.Tpo -c -o libmisc_la-proxy.lo `test -f 'proxy.cpp' || echo '$(srcdir)/'`proxy.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libmisc_la-proxy.Tpo $(DEPDIR)/libmisc_la-proxy.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='proxy.cpp' object='libmisc_la-proxy.lo' libtool=yes @AMDEPBACKSLASH@ 
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libmisc_la_CXXFLAGS) $(CXXFLAGS) -c -o libmisc_la-proxy.lo `test -f 'proxy.cpp' || echo '$(srcdir)/'`proxy.cpp libmisc_la-ldapquery.lo: ldapquery.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libmisc_la_CXXFLAGS) $(CXXFLAGS) -MT libmisc_la-ldapquery.lo -MD -MP -MF $(DEPDIR)/libmisc_la-ldapquery.Tpo -c -o libmisc_la-ldapquery.lo `test -f 'ldapquery.cpp' || echo '$(srcdir)/'`ldapquery.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libmisc_la-ldapquery.Tpo $(DEPDIR)/libmisc_la-ldapquery.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='ldapquery.cpp' object='libmisc_la-ldapquery.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libmisc_la_CXXFLAGS) $(CXXFLAGS) -c -o libmisc_la-ldapquery.lo `test -f 'ldapquery.cpp' || echo '$(srcdir)/'`ldapquery.cpp mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-noinstLTLIBRARIES cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/gridftpd/misc/PaxHeaders.30264/proxy.h0000644000000000000000000000013114152153376024045 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455099.250560197 nordugrid-arc-6.14.0/src/services/gridftpd/misc/proxy.h0000644000175000002070000000035014152153376024031 0ustar00mockbuildmock00000000000000#ifndef GRID_SERVER_PROXY_H #define GRID_SERVER_PROXY_H #include namespace gridftpd { char* write_proxy(gss_cred_id_t cred); char* write_cert_chain(const gss_ctx_id_t gss_context); } // namespace gridftpd #endif nordugrid-arc-6.14.0/src/services/gridftpd/misc/PaxHeaders.30264/ldapquery.h0000644000000000000000000000013114152153376024672 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455099.252560227 nordugrid-arc-6.14.0/src/services/gridftpd/misc/ldapquery.h0000644000175000002070000000733314152153376024666 0ustar00mockbuildmock00000000000000/* * Interface for querying ldap servers. Should support GSSGSI-API for * SASL if your environment is set up correctly, however so far it * isn't necessary in the ARC. */ #ifndef ARCLIB_LDAPQUERY #define ARCLIB_LDAPQUERY #include #include #include #include #include #define SASLMECH "GSI-GSSAPI" namespace gridftpd { /** LdapQuery exception. Gets thrown whan an error occurs in a query. */ class LdapQueryError : public std::exception { public: /** Standard exception class constructor. */ LdapQueryError(std::string message): message(message) {} ~LdapQueryError() throw() {} virtual const char* what() const throw() { return message.c_str(); } private: std::string message; }; /** * LDAP callback type. Your ldap callbacks should be of same structure. */ typedef void (*ldap_callback)(const std::string& attr, const std::string& value, void *ref); /** * LdapQuery class; querying of LDAP servers. */ class LdapQuery { public: /** * Constructs a new LdapQuery object and sets connection options. * The connection is first established when calling Query. */ LdapQuery(const std::string& ldaphost, int ldapport, bool anonymous = true, const std::string& usersn = "", int timeout = 20); /** * Destructor. Will disconnect from the ldapserver if stll connected. */ ~LdapQuery(); /** * Scope for a LDAP queries. Use when querying. */ enum Scope { base, onelevel, subtree }; /** * Queries the ldap server. */ void Query(const std::string& base, const std::string& filter = "(objectclass=*)", const std::vector & attributes = std::vector(), Scope scope = subtree); /** * Retrieves the result of the query from the ldap-server. */ void Result(ldap_callback callback, void *ref); /** * Returns the hostname of the ldap-server. 
*/ std::string Host(); private: void Connect(); void SetConnectionOptions(int version); void HandleResult(ldap_callback callback, void *ref); void HandleSearchEntry(LDAPMessage *msg, ldap_callback callback, void *ref); std::string host; int port; bool anonymous; std::string usersn; int timeout; ldap *connection; int messageid; }; /** General method to perform parallel ldap-queries to a set of clusters */ class ParallelLdapQueries { public: ParallelLdapQueries(std::list clusters, std::string filter, std::vector attrs, ldap_callback callback, void* object, LdapQuery::Scope scope = LdapQuery::subtree, const std::string& usersn = "", bool anonymous = true, int timeout = 20); ~ParallelLdapQueries(); void Query(); static void* DoLdapQuery(void* arg); private: std::list clusters; std::string filter; std::vector attrs; ldap_callback callback; void* object; LdapQuery::Scope scope; std::string usersn; bool anonymous; int timeout; std::list::iterator urlit; pthread_mutex_t lock; }; } // namespace gridftpd #endif // ARCLIB_LDAPQUERY nordugrid-arc-6.14.0/src/services/gridftpd/misc/PaxHeaders.30264/proxy.cpp0000644000000000000000000000013114152153376024400 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455099.249560182 nordugrid-arc-6.14.0/src/services/gridftpd/misc/proxy.cpp0000644000175000002070000000710214152153376024366 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include "proxy.h" #include #include #include #include #include #include namespace gridftpd { char* write_proxy(gss_cred_id_t cred) { char* proxy_fname = NULL; OM_uint32 major_status = 0; OM_uint32 minor_status = 0; gss_buffer_desc deleg_proxy_filename; if(cred == GSS_C_NO_CREDENTIAL) return NULL; major_status = gss_export_cred(&minor_status, cred, NULL, 1, &deleg_proxy_filename); if (major_status == GSS_S_COMPLETE) { char * cp; cp = strchr((char *)deleg_proxy_filename.value, '='); if(cp != NULL) { cp++; proxy_fname=strdup(cp); }; free(deleg_proxy_filename.value); }; return proxy_fname; } char* write_cert_chain(const gss_ctx_id_t gss_context) { /* Globus OID for the remote parties certificate chain */ gss_OID_desc cert_chain_oid = {11, (void*)"\x2b\x06\x01\x04\x01\x9b\x50\x01\x01\x01\x08"}; gss_buffer_set_t client_cert_chain = NULL; OM_uint32 major_status; OM_uint32 minor_status; int certs_num = 0; int n,n_; STACK_OF(X509) *cert_chain = NULL; BIO* bio = NULL; char* fname = NULL; major_status = gss_inquire_sec_context_by_oid(&minor_status, gss_context, &cert_chain_oid, &client_cert_chain); if(major_status != GSS_S_COMPLETE) { return NULL; }; certs_num = client_cert_chain->count; if(certs_num <= 0) goto err_exit; if((cert_chain = sk_X509_new_null()) == NULL) goto err_exit; for(n=0,n_=0;nelements[n].value; int length = (int)client_cert_chain->elements[n].length; X509* cert = d2i_X509(NULL,&value,length); if(cert) { if(cert) sk_X509_insert(cert_chain,cert,n_++); /* { X509_NAME *name = X509_get_subject_name(cert); char buf[256]; buf[0]=0; if(name) { X509_NAME_oneline(name,buf,sizeof(buf)); fprintf(stderr,"Name: %s\n",buf); } else { fprintf(stderr,"Name: none\n"); }; }; */ } else { /* fprintf(stderr,"No cert\n"); */ }; }; /* TODO: do not store in file - pass directly to calling function */ /* Make temporary file */ { std::string tempname = Glib::build_filename(Glib::get_tmp_dir(), "x509.XXXXXX"); if(!Arc::TmpFileCreate(tempname, "")) goto err_exit; fname = strdup(tempname.c_str()); 
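/*
 * A minimal usage sketch for the LdapQuery interface declared in
 * ldapquery.h above, kept entirely inside this comment so it does not
 * affect the surrounding translation unit. The host name, port, base DN
 * and filter below are illustrative assumptions, not values used by this
 * code base.
 *
 *   #include <iostream>
 *   #include <string>
 *   #include <vector>
 *   #include "ldapquery.h"
 *
 *   // Callback matching gridftpd::ldap_callback; called once per
 *   // attribute/value pair of every entry returned by the server.
 *   static void print_entry(const std::string& attr,
 *                           const std::string& value, void*) {
 *     std::cout << attr << ": " << value << std::endl;
 *   }
 *
 *   int query_example() {
 *     gridftpd::LdapQuery q("ldap.example.org", 2135);  // anonymous bind, 20 s timeout by default
 *     std::vector<std::string> attrs;                   // empty vector requests all attributes
 *     try {
 *       q.Query("o=grid", "(objectclass=*)", attrs, gridftpd::LdapQuery::subtree);
 *       q.Result(&print_entry, NULL);
 *     } catch (gridftpd::LdapQueryError& e) {
 *       std::cerr << e.what() << std::endl;
 *       return 1;
 *     }
 *     return 0;
 *   }
 */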
if((bio=BIO_new_file(fname,"w")) == NULL) goto err_exit; }; for(n=0;n #endif #include #include #include #include #include #include #include #include #include "userspec.h" #include "conf.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"userspec_t"); void userspec_t::free(void) const { // Keep authentication info to preserve proxy (just in case) } userspec_t::userspec_t(void):user(),uid(-1),gid(-1),port(0),map(user),default_map(user) { host[0] = 0; } userspec_t::~userspec_t(void) { userspec_t::free(); } bool userspec_t::fill(globus_ftp_control_auth_info_t *auth,globus_ftp_control_handle_t *handle, const char* cfg) { struct passwd pw_; struct group gr_; struct passwd* pw=NULL; struct group* gr=NULL; char bufp[BUFSIZ]; char bufg[BUFSIZ]; if(cfg) config_file = cfg; if(auth == NULL) return false; if(auth->auth_gssapi_subject == NULL) return false; std::string subject; Arc::ConfigIni::NextArg(auth->auth_gssapi_subject,subject,'\0','\0'); // fill host info if(handle) { //int host[4] = {0,0,0,0}; //unsigned short port = 0; if(globus_io_tcp_get_remote_address(&(handle->cc_handle.io_handle), host,&(port)) != GLOBUS_SUCCESS) { port=0; user.set(auth->auth_gssapi_subject,auth->auth_gssapi_context, auth->delegated_credential_handle); } else { char abuf[1024]; abuf[sizeof(abuf)-1]=0; struct hostent he; struct hostent* he_p; struct in_addr a; snprintf(abuf,sizeof(abuf)-1,"%u.%u.%u.%u", (unsigned int)host[0],(unsigned int)host[1],(unsigned int)host[2],(unsigned int)host[3]); if(inet_aton(abuf,&a) != 0) { int h_errnop; char buf[1024]; he_p=globus_libc_gethostbyaddr_r((char*)&a,strlen(abuf),AF_INET, &he,buf,sizeof(buf),&h_errnop); if(he_p) { if(strcmp(he_p->h_name,"localhost") == 0) { abuf[sizeof(abuf)-1]=0; if(globus_libc_gethostname(abuf,sizeof(abuf)-1) != 0) { strcpy(abuf,"localhost"); }; }; }; }; user.set(auth->auth_gssapi_subject,auth->auth_gssapi_context, auth->delegated_credential_handle,abuf); }; } else { user.set(auth->auth_gssapi_subject,auth->auth_gssapi_context, auth->delegated_credential_handle); }; if((!user.is_proxy()) || (user.proxy() == NULL) || (user.proxy()[0] == 0)) { logger.msg(Arc::INFO, "No proxy provided"); } else { logger.msg(Arc::VERBOSE, "Proxy/credentials stored at %s", user.proxy()); }; char* name=NULL; getpwuid_r(getuid(),&pw_,bufp,BUFSIZ,&pw); if(pw == NULL) { logger.msg(Arc::WARNING, "Running user has no name"); } else { name=strdup(pw->pw_name); logger.msg(Arc::INFO, "Mapped to running user: %s", name); }; if(pw) { uid=pw->pw_uid; if(gr) { gid=gr->gr_gid; } else { gid=pw->pw_gid; }; logger.msg(Arc::INFO, "Mapped to local id: %i", uid); home=pw->pw_dir; if(!gr) { getgrgid_r(gid,&gr_,bufg,BUFSIZ,&gr); if(gr == NULL) { logger.msg(Arc::ERROR, "No group %i for mapped user", gid); }; }; default_map.setunixuser(name?name:"", gr?gr->gr_name:""); logger.msg(Arc::INFO, "Mapped to local group id: %i", gid); if(gr) logger.msg(Arc::INFO, "Mapped to local group name: %s", gr->gr_name); logger.msg(Arc::VERBOSE, "Mapped user's home: %s", home); }; if(name) std::free(name); if(!user) return false; return true; } bool userspec_t::fill(AuthUser& u, const char* cfg) { struct passwd pw_; struct group gr_; struct passwd *pw=NULL; struct group *gr=NULL; char bufp[BUFSIZ]; char bufg[BUFSIZ]; std::string subject = u.DN(); if(cfg) config_file = cfg; user=u; if((!user.is_proxy()) || (user.proxy() == NULL) || (user.proxy()[0] == 0)) { logger.msg(Arc::INFO, "No proxy provided"); } else { logger.msg(Arc::INFO, "Proxy stored at %s", user.proxy()); }; char* name=NULL; 
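// Determine the account the server process itself runs under: the uid/gid
// and home directory recorded below become the default local mapping
// (default_map) for this connection.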
getpwuid_r(getuid(),&pw_,bufp,BUFSIZ,&pw); if(pw == NULL) { logger.msg(Arc::WARNING, "Running user has no name"); } else { name=strdup(pw->pw_name); logger.msg(Arc::INFO, "Mapped to running user: %s", name); }; if(pw) { uid=pw->pw_uid; if(gr) { gid=gr->gr_gid; } else { gid=pw->pw_gid; }; logger.msg(Arc::INFO, "Mapped to local id: %i", uid); home=pw->pw_dir; if(!gr) { getgrgid_r(gid,&gr_,bufg,BUFSIZ,&gr); if(gr == NULL) { logger.msg(Arc::INFO, "No group %i for mapped user", gid); }; }; default_map.setunixuser(name?name:"", gr?gr->gr_name:""); logger.msg(Arc::INFO, "Mapped to local group id: %i", pw->pw_gid); if(gr) logger.msg(Arc::INFO, "Mapped to local group name: %s", gr->gr_name); logger.msg(Arc::INFO, "Mapped user's home: %s", home); }; if(name) std::free(name); return true; } std::string subst_user_spec(std::string &in,userspec_t const *spec) { std::string out = ""; unsigned int i; unsigned int last; last=0; for(i=0;ilast) out+=in.substr(last,i-last); i++; if(i>=in.length()) { }; switch(in[i]) { case 'u': { char buf[10]; snprintf(buf,9,"%i",spec->uid); out+=buf; last=i+1; }; break; case 'U': { out+=spec->get_uname(); last=i+1; }; break; case 'g': { char buf[10]; snprintf(buf,9,"%i",spec->gid); out+=buf; last=i+1; }; break; case 'G': { out+=spec->get_gname(); last=i+1; }; break; case 'D': { out+=spec->user.DN(); last=i+1; }; break; case 'H': { out+=spec->home; last=i+1; }; break; case '%': { out+='%'; last=i+1; }; break; default: { logger.msg(Arc::WARNING, "Undefined control sequence: %%%s", in[i]); }; break; }; }; }; if(i>last) out+=in.substr(last); return out; } bool userspec_t::refresh(void) { if(!map) return false; home=""; uid=-1; gid=-1; const char* name = map.unix_name(); const char* group = map.unix_group(); if(name == NULL) return false; if(name[0] == 0) return false; struct passwd pw_; struct group gr_; struct passwd *pw; struct group *gr; char buf[BUFSIZ]; getpwnam_r(name,&pw_,buf,BUFSIZ,&pw); if(pw == NULL) { logger.msg(Arc::ERROR, "Local user %s does not exist", name); return false; }; uid=pw->pw_uid; home=pw->pw_dir; gid=pw->pw_gid; if(group && group[0]) { getgrnam_r(group,&gr_,buf,BUFSIZ,&gr); if(gr == NULL) { logger.msg(Arc::WARNING, "Local group %s does not exist", group); } else { gid=gr->gr_gid; }; }; logger.msg(Arc::INFO, "Remapped to local user: %s", name); logger.msg(Arc::INFO, "Remapped to local id: %i", uid); logger.msg(Arc::INFO, "Remapped to local group id: %i", gid); if(group && group[0]) logger.msg(Arc::INFO, "Remapped to local group name: %s", group); logger.msg(Arc::INFO, "Remapped user's home: %s", home); return true; } //AuthResult userspec_t::mapname(const char* line) { // AuthResult res = map.mapname(line); // if(res == AAA_POSITIVE_MATCH) refresh(); // return res; //} AuthResult userspec_t::mapgroup(const char* rule, const char* line) { AuthResult res = map.mapgroup(rule, line); if(res == AAA_POSITIVE_MATCH) refresh(); return res; } bool userspec_t::set_map_policy(const char* rule, const char* line) { return map.set_map_policy(rule, line); } //AuthResult userspec_t::mapvo(const char* line) { // AuthResult res = map.mapvo(line); // if(res == AAA_POSITIVE_MATCH) refresh(); // return res; //} const char* userspec_t::get_uname(void) const { const char* name = NULL; if((bool)map) { name=map.unix_name(); } else if((bool)default_map) { name=default_map.unix_name(); }; if(!name) name=""; return name; } const char* userspec_t::get_gname(void) const { const char* group = NULL; if((bool)map) { group=map.unix_group(); } else if((bool)default_map) { 
group=default_map.unix_group(); }; if(!group) group=""; return group; } nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/arc-gridftpd-start.in0000644000000000000000000000013214152153376025612 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.510647552 30 ctime=1638455099.209559581 nordugrid-arc-6.14.0/src/services/gridftpd/arc-gridftpd-start.in0000644000175000002070000001145114152153376025601 0ustar00mockbuildmock00000000000000#!/bin/bash add_library_path() { location="$1" if [ ! "x$location" = "x" ] ; then if [ ! "$location" = "/usr" ] ; then libdir="$location/lib" libdir64="$location/lib64" if [ -d "$libdir64" ] ; then if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH="$libdir64" else LD_LIBRARY_PATH="$libdir64:$LD_LIBRARY_PATH" fi fi if [ -d "$libdir" ] ; then if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH="$libdir" else LD_LIBRARY_PATH="$libdir:$LD_LIBRARY_PATH" fi fi fi fi } prog=gridftpd RUN=yes send_systemd_notify() { # return if no systemd-notify found type systemd-notify >/dev/null 2>&1 || return systemd-notify "$@" } log_failure_msg() { send_systemd_notify --status "Error: $@" echo $@ } # sysconfig files if [ -r /etc/sysconfig/nordugrid ]; then . /etc/sysconfig/nordugrid elif [ -r /etc/default/nordugrid ]; then . /etc/default/nordugrid fi if [ -r /etc/sysconfig/arc-${prog} ]; then . /etc/sysconfig/arc-${prog} elif [ -r /etc/default/arc-${prog} ]; then . /etc/default/arc-${prog} fi if [ "$RUN" != "yes" ] ; then echo "arc-${prog} disabled, please adjust the configuration to your" echo "needs and then set RUN to 'yes' in /etc/default/arc-${prog} to enable it." exit 0 fi # GLOBUS_LOCATION GLOBUS_LOCATION=${GLOBUS_LOCATION:-@DEFAULT_GLOBUS_LOCATION@} if [ ! -d "$GLOBUS_LOCATION" ]; then log_failure_msg "GLOBUS_LOCATION ($GLOBUS_LOCATION) not found" exit 1 fi export GLOBUS_LOCATION # ARC_LOCATION ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ ! -d "$ARC_LOCATION" ]; then log_failure_msg "ARC_LOCATION ($ARC_LOCATION) not found" exit 1 fi export ARC_LOCATION readorigconfigvar() { value=`$ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --config "$1" -b "$3" -o "$2" 2>/dev/null` if [ $? -eq 0 ] ; then echo "$value" exit 0 else exit 1 fi } readconfigvar() { fname="$1" optname="$2" blocks="" while [ ! -z "$3" ] ; do blocks="$blocks -b $3" shift done value=`$ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --runconfig "$fname" --load $blocks -o "$optname" 2>/dev/null` if [ $? -eq 0 ] ; then echo "$value" exit 0 else exit 1 fi } check_cert() { X509_USER_CERT=`readconfigvar "$ARC_RUNTIME_CONFIG" x509_host_cert gridftpd common` X509_USER_KEY=`readconfigvar "$ARC_RUNTIME_CONFIG" x509_host_key gridftpd common` if [ ! -f "$X509_USER_CERT" ] ; then log_failure_msg "Host certificate $X509_USER_CERT is not found" exit 1 fi if [ ! -f "$X509_USER_KEY" ] ; then log_failure_msg "Host key $X509_USER_KEY is not found" exit 1 fi # check permissions on key perms=`stat -L -c %a "$X509_USER_KEY"` if [ "$perms" != "600" ] && [ "$perms" != "400" ] ; then log_failure_msg "Host key must be readable only by user" exit 1 fi } # ARC_CONFIG if [ "x$ARC_CONFIG" = "x" ]; then if [ -r $ARC_LOCATION/etc/arc.conf ]; then ARC_CONFIG=$ARC_LOCATION/etc/arc.conf elif [ -r /etc/arc.conf ]; then ARC_CONFIG=/etc/arc.conf fi fi if [ ! 
-r "$ARC_CONFIG" ]; then log_failure_msg "ARC configuration not found (usually /etc/arc.conf)" exit 1 fi # VOMS_LOCATION VOMS_LOCATION=${VOMS_LOCATION:-@DEFAULT_VOMS_LOCATION@} add_library_path "$VOMS_LOCATION" add_library_path "$GLOBUS_LOCATION" if [ "x$LD_LIBRARY_PATH" = "x" ]; then LD_LIBRARY_PATH=$ARC_LOCATION/@libsubdir@ else LD_LIBRARY_PATH=$ARC_LOCATION/@libsubdir@:$LD_LIBRARY_PATH fi export LD_LIBRARY_PATH # PID file PID_FILE=`readorigconfigvar "$ARC_CONFIG" pidfile gridftpd` if [ "x$PID_FILE" = "x" ]; then # Missing default value for pidfile means no service block is present log_failure_msg "ARC configuration is missing [gridftpd] block" exit 1 fi if [ "$1" = "--getpidfile" ] ; then echo $PID_FILE exit 0 fi ARC_RUNTIME_CONFIG=`echo "$PID_FILE" | sed 's#\([^\./]*\)\.[^\./]*$#\1#'` ARC_RUNTIME_CONFIG="${ARC_RUNTIME_CONFIG}.cfg" CMD="$ARC_LOCATION/sbin/$prog" if [ ! -x "$CMD" ]; then log_failure_msg "Missing $CMD executable" exit 1 fi # Pre-process configuration $ARC_LOCATION/@pkglibexecsubdir@/arcconfig-parser --config "$ARC_CONFIG" --runconfig "$ARC_RUNTIME_CONFIG" --save 2>/dev/null if [ $? -ne 0 ] ; then log_failure_msg "ARC configuration processing failed" exit 1 fi CMD="$CMD -c $ARC_RUNTIME_CONFIG" CMD="$CMD -P $PID_FILE" LOGFILE=`readconfigvar "$ARC_RUNTIME_CONFIG" logfile gridftpd` USERNAME=`readconfigvar "$ARC_RUNTIME_CONFIG" user gridftpd` if [ ! -d `dirname "$LOGFILE"` ]; then mkdir -p `dirname "$LOGFILE"` if [ ! -z "$USERNAME" ] ; then chown "$USERNAME" `dirname "$LOGFILE"` fi fi if [ ! -z "$USERNAME" ] ; then if [ -f "$LOGFILE" ] ; then chown "$USERNAME" "$LOGFILE" fi fi check_cert exec $CMD "$@" nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/config.cpp0000644000000000000000000000013214152153376023532 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.215559671 nordugrid-arc-6.14.0/src/services/gridftpd/config.cpp0000644000175000002070000000117314152153376023521 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include "conf.h" static const char* default_central_config_file = DEFAULT_CENTRAL_CONFIG_FILE; const char* config_file = NULL; std::string config_open_gridftp(Arc::ConfigFile &cfile) { std::string config_name; if(config_file) { config_name=config_file; } else if((config_name=Arc::GetEnv("ARC_CONFIG")).empty()) { config_name=default_central_config_file; }; if(!cfile.open(config_name)) return ""; // Set environment variable for other tools Arc::SetEnv("ARC_CONFIG",config_name.c_str()); return config_name; } nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/arc-gridftpd.in0000644000000000000000000000013214152153376024457 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.510647552 30 ctime=1638455099.207559551 nordugrid-arc-6.14.0/src/services/gridftpd/arc-gridftpd.in0000644000175000002070000000723114152153376024447 0ustar00mockbuildmock00000000000000#!/bin/bash # # Init file for the ARC gridftp server # # chkconfig: 2345 75 25 # description: ARC gridftpd # # config: /etc/sysconfig/nordugrid # config: /etc/sysconfig/gridftpd # config: @prefix@/etc/arc.conf # config: /etc/arc.conf ### BEGIN INIT INFO # Provides: arc-gridftpd # Required-Start: $local_fs $remote_fs # Required-Stop: $local_fs $remote_fs # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 # Short-Description: ARC gridftpd # Description: ARC gridftp server ### END INIT INFO # source function library if [ -f /etc/init.d/functions ]; then . 
/etc/init.d/functions log_success_msg() { echo -n "$@" success "$@" echo } log_warning_msg() { echo -n "$@" warning "$@" echo } log_failure_msg() { echo -n "$@" failure "$@" echo } elif [ -f /lib/lsb/init-functions ]; then . /lib/lsb/init-functions else echo "Error: Cannot source neither init.d nor lsb functions" exit 1 fi prog=gridftpd # sysconfig files if [ -r /etc/sysconfig/nordugrid ]; then . /etc/sysconfig/nordugrid elif [ -r /etc/default/nordugrid ]; then . /etc/default/nordugrid fi if [ -r /etc/sysconfig/arc-${prog} ]; then . /etc/sysconfig/arc-${prog} elif [ -r /etc/default/arc-${prog} ]; then . /etc/default/arc-${prog} fi # ARC_LOCATION ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ ! -d "$ARC_LOCATION" ]; then log_failure_msg "ARC_LOCATION ($ARC_LOCATION) not found" exit 1 fi # PID and lock file PID_FILE=`${ARC_LOCATION}/@pkgdatasubdir@/arc-gridftpd-start --getpidfile` if [ $? -ne 0 ]; then # When --getpidfile fails it returns the error on stdout log_failure_msg "$PID_FILE" exit 1 fi if [ `id -u` = 0 ] ; then # Debian does not have /run/lock/subsys if [ -d /run/lock/subsys ]; then LOCKFILE=/run/lock/subsys/$prog else LOCKFILE=/run/lock/$prog fi else LOCKFILE=$HOME/$prog.lock fi start() { echo -n "Starting $prog: " # Check if we are already running if [ -f "$PID_FILE" ]; then read pid < "$PID_FILE" if [ "x$pid" != "x" ]; then ps -p "$pid" -o comm 2>/dev/null | grep "^$prog$" 1>/dev/null 2>/dev/null if [ $? -eq 0 ] ; then log_success_msg "already running (pid $pid)" return 0 fi fi rm -f "$PID_FILE" "$LOCKFILE" fi ${ARC_LOCATION}/@pkgdatasubdir@/arc-gridftpd-start RETVAL=$? if [ $RETVAL -eq 0 ]; then touch $LOCKFILE log_success_msg else log_failure_msg fi return $RETVAL } stop() { echo -n "Stopping $prog: " if [ -f "$PID_FILE" ]; then read pid < "$PID_FILE" if [ ! -z "$pid" ] ; then kill "$pid" RETVAL=$? if [ $RETVAL -eq 0 ]; then log_success_msg else log_failure_msg fi sleep 1 kill -9 "$pid" 1>/dev/null 2>&1 rm -f "$PID_FILE" "$LOCKFILE" else RETVAL=1 log_failure_msg "$prog shutdown - pidfile is empty" fi else RETVAL=0 log_success_msg "$prog shutdown - already stopped" fi return $RETVAL } status() { if [ -f "$PID_FILE" ]; then read pid < "$PID_FILE" if [ "$pid" != "" ]; then if ps -p "$pid" > /dev/null; then echo "$1 (pid $pid) is running..." return 0 fi echo "$1 stopped but pid file exists" return 1 fi fi if [ -f $LOCKFILE ]; then echo "$1 stopped but lockfile exists" return 2 fi echo "$1 is stopped" return 3 } restart() { stop start } case "$1" in start) start ;; stop) stop ;; status) status $prog ;; restart | force-reload) restart ;; reload) ;; condrestart | try-restart) [ -f $LOCKFILE ] && restart || : ;; *) echo "Usage: $0 {start|stop|status|restart|force-reload|reload|condrestart|try-restart}" exit 1 ;; esac exit $? 
nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/misc.h0000644000000000000000000000013114152153376022664 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.511647567 30 ctime=1638455099.224559806 nordugrid-arc-6.14.0/src/services/gridftpd/misc.h0000644000175000002070000000036714152153376022660 0ustar00mockbuildmock00000000000000#ifdef __USE_POSIX #include #else #define __USE_POSIX #include #undef __USE_POSIX #endif #include std::string timetostring(time_t t); std::string dirstring(bool dir,long long unsigned int s,time_t t,const char *name); nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/gridftpd.8.in0000644000000000000000000000013114152153376024061 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.511647567 30 ctime=1638455099.210559596 nordugrid-arc-6.14.0/src/services/gridftpd/gridftpd.8.in0000644000175000002070000000245314152153376024053 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH GRIDFTPD 8 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid System Managers Manual" .SH NAME gridftpd \- ARC GridFTP Server .SH SYNOPSIS .B gridftpd [\fIOPTION\fR]... .SH DESCRIPTION .\" Add any additional description here .PP gridftpd is the daemon running the ARC GridFTP server. This server has plugins to allow job submission from Grid clients and to expose a local filesystem as a Grid Storage Element. For more information see "The ARC Computing Element System Administrator Guide" (NORDUGRID-MANUAL-20). .SH OPTIONS .TP \fB\-h\fR display help text .TP \fB\-p\fR \fInumber\fR port on which to listen .TP \fB\-c\fR \fIpath\fR full path to config file .TP \fB\-n\fR \fInumber\fR maximum number of connections allowed .TP \fB\-b\fR \fInumber\fR default buffer size .TP \fB\-B\fR \fInumber\fR maximum buffer size .TP \fB\-F\fR run daemon in foreground .TP \fB\-U\fR \fIuser[:group]\fR user (and group) name to switch to after starting .TP \fB\-L\fR \fIpath\fR path to log file .TP \fB\-P\fR \fIpath\fR path to pid file .TP \fB\-d\fR \fInumber\fR debug level, from 0 (lowest verbosity) to 5 (highest verbosity) .PP .SH REPORTING BUGS Report bugs to http://bugzilla.nordugrid.org/ .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH FILES .BR /etc/arc.conf .SH AUTHOR David Cameron nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/datalist.cpp0000644000000000000000000000013214152153376024072 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.219559731 nordugrid-arc-6.14.0/src/services/gridftpd/datalist.cpp0000644000175000002070000001277014152153376024066 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include "fileroot.h" #include "names.h" #include "commands.h" #include "misc.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"GridFTP_Commands"); static std::string timetostring_rfc3659(time_t t) { struct tm tt; struct tm *tp; tp=gmtime_r(&t,&tt); if(tp == NULL) return ""; char buf[16]; snprintf(buf,sizeof(buf),"%04u%02u%02u%02u%02u%02u", tp->tm_year+1900,tp->tm_mon+1,tp->tm_mday, tp->tm_hour,tp->tm_min,tp->tm_sec); buf[sizeof(buf)-1]=0; return std::string(buf); } int make_list_string(const DirEntry &entr,GridFTP_Commands::list_mode_t mode, unsigned char* buf,int size,const char *prefix) { std::string str; switch(mode) { case GridFTP_Commands::list_mlsd_mode: { if(entr.is_file) { str+="type=file;"; } else { str+="type=dir;"; }; str+="size="+Arc::tostring(entr.size)+";"; 
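// Together with the modify= and perm= facts appended below, this builds one
// MLSD fact line per directory entry, e.g. (illustrative only):
//   type=file;size=1024;modify=20211202123456;perm=adfrw; /jobs/1/stdout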
str+="modify="+timetostring_rfc3659(entr.modified)+";"; str+="perm="; if(entr.is_file) { if(entr.may_append) str+="a"; if(entr.may_delete) str+="d"; if(entr.may_rename) str+="f"; if(entr.may_read) str+="r"; if(entr.may_write) str+="w"; } else { if(entr.may_create) str+="c"; if(entr.may_delete) str+="d"; if(entr.may_chdir) str+="e"; if(entr.may_rename) str+="f"; if(entr.may_dirlist)str+="l"; if(entr.may_purge) str+="p"; }; str+="; "; str+=prefix+entr.name; }; break; case GridFTP_Commands::list_list_mode: { if(entr.is_file) { str="------- 1 user group "+timetostring(entr.modified)+" "+ Arc::tostring(entr.size,16)+" "+prefix+entr.name; } else { str="d------ 1 user group "+timetostring(entr.modified)+" "+ Arc::tostring(entr.size,16)+" "+prefix+entr.name; }; }; break; case GridFTP_Commands::list_nlst_mode: { str=prefix+entr.name; }; break; default: { }; break; }; int len = str.length(); if(len > (size-3)) { str.resize(size-6); str+="..."; len=size-3; }; strcpy((char*)buf,str.c_str()); buf[len]='\r'; len++; buf[len]='\n'; len++; buf[len]=0; return len; } /* *** list transfer callbacks *** */ void GridFTP_Commands::list_retrieve_callback(void* arg,globus_ftp_control_handle_t*,globus_object_t *error,globus_byte_t* /* buffer */,globus_size_t /* length */,globus_off_t /* offset */,globus_bool_t /* eof */) { GridFTP_Commands *it = (GridFTP_Commands*)arg; globus_mutex_lock(&(it->data_lock)); it->last_action_time=time(NULL); if(it->check_abort(error)) { it->free_data_buffer(); globus_mutex_unlock(&(it->data_lock)); return; }; globus_bool_t eodf; globus_size_t size; if(it->dir_list_pointer == it->dir_list.end()) { it->virt_offset=0; it->transfer_mode=false; it->free_data_buffer(); logger.msg(Arc::VERBOSE, "Closing channel (list)"); it->send_response("226 Transfer completed.\r\n"); globus_mutex_unlock(&(it->data_lock)); return; }; globus_ftp_control_local_send_eof(&(it->handle),GLOBUS_TRUE); ++(it->dir_list_pointer); if(it->dir_list_pointer == it->dir_list.end()) { size=0; eodf=GLOBUS_TRUE; } else { size=make_list_string(*(it->dir_list_pointer),it->list_mode, it->data_buffer[0].data,it->data_buffer_size, it->list_name_prefix.c_str()); eodf=GLOBUS_FALSE; }; globus_result_t res; res=globus_ftp_control_data_write(&(it->handle), (globus_byte_t*)(it->data_buffer[0].data), size,it->list_offset,eodf, &list_retrieve_callback,it); if(res != GLOBUS_SUCCESS) { it->free_data_buffer(); it->force_abort(); globus_mutex_unlock(&(it->data_lock)); return; }; globus_mutex_unlock(&(it->data_lock)); } void GridFTP_Commands::list_connect_retrieve_callback(void* arg,globus_ftp_control_handle_t*,unsigned int /* stripendx */,globus_bool_t /* reused */,globus_object_t *error) { GridFTP_Commands *it = (GridFTP_Commands*)arg; globus_mutex_lock(&(it->data_lock)); it->last_action_time=time(NULL); if(it->check_abort(error)) { globus_mutex_unlock(&(it->data_lock)); return; }; it->data_buffer_size=4096; it->data_buffer_num=1; if(!it->allocate_data_buffer()) { it->force_abort(); globus_mutex_unlock(&(it->data_lock)); return; }; it->dir_list_pointer=it->dir_list.begin(); globus_size_t size; globus_bool_t eodf; if(it->dir_list_pointer == it->dir_list.end()) { size=0; eodf=GLOBUS_TRUE; } else { size=make_list_string(*(it->dir_list_pointer),it->list_mode, it->data_buffer[0].data,it->data_buffer_size, it->list_name_prefix.c_str()); eodf=GLOBUS_FALSE; }; it->list_offset = 0; logger.msg(Arc::VERBOSE, "Data channel connected (list)"); globus_ftp_control_local_send_eof(&(it->handle),GLOBUS_TRUE); globus_result_t res; 
res=globus_ftp_control_data_write(&(it->handle), (globus_byte_t*)(it->data_buffer[0].data), size,it->list_offset,eodf, &list_retrieve_callback,it); if(res != GLOBUS_SUCCESS) { it->free_data_buffer(); it->force_abort(); globus_mutex_unlock(&(it->data_lock)); return; }; it->list_offset+=size; globus_mutex_unlock(&(it->data_lock)); } nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/fileroot_config.cpp0000644000000000000000000000013214152153376025435 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.220559746 nordugrid-arc-6.14.0/src/services/gridftpd/fileroot_config.cpp0000644000175000002070000003765414152153376025441 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #ifdef HAVE_SSTREAM #include #else #include #endif #include #include #include #include "conf.h" #include "names.h" #include "misc.h" #include "auth/auth.h" #include "conf/conf_vo.h" #include "fileroot.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"FileRoot"); int FileRoot::config(gridftpd::Daemon &daemon,ServerParams* params) { /* open and read configuration file */ Arc::ConfigFile cfile; Arc::ConfigIni* cf = NULL; config_open_gridftp(cfile); if(!cfile.is_open()) { logger.msg(Arc::ERROR, "configuration file not found"); return 1; }; cf=new Arc::ConfigIni(cfile); cf->AddSection("common"); // 0 cf->AddSection("gridftpd"); // 1 cf->AddSection("mapping"); // 2 cf->SetSectionIndicator("."); for(;;) { std::string rest; std::string command; cf->ReadNext(command,rest); if(command.length() == 0) break; /* EOF */ if(daemon.config(cf->Section(),command,rest) == -1) { cfile.close(); delete cf; return 1; }; if(cf->SubSection()[0] != 0) continue; if(cf->SectionNum() == 0) { // [common] } else if(cf->SectionNum() == 1) { // [gridftpd] if(command == "port") { if(params) { if(sscanf(rest.c_str(),"%u",&(params->port)) != 1) { logger.msg(Arc::ERROR, "Wrong port number in configuration"); cfile.close(); delete cf; return 1; }; }; } else if(command == "maxconnections") { if(params) { if(sscanf(rest.c_str(),"%u",&(params->max_connections)) != 1) { logger.msg(Arc::ERROR, "Wrong maxconnections number in configuration"); cfile.close(); delete cf; return 1; }; }; } else if(command == "defaultbuffer") { if(params) { if(sscanf(rest.c_str(),"%u",&(params->default_buffer)) != 1) { logger.msg(Arc::ERROR, "Wrong defaultbuffer number in configuration"); cfile.close(); delete cf; return 1; }; }; } else if(command == "maxbuffer") { if(params) { if(sscanf(rest.c_str(),"%u",&(params->max_buffer)) != 1) { logger.msg(Arc::ERROR, "Wrong maxbuffer number in configuration"); cfile.close(); delete cf; return 1; }; }; } else if(command == "firewall") { if(params) { std::string value=rest; int errcode; struct hostent* host; struct hostent hostbuf; #ifdef _MACOSX char buf[BUFSIZ]; if((host=gethostbyname2(value.c_str(),AF_INET)) == NULL) { //TODO: Deal with IPv6 #else #ifndef _AIX #ifndef sun char buf[BUFSIZ]; if(gethostbyname_r(value.c_str(), &hostbuf,buf,sizeof(buf),&host,&errcode)) { #else char buf[BUFSIZ]; if((host=gethostbyname_r(value.c_str(), &hostbuf,buf,sizeof(buf),&errcode)) == NULL) { #endif #else struct hostent_data buf[BUFSIZ]; if((errcode=gethostbyname_r(value.c_str(), (host=&hostbuf),buf))) { #endif #endif logger.msg(Arc::ERROR, "Can't resolve host %s", value); cfile.close(); delete cf; return 1; }; if( (host == NULL) || (host->h_length < sizeof(struct in_addr)) || (host->h_addr_list[0] == NULL) ) { logger.msg(Arc::ERROR, "Can't resolve host %s", value); 
cfile.close(); delete cf; return 1; }; unsigned char* addr = (unsigned char*)(&(((struct in_addr*)(host->h_addr_list[0]))->s_addr)); params->firewall[0]=addr[0]; params->firewall[1]=addr[1]; params->firewall[2]=addr[2]; params->firewall[3]=addr[3]; }; }; }; }; cfile.close(); delete cf; return 0; } static int const cfgsec_mapping_n = 0; static int const cfgsec_common_n = 1; static int const cfgsec_group_n = 2; static int const cfgsec_gridftpd_n = 3; static int const cfgsec_vo_n = 4; // Second level configuration method used when forking for new connection. int FileRoot::config(Arc::ConfigIni &cf,std::string &pluginpath) { typedef enum { conf_state_none, conf_state_single, conf_state_group, // inside [group] configuration conf_state_plugin // inside plugin configuration } config_state_t; config_state_t st = conf_state_none; typedef enum { group_match_no_command, // no access command group_match_allow, // allowaccess matched group_match_deny, // denyaccess matched group_match_none // nothing matched } group_match_t; group_match_t group_match = group_match_no_command; user.user.select_group(NULL); std::string group_name; // =config_next_arg(rest); int group_decision = AAA_NO_MATCH; std::string plugin_config; std::string plugin_name; std::string plugin_path; // for telling plugin its own endpoint // hostname can be overridden in configuration std::string hostname; char hostn[256]; if (gethostname(hostn, sizeof(hostn)) != 0) logger.msg(Arc::WARNING, "Could not determine hostname from gethostname()"); else hostname = hostn; std::string port = "2811"; // Hard-coded, but it is standard default for(;;) { std::string rest; std::string command; cf.ReadNext(command,rest); if(group_match == group_match_deny) { // skip configuration for wrong auth group if(!cf.SectionNew()) continue; }; int r = gridftpd::config_vo(user.user,cf,command,rest,&logger); // [userlist] processing if(r==0) continue; // processed if(cf.SectionNew()) { // section finished if((group_match == group_match_no_command) || (group_match == group_match_allow)) switch(st) { case conf_state_group: { // authgroup processing ended if(group_name.length() == 0) { logger.msg(Arc::ERROR, "unnamed group"); return 1; }; if(group_decision == AAA_POSITIVE_MATCH) { user.user.add_group(group_name); }; }; break; case conf_state_plugin: { // plugin processing ended if(plugin_name.length() == 0) { logger.msg(Arc::WARNING, "undefined plugin name"); break; }; if(plugin_path.length() == 0) { logger.msg(Arc::WARNING, "undefined virtual plugin path"); break; }; plugin_path=subst_user_spec(plugin_path,&user); if(!Arc::CanonicalDir(plugin_path,false)) { logger.msg(Arc::WARNING, "bad directory for plugin: %s", plugin_path); break; }; /* look if path is not already registered */ bool already_have_this_path=false; for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if((*i) == plugin_path) { already_have_this_path=true; break; }; }; if(already_have_this_path) { logger.msg(Arc::WARNING, "Already have directory: %s", plugin_path); break; }; logger.msg(Arc::INFO, "Registering directory: %s with plugin: %s", plugin_path, plugin_name); plugin_name=pluginpath+'/'+plugin_name; plugin_config+="endpoint gsiftp://"+hostname+":"+port+"/"+plugin_path+"\n"; plugin_config+="end\n"; #ifdef HAVE_SSTREAM std::stringstream fake_cfile(plugin_config); #else std::strstream fake_cfile; fake_cfile<AddSection("mapping"); cf->AddSection("common"); cf->AddSection("authgroup"); cf->AddSection("gridftpd"); cf->AddSection("userlist"); cf->SetSectionIndicator("."); /* keep information 
about user */ if(!user.fill(auth,handle,config_file.c_str())) { logger.msg(Arc::ERROR, "failed to process client identification"); delete cf; return 1; }; std::string pluginpath; std::list pluginpaths = Arc::ArcLocation::GetPlugins(); if(pluginpaths.empty()) { logger.msg(Arc::ERROR, "failed to identify plugins path"); delete cf; return 1; }; pluginpath=pluginpaths.front(); int r; r = config(*cf,pluginpath); cfile.close(); delete cf; cf = NULL; if(r != 0) return r; /* must be sorted to make sure we can always find right directory */ nodes.sort(FileNode::compare); /* create dummy directories */ int nn=nodes.size(); for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if(nn==0) break; std::string name = i->point; for(;remove_last_name(name);) { if(name.length() == 0) break; bool found = false; std::list::iterator ii=i; for(;;) { ++ii; if(ii == nodes.end()) break; if(name == ii->point) { found=true; break; }; }; if(!found) { /* add dummy dir */ logger.msg(Arc::ERROR, "Registering dummy directory: %s", name); nodes.push_back(FileNode(name.c_str())); } else { break; }; }; nn--; }; opened_node=nodes.end(); user.free(); return 0; } nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/fileroot.h0000644000000000000000000000013214152153376023555 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.223559791 nordugrid-arc-6.14.0/src/services/gridftpd/fileroot.h0000644000175000002070000001664614152153376023557 0ustar00mockbuildmock00000000000000#ifndef GRID_SERVER_FILEROOT_H #define GRID_SERVER_FILEROOT_H #include #include #include #include #include #include #include #include "userspec.h" #include "conf.h" #include "conf/daemon.h" typedef enum { GRIDFTP_OPEN_RETRIEVE = 1, GRIDFTP_OPEN_STORE = 2 } open_modes; class DirEntry { public: typedef enum { minimal_object_info = 0, basic_object_info = 1, full_object_info = 2 } object_info_level; std::string name; bool is_file; time_t changed; time_t modified; unsigned long long size; uid_t uid; gid_t gid; bool may_rename; // bool may_delete; // bool may_create; // for dirs bool may_chdir; // for dirs bool may_dirlist; // for dirs bool may_mkdir; // for dirs bool may_purge; // for dirs bool may_read; // for files bool may_append; // for files bool may_write; // for files DirEntry(bool is_file_ = false,std::string name_ = ""): name(name_),is_file(is_file_), changed(0),modified(0),size(0),uid(0),gid(0), may_rename(false),may_delete(false), may_create(false),may_chdir(false),may_dirlist(false), may_mkdir(false),may_purge(false), may_read(false),may_append(false),may_write(false) { }; void reset(void) { name=""; is_file=false; changed=0; modified=0; size=0; uid=0; gid=0; may_rename=false; may_delete=false; may_create=false; may_chdir=false; may_dirlist=false; may_mkdir=false; may_purge=false; may_read=false; may_append=false; may_write=false; }; }; class FilePlugin { /* this is the base class for plugins */ public: std::string error_description; virtual std::string get_error_description() const { return error_description; }; /* virtual functions are not defined in base class */ virtual int open(const char*,open_modes,unsigned long long int /* size */ = 0) { return 1; }; virtual int close(bool /* eof */ = true) { return 1; }; virtual int read(unsigned char *,unsigned long long int /* offset */,unsigned long long int* /* size */) { return 1; }; virtual int write(unsigned char *,unsigned long long int /* offset */,unsigned long long int /* size */) { return 1; }; virtual int readdir(const char* /* name 
*/,std::list& /* dir_list */,DirEntry::object_info_level /* mode */ = DirEntry::basic_object_info) { return 1; }; virtual int checkdir(std::string& /* dirname */) { return 1; }; virtual int checkfile(std::string& /* name */,DirEntry& /* info */,DirEntry::object_info_level /* mode */) { return 1; }; virtual int makedir(std::string& /* dirname */) { return 1; }; virtual int removefile(std::string& /* name */) { return 1; }; virtual int removedir(std::string& /* dirname */) { return 1; }; int count; FilePlugin(void) { count=0; /* after creation acquire MUST be called */ }; int acquire(void) { count++; return count; }; int release(void); virtual ~FilePlugin(void) { /* dlclose is done externally - yet */ }; protected: std::string endpoint; // endpoint (URL) corresponding to plugin }; class FileNode; /* this is the only exported C function from plugin */ typedef FilePlugin* (*plugin_init_t)(std::istream &cfile,userspec_t const &user,FileNode &node); class FileNode { public: std::string point; private: FilePlugin *plug; std::string plugname; void* handle; plugin_init_t init; public: static const std::string no_error; FileNode(void) { /* empty uninitialized - can be used only to copy to it later */ point=""; plugname=""; handle=NULL; init=NULL; plug=NULL; }; /* following two constructors should be used only for copying in list */ FileNode(const FileNode &node) { point=node.point; plugname=node.plugname; plug=node.plug; handle=node.handle; init=NULL; if(plug) plug->acquire(); }; FileNode& operator= (const FileNode &node); FileNode(const char* dirname) { plug=NULL; init=NULL; point=std::string(dirname); handle=NULL; return; }; /* this constructor is for real load of plugin - it should be used to create really new FileNode */ FileNode(char const* dirname, char const* plugin, std::istream &cfile,userspec_t &user); ~FileNode(void); bool has_plugin(void) const { return (plug != NULL); }; FilePlugin* get_plugin(void) const { return plug; }; const std::string& get_plugin_path(void) const { return plugname; }; static bool compare(const FileNode &left,const FileNode &right) { return (left.point.length() > right.point.length()); }; bool operator> (const FileNode &right) const { return (point.length() > right.point.length()); }; bool operator< (const FileNode &right) const { return (point.length() < right.point.length()); }; bool operator> (char* right) const { return (point.length() > strlen(right)); }; bool operator< (char* right) const { return (point.length() < strlen(right)); }; bool operator== (std::string right) const { return (point == right); }; bool belongs(const char* name); bool is_in_dir(const char* name); std::string last_name(void); int open(const char* name,open_modes mode,unsigned long long int size = 0); int close(bool eof = true); int write(unsigned char *buf,unsigned long long int offset,unsigned long long int size); int read(unsigned char *buf,unsigned long long int offset,unsigned long long int *size); int readdir(const char* name,std::list &dir_list,DirEntry::object_info_level mode = DirEntry::basic_object_info); int checkdir(std::string &dirname); int checkfile(std::string &name,DirEntry &info,DirEntry::object_info_level mode); int makedir(std::string &dirname); int removedir(std::string &dirname); int removefile(std::string &name); std::string error(void) const { if(plug) return plug->get_error_description(); return no_error; }; }; class GridFTP_Commands; class FileRoot { friend class GridFTP_Commands; private: bool heavy_encryption; bool active_data; //bool unix_mapped; std::string 
error; public: class ServerParams { public: unsigned int port; unsigned int firewall[4]; unsigned int max_connections; unsigned int default_buffer; unsigned int max_buffer; ServerParams(void):port(0),max_connections(0),default_buffer(0),max_buffer(0) { firewall[0]=0; firewall[1]=0; firewall[2]=0; firewall[3]=0; }; }; std::list nodes; std::string cur_dir; userspec_t user; FileRoot(void); ~FileRoot(void) { }; std::list::iterator opened_node; int open(const char* name,open_modes mode,unsigned long long int size = 0); int close(bool eof = true); int write(unsigned char *buf,unsigned long long int offset,unsigned long long int size); int read(unsigned char *buf,unsigned long long int offset,unsigned long long int *size); int readdir(const char* name,std::list &dir_list,DirEntry::object_info_level mode); std::string cwd() const { return "/"+cur_dir; }; int cwd(std::string &name); int mkd(std::string &name); int rmd(std::string &name); int rm(std::string &name); int size(const char* name,unsigned long long int *size); int time(const char* name,time_t *time); int checkfile(const char* name,DirEntry &obj,DirEntry::object_info_level mode); int config(globus_ftp_control_auth_info_t* auth,globus_ftp_control_handle_t *handle); int config(Arc::ConfigIni &cf,std::string &pluginpath); static int config(gridftpd::Daemon &daemon,ServerParams* params); }; #endif nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/conf.h0000644000000000000000000000013114152153376022656 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.222559776 nordugrid-arc-6.14.0/src/services/gridftpd/conf.h0000644000175000002070000000050414152153376022643 0ustar00mockbuildmock00000000000000#ifndef __GFS_CONF_H__ #define __GFS_CONF_H__ #include #include #include #include #include #define DEFAULT_CENTRAL_CONFIG_FILE "/etc/arc.conf" extern const char* config_file; std::string config_open_gridftp(Arc::ConfigFile &cfile); #endif // __GFS_CONF_H__ nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/fileroot.cpp0000644000000000000000000000013214152153376024110 xustar000000000000000030 mtime=1638455038.435646425 30 atime=1638455038.511647567 30 ctime=1638455099.216559686 nordugrid-arc-6.14.0/src/services/gridftpd/fileroot.cpp0000644000175000002070000003057714152153376024111 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include "fileroot.h" #include "names.h" #include "misc.h" static Arc::Logger logger(Arc::Logger::getRootLogger(),"FilePlugin"); const std::string FileNode::no_error(""); #define NO_PLUGIN(PATH) { logger.msg(Arc::ERROR, "No plugin is configured or authorised for requested path %s", PATH); } int FilePlugin::release(void) { count--; if(count < 0) { logger.msg(Arc::WARNING, "FilePlugin: more unload than load"); count=0; }; return count; } FileNode::FileNode(char const* dirname,char const* plugin,std::istream &cfile,userspec_t &user) { plug=NULL; init=NULL; point=std::string(dirname); plugname=std::string(plugin); // handle=dlopen(plugin,RTLD_LAZY); handle=dlopen(plugin,RTLD_NOW); if(!handle) { logger.msg(Arc::ERROR, dlerror()); logger.msg(Arc::ERROR, "Can't load plugin %s for access point %s", plugin, dirname); return; }; init=(plugin_init_t)dlsym(handle,"init"); if(init == NULL) { logger.msg(Arc::ERROR, "Plugin %s for access point %s is broken.", plugin, dirname); dlclose(handle); handle=NULL; return; }; if((plug=init(cfile,user,*this)) == NULL) { logger.msg(Arc::ERROR, "Plugin %s for access point %s is broken.", plugin, dirname); 
dlclose(handle); handle=NULL; init=NULL; return; }; if(plug->acquire() != 1) { logger.msg(Arc::ERROR, "Plugin %s for access point %s acquire failed (should never happen).", plugin, dirname); delete plug; dlclose(handle); handle=NULL; init=NULL; plug=NULL; return; }; } FileNode::~FileNode(void) { if(plug) if(plug->release() == 0) { logger.msg(Arc::VERBOSE, "Destructor with dlclose (%s)", point); delete plug; dlclose(handle); handle=NULL; init=NULL; plug=NULL; }; } std::string FileNode::last_name(void) { int pl=point.rfind('/'); if(pl == -1) return point; return point.substr(pl+1); } bool FileNode::belongs(const char* name) { int pl=point.length(); if(pl == 0) return true; int l=strlen(name); if (pl > l) return false; if(strncmp(point.c_str(),name,pl)) return false; if(pl == l) return true; if(name[pl] == '/') return true; return false; } FileNode& FileNode::operator= (const FileNode &node) { logger.msg(Arc::VERBOSE, "FileNode: operator= (%s <- %s) %lu <- %lu", point, node.point, (unsigned long int)this, (unsigned long int)(&node)); if(this == &node) return *this; if(plug) if(plug->release() == 0) { logger.msg(Arc::VERBOSE, "Copying with dlclose"); delete plug; dlclose(handle); handle=NULL; init=NULL; plug=NULL; }; point=node.point; plugname=node.plugname; plug=node.plug; handle=node.handle; return *this; } /* bool FileNode::is_in_dir(const char* name) { int l=strlen(name); if(point.length() <= l) return false; if(point[l] != '/') return false; if(strncmp(point.c_str(),name,l)) return false; return true; } */ /* should only last directory be shown ? */ bool FileNode::is_in_dir(const char* name) { int l=strlen(name); int pl=point.rfind('/'); /* returns -1 if not found */ if(pl == -1) { if(l == 0) { return true; }; }; if(l != pl) return false; if(strncmp(point.c_str(),name,l)) return false; return true; } int FileRoot::size(const char* name,unsigned long long int *size) { std::string new_name; if(name[0] != '/') { new_name=cur_dir+'/'+name; } else { new_name=name; }; error=FileNode::no_error; if(!Arc::CanonicalDir(new_name,false)) return 1; if(new_name.empty()) { (*size)=0; return 0; } for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if(i->belongs(new_name.c_str())) { DirEntry info; if(i->checkfile(new_name,info,DirEntry::basic_object_info) != 0) { error=i->error(); return 1; }; (*size)=info.size; return 0; }; }; NO_PLUGIN(name); return 1; } int FileRoot::time(const char* name,time_t *time) { std::string new_name; if(name[0] != '/') { new_name=cur_dir+'/'+name; } else { new_name=name; }; error=FileNode::no_error; if(!Arc::CanonicalDir(new_name,false)) return 1; if(new_name.empty()) { (*time)=0; return 0; }; for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if(i->belongs(new_name.c_str())) { DirEntry info; if(i->checkfile(new_name,info,DirEntry::basic_object_info) != 0) { error=i->error(); return 1; }; (*time)=info.modified; return 0; }; }; NO_PLUGIN(name); return 1; } int FileRoot::checkfile(const char* name,DirEntry &info,DirEntry::object_info_level mode) { std::string new_name; if(name[0] != '/') { new_name=cur_dir+'/'+name; } else { new_name=name; }; error=FileNode::no_error; if(!Arc::CanonicalDir(new_name,false)) return 1; if(new_name.empty()) { info.reset(); info.name="/"; info.is_file=false; return 0; }; for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if(i->belongs(new_name.c_str())) { if(i->checkfile(new_name,info,mode) != 0) { error=i->error(); return 1; }; info.name="/"+new_name; return 0; }; }; NO_PLUGIN(name); return 1; } int 
FileRoot::mkd(std::string& name) { std::string new_dir; if(name[0] != '/') { new_dir=cur_dir+'/'+name; } else { new_dir=name; }; error=FileNode::no_error; if(Arc::CanonicalDir(new_dir,false)) { for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if((*i) == new_dir) { /* already exists, at least virtually */ name=new_dir; return 0; }; if((*i).belongs(new_dir.c_str())) { if((*i).makedir(new_dir) == 0) { name=new_dir; return 0; }; error=i->error(); name=cur_dir; return 1; }; }; NO_PLUGIN(name); }; name=cur_dir; return 1; } int FileRoot::rmd(std::string& name) { std::string new_dir; if(name[0] != '/') { new_dir=cur_dir+'/'+name; } else { new_dir=name; }; error=FileNode::no_error; if(Arc::CanonicalDir(new_dir,false)) { for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if((*i) == new_dir) { /* virtual - not removable */ return 1; }; if(i->belongs(new_dir.c_str())) { int res = i->removedir(new_dir); error=i->error(); return res; }; }; NO_PLUGIN(name); }; return 1; } int FileRoot::rm(std::string& name) { std::string new_dir; if(name[0] != '/') { new_dir=cur_dir+'/'+name; } else { new_dir=name; }; error=FileNode::no_error; if(Arc::CanonicalDir(new_dir,false)) { for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if((*i) == new_dir) { /* virtual dir - not removable */ return 1; }; if(i->belongs(new_dir.c_str())) { int res = i->removefile(new_dir); error=i->error(); return res; }; }; NO_PLUGIN(name); }; return 1; } int FileRoot::cwd(std::string& name) { std::string new_dir; if(name[0] != '/') { new_dir=cur_dir+'/'+name; } else { new_dir=name; }; error=FileNode::no_error; if(Arc::CanonicalDir(new_dir,false)) { if(new_dir.length() == 0) { /* always can go to root ? */ cur_dir=new_dir; name=cur_dir; return 0; }; /* check if can cd */ for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if((*i) == new_dir) { cur_dir=new_dir; name=cur_dir; return 0; }; if((*i).belongs(new_dir.c_str())) { if((*i).checkdir(new_dir) == 0) { cur_dir=new_dir; name=cur_dir; return 0; }; error=i->error(); name=cur_dir; return 1; }; }; NO_PLUGIN(name); }; name="/"+cur_dir; return 1; } int FileRoot::open(const char* name,open_modes mode,unsigned long long int size) { std::string new_name; if(name[0] != '/') { new_name=cur_dir+'/'+name; } else { new_name=name; }; error=FileNode::no_error; if(!Arc::CanonicalDir(new_name,false)) { return 1; }; for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if(i->belongs(new_name.c_str())) { if(i->open(new_name.c_str(),mode,size) == 0) { opened_node=i; return 0; }; error=i->error(); return 1; }; }; NO_PLUGIN(name); return 1; } int FileRoot::close(bool eof) { error=FileNode::no_error; if(opened_node != nodes.end()) { int i=(*opened_node).close(eof); error=opened_node->error(); opened_node=nodes.end(); return i; }; return 1; } int FileRoot::read(unsigned char* buf,unsigned long long int offset,unsigned long long *size) { error=FileNode::no_error; if(opened_node != nodes.end()) { int res = (*opened_node).read(buf,offset,size); error=opened_node->error(); return res; }; return 1; } int FileRoot::write(unsigned char *buf,unsigned long long int offset,unsigned long long size) { error=FileNode::no_error; if(opened_node != nodes.end()) { int res = (*opened_node).write(buf,offset,size); error=opened_node->error(); return res; }; return 1; } /* 0 - ok , 1 - failure, -1 - this is a file */ int FileRoot::readdir(const char* name,std::list &dir_list,DirEntry::object_info_level mode) { std::string fullname; if(name[0] != '/') { fullname=cur_dir+'/'+name; } 
else { fullname=name; }; error=FileNode::no_error; if(!Arc::CanonicalDir(fullname,false)) return 1; int res = 1; for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if(i->belongs(fullname.c_str())) { res=i->readdir(fullname.c_str(),dir_list,mode); error=i->error(); break; }; }; if(res == -1) { /* means this is a file */ std::list::iterator di = dir_list.end(); --di; di->name="/"+fullname; return -1; }; for(std::list::iterator i=nodes.begin();i!=nodes.end();++i) { if(i->is_in_dir(fullname.c_str())) { DirEntry de; de.name=i->last_name(); de.is_file=false; // if(i->checkfile(i->point,de,mode) == 0) { // if(de.is_file) { // de.reset(); de.name=i->last_name(); de.is_file=false; /* TODO: fill other attributes */ // // }; // }; dir_list.push_front(de); res=0; }; }; return res; } FileRoot::FileRoot(void):error(FileNode::no_error) { cur_dir=""; opened_node=nodes.end(); heavy_encryption=true; active_data=true; //unix_mapped=false; } int FileNode::readdir(const char* name,std::list &dir_list,DirEntry::object_info_level mode) { if(plug) { plug->error_description=""; return plug->readdir(remove_head_dir_c(name,point.length()),dir_list,mode); }; return 0; } int FileNode::checkfile(std::string &name,DirEntry &info,DirEntry::object_info_level mode) { if(plug) { plug->error_description=""; std::string dname=remove_head_dir_s(name,point.length()); return plug->checkfile(dname,info,mode); }; return 1; } int FileNode::checkdir(std::string &dirname) { if(plug) { plug->error_description=""; std::string dname=remove_head_dir_s(dirname,point.length()); if(plug->checkdir(dname) == 0) { dirname=point+'/'+dname; return 0; }; }; return 1; } int FileNode::makedir(std::string &dirname) { if(plug) { plug->error_description=""; std::string dname=remove_head_dir_s(dirname,point.length()); return plug->makedir(dname); }; return 1; } int FileNode::removedir(std::string &dirname) { if(plug) { plug->error_description=""; std::string dname=remove_head_dir_s(dirname,point.length()); return plug->removedir(dname); }; return 1; } int FileNode::removefile(std::string &name) { if(plug) { plug->error_description=""; std::string dname=remove_head_dir_s(name,point.length()); return plug->removefile(dname); }; return 1; } int FileNode::open(const char* name,open_modes mode,unsigned long long int size) { if(plug) { plug->error_description=""; return plug->open(remove_head_dir_c(name,point.length()),mode,size); }; return 1; } int FileNode::close(bool eof) { if(plug) { plug->error_description=""; return plug->close(eof); }; return 1; } int FileNode::read(unsigned char *buf,unsigned long long int offset,unsigned long long *size) { if(plug) { plug->error_description=""; return plug->read(buf,offset,size); }; return 1; } int FileNode::write(unsigned char *buf,unsigned long long int offset,unsigned long long size) { if(plug) { plug->error_description=""; return plug->write(buf,offset,size); }; return 1; } nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/run0000644000000000000000000000013214152153473022306 xustar000000000000000030 mtime=1638455099.301560963 30 atime=1638455103.999631554 30 ctime=1638455099.301560963 nordugrid-arc-6.14.0/src/services/gridftpd/run/0000755000175000002070000000000014152153473022350 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/gridftpd/run/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376024420 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455099.298560918 
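The FileRoot code above always routes a request to the mount point with the longest matching prefix: the nodes list is sorted by FileNode::compare (longest point first) and FileNode::belongs() accepts a path only when the mount point is a prefix that ends exactly at the end of the path or on a '/' boundary. The following standalone sketch reproduces just that matching rule with plain strings; the mount points and request paths are invented for illustration and none of the gridftpd classes are used.

#include <iostream>
#include <list>
#include <string>

// Same rule as FileNode::belongs(): an empty mount point matches everything,
// otherwise the mount point must be a prefix ending at a '/' or at the end.
static bool belongs(const std::string& point, const std::string& path) {
  if(point.empty()) return true;
  if(point.length() > path.length()) return false;
  if(path.compare(0, point.length(), point) != 0) return false;
  return (path.length() == point.length()) || (path[point.length()] == '/');
}

int main(void) {
  std::list<std::string> mounts = { "jobs", "jobs/session", "" };
  // Longest mount point first, mirroring nodes.sort(FileNode::compare).
  mounts.sort([](const std::string& a, const std::string& b) {
    return a.length() > b.length();
  });
  const char* requests[] = { "jobs/session/1/stdout", "jobs/123", "data/file" };
  for(const char* r : requests) {
    for(const std::string& m : mounts) {
      if(belongs(m, r)) {                        // first (longest) match wins
        std::cout << r << " -> '" << (m.empty() ? "/" : m) << "'\n";
        break;
      }
    }
  }
  return 0;
}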
nordugrid-arc-6.14.0/src/services/gridftpd/run/Makefile.am0000644000175000002070000000033614152153376024410 0ustar00mockbuildmock00000000000000noinst_LTLIBRARIES = librun.la librun_la_SOURCES = run_plugin.cpp run_plugin.h librun_la_CXXFLAGS = -I$(top_srcdir)/include \ $(LIBXML2_CFLAGS) $(GLIBMM_CFLAGS) $(AM_CXXFLAGS) librun_la_LIBADD = $(DLOPEN_LIBS) -lpthread nordugrid-arc-6.14.0/src/services/gridftpd/run/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435024426 xustar000000000000000030 mtime=1638455069.630115136 30 atime=1638455090.925435108 30 ctime=1638455099.297560903 nordugrid-arc-6.14.0/src/services/gridftpd/run/Makefile.in0000644000175000002070000006370014152153435024421 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/gridftpd/run DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ 
$(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = LTLIBRARIES = $(noinst_LTLIBRARIES) am__DEPENDENCIES_1 = librun_la_DEPENDENCIES = $(am__DEPENDENCIES_1) am_librun_la_OBJECTS = librun_la-run_plugin.lo librun_la_OBJECTS = $(am_librun_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = librun_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(librun_la_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(librun_la_SOURCES) DIST_SOURCES = $(librun_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. 
This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ 
GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ 
TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ noinst_LTLIBRARIES = librun.la librun_la_SOURCES = run_plugin.cpp run_plugin.h librun_la_CXXFLAGS = -I$(top_srcdir)/include \ $(LIBXML2_CFLAGS) $(GLIBMM_CFLAGS) $(AM_CXXFLAGS) librun_la_LIBADD = $(DLOPEN_LIBS) -lpthread all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/gridftpd/run/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/gridftpd/run/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstLTLIBRARIES: -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) @list='$(noinst_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } librun.la: $(librun_la_OBJECTS) $(librun_la_DEPENDENCIES) $(EXTRA_librun_la_DEPENDENCIES) $(AM_V_CXXLD)$(librun_la_LINK) $(librun_la_OBJECTS) $(librun_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/librun_la-run_plugin.Plo@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< librun_la-run_plugin.lo: run_plugin.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(librun_la_CXXFLAGS) $(CXXFLAGS) -MT librun_la-run_plugin.lo -MD -MP -MF $(DEPDIR)/librun_la-run_plugin.Tpo -c -o librun_la-run_plugin.lo `test -f 'run_plugin.cpp' || echo '$(srcdir)/'`run_plugin.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/librun_la-run_plugin.Tpo $(DEPDIR)/librun_la-run_plugin.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='run_plugin.cpp' object='librun_la-run_plugin.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) 
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(librun_la_CXXFLAGS) $(CXXFLAGS) -c -o librun_la-run_plugin.lo `test -f 'run_plugin.cpp' || echo '$(srcdir)/'`run_plugin.cpp mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-noinstLTLIBRARIES cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/gridftpd/run/PaxHeaders.30264/run_plugin.cpp0000644000000000000000000000013114152153376025252 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455099.300560948 nordugrid-arc-6.14.0/src/services/gridftpd/run/run_plugin.cpp0000644000175000002070000001604114152153376025242 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include "run_plugin.h" namespace gridftpd { void free_args(char** args) { if(args == NULL) return; for(int i=0;args[i];i++) free(args[i]); free(args); } char** string_to_args(const std::string& command) { if(command.length() == 0) return NULL; int n = 100; char** args = (char**)malloc(n*sizeof(char**)); int i; for(i=0;i::iterator i = args_.begin(); i!=args_.end();++i,++n) args[n]=(char*)(i->c_str()); args[n]=NULL; if(lib.length() == 0) { bool r = false; Arc::Run re(args_); re.AssignStdin(stdin_); re.AssignStdout(stdout_); re.AssignStderr(stderr_); if(re.Start()) { if(re.Wait(timeout_)) { result_=re.Result(); r=true; } else { re.Kill(0); }; }; if(!r) { free(args); return false; }; } else { void* lib_h = dlopen(lib.c_str(),RTLD_NOW); if(lib_h == NULL) { free(args); return false; }; lib_plugin_t f; f.v = dlsym(lib_h,args[0]); if(f.v == NULL) { dlclose(lib_h); free(args); return false; }; result_ = (*f.f)(args[1],args[2],args[3],args[4],args[5], args[6],args[7],args[8],args[9],args[10], args[11],args[12],args[13],args[14],args[15], args[16],args[17],args[18],args[19],args[20], args[21],args[22],args[23],args[24],args[25], args[26],args[27],args[28],args[29],args[30], args[31],args[32],args[33],args[34],args[35], args[36],args[37],args[38],args[39],args[40], args[41],args[42],args[43],args[44],args[45], args[56],args[57],args[58],args[59],args[60], args[61],args[62],args[63],args[64],args[65], args[66],args[67],args[68],args[69],args[70], args[71],args[72],args[73],args[74],args[75], args[76],args[77],args[78],args[79],args[80], args[81],args[82],args[83],args[84],args[85], args[86],args[87],args[88],args[89],args[90], args[91],args[92],args[93],args[94],args[95], args[96],args[97],args[98],args[99],args[100]); dlclose(lib_h); }; free(args); return true; } bool RunPlugin::run(substitute_t subst,void* arg) { result_=0; stdout_=""; stderr_=""; if(subst == NULL) return run(); if(args_.empty()) return true; char** args = (char**)malloc(sizeof(char*)*(args_.size()+1)); if(args == NULL) return false; std::list args__; for(std::list::iterator i = args_.begin();i!=args_.end();++i) { args__.push_back(*i); }; for(std::list::iterator i = args__.begin();i!=args__.end();++i) { (*subst)(*i,arg); }; int n = 0; for(std::list::iterator i = args__.begin(); i!=args__.end();++i,++n) args[n]=(char*)(i->c_str()); args[n]=NULL; if(lib.length() == 0) { bool r = false; Arc::Run re(args__); re.AssignStdin(stdin_); re.AssignStdout(stdout_); re.AssignStderr(stderr_); if(re.Start()) { if(re.Wait(timeout_)) { result_=re.Result(); r=true; } else { re.Kill(0); }; }; if(!r) { free(args); return false; }; } else { void* lib_h = dlopen(lib.c_str(),RTLD_NOW); if(lib_h == NULL) { free(args); return false; }; lib_plugin_t f; f.v = dlsym(lib_h,args[0]); if(f.v == NULL) { dlclose(lib_h); free(args); return false; }; result_ = (*f.f)(args[1],args[2],args[3],args[4],args[5], args[6],args[7],args[8],args[9],args[10], args[11],args[12],args[13],args[14],args[15], args[16],args[17],args[18],args[19],args[20], args[21],args[22],args[23],args[24],args[25], 
args[26],args[27],args[28],args[29],args[30], args[31],args[32],args[33],args[34],args[35], args[36],args[37],args[38],args[39],args[40], args[41],args[42],args[43],args[44],args[45], args[56],args[57],args[58],args[59],args[60], args[61],args[62],args[63],args[64],args[65], args[66],args[67],args[68],args[69],args[70], args[71],args[72],args[73],args[74],args[75], args[76],args[77],args[78],args[79],args[80], args[81],args[82],args[83],args[84],args[85], args[86],args[87],args[88],args[89],args[90], args[91],args[92],args[93],args[94],args[95], args[96],args[97],args[98],args[99],args[100]); dlclose(lib_h); }; free(args); return true; } } // namespace gridftpd nordugrid-arc-6.14.0/src/services/gridftpd/run/PaxHeaders.30264/run_plugin.h0000644000000000000000000000013114152153376024717 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455099.301560963 nordugrid-arc-6.14.0/src/services/gridftpd/run/run_plugin.h0000644000175000002070000000313514152153376024707 0ustar00mockbuildmock00000000000000#ifndef ARC_GRIDFTPD_RUN_PLUGIN_H #define ARC_GRIDFTPD_RUN_PLUGIN_H #include #include #include #include #include namespace gridftpd { class RunPlugin { private: std::list args_; std::string lib; std::string stdin_; std::string stdout_; std::string stderr_; int timeout_; int result_; void set(const std::string& cmd); void set(char const * const * args); public: typedef void (*substitute_t)(std::string& str,void* arg); union lib_plugin_t { int (*f)(...); void* v; }; RunPlugin(void):timeout_(10),result_(0) { }; RunPlugin(const std::string& cmd):timeout_(10),result_(0) { set(cmd); }; RunPlugin(char const * const * args):timeout_(10),result_(0) { set(args); }; RunPlugin& operator=(const std::string& cmd) { set(cmd); return *this; }; RunPlugin& operator=(char const * const * args) { set(args); return *this; }; bool run(void); bool run(substitute_t subst,void* arg); int result(void) const { return result_; }; void timeout(int t) { timeout_=t; }; void stdin_channel(const std::string& s) { stdin_=s; }; const std::string& stdout_channel(void) const { return stdout_; }; const std::string& stderr_channel(void) const { return stderr_; }; operator bool(void) const { return !args_.empty(); }; std::string cmd(void) { return (args_.empty())?std::string(""):args_.front(); }; }; void free_args(char** args); char** string_to_args(const std::string& command); } // namespace gridftpd #endif // ARC_GRIDFTPD_RUN_PLUGIN_H nordugrid-arc-6.14.0/src/services/gridftpd/run/PaxHeaders.30264/README0000644000000000000000000000013114152153376023244 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455099.299560933 nordugrid-arc-6.14.0/src/services/gridftpd/run/README0000644000175000002070000000004514152153376023231 0ustar00mockbuildmock00000000000000Classes to run external executables. 
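The RunPlugin interface declared in run_plugin.h above can be exercised with a short driver. The sketch below is illustrative only: the command line, the %U placeholder and the expand() callback are made up for the example, and it assumes compilation together with the gridftpd sources so that run_plugin.h and the ARC libraries it pulls in are available.

#include <iostream>
#include <string>
#include "run_plugin.h"

// Substitution callback: RunPlugin::run(subst,arg) applies this to every
// argument of the parsed command line before the child is started.
static void expand(std::string& arg, void* data) {
  const std::string* user = static_cast<const std::string*>(data);
  std::string::size_type p = arg.find("%U");
  if(p != std::string::npos) arg.replace(p, 2, *user);
}

int main(void) {
  std::string user = "alice";                     // illustrative value
  gridftpd::RunPlugin cmd("/bin/echo hello %U");  // parsed into an argv list
  cmd.timeout(5);                                 // seconds to wait for the child
  if(!cmd) return 1;                              // nothing configured
  if(!cmd.run(&expand, &user)) {                  // run with %U substituted
    std::cerr << "failed to run " << cmd.cmd() << std::endl;
    return 1;
  }
  std::cout << "exit code: " << cmd.result() << std::endl;
  std::cout << "stdout: " << cmd.stdout_channel() << std::endl;
  return 0;
}

For an ordinary external command (no library path configured) run() goes through Arc::Run, so result() should hold the child's exit code and stdout_channel()/stderr_channel() whatever the child wrote.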
nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/commands.h0000644000000000000000000000013114152153376023532 xustar000000000000000029 mtime=1638455038.43464641 30 atime=1638455038.511647567 30 ctime=1638455099.221559761 nordugrid-arc-6.14.0/src/services/gridftpd/commands.h0000644000175000002070000001300114152153376023513 0ustar00mockbuildmock00000000000000#include #include class GridFTP_Commands_timeout; class GridFTP_Commands { typedef enum data_connect_type_e { GRIDFTP_CONNECT_NONE, GRIDFTP_CONNECT_PORT, GRIDFTP_CONNECT_PASV } data_connect_type_t; friend class GridFTP_Commands_timeout; private: #ifndef __DONT_USE_FORK__ class close_semaphor_t { public: close_semaphor_t(void); ~close_semaphor_t(void); }; close_semaphor_t close_semaphor; #endif int log_id; unsigned int firewall[4]; int local_host[16]; // enough for IPv6 unsigned short local_port; bool local_is_ipv6; time_t last_action_time; globus_ftp_control_handle_t handle; globus_mutex_t response_lock; globus_cond_t response_cond; int response_done; globus_mutex_t abort_lock; globus_cond_t abort_cond; int data_done; data_connect_type_t data_conn_type; globus_ftp_control_dcau_t data_dcau; globus_ftp_control_tcpbuffer_t tcp_buffer; gss_cred_id_t delegated_cred; unsigned long long int file_size; FileRoot froot; /* flag to mark server is transfering data right now */ bool transfer_mode; /* flag to mark transfer abort was requested by client side */ bool transfer_abort; /* lock used in data transfer callbacks */ globus_mutex_t data_lock; /* flag to mark eof was set during data transfer(receive) or any other reason to stop registering new buffers */ bool data_eof; /* number of buffers registered so far for data transfer */ unsigned int data_buf_count; /* store array of data buffers here */ typedef struct { unsigned char* data; unsigned long long int used; struct timeval time_last; } data_buffer_t; data_buffer_t* data_buffer; /* size of every buffer - should it be equal to PBSZ ? 
*/ unsigned long long int data_buffer_size; unsigned int data_buffer_num; unsigned int data_callbacks; /* keeps offset in file for reading */ unsigned long long data_offset; unsigned long long virt_offset; unsigned long long virt_size; bool virt_restrict; /* statistics */ unsigned long long int time_spent_disc; unsigned long long int time_spent_network; void compute_data_buffer(void); bool allocate_data_buffer(void); void free_data_buffer(void); int send_response(const std::string& response) { return send_response(response.c_str()); }; int send_response(const char* response); int wait_response(void); static void close_callback(void *arg,globus_ftp_control_handle_t *handle,globus_object_t *error, globus_ftp_control_response_t *ftp_response); static void response_callback(void* arg,globus_ftp_control_handle_t *handle,globus_object_t *error); static void abort_callback(void* arg,globus_ftp_control_handle_t *handle,globus_object_t *error); bool check_abort(globus_object_t *error); void make_abort(bool already_locked = false,bool wait_abort = true); void force_abort(void); static void accepted_callback(void* arg,globus_ftp_control_handle_t *handle,globus_object_t *error); static void commands_callback(void* arg,globus_ftp_control_handle_t *handle,globus_object_t *error,union globus_ftp_control_command_u *command); static void authenticate_callback(void* arg,globus_ftp_control_handle_t *handle,globus_object_t *error,globus_ftp_control_auth_info_t *result); time_t last_action(void) const { return last_action_time; }; public: GridFTP_Commands(int n = 0,unsigned int* firewall = NULL); ~GridFTP_Commands(void); #ifndef __DONT_USE_FORK__ static int new_connection_callback(void* arg,int server_handle); #else static void new_connection_callback(void* arg,globus_ftp_control_server_t *server_handle,globus_object_t *error); #endif static void data_connect_retrieve_callback(void* arg,globus_ftp_control_handle_t *handle,unsigned int stripendx,globus_bool_t reused,globus_object_t *error); static void data_retrieve_callback(void* arg,globus_ftp_control_handle_t *handle,globus_object_t *error,globus_byte_t *buffer,globus_size_t length,globus_off_t offset,globus_bool_t eof); static void data_connect_store_callback(void* arg,globus_ftp_control_handle_t *handle,unsigned int stripendx,globus_bool_t reused,globus_object_t *error); static void data_store_callback(void* arg,globus_ftp_control_handle_t *handle,globus_object_t *error,globus_byte_t *buffer,globus_size_t length,globus_off_t offset,globus_bool_t eof); static void list_connect_retrieve_callback(void* arg,globus_ftp_control_handle_t *handle,unsigned int stripendx,globus_bool_t reused,globus_object_t *error); static void list_retrieve_callback(void* arg,globus_ftp_control_handle_t *handle,globus_object_t *error,globus_byte_t *buffer,globus_size_t length,globus_off_t offset,globus_bool_t eof); std::list dir_list; std::list::iterator dir_list_pointer; std::string list_name_prefix; globus_off_t list_offset; typedef enum { list_list_mode, list_nlst_mode, list_mlsd_mode } list_mode_t; list_mode_t list_mode; }; class GridFTP_Commands_timeout { private: globus_thread_t timer_thread; std::list cmds; globus_mutex_t lock; globus_cond_t cond; globus_cond_t exit_cond; bool cond_flag; bool exit_cond_flag; static void* timer_func(void* arg); public: GridFTP_Commands_timeout(void); ~GridFTP_Commands_timeout(void); void add(GridFTP_Commands& cmd); void remove(const GridFTP_Commands& cmd); }; 
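commands.h above ends with GridFTP_Commands_timeout, a small watcher that runs a timer thread over a list of registered GridFTP_Commands objects; being declared a friend, it presumably checks their last_action() times to detect idle sessions. The snippet below is only an illustration of the intended add()/remove() pairing, assuming such a watcher is instantiated somewhere in the server (the header does not say where); serve_connection and its flow are hypothetical.

#include "commands.h"

// One process-wide watcher; every active control connection registers with it.
static GridFTP_Commands_timeout watcher;

void serve_connection(void) {
  GridFTP_Commands* cmds = new GridFTP_Commands(); // defaults: log id 0, no firewall address
  watcher.add(*cmds);      // start idle-timeout monitoring for this session
  // ... the globus_ftp_control callbacks declared above drive the session ...
  watcher.remove(*cmds);   // stop monitoring before the object goes away
  delete cmds;
}

The add()/remove() interface suggests the watcher never owns the command objects; ownership stays with whatever code accepted the connection.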
nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/README0000644000000000000000000000013214152153376022441 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.510647552 30 ctime=1638455099.211559611 nordugrid-arc-6.14.0/src/services/gridftpd/README0000644000175000002070000000102114152153376022420 0ustar00mockbuildmock00000000000000GridFTP server ported from arc0 svn tree. This service can act as a secure gateway to grid resources, providing authentication and authorisation through several mechanisms. 3 plugins are provided by ARC: - jobplugin (in a-rex/grid-manager/jobplugin) allows job submission, monitoring and control through the GridFTP interface - fileplugin exposes a file-system hierarchy through the GridFTP interface The plugins can be enabled through configuration described in the manual "The NorduGrid Grid Manager and GridFTP Server" nordugrid-arc-6.14.0/src/services/gridftpd/PaxHeaders.30264/userspec.h0000644000000000000000000000013114152153376023562 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455099.225559821 nordugrid-arc-6.14.0/src/services/gridftpd/userspec.h0000644000175000002070000000300114152153376023542 0ustar00mockbuildmock00000000000000#ifndef GRID_SERVER_USERSPEC_H #define GRID_SERVER_USERSPEC_H #include #include #include "auth/auth.h" #include "auth/unixmap.h" class userspec_t { friend std::string subst_user_spec(std::string &in,userspec_t const *spec); public: AuthUser user; private: int uid; int gid; std::string home; int host[4]; short unsigned int port; std::string config_file; bool refresh(void); UnixMap map; UnixMap default_map; public: void free(void) const; userspec_t(void); ~userspec_t(void); // Initial setup bool fill(globus_ftp_control_auth_info_t *auth,globus_ftp_control_handle_t *handle,const char* cfg = NULL); bool fill(AuthUser& user,const char* cfg = NULL); int get_uid(void) const { return uid; }; int get_gid(void) const { return gid; }; const char* get_uname(void) const; const char* get_gname(void) const; const std::string& get_config_file(void) const { return config_file; } short unsigned int get_port(void) const { return port; }; const int* get_host(void) const { return host; }; const AuthUser& get_user(void) const { return user; }; //AuthResult mapname(const char* line); AuthResult mapgroup(const char* rule, const char* line); bool set_map_policy(const char* rule, const char* line); //AuthResult mapvo(const char* line); bool mapped(void) const { return (bool)map; }; }; std::string subst_user_spec(std::string &in,userspec_t const *spec); bool check_gridmap(const char* dn,char** user,const char* mapfile = NULL); #endif nordugrid-arc-6.14.0/src/services/PaxHeaders.30264/examples0000644000000000000000000000013214152153474021516 xustar000000000000000030 mtime=1638455100.792583367 30 atime=1638455103.999631554 30 ctime=1638455100.792583367 nordugrid-arc-6.14.0/src/services/examples/0000755000175000002070000000000014152153474021560 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/examples/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376023627 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.790583336 nordugrid-arc-6.14.0/src/services/examples/Makefile.am0000644000175000002070000000020314152153376023610 0ustar00mockbuildmock00000000000000if PYTHON_SERVICE PYTHON_SERVICE = echo_python else PYTHON_SERVICE = endif SUBDIRS = $(PYTHON_SERVICE) DIST_SUBDIRS = echo_python 
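The userspec.h header shown above declares check_gridmap(), which by its signature looks up a certificate subject (DN) in a grid-mapfile and returns the mapped local account name. A minimal calling sketch follows; the DN string is a placeholder, the default mapfile is used when the third argument is omitted, and ownership of the returned name is an assumption since the header does not document it.

#include <cstdlib>
#include <iostream>
#include "userspec.h"

int main() {
  char* unixname = NULL;
  const char* dn = "/DC=org/DC=example/CN=Some User";   // hypothetical subject
  if (check_gridmap(dn, &unixname) && unixname) {
    std::cout << dn << " maps to local account " << unixname << std::endl;
    std::free(unixname);   // assumption: the callee allocates the name on the heap
  } else {
    std::cout << "no grid-mapfile entry for " << dn << std::endl;
  }
  return 0;
}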
nordugrid-arc-6.14.0/src/services/examples/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435023635 xustar000000000000000030 mtime=1638455069.122107503 30 atime=1638455091.120438038 30 ctime=1638455100.789583322 nordugrid-arc-6.14.0/src/services/examples/Makefile.in0000644000175000002070000006124114152153435023626 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/examples DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) 
$(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = 
@EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = 
@PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ @PYTHON_SERVICE_FALSE@PYTHON_SERVICE = @PYTHON_SERVICE_TRUE@PYTHON_SERVICE = echo_python SUBDIRS = $(PYTHON_SERVICE) DIST_SUBDIRS = echo_python all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ 
&& { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/examples/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/examples/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/services/examples/PaxHeaders.30264/echo_python0000644000000000000000000000013214152153474024035 xustar000000000000000030 mtime=1638455100.813583682 30 atime=1638455103.999631554 30 ctime=1638455100.813583682 nordugrid-arc-6.14.0/src/services/examples/echo_python/0000755000175000002070000000000014152153474024077 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/examples/echo_python/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376026146 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.811583652 nordugrid-arc-6.14.0/src/services/examples/echo_python/Makefile.am0000644000175000002070000000022614152153376026134 0ustar00mockbuildmock00000000000000exampledir = $(pkgdatadir)/examples/echo_python example_DATA = README EchoService.py __init__.py schema/echo_python.xsd EXTRA_DIST = $(example_DATA) nordugrid-arc-6.14.0/src/services/examples/echo_python/PaxHeaders.30264/EchoService.py0000644000000000000000000000013114152153376026663 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.813583682 nordugrid-arc-6.14.0/src/services/examples/echo_python/EchoService.py0000644000175000002070000002155414152153376026660 0ustar00mockbuildmock00000000000000import arc import time logger = arc.Logger(arc.Logger_getRootLogger(), 'EchoService.py') wsrf_rp_ns = "http://docs.oasis-open.org/wsrf/rp-2" echo_ns = "http://www.nordugrid.org/schemas/echo" import threading class EchoService(object): def __init__(self, cfg): logger.msg(arc.INFO, "EchoService (python) constructor called") # get the response-prefix from the config XML self.prefix = str(cfg.Get('prefix')) # get the response-suffix from the config XML 
self.suffix = str(cfg.Get('suffix')) logger.msg(arc.DEBUG, "EchoService (python) has prefix %(prefix)s and suffix %(suffix)s" % {'prefix': self.prefix, 'suffix': self.suffix}) self.ssl_config = self.parse_ssl_config(cfg) thread_test = str(cfg.Get('ThreadTest')) if thread_test: threading.Thread(target = self.infinite, args=[thread_test]).start() def __del__(self): logger.msg(arc.INFO, "EchoService (python) destructor called") def parse_ssl_config(self, cfg): try: client_ssl_node = cfg.Get('ClientSSLConfig') fromFile = str(client_ssl_node.Attribute('FromFile')) if fromFile: try: xml_string = file(fromFile).read() client_ssl_node = arc.XMLNode(xml_string) except: log.msg() pass if client_ssl_node.Size() == 0: return {} ssl_config = {} ssl_config['key_file'] = str(client_ssl_node.Get('KeyPath')) ssl_config['cert_file'] = str(client_ssl_node.Get('CertificatePath')) ca_file = str(client_ssl_node.Get('CACertificatePath')) if ca_file: ssl_config['ca_file'] = ca_file else: ssl_config['ca_dir'] = str(client_ssl_node.Get('CACertificatesDir')) return ssl_config except: import traceback logger.msg(arc.ERROR, traceback.format_exc()) return {} def infinite(self, url): logger.msg(arc.INFO, "EchoService (python) thread test starting") i = 0 while True: try: i += 1 cfg = arc.MCCConfig() s = arc.ClientSOAP(cfg, arc.URL(url)) ns = arc.NS('echo', echo_ns) outpayload = arc.PayloadSOAP(ns) outpayload.NewChild('echo:echo').NewChild('echo:say').Set('hi!') resp, status = s.process(outpayload) logger.msg(arc.INFO, "EchoService (python) thread test, iteration %(iteration)s %(status)s" % {'iteration': i, 'status': status}) time.sleep(3) except Exception as e: import traceback logger.msg(arc.DEBUG, traceback.format_exc()) def GetLocalInformation(self): ns = arc.NS({'':'http://schemas.ogf.org/glue/2008/05/spec_2.0_d41_r01'}) info = arc.XMLNode(ns,'Domains') service_node = info.NewChild('AdminDomain').NewChild('Services').NewChild('Service') service_node.NewChild('Type').Set('org.nordugrid.tests.echo_python') endpoint_node = service_node.NewChild('Endpoint') endpoint_node.NewChild('HealthState').Set('ok') endpoint_node.NewChild('ServingState').Set('production') return info def process(self, inmsg, outmsg): logger.msg(arc.DEBUG, "EchoService (python) 'Process' called") # time.sleep(10) # get the payload from the message inpayload = inmsg.Payload() logger.msg(arc.VERBOSE, 'inmsg.Auth().Export(arc.SecAttr.ARCAuth) = %s' % inmsg.Auth().Export(arc.SecAttr.ARCAuth).GetXML()) logger.msg(arc.VERBOSE, 'inmsg.Attributes().getAll() = %s ' % inmsg.Attributes().getAll()) logger.msg(arc.INFO, "EchoService (python) got: %s " % inpayload.GetXML()) # the first child of the payload should be the name of the request request_node = inpayload.Child() # get the namespace request_namespace = request_node.Namespace() logger.msg(arc.DEBUG, "EchoService (python) request_namespace: %s" % request_namespace) if request_namespace != echo_ns: if request_namespace == wsrf_rp_ns: outpayload = arc.PayloadSOAP(arc.NS({'wsrf-rp':wsrf_rp_ns})) outpayload.NewChild('wsrf-rp:GetResourcePropertyDocumentResponse').NewChild(self.GetLocalInformation()) outmsg.Payload(outpayload) logger.msg(arc.DEBUG, "outpayload %s" % outpayload.GetXML()) return arc.MCC_Status(arc.STATUS_OK) raise Exception('wrong namespace. 
expected: %s' % echo_ns) # get the name of the request without the namespace prefix # this is the name of the Body node's first child request_name = request_node.Name() # create an answer payload ns = arc.NS({'echo': echo_ns}) outpayload = arc.PayloadSOAP(ns) # here we defined that 'echo' prefix will be the namespace prefix of 'http://www.nordugrid.org/schemas/echo' # get the message say = str(request_node.Get('say')) # put it between the response-prefix and the response-suffix hear = self.prefix + say + self.suffix if request_name == 'double': # if the name of the request is 'double' # we create a new echo message which we send to http://localhost:60000/Echo using the ClientSOAP object cfg = arc.MCCConfig() ssl = False if self.ssl_config: cfg.AddCertificate(self.ssl_config.get('cert_file', None)) cfg.AddPrivateKey(self.ssl_config.get('key_file', None)) if 'ca_file' in self.ssl_config: cfg.AddCAFile(self.ssl_config.get('ca_file', None)) else: cfg.AddCADir(self.ssl_config.get('ca_dir', None)) ssl = True if ssl: url = arc.URL('https://localhost:60000/Echo') logger.msg(arc.DEBUG, 'Calling https://localhost:60000/Echo using ClientSOAP') else: url = arc.URL('http://localhost:60000/Echo') logger.msg(arc.DEBUG, 'Calling http://localhost:60000/Echo using ClientSOAP') # creating the ClientSOAP object s = arc.ClientSOAP(cfg, url) new_payload = arc.PayloadSOAP(ns) # creating the message new_payload.NewChild('echo:echo').NewChild('echo:say').Set(hear) logger.msg(arc.DEBUG, 'new_payload %s' % new_payload.GetXML()) # sending the message resp, status = s.process(new_payload) # get the response hear = str(resp.Get('echoResponse').Get('hear')) elif request_name == 'httplib': # if the name of the request is 'httplib' # we create a new echo message which we send to http://localhost:60000/echo using python's built-in http client try: import http.client as httplib except ImportError: import httplib logger.msg(arc.DEBUG, 'Calling http://localhost:60000/Echo using httplib') # create the connection h = httplib.HTTPConnection('localhost', 60000) new_payload = arc.PayloadSOAP(ns) # create the message new_payload.NewChild('echo:echo').NewChild('echo:say').Set(hear) logger.msg(arc.DEBUG, 'new_payload %s' % new_payload.GetXML()) # send the message h.request('POST', '/Echo', new_payload.GetXML()) r = h.getresponse() response = r.read() logger.msg(arc.DEBUG, response) resp = arc.XMLNode(response) # get the response hear = str(resp.Child().Get('echoResponse').Get('hear')) elif request_name == 'wait': logger.msg(arc.DEBUG, 'Start waiting 10 sec...') time.sleep(10) logger.msg(arc.DEBUG, 'Waiting ends.') # we create a node at '/echo:echoResponse/echo:hear' and put the string in it outpayload.NewChild('echo:echoResponse').NewChild('echo:hear').Set(hear) outmsg.Payload(outpayload) logger.msg(arc.DEBUG, "outpayload %s" % outpayload.GetXML()) # return with STATUS_OK return arc.MCC_Status(arc.STATUS_OK) # you can easily test this with this shellscript: """ MESSAGE='HELLO' echo Request: echo $MESSAGE echo echo Response: curl -d "$MESSAGE" http://localhost:60000/Echo echo """ # nordugrid-arc-6.14.0/src/services/examples/echo_python/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435026154 xustar000000000000000030 mtime=1638455069.170108224 30 atime=1638455091.133438234 30 ctime=1638455100.810583637 nordugrid-arc-6.14.0/src/services/examples/echo_python/Makefile.in0000644000175000002070000005127714152153435026155 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. 
# @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/examples/echo_python DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = 
$(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(exampledir)" DATA = $(example_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ 
CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = 
@PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ 
sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ exampledir = $(pkgdatadir)/examples/echo_python example_DATA = README EchoService.py __init__.py schema/echo_python.xsd EXTRA_DIST = $(example_DATA) all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/examples/echo_python/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/examples/echo_python/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-exampleDATA: $(example_DATA) @$(NORMAL_INSTALL) @list='$(example_DATA)'; test -n "$(exampledir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(exampledir)'"; \ $(MKDIR_P) "$(DESTDIR)$(exampledir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(exampledir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(exampledir)" || exit $$?; \ done uninstall-exampleDATA: @$(NORMAL_UNINSTALL) @list='$(example_DATA)'; test -n "$(exampledir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(exampledir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(DATA) installdirs: for dir in "$(DESTDIR)$(exampledir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-exampleDATA install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-exampleDATA .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exampleDATA install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags-am uninstall \ uninstall-am uninstall-exampleDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/examples/echo_python/PaxHeaders.30264/__init__.py0000644000000000000000000000013014152153376026222 xustar000000000000000029 mtime=1638455038.43264638 29 atime=1638455038.43264638 30 ctime=1638455100.813583682 nordugrid-arc-6.14.0/src/services/examples/echo_python/__init__.py0000644000175000002070000000000014152153376026177 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/examples/echo_python/PaxHeaders.30264/schema0000644000000000000000000000013214152153474025275 xustar000000000000000030 mtime=1638455100.814583697 30 atime=1638455103.999631554 30 ctime=1638455100.814583697 nordugrid-arc-6.14.0/src/services/examples/echo_python/schema/0000755000175000002070000000000014152153474025337 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/examples/echo_python/schema/PaxHeaders.30264/echo_python.xsd0000644000000000000000000000013214152153376030412 xustar000000000000000030 mtime=1638455038.433646395 30 atime=1638455038.510647552 30 ctime=1638455100.814583697 nordugrid-arc-6.14.0/src/services/examples/echo_python/schema/echo_python.xsd0000644000175000002070000000463514152153376030403 0ustar00mockbuildmock00000000000000 Prefix of the response string. Suffix of the response string. Defines the path of the XML file which contains TLS related client configuration. If this attribute is defined, then the elements inside will be ignored. Container of TLS related client configuration Path of certificate private key Path of certificate Directory location of CA certificates nordugrid-arc-6.14.0/src/services/examples/echo_python/PaxHeaders.30264/README0000644000000000000000000000013114152153376024772 xustar000000000000000029 mtime=1638455038.43264638 30 atime=1638455038.510647552 30 ctime=1638455100.812583667 nordugrid-arc-6.14.0/src/services/examples/echo_python/README0000644000175000002070000000011214152153376024752 0ustar00mockbuildmock00000000000000Simple test service to demonstrate how Python-based services should work. nordugrid-arc-6.14.0/src/services/PaxHeaders.30264/ldap-infosys0000644000000000000000000000013214152153474022310 xustar000000000000000030 mtime=1638455100.467578483 30 atime=1638455103.999631554 30 ctime=1638455100.467578483 nordugrid-arc-6.14.0/src/services/ldap-infosys/0000755000175000002070000000000014152153474022352 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/services/ldap-infosys/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376024421 xustar000000000000000029 mtime=1638455038.43664644 30 atime=1638455038.512647582 30 ctime=1638455100.463578423 nordugrid-arc-6.14.0/src/services/ldap-infosys/Makefile.am0000644000175000002070000000055314152153376024412 0ustar00mockbuildmock00000000000000if SYSV_SCRIPTS_ENABLED GRID_INFOSYS_SCRIPT = arc-infosys-ldap else GRID_INFOSYS_SCRIPT = endif initd_SCRIPTS = $(GRID_INFOSYS_SCRIPT) if SYSTEMD_UNITS_ENABLED GRID_INFOSYS_UNIT = arc-infosys-ldap.service arc-infosys-ldap-slapd.service else GRID_INFOSYS_UNIT = endif units_DATA = $(GRID_INFOSYS_UNIT) pkgdata_SCRIPTS = create-bdii-config create-slapd-config nordugrid-arc-6.14.0/src/services/ldap-infosys/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153435024427 xustar000000000000000030 mtime=1638455069.681115902 30 atime=1638455090.950435484 30 ctime=1638455100.462578408 nordugrid-arc-6.14.0/src/services/ldap-infosys/Makefile.in0000644000175000002070000006214614152153435024425 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. 
# @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/services/ldap-infosys DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/create-bdii-config.in \ $(srcdir)/create-slapd-config.in $(srcdir)/arc-infosys-ldap.in \ $(srcdir)/arc-infosys-ldap.service.in \ $(srcdir)/arc-infosys-ldap-slapd.service.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = create-bdii-config create-slapd-config \ arc-infosys-ldap arc-infosys-ldap.service \ arc-infosys-ldap-slapd.service 
CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(initddir)" "$(DESTDIR)$(pkgdatadir)" \ "$(DESTDIR)$(unitsdir)" SCRIPTS = $(initd_SCRIPTS) $(pkgdata_SCRIPTS) AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac DATA = $(units_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = 
@ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = 
@MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ 
pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ @SYSV_SCRIPTS_ENABLED_FALSE@GRID_INFOSYS_SCRIPT = @SYSV_SCRIPTS_ENABLED_TRUE@GRID_INFOSYS_SCRIPT = arc-infosys-ldap initd_SCRIPTS = $(GRID_INFOSYS_SCRIPT) @SYSTEMD_UNITS_ENABLED_FALSE@GRID_INFOSYS_UNIT = @SYSTEMD_UNITS_ENABLED_TRUE@GRID_INFOSYS_UNIT = arc-infosys-ldap.service arc-infosys-ldap-slapd.service units_DATA = $(GRID_INFOSYS_UNIT) pkgdata_SCRIPTS = create-bdii-config create-slapd-config all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/services/ldap-infosys/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/services/ldap-infosys/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): create-bdii-config: $(top_builddir)/config.status $(srcdir)/create-bdii-config.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ create-slapd-config: $(top_builddir)/config.status $(srcdir)/create-slapd-config.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-infosys-ldap: $(top_builddir)/config.status $(srcdir)/arc-infosys-ldap.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-infosys-ldap.service: $(top_builddir)/config.status $(srcdir)/arc-infosys-ldap.service.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arc-infosys-ldap-slapd.service: $(top_builddir)/config.status $(srcdir)/arc-infosys-ldap-slapd.service.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-initdSCRIPTS: $(initd_SCRIPTS) @$(NORMAL_INSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(initddir)'"; \ $(MKDIR_P) "$(DESTDIR)$(initddir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 
} \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(initddir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(initddir)$$dir" || exit $$?; \ } \ ; done uninstall-initdSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(initd_SCRIPTS)'; test -n "$(initddir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(initddir)'; $(am__uninstall_files_from_dir) install-pkgdataSCRIPTS: $(pkgdata_SCRIPTS) @$(NORMAL_INSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgdatadir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgdatadir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(pkgdatadir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(pkgdatadir)$$dir" || exit $$?; \ } \ ; done uninstall-pkgdataSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(pkgdata_SCRIPTS)'; test -n "$(pkgdatadir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(pkgdatadir)'; $(am__uninstall_files_from_dir) mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-unitsDATA: $(units_DATA) @$(NORMAL_INSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(unitsdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(unitsdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(unitsdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(unitsdir)" || exit $$?; \ done uninstall-unitsDATA: @$(NORMAL_UNINSTALL) @list='$(units_DATA)'; test -n "$(unitsdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(unitsdir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ 
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(SCRIPTS) $(DATA) installdirs: for dir in "$(DESTDIR)$(initddir)" "$(DESTDIR)$(pkgdatadir)" "$(DESTDIR)$(unitsdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-initdSCRIPTS install-pkgdataSCRIPTS \ install-unitsDATA install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-initdSCRIPTS uninstall-pkgdataSCRIPTS \ uninstall-unitsDATA .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am \ install-initdSCRIPTS install-man install-pdf install-pdf-am \ install-pkgdataSCRIPTS install-ps install-ps-am install-strip \ install-unitsDATA installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags-am uninstall uninstall-am uninstall-initdSCRIPTS \ uninstall-pkgdataSCRIPTS uninstall-unitsDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/services/ldap-infosys/PaxHeaders.30264/create-bdii-config.in0000644000000000000000000000013214152153376026331 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.464578438 nordugrid-arc-6.14.0/src/services/ldap-infosys/create-bdii-config.in0000644000175000002070000003560214152153376026324 0ustar00mockbuildmock00000000000000#!/bin/bash # Define logging functions send_systemd_notify() { # return if no systemd-notify found type systemd-notify >/dev/null 2>&1 || return systemd-notify "$@" } log_failure_msg() { send_systemd_notify --status "Error: $@" echo $@ } # Create bdii config for the NorduGrid/ARC information system ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ ! -d "$ARC_LOCATION" ]; then log_failure_msg "ARC_LOCATION ($ARC_LOCATION) not found" exit 1 fi # ARC_CONFIG if [ "x$ARC_CONFIG" = "x" ]; then if [ -r $ARC_LOCATION/etc/arc.conf ]; then ARC_CONFIG=$ARC_LOCATION/etc/arc.conf elif [ -r /etc/arc.conf ]; then ARC_CONFIG=/etc/arc.conf fi if [ ! -r "$ARC_CONFIG" ]; then log_failure_msg "arc.conf is missing at path: $ARC_CONFIG or no ARC_LOCATION is set" log_failure_msg "If this file is in a non-standard place it can be set" log_failure_msg " with the ARC_CONFIG environment variable" exit 1 fi fi # Define runtime config location for infosys LDAP prefix=@prefix@ runtime_config_dir=/run/arc if [ ! -d "$runtime_config_dir" ]; then mkdir -p "$runtime_config_dir" fi export ARC_RUNCONFIG="$runtime_config_dir/arc-infosys-ldap.conf" unset runtime_config_dir unset prefix # Define arcconfig-parser and dump running configuration arcconfig_parser=${ARC_LOCATION}/@pkglibexecsubdir@/arcconfig-parser ${arcconfig_parser} -c ${ARC_CONFIG} --save -r ${ARC_RUNCONFIG} # Check for infosys block if ! ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys; then log_failure_msg "Missing [infosys] configuration block" exit 1 fi # Check for infosys/ldap block if ! ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/ldap; then log_failure_msg "Missing [infosys/ldap] configuration block" exit 1 fi eval $(${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/ldap -b infosys -b common -e bash) bdii_user=$CONFIG_user if [ -z "$bdii_user" ]; then # Get ldap user from passwd bdii_user=`getent passwd ldap openldap | sed 's/:.*//;q'` if [ -z "$bdii_user" ]; then echo "Warning, could not find ldap or openldap user" echo "resorting to using the root user" bdii_user=root fi fi # These values may be set in arc.conf, otherwise use sensible defaults providerlog=${CONFIG_logfile:-/var/log/arc/infoprovider.log} bdii_location=${CONFIG_bdii_location:-/usr} bdii_update_cmd=${CONFIG_bdii_update_cmd:-${bdii_location}/sbin/bdii-update} if [ ! -x $bdii_update_cmd ]; then log_failure_msg "Can not find bdii-update command at: $bdii_update_cmd." 
echo "Please set bdii_update_cmd in arc.conf" exit 1 fi infosys_ldap_run_dir=${CONFIG_infosys_ldap_run_dir:-/run/arc/infosys} mkdir -p ${infosys_ldap_run_dir} chown ${bdii_user}: ${infosys_ldap_run_dir} # Put BDII update helper to known directory helpers_dir=$infosys_ldap_run_dir if [ -n "$FORCE_ARC_RUNDIR" ]; then helpers_dir="${FORCE_ARC_RUNDIR}/infosys" mkdir -p "${helpers_dir}" fi bdii_update_exechelper_cmd=${helpers_dir}/bdii-update.cmd rm -f $bdii_update_exechelper_cmd bdii_update_posthelper_cmd=${helpers_dir}/bdii-update-post.cmd rm -f $bdii_update_posthelper_cmd bdii_debug_level=${CONFIG_bdii_debug_level:-WARNING} bdii_tmp_dir=${CONFIG_bdii_tmp_dir:-/var/tmp/arc/bdii} bdii_var_dir=${CONFIG_bdii_var_dir:-/var/lib/arc/bdii} bdii_run_dir=${CONFIG_bdii_run_dir:-/run/arc/bdii} bdii_log_dir=${CONFIG_bdii_log_dir:-/var/log/arc/bdii} bdii_log_file="${bdii_log_dir}/bdii-update.log" bdii_slapd_conf=${infosys_ldap_run_dir}/bdii-slapd.conf bdii_default_ldif=${bdii_tmp_dir}/provider/arc-default.ldif.pl bdii_ldif_dir=${bdii_tmp_dir}/ldif bdii_provider_dir=${bdii_tmp_dir}/provider bdii_plugin_dir=${bdii_tmp_dir}/plugin bdii_port=${CONFIG_port:-2135} # Using uppercase characters in bdii_bind will break infosys. bdii_bind="o=grid" # $bdii_provider_timeout refers to the time bdii waits for the provider output to complete. bdii_provider_timeout=${CONFIG_bdii_provider_timeout:-10800} # $infoproviders_timelimit is a-rex's infoproviders timeout. infoproviders_timelimit=$(${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b arex -o infoproviders_timelimit) infoproviders_timelimit=${infoproviders_timelimit:-10800} # $wakeupperiod is the time a-rex waits before running infoproviders again. wakeupperiod=$(${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b arex -o wakeupperiod) wakeupperiod=${wakeupperiod:-120} bdii_archive_size=${CONFIG_bdii_archive_size:-0} # The infoprovider does the waiting, no need for BDII to do it too. 
Use # some small timeout to protect the system in case there is a problem with # the provider bdii_breathe_time=${CONFIG_bdii_breathe_time:-10} # max_cycle is the time bdii will trust the content of any provider to be fresh enough max_cycle=$(( $bdii_provider_timeout + $infoproviders_timelimit + $wakeupperiod )) bdii_read_timeout=${CONFIG_bdii_read_timeout:-$max_cycle} bdii_delete_delay=${CONFIG_bdii_delete_delay:-0} # PIDFile location handling update_pid_file=$( readlink -m ${CONFIG_bdii_update_pid_file:-$bdii_run_dir/bdii-update.pid} ) # forced pidfile location instead of arc.conf-based (if FORCE_ARC_RUNDIR is set) if [ -n "$FORCE_ARC_RUNDIR" ]; then pid_dir="${FORCE_ARC_RUNDIR}/bdii" mkdir -p "$pid_dir" chown -R ${bdii_user}: "$pid_dir" pid_file="$( readlink -m ${pid_dir}/bdii-update.pid )" if [ "x${update_pid_file}" != "x${pid_file}" ]; then custom_pid_file="${update_pid_file}" rm -f "${custom_pid_file}" update_pid_file="${pid_file}" fi unset pid_dir pid_file fi rm -f "${update_pid_file}" # Debian does not have /run/lock/subsys if [ -d /run/lock/subsys ]; then update_lock_file=${update_lock_file:-/run/lock/subsys/arc-bdii-update} else update_lock_file=${update_lock_file:-/run/lock/arc-bdii-update} fi # Check directories and permissions mkdir -p `dirname $providerlog` touch ${providerlog} chown ${bdii_user}: ${providerlog} mkdir -p $bdii_log_dir chown -R ${bdii_user}: ${bdii_log_dir} if ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/nordugrid || \ ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/glue1 || \ ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/glue2/ldap; then if [ ! -f "$ARC_LOCATION/@pkgdatasubdir@/InfosysHelper.pm" ]; then log_failure_msg "InfosysHelper.pm not found. Is A-REX installed?" echo "For operation without A-REX, disable publishing of cluster information" echo "([infosys/nordugrid], [infosys/glue1] and [infosys/glue2/ldap])" exit 1 fi fi BDII_CONF=${CONFIG_bdii_conf:-${infosys_ldap_run_dir}/bdii.conf} resource_location="" resource_latitude="" resource_longitude="" cpuscalingreferencesi00="" processorotherdescription="" gluesiteweb="" gluesiteuniqueid="" if ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/glue1; then eval $(${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/glue1 -e bash) resource_location=${CONFIG_resource_location} resource_latitude=${CONFIG_resource_latitude} resource_longitude=${CONFIG_resource_longitude} cpuscalingreferencesi00=${CONFIG_cpu_scaling_reference_si00} processorotherdescription=${CONFIG_processor_other_description} gluesiteweb=${CONFIG_glue_site_web} gluesiteuniqueid=${CONFIG_glue_site_unique_id} if ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/glue1/site-bdii ; then provide_glue_site_info="true" else provide_glue_site_info="false" fi if [ "x$resource_location" = "x" ]; then log_failure_msg "If [infosys/glue1] is enabled, then resource_location must be set." echo "It should be set to a free-form string describing the location," echo "for example: 'Kastrup, Denmark'" exit 1 fi if [[ "x$resource_location" =~ "/" ]]; then log_failure_msg "Wrong location format. Please do NOT use slashes / ." echo "It should be set to a free-form string describing the location," echo "for example: 'Kastrup, Denmark'" exit 1 fi if [ "x$resource_latitude" = "x" ]; then log_failure_msg "If [infosys/glue1] is enabled, then resource_latitude must be set." 
echo "It should be set to the latitude for the location," echo "for example: '55.75000'" exit 1 fi if [ "x$resource_longitude" = "x" ]; then log_failure_msg "If [infosys/glue1] is enabled, then resource_longitude must be set." echo "It should be set to the longitude for the location," echo "for example: '12.41670'" exit 1 fi if [ "x$cpuscalingreferencesi00" = "x" ]; then log_failure_msg "If [infosys/glue1] is enabled, then cpu_scaling_reference_si00 must be set." echo "It should be set to the SI00 value," echo "for example: '2400'" exit 1 fi if [ "x$processorotherdescription" = "x" ]; then log_failure_msg "If [infosys/glue1] is enabled, then processor_other_description must be set." echo "It should be set to a value like in the example,where cores is the average number" echo "of cores in the machine" echo "for example: 'Cores=3,Benchmark=9.8-HEP-SPEC06'" exit 1 fi if [ "x$gluesiteweb" = "x" ]; then log_failure_msg "If [infosys/glue1] is enabled, then glue_site_web must be set." echo "It should be set to a url for the website belonging to the institution holding the resource," echo "for example: 'http://www.ndgf.org'" exit 1 fi if [ "x$gluesiteuniqueid" = "x" ]; then log_failure_msg "If [infosys/glue1] is enabled, then glue_site_unique_id must be set." echo "It should be set to a unique id to the resource, this should be entered into the GocDb" echo "for example: 'NDGF-T1'" exit 1 fi fi # Create directories for storing temporary scripts and check permissions etc mkdir -p $bdii_var_dir mkdir -p $bdii_run_dir mkdir -p $bdii_tmp_dir mkdir -p $bdii_tmp_dir/ldif mkdir -p $bdii_tmp_dir/provider mkdir -p $bdii_tmp_dir/plugin # change permissions if user is not root chown -R ${bdii_user}: ${bdii_var_dir} chown -R ${bdii_user}: ${bdii_run_dir} chown -R ${bdii_user}: ${bdii_tmp_dir} # Generate bdii configuration rm -f ${BDII_CONF} cat <<-EOF >> ${BDII_CONF} # This file was automatically generated by $0 # Do not modify BDII_LOG_FILE=$bdii_log_file BDII_PID_FILE=$update_pid_file BDII_LOG_LEVEL=$bdii_debug_level BDII_LDIF_DIR=$bdii_ldif_dir BDII_PROVIDER_DIR=$bdii_provider_dir BDII_PLUGIN_DIR=$bdii_plugin_dir BDII_PORT=$bdii_port BDII_BREATHE_TIME=$bdii_breathe_time BDII_READ_TIMEOUT=$bdii_read_timeout BDII_ARCHIVE_SIZE=$bdii_archive_size BDII_DELETE_DELAY=$bdii_delete_delay BDII_USER=$bdii_user BDII_VAR_DIR=$bdii_var_dir BDII_RUN_DIR=$bdii_run_dir BDII_BIND=$bdii_bind SLAPD_CONF=$bdii_slapd_conf EOF # Generate default ldif cat <<-EOF > $bdii_default_ldif #!/usr/bin/perl # This file was automatically generated by $0 # Do not modify use POSIX; print "\n"; print "dn: o=grid\n"; print "objectClass: organization\n"; print "o: grid\n"; print "\n"; print "dn: Mds-Vo-name=local,o=grid\n"; print "objectClass: Mds\n"; print "Mds-Vo-name: local\n"; print "Mds-validfrom: " . strftime("%Y%m%d%H%M%SZ\n", gmtime()); print "Mds-validto: " . strftime("%Y%m%d%H%M%SZ\n", gmtime(time() + 3600)); print "\n"; print "dn: Mds-Vo-name=resource,o=grid\n"; print "objectClass: Mds\n"; print "Mds-Vo-name: resource\n"; print "Mds-validfrom: " . strftime("%Y%m%d%H%M%SZ\n", gmtime()); print "Mds-validto: " . 
strftime("%Y%m%d%H%M%SZ\n", gmtime(time() + 3600)); print "\n"; print "dn: o=glue\n"; print "objectClass: organization\n"; print "o: glue\n"; EOF chmod +x $bdii_default_ldif # Create ARC ldif generator file ldif_generator_file=${bdii_tmp_dir}/provider/arc-nordugrid-bdii-ldif rm -f ${ldif_generator_file} touch ${ldif_generator_file} ldif_glue1_generator=${infosys_ldap_run_dir}/arc-glue-bdii-ldif ldif_script=${infosys_ldap_run_dir}/ldif-provider.sh cat <<-EOF > ${ldif_generator_file} #!/usr/bin/perl # This file was automatically generated by the $0 # Do not modify EOF # NG and GLUE2 come directly from a-rex infoprovider cat <<-EOF >> ${ldif_generator_file} BEGIN { unshift @INC, '$ARC_LOCATION/@pkgdatasubdir@'; } use InfosysHelper; exit 1 unless InfosysHelper::ldifIsReady('$infosys_ldap_run_dir', '$max_cycle'); EOF if ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/nordugrid || \ ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/glue2/ldap; then echo "system('$ldif_script');" >> ${ldif_generator_file} fi if ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/glue1; then ldif_generator_file_ng=${bdii_tmp_dir}/provider/arc-nordugrid-bdii-ldif ldif_generator_file_glue=${bdii_tmp_dir}/provider/arc-glue-bdii-ldif rm -f ${ldif_generator_file_glue} touch ${ldif_generator_file_glue} # We use , instead of / here to allow for / in path # resource_location though, can contain commas.. sed "s,\$LDIF_GENERATOR_FILE_NG,$ldif_generator_file_ng,g; s/\$LOC/\"$resource_location\"/g; s/\$LAT/$resource_latitude/g; s/\$LONG/$resource_longitude/g; s/\$CPUSCALINGREFERENCESI00/$cpuscalingreferencesi00/g; s/\$PROCESSOROTHERDESCRIPTION/$processorotherdescription/g; s,\$GLUESITEWEB,$gluesiteweb,g; s,\$BDIIPORT,$bdii_port,g; s,\$GLUESITEUNIQUEID,$gluesiteuniqueid,g; s,\$PROVIDE_GLUE_SITE_INFO,$provide_glue_site_info,g; " $ARC_LOCATION/@pkgdatasubdir@/glue-generator.pl > ${ldif_generator_file_glue} chmod +x ${ldif_generator_file_glue} echo "system('$ldif_glue1_generator');" >> ${ldif_generator_file} fi chmod +x ${ldif_generator_file} # Site BDII if [ "x$provide_glue_site_info" == "xtrue" ]; then eval $(${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/glue1/site-bdii -e bash) unique_id=${CONFIG_unique_id} url=${CONFIG_url} if [ -z "$unique_id" -o -z "$url" ]; then log_failure_msg "Mandaroty options 'unique_id' and 'url' should be configured for Site-BDII." exit 1 fi site_config="${bdii_tmp_dir}/${unique_id}.conf" site_provider="$bdii_provider_dir/site_${unique_id}.sh" echo "$unique_id $url" > "$site_config" # Create script and make glite provider use arc directories cat <<-EOF > $site_provider #!/bin/sh export GLITE_LOCATION_VAR=${bdii_tmp_dir} $ARC_LOCATION/@pkgdatasubdir@/glite-info-provider-ldap -m "$unique_id" -c $site_config EOF chmod +x $site_provider fi # Helper script to start BDII Update switching to BDII user if [ -x /sbin/runuser ]; then RUNUSER=runuser else RUNUSER=su fi USERSHELL=${USERSHELL:-"/bin/sh"} if [ ! -x ${USERSHELL} ]; then log_failure_msg "Could not find ${USERSHELL}" exit 1 fi cat <<-EOF > ${bdii_update_exechelper_cmd} if [ \$( id -u ) = 0 ]; then exec $RUNUSER -s "$USERSHELL" -c "${bdii_update_cmd} -c ${BDII_CONF} -d" ${bdii_user} else exec ${bdii_update_cmd} -c ${BDII_CONF} -d fi EOF cat <<-EOF > ${bdii_update_posthelper_cmd} iterlimit=30 while [ \$iterlimit -ge 0 ] && ! 
[ -r "${update_pid_file}" ]; do sleep 1 iterlimit=\$(expr \$iterlimit - 1) done EOF # copy forced pidfile to custom arc.conf pidfile (if needed) if [ -n "${custom_pid_file}" ]; then echo "mkdir -p \"${custom_pid_file%/*}\"" >> ${bdii_update_posthelper_cmd} echo "cp -a \"${update_pid_file}\" \"${custom_pid_file}\"" >> ${bdii_update_posthelper_cmd} fi nordugrid-arc-6.14.0/src/services/ldap-infosys/PaxHeaders.30264/arc-infosys-ldap.service.in0000644000000000000000000000013214152153376027530 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.466578468 nordugrid-arc-6.14.0/src/services/ldap-infosys/arc-infosys-ldap.service.in0000644000175000002070000000077414152153376027525 0ustar00mockbuildmock00000000000000[Unit] Description=ARC LDAP-based information services - BDII-Update Requires=arc-infosys-ldap-slapd.service After=arc-infosys-ldap-slapd.service BindsTo=arc-infosys-ldap-slapd.service [Service] Type=forking Environment=FORCE_ARC_RUNDIR=/run/arc PIDFile=/run/arc/bdii/bdii-update.pid ExecStartPre=@prefix@/@pkgdatasubdir@/create-bdii-config ExecStart=/bin/sh /run/arc/infosys/bdii-update.cmd ExecStartPost=/bin/sh /run/arc/infosys/bdii-update-post.cmd NotifyAccess=all [Install] WantedBy=multi-user.target nordugrid-arc-6.14.0/src/services/ldap-infosys/PaxHeaders.30264/arc-infosys-ldap-slapd.service.in0000644000000000000000000000013214152153376030631 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.467578483 nordugrid-arc-6.14.0/src/services/ldap-infosys/arc-infosys-ldap-slapd.service.in0000644000175000002070000000070714152153376030622 0ustar00mockbuildmock00000000000000[Unit] Description=ARC LDAP-based information services - SLAPD After=syslog.target network.target network-online.target PartOf=arc-infosys-ldap.service StopWhenUnneeded=true [Service] Type=forking Environment=FORCE_ARC_RUNDIR=/run/arc PIDFile=/run/arc/bdii/db/slapd.pid ExecStartPre=@prefix@/@pkgdatasubdir@/create-slapd-config ExecStart=/bin/sh /run/arc/infosys/bdii-slapd.cmd ExecStartPost=/bin/sh /run/arc/infosys/bdii-slapd-post.cmd NotifyAccess=all nordugrid-arc-6.14.0/src/services/ldap-infosys/PaxHeaders.30264/arc-infosys-ldap.in0000644000000000000000000000013214152153376026071 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.466578468 nordugrid-arc-6.14.0/src/services/ldap-infosys/arc-infosys-ldap.in0000644000175000002070000002210414152153376026055 0ustar00mockbuildmock00000000000000#!/bin/bash # # Init file for the NorduGrid/ARC LDAP based local resource information system # # chkconfig: 2345 76 24 # description: NorduGrid/ARC local resource information system # # config: /etc/sysconfig/nordugrid # config: /etc/sysconfig/arc-infosys-ldap # config: /etc/arc.conf # ###################################################################### ### BEGIN INIT INFO # Provides: arc-infosys-ldap # Required-Start: $remote_fs $syslog # Required-Stop: $remote_fs $syslog # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 # Short-Description: NorduGrid/ARC local resource information system # Description: NorduGrid/ARC LDAP based local resource information system ### END INIT INFO # Helper functions if [ -r /etc/init.d/functions ]; then . /etc/init.d/functions log_success_msg() { echo -n "$@" success "$@" echo } log_warning_msg() { echo -n "$@" warning "$@" echo } log_failure_msg() { echo -n "$@" failure "$@" echo } elif [ -r /lib/lsb/init-functions ]; then . 
/lib/lsb/init-functions else echo "Error: Cannot source either init.d or lsb functions" exit 1 fi RETVAL=0 prog=arc-infosys-ldap RUN=yes # sysconfig files if [ -r /etc/sysconfig/nordugrid ]; then . /etc/sysconfig/nordugrid elif [ -r /etc/default/nordugrid ]; then . /etc/default/nordugrid fi if [ -r /etc/sysconfig/$prog ]; then . /etc/sysconfig/$prog elif [ -r /etc/default/$prog ]; then . /etc/default/$prog fi if [ "x$RUN" != "xyes" ]; then log_warning_msg "$prog disabled, please adjust the configuration to your" log_warning_msg "needs and then set RUN to 'yes' in /etc/default/$prog to enable it." exit 0 fi [ -n "$ARC_LOCATION" ] && export ARC_LOCATION [ -n "$ARC_CONFIG" ] && export ARC_CONFIG ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ ! -d "$ARC_LOCATION" ]; then log_failure_msg "ARC_LOCATION ($ARC_LOCATION) not found" exit 1 fi # Define arc.conf location # ARC_CONFIG if [ "x$ARC_CONFIG" = "x" ]; then if [ -r $ARC_LOCATION/etc/arc.conf ]; then ARC_CONFIG=$ARC_LOCATION/etc/arc.conf elif [ -r /etc/arc.conf ]; then ARC_CONFIG=/etc/arc.conf fi if [ ! -r "$ARC_CONFIG" ]; then log_failure_msg "arc.conf is missing at path: $ARC_CONFIG or no ARC_LOCATION is set" log_failure_msg "If this file is in a non-standard place it can be set" log_failure_msg " with the ARC_CONFIG environment variable" exit 1 fi fi # Define runtime config location for infosys LDAP prefix=@prefix@ runtime_config_dir=/run/arc if [ ! -d "$runtime_config_dir" ]; then mkdir -p "$runtime_config_dir" fi export ARC_RUNCONFIG="$runtime_config_dir/arc-infosys-ldap.conf" unset runtime_config_dir unset prefix # Define arcconfig-parser and dump running configuration arcconfig_parser=${ARC_LOCATION}/@pkglibexecsubdir@/arcconfig-parser ${arcconfig_parser} -c ${ARC_CONFIG} --save -r ${ARC_RUNCONFIG} # Check for infosys block if ! ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys; then log_failure_msg "Missing [infosys] configuration block" exit 1 fi # Check for infosys/ldap block if ! ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/ldap; then log_failure_msg "Missing [infosys/ldap] configuration block" exit 1 fi eval $(${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/ldap -b infosys -b common -e bash) bdii_user=$CONFIG_user if [ -z "$bdii_user" ]; then # Get ldap user from passwd bdii_user=`getent passwd ldap openldap | sed 's/:.*//;q'` if [ -z "$bdii_user" ]; then log_warning_msg "Warning, could not find ldap or openldap user" log_warning_msg "resorting to using the root user" bdii_user=root fi fi bdii_location=${CONFIG_bdii_location:-/usr} bdii_update_cmd=${CONFIG_bdii_update_cmd:-${bdii_location}/sbin/bdii-update} if [ ! -x $bdii_update_cmd ]; then log_failure_msg "Can not find bdii-update command at: $bdii_update_cmd." 
log_failure_msg "Please set bdii_update_cmd in arc.conf" exit 1 fi bdii_run_dir=${CONFIG_bdii_run_dir:-/run/arc/bdii} slapd_pid_file=${bdii_run_dir}/db/slapd.pid update_pid_file=${CONFIG_bdii_update_pid_file:-$bdii_run_dir/bdii-update.pid} if [ `id -u` = 0 ]; then # Debian does not have /run/lock/subsys if [ -d /run/lock/subsys ]; then slapd_lock_file=${slapd_lock_file:-/run/lock/subsys/$prog-slapd} update_lock_file=${update_lock_file:-/run/lock/subsys/$prog-bdii} else slapd_lock_file=${slapd_lock_file:-/run/lock/$prog-slapd} update_lock_file=${update_lock_file:-/run/lock/$prog-bdii} fi else slapd_lock_file=$HOME/$prog-slapd update_lock_file=$HOME/$prog-bdii fi infosys_ldap_run_dir=${CONFIG_infosys_ldap_run_dir:-/run/arc/infosys} BDII_CONF=${CONFIG_bdii_conf:-${infosys_ldap_run_dir}/bdii.conf} start () { if [ -r "${slapd_lock_file}" ] || [ -r "${update_lock_file}" ]; then result=$($0 status) if [ $? -gt 0 ]; then echo ${result} 1>&2 RETVAL=1 else log_success_msg "$prog already started" RETVAL=0 fi return ${RETVAL} fi ${ARC_LOCATION}/@pkgdatasubdir@/create-slapd-config if [ ! $? = 0 ]; then log_failure_msg "Failed to create configuration for $prog slapd" exit 1 fi ${ARC_LOCATION}/@pkgdatasubdir@/create-bdii-config if [ ! $? = 0 ]; then log_failure_msg "Failed to create configuration for $prog bdii" exit 1 fi # path to generated helper scripts is hardcoded (in both systemd unit and sysV scripts) /bin/sh ${infosys_ldap_run_dir}/bdii-slapd.cmd touch ${slapd_lock_file} /bin/sh ${infosys_ldap_run_dir}/bdii-slapd-post.cmd if ! [ -r "${slapd_pid_file}" ]; then log_failure_msg "$prog slapd failed to start" rm -f ${slapd_lock_file} RETVAL=1 return ${RETVAL} fi /bin/sh ${infosys_ldap_run_dir}/bdii-update.cmd touch ${update_lock_file} /bin/sh ${infosys_ldap_run_dir}/bdii-update-post.cmd if [ ! -r ${update_pid_file} ]; then log_failure_msg "$prog bdii failed to start" rm -f ${update_lock_file} RETVAL=1 return ${RETVAL} fi log_success_msg "$prog started" } stop () { if [ ! -r "${slapd_lock_file}" ] && [ ! -r "${update_lock_file}" ]; then log_success_msg "$prog already stopped" RETVAL=0 return ${RETVAL} fi if [ -r "${update_pid_file}" ]; then update_pid=$(cat ${update_pid_file}) ps ${update_pid} >/dev/null 2>&1 if [ ! $? = 0 ]; then log_failure_msg "$prog bdii pid file exists but the process died" RETVAL=1 return ${RETVAL} fi fi killall -u ${bdii_user} -15 arc-nordugrid-bdii-ldif 2>/dev/null if [ -n "${update_pid}" ]; then kill -15 ${update_pid} 2>/dev/null ps ${update_pid} >/dev/null 2>&1 if [ $? = 0 ]; then sleep 2 ps ${update_pid} >/dev/null 2>&1 if [ $? = 0 ]; then kill -9 ${update_pid} 2>/dev/null sleep 2 ps ${update_pid} >/dev/null 2>&1 if [ $? = 0 ]; then log_failure_msg "Could not kill $prog bdii with pid ${update_pid}" RETVAL=1 return ${RETVAL} fi fi fi fi # Clean up rm -f ${infosys_ldap_run_dir}/arc-glue-bdii-ldif rm -f ${update_pid_file} rm -f ${update_lock_file} log_success_msg "$prog bdii stopped" if [ -r "${slapd_pid_file}" ]; then slapd_pid=$(cat ${slapd_pid_file}) ps ${slapd_pid} >/dev/null 2>&1 if [ ! $? = 0 ]; then log_failure_msg "$prog slapd pid file exists but the process died" RETVAL=1 return ${RETVAL} fi fi if [ -n "${slapd_pid}" ]; then kill -15 ${slapd_pid} 2>/dev/null ps ${slapd_pid} >/dev/null 2>&1 if [ $? = 0 ]; then sleep 2 ps ${slapd_pid} >/dev/null 2>&1 if [ $? = 0 ]; then kill -9 ${slapd_pid} 2>/dev/null sleep 2 ps ${slapd_pid} >/dev/null 2>&1 if [ $? 
= 0 ]; then log_failure_msg "Could not stop $prog slapd with pid: $slapd_pid" RETVAL=1 return ${RETVAL} fi fi fi fi rm -f ${slapd_pid_file} rm -f ${slapd_lock_file} log_success_msg "$prog slapd stopped" return ${RETVAL} } status () { if [ ! -r "${slapd_lock_file}" ] && [ ! -r "${update_lock_file}" ]; then log_success_msg "$prog is stopped" RETVAL=3 return ${RETVAL} fi if [ -r ${slapd_pid_file} ]; then ps $(cat ${slapd_pid_file}) >/dev/null 2>&1 if [ ! $? = 0 ]; then log_failure_msg "ARIS slapd pid file exists but the process died" RETVAL=1 fi else log_failure_msg "ARIS slapd process has no pid file" RETVAL=2 fi if [ -r ${update_pid_file} ]; then ps $(cat ${update_pid_file}) >/dev/null 2>&1 if [ ! $? = 0 ]; then log_failure_msg "ARIS bdii pid file exists but the process died" RETVAL=1 fi else log_failure_msg "ARIS BDII process has no pid file" RETVAL=2 fi if [ ${RETVAL} = 0 ]; then log_success_msg "$prog is running" fi return ${RETVAL} } case "$1" in start) start ;; stop) stop ;; restart | force-reload) stop # avoid race sleep 3 start ;; reload) ;; status) status ;; condrestart | try-restart) if [ -r ${slapd_lock_file} ] || [ -r ${update_lock_file} ]; then stop # avoid race sleep 3 start fi ;; *) echo "Usage: $0 {start|stop|restart|force-reload|reload|condrestart|try-restart|status}" exit 1 ;; esac exit $RETVAL nordugrid-arc-6.14.0/src/services/ldap-infosys/PaxHeaders.30264/create-slapd-config.in0000644000000000000000000000013214152153376026525 xustar000000000000000030 mtime=1638455038.437646455 30 atime=1638455038.512647582 30 ctime=1638455100.465578453 nordugrid-arc-6.14.0/src/services/ldap-infosys/create-slapd-config.in0000644000175000002070000003332014152153376026513 0ustar00mockbuildmock00000000000000#!/bin/bash # Define logging functions send_systemd_notify() { # return if no systemd-notify found type systemd-notify >/dev/null 2>&1 || return systemd-notify "$@" } log_failure_msg() { send_systemd_notify --status "Error: $@" echo $@ } # Create slapd config for the NorduGrid/ARC information system ARC_LOCATION=${ARC_LOCATION:-@prefix@} if [ ! -d "$ARC_LOCATION" ]; then log_failure_msg "ARC_LOCATION ($ARC_LOCATION) not found" exit 1 fi # ARC_CONFIG if [ "x$ARC_CONFIG" = "x" ]; then if [ -r $ARC_LOCATION/etc/arc.conf ]; then ARC_CONFIG=$ARC_LOCATION/etc/arc.conf elif [ -r /etc/arc.conf ]; then ARC_CONFIG=/etc/arc.conf fi if [ ! -r "$ARC_CONFIG" ]; then log_failure_msg "arc.conf is missing at path: $ARC_CONFIG or no ARC_LOCATION is set" log_failure_msg "If this file is in a non-standard place it can be set" log_failure_msg " with the ARC_CONFIG environment variable" exit 1 fi fi # Define runtime config location for infosys LDAP prefix=@prefix@ runtime_config_dir=/run/arc if [ ! -d "$runtime_config_dir" ]; then mkdir -p "$runtime_config_dir" fi export ARC_RUNCONFIG="$runtime_config_dir/arc-infosys-ldap.conf" unset runtime_config_dir unset prefix # Define arcconfig-parser and dump running configuration arcconfig_parser=${ARC_LOCATION}/@pkglibexecsubdir@/arcconfig-parser ${arcconfig_parser} -c ${ARC_CONFIG} --save -r ${ARC_RUNCONFIG} # Check for infosys block if ! ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys; then log_failure_msg "Missing [infosys] configuration block" exit 1 fi # Check for infosys/ldap block if ! 
${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/ldap; then log_failure_msg "Missing [infosys/ldap] configuration block" exit 1 fi eval $(${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/ldap -b infosys -b common -e bash) bdii_user=$CONFIG_user if [ -z "$bdii_user" ]; then # Get ldap user from passwd bdii_user=`getent passwd ldap openldap | sed 's/:.*//;q'` if [ -z "$bdii_user" ]; then echo "Warning, could not find ldap or openldap user" echo "resorting to using the root user" bdii_user=root fi fi # These values may be set in arc.conf, otherwise use sensible defaults slapd_loglevel=${CONFIG_slapd_loglevel:-0} slapd_hostnamebind=${CONFIG_slapd_hostnamebind:-"*"} slapd_port=${CONFIG_port:-2135} ldap_schema_dir=${CONFIG_ldap_schema_dir} threads=${CONFIG_threads:-32} timelimit=${CONFIG_timelimit:-2400} bdii_location=${CONFIG_bdii_location:-/usr} infosys_ldap_run_dir=${CONFIG_infosys_ldap_run_dir:-/run/arc/infosys} mkdir -p ${infosys_ldap_run_dir} chown ${bdii_user}: ${infosys_ldap_run_dir} bdii_var_dir=${CONFIG_bdii_var_dir:-/var/lib/arc/bdii} bdii_run_dir=${CONFIG_bdii_run_dir:-/run/arc/bdii} # PIDFile location handling slapd_pid_file=$( readlink -m ${bdii_run_dir}/db/slapd.pid ) # forced pidfile location instead of arc.conf-based (if FORCE_ARC_RUNDIR is set) if [ -n "$FORCE_ARC_RUNDIR" ]; then pid_dir="${FORCE_ARC_RUNDIR}/bdii/db" mkdir -p "$pid_dir" chown -R ${bdii_user}: "$pid_dir" pid_file="$( readlink -m ${pid_dir}/slapd.pid )" if [ "x${slapd_pid_file}" != "x${pid_file}" ]; then custom_pid_file="${slapd_pid_file}" rm -f "$custom_pid_file" slapd_pid_file="${pid_file}" fi unset pid_dir pid_file fi rm -f "${slapd_pid_file}" bdii_db_config=${CONFIG_bdii_db_config:-"/etc/bdii/DB_CONFIG"} bdii_database=${CONFIG_bdii_database:-"hdb"} # Check for existance of core ldap schema coreschema=$(find /etc/openldap /etc/ldap ${ldap_schema_dir} -name core.schema \ -printf "%h/%f\n" 2>/dev/null) if [ "x" = "x$coreschema" ]; then log_failure_msg "Could not find ldap core schema file" exit 1 fi # Check for existance of Glue schemas. glueschemadir=$(find /etc/openldap /etc/ldap ${ldap_schema_dir} -name Glue-CORE.schema \ -printf "%h\n" 2>/dev/null) if [ "x" = "x$glueschemadir" ]; then log_failure_msg "Could not find glue schema directory under /etc" exit 1 fi # Check for existence of a system ldap, this command will be used by bdii slapd_cmd= if [ "x" = "x$CONFIG_slapd" ]; then O_IFS=$IFS IFS=: for dir in $PATH; do if [ -x "$dir/slapd" ]; then slapd_cmd="$dir/slapd" break fi done IFS=$O_IFS else slapd_cmd=$CONFIG_slapd fi if [ -z "$slapd_cmd" ] || [ ! 
-x "$slapd_cmd" ]; then log_failure_msg "Could not find ldap server binary, usually /usr/sbin/slapd" exit 1 fi find_ldap_database_module() { # First try to find a separate module ldapdir=$(find /usr/lib64/openldap /usr/lib/openldap /usr/lib64/ldap \ /usr/lib/ldap -name "back_${database}.la" -printf ":%h/" 2>/dev/null) if [ -n "$ldapdir" ]; then # Separate module found ldapmodule="moduleload back_${database}" grep -E -q "${ldapdir}(:|$)" <<< ${ldapdirs} || \ ldapdirs=${ldapdirs}${ldapdir} else # Separate module not found - check for preloaded module ldapmodule= if [ $(grep -Ec "${database}_db_init|${database}_back_db_init" "$slapd_cmd") -eq 0 ]; then # Module not found database= fi fi } find_ldap_overlay_module() { # Try to find a separate module ldapdir=$(find /usr/lib64/openldap /usr/lib/openldap /usr/lib64/ldap \ /usr/lib/ldap -name "${overlay}.la" -printf ":%h/" 2>/dev/null) if [ -n "$ldapdir" ]; then # Separate module found ldapmodule="moduleload ${overlay}" grep -E -q "${ldapdir}(:|$)" <<< ${ldapdirs} || \ ldapdirs=${ldapdirs}${ldapdir} else # Module not found ldapmodule= overlay= fi } ldapdirs= database=${bdii_database} find_ldap_database_module if [ -z "${database}" ]; then log_failure_msg "Could not find ldap ${bdii_database} database module" exit 1 fi moduleload_bdii="${ldapmodule}" database=relay find_ldap_database_module if [ -z "${database}" ]; then echo "Could not find ldap relay database module, top-bdii integration is disabled." fi moduleload_relay="${ldapmodule}" overlay=rwm find_ldap_overlay_module if [ -z "$overlay" ]; then echo "Could not find ldap rwm overlay module, top-bdii integration is disabled." fi moduleload_rwm="${ldapmodule}" if ${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/index; then database=shell find_ldap_database_module if [ -z "${database}" ]; then log_failure_msg "Could not find ldap shell database module" exit 1 fi moduleload_shell="${ldapmodule}" else moduleload_shell= fi ldapdirs=`sed 's/^://' <<< $ldapdirs` #ldapdirs=`sed 's/:$//' <<< $ldapdirs` if [ -n "$ldapdirs" ]; then modulepath="modulepath $ldapdirs" else modulepath= fi for i in "/etc/bdii/BDII.schema" "${bdii_location}/etc/BDII.schema"; do if [ -r $i ]; then bdii_schema="include $i" break fi done bdii_slapd_conf=${infosys_ldap_run_dir}/bdii-slapd.conf rm -f $bdii_slapd_conf # Put SLAPD start helpers to known directory helpers_dir=$infosys_ldap_run_dir if [ -n "$FORCE_ARC_RUNDIR" ]; then helpers_dir="${FORCE_ARC_RUNDIR}/infosys" mkdir -p "${helpers_dir}" fi bdii_slapd_cmd=${helpers_dir}/bdii-slapd.cmd rm -f $bdii_slapd_cmd bdii_slapd_post_cmd=${helpers_dir}/bdii-slapd-post.cmd rm -f $bdii_slapd_post_cmd # Ensure the configuration file is not world-readable, # as it contains the slapd database password (umask 077; > $bdii_slapd_conf) pass=`/usr/bin/mkpasswd -s 0 2> /dev/null` || pass=$RANDOM$RANDOM cat <<-EOF >> $bdii_slapd_conf # This file was automatically generated by $0." # Do not modify. 
include ${coreschema} ${bdii_schema} #glue schemas include ${glueschemadir}/Glue-CORE.schema include ${glueschemadir}/Glue-CE.schema include ${glueschemadir}/Glue-CESEBind.schema include ${glueschemadir}/Glue-MDS.schema #glue2 schema include ${glueschemadir}/GLUE20.schema #nordugrid specific schemas include ${ARC_LOCATION}/@pkgdatasubdir@/ldap-schema/nordugrid.schema $modulepath $moduleload_bdii $moduleload_relay $moduleload_rwm $moduleload_shell allow bind_v2 pidfile $slapd_pid_file argsfile $bdii_run_dir/db/slapd.args loglevel $slapd_loglevel threads $threads idletimeout 120 sizelimit unlimited timelimit $timelimit EOF if [ -n "${moduleload_rwm}" ]; then admindomain=$(${arcconfig_parser} --load -r ${ARC_RUNCONFIG} -b infosys/glue2 -o admindomain_name) admindomain="urn:ad:${admindomain:-UNDEFINEDVALUE}" cat <<-EOF >> $bdii_slapd_conf # Relay to allow top-bdii to parse info as the CE was a site-bdii database relay suffix "GLUE2GroupID=resource,o=glue" overlay rwm rwm-rewriteEngine on rwm-rewriteContext default rwm-rewriteRule "GLUE2GroupID=resource,o=glue" "GLUE2GroupID=services,o=glue" ":" rwm-rewriteContext searchFilter rwm-rewriteContext searchEntryDN rwm-rewriteContext searchAttrDN rwm-rewriteContext matchedDN database relay suffix "GLUE2GroupID=resource,GLUE2DomainID=$admindomain,o=glue" overlay rwm rwm-rewriteEngine on rwm-rewriteContext default rwm-rewriteRule "GLUE2GroupID=resource,GLUE2DomainID=$admindomain,o=glue" "GLUE2GroupID=services,o=glue" ":" rwm-rewriteContext searchFilter rwm-rewriteContext searchEntryDN rwm-rewriteRule "(.*[^ ],)?[ ]?GLUE2GroupID=services,o=glue" "\$1GLUE2GroupID=services,GLUE2DomainID=$admindomain,o=glue" ":" rwm-rewriteContext searchAttrDN rwm-rewriteContext matchedDN database relay suffix "GLUE2GroupID=services,GLUE2DomainID=$admindomain,o=glue" overlay rwm suffixmassage "GLUE2GroupID=services,o=glue" EOF fi cat <<-EOF >> $bdii_slapd_conf # ${bdii_database} database definitions for o=grid database ${bdii_database} cachesize 150000 dbnosync suffix "o=grid" checkpoint 131072 60 rootdn "o=grid" rootpw $pass directory $bdii_var_dir/db/arc # ${bdii_database} database definitions for o=glue database ${bdii_database} cachesize 150000 dbnosync suffix "o=glue" checkpoint 131072 60 rootdn "o=glue" rootpw $pass directory $bdii_var_dir/db/glue2 # ${bdii_database} database definitions for o=infosys database ${bdii_database} cachesize 60 dbnosync suffix "o=infosys" checkpoint 131072 60 rootdn "o=infosys" rootpw $pass directory $bdii_var_dir/db/stats EOF chown $bdii_user: $bdii_slapd_conf [ -x /sbin/restorecon ] && /sbin/restorecon $bdii_slapd_conf # Write slapd starting command if [ "x$slapd_hostnamebind" = "x*" ]; then echo exec ${slapd_cmd} -f ${bdii_slapd_conf} -h \"ldap://${slapd_hostnamebind}:${slapd_port}\" -u ${bdii_user} > ${bdii_slapd_cmd} else echo exec ${slapd_cmd} -f ${bdii_slapd_conf} -h \"ldap://localhost:${slapd_port} ldap://${slapd_hostnamebind}:${slapd_port}\" -u ${bdii_user} > ${bdii_slapd_cmd} fi chmod +x ${bdii_slapd_cmd} # Write post-exec script to check slapd is up and running cat <<-EOF > ${bdii_slapd_post_cmd} iterlimit=30 while [ \$iterlimit -ge 0 ] && ! 
[ -r "${slapd_pid_file}" ]; do sleep 1 iterlimit=\$(expr \$iterlimit - 1) done EOF # copy forced pidfile to custom arc.conf pidfile (if forced pid was requested) if [ -n "${custom_pid_file}" ]; then echo "mkdir -p \"${custom_pid_file%/*}\"" >> ${bdii_slapd_post_cmd} echo "cp -a \"${slapd_pid_file}\" \"${custom_pid_file}\"" >> ${bdii_slapd_post_cmd} fi # Initialize the database directories mkdir -p $bdii_run_dir/db mkdir -p $bdii_run_dir/archive chown $bdii_user: $bdii_run_dir chown $bdii_user: $bdii_run_dir/db chown $bdii_user: $bdii_run_dir/archive [ -x /sbin/restorecon ] && /sbin/restorecon -R $bdii_run_dir/db [ -x /sbin/restorecon ] && /sbin/restorecon -R $bdii_run_dir/archive mkdir -p $bdii_var_dir/archive mkdir -p $bdii_var_dir/db/arc mkdir -p $bdii_var_dir/db/glue2 mkdir -p $bdii_var_dir/db/stats rm -f $bdii_var_dir/db/arc/* 2>/dev/null rm -f $bdii_var_dir/db/glue2/* 2>/dev/null rm -f $bdii_var_dir/db/stats/* 2>/dev/null chown $bdii_user: $bdii_var_dir/db chown $bdii_user: $bdii_var_dir/archive chown $bdii_user: $bdii_var_dir/db/arc chown $bdii_user: $bdii_var_dir/db/glue2 chown $bdii_user: $bdii_var_dir/db/stats [ -x /sbin/restorecon ] && /sbin/restorecon -R $bdii_var_dir/db [ -x /sbin/restorecon ] && /sbin/restorecon -R $bdii_var_dir/archive # Workaround for BDII DB_CONFIG cachesize bigger than actual memory set_cachesize_line=`egrep '^[[:space:]]*'set_cachesize ${bdii_db_config}` if [ -n "${set_cachesize_line}" ]; then if [ -e /proc/meminfo ]; then memsize=$(grep MemFree /proc/meminfo | awk '{printf "%.0f", $2 * 1024}') default_set_cachesize=$(echo ${set_cachesize_line} | awk '{print $2 * 1073741824 + $3}') half_memsize=$(( ${memsize} / 2 )) if [ $default_set_cachesize -ge $half_memsize ]; then echo "The system does not fulfill BDII optimal memory requirements" echo "ARC will try to fix it anyway..." new_set_cachesize=$(( $memsize / 16 )) TEMPBDIIDBCONFIG=`mktemp -q /tmp/DB_CONFIG.XXXXXX` chmod 644 $TEMPBDIIDBCONFIG sed "s/^set_cachesize.*$/set_cachesize 0 $new_set_cachesize 1/" ${bdii_db_config} > $TEMPBDIIDBCONFIG bdii_db_config=${TEMPBDIIDBCONFIG} echo "DB_CONFIG set_cachesize is now: 0 $new_set_cachesize 1" fi else echo "/proc/meminfo does not exist. 
Cannot apply BDII memory workaround" echo "slapd might fail to start" fi fi # End of BDII set_cachesize workaround # copy BDII DB_CONFIG in ARC locations cp ${bdii_db_config} ${bdii_var_dir}/db/arc/DB_CONFIG cp ${bdii_db_config} ${bdii_var_dir}/db/glue2/DB_CONFIG cp ${bdii_db_config} ${bdii_var_dir}/db/stats/DB_CONFIG chown $bdii_user: ${bdii_var_dir}/db/arc/DB_CONFIG chown $bdii_user: ${bdii_var_dir}/db/glue2/DB_CONFIG chown $bdii_user: ${bdii_var_dir}/db/stats/DB_CONFIG # if the BDII low memory workaround has been applied, remove the temp file if [ -r $TEMPBDIIDBCONFIG ]; then rm -f $TEMPBDIIDBCONFIG fi nordugrid-arc-6.14.0/src/PaxHeaders.30264/external0000644000000000000000000000013214152153467017701 xustar000000000000000030 mtime=1638455095.779508043 30 atime=1638455103.999631554 30 ctime=1638455095.779508043 nordugrid-arc-6.14.0/src/external/0000755000175000002070000000000014152153467017743 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/external/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376022011 xustar000000000000000030 mtime=1638455038.344645058 30 atime=1638455038.475647026 30 ctime=1638455095.776507998 nordugrid-arc-6.14.0/src/external/Makefile.am0000644000175000002070000000004514152153376021775 0ustar00mockbuildmock00000000000000SUBDIRS = cJSON DIST_SUBDIRS = cJSON nordugrid-arc-6.14.0/src/external/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153422022012 xustar000000000000000030 mtime=1638455058.134942416 30 atime=1638455088.013391354 30 ctime=1638455095.775507983 nordugrid-arc-6.14.0/src/external/Makefile.in0000644000175000002070000006104114152153422022001 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/external DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive 
clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = 
@AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = 
@OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = 
@prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ SUBDIRS = cJSON DIST_SUBDIRS = cJSON all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/external/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/external/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/external/PaxHeaders.30264/cJSON0000644000000000000000000000013214152153467020615 xustar000000000000000030 mtime=1638455095.802508389 30 atime=1638455103.999631554 30 ctime=1638455095.802508389 nordugrid-arc-6.14.0/src/external/cJSON/0000755000175000002070000000000014152153467020657 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/external/cJSON/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376022725 xustar000000000000000030 mtime=1638455038.344645058 30 atime=1638455038.476647041 30 ctime=1638455095.798508328 nordugrid-arc-6.14.0/src/external/cJSON/Makefile.am0000644000175000002070000000014114152153376022706 0ustar00mockbuildmock00000000000000noinst_LTLIBRARIES = libcjson.la libcjson_la_SOURCES = cJSON.c cJSON.h libcjson_la_LIBADD = -lm nordugrid-arc-6.14.0/src/external/cJSON/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153422022726 xustar000000000000000030 mtime=1638455058.193943302 30 atime=1638455088.026391549 30 ctime=1638455095.797508313 nordugrid-arc-6.14.0/src/external/cJSON/Makefile.in0000644000175000002070000005727714152153422022735 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
@SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/external/cJSON DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = LTLIBRARIES = $(noinst_LTLIBRARIES) libcjson_la_DEPENDENCIES = am_libcjson_la_OBJECTS = cJSON.lo libcjson_la_OBJECTS = $(am_libcjson_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) 
$(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(libcjson_la_SOURCES) DIST_SOURCES = $(libcjson_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ 
BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ 
OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = 
@psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ noinst_LTLIBRARIES = libcjson.la libcjson_la_SOURCES = cJSON.c cJSON.h libcjson_la_LIBADD = -lm all: all-am .SUFFIXES: .SUFFIXES: .c .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/external/cJSON/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/external/cJSON/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstLTLIBRARIES: -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) @list='$(noinst_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } libcjson.la: $(libcjson_la_OBJECTS) $(libcjson_la_DEPENDENCIES) $(EXTRA_libcjson_la_DEPENDENCIES) $(AM_V_CCLD)$(LINK) $(libcjson_la_OBJECTS) $(libcjson_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/cJSON.Plo@am__quote@ .c.o: @am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c $< .c.obj: @am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c `$(CYGPATH_W) '$<'` .c.lo: @am__fastdepCC_TRUE@ $(AM_V_CC)$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ 
$(AM_V_CC@am__nodep@)$(LTCOMPILE) -c -o $@ $< mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-noinstLTLIBRARIES cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/external/cJSON/PaxHeaders.30264/cJSON.c0000644000000000000000000000013214152153376021751 xustar000000000000000030 mtime=1638455038.345645073 30 atime=1638455038.476647041 30 ctime=1638455095.801508373 nordugrid-arc-6.14.0/src/external/cJSON/cJSON.c0000644000175000002070000005517114152153376021747 0ustar00mockbuildmock00000000000000/* Copyright (c) 2009 Dave Gamble Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /* cJSON */ /* JSON parser in C. 
*/ #include <string.h> #include <stdio.h> #include <math.h> #include <stdlib.h> #include <float.h> #include <limits.h> #include <ctype.h> #include "cJSON.h" static const char *ep; const char *cJSON_GetErrorPtr(void) {return ep;} static int cJSON_strcasecmp(const char *s1,const char *s2) { if (!s1) return (s1==s2)?0:1;if (!s2) return 1; for(; tolower(*s1) == tolower(*s2); ++s1, ++s2) if(*s1 == 0) return 0; return tolower(*(const unsigned char *)s1) - tolower(*(const unsigned char *)s2); } static void *(*cJSON_malloc)(size_t sz) = malloc; static void (*cJSON_free)(void *ptr) = free; static char* cJSON_strdup(const char* str) { size_t len; char* copy; len = strlen(str) + 1; if (!(copy = (char*)cJSON_malloc(len))) return 0; memcpy(copy,str,len); return copy; } void cJSON_InitHooks(cJSON_Hooks* hooks) { if (!hooks) { /* Reset hooks */ cJSON_malloc = malloc; cJSON_free = free; return; } cJSON_malloc = (hooks->malloc_fn)?hooks->malloc_fn:malloc; cJSON_free = (hooks->free_fn)?hooks->free_fn:free; } /* Internal constructor. */ static cJSON *cJSON_New_Item(void) { cJSON* node = (cJSON*)cJSON_malloc(sizeof(cJSON)); if (node) memset(node,0,sizeof(cJSON)); return node; } /* Delete a cJSON structure. */ void cJSON_Delete(cJSON *c) { cJSON *next; while (c) { next=c->next; if (!(c->type&cJSON_IsReference) && c->child) cJSON_Delete(c->child); if (!(c->type&cJSON_IsReference) && c->valuestring) cJSON_free(c->valuestring); if (c->string) cJSON_free(c->string); cJSON_free(c); c=next; } } /* Parse the input text to generate a number, and populate the result into item. */ static const char *parse_number(cJSON *item,const char *num) { double n=0,sign=1,scale=0;int subscale=0,signsubscale=1; if (*num=='-') sign=-1,num++; /* Has sign? */ if (*num=='0') num++; /* is zero */ if (*num>='1' && *num<='9') do n=(n*10.0)+(*num++ -'0'); while (*num>='0' && *num<='9'); /* Number? */ if (*num=='.' && num[1]>='0' && num[1]<='9') {num++; do n=(n*10.0)+(*num++ -'0'),scale--; while (*num>='0' && *num<='9');} /* Fractional part? */ if (*num=='e' || *num=='E') /* Exponent? */ { num++;if (*num=='+') num++; else if (*num=='-') signsubscale=-1,num++; /* With sign? */ while (*num>='0' && *num<='9') subscale=(subscale*10)+(*num++ - '0'); /* Number? */ } n=sign*n*pow(10.0,(scale+subscale*signsubscale)); /* number = +/- number.fraction * 10^+/- exponent */ item->valuedouble=n; item->valueint=(int)n; item->type=cJSON_Number; return num; } /* Render the number nicely from the given item into a string. */ static char *print_number(cJSON *item) { char *str; double d=item->valuedouble; if (fabs(((double)item->valueint)-d)<=DBL_EPSILON && d<=INT_MAX && d>=INT_MIN) { str=(char*)cJSON_malloc(21); /* 2^64+1 can be represented in 21 chars. */ if (str) sprintf(str,"%d",item->valueint); } else { str=(char*)cJSON_malloc(64); /* This is a nice tradeoff. 
*/ if (str) { if (fabs(floor(d)-d)<=DBL_EPSILON && fabs(d)<1.0e60)sprintf(str,"%.0f",d); else if (fabs(d)<1.0e-6 || fabs(d)>1.0e9) sprintf(str,"%e",d); else sprintf(str,"%f",d); } } return str; } static unsigned parse_hex4(const char *str) { unsigned h=0; if (*str>='0' && *str<='9') h+=(*str)-'0'; else if (*str>='A' && *str<='F') h+=10+(*str)-'A'; else if (*str>='a' && *str<='f') h+=10+(*str)-'a'; else return 0; h=h<<4;str++; if (*str>='0' && *str<='9') h+=(*str)-'0'; else if (*str>='A' && *str<='F') h+=10+(*str)-'A'; else if (*str>='a' && *str<='f') h+=10+(*str)-'a'; else return 0; h=h<<4;str++; if (*str>='0' && *str<='9') h+=(*str)-'0'; else if (*str>='A' && *str<='F') h+=10+(*str)-'A'; else if (*str>='a' && *str<='f') h+=10+(*str)-'a'; else return 0; h=h<<4;str++; if (*str>='0' && *str<='9') h+=(*str)-'0'; else if (*str>='A' && *str<='F') h+=10+(*str)-'A'; else if (*str>='a' && *str<='f') h+=10+(*str)-'a'; else return 0; return h; } /* Parse the input text into an unescaped cstring, and populate item. */ static const unsigned char firstByteMark[7] = { 0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC }; static const char *parse_string(cJSON *item,const char *str) { const char *ptr=str+1;char *ptr2;char *out;int len=0;unsigned uc,uc2; if (*str!='\"') {ep=str;return 0;} /* not a string! */ while (*ptr!='\"' && *ptr && ++len) if (*ptr++ == '\\') ptr++; /* Skip escaped quotes. */ out=(char*)cJSON_malloc(len+1); /* This is how long we need for the string, roughly. */ if (!out) return 0; ptr=str+1;ptr2=out; while (*ptr!='\"' && *ptr) { if (*ptr!='\\') *ptr2++=*ptr++; else { ptr++; switch (*ptr) { case 'b': *ptr2++='\b'; break; case 'f': *ptr2++='\f'; break; case 'n': *ptr2++='\n'; break; case 'r': *ptr2++='\r'; break; case 't': *ptr2++='\t'; break; case 'u': /* transcode utf16 to utf8. */ uc=parse_hex4(ptr+1);ptr+=4; /* get the unicode char. */ if ((uc>=0xDC00 && uc<=0xDFFF) || uc==0) break; /* check for invalid. */ if (uc>=0xD800 && uc<=0xDBFF) /* UTF16 surrogate pairs. */ { if (ptr[1]!='\\' || ptr[2]!='u') break; /* missing second-half of surrogate. */ uc2=parse_hex4(ptr+3);ptr+=6; if (uc2<0xDC00 || uc2>0xDFFF) break; /* invalid second-half of surrogate. */ uc=0x10000 + (((uc&0x3FF)<<10) | (uc2&0x3FF)); } len=4;if (uc<0x80) len=1;else if (uc<0x800) len=2;else if (uc<0x10000) len=3; ptr2+=len; switch (len) { case 4: *--ptr2 =((uc | 0x80) & 0xBF); uc >>= 6; case 3: *--ptr2 =((uc | 0x80) & 0xBF); uc >>= 6; case 2: *--ptr2 =((uc | 0x80) & 0xBF); uc >>= 6; case 1: *--ptr2 =(uc | firstByteMark[len]); } ptr2+=len; break; default: *ptr2++=*ptr; break; } ptr++; } } *ptr2=0; if (*ptr=='\"') ptr++; item->valuestring=out; item->type=cJSON_String; return ptr; } /* Render the cstring provided to an escaped version that can be printed. 
*/ static char *print_string_ptr(const char *str) { const char *ptr;char *ptr2,*out;int len=0;unsigned char token; if (!str) return cJSON_strdup(""); ptr=str;while ((token=*ptr) && ++len) {if (strchr("\"\\\b\f\n\r\t",token)) len++; else if (token<32) len+=5;ptr++;} out=(char*)cJSON_malloc(len+3); if (!out) return 0; ptr2=out;ptr=str; *ptr2++='\"'; while (*ptr) { if ((unsigned char)*ptr>31 && *ptr!='\"' && *ptr!='\\') *ptr2++=*ptr++; else { *ptr2++='\\'; switch (token=*ptr++) { case '\\': *ptr2++='\\'; break; case '\"': *ptr2++='\"'; break; case '\b': *ptr2++='b'; break; case '\f': *ptr2++='f'; break; case '\n': *ptr2++='n'; break; case '\r': *ptr2++='r'; break; case '\t': *ptr2++='t'; break; default: sprintf(ptr2,"u%04x",token);ptr2+=5; break; /* escape and print */ } } } *ptr2++='\"';*ptr2++=0; return out; } /* Invote print_string_ptr (which is useful) on an item. */ static char *print_string(cJSON *item) {return print_string_ptr(item->valuestring);} /* Predeclare these prototypes. */ static const char *parse_value(cJSON *item,const char *value); static char *print_value(cJSON *item,int depth,int fmt); static const char *parse_array(cJSON *item,const char *value); static char *print_array(cJSON *item,int depth,int fmt); static const char *parse_object(cJSON *item,const char *value); static char *print_object(cJSON *item,int depth,int fmt); /* Utility to jump whitespace and cr/lf */ static const char *skip(const char *in) {while (in && *in && (unsigned char)*in<=32) in++; return in;} /* Parse an object - create a new root, and populate. */ cJSON *cJSON_ParseWithOpts(const char *value,const char **return_parse_end,int require_null_terminated) { const char *end=0; cJSON *c=cJSON_New_Item(); ep=0; if (!c) return 0; /* memory fail */ end=parse_value(c,skip(value)); if (!end) {cJSON_Delete(c);return 0;} /* parse failure. ep is set. */ /* if we require null-terminated JSON without appended garbage, skip and then check for a null terminator */ if (require_null_terminated) {end=skip(end);if (*end) {cJSON_Delete(c);ep=end;return 0;}} if (return_parse_end) *return_parse_end=end; return c; } /* Default options for cJSON_Parse */ cJSON *cJSON_Parse(const char *value) {return cJSON_ParseWithOpts(value,0,0);} /* Render a cJSON item/entity/structure to text. */ char *cJSON_Print(cJSON *item) {return print_value(item,0,1);} char *cJSON_PrintUnformatted(cJSON *item) {return print_value(item,0,0);} /* Parser core - when encountering text, process appropriately. */ static const char *parse_value(cJSON *item,const char *value) { if (!value) return 0; /* Fail on null. */ if (!strncmp(value,"null",4)) { item->type=cJSON_NULL; return value+4; } if (!strncmp(value,"false",5)) { item->type=cJSON_False; return value+5; } if (!strncmp(value,"true",4)) { item->type=cJSON_True; item->valueint=1; return value+4; } if (*value=='\"') { return parse_string(item,value); } if (*value=='-' || (*value>='0' && *value<='9')) { return parse_number(item,value); } if (*value=='[') { return parse_array(item,value); } if (*value=='{') { return parse_object(item,value); } ep=value;return 0; /* failure. */ } /* Render a value to text. 
*/ static char *print_value(cJSON *item,int depth,int fmt) { char *out=0; if (!item) return 0; switch ((item->type)&255) { case cJSON_NULL: out=cJSON_strdup("null"); break; case cJSON_False: out=cJSON_strdup("false");break; case cJSON_True: out=cJSON_strdup("true"); break; case cJSON_Number: out=print_number(item);break; case cJSON_String: out=print_string(item);break; case cJSON_Array: out=print_array(item,depth,fmt);break; case cJSON_Object: out=print_object(item,depth,fmt);break; } return out; } /* Build an array from input text. */ static const char *parse_array(cJSON *item,const char *value) { cJSON *child; if (*value!='[') {ep=value;return 0;} /* not an array! */ item->type=cJSON_Array; value=skip(value+1); if (*value==']') return value+1; /* empty array. */ item->child=child=cJSON_New_Item(); if (!item->child) return 0; /* memory fail */ value=skip(parse_value(child,skip(value))); /* skip any spacing, get the value. */ if (!value) return 0; while (*value==',') { cJSON *new_item; if (!(new_item=cJSON_New_Item())) return 0; /* memory fail */ child->next=new_item;new_item->prev=child;child=new_item; value=skip(parse_value(child,skip(value+1))); if (!value) return 0; /* memory fail */ } if (*value==']') return value+1; /* end of array */ ep=value;return 0; /* malformed. */ } /* Render an array to text */ static char *print_array(cJSON *item,int depth,int fmt) { char **entries; char *out=0,*ptr,*ret;int len=5; cJSON *child=item->child; int numentries=0,i=0,fail=0; /* How many entries in the array? */ while (child) numentries++,child=child->next; /* Explicitly handle numentries==0 */ if (!numentries) { out=(char*)cJSON_malloc(3); if (out) strcpy(out,"[]"); return out; } /* Allocate an array to hold the values for each */ entries=(char**)cJSON_malloc(numentries*sizeof(char*)); if (!entries) return 0; memset(entries,0,numentries*sizeof(char*)); /* Retrieve all the results: */ child=item->child; while (child && !fail) { ret=print_value(child,depth+1,fmt); entries[i++]=ret; if (ret) len+=strlen(ret)+2+(fmt?1:0); else fail=1; child=child->next; } /* If we didn't fail, try to malloc the output string */ if (!fail) out=(char*)cJSON_malloc(len); /* If that fails, we fail. */ if (!out) fail=1; /* Handle failure. */ if (fail) { for (i=0;itype=cJSON_Object; value=skip(value+1); if (*value=='}') return value+1; /* empty array. */ item->child=child=cJSON_New_Item(); if (!item->child) return 0; value=skip(parse_string(child,skip(value))); if (!value) return 0; child->string=child->valuestring;child->valuestring=0; if (*value!=':') {ep=value;return 0;} /* fail! */ value=skip(parse_value(child,skip(value+1))); /* skip any spacing, get the value. */ if (!value) return 0; while (*value==',') { cJSON *new_item; if (!(new_item=cJSON_New_Item())) return 0; /* memory fail */ child->next=new_item;new_item->prev=child;child=new_item; value=skip(parse_string(child,skip(value+1))); if (!value) return 0; child->string=child->valuestring;child->valuestring=0; if (*value!=':') {ep=value;return 0;} /* fail! */ value=skip(parse_value(child,skip(value+1))); /* skip any spacing, get the value. */ if (!value) return 0; } if (*value=='}') return value+1; /* end of array */ ep=value;return 0; /* malformed. */ } /* Render an object to text. */ static char *print_object(cJSON *item,int depth,int fmt) { char **entries=0,**names=0; char *out=0,*ptr,*ret,*str;int len=7,i=0,j; cJSON *child=item->child; int numentries=0,fail=0; /* Count the number of entries. 
*/ while (child) numentries++,child=child->next; /* Explicitly handle empty object case */ if (!numentries) { out=(char*)cJSON_malloc(fmt?depth+4:3); if (!out) return 0; ptr=out;*ptr++='{'; if (fmt) {*ptr++='\n';for (i=0;ichild;depth++;if (fmt) len+=depth; while (child) { names[i]=str=print_string_ptr(child->string); entries[i++]=ret=print_value(child,depth,fmt); if (str && ret) len+=strlen(ret)+strlen(str)+2+(fmt?2+depth:0); else fail=1; child=child->next; } /* Try to allocate the output string */ if (!fail) out=(char*)cJSON_malloc(len); if (!out) fail=1; /* Handle failure */ if (fail) { for (i=0;ichild;int i=0;while(c)i++,c=c->next;return i;} cJSON *cJSON_GetArrayItem(cJSON *array,int item) {cJSON *c=array->child; while (c && item>0) item--,c=c->next; return c;} cJSON *cJSON_GetObjectItem(cJSON *object,const char *string) {cJSON *c=object->child; while (c && cJSON_strcasecmp(c->string,string)) c=c->next; return c;} /* Utility for array list handling. */ static void suffix_object(cJSON *prev,cJSON *item) {prev->next=item;item->prev=prev;} /* Utility for handling references. */ static cJSON *create_reference(cJSON *item) {cJSON *ref=cJSON_New_Item();if (!ref) return 0;memcpy(ref,item,sizeof(cJSON));ref->string=0;ref->type|=cJSON_IsReference;ref->next=ref->prev=0;return ref;} /* Add item to array/object. */ void cJSON_AddItemToArray(cJSON *array, cJSON *item) {cJSON *c=array->child;if (!item) return; if (!c) {array->child=item;} else {while (c && c->next) c=c->next; suffix_object(c,item);}} void cJSON_AddItemToObject(cJSON *object,const char *string,cJSON *item) {if (!item) return; if (item->string) cJSON_free(item->string);item->string=cJSON_strdup(string);cJSON_AddItemToArray(object,item);} void cJSON_AddItemReferenceToArray(cJSON *array, cJSON *item) {cJSON_AddItemToArray(array,create_reference(item));} void cJSON_AddItemReferenceToObject(cJSON *object,const char *string,cJSON *item) {cJSON_AddItemToObject(object,string,create_reference(item));} cJSON *cJSON_DetachItemFromArray(cJSON *array,int which) {cJSON *c=array->child;while (c && which>0) c=c->next,which--;if (!c) return 0; if (c->prev) c->prev->next=c->next;if (c->next) c->next->prev=c->prev;if (c==array->child) array->child=c->next;c->prev=c->next=0;return c;} void cJSON_DeleteItemFromArray(cJSON *array,int which) {cJSON_Delete(cJSON_DetachItemFromArray(array,which));} cJSON *cJSON_DetachItemFromObject(cJSON *object,const char *string) {int i=0;cJSON *c=object->child;while (c && cJSON_strcasecmp(c->string,string)) i++,c=c->next;if (c) return cJSON_DetachItemFromArray(object,i);return 0;} void cJSON_DeleteItemFromObject(cJSON *object,const char *string) {cJSON_Delete(cJSON_DetachItemFromObject(object,string));} /* Replace array/object items with new ones. 
*/ void cJSON_ReplaceItemInArray(cJSON *array,int which,cJSON *newitem) {cJSON *c=array->child;while (c && which>0) c=c->next,which--;if (!c) return; newitem->next=c->next;newitem->prev=c->prev;if (newitem->next) newitem->next->prev=newitem; if (c==array->child) array->child=newitem; else newitem->prev->next=newitem;c->next=c->prev=0;cJSON_Delete(c);} void cJSON_ReplaceItemInObject(cJSON *object,const char *string,cJSON *newitem){int i=0;cJSON *c=object->child;while(c && cJSON_strcasecmp(c->string,string))i++,c=c->next;if(c){newitem->string=cJSON_strdup(string);cJSON_ReplaceItemInArray(object,i,newitem);}} /* Create basic types: */ cJSON *cJSON_CreateNull(void) {cJSON *item=cJSON_New_Item();if(item)item->type=cJSON_NULL;return item;} cJSON *cJSON_CreateTrue(void) {cJSON *item=cJSON_New_Item();if(item)item->type=cJSON_True;return item;} cJSON *cJSON_CreateFalse(void) {cJSON *item=cJSON_New_Item();if(item)item->type=cJSON_False;return item;} cJSON *cJSON_CreateBool(int b) {cJSON *item=cJSON_New_Item();if(item)item->type=b?cJSON_True:cJSON_False;return item;} cJSON *cJSON_CreateNumber(double num) {cJSON *item=cJSON_New_Item();if(item){item->type=cJSON_Number;item->valuedouble=num;item->valueint=(int)num;}return item;} cJSON *cJSON_CreateString(const char *string) {cJSON *item=cJSON_New_Item();if(item){item->type=cJSON_String;item->valuestring=cJSON_strdup(string);}return item;} cJSON *cJSON_CreateArray(void) {cJSON *item=cJSON_New_Item();if(item)item->type=cJSON_Array;return item;} cJSON *cJSON_CreateObject(void) {cJSON *item=cJSON_New_Item();if(item)item->type=cJSON_Object;return item;} /* Create Arrays: */ cJSON *cJSON_CreateIntArray(const int *numbers,int count) {int i;cJSON *n=0,*p=0,*a=cJSON_CreateArray();for(i=0;a && ichild=n;else suffix_object(p,n);p=n;}return a;} cJSON *cJSON_CreateFloatArray(const float *numbers,int count) {int i;cJSON *n=0,*p=0,*a=cJSON_CreateArray();for(i=0;a && ichild=n;else suffix_object(p,n);p=n;}return a;} cJSON *cJSON_CreateDoubleArray(const double *numbers,int count) {int i;cJSON *n=0,*p=0,*a=cJSON_CreateArray();for(i=0;a && ichild=n;else suffix_object(p,n);p=n;}return a;} cJSON *cJSON_CreateStringArray(const char **strings,int count) {int i;cJSON *n=0,*p=0,*a=cJSON_CreateArray();for(i=0;a && ichild=n;else suffix_object(p,n);p=n;}return a;} /* Duplication */ cJSON *cJSON_Duplicate(cJSON *item,int recurse) { cJSON *newitem,*cptr,*nptr=0,*newchild; /* Bail on bad ptr */ if (!item) return 0; /* Create new item */ newitem=cJSON_New_Item(); if (!newitem) return 0; /* Copy over all vars */ newitem->type=item->type&(~cJSON_IsReference),newitem->valueint=item->valueint,newitem->valuedouble=item->valuedouble; if (item->valuestring) {newitem->valuestring=cJSON_strdup(item->valuestring); if (!newitem->valuestring) {cJSON_Delete(newitem);return 0;}} if (item->string) {newitem->string=cJSON_strdup(item->string); if (!newitem->string) {cJSON_Delete(newitem);return 0;}} /* If non-recursive, then we're done! */ if (!recurse) return newitem; /* Walk the ->next chain for the child. 
*/ cptr=item->child; while (cptr) { newchild=cJSON_Duplicate(cptr,1); /* Duplicate (with recurse) each item in the ->next chain */ if (!newchild) {cJSON_Delete(newitem);return 0;} if (nptr) {nptr->next=newchild,newchild->prev=nptr;nptr=newchild;} /* If newitem->child already set, then crosswire ->prev and ->next and move on */ else {newitem->child=newchild;nptr=newchild;} /* Set newitem->child and move to it */ cptr=cptr->next; } return newitem; } void cJSON_Minify(char *json) { char *into=json; while (*json) { if (*json==' ') json++; else if (*json=='\t') json++; // Whitespace characters. else if (*json=='\r') json++; else if (*json=='\n') json++; else if (*json=='/' && json[1]=='/') while (*json && *json!='\n') json++; // double-slash comments, to end of line. else if (*json=='/' && json[1]=='*') {while (*json && !(*json=='*' && json[1]=='/')) json++;json+=2;} // multiline comments. else if (*json=='\"'){*into++=*json++;while (*json && *json!='\"'){if (*json=='\\') *into++=*json++;*into++=*json++;}*into++=*json++;} // string literals, which are \" sensitive. else *into++=*json++; // All other characters. } *into=0; // and null-terminate. }nordugrid-arc-6.14.0/src/external/cJSON/PaxHeaders.30264/cJSON.h0000644000000000000000000000013214152153376021756 xustar000000000000000030 mtime=1638455038.345645073 30 atime=1638455038.476647041 30 ctime=1638455095.802508389 nordugrid-arc-6.14.0/src/external/cJSON/cJSON.h0000644000175000002070000001522614152153376021751 0ustar00mockbuildmock00000000000000/* Copyright (c) 2009 Dave Gamble Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #ifndef cJSON__h #define cJSON__h #ifdef __cplusplus extern "C" { #endif /* cJSON Types: */ #define cJSON_False 0 #define cJSON_True 1 #define cJSON_NULL 2 #define cJSON_Number 3 #define cJSON_String 4 #define cJSON_Array 5 #define cJSON_Object 6 #define cJSON_IsReference 256 /* The cJSON structure: */ typedef struct cJSON { struct cJSON *next,*prev; /* next/prev allow you to walk array/object chains. Alternatively, use GetArraySize/GetArrayItem/GetObjectItem */ struct cJSON *child; /* An array or object item will have a child pointer pointing to a chain of the items in the array/object. */ int type; /* The type of the item, as above. */ char *valuestring; /* The item's string, if type==cJSON_String */ int valueint; /* The item's number, if type==cJSON_Number */ double valuedouble; /* The item's number, if type==cJSON_Number */ char *string; /* The item's name string, if this item is the child of, or is in the list of subitems of an object. 
*/ } cJSON; typedef struct cJSON_Hooks { void *(*malloc_fn)(size_t sz); void (*free_fn)(void *ptr); } cJSON_Hooks; /* Supply malloc, realloc and free functions to cJSON */ extern void cJSON_InitHooks(cJSON_Hooks* hooks); /* Supply a block of JSON, and this returns a cJSON object you can interrogate. Call cJSON_Delete when finished. */ extern cJSON *cJSON_Parse(const char *value); /* Render a cJSON entity to text for transfer/storage. Free the char* when finished. */ extern char *cJSON_Print(cJSON *item); /* Render a cJSON entity to text for transfer/storage without any formatting. Free the char* when finished. */ extern char *cJSON_PrintUnformatted(cJSON *item); /* Delete a cJSON entity and all subentities. */ extern void cJSON_Delete(cJSON *c); /* Returns the number of items in an array (or object). */ extern int cJSON_GetArraySize(cJSON *array); /* Retrieve item number "item" from array "array". Returns NULL if unsuccessful. */ extern cJSON *cJSON_GetArrayItem(cJSON *array,int item); /* Get item "string" from object. Case insensitive. */ extern cJSON *cJSON_GetObjectItem(cJSON *object,const char *string); /* For analysing failed parses. This returns a pointer to the parse error. You'll probably need to look a few chars back to make sense of it. Defined when cJSON_Parse() returns 0. 0 when cJSON_Parse() succeeds. */ extern const char *cJSON_GetErrorPtr(void); /* These calls create a cJSON item of the appropriate type. */ extern cJSON *cJSON_CreateNull(void); extern cJSON *cJSON_CreateTrue(void); extern cJSON *cJSON_CreateFalse(void); extern cJSON *cJSON_CreateBool(int b); extern cJSON *cJSON_CreateNumber(double num); extern cJSON *cJSON_CreateString(const char *string); extern cJSON *cJSON_CreateArray(void); extern cJSON *cJSON_CreateObject(void); /* These utilities create an Array of count items. */ extern cJSON *cJSON_CreateIntArray(const int *numbers,int count); extern cJSON *cJSON_CreateFloatArray(const float *numbers,int count); extern cJSON *cJSON_CreateDoubleArray(const double *numbers,int count); extern cJSON *cJSON_CreateStringArray(const char **strings,int count); /* Append item to the specified array/object. */ extern void cJSON_AddItemToArray(cJSON *array, cJSON *item); extern void cJSON_AddItemToObject(cJSON *object,const char *string,cJSON *item); /* Append reference to item to the specified array/object. Use this when you want to add an existing cJSON to a new cJSON, but don't want to corrupt your existing cJSON. */ extern void cJSON_AddItemReferenceToArray(cJSON *array, cJSON *item); extern void cJSON_AddItemReferenceToObject(cJSON *object,const char *string,cJSON *item); /* Remove/Detatch items from Arrays/Objects. */ extern cJSON *cJSON_DetachItemFromArray(cJSON *array,int which); extern void cJSON_DeleteItemFromArray(cJSON *array,int which); extern cJSON *cJSON_DetachItemFromObject(cJSON *object,const char *string); extern void cJSON_DeleteItemFromObject(cJSON *object,const char *string); /* Update array items. */ extern void cJSON_ReplaceItemInArray(cJSON *array,int which,cJSON *newitem); extern void cJSON_ReplaceItemInObject(cJSON *object,const char *string,cJSON *newitem); /* Duplicate a cJSON item */ extern cJSON *cJSON_Duplicate(cJSON *item,int recurse); /* Duplicate will create a new, identical cJSON item to the one you pass, in new memory that will need to be released. With recurse!=0, it will duplicate any children connected to the item. The item->next and ->prev pointers are always zero on return from Duplicate. 
*/ /* ParseWithOpts allows you to require (and check) that the JSON is null terminated, and to retrieve the pointer to the final byte parsed. */ extern cJSON *cJSON_ParseWithOpts(const char *value,const char **return_parse_end,int require_null_terminated); extern void cJSON_Minify(char *json); /* Macros for creating things quickly. */ #define cJSON_AddNullToObject(object,name) cJSON_AddItemToObject(object, name, cJSON_CreateNull()) #define cJSON_AddTrueToObject(object,name) cJSON_AddItemToObject(object, name, cJSON_CreateTrue()) #define cJSON_AddFalseToObject(object,name) cJSON_AddItemToObject(object, name, cJSON_CreateFalse()) #define cJSON_AddBoolToObject(object,name,b) cJSON_AddItemToObject(object, name, cJSON_CreateBool(b)) #define cJSON_AddNumberToObject(object,name,n) cJSON_AddItemToObject(object, name, cJSON_CreateNumber(n)) #define cJSON_AddStringToObject(object,name,s) cJSON_AddItemToObject(object, name, cJSON_CreateString(s)) /* When assigning an integer value, it needs to be propagated to valuedouble too. */ #define cJSON_SetIntValue(object,val) ((object)?(object)->valueint=(object)->valuedouble=(val):(val)) #ifdef __cplusplus } #endif #endif nordugrid-arc-6.14.0/src/external/cJSON/PaxHeaders.30264/README0000644000000000000000000000013214152153376021551 xustar000000000000000030 mtime=1638455038.345645073 30 atime=1638455038.476647041 30 ctime=1638455095.800508359 nordugrid-arc-6.14.0/src/external/cJSON/README0000644000175000002070000002105714152153376021543 0ustar00mockbuildmock00000000000000/* Copyright (c) 2009 Dave Gamble Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ Welcome to cJSON. cJSON aims to be the dumbest possible parser that you can get your job done with. It's a single file of C, and a single header file. JSON is described best here: http://www.json.org/ It's like XML, but fat-free. You use it to move data around, store things, or just generally represent your program's state. First up, how do I build? Add cJSON.c to your project, and put cJSON.h somewhere in the header search path. For example, to build the test app: gcc cJSON.c test.c -o test -lm ./test As a library, cJSON exists to take away as much legwork as it can, but not get in your way. As a point of pragmatism (i.e. ignoring the truth), I'm going to say that you can use it in one of two modes: Auto and Manual. Let's have a quick run-through. I lifted some JSON from this page: http://www.json.org/fatfree.html That page inspired me to write cJSON, which is a parser that tries to share the same philosophy as JSON itself. Simple, dumb, out of the way. 
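Before we get into the two modes, here is the smallest complete round trip, just to fix the shape of the API in your head. This is an illustrative sketch only: the JSON literal, the main() wrapper and the messages are invented for the example; the cJSON_* calls are the real ones declared in cJSON.h.

	#include <stdio.h>
	#include <stdlib.h>
	#include "cJSON.h"

	int main(void)
	{
		char *text;
		cJSON *root = cJSON_Parse("{\"answer\": 42}");
		if (!root) { printf("parse failed near: %s\n", cJSON_GetErrorPtr()); return 1; }
		text = cJSON_Print(root);	/* render the tree back to text; the caller frees it */
		if (text) { printf("%s\n", text); free(text); }
		cJSON_Delete(root);		/* deletes the root and everything hanging off it */
		return 0;
	}

Parse, use, print, delete: that is the whole lifecycle.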
Some JSON: { "name": "Jack (\"Bee\") Nimble", "format": { "type": "rect", "width": 1920, "height": 1080, "interlace": false, "frame rate": 24 } } Assume that you got this from a file, a webserver, or magic JSON elves, whatever, you have a char * to it. Everything is a cJSON struct. Get it parsed: cJSON *root = cJSON_Parse(my_json_string); This is an object. We're in C. We don't have objects. But we do have structs. What's the framerate? cJSON *format = cJSON_GetObjectItem(root,"format"); int framerate = cJSON_GetObjectItem(format,"frame rate")->valueint; Want to change the framerate? cJSON_GetObjectItem(format,"frame rate")->valueint=25; Back to disk? char *rendered=cJSON_Print(root); Finished? Delete the root (this takes care of everything else). cJSON_Delete(root); That's AUTO mode. If you're going to use Auto mode, you really ought to check pointers before you dereference them. If you want to see how you'd build this struct in code? cJSON *root,*fmt; root=cJSON_CreateObject(); cJSON_AddItemToObject(root, "name", cJSON_CreateString("Jack (\"Bee\") Nimble")); cJSON_AddItemToObject(root, "format", fmt=cJSON_CreateObject()); cJSON_AddStringToObject(fmt,"type", "rect"); cJSON_AddNumberToObject(fmt,"width", 1920); cJSON_AddNumberToObject(fmt,"height", 1080); cJSON_AddFalseToObject (fmt,"interlace"); cJSON_AddNumberToObject(fmt,"frame rate", 24); Hopefully we can agree that's not a lot of code? There's no overhead, no unnecessary setup. Look at test.c for a bunch of nice examples, mostly all ripped off the json.org site, and a few from elsewhere. What about manual mode? First up you need some detail. Let's cover how the cJSON objects represent the JSON data. cJSON doesn't distinguish arrays from objects in handling; just type. Each cJSON has, potentially, a child, siblings, value, a name. The root object has: Object Type and a Child The Child has name "name", with value "Jack ("Bee") Nimble", and a sibling: Sibling has type Object, name "format", and a child. That child has type String, name "type", value "rect", and a sibling: Sibling has type Number, name "width", value 1920, and a sibling: Sibling has type Number, name "height", value 1080, and a sibling: Sibling hs type False, name "interlace", and a sibling: Sibling has type Number, name "frame rate", value 24 Here's the structure: typedef struct cJSON { struct cJSON *next,*prev; struct cJSON *child; int type; char *valuestring; int valueint; double valuedouble; char *string; } cJSON; By default all values are 0 unless set by virtue of being meaningful. next/prev is a doubly linked list of siblings. next takes you to your sibling, prev takes you back from your sibling to you. Only objects and arrays have a "child", and it's the head of the doubly linked list. A "child" entry will have prev==0, but next potentially points on. The last sibling has next=0. The type expresses Null/True/False/Number/String/Array/Object, all of which are #defined in cJSON.h A Number has valueint and valuedouble. If you're expecting an int, read valueint, if not read valuedouble. Any entry which is in the linked list which is the child of an object will have a "string" which is the "name" of the entry. When I said "name" in the above example, that's "string". "string" is the JSON name for the 'variable name' if you will. Now you can trivially walk the lists, recursively, and parse as you please. 
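To make that concrete, here is a bare-bones walk over those fields. Treat it as a sketch: the function name and the printf output are invented, but the fields (child, next, string, type) are exactly the ones described above.

	void list_members(cJSON *item)
	{
		cJSON *child = item->child;	/* head of the doubly linked list, or 0 */
		while (child)
		{
			printf("%s : type %d\n", child->string ? child->string : "(no name)", child->type);
			if (child->type == cJSON_Object || child->type == cJSON_Array)
				list_members(child);	/* objects and arrays nest via their own child pointer */
			child = child->next;	/* 0 on the last sibling */
		}
	}

Array elements go through the same loop; they just have string==0, because array entries have no name.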
You can invoke cJSON_Parse to get cJSON to parse for you, and then you can take the root object, and traverse the structure (which is, formally, an N-tree), and tokenise as you please. If you wanted to build a callback style parser, this is how you'd do it (just an example, since these things are very specific): void parse_and_callback(cJSON *item,const char *prefix) { while (item) { char *newprefix=malloc(strlen(prefix)+strlen(item->name)+2); sprintf(newprefix,"%s/%s",prefix,item->name); int dorecurse=callback(newprefix, item->type, item); if (item->child && dorecurse) parse_and_callback(item->child,newprefix); item=item->next; free(newprefix); } } The prefix process will build you a separated list, to simplify your callback handling. The 'dorecurse' flag would let the callback decide to handle sub-arrays on it's own, or let you invoke it per-item. For the item above, your callback might look like this: int callback(const char *name,int type,cJSON *item) { if (!strcmp(name,"name")) { /* populate name */ } else if (!strcmp(name,"format/type") { /* handle "rect" */ } else if (!strcmp(name,"format/width") { /* 800 */ } else if (!strcmp(name,"format/height") { /* 600 */ } else if (!strcmp(name,"format/interlace") { /* false */ } else if (!strcmp(name,"format/frame rate") { /* 24 */ } return 1; } Alternatively, you might like to parse iteratively. You'd use: void parse_object(cJSON *item) { int i; for (i=0;ichild; while (subitem) { // handle subitem if (subitem->child) parse_object(subitem->child); subitem=subitem->next; } } Of course, this should look familiar, since this is just a stripped-down version of the callback-parser. This should cover most uses you'll find for parsing. The rest should be possible to infer.. and if in doubt, read the source! There's not a lot of it! ;) In terms of constructing JSON data, the example code above is the right way to do it. You can, of course, hand your sub-objects to other functions to populate. Also, if you find a use for it, you can manually build the objects. For instance, suppose you wanted to build an array of objects? cJSON *objects[24]; cJSON *Create_array_of_anything(cJSON **items,int num) { int i;cJSON *prev, *root=cJSON_CreateArray(); for (i=0;i<24;i++) { if (!i) root->child=objects[i]; else prev->next=objects[i], objects[i]->prev=prev; prev=objects[i]; } return root; } and simply: Create_array_of_anything(objects,24); cJSON doesn't make any assumptions about what order you create things in. You can attach the objects, as above, and later add children to each of those objects. As soon as you call cJSON_Print, it renders the structure to text. The test.c code shows how to handle a bunch of typical cases. If you uncomment the code, it'll load, parse and print a bunch of test files, also from json.org, which are more complex than I'd care to try and stash into a const char array[]. Enjoy cJSON! - Dave Gamble, Aug 2009 nordugrid-arc-6.14.0/src/external/PaxHeaders.30264/README0000644000000000000000000000013214152153376020635 xustar000000000000000030 mtime=1638455038.344645058 30 atime=1638455038.475647026 30 ctime=1638455095.777508013 nordugrid-arc-6.14.0/src/external/README0000644000175000002070000000014014152153376020615 0ustar00mockbuildmock00000000000000External libraries small enough to include source rather than depending on an external package. 
nordugrid-arc-6.14.0/src/PaxHeaders.30264/clients0000644000000000000000000000013214152153475017517 xustar000000000000000030 mtime=1638455101.053587288 30 atime=1638455103.999631554 30 ctime=1638455101.053587288 nordugrid-arc-6.14.0/src/clients/0000755000175000002070000000000014152153475017561 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/clients/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376021630 xustar000000000000000030 mtime=1638455038.336644937 30 atime=1638455038.472646981 30 ctime=1638455100.989586327 nordugrid-arc-6.14.0/src/clients/Makefile.am0000644000175000002070000000137414152153376021622 0ustar00mockbuildmock00000000000000if DATA_CLIENT_ENABLED DATA_CLIENT = data else DATA_CLIENT = endif if CREDENTIALS_CLIENT_ENABLED CREDENTIALS_CLIENT = credentials else CREDENTIALS_CLIENT = endif if COMPUTE_CLIENT_ENABLED COMPUTE_CLIENT = compute else COMPUTE_CLIENT = endif SUBDIRS = $(DATA_CLIENT) $(CREDENTIALS_CLIENT) $(COMPUTE_CLIENT) DIST_SUBDIRS = data credentials compute arcsysconfdir = $(sysconfdir)/arc arcsysconf_DATA = client.conf exampledir = $(pkgdatadir)/examples example_DATA = client.conf BASH_COMPLETION_SOURCE = client.bash_completion arc-client-tools: $(BASH_COMPLETION_SOURCE) cp $< $@ bashcompletiondir = $(bashcompdir) bashcompletion_DATA = arc-client-tools EXTRA_DIST = $(example_DATA) $(arcsysconf_DATA) $(BASH_COMPLETION_SOURCE) CLEANFILES = arc-client-tools nordugrid-arc-6.14.0/src/clients/PaxHeaders.30264/data0000644000000000000000000000013214152153475020430 xustar000000000000000030 mtime=1638455101.023586838 30 atime=1638455103.999631554 30 ctime=1638455101.023586838 nordugrid-arc-6.14.0/src/clients/data/0000755000175000002070000000000014152153475020472 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/clients/data/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376022541 xustar000000000000000030 mtime=1638455038.340644998 30 atime=1638455038.474647011 30 ctime=1638455101.014586702 nordugrid-arc-6.14.0/src/clients/data/Makefile.am0000644000175000002070000000250214152153376022525 0ustar00mockbuildmock00000000000000bin_PROGRAMS = arccp arcls arcrm arcmkdir arcrename man_MANS = arccp.1 arcls.1 arcrm.1 arcmkdir.1 arcrename.1 CLILIBS = \ $(top_builddir)/src/hed/libs/compute/libarccompute.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la arccp_SOURCES = arccp.cpp arccp_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arccp_LDADD = $(CLILIBS) $(GLIBMM_LIBS) arcls_SOURCES = arcls.cpp arcls_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcls_LDADD = $(CLILIBS) $(GLIBMM_LIBS) arcrm_SOURCES = arcrm.cpp arcrm_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcrm_LDADD = $(CLILIBS) $(GLIBMM_LIBS) arcmkdir_SOURCES = arcmkdir.cpp arcmkdir_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcmkdir_LDADD = $(CLILIBS) $(GLIBMM_LIBS) arcrename_SOURCES = arcrename.cpp arcrename_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcrename_LDADD = $(CLILIBS) $(GLIBMM_LIBS) 
nordugrid-arc-6.14.0/src/clients/data/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153421022541 xustar000000000000000030 mtime=1638455057.988940222 30 atime=1638455091.195439165 30 ctime=1638455101.013586687 nordugrid-arc-6.14.0/src/clients/data/Makefile.in0000644000175000002070000012374114152153421022536 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ bin_PROGRAMS = arccp$(EXEEXT) arcls$(EXEEXT) arcrm$(EXEEXT) \ arcmkdir$(EXEEXT) arcrename$(EXEEXT) subdir = src/clients/data DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/arccp.1.in $(srcdir)/arcls.1.in $(srcdir)/arcrm.1.in \ $(srcdir)/arcmkdir.1.in $(srcdir)/arcrename.1.in \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 
$(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = arccp.1 arcls.1 arcrm.1 arcmkdir.1 arcrename.1 CONFIG_CLEAN_VPATH_FILES = am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)" PROGRAMS = $(bin_PROGRAMS) am_arccp_OBJECTS = arccp-arccp.$(OBJEXT) arccp_OBJECTS = $(am_arccp_OBJECTS) am__DEPENDENCIES_1 = arccp_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = arccp_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arccp_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcls_OBJECTS = arcls-arcls.$(OBJEXT) arcls_OBJECTS = $(am_arcls_OBJECTS) arcls_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) arcls_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcls_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcmkdir_OBJECTS = arcmkdir-arcmkdir.$(OBJEXT) arcmkdir_OBJECTS = $(am_arcmkdir_OBJECTS) arcmkdir_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) arcmkdir_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcmkdir_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcrename_OBJECTS = arcrename-arcrename.$(OBJEXT) arcrename_OBJECTS = $(am_arcrename_OBJECTS) arcrename_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) arcrename_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcrename_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcrm_OBJECTS = arcrm-arcrm.$(OBJEXT) arcrm_OBJECTS = $(am_arcrm_OBJECTS) arcrm_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) arcrm_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcrm_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = SOURCES = $(arccp_SOURCES) $(arcls_SOURCES) $(arcmkdir_SOURCES) \ $(arcrename_SOURCES) $(arcrm_SOURCES) DIST_SOURCES = $(arccp_SOURCES) $(arcls_SOURCES) 
$(arcmkdir_SOURCES) \ $(arcrename_SOURCES) $(arcrm_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } man1dir = $(mandir)/man1 NROFF = nroff MANS = $(man_MANS) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = 
@GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ 
XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ man_MANS = arccp.1 arcls.1 arcrm.1 arcmkdir.1 arcrename.1 CLILIBS = \ $(top_builddir)/src/hed/libs/compute/libarccompute.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la arccp_SOURCES = arccp.cpp arccp_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arccp_LDADD = $(CLILIBS) $(GLIBMM_LIBS) arcls_SOURCES = arcls.cpp arcls_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcls_LDADD = $(CLILIBS) $(GLIBMM_LIBS) arcrm_SOURCES = arcrm.cpp arcrm_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcrm_LDADD = $(CLILIBS) $(GLIBMM_LIBS) arcmkdir_SOURCES = arcmkdir.cpp arcmkdir_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcmkdir_LDADD = $(CLILIBS) $(GLIBMM_LIBS) 
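# All five data clients (arccp, arcls, arcrm, arcmkdir, arcrename) are
# built the same way: a single C++ source file compiled against the ARC
# headers in $(top_srcdir)/include with the glibmm, libxml2 and OpenSSL
# compile flags, and linked against the shared $(CLILIBS) libtool
# libraries plus $(GLIBMM_LIBS).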
arcrename_SOURCES = arcrename.cpp arcrename_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcrename_LDADD = $(CLILIBS) $(GLIBMM_LIBS) all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/clients/data/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/clients/data/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): arccp.1: $(top_builddir)/config.status $(srcdir)/arccp.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcls.1: $(top_builddir)/config.status $(srcdir)/arcls.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcrm.1: $(top_builddir)/config.status $(srcdir)/arcrm.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcmkdir.1: $(top_builddir)/config.status $(srcdir)/arcmkdir.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcrename.1: $(top_builddir)/config.status $(srcdir)/arcrename.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-binPROGRAMS: $(bin_PROGRAMS) @$(NORMAL_INSTALL) @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ fi; \ for p in $$list; do echo "$$p $$p"; done | \ sed 's/$(EXEEXT)$$//' | \ while read p p1; do if test -f $$p \ || test -f $$p1 \ ; then echo "$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n;h' \ -e 's|.*|.|' \ -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) files[d] = files[d] " " $$1; \ else { print "f", $$3 "/" $$4, $$1; } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ } \ ; done uninstall-binPROGRAMS: @$(NORMAL_UNINSTALL) @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ -e 's/$$/$(EXEEXT)/' \ `; \ test -n "$$list" || exit 0; \ echo " ( cd '$(DESTDIR)$(bindir)' && rm 
-f" $$files ")"; \ cd "$(DESTDIR)$(bindir)" && rm -f $$files clean-binPROGRAMS: @list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list arccp$(EXEEXT): $(arccp_OBJECTS) $(arccp_DEPENDENCIES) $(EXTRA_arccp_DEPENDENCIES) @rm -f arccp$(EXEEXT) $(AM_V_CXXLD)$(arccp_LINK) $(arccp_OBJECTS) $(arccp_LDADD) $(LIBS) arcls$(EXEEXT): $(arcls_OBJECTS) $(arcls_DEPENDENCIES) $(EXTRA_arcls_DEPENDENCIES) @rm -f arcls$(EXEEXT) $(AM_V_CXXLD)$(arcls_LINK) $(arcls_OBJECTS) $(arcls_LDADD) $(LIBS) arcmkdir$(EXEEXT): $(arcmkdir_OBJECTS) $(arcmkdir_DEPENDENCIES) $(EXTRA_arcmkdir_DEPENDENCIES) @rm -f arcmkdir$(EXEEXT) $(AM_V_CXXLD)$(arcmkdir_LINK) $(arcmkdir_OBJECTS) $(arcmkdir_LDADD) $(LIBS) arcrename$(EXEEXT): $(arcrename_OBJECTS) $(arcrename_DEPENDENCIES) $(EXTRA_arcrename_DEPENDENCIES) @rm -f arcrename$(EXEEXT) $(AM_V_CXXLD)$(arcrename_LINK) $(arcrename_OBJECTS) $(arcrename_LDADD) $(LIBS) arcrm$(EXEEXT): $(arcrm_OBJECTS) $(arcrm_DEPENDENCIES) $(EXTRA_arcrm_DEPENDENCIES) @rm -f arcrm$(EXEEXT) $(AM_V_CXXLD)$(arcrm_LINK) $(arcrm_OBJECTS) $(arcrm_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arccp-arccp.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcls-arcls.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcmkdir-arcmkdir.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcrename-arcrename.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcrm-arcrm.Po@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< arccp-arccp.o: arccp.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccp_CXXFLAGS) $(CXXFLAGS) -MT arccp-arccp.o -MD -MP -MF $(DEPDIR)/arccp-arccp.Tpo -c -o arccp-arccp.o `test -f 'arccp.cpp' || echo '$(srcdir)/'`arccp.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arccp-arccp.Tpo $(DEPDIR)/arccp-arccp.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arccp.cpp' object='arccp-arccp.o' libtool=no 
@AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccp_CXXFLAGS) $(CXXFLAGS) -c -o arccp-arccp.o `test -f 'arccp.cpp' || echo '$(srcdir)/'`arccp.cpp arccp-arccp.obj: arccp.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccp_CXXFLAGS) $(CXXFLAGS) -MT arccp-arccp.obj -MD -MP -MF $(DEPDIR)/arccp-arccp.Tpo -c -o arccp-arccp.obj `if test -f 'arccp.cpp'; then $(CYGPATH_W) 'arccp.cpp'; else $(CYGPATH_W) '$(srcdir)/arccp.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arccp-arccp.Tpo $(DEPDIR)/arccp-arccp.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arccp.cpp' object='arccp-arccp.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccp_CXXFLAGS) $(CXXFLAGS) -c -o arccp-arccp.obj `if test -f 'arccp.cpp'; then $(CYGPATH_W) 'arccp.cpp'; else $(CYGPATH_W) '$(srcdir)/arccp.cpp'; fi` arcls-arcls.o: arcls.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcls_CXXFLAGS) $(CXXFLAGS) -MT arcls-arcls.o -MD -MP -MF $(DEPDIR)/arcls-arcls.Tpo -c -o arcls-arcls.o `test -f 'arcls.cpp' || echo '$(srcdir)/'`arcls.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcls-arcls.Tpo $(DEPDIR)/arcls-arcls.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcls.cpp' object='arcls-arcls.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcls_CXXFLAGS) $(CXXFLAGS) -c -o arcls-arcls.o `test -f 'arcls.cpp' || echo '$(srcdir)/'`arcls.cpp arcls-arcls.obj: arcls.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcls_CXXFLAGS) $(CXXFLAGS) -MT arcls-arcls.obj -MD -MP -MF $(DEPDIR)/arcls-arcls.Tpo -c -o arcls-arcls.obj `if test -f 'arcls.cpp'; then $(CYGPATH_W) 'arcls.cpp'; else $(CYGPATH_W) '$(srcdir)/arcls.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcls-arcls.Tpo $(DEPDIR)/arcls-arcls.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcls.cpp' object='arcls-arcls.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcls_CXXFLAGS) $(CXXFLAGS) -c -o arcls-arcls.obj `if test -f 'arcls.cpp'; then $(CYGPATH_W) 'arcls.cpp'; else $(CYGPATH_W) '$(srcdir)/arcls.cpp'; fi` arcmkdir-arcmkdir.o: arcmkdir.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcmkdir_CXXFLAGS) $(CXXFLAGS) -MT arcmkdir-arcmkdir.o -MD -MP -MF $(DEPDIR)/arcmkdir-arcmkdir.Tpo -c -o arcmkdir-arcmkdir.o `test -f 'arcmkdir.cpp' || echo '$(srcdir)/'`arcmkdir.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcmkdir-arcmkdir.Tpo $(DEPDIR)/arcmkdir-arcmkdir.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcmkdir.cpp' object='arcmkdir-arcmkdir.o' libtool=no @AMDEPBACKSLASH@ 
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcmkdir_CXXFLAGS) $(CXXFLAGS) -c -o arcmkdir-arcmkdir.o `test -f 'arcmkdir.cpp' || echo '$(srcdir)/'`arcmkdir.cpp arcmkdir-arcmkdir.obj: arcmkdir.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcmkdir_CXXFLAGS) $(CXXFLAGS) -MT arcmkdir-arcmkdir.obj -MD -MP -MF $(DEPDIR)/arcmkdir-arcmkdir.Tpo -c -o arcmkdir-arcmkdir.obj `if test -f 'arcmkdir.cpp'; then $(CYGPATH_W) 'arcmkdir.cpp'; else $(CYGPATH_W) '$(srcdir)/arcmkdir.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcmkdir-arcmkdir.Tpo $(DEPDIR)/arcmkdir-arcmkdir.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcmkdir.cpp' object='arcmkdir-arcmkdir.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcmkdir_CXXFLAGS) $(CXXFLAGS) -c -o arcmkdir-arcmkdir.obj `if test -f 'arcmkdir.cpp'; then $(CYGPATH_W) 'arcmkdir.cpp'; else $(CYGPATH_W) '$(srcdir)/arcmkdir.cpp'; fi` arcrename-arcrename.o: arcrename.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrename_CXXFLAGS) $(CXXFLAGS) -MT arcrename-arcrename.o -MD -MP -MF $(DEPDIR)/arcrename-arcrename.Tpo -c -o arcrename-arcrename.o `test -f 'arcrename.cpp' || echo '$(srcdir)/'`arcrename.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcrename-arcrename.Tpo $(DEPDIR)/arcrename-arcrename.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcrename.cpp' object='arcrename-arcrename.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrename_CXXFLAGS) $(CXXFLAGS) -c -o arcrename-arcrename.o `test -f 'arcrename.cpp' || echo '$(srcdir)/'`arcrename.cpp arcrename-arcrename.obj: arcrename.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrename_CXXFLAGS) $(CXXFLAGS) -MT arcrename-arcrename.obj -MD -MP -MF $(DEPDIR)/arcrename-arcrename.Tpo -c -o arcrename-arcrename.obj `if test -f 'arcrename.cpp'; then $(CYGPATH_W) 'arcrename.cpp'; else $(CYGPATH_W) '$(srcdir)/arcrename.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcrename-arcrename.Tpo $(DEPDIR)/arcrename-arcrename.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcrename.cpp' object='arcrename-arcrename.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrename_CXXFLAGS) $(CXXFLAGS) -c -o arcrename-arcrename.obj `if test -f 'arcrename.cpp'; then $(CYGPATH_W) 'arcrename.cpp'; else $(CYGPATH_W) '$(srcdir)/arcrename.cpp'; fi` arcrm-arcrm.o: arcrm.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrm_CXXFLAGS) $(CXXFLAGS) -MT arcrm-arcrm.o -MD -MP -MF $(DEPDIR)/arcrm-arcrm.Tpo -c -o arcrm-arcrm.o `test -f 'arcrm.cpp' || echo 
'$(srcdir)/'`arcrm.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcrm-arcrm.Tpo $(DEPDIR)/arcrm-arcrm.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcrm.cpp' object='arcrm-arcrm.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrm_CXXFLAGS) $(CXXFLAGS) -c -o arcrm-arcrm.o `test -f 'arcrm.cpp' || echo '$(srcdir)/'`arcrm.cpp arcrm-arcrm.obj: arcrm.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrm_CXXFLAGS) $(CXXFLAGS) -MT arcrm-arcrm.obj -MD -MP -MF $(DEPDIR)/arcrm-arcrm.Tpo -c -o arcrm-arcrm.obj `if test -f 'arcrm.cpp'; then $(CYGPATH_W) 'arcrm.cpp'; else $(CYGPATH_W) '$(srcdir)/arcrm.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcrm-arcrm.Tpo $(DEPDIR)/arcrm-arcrm.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcrm.cpp' object='arcrm-arcrm.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrm_CXXFLAGS) $(CXXFLAGS) -c -o arcrm-arcrm.obj `if test -f 'arcrm.cpp'; then $(CYGPATH_W) 'arcrm.cpp'; else $(CYGPATH_W) '$(srcdir)/arcrm.cpp'; fi` mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man1: $(man_MANS) @$(NORMAL_INSTALL) @list1=''; \ list2='$(man_MANS)'; \ test -n "$(man1dir)" \ && test -n "`echo $$list1$$list2`" \ || exit 0; \ echo " $(MKDIR_P) '$(DESTDIR)$(man1dir)'"; \ $(MKDIR_P) "$(DESTDIR)$(man1dir)" || exit 1; \ { for i in $$list1; do echo "$$i"; done; \ if test -n "$$list2"; then \ for i in $$list2; do echo "$$i"; done \ | sed -n '/\.1[a-z]*$$/p'; \ fi; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man1dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man1dir)" || exit $$?; }; \ done; } uninstall-man1: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man1dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.1[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ dir='$(DESTDIR)$(man1dir)'; $(am__uninstall_files_from_dir) ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi 
ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(PROGRAMS) $(MANS) installdirs: for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-binPROGRAMS clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-binPROGRAMS install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man1 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-binPROGRAMS uninstall-man uninstall-man: uninstall-man1 .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \ clean-binPROGRAMS clean-generic clean-libtool cscopelist-am \ ctags ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-binPROGRAMS \ install-data install-data-am install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-man install-man1 \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags tags-am uninstall uninstall-am \ uninstall-binPROGRAMS uninstall-man uninstall-man1 # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/clients/data/PaxHeaders.30264/arcmkdir.cpp0000644000000000000000000000013214152153376023005 xustar000000000000000030 mtime=1638455038.340644998 30 atime=1638455038.474647011 30 ctime=1638455101.022586823 nordugrid-arc-6.14.0/src/clients/data/arcmkdir.cpp0000644000175000002070000001341414152153376022775 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include static Arc::Logger logger(Arc::Logger::getRootLogger(), "arcmkdir"); bool arcmkdir(const Arc::URL& file_url, Arc::UserConfig& usercfg, bool with_parents) { if (!file_url) { logger.msg(Arc::ERROR, "Invalid URL: %s", file_url.str()); return false; } if (file_url.Protocol() == "urllist") { std::list files = Arc::ReadURLList(file_url); if (files.empty()) { logger.msg(Arc::ERROR, "Can't read list of locations from file %s", file_url.Path()); return false; } bool r = true; for (std::list::iterator file = files.begin(); file != files.end(); ++file) { if (!arcmkdir(*file, usercfg, with_parents)) r = false; } return r; } Arc::DataHandle url(file_url, usercfg); if (!url) { logger.msg(Arc::ERROR, "Unsupported URL given"); return false; } if (url->RequiresCredentials()) { if (!usercfg.InitializeCredentials(Arc::initializeCredentialsType::RequireCredentials)) { logger.msg(Arc::ERROR, "Unable to create directory %s", file_url.str()); logger.msg(Arc::ERROR, "Invalid credentials, please check proxy and/or CA certificates"); return false; } Arc::Credential holder(usercfg); if (!holder.IsValid()) { if (holder.GetEndTime() < Arc::Time()) { logger.msg(Arc::ERROR, "Proxy expired"); } logger.msg(Arc::ERROR, "Unable to create directory %s", file_url.str()); logger.msg(Arc::ERROR, "Invalid credentials, please check proxy and/or CA certificates"); return false; } } url->SetSecure(false); Arc::DataStatus res = url->CreateDirectory(with_parents); if (!res.Passed()) { logger.msg(Arc::ERROR, std::string(res)); if (res.Retryable()) logger.msg(Arc::ERROR, "This seems like a temporary error, please try again later"); return false; } return true; } static int runmain(int argc, char **argv) { setlocale(LC_ALL, ""); static Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); Arc::OptionParser options(istring("url"), istring("The arcmkdir command creates directories " "on grid storage elements and catalogs.")); bool with_parents = false; options.AddOption('p', "parents", istring("make parent directories as needed"), with_parents); bool show_plugins = false; options.AddOption('P', "listplugins", istring("list the available plugins (protocols supported)"), show_plugins); int timeout = 20; options.AddOption('t', "timeout", istring("timeout in seconds (default 20)"), istring("seconds"), timeout); std::string conffile; options.AddOption('z', "conffile", istring("configuration file (default ~/.arc/client.conf)"), istring("filename"), conffile); std::string debug; options.AddOption('d', "debug", istring("FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG"), istring("debuglevel"), debug); bool version = false; options.AddOption('v', "version", istring("print version information"), version); std::list params = options.Parse(argc, argv); if (version) { std::cout << Arc::IString("%s version %s", "arcrm", VERSION) << std::endl; return 0; } 
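  // From here on the set-up is shared with the other data clients:
  // raise the logging threshold if --debug was given, optionally list
  // the available HED:DMC protocol plugins and exit, build the
  // UserConfig (credentials are only initialised later, when the
  // protocol actually requires them), require exactly one URL argument,
  // append a trailing '/' so the target is treated as a directory, and
  // finally call arcmkdir().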
// If debug is specified as argument, it should be set before loading the configuration. if (!debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(debug)); logger.msg(Arc::VERBOSE, "Running command: %s", options.GetCommandWithArguments()); if (show_plugins) { std::list modules; Arc::PluginsFactory pf(Arc::BaseConfig().MakeConfig(Arc::Config()).Parent()); pf.scan(Arc::FinderLoader::GetLibrariesList(), modules); Arc::PluginsFactory::FilterByKind("HED:DMC", modules); std::cout << Arc::IString("Protocol plugins available:") << std::endl; for (std::list::iterator itMod = modules.begin(); itMod != modules.end(); ++itMod) { for (std::list::iterator itPlug = itMod->plugins.begin(); itPlug != itMod->plugins.end(); ++itPlug) { std::cout << " " << itPlug->name << " - " << itPlug->description << std::endl; } } return 0; } // credentials will be initialised later if necessary Arc::UserConfig usercfg(conffile, Arc::initializeCredentialsType::TryCredentials); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } usercfg.UtilsDirPath(Arc::UserConfig::ARCUSERDIRECTORY()); usercfg.Timeout(timeout); if (debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); if (params.size() != 1) { logger.msg(Arc::ERROR, "Wrong number of parameters specified"); return 1; } // add a slash to the end if not present std::string url = params.front(); if (url[url.length()-1] != '/') url += '/'; if (!arcmkdir(url, usercfg, with_parents)) return 1; return 0; } int main(int argc, char **argv) { int xr = runmain(argc,argv); _exit(xr); return 0; } nordugrid-arc-6.14.0/src/clients/data/PaxHeaders.30264/arcrename.cpp0000644000000000000000000000013214152153376023146 xustar000000000000000030 mtime=1638455038.340644998 30 atime=1638455038.474647011 30 ctime=1638455101.022586823 nordugrid-arc-6.14.0/src/clients/data/arcrename.cpp0000644000175000002070000001407414152153376023141 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include static Arc::Logger logger(Arc::Logger::getRootLogger(), "arcrename"); bool arcrename(const Arc::URL& old_url, const Arc::URL& new_url, Arc::UserConfig& usercfg, int timeout) { if (!old_url) { logger.msg(Arc::ERROR, "Invalid URL: %s", old_url.str()); return false; } if (!new_url) { logger.msg(Arc::ERROR, "Invalid URL: %s", new_url.str()); return false; } // Check URLs if (old_url.Protocol() != new_url.Protocol() || old_url.Host() != new_url.Host() || old_url.Port() != new_url.Port()) { logger.msg(Arc::ERROR, "Both URLs must have the same protocol, host and port"); return false; } std::string old_path(old_url.Path()); std::string new_path(new_url.Path()); Arc::CanonicalDir(old_path, true); Arc::CanonicalDir(new_path, true); // LFC URLs can be specified by guid metadata option if ((old_path.find_first_not_of('/') == std::string::npos && old_url.MetaDataOptions().empty()) || new_path.find_first_not_of('/') == std::string::npos) { logger.msg(Arc::ERROR, "Cannot rename to or from root directory"); return false; } if (old_path == new_path && old_url.FullPath() == new_url.FullPath()) { logger.msg(Arc::ERROR, "Cannot rename to the same URL"); return false; } Arc::DataHandle url(old_url, usercfg); if (!url) { logger.msg(Arc::ERROR, "Unsupported URL given"); return false; } if (url->RequiresCredentials()) { if 
(!usercfg.InitializeCredentials(Arc::initializeCredentialsType::RequireCredentials)) { logger.msg(Arc::ERROR, "Unable to rename %s", old_url.str()); logger.msg(Arc::ERROR, "Invalid credentials, please check proxy and/or CA certificates"); return false; } Arc::Credential holder(usercfg); if (!holder.IsValid()) { if (holder.GetEndTime() < Arc::Time()) { logger.msg(Arc::ERROR, "Proxy expired"); } logger.msg(Arc::ERROR, "Unable to rename %s", old_url.str()); logger.msg(Arc::ERROR, "Invalid credentials, please check proxy and/or CA certificates"); return false; } } // Insecure by default url->SetSecure(false); // Do the renaming Arc::DataStatus res = url->Rename(new_url); if (!res.Passed()) { logger.msg(Arc::ERROR, std::string(res)); if (res.Retryable()) logger.msg(Arc::ERROR, "This seems like a temporary error, please try again later"); return false; } return true; } static int runmain(int argc, char **argv) { setlocale(LC_ALL, ""); static Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); Arc::OptionParser options(istring("old_url new_url"), istring("The arcrename command renames files on " "grid storage elements.")); bool show_plugins = false; options.AddOption('P', "listplugins", istring("list the available plugins (protocols supported)"), show_plugins); int timeout = 20; options.AddOption('t', "timeout", istring("timeout in seconds (default 20)"), istring("seconds"), timeout); std::string conffile; options.AddOption('z', "conffile", istring("configuration file (default ~/.arc/client.conf)"), istring("filename"), conffile); std::string debug; options.AddOption('d', "debug", istring("FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG"), istring("debuglevel"), debug); bool version = false; options.AddOption('v', "version", istring("print version information"), version); std::list params = options.Parse(argc, argv); if (version) { std::cout << Arc::IString("%s version %s", "arcrename", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. 
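  // The rest of the set-up mirrors the other data clients: optional
  // listing of the HED:DMC protocol plugins, UserConfig initialisation
  // with TryCredentials, and a check that exactly two URL arguments
  // (the old and the new name) were given before arcrename() is called.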
if (!debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(debug)); logger.msg(Arc::VERBOSE, "Running command: %s", options.GetCommandWithArguments()); if (show_plugins) { std::list modules; Arc::PluginsFactory pf(Arc::BaseConfig().MakeConfig(Arc::Config()).Parent()); pf.scan(Arc::FinderLoader::GetLibrariesList(), modules); Arc::PluginsFactory::FilterByKind("HED:DMC", modules); std::cout << Arc::IString("Protocol plugins available:") << std::endl; for (std::list::iterator itMod = modules.begin(); itMod != modules.end(); ++itMod) { for (std::list::iterator itPlug = itMod->plugins.begin(); itPlug != itMod->plugins.end(); ++itPlug) { std::cout << " " << itPlug->name << " - " << itPlug->description << std::endl; } } return 0; } // credentials will be initialised later if necessary Arc::UserConfig usercfg(conffile, Arc::initializeCredentialsType::TryCredentials); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } usercfg.UtilsDirPath(Arc::UserConfig::ARCUSERDIRECTORY()); if (debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); if (params.size() != 2) { logger.msg(Arc::ERROR, "Wrong number of parameters specified"); return 1; } std::string oldurl(params.front()); std::string newurl(params.back()); if (!arcrename(oldurl, newurl, usercfg, timeout)) return 1; return 0; } int main(int argc, char **argv) { int xr = runmain(argc,argv); _exit(xr); return 0; } nordugrid-arc-6.14.0/src/clients/data/PaxHeaders.30264/arcls.cpp0000644000000000000000000000013214152153376022315 xustar000000000000000030 mtime=1638455038.340644998 30 atime=1638455038.474647011 30 ctime=1638455101.021586807 nordugrid-arc-6.14.0/src/clients/data/arcls.cpp0000644000175000002070000003426514152153376022314 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include static Arc::Logger logger(Arc::Logger::getRootLogger(), "arcls"); void print_urls(const Arc::FileInfo& file) { for (std::list::const_iterator u = file.GetURLs().begin(); u != file.GetURLs().end(); ++u) std::cout << "\t" << *u << std::endl; } void print_meta(const Arc::FileInfo& file) { std::map md = file.GetMetaData(); for (std::map::iterator mi = md.begin(); mi != md.end(); ++mi) std::cout<first<<":"<second<& files, bool show_urls, bool show_meta) { if (files.empty()) return; unsigned int namewidth = 0; unsigned int sizewidth = 0; unsigned int csumwidth = 0; // find longest length of each field to align the output for (std::list::const_iterator i = files.begin(); i != files.end(); ++i) { if (i->GetName().length() > namewidth) namewidth = i->GetName().length(); if (i->CheckSize() && i->GetSize() > 0 && // log(0) not good! 
(unsigned int)(log10(i->GetSize()))+1 > sizewidth) sizewidth = (unsigned int)(log10(i->GetSize()))+1; if (i->CheckCheckSum() && i->GetCheckSum().length() > csumwidth) csumwidth = i->GetCheckSum().length(); } std::cout << std::setw(namewidth) << std::left << " "; std::cout << " "; std::cout << std::setw(sizewidth + 4) << std::left << " "; std::cout << " "; std::cout << " "; std::cout << std::setw(csumwidth) << std::right << ""; std::cout << std::endl; // set minimum widths to accommodate headers if (namewidth < 7) namewidth = 7; if (sizewidth < 7) sizewidth = 7; if (csumwidth < 8) csumwidth = 8; for (std::list::const_iterator i = files.begin(); i != files.end(); ++i) { std::cout << std::setw(namewidth) << std::left << i->GetName(); switch (i->GetType()) { case Arc::FileInfo::file_type_file: std::cout << " file"; break; case Arc::FileInfo::file_type_dir: std::cout << " dir"; break; default: std::cout << " (n/a)"; break; } if (i->CheckSize()) { std::cout << " " << std::setw(sizewidth) << std::right << Arc::tostring(i->GetSize()); } else { std::cout << " " << std::setw(sizewidth) << std::right << " (n/a)"; } if (i->CheckModified()) { std::cout << " " << i->GetModified(); } else { std::cout << " (n/a) "; } if (i->CheckCheckSum()) { std::cout << " " << std::setw(csumwidth) << std::left << i->GetCheckSum(); } else { std::cout << " " << std::setw(csumwidth) << std::left << " (n/a)"; } if (i->CheckLatency()) { std::cout << " " << i->GetLatency(); } else { std::cout << " (n/a)"; } std::cout << std::endl; if (show_urls) print_urls(*i); if (show_meta) print_meta(*i); } } static bool arcls(const Arc::URL& dir_url, Arc::UserConfig& usercfg, bool show_details, // longlist bool show_urls, // locations bool show_meta, // metadata bool no_list, // don't list dirs bool force_list, // force dir list bool check_access, // checkaccess int recursion, // recursion int timeout) { // timeout if (!dir_url) { logger.msg(Arc::ERROR, "Invalid URL: %s", dir_url.fullstr()); return false; } if (dir_url.Protocol() == "urllist") { std::list dirs = Arc::ReadURLList(dir_url); if (dirs.empty()) { logger.msg(Arc::ERROR, "Can't read list of locations from file %s", dir_url.Path()); return false; } bool r = true; for (std::list::iterator dir = dirs.begin(); dir != dirs.end(); ++dir) { if(!arcls(*dir, usercfg, show_details, show_urls, show_meta, no_list, force_list, check_access, recursion, timeout)) r = false; } return r; } Arc::DataHandle url(dir_url, usercfg); if (!url) { logger.msg(Arc::ERROR, "Unsupported URL given"); return false; } if (url->RequiresCredentials()) { if (!usercfg.InitializeCredentials(Arc::initializeCredentialsType::RequireCredentials)) { logger.msg(Arc::ERROR, "Unable to list content of %s", dir_url.str()); logger.msg(Arc::ERROR, "Invalid credentials, please check proxy and/or CA certificates"); return false; } Arc::Credential holder(usercfg); if (!holder.IsValid()) { if (holder.GetEndTime() < Arc::Time()) { logger.msg(Arc::ERROR, "Proxy expired"); } logger.msg(Arc::ERROR, "Unable to list content of %s", dir_url.str()); logger.msg(Arc::ERROR, "Invalid credentials, please check proxy and/or CA certificates"); return false; } } url->SetSecure(false); if(check_access) { std::cout << dir_url << " - "; if(url->Check(false)) { std::cout << "passed" << std::endl; return true; } else { std::cout << "failed" << std::endl; return false; } } Arc::DataPoint::DataPointInfoType verb = (Arc::DataPoint::DataPointInfoType) (Arc::DataPoint::INFO_TYPE_MINIMAL | Arc::DataPoint::INFO_TYPE_NAME); if(show_urls) verb = 
(Arc::DataPoint::DataPointInfoType) (verb | Arc::DataPoint::INFO_TYPE_STRUCT); if(show_meta) verb = (Arc::DataPoint::DataPointInfoType) (verb | Arc::DataPoint::INFO_TYPE_ALL); if(show_details) verb = (Arc::DataPoint::DataPointInfoType) (verb | Arc::DataPoint::INFO_TYPE_TYPE | Arc::DataPoint::INFO_TYPE_TIMES | Arc::DataPoint::INFO_TYPE_CONTENT | Arc::DataPoint::INFO_TYPE_CKSUM | Arc::DataPoint::INFO_TYPE_ACCESS); if(recursion > 0) verb = (Arc::DataPoint::DataPointInfoType) (verb | Arc::DataPoint::INFO_TYPE_TYPE); Arc::DataStatus res; Arc::FileInfo file; std::list files; if(no_list) { // only requested object is queried res = url->Stat(file, verb); if(res) files.push_back(file); } else if(force_list) { // assume it is directory, fail otherwise res = url->List(files, verb); } else { // try to guess what to do res = url->Stat(file, (Arc::DataPoint::DataPointInfoType)(verb | Arc::DataPoint::INFO_TYPE_TYPE)); if(res && (file.GetType() == Arc::FileInfo::file_type_file)) { // If it is file and we are sure, then just report it. files.push_back(file); } else { // If it is dir then we must list it. But if stat failed or // if type is undefined there is still chance it is directory. Arc::DataStatus res_ = url->List(files, verb); if(!res_) { // If listing failed maybe simply report previous result if any. if(res) { files.push_back(file); } } else { res = res_; } } } if (!res) { if (files.empty()) { logger.msg(Arc::ERROR, std::string(res)); if (res.Retryable()) logger.msg(Arc::ERROR, "This seems like a temporary error, please try again later"); return false; } logger.msg(Arc::INFO, "Warning: " "Failed listing files but some information is obtained"); } files.sort(); // Sort alphabetically by name if (show_details) { print_details(files, show_urls, show_meta); } else { for (std::list::iterator i = files.begin(); i != files.end(); ++i) { std::cout << i->GetName() << std::endl; if (show_urls) print_urls(*i); if (show_meta) print_meta(*i); } } // Do recursion. Recursion has no sense if listing is forbidden. 
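  // For every entry reported as a directory a sub-URL is built by
  // appending the entry name to the current path ('/' for remote
  // protocols, the platform directory separator for file://), and
  // arcls() is called again with the recursion depth reduced by one.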
if ((recursion > 0) && (!no_list)) { for (std::list::iterator i = files.begin(); i != files.end(); ++i) { if (i->GetType() == Arc::FileInfo::file_type_dir) { Arc::URL suburl = dir_url; if(suburl.Protocol() != "file") { if (suburl.Path()[suburl.Path().length() - 1] != '/') suburl.ChangePath(suburl.Path() + "/" + i->GetName()); else suburl.ChangePath(suburl.Path() + i->GetName()); } else { if (suburl.Path()[suburl.Path().length() - 1] != G_DIR_SEPARATOR) suburl.ChangePath(suburl.Path() + G_DIR_SEPARATOR_S + i->GetName()); else suburl.ChangePath(suburl.Path() + i->GetName()); } std::cout << std::endl; std::cout << suburl.str() << ":" << std::endl; arcls(suburl, usercfg, show_details, show_urls, show_meta, no_list, force_list, check_access, recursion - 1, timeout); std::cout << std::endl; } } } return true; } static int runmain(int argc, char **argv) { setlocale(LC_ALL, ""); static Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); Arc::OptionParser options(istring("url"), istring("The arcls command is used for listing " "files in grid storage elements " "and file\nindex catalogues.")); bool longlist = false; options.AddOption('l', "long", istring("long format (more information)"), longlist); bool locations = false; options.AddOption('L', "locations", istring("show URLs of file locations"), locations); bool metadata = false; options.AddOption('m', "metadata", istring("display all available metadata"), metadata); bool infinite_recursion = false; options.AddOption('r', "recursive", istring("operate recursively"), infinite_recursion); int recursion = 0; options.AddOption('D', "depth", istring("operate recursively up to specified level"), istring("level"), recursion); bool nolist = false; options.AddOption('n', "nolist", istring("show only description of requested object, do not list content of directories"), nolist); bool forcelist = false; options.AddOption('f', "forcelist", istring("treat requested object as directory and always try to list content"), forcelist); bool checkaccess = false; options.AddOption('c', "checkaccess", istring("check readability of object, does not show any information about object"), checkaccess); bool show_plugins = false; options.AddOption('P', "listplugins", istring("list the available plugins (protocols supported)"), show_plugins); int timeout = 20; options.AddOption('t', "timeout", istring("timeout in seconds (default 20)"), istring("seconds"), timeout); std::string conffile; options.AddOption('z', "conffile", istring("configuration file (default ~/.arc/client.conf)"), istring("filename"), conffile); std::string debug; options.AddOption('d', "debug", istring("FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG"), istring("debuglevel"), debug); bool version = false; options.AddOption('v', "version", istring("print version information"), version); std::list params = options.Parse(argc, argv); if (version) { std::cout << Arc::IString("%s version %s", "arcls", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. 
if (!debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(debug)); logger.msg(Arc::VERBOSE, "Running command: %s", options.GetCommandWithArguments()); if (show_plugins) { std::list modules; Arc::PluginsFactory pf(Arc::BaseConfig().MakeConfig(Arc::Config()).Parent()); pf.scan(Arc::FinderLoader::GetLibrariesList(), modules); Arc::PluginsFactory::FilterByKind("HED:DMC", modules); std::cout << Arc::IString("Protocol plugins available:") << std::endl; for (std::list::iterator itMod = modules.begin(); itMod != modules.end(); ++itMod) { for (std::list::iterator itPlug = itMod->plugins.begin(); itPlug != itMod->plugins.end(); ++itPlug) { std::cout << " " << itPlug->name << " - " << itPlug->description << std::endl; } } return 0; } // credentials will be initialised later if necessary Arc::UserConfig usercfg(conffile, Arc::initializeCredentialsType::TryCredentials); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } usercfg.UtilsDirPath(Arc::UserConfig::ARCUSERDIRECTORY()); if (debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); // Analyze options if (params.size() != 1) { logger.msg(Arc::ERROR, "Wrong number of parameters specified"); return 1; } if(forcelist && nolist) { logger.msg(Arc::ERROR, "Incompatible options --nolist and --forcelist requested"); return 1; } if(recursion && nolist) { logger.msg(Arc::ERROR, "Requesting recursion and --nolist has no sense"); return 1; } if(infinite_recursion) recursion = INT_MAX; std::list::iterator it = params.begin(); if(!arcls(*it, usercfg, longlist, locations, metadata, nolist, forcelist, checkaccess, recursion, timeout)) return 1; return 0; } int main(int argc, char **argv) { int xr = runmain(argc,argv); _exit(xr); return 0; } nordugrid-arc-6.14.0/src/clients/data/PaxHeaders.30264/arcrm.1.in0000644000000000000000000000013214152153376022300 xustar000000000000000030 mtime=1638455038.340644998 30 atime=1638455038.474647011 30 ctime=1638455101.016586733 nordugrid-arc-6.14.0/src/clients/data/arcrm.1.in0000644000175000002070000000620014152153376022263 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCRM 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcrm \- delete files .SH DESCRIPTION The .B arcrm command deletes files on grid storage elements and indexing services. .SH SYNOPSIS .B arcrm [options] url [url ...] .SH OPTIONS .IP "\fB-f\fR, \fB--force\fR" remove logical file name registration even if not all physical instances were removed .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins (protocols supported) .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .SH ARGUMENTS .IP "\fBurl [url ...]\fR" the location(s) to delete .LP .SH EXTENDED DESCRIPTION The .B arcrm command deletes files on grid storage elements and indexing services. In the case of an indexing service url all physical instances of the file corresponding to the given locations are deleted and unregistered. If an indexing service url is given without locations, all physical instances and all meta-information about file are deleted. 
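For instance, if a file is registered in an indexing service with replicas on several storage elements, giving only the indexing service URL removes every replica together with the registration; with the \fB-f\fR option the registration is removed even when some physical instances could not be deleted.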
For more information on ARC URL syntax please read "Protocols, Uniform Resource Locators (URL) and Extensions Supported in ARC" [NORDUGRID-TECH-7] If .B url starts from '@', the remaining argument is assumed to be a path to a local file containing a list of URLs, one per line. In this case .B arcrm performs like it were called with all those URLs as arguments. Depending on the installed libraries (check with .B -P ), the following protocols may be used: file (file:// prefix may be omitted), http, https, httpg, ftp, gsiftp, srm, root. .SH FILES .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. By using the .B --conffile option a different configuration file can be used than the default. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH EXAMPLE arcrm gsiftp://example.com/grid/file.dat .SH NOTES Lack of recursion is a feature. .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccp (1), .BR arcls (1), .BR arcmkdir(1), .BR arcrename (1), nordugrid-arc-6.14.0/src/clients/data/PaxHeaders.30264/arcrename.1.in0000644000000000000000000000013214152153376023131 xustar000000000000000030 mtime=1638455038.340644998 30 atime=1638455038.474647011 30 ctime=1638455101.018586762 nordugrid-arc-6.14.0/src/clients/data/arcrename.1.in0000644000175000002070000000621014152153376023115 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCRENAME 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcrename \- rename file or directory .SH DESCRIPTION The .B arcrename renames files or directories on grid storage elements and indexing services. .SH SYNOPSIS .B arcrename [options] oldurl newurl .SH OPTIONS .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins (protocols supported) .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .SH ARGUMENTS .IP "\fBoldurl\fR" current name of file or directory .IP "\fBnewurl\fR" new name for file or directory .LP .SH EXTENDED DESCRIPTION The .B arcrename command renames files or directories on grid storage elements and indexing services. The path component of .B oldurl and .B newurl must differ and it must be the only component of both URLs which is different. .B arcrename will exit with an error if the paths are equivalent or other components of the URLs are different. 
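For example, renaming gsiftp://example.com/grid/file.dat to gsiftp://example.com/grid/new.file.dat changes only the path and is accepted, while a new URL that points to a different host or protocol (for instance gsiftp://other.example.com/grid/file.dat) is rejected.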
Renaming a URL to an existing URL will either fail or overwrite the existing URL, depending on the protocol. .B arcrename works purely at the namespace level and does not perform data transfer. For more information on ARC URL syntax please read "Protocols, Uniform Resource Locators (URL) and Extensions Supported in ARC" [NORDUGRID-TECH-7] Depending on the installed libraries (check with .B -P ), the following protocols may be used: file (file:// prefix may be omitted), http, https, httpg, ftp, gsiftp, srm, root. However renaming is not supported or implemented for some of those protocols. .SH FILES .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. By using the .B --conffile option a different configuration file can be used than the default. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH EXAMPLE arcrename gsiftp://example.com/grid/file.dat gsiftp://example.com/grid/new.file.dat .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccp (1), .BR arcls (1), .BR arcmkdir(1), .BR arcrm(1), nordugrid-arc-6.14.0/src/clients/data/PaxHeaders.30264/arccp.cpp0000644000000000000000000000013214152153376022301 xustar000000000000000030 mtime=1638455038.340644998 30 atime=1638455038.474647011 30 ctime=1638455101.020586793 nordugrid-arc-6.14.0/src/clients/data/arccp.cpp0000644000175000002070000006351214152153376022275 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include static Arc::Logger logger(Arc::Logger::getRootLogger(), "arccp"); static Arc::SimpleCondition cond; static bool cancelled = false; static void sig_cancel(int) { if (cancelled) _exit(0); cancelled = true; cond.broadcast(); } static void progress(FILE *o, const char*, unsigned int, unsigned long long int all, unsigned long long int max, double, double) { static int rs = 0; const char rs_[4] = { '|', '/', '-', '\\' }; if (max) { fprintf(o, "\r|"); unsigned int l = (74 * all + 37) / max; if (l > 74) l = 74; unsigned int i = 0; for (; i < l; i++) fprintf(o, "="); fprintf(o, "%c", rs_[rs++]); if (rs > 3) rs = 0; for (; i < 74; i++) fprintf(o, " "); fprintf(o, "|\r"); fflush(o); return; } fprintf(o, "\r%llu kB \r", all / 1024); } static void transfer_cb(unsigned long long int bytes_transferred) { fprintf (stderr, "\r%llu kB \r", bytes_transferred / 1024); } static void mover_callback(Arc::DataMover* mover, Arc::DataStatus status, void* arg) { Arc::DataStatus* res = (Arc::DataStatus*)arg; *res = status; if (!res->Passed()) 
{ logger.msg(Arc::ERROR, "Current transfer FAILED: %s", std::string(*res)); if (res->Retryable()) { logger.msg(Arc::ERROR, "This seems like a temporary error, please try again later"); } } cond.broadcast(); } static bool checkProxy(Arc::UserConfig& usercfg, const Arc::URL& src_file) { if (!usercfg.InitializeCredentials(Arc::initializeCredentialsType::RequireCredentials)) { logger.msg(Arc::ERROR, "Unable to copy %s", src_file.str()); logger.msg(Arc::ERROR, "Invalid credentials, please check proxy and/or CA certificates"); return false; } Arc::Credential holder(usercfg); if (!holder.IsValid()) { if (holder.GetEndTime() < Arc::Time()) { logger.msg(Arc::ERROR, "Proxy expired"); } logger.msg(Arc::ERROR, "Unable to copy %s", src_file.str()); logger.msg(Arc::ERROR, "Invalid credentials, please check proxy and/or CA certificates"); return false; } return true; } bool arctransfer(const Arc::URL& source_url, const Arc::URL& destination_url, const std::list& locations, Arc::UserConfig& usercfg, bool secure, bool passive, bool verbose, int timeout) { if (!source_url) { logger.msg(Arc::ERROR, "Invalid URL: %s", source_url.str()); return false; } if (!destination_url) { logger.msg(Arc::ERROR, "Invalid URL: %s", destination_url.str()); return false; } // Credentials are always required for 3rd party transfer if (!checkProxy(usercfg, source_url)) return false; if (timeout > 0) usercfg.Timeout(timeout); Arc::DataStatus res = Arc::DataPoint::Transfer3rdParty(source_url, destination_url, usercfg, verbose ? &transfer_cb : NULL); if (verbose) std::cerr< sources = Arc::ReadURLList(source_url); std::list destinations = Arc::ReadURLList(destination_url); if (sources.empty()) { logger.msg(Arc::ERROR, "Can't read list of sources from file %s", source_url.Path()); return false; } if (destinations.empty()) { logger.msg(Arc::ERROR, "Can't read list of destinations from file %s", destination_url.Path()); return false; } if (sources.size() != destinations.size()) { logger.msg(Arc::ERROR, "Numbers of sources and destinations do not match"); return false; } bool r = true; for (std::list::iterator source = sources.begin(), destination = destinations.begin(); (source != sources.end()) && (destination != destinations.end()); ++source, ++destination) { if (!arcregister(*source, *destination, usercfg, force_meta)) r = false; if (cancelled) return true; } return r; } if (source_url.Protocol() == "urllist") { std::list sources = Arc::ReadURLList(source_url); if (sources.empty()) { logger.msg(Arc::ERROR, "Can't read list of sources from file %s", source_url.Path()); return false; } bool r = true; for (std::list::iterator source = sources.begin(); source != sources.end(); ++source) { if (!arcregister(*source, destination_url, usercfg, force_meta)) r = false; if (cancelled) return true; } return r; } if (destination_url.Protocol() == "urllist") { std::list destinations = Arc::ReadURLList(destination_url); if (destinations.empty()) { logger.msg(Arc::ERROR, "Can't read list of destinations from file %s", destination_url.Path()); return false; } bool r = true; for (std::list::iterator destination = destinations.begin(); destination != destinations.end(); ++destination) { if (!arcregister(source_url, *destination, usercfg, force_meta)) r = false; if (cancelled) return true; } return r; } if (destination_url.Path()[destination_url.Path().length() - 1] == '/') { logger.msg(Arc::ERROR, "Fileset registration is not supported yet"); return false; } Arc::DataHandle source(source_url, usercfg); Arc::DataHandle destination(destination_url, 
usercfg); if (!source) { logger.msg(Arc::ERROR, "Unsupported source url: %s", source_url.str()); return false; } if (!destination) { logger.msg(Arc::ERROR, "Unsupported destination url: %s", destination_url.str()); return false; } if ((source->RequiresCredentials() || destination->RequiresCredentials()) && !checkProxy(usercfg, source_url)) return false; if (source->IsIndex() || !destination->IsIndex()) { logger.msg(Arc::ERROR, "For registration source must be ordinary URL" " and destination must be indexing service"); return false; } // Obtain meta-information about source Arc::FileInfo fileinfo; Arc::DataPoint::DataPointInfoType verb = (Arc::DataPoint::DataPointInfoType)Arc::DataPoint::INFO_TYPE_CONTENT; Arc::DataStatus res = source->Stat(fileinfo, verb); if (!res) { logger.msg(Arc::ERROR, "Could not obtain information about source: %s", std::string(res)); return false; } // Check if destination is already registered if (destination->Resolve(true)) { // Check meta-info matches source if (!destination->CompareMeta(*source) && !force_meta) { logger.msg(Arc::ERROR, "Metadata of source does not match existing " "destination. Use the --force option to override this."); return false; } // Remove existing locations destination->ClearLocations(); } bool replication = destination->Registered(); destination->SetMeta(*source); // pass metadata // Add new location std::string metaname = source_url.ConnectionURL(); if (!destination->AddLocation(source_url, metaname)) { logger.msg(Arc::ERROR, "Failed to accept new file/destination"); return false; } destination->SetTries(1); res = destination->PreRegister(replication, force_meta); if (!res) { logger.msg(Arc::ERROR, "Failed to register new file/destination: %s", std::string(res)); return false; } res = destination->PostRegister(replication); if (!res) { destination->PreUnregister(replication); logger.msg(Arc::ERROR, "Failed to register new file/destination: %s", std::string(res)); return false; } return true; } static Arc::DataStatus do_mover(const Arc::URL& s_url, const Arc::URL& d_url, const std::list& locations, const std::string& cache_dir, Arc::UserConfig& usercfg, bool secure, bool passive, bool force_meta, int tries, bool verbose, int timeout) { Arc::DataHandle source(s_url, usercfg); Arc::DataHandle destination(d_url, usercfg); if (!source) { logger.msg(Arc::ERROR, "Unsupported source url: %s", s_url.str()); return Arc::DataStatus::ReadAcquireError; } if (!destination) { logger.msg(Arc::ERROR, "Unsupported destination url: %s", d_url.str()); return Arc::DataStatus::WriteAcquireError; } if ((source->RequiresCredentials() || destination->RequiresCredentials()) && !checkProxy(usercfg, s_url)) return Arc::DataStatus::CredentialsExpiredError; if (!locations.empty()) { std::string meta(destination->GetURL().Protocol()+"://"+destination->GetURL().Host()); for (std::list::const_iterator i = locations.begin(); i != locations.end(); ++i) { destination->AddLocation(*i, meta); } } Arc::DataMover mover; mover.secure(secure); mover.passive(passive); mover.verbose(verbose); mover.force_to_meta(force_meta); if (tries) { mover.retry(true); // go through all locations source->SetTries(tries); // try all locations "tries" times destination->SetTries(tries); } Arc::User cache_user; Arc::FileCache cache; if (!cache_dir.empty()) cache = Arc::FileCache(cache_dir+" .", "", cache_user.get_uid(), cache_user.get_gid()); if (verbose) mover.set_progress_indicator(&progress); Arc::DataStatus callback_res; Arc::DataStatus res = mover.Transfer(*source, *destination, cache, 
Arc::URLMap(), 0, 0, 0, timeout, &mover_callback, &callback_res); if (!res.Passed()) { logger.msg(Arc::ERROR, "Current transfer FAILED: %s", std::string(res)); if (res.Retryable()) { logger.msg(Arc::ERROR, "This seems like a temporary error, please try again later"); } return res; } cond.wait(); // wait for mover_callback if (verbose) std::cerr<& locations, const std::string& cache_dir, Arc::UserConfig& usercfg, bool secure, bool passive, bool force_meta, int recursion, int tries, bool verbose, int timeout) { Arc::URL source_url(source_url_); if (!source_url) { logger.msg(Arc::ERROR, "Invalid URL: %s", source_url.str()); return false; } Arc::URL destination_url(destination_url_); if (!destination_url) { logger.msg(Arc::ERROR, "Invalid URL: %s", destination_url.str()); return false; } if (timeout <= 0) timeout = 300; // 5 minute default if (tries < 0) tries = 0; if (source_url.Protocol() == "urllist" && destination_url.Protocol() == "urllist") { std::list sources = Arc::ReadURLList(source_url); std::list destinations = Arc::ReadURLList(destination_url); if (sources.empty()) { logger.msg(Arc::ERROR, "Can't read list of sources from file %s", source_url.Path()); return false; } if (destinations.empty()) { logger.msg(Arc::ERROR, "Can't read list of destinations from file %s", destination_url.Path()); return false; } if (sources.size() != destinations.size()) { logger.msg(Arc::ERROR, "Numbers of sources and destinations do not match"); return false; } bool r = true; for (std::list::iterator source = sources.begin(), destination = destinations.begin(); (source != sources.end()) && (destination != destinations.end()); ++source, ++destination) { if (!arccp(*source, *destination, locations, cache_dir, usercfg, secure, passive, force_meta, recursion, tries, verbose, timeout)) r = false; if (cancelled) return true; } return r; } if (source_url.Protocol() == "urllist") { std::list sources = Arc::ReadURLList(source_url); if (sources.empty()) { logger.msg(Arc::ERROR, "Can't read list of sources from file %s", source_url.Path()); return false; } bool r = true; for (std::list::iterator source = sources.begin(); source != sources.end(); ++source) { if (!arccp(*source, destination_url, locations, cache_dir, usercfg, secure, passive, force_meta, recursion, tries, verbose, timeout)) r = false; if (cancelled) return true; } return r; } if (destination_url.Protocol() == "urllist") { std::list destinations = Arc::ReadURLList(destination_url); if (destinations.empty()) { logger.msg(Arc::ERROR, "Can't read list of destinations from file %s", destination_url.Path()); return false; } bool r = true; for (std::list::iterator destination = destinations.begin(); destination != destinations.end(); ++destination) { if (!arccp(source_url, *destination, locations, cache_dir, usercfg, secure, passive, force_meta, recursion, tries, verbose, timeout)) r = false; if (cancelled) return true; } return r; } if (destination_url.Path()[destination_url.Path().length() - 1] != '/') { if (source_url.Path()[source_url.Path().length() - 1] == '/' && source_url.MetaDataOption("guid").empty()) { // files specified by guid may have path '/' logger.msg(Arc::ERROR, "Fileset copy to single object is not supported yet"); return false; } } else { // Copy TO fileset/directory if (source_url.Path()[source_url.Path().length() - 1] != '/') { // Copy FROM single object std::string::size_type p = source_url.Path().rfind('/'); if (p == std::string::npos) { logger.msg(Arc::ERROR, "Can't extract object's name from source url"); return false; } 
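// Illustrative note: p indexes the last '/' of the source path, so only the
// object's basename is appended to the directory-style destination; copying
// e.g. gsiftp://example.com/grid/file.dat into a destination ending in '/'
// therefore writes to <destination>/file.dat.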
destination_url.ChangePath(destination_url.Path() + source_url.Path().substr(p + 1)); } else { // Fileset copy Arc::DataHandle source(source_url, usercfg); if (!source) { logger.msg(Arc::ERROR, "Unsupported source url: %s", source_url.str()); return false; } if (source->RequiresCredentials() && !checkProxy(usercfg, source_url)) return false; std::list files; Arc::DataStatus result = source->List(files, (Arc::DataPoint::DataPointInfoType) (Arc::DataPoint::INFO_TYPE_NAME | Arc::DataPoint::INFO_TYPE_TYPE)); if (!result.Passed()) { logger.msg(Arc::ERROR, "%s. Cannot copy fileset", std::string(result)); return false; } bool failures = false; // Handle transfer of files first (treat unknown like files) for (std::list::iterator i = files.begin(); i != files.end(); ++i) { if ((i->GetType() != Arc::FileInfo::file_type_unknown) && (i->GetType() != Arc::FileInfo::file_type_file)) continue; logger.msg(Arc::INFO, "Name: %s", i->GetName()); Arc::URL s_url(std::string(source_url.str() + i->GetName())); Arc::URL d_url(std::string(destination_url.str() + i->GetName())); logger.msg(Arc::INFO, "Source: %s", s_url.str()); logger.msg(Arc::INFO, "Destination: %s", d_url.str()); Arc::DataStatus res = do_mover(s_url, d_url, locations, cache_dir, usercfg, secure, passive, force_meta, tries, verbose, timeout); if (cancelled) return true; if (!res.Passed()) failures = true; else logger.msg(Arc::INFO, "Current transfer complete"); } if (failures) { logger.msg(Arc::ERROR, "Some transfers failed"); return false; } // Go deeper if allowed bool r = true; if (recursion > 0) // Handle directories recursively for (std::list::iterator i = files.begin(); i != files.end(); ++i) { if (i->GetType() != Arc::FileInfo::file_type_dir) continue; if (verbose) logger.msg(Arc::INFO, "Directory: %s", i->GetName()); std::string s_url(source_url.str()); std::string d_url(destination_url.str()); s_url += i->GetName(); d_url += i->GetName(); s_url += "/"; d_url += "/"; if (!arccp(s_url, d_url, locations, cache_dir, usercfg, secure, passive, force_meta, recursion - 1, tries, verbose, timeout)) r = false; if (cancelled) return true; } return r; } } Arc::DataStatus res = do_mover(source_url, destination_url, locations, cache_dir, usercfg, secure, passive, force_meta, tries, verbose, timeout); if (cancelled) return true; if (!res.Passed()) return false; logger.msg(Arc::INFO, "Transfer complete"); return true; } static int runmain(int argc, char **argv) { setlocale(LC_ALL, ""); // set signal handlers for safe cancellation signal(SIGTERM, sig_cancel); signal(SIGINT, sig_cancel); static Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); Arc::OptionParser options(istring("source destination"), istring("The arccp command copies files to, from " "and between grid storage elements.")); bool passive = false; options.AddOption('p', "passive", istring("use passive transfer (off by default if secure " "is on, on by default if secure is not requested)"), passive); bool notpassive = false; options.AddOption('n', "nopassive", istring("do not try to force passive transfer"), notpassive); bool force = false; options.AddOption('f', "force", istring("if the destination is an indexing service " "and not the same as the source and the " "destination is already registered, then " "the copy is normally not done. 
However, if " "this option is specified the source is " "assumed to be a replica of the destination " "created in an uncontrolled way and the " "copy is done like in case of replication. " "Using this option also skips validation of " "completed transfers."), force); bool verbose = false; options.AddOption('i', "indicate", istring("show progress indicator"), verbose); bool nocopy = false; options.AddOption('T', "notransfer", istring("do not transfer, but register source into " "destination. destination must be a meta-url."), nocopy); bool secure = false; options.AddOption('u', "secure", istring("use secure transfer (insecure by default)"), secure); std::string cache_path; options.AddOption('y', "cache", istring("path to local cache (use to put file into cache)"), istring("path"), cache_path); bool infinite_recursion = false; options.AddOption('r', "recursive", istring("operate recursively"), infinite_recursion); int recursion = 0; options.AddOption('D', "depth", istring("operate recursively up to specified level"), istring("level"), recursion); int retries = 0; options.AddOption('R', "retries", istring("number of retries before failing file transfer"), istring("number"), retries); std::list locations; options.AddOption('L', "location", istring("physical location to write to when destination is an indexing service." " Must be specified for indexing services which do not automatically" " generate physical locations. Can be specified multiple times -" " locations will be tried in order until one succeeds."), istring("URL"), locations); bool thirdparty = false; options.AddOption('3', "thirdparty", istring("perform third party transfer, where the destination pulls" " from the source (only available with GFAL plugin)"), thirdparty); bool show_plugins = false; options.AddOption('P', "listplugins", istring("list the available plugins (protocols supported)"), show_plugins); int timeout = 20; options.AddOption('t', "timeout", istring("timeout in seconds (default 20)"), istring("seconds"), timeout); std::string conffile; options.AddOption('z', "conffile", istring("configuration file (default ~/.arc/client.conf)"), istring("filename"), conffile); std::string debug; options.AddOption('d', "debug", istring("FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG"), istring("debuglevel"), debug); bool version = false; options.AddOption('v', "version", istring("print version information"), version); std::list params = options.Parse(argc, argv); if (version) { std::cout << Arc::IString("%s version %s", "arccp", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. if (!debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(debug)); logger.msg(Arc::VERBOSE, "Running command: %s", options.GetCommandWithArguments()); if (show_plugins) { std::list modules; Arc::PluginsFactory pf(Arc::BaseConfig().MakeConfig(Arc::Config()).Parent()); pf.scan(Arc::FinderLoader::GetLibrariesList(), modules); Arc::PluginsFactory::FilterByKind("HED:DMC", modules); std::cout << Arc::IString("Protocol plugins available:") << std::endl; for (std::list::iterator itMod = modules.begin(); itMod != modules.end(); ++itMod) { for (std::list::iterator itPlug = itMod->plugins.begin(); itPlug != itMod->plugins.end(); ++itPlug) { std::cout << " " << itPlug->name << " - " << itPlug->description << std::endl; } } return 0; } // Attempt to acquire credentials. Whether they are required will be // determined later depending on the protocol. 
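// TryCredentials loads whatever proxy/certificate is available without
// failing here; protocols that actually need credentials are handled later by
// checkProxy(), which re-initialises with RequireCredentials and reports a
// missing or expired proxy.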
Arc::UserConfig usercfg(conffile, Arc::initializeCredentialsType::TryCredentials); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } usercfg.UtilsDirPath(Arc::UserConfig::ARCUSERDIRECTORY()); if (debug.empty() && !usercfg.Verbosity().empty()) { Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); } if (params.size() != 2) { logger.msg(Arc::ERROR, "Wrong number of parameters specified"); return 1; } if (passive && notpassive) { logger.msg(Arc::ERROR, "Options 'p' and 'n' can't be used simultaneously"); return 1; } if ((!secure) && (!notpassive)) passive = true; if (infinite_recursion) recursion = INT_MAX; std::list::iterator it = params.begin(); std::string source = *it; ++it; std::string destination = *it; if (source == "-") source = "stdio:///stdin"; if (destination == "-") destination = "stdio:///stdout"; if (thirdparty) { if (!arctransfer(source, destination, locations, usercfg, secure, passive, verbose, timeout)) return 1; } else if (nocopy) { if (!arcregister(source, destination, usercfg, force)) return 1; } else { if (!arccp(source, destination, locations, cache_path, usercfg, secure, passive, force, recursion, retries + 1, verbose, timeout)) return 1; } return 0; } int main(int argc, char **argv) { int xr = runmain(argc,argv); _exit(xr); return 0; } nordugrid-arc-6.14.0/src/clients/data/PaxHeaders.30264/arcrm.cpp0000644000000000000000000000013214152153376022315 xustar000000000000000030 mtime=1638455038.341645013 30 atime=1638455038.474647011 30 ctime=1638455101.023586838 nordugrid-arc-6.14.0/src/clients/data/arcrm.cpp0000644000175000002070000001536414152153376022313 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include static Arc::Logger logger(Arc::Logger::getRootLogger(), "arcrm"); /// Returns number of files that failed to be deleted int arcrm(const std::list& urls, Arc::UserConfig& usercfg, bool errcont) { Arc::DataHandle* handle = NULL; Arc::DataMover mover; unsigned int failed = 0; for (std::list::const_iterator url = urls.begin(); url != urls.end(); ++url) { if (!(*url)) { logger.msg(Arc::ERROR, "Invalid URL: %s", url->str()); failed++; continue; } if (url->Protocol() == "urllist") { std::list url_files = Arc::ReadURLList(*url); if (url_files.empty()) { logger.msg(Arc::ERROR, "Can't read list of locations from file %s", url->Path()); failed += 1; } else { failed += arcrm(url_files, usercfg, errcont); } continue; } // Depending on protocol SetURL() may allow reusing connections and hence // the same DataHandle object to delete multiple files. If it is not // supported SetURL() returns false and a new DataHandle must be created. 
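// In other words: the DataHandle is (re)created only when none exists yet or
// when the current DataPoint cannot be re-targeted via SetURL(); otherwise the
// same handle, and hence the same connection, is reused for the next URL in
// the list.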
if (!handle || !(*handle)->SetURL(*url)) { delete handle; handle = new Arc::DataHandle(*url, usercfg); if (!(*handle)) { logger.msg(Arc::ERROR, "Unsupported URL given: %s", url->str()); failed++; delete handle; handle = NULL; continue; } if ((*handle)->RequiresCredentials()) { if (!usercfg.InitializeCredentials(Arc::initializeCredentialsType::RequireCredentials)) { logger.msg(Arc::ERROR, "Unable to remove file %s", url->str()); logger.msg(Arc::ERROR, "Invalid credentials, please check proxy and/or CA certificates"); failed++; delete handle; handle = NULL; continue; } Arc::Credential holder(usercfg); if (!holder.IsValid()) { if (holder.GetEndTime() < Arc::Time()) { logger.msg(Arc::ERROR, "Proxy expired"); } logger.msg(Arc::ERROR, "Unable to remove file %s", url->str()); logger.msg(Arc::ERROR, "Invalid credentials, please check proxy and/or CA certificates"); failed++; delete handle; handle = NULL; continue; } } } // only one try (*handle)->SetTries(1); Arc::DataStatus res = mover.Delete(**handle, errcont); if (!res.Passed()) { logger.msg(Arc::ERROR, std::string(res)); if (res.Retryable()) { logger.msg(Arc::ERROR, "This seems like a temporary error, please try again later"); } failed++; } } delete handle; return failed; } static int runmain(int argc, char **argv) { setlocale(LC_ALL, ""); static Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); Arc::OptionParser options(istring("url [url ...]"), istring("The arcrm command deletes files on " "grid storage elements.")); bool force = false; options.AddOption('f', "force", istring("remove logical file name registration even " "if not all physical instances were removed"), force); bool show_plugins = false; options.AddOption('P', "listplugins", istring("list the available plugins (protocols supported)"), show_plugins); int timeout = 20; options.AddOption('t', "timeout", istring("timeout in seconds (default 20)"), istring("seconds"), timeout); std::string conffile; options.AddOption('z', "conffile", istring("configuration file (default ~/.arc/client.conf)"), istring("filename"), conffile); std::string debug; options.AddOption('d', "debug", istring("FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG"), istring("debuglevel"), debug); bool version = false; options.AddOption('v', "version", istring("print version information"), version); std::list params = options.Parse(argc, argv); if (version) { std::cout << Arc::IString("%s version %s", "arcrm", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. 
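// e.g. "arcrm -d DEBUG <url>" applies the requested verbosity already at this
// point, so messages produced while the client configuration
// (~/.arc/client.conf by default) is being loaded are shown at that level too.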
if (!debug.empty()) { Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(debug)); } logger.msg(Arc::VERBOSE, "Running command: %s", options.GetCommandWithArguments()); if (show_plugins) { std::list modules; Arc::PluginsFactory pf(Arc::BaseConfig().MakeConfig(Arc::Config()).Parent()); pf.scan(Arc::FinderLoader::GetLibrariesList(), modules); Arc::PluginsFactory::FilterByKind("HED:DMC", modules); std::cout << Arc::IString("Protocol plugins available:") << std::endl; for (std::list::iterator itMod = modules.begin(); itMod != modules.end(); ++itMod) { for (std::list::iterator itPlug = itMod->plugins.begin(); itPlug != itMod->plugins.end(); ++itPlug) { std::cout << " " << itPlug->name << " - " << itPlug->description << std::endl; } } return 0; } // credentials will be initialised later if necessary Arc::UserConfig usercfg(conffile, Arc::initializeCredentialsType::TryCredentials); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } usercfg.UtilsDirPath(Arc::UserConfig::ARCUSERDIRECTORY()); usercfg.Timeout(timeout); if (debug.empty() && !usercfg.Verbosity().empty()) { Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); } if (params.empty()) { logger.msg(Arc::ERROR, "Wrong number of parameters specified"); return 1; } std::list urls; for (std::list::const_iterator i = params.begin(); i != params.end(); ++i) { urls.push_back(*i); } unsigned int failed = arcrm(urls, usercfg, force); if (failed != 0) { if (params.size() != 1 || failed > 1) std::cout<&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/clients DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ 
$(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(arcsysconfdir)" \ "$(DESTDIR)$(bashcompletiondir)" "$(DESTDIR)$(exampledir)" DATA = $(arcsysconf_DATA) $(bashcompletion_DATA) $(example_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
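# Descriptive note: am__define_uniq_tagged_files below feeds $(am__tagged_files)
# through am__uniquify_input, so the resulting $$unique list used by the tags
# and ctags rules contains each source file only once.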
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = 
@EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = 
@PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ @DATA_CLIENT_ENABLED_FALSE@DATA_CLIENT = @DATA_CLIENT_ENABLED_TRUE@DATA_CLIENT = data @CREDENTIALS_CLIENT_ENABLED_FALSE@CREDENTIALS_CLIENT = @CREDENTIALS_CLIENT_ENABLED_TRUE@CREDENTIALS_CLIENT = credentials @COMPUTE_CLIENT_ENABLED_FALSE@COMPUTE_CLIENT = @COMPUTE_CLIENT_ENABLED_TRUE@COMPUTE_CLIENT = compute SUBDIRS = $(DATA_CLIENT) $(CREDENTIALS_CLIENT) 
$(COMPUTE_CLIENT) DIST_SUBDIRS = data credentials compute arcsysconfdir = $(sysconfdir)/arc arcsysconf_DATA = client.conf exampledir = $(pkgdatadir)/examples example_DATA = client.conf BASH_COMPLETION_SOURCE = client.bash_completion bashcompletiondir = $(bashcompdir) bashcompletion_DATA = arc-client-tools EXTRA_DIST = $(example_DATA) $(arcsysconf_DATA) $(BASH_COMPLETION_SOURCE) CLEANFILES = arc-client-tools all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/clients/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/clients/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-arcsysconfDATA: $(arcsysconf_DATA) @$(NORMAL_INSTALL) @list='$(arcsysconf_DATA)'; test -n "$(arcsysconfdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(arcsysconfdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(arcsysconfdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(arcsysconfdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(arcsysconfdir)" || exit $$?; \ done uninstall-arcsysconfDATA: @$(NORMAL_UNINSTALL) @list='$(arcsysconf_DATA)'; test -n "$(arcsysconfdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(arcsysconfdir)'; $(am__uninstall_files_from_dir) install-bashcompletionDATA: $(bashcompletion_DATA) @$(NORMAL_INSTALL) @list='$(bashcompletion_DATA)'; test -n "$(bashcompletiondir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(bashcompletiondir)'"; \ $(MKDIR_P) "$(DESTDIR)$(bashcompletiondir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(bashcompletiondir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(bashcompletiondir)" || exit $$?; \ done uninstall-bashcompletionDATA: @$(NORMAL_UNINSTALL) @list='$(bashcompletion_DATA)'; test -n "$(bashcompletiondir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(bashcompletiondir)'; $(am__uninstall_files_from_dir) install-exampleDATA: $(example_DATA) @$(NORMAL_INSTALL) @list='$(example_DATA)'; test -n "$(exampledir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(exampledir)'"; \ $(MKDIR_P) "$(DESTDIR)$(exampledir)" || exit 1; \ fi; \ 
for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(exampledir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(exampledir)" || exit $$?; \ done uninstall-exampleDATA: @$(NORMAL_UNINSTALL) @list='$(example_DATA)'; test -n "$(exampledir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(exampledir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(DATA) installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(arcsysconfdir)" "$(DESTDIR)$(bashcompletiondir)" "$(DESTDIR)$(exampledir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES) distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-arcsysconfDATA install-bashcompletionDATA \ install-exampleDATA install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-arcsysconfDATA uninstall-bashcompletionDATA \ uninstall-exampleDATA .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-arcsysconfDATA \ install-bashcompletionDATA install-data install-data-am \ install-dvi install-dvi-am install-exampleDATA install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am uninstall-arcsysconfDATA \ uninstall-bashcompletionDATA uninstall-exampleDATA arc-client-tools: $(BASH_COMPLETION_SOURCE) cp $< $@ # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/clients/PaxHeaders.30264/client.conf0000644000000000000000000000013214152153376021721 xustar000000000000000030 mtime=1638455038.336644937 30 atime=1638455038.472646981 30 ctime=1638455100.991586357 nordugrid-arc-6.14.0/src/clients/client.conf0000644000175000002070000000611614152153376021712 0ustar00mockbuildmock00000000000000[common] ## certificatepath = path - Specify the location of client certificate file. ## Environmental variable X509_USER_CERT redefines this value. ## default: $HOME/.globus/usercert.pem #certificatepath=/home/user/credentials/cert.pem ## keypath = path - Specify the location of client secret key file. ## Environmental variable X509_USER_KEY redefines this value. ## default: $HOME/.globus/userkey.pem #keypath=/home/user/credentials/key.pem ## cacertificatesdirectory = path - Specify the location of CA certificates directory ## Environmental variable X509_CERT_DIR redefines this value. ## default: /etc/grid-security/certificates #cacertificatesdirectory=/home/user/.globus/certificates ## proxypath = path - Specify the location of proxy certificate (both for generation and usage) ## Environmental variable X509_USER_PROXY redefines this value. 
## default: /tmp/x509up_u${UID} #proxypath=/home/user/credentials/proxy.pem ## vomsespath = path - Path to file or directory that holds client VOMS configuration ## to generate proxy certificates ## Environmental variables X509_VOMS_FILE and X509_VOMSES redefine this value ## If missing arcproxy will search for vomses in the following locations: ## - ~/.arc/vomses ## - ~/.voms/vomses ## - /etc/vomses ## - /etc/grid-security/vomses ## default: undefined #vomsespath=/home/user/credentials/vomses ## defaultvoms = vo[:command] - Default value for --voms (-S) arcproxy option that is ## used to define VO and optionaly FQANs used during proxy certificate generation ## multivalued ## default: undefined #defaultvoms=atlas:/atlas/Role=pilot #defaultvoms=nordugrid.org:all #defaultvoms=ops.ndgf.org ## rejectdiscovery = service - Specify the FQDN or URLs of the services that should be rejected ## during service discovery process by CLI tools (arcsub, arctest, arcresub) ## multivalued ## default: undefined #rejectdiscovery=bad.service.org #rejectdiscovery=bad2.service.org ## rejectmanagement = service - Specify the FQDN or URLs of the CEs that should be skipped ## during the job management (e.g. arcstat, arckill) ## multivalued ## default: undefined #rejectmanagement=bad3.service.org #rejectmanagement=bad4.service.org ## brokername = broker - Specify the broker used in resource discovery. ## The full list of installed brokers can be obtained running "arcsub -P" ## default: Random #brokername=FastestQueue ## brokerarguments = args - Specify broker arguments (if applicable to specified broker) ## default: undefined #brokername=PythonBroker #brokerarguments=ACIXBroker.ACIXBroker:https://cacheindex.ndgf.org:6443/data/index ## timeout = seconds - Amount of time to wait for a service to respond before ## considering it dead. ## default: 20 #timeout=60 ## joblist = path - Path to the jobs database that holds all extra data ## about submitted jobs to be used during further job management ## default: $HOME/.arc/jobs.dat #joblist=/home/user/arcjobs.dat ## joblisttype = type - Type of the backend used for jobs database. ## IT IS STRONGLY advised to keep default SQLITE type of backend. 
## allowedvalues: SQLITE, BDB, XML ## default: SQLITE #joblisttype=XML nordugrid-arc-6.14.0/src/clients/PaxHeaders.30264/credentials0000644000000000000000000000013214152153475022014 xustar000000000000000030 mtime=1638455101.051587258 30 atime=1638455103.999631554 30 ctime=1638455101.051587258 nordugrid-arc-6.14.0/src/clients/credentials/0000755000175000002070000000000014152153475022056 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/clients/credentials/PaxHeaders.30264/arcproxy_proxy.cpp0000644000000000000000000000013214152153376025705 xustar000000000000000030 mtime=1638455038.339644982 30 atime=1638455038.474647011 30 ctime=1638455101.050587243 nordugrid-arc-6.14.0/src/clients/credentials/arcproxy_proxy.cpp0000644000175000002070000001127014152153376025673 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #ifdef HAVE_NSS #include #endif using namespace ArcCredential; void create_tmp_proxy(std::string& proxy, Arc::Credential& signer) { int keybits = 2048; Arc::Time now; Arc::Period period = 3600 * 12 + 300; std::string req_str; Arc::Credential tmp_proxyreq(now-Arc::Period(300), period, keybits); tmp_proxyreq.GenerateRequest(req_str); std::string proxy_private_key; std::string signing_cert; std::string signing_cert_chain; tmp_proxyreq.OutputPrivatekey(proxy_private_key); signer.OutputCertificate(signing_cert); signer.OutputCertificateChain(signing_cert_chain); if (!signer.SignRequest(&tmp_proxyreq, proxy)) throw std::runtime_error("Failed to sign proxy"); proxy.append(proxy_private_key).append(signing_cert).append(signing_cert_chain); } void create_proxy(std::string& proxy, Arc::Credential& signer, const std::string& proxy_policy, const Arc::Time& proxy_start, const Arc::Period& proxy_period, const std::string& vomsacseq, int keybits, const std::string& signing_algorithm) { std::string private_key, signing_cert, signing_cert_chain; std::string req_str; if(keybits < 0) keybits = signer.GetKeybits(); Arc::Credential cred_request(proxy_start, proxy_period, keybits); cred_request.SetSigningAlgorithm(signer.GetSigningAlgorithm()); if(!signing_algorithm.empty() && signing_algorithm != "inherit") { if(signing_algorithm == "sha1") { cred_request.SetSigningAlgorithm(Arc::SIGN_SHA1); } else if(signing_algorithm == "sha2") { cred_request.SetSigningAlgorithm(Arc::SIGN_SHA256); } else if(signing_algorithm == "sha224") { cred_request.SetSigningAlgorithm(Arc::SIGN_SHA224); } else if(signing_algorithm == "sha256") { cred_request.SetSigningAlgorithm(Arc::SIGN_SHA256); } else if(signing_algorithm == "sha384") { cred_request.SetSigningAlgorithm(Arc::SIGN_SHA384); } else if(signing_algorithm == "sha512") { cred_request.SetSigningAlgorithm(Arc::SIGN_SHA512); } else { throw std::runtime_error("Unknown signing algorithm specified: "+signing_algorithm); } } cred_request.GenerateRequest(req_str); cred_request.OutputPrivatekey(private_key); signer.OutputCertificate(signing_cert); signer.OutputCertificateChain(signing_cert_chain); //Put the voms attribute certificate into proxy certificate if (!vomsacseq.empty()) { bool r = cred_request.AddExtension("acseq", (char**)(vomsacseq.c_str())); if (!r) std::cout << Arc::IString("Failed to add VOMS AC extension. 
Your proxy may be incomplete.") << std::endl; } if(!proxy_policy.empty()) { cred_request.SetProxyPolicy("rfc", "anylanguage", proxy_policy, -1); } else if(CERT_IS_LIMITED_PROXY(signer.GetType())) { // Gross hack for globus. If Globus marks own proxy as limited // it expects every derived proxy to be limited or at least // independent. Independent proxies has little sense in Grid // world. So here we make our proxy globus-limited to allow // it to be used with globus code. cred_request.SetProxyPolicy("rfc", "limited", proxy_policy, -1); } else { cred_request.SetProxyPolicy("rfc", "inheritAll", proxy_policy, -1); } if (!signer.SignRequest(&cred_request, proxy)) throw std::runtime_error("Failed to sign proxy"); proxy.append(private_key).append(signing_cert).append(signing_cert_chain); } void write_proxy_file(const std::string& path, const std::string& content) { if((!Arc::FileDelete(path)) && (errno != ENOENT)) { throw std::runtime_error("Failed to remove proxy file " + path); } if(!Arc::FileCreate(path, content, 0, 0, S_IRUSR | S_IWUSR)) { throw std::runtime_error("Failed to create proxy file " + path); } } void remove_proxy_file(const std::string& path) { if((!Arc::FileDelete(path)) && (errno != ENOENT)) { throw std::runtime_error("Failed to remove proxy file " + path); } } void remove_cert_file(const std::string& path) { if((!Arc::FileDelete(path)) && (errno != ENOENT)) { throw std::runtime_error("Failed to remove certificate file " + path); } } nordugrid-arc-6.14.0/src/clients/credentials/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376024125 xustar000000000000000030 mtime=1638455038.339644982 30 atime=1638455038.473646996 30 ctime=1638455101.044587153 nordugrid-arc-6.14.0/src/clients/credentials/Makefile.am0000644000175000002070000000130614152153376024112 0ustar00mockbuildmock00000000000000bin_PROGRAMS = arcproxy arcproxy_SOURCES = arcproxy.cpp arcproxy_voms.cpp arcproxy_myproxy.cpp arcproxy_proxy.cpp arcproxy.h arcproxy_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcproxy_LDADD = \ $(top_builddir)/src/hed/libs/credentialstore/libarccredentialstore.la \ $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/crypto/libarccrypto.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) man_MANS = arcproxy.1 nordugrid-arc-6.14.0/src/clients/credentials/PaxHeaders.30264/Makefile.in0000644000000000000000000000013114152153421024124 xustar000000000000000029 mtime=1638455057.90893902 30 atime=1638455091.266440232 30 ctime=1638455101.044587153 nordugrid-arc-6.14.0/src/clients/credentials/Makefile.in0000644000175000002070000011572214152153421024122 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
@SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ bin_PROGRAMS = arcproxy$(EXEEXT) subdir = src/clients/credentials DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/arcproxy.1.in $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = arcproxy.1 CONFIG_CLEAN_VPATH_FILES = am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)" PROGRAMS = $(bin_PROGRAMS) am_arcproxy_OBJECTS = arcproxy-arcproxy.$(OBJEXT) \ arcproxy-arcproxy_voms.$(OBJEXT) \ arcproxy-arcproxy_myproxy.$(OBJEXT) \ arcproxy-arcproxy_proxy.$(OBJEXT) arcproxy_OBJECTS = $(am_arcproxy_OBJECTS) am__DEPENDENCIES_1 = arcproxy_DEPENDENCIES = $(top_builddir)/src/hed/libs/credentialstore/libarccredentialstore.la \ $(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/crypto/libarccrypto.la \ 
$(top_builddir)/src/hed/libs/common/libarccommon.la \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = arcproxy_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcproxy_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(arcproxy_SOURCES) DIST_SOURCES = $(arcproxy_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! 
-r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } man1dir = $(mandir)/man1 NROFF = nroff MANS = $(man_MANS) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = 
@EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ 
PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ arcproxy_SOURCES = arcproxy.cpp arcproxy_voms.cpp arcproxy_myproxy.cpp arcproxy_proxy.cpp arcproxy.h arcproxy_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcproxy_LDADD = \ $(top_builddir)/src/hed/libs/credentialstore/libarccredentialstore.la \ 
$(top_builddir)/src/hed/libs/communication/libarccommunication.la \ $(top_builddir)/src/hed/libs/delegation/libarcdelegation.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/crypto/libarccrypto.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) man_MANS = arcproxy.1 all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/clients/credentials/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/clients/credentials/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): arcproxy.1: $(top_builddir)/config.status $(srcdir)/arcproxy.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-binPROGRAMS: $(bin_PROGRAMS) @$(NORMAL_INSTALL) @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ fi; \ for p in $$list; do echo "$$p $$p"; done | \ sed 's/$(EXEEXT)$$//' | \ while read p p1; do if test -f $$p \ || test -f $$p1 \ ; then echo "$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n;h' \ -e 's|.*|.|' \ -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) files[d] = files[d] " " $$1; \ else { print "f", $$3 "/" $$4, $$1; } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ } \ ; done uninstall-binPROGRAMS: @$(NORMAL_UNINSTALL) @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ -e 's/$$/$(EXEEXT)/' \ `; \ test -n "$$list" || exit 0; \ echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(bindir)" && rm -f $$files clean-binPROGRAMS: @list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm 
-f" $$list; \ rm -f $$list arcproxy$(EXEEXT): $(arcproxy_OBJECTS) $(arcproxy_DEPENDENCIES) $(EXTRA_arcproxy_DEPENDENCIES) @rm -f arcproxy$(EXEEXT) $(AM_V_CXXLD)$(arcproxy_LINK) $(arcproxy_OBJECTS) $(arcproxy_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcproxy-arcproxy.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcproxy-arcproxy_myproxy.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcproxy-arcproxy_proxy.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcproxy-arcproxy_voms.Po@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< arcproxy-arcproxy.o: arcproxy.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -MT arcproxy-arcproxy.o -MD -MP -MF $(DEPDIR)/arcproxy-arcproxy.Tpo -c -o arcproxy-arcproxy.o `test -f 'arcproxy.cpp' || echo '$(srcdir)/'`arcproxy.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcproxy-arcproxy.Tpo $(DEPDIR)/arcproxy-arcproxy.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcproxy.cpp' object='arcproxy-arcproxy.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -c -o arcproxy-arcproxy.o `test -f 'arcproxy.cpp' || echo '$(srcdir)/'`arcproxy.cpp arcproxy-arcproxy.obj: arcproxy.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -MT arcproxy-arcproxy.obj -MD -MP -MF $(DEPDIR)/arcproxy-arcproxy.Tpo -c -o arcproxy-arcproxy.obj `if test -f 'arcproxy.cpp'; then $(CYGPATH_W) 'arcproxy.cpp'; else $(CYGPATH_W) '$(srcdir)/arcproxy.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcproxy-arcproxy.Tpo $(DEPDIR)/arcproxy-arcproxy.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcproxy.cpp' object='arcproxy-arcproxy.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ 
DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -c -o arcproxy-arcproxy.obj `if test -f 'arcproxy.cpp'; then $(CYGPATH_W) 'arcproxy.cpp'; else $(CYGPATH_W) '$(srcdir)/arcproxy.cpp'; fi` arcproxy-arcproxy_voms.o: arcproxy_voms.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -MT arcproxy-arcproxy_voms.o -MD -MP -MF $(DEPDIR)/arcproxy-arcproxy_voms.Tpo -c -o arcproxy-arcproxy_voms.o `test -f 'arcproxy_voms.cpp' || echo '$(srcdir)/'`arcproxy_voms.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcproxy-arcproxy_voms.Tpo $(DEPDIR)/arcproxy-arcproxy_voms.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcproxy_voms.cpp' object='arcproxy-arcproxy_voms.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -c -o arcproxy-arcproxy_voms.o `test -f 'arcproxy_voms.cpp' || echo '$(srcdir)/'`arcproxy_voms.cpp arcproxy-arcproxy_voms.obj: arcproxy_voms.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -MT arcproxy-arcproxy_voms.obj -MD -MP -MF $(DEPDIR)/arcproxy-arcproxy_voms.Tpo -c -o arcproxy-arcproxy_voms.obj `if test -f 'arcproxy_voms.cpp'; then $(CYGPATH_W) 'arcproxy_voms.cpp'; else $(CYGPATH_W) '$(srcdir)/arcproxy_voms.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcproxy-arcproxy_voms.Tpo $(DEPDIR)/arcproxy-arcproxy_voms.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcproxy_voms.cpp' object='arcproxy-arcproxy_voms.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -c -o arcproxy-arcproxy_voms.obj `if test -f 'arcproxy_voms.cpp'; then $(CYGPATH_W) 'arcproxy_voms.cpp'; else $(CYGPATH_W) '$(srcdir)/arcproxy_voms.cpp'; fi` arcproxy-arcproxy_myproxy.o: arcproxy_myproxy.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -MT arcproxy-arcproxy_myproxy.o -MD -MP -MF $(DEPDIR)/arcproxy-arcproxy_myproxy.Tpo -c -o arcproxy-arcproxy_myproxy.o `test -f 'arcproxy_myproxy.cpp' || echo '$(srcdir)/'`arcproxy_myproxy.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcproxy-arcproxy_myproxy.Tpo $(DEPDIR)/arcproxy-arcproxy_myproxy.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcproxy_myproxy.cpp' object='arcproxy-arcproxy_myproxy.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -c -o arcproxy-arcproxy_myproxy.o `test -f 'arcproxy_myproxy.cpp' || echo '$(srcdir)/'`arcproxy_myproxy.cpp arcproxy-arcproxy_myproxy.obj: arcproxy_myproxy.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) 
$(arcproxy_CXXFLAGS) $(CXXFLAGS) -MT arcproxy-arcproxy_myproxy.obj -MD -MP -MF $(DEPDIR)/arcproxy-arcproxy_myproxy.Tpo -c -o arcproxy-arcproxy_myproxy.obj `if test -f 'arcproxy_myproxy.cpp'; then $(CYGPATH_W) 'arcproxy_myproxy.cpp'; else $(CYGPATH_W) '$(srcdir)/arcproxy_myproxy.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcproxy-arcproxy_myproxy.Tpo $(DEPDIR)/arcproxy-arcproxy_myproxy.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcproxy_myproxy.cpp' object='arcproxy-arcproxy_myproxy.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -c -o arcproxy-arcproxy_myproxy.obj `if test -f 'arcproxy_myproxy.cpp'; then $(CYGPATH_W) 'arcproxy_myproxy.cpp'; else $(CYGPATH_W) '$(srcdir)/arcproxy_myproxy.cpp'; fi` arcproxy-arcproxy_proxy.o: arcproxy_proxy.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -MT arcproxy-arcproxy_proxy.o -MD -MP -MF $(DEPDIR)/arcproxy-arcproxy_proxy.Tpo -c -o arcproxy-arcproxy_proxy.o `test -f 'arcproxy_proxy.cpp' || echo '$(srcdir)/'`arcproxy_proxy.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcproxy-arcproxy_proxy.Tpo $(DEPDIR)/arcproxy-arcproxy_proxy.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcproxy_proxy.cpp' object='arcproxy-arcproxy_proxy.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -c -o arcproxy-arcproxy_proxy.o `test -f 'arcproxy_proxy.cpp' || echo '$(srcdir)/'`arcproxy_proxy.cpp arcproxy-arcproxy_proxy.obj: arcproxy_proxy.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -MT arcproxy-arcproxy_proxy.obj -MD -MP -MF $(DEPDIR)/arcproxy-arcproxy_proxy.Tpo -c -o arcproxy-arcproxy_proxy.obj `if test -f 'arcproxy_proxy.cpp'; then $(CYGPATH_W) 'arcproxy_proxy.cpp'; else $(CYGPATH_W) '$(srcdir)/arcproxy_proxy.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcproxy-arcproxy_proxy.Tpo $(DEPDIR)/arcproxy-arcproxy_proxy.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcproxy_proxy.cpp' object='arcproxy-arcproxy_proxy.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcproxy_CXXFLAGS) $(CXXFLAGS) -c -o arcproxy-arcproxy_proxy.obj `if test -f 'arcproxy_proxy.cpp'; then $(CYGPATH_W) 'arcproxy_proxy.cpp'; else $(CYGPATH_W) '$(srcdir)/arcproxy_proxy.cpp'; fi` mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man1: $(man_MANS) @$(NORMAL_INSTALL) @list1=''; \ list2='$(man_MANS)'; \ test -n "$(man1dir)" \ && test -n "`echo $$list1$$list2`" \ || exit 0; \ echo " $(MKDIR_P) '$(DESTDIR)$(man1dir)'"; \ $(MKDIR_P) "$(DESTDIR)$(man1dir)" || exit 1; \ { for i in $$list1; do echo "$$i"; done; \ if test -n "$$list2"; then \ for i in $$list2; do echo "$$i"; done \ | sed -n '/\.1[a-z]*$$/p'; \ fi; \ } | while read p; do \ if test -f $$p; then d=; else 
d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man1dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man1dir)" || exit $$?; }; \ done; } uninstall-man1: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man1dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.1[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ dir='$(DESTDIR)$(man1dir)'; $(am__uninstall_files_from_dir) ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(PROGRAMS) $(MANS) installdirs: for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-binPROGRAMS clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-binPROGRAMS install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man1 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-binPROGRAMS uninstall-man uninstall-man: uninstall-man1 .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \ clean-binPROGRAMS clean-generic clean-libtool cscopelist-am \ ctags ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-binPROGRAMS \ install-data install-data-am install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-man install-man1 \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags tags-am uninstall uninstall-am \ uninstall-binPROGRAMS uninstall-man uninstall-man1 # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/clients/credentials/PaxHeaders.30264/arcproxy.h0000644000000000000000000000013214152153376024111 xustar000000000000000030 mtime=1638455038.339644982 30 atime=1638455038.473646996 30 ctime=1638455101.051587258 nordugrid-arc-6.14.0/src/clients/credentials/arcproxy.h0000644000175000002070000000325014152153376024076 0ustar00mockbuildmock00000000000000 // Functions in arcproxy_proxy.cpp // Create simple temporary proxy void create_tmp_proxy(std::string& proxy, Arc::Credential& signer); // Create proxy with all bells and whistles as specified in arguments void create_proxy(std::string& proxy, Arc::Credential& signer, const std::string& proxy_policy, const Arc::Time& proxy_start, const Arc::Period& proxy_period, const std::string& vomsacseq, int keybits, const std::string& signing_algorithm); // Store content of proxy void write_proxy_file(const std::string& path, const std::string& content); // Delete proxy file void remove_proxy_file(const std::string& path); // Delete certificate file void remove_cert_file(const std::string& path); // Functions in arcproxy_voms.cpp // Create simple temporary proxy // Collect VOMS AC from configured Voms servers bool contact_voms_servers(std::map >& vomscmdlist, std::list& orderlist, std::string& vomses_path, bool use_gsi_comm, bool use_http_comm, const std::string& voms_period, Arc::UserConfig& usercfg, Arc::Logger& logger, const std::string& tmp_proxy_path, std::string& vomsacseq); // Functions in arcproxy_myproxy.cpp // Communicate with MyProxy server bool contact_myproxy_server(const std::string& myproxy_server, const std::string& myproxy_command, const std::string& myproxy_user_name, bool use_empty_passphrase, const std::string& myproxy_period, const std::string& retrievable_by_cert, Arc::Time& proxy_start, Arc::Period& proxy_period, std::list& vomslist, std::string& vomses_path, const std::string& proxy_path, Arc::UserConfig& usercfg, Arc::Logger& logger); nordugrid-arc-6.14.0/src/clients/credentials/PaxHeaders.30264/arcproxy.cpp0000644000000000000000000000013214152153376024444 xustar000000000000000030 mtime=1638455038.339644982 30 atime=1638455038.473646996 30 ctime=1638455101.047587198 nordugrid-arc-6.14.0/src/clients/credentials/arcproxy.cpp0000644000175000002070000017016714152153376024445 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #ifdef HAVE_NSS #include #endif #include "arcproxy.h" using namespace ArcCredential; #ifdef HAVE_NSS static void get_default_nssdb_path(std::vector& nss_paths) { const Arc::User user; // The profiles.ini could exist under firefox, seamonkey and thunderbird std::vector profiles_homes; std::string home_path = user.Home(); std::string profiles_home; #if defined(_MACOSX) profiles_home = home_path + G_DIR_SEPARATOR_S "Library" G_DIR_SEPARATOR_S "Application Support" G_DIR_SEPARATOR_S "Firefox"; profiles_homes.push_back(profiles_home); profiles_home = home_path + G_DIR_SEPARATOR_S "Library" G_DIR_SEPARATOR_S "Application Support" G_DIR_SEPARATOR_S "SeaMonkey"; profiles_homes.push_back(profiles_home); profiles_home = home_path + G_DIR_SEPARATOR_S "Library" G_DIR_SEPARATOR_S "Thunderbird"; profiles_homes.push_back(profiles_home); #else //Linux profiles_home = home_path + G_DIR_SEPARATOR_S ".mozilla" G_DIR_SEPARATOR_S "firefox"; profiles_homes.push_back(profiles_home); 
profiles_home = home_path + G_DIR_SEPARATOR_S ".mozilla" G_DIR_SEPARATOR_S "seamonkey"; profiles_homes.push_back(profiles_home); profiles_home = home_path + G_DIR_SEPARATOR_S ".thunderbird"; profiles_homes.push_back(profiles_home); #endif std::vector pf_homes; // Remove the unreachable directories for(int i=0; i ini_home; // Remove the unreachable "profiles.ini" files for(int i=0; i::iterator it; for(it = ini_home.begin(); it != ini_home.end(); ++it) { std::string pf_ini = (*it).first; std::string pf_home = (*it).second; std::string profiles; std::ifstream in_f(pf_ini.c_str()); std::getline(in_f, profiles, '\0'); std::list lines; Arc::tokenize(profiles, lines, "\n"); // Parse each [Profile] for (std::list::iterator i = lines.begin(); i != lines.end(); ++i) { std::vector inivalue; Arc::tokenize(*i, inivalue, "="); if((inivalue[0].find("Profile") != std::string::npos) && (inivalue[0].find("StartWithLast") == std::string::npos)) { bool is_relative = false; std::string path; std::advance(i, 1); for(; i != lines.end();) { inivalue.clear(); Arc::tokenize(*i, inivalue, "="); if (inivalue.size() == 2) { if (inivalue[0] == "IsRelative") { if(inivalue[1] == "1") is_relative = true; else is_relative = false; } if (inivalue[0] == "Path") path = inivalue[1]; } if(inivalue[0].find("Profile") != std::string::npos) { --i; break; } std::advance(i, 1); } std::string nss_path; if(is_relative) nss_path = pf_home + G_DIR_SEPARATOR_S + path; else nss_path = path; struct stat st; if((::stat(nss_path.c_str(),&st) == 0) && (S_ISDIR(st.st_mode)) && (user.get_uid() == st.st_uid)) nss_paths.push_back(nss_path); if(i == lines.end()) break; } } } return; } static void get_nss_certname(std::string& certname, Arc::Logger& logger) { std::list certInfolist; ArcAuthNSS::nssListUserCertificatesInfo(certInfolist); if(certInfolist.size()) { std::cout<::iterator it; for(it = certInfolist.begin(); it != certInfolist.end(); ++it) { ArcAuthNSS::certInfo cert_info = (*it); std::string sub_dn = cert_info.subject_dn; std::string cn_name; std::string::size_type pos1, pos2; pos1 = sub_dn.find("CN="); if(pos1 != std::string::npos) { pos2 = sub_dn.find(",", pos1); if(pos2 != std::string::npos) cn_name = " ("+sub_dn.substr(pos1+3, pos2-pos1-3) + ")"; } std::cout< cert_info.end) msg = "(expired)"; else if((now + 300) > cert_info.end) msg = "(will be expired in 5 min)"; else if((now + 3600*24) > cert_info.end) { Arc::Period left(cert_info.end - now); msg = std::string("(will be expired in ") + std::string(left) + ")"; } std::cout<1)) { char c = getchar(); int num = c - '0'; if((num<=certInfolist.size()) && (num>=1)) { it = certInfolist.begin(); std::advance(it, num-1); certname = (*it).certname; break; } } } #endif static std::string signTypeToString(Arc::Signalgorithm alg) { switch(alg) { case Arc::SIGN_SHA1: return "sha1"; case Arc::SIGN_SHA224: return "sha224"; case Arc::SIGN_SHA256: return "sha256"; case Arc::SIGN_SHA384: return "sha384"; case Arc::SIGN_SHA512: return "sha512"; default: break; } return "unknown"; } typedef enum { pass_all, pass_private_key, pass_myproxy, pass_myproxy_new, pass_nss } pass_destination_type; std::map passsources; class PasswordSourceFile: public Arc::PasswordSource { private: std::ifstream file_; public: PasswordSourceFile(const std::string& filename):file_(filename.c_str()) { }; virtual Result Get(std::string& password, int minsize, int maxsize) { if(!file_) return Arc::PasswordSource::NO_PASSWORD; std::getline(file_, password); return Arc::PasswordSource::PASSWORD; }; }; static int runmain(int argc, char 
*argv[]) { setlocale(LC_ALL, ""); Arc::Logger logger(Arc::Logger::getRootLogger(), "arcproxy"); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); Arc::OptionParser options(" ", istring("The arcproxy command creates a proxy from a key/certificate pair which can\n" "then be used to access grid resources."), istring("Supported constraints are:\n" " validityStart=time (e.g. 2008-05-29T10:20:30Z; if not specified, start from now)\n" " validityEnd=time\n" " validityPeriod=time (e.g. 43200 or 12h or 12H; if both validityPeriod and validityEnd\n" " not specified, the default is 12 hours for local proxy, and 168 hours for delegated\n" " proxy on myproxy server)\n" " vomsACvalidityPeriod=time (e.g. 43200 or 12h or 12H; if not specified, the default\n" " is the minimum value of 12 hours and validityPeriod)\n" " myproxyvalidityPeriod=time (lifetime of proxies delegated by myproxy server,\n" " e.g. 43200 or 12h or 12H; if not specified, the default is the minimum value of\n" " 12 hours and validityPeriod (which is lifetime of the delegated proxy on myproxy server))\n" " proxyPolicy=policy content\n" " proxyPolicyFile=policy file\n" " keybits=number - length of the key to generate. Default is 2048 bits.\n" " Special value 'inherit' is to use key length of signing certificate.\n" " signingAlgorithm=name - signing algorithm to use for signing public key of proxy.\n" " Possible values are sha1, sha2 (alias for sha256), sha224, sha256, sha384, sha512\n" " and inherit (use algorithm of signing certificate). Default is inherit.\n" " With old systems, only sha1 is acceptable.\n" "\n" "Supported information item names are:\n" " subject - subject name of proxy certificate.\n" " identity - identity subject name of proxy certificate.\n" " issuer - issuer subject name of proxy certificate.\n" " ca - subject name of CA which issued initial certificate.\n" " path - file system path to file containing proxy.\n" " type - type of proxy certificate.\n" " validityStart - timestamp when proxy validity starts.\n" " validityEnd - timestamp when proxy validity ends.\n" " validityPeriod - duration of proxy validity in seconds.\n" " validityLeft - duration of proxy validity left in seconds.\n" " vomsVO - VO name represented by VOMS attribute\n" " vomsSubject - subject of certificate for which VOMS attribute is issued\n" " vomsIssuer - subject of service which issued VOMS certificate\n" " vomsACvalidityStart - timestamp when VOMS attribute validity starts.\n" " vomsACvalidityEnd - timestamp when VOMS attribute validity ends.\n" " vomsACvalidityPeriod - duration of VOMS attribute validity in seconds.\n" " vomsACvalidityLeft - duration of VOMS attribute validity left in seconds.\n" " proxyPolicy\n" " keybits - size of proxy certificate key in bits.\n" " signingAlgorithm - algorithm used to sign proxy certificate.\n" "Items are printed in requested order and are separated by newline.\n" "If item has multiple values they are printed in same line separated by |.\n" "\n" "Supported password destinations are:\n" " key - for reading private key\n" " myproxy - for accessing credentials at MyProxy service\n" " myproxynew - for creating credentials at MyProxy service\n" " all - for any purspose.\n" "\n" "Supported password sources are:\n" " quoted string (\"password\") - explicitly specified password\n" " int - interactively request password from console\n" " stdin - read 
password from standard input delimited by newline\n" " file:filename - read password from file named filename\n" " stream:# - read password from input stream number #.\n" " Currently only 0 (standard input) is supported.\n" )); std::string proxy_path; options.AddOption('P', "proxy", istring("path to the proxy file"), istring("path"), proxy_path); std::string cert_path; options.AddOption('C', "cert", istring("path to the certificate file, it can be either PEM, DER, or PKCS12 formatted"), istring("path"), cert_path); std::string key_path; options.AddOption('K', "key", istring("path to the private key file, if the certificate is in PKCS12 format, then no need to give private key"), istring("path"), key_path); std::string ca_dir; options.AddOption('T', "cadir", istring("path to the trusted certificate directory, only needed for the VOMS client functionality"), istring("path"), ca_dir); std::string voms_dir; options.AddOption('s', "vomsdir", istring("path to the top directory of VOMS *.lsc files, only needed for the VOMS client functionality"), istring("path"), voms_dir); std::string vomses_path; options.AddOption('V', "vomses", istring("path to the VOMS server configuration file"), istring("path"), vomses_path); std::list vomslist; options.AddOption('S', "voms", istring("voms<:command>. Specify VOMS server (More than one VOMS server \n" " can be specified like this: --voms VOa:command1 --voms VOb:command2). \n" " :command is optional, and is used to ask for specific attributes(e.g: roles)\n" " command options are:\n" " all --- put all of this DN's attributes into AC; \n" " list ---list all of the DN's attribute, will not create AC extension; \n" " /Role=yourRole --- specify the role, if this DN \n" " has such a role, the role will be put into AC; \n" " /voname/groupname/Role=yourRole --- specify the VO, group and role; if this DN \n" " has such a role, the role will be put into AC. \n" " If this option is not specified values from configuration files are used.\n" " To avoid anything to be used specify -S with empty value.\n" ), istring("string"), vomslist); std::list orderlist; options.AddOption('o', "order", istring("group<:role>. 
Specify ordering of attributes \n" " Example: --order /knowarc.eu/coredev:Developer,/knowarc.eu/testers:Tester \n" " or: --order /knowarc.eu/coredev:Developer --order /knowarc.eu/testers:Tester \n" " Note that it does not make sense to specify the order if you have two or more different VOMS servers specified"), istring("string"), orderlist); bool use_gsi_comm = false; options.AddOption('G', "gsicom", istring("use GSI communication protocol for contacting VOMS services"), use_gsi_comm); bool use_http_comm = false; options.AddOption('H', "httpcom", istring("use HTTP communication protocol for contacting VOMS services that provide RESTful access \n" " Note for RESTful access, \'list\' command and multiple VOMS server are not supported\n"), use_http_comm); bool use_old_comm = false; options.AddOption('B', "oldcom", istring("use old communication protocol for contacting VOMS services instead of RESTful access\n"), use_old_comm); bool use_gsi_proxy = false; options.AddOption('O', "old", istring("this option is not functional (old GSI proxies are not supported anymore)"), use_gsi_proxy); bool info = false; options.AddOption('I', "info", istring("print all information about this proxy."), info); std::list infoitemlist; options.AddOption('i', "infoitem", istring("print selected information about this proxy."), istring("string"), infoitemlist); bool remove_proxy = false; options.AddOption('r', "remove", istring("remove proxy"), remove_proxy); std::string user_name; //user name to MyProxy server options.AddOption('U', "user", istring("username to MyProxy server (if missing subject of user certificate is used)"), istring("string"), user_name); bool use_empty_passphrase = false; //if use empty passphrase to myproxy server options.AddOption('N', "nopassphrase", istring( "don't prompt for a credential passphrase, when retrieve a \n" " credential from on MyProxy server. \n" " The precondition of this choice is the credential is PUT onto\n" " the MyProxy server without a passphrase by using -R (--retrievable_by_cert) \n" " option when being PUTing onto Myproxy server. \n" " This option is specific for the GET command when contacting Myproxy server." ), use_empty_passphrase); std::string retrievable_by_cert; //if use empty passphrase to myproxy server options.AddOption('R', "retrievable_by_cert", istring( "Allow specified entity to retrieve credential without passphrase.\n" " This option is specific for the PUT command when contacting Myproxy server." ), istring("string"), retrievable_by_cert); std::string myproxy_server; //url of MyProxy server options.AddOption('L', "myproxysrv", istring("hostname[:port] of MyProxy server"), istring("string"), myproxy_server); std::string myproxy_command; //command to myproxy server options.AddOption('M', "myproxycmd", istring( "command to MyProxy server. The command can be PUT, GET, INFO, NEWPASS or DESTROY.\n" " PUT -- put a delegated credentials to the MyProxy server; \n" " GET -- get a delegated credentials from the MyProxy server; \n" " INFO -- get and present information about credentials stored at the MyProxy server; \n" " NEWPASS -- change password protecting credentials stored at the MyProxy server; \n" " DESTROY -- wipe off credentials stored at the MyProxy server; \n" " Local credentials (certificate and key) are not necessary except in case of PUT. 
\n" " MyProxy functionality can be used together with VOMS functionality.\n" " --voms and --vomses can be used for Get command if VOMS attributes\n" " is required to be included in the proxy.\n" ), istring("string"), myproxy_command); bool use_nssdb = false; #ifdef HAVE_NSS options.AddOption('F', "nssdb", istring("use NSS credential database in default Mozilla profiles, \n" " including Firefox, Seamonkey and Thunderbird.\n"), use_nssdb); #endif std::list constraintlist; options.AddOption('c', "constraint", istring("proxy constraints"), istring("string"), constraintlist); std::list passsourcelist; options.AddOption('p', "passwordsource", istring("password destination=password source"), istring("string"), passsourcelist); int timeout = -1; options.AddOption('t', "timeout", istring("timeout in seconds (default 20)"), istring("seconds"), timeout); std::string conffile; options.AddOption('z', "conffile", istring("configuration file (default ~/.arc/client.conf)"), istring("filename"), conffile); std::string debug; options.AddOption('d', "debug", istring("FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG"), istring("debuglevel"), debug); bool version = false; options.AddOption('v', "version", istring("print version information"), version); std::list params = options.Parse(argc, argv); if(use_http_comm && use_old_comm) { logger.msg(Arc::ERROR, "RESTful and old VOMS communication protocols can't be requested simultaneously."); return EXIT_FAILURE; } if (version) { std::cout << Arc::IString("%s version %s", "arcproxy", VERSION) << std::endl; return EXIT_SUCCESS; } // If debug is specified as argument, it should be set before loading the configuration. if (!debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(debug)); logger.msg(Arc::VERBOSE, "Running command: %s", options.GetCommandWithArguments()); // This ensure command line args overwrite all other options if(!cert_path.empty())Arc::SetEnv("X509_USER_CERT", cert_path); if(!key_path.empty())Arc::SetEnv("X509_USER_KEY", key_path); if(!proxy_path.empty())Arc::SetEnv("X509_USER_PROXY", proxy_path); if(!ca_dir.empty())Arc::SetEnv("X509_CERT_DIR", ca_dir); // Set default, predefined or guessed credentials. Also check if they exist. #ifdef HAVE_NSS Arc::UserConfig usercfg(conffile, Arc::initializeCredentialsType(Arc::initializeCredentialsType::TryCredentials)); #else Arc::UserConfig usercfg(conffile, Arc::initializeCredentialsType(Arc::initializeCredentialsType::TryCredentials)); #endif if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization."); return EXIT_FAILURE; } if(use_nssdb) { usercfg.CertificatePath("");; usercfg.KeyPath("");; } if(vomslist.empty()) { vomslist = usercfg.DefaultVOMSes(); } for(std::list::iterator voms = vomslist.begin(); voms != vomslist.end();) { if(voms->empty()) { voms = vomslist.erase(voms); } else { ++voms; } } // Check for needed credentials objects // Can proxy be used for? Could not find it in documentation. 
// Key and certificate not needed if only printing proxy information if ( (!(Arc::lower(myproxy_command) == "get")) && (!use_nssdb) ) { if((usercfg.CertificatePath().empty() || ( usercfg.KeyPath().empty() && (usercfg.CertificatePath().find(".p12") == std::string::npos) ) ) && !(info || (infoitemlist.size() > 0) || remove_proxy)) { logger.msg(Arc::ERROR, "Failed to find certificate and/or private key or files have improper permissions or ownership."); logger.msg(Arc::ERROR, "You may try to increase verbosity to get more information."); return EXIT_FAILURE; } } if(!vomslist.empty() || !myproxy_command.empty()) { // For external communication CAs are needed if(usercfg.CACertificatesDirectory().empty()) { logger.msg(Arc::ERROR, "Failed to find CA certificates"); logger.msg(Arc::ERROR, "Cannot find the CA certificates directory path, " "please set environment variable X509_CERT_DIR, " "or cacertificatesdirectory in a configuration file."); logger.msg(Arc::ERROR, "You may try to increase verbosity to get more information."); logger.msg(Arc::ERROR, "The CA certificates directory is required for " "contacting VOMS and MyProxy servers."); return EXIT_FAILURE; } } // Convert list of voms+command into more convenient structure std::map > vomscmdlist; if (!vomslist.empty()) { if (vomses_path.empty()) vomses_path = usercfg.VOMSESPath(); if (vomses_path.empty()) { logger.msg(Arc::ERROR, "$X509_VOMS_FILE, and $X509_VOMSES are not set;\n" "User has not specified the location for vomses information;\n" "There is also not vomses location information in user's configuration file;\n" "Can not find vomses in default locations: ~/.arc/vomses, ~/.voms/vomses,\n" "$ARC_LOCATION/etc/vomses, $ARC_LOCATION/etc/grid-security/vomses, $PWD/vomses,\n" "/etc/vomses, /etc/grid-security/vomses, and the location at the corresponding sub-directory"); return false; } for(std::list::iterator v = vomslist.begin(); v != vomslist.end(); ++v) { std::string::size_type p = v->find(':'); if(p == std::string::npos) { vomscmdlist[*v].push_back(""); } else { vomscmdlist[v->substr(0,p)].push_back(v->substr(p+1)); *v = v->substr(0,p); } } // Remove duplicates vomslist.sort(); vomslist.unique(); } // Proxy is special case. We either need default or predefined path. // No guessing or testing is needed. // By running credentials initialization once more all set values // won't change. But proxy will get default value if not set. 
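// A standalone sketch of how "--voms VO[:command]" values are grouped per VO,
// mirroring the parsing loop above; split_voms_args is a hypothetical helper
// name and not part of the ARC API.
#include <list>
#include <map>
#include <string>

static std::map<std::string, std::list<std::string> >
split_voms_args(const std::list<std::string>& args) {
  std::map<std::string, std::list<std::string> > grouped;
  for (std::list<std::string>::const_iterator a = args.begin(); a != args.end(); ++a) {
    std::string::size_type p = a->find(':');
    if (p == std::string::npos)
      grouped[*a].push_back("");                              // bare VO name, no command
    else
      grouped[a->substr(0, p)].push_back(a->substr(p + 1));   // "VO:command"
  }
  return grouped;
}
// For example {"knowarc.eu:all", "knowarc.eu:/Role=dev", "atlas"} becomes
// {"knowarc.eu" -> ["all", "/Role=dev"], "atlas" -> [""]}.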
{ Arc::UserConfig tmpcfg(conffile, Arc::initializeCredentialsType(Arc::initializeCredentialsType::NotTryCredentials)); if(proxy_path.empty()) proxy_path = tmpcfg.ProxyPath(); usercfg.ProxyPath(proxy_path); } // Get back all paths if(key_path.empty()) key_path = usercfg.KeyPath(); if(cert_path.empty()) cert_path = usercfg.CertificatePath(); if(ca_dir.empty()) ca_dir = usercfg.CACertificatesDirectory(); if(voms_dir.empty()) voms_dir = Arc::GetEnv("X509_VOMS_DIR"); if (debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); if (timeout > 0) usercfg.Timeout(timeout); Arc::User user; if (!params.empty()) { logger.msg(Arc::ERROR, "Wrong number of arguments!"); return EXIT_FAILURE; } const Arc::Time now; if (remove_proxy) { if (proxy_path.empty()) { logger.msg(Arc::ERROR, "Cannot find the path of the proxy file, " "please setup environment X509_USER_PROXY, " "or proxypath in a configuration file"); return EXIT_FAILURE; } if(!Arc::FileDelete(proxy_path)) { if(errno != ENOENT) { logger.msg(Arc::ERROR, "Cannot remove proxy file at %s", proxy_path); } else { logger.msg(Arc::ERROR, "Cannot remove proxy file at %s, because it's not there", proxy_path); } return EXIT_FAILURE; } return EXIT_SUCCESS; } if (info) { if(!usercfg.OToken().empty()) { std::cout << Arc::IString("Bearer token is available. It is preferred for job submission.") << std::endl; } std::vector voms_attributes; bool res = false; if (proxy_path.empty()) { logger.msg(Arc::ERROR, "Cannot find the path of the proxy file, " "please setup environment X509_USER_PROXY, " "or proxypath in a configuration file"); return EXIT_FAILURE; } else if (!(Glib::file_test(proxy_path, Glib::FILE_TEST_EXISTS))) { logger.msg(Arc::ERROR, "Cannot find file at %s for getting the proxy. " "Please make sure this file exists.", proxy_path); return EXIT_FAILURE; } Arc::Credential holder(proxy_path, "", "", ""); if(!holder.GetCert()) { logger.msg(Arc::ERROR, "Cannot process proxy file at %s.", proxy_path); return EXIT_FAILURE; } std::cout << Arc::IString("Subject: %s", holder.GetDN()) << std::endl; std::cout << Arc::IString("Issuer: %s", holder.GetIssuerName()) << std::endl; std::cout << Arc::IString("Identity: %s", holder.GetIdentityName()) << std::endl; if (holder.GetEndTime() < now) std::cout << Arc::IString("Time left for proxy: Proxy expired") << std::endl; else if (now < holder.GetStartTime()) std::cout << Arc::IString("Time left for proxy: Proxy not valid yet") << std::endl; else std::cout << Arc::IString("Time left for proxy: %s", (holder.GetEndTime() - now).istr()) << std::endl; std::cout << Arc::IString("Proxy path: %s", proxy_path) << std::endl; std::cout << Arc::IString("Proxy type: %s", certTypeToString(holder.GetType())) << std::endl; std::cout << Arc::IString("Proxy key length: %i", holder.GetKeybits()) << std::endl; std::cout << Arc::IString("Proxy signature: %s", signTypeToString(holder.GetSigningAlgorithm())) << std::endl; Arc::VOMSTrustList voms_trust_dn; voms_trust_dn.AddRegex(".*"); res = parseVOMSAC(holder, ca_dir, "", voms_dir, voms_trust_dn, voms_attributes, true, true); // Not printing error message because parseVOMSAC will print everything itself //if (!res) logger.msg(Arc::ERROR, "VOMS attribute parsing failed"); for(int n = 0; n 0) { std::cout<<"====== "< attr_elements; Arc::tokenize(attr, attr_elements, "/"); // remove /voname= prefix if ( ! 
attr_elements.empty() ) attr_elements.pop_front(); if ( attr_elements.empty() ) { logger.msg(Arc::WARNING, "Malformed VOMS AC attribute %s", attr); continue; } // policyAuthority (URI) and AC tags if ( attr_elements.size() == 1 ) { std::string uri = attr_elements.front().substr(9); std::cout << "uri : " << uri < voms_attributes[n].till) { std::cout << Arc::IString("Time left for AC: AC has expired")< 0) { if (proxy_path.empty()) { logger.msg(Arc::ERROR, "Cannot find the path of the proxy file, " "please setup environment X509_USER_PROXY, " "or proxypath in a configuration file"); return EXIT_FAILURE; } else if (!(Glib::file_test(proxy_path, Glib::FILE_TEST_EXISTS))) { logger.msg(Arc::ERROR, "Cannot find file at %s for getting the proxy. " "Please make sure this file exists.", proxy_path); return EXIT_FAILURE; } Arc::Credential holder(proxy_path, "", "", ""); if(!holder.GetCert()) { logger.msg(Arc::ERROR, "Cannot process proxy file at %s.", proxy_path); return EXIT_FAILURE; } Arc::VOMSTrustList voms_trust_dn; voms_trust_dn.AddRegex(".*"); std::vector voms_attributes; parseVOMSAC(holder, ca_dir, "", voms_dir, voms_trust_dn, voms_attributes, true, true); bool unknownInfo = false; for(std::list::iterator ii = infoitemlist.begin(); ii != infoitemlist.end(); ++ii) { if(*ii == "subject") { std::cout << holder.GetDN() << std::endl; } else if(*ii == "identity") { std::cout << holder.GetIdentityName() << std::endl; } else if(*ii == "issuer") { std::cout << holder.GetIssuerName() << std::endl; } else if(*ii == "ca") { std::cout << holder.GetCAName() << std::endl; } else if(*ii == "path") { std::cout << proxy_path << std::endl; } else if(*ii == "type") { std::cout << certTypeToString(holder.GetType()) << std::endl; // todo:less human readable } else if(*ii == "validityStart") { std::cout << holder.GetStartTime().GetTime() << std::endl; } else if(*ii == "validityEnd") { std::cout << holder.GetEndTime().GetTime() << std::endl; } else if(*ii == "validityPeriod") { std::cout << (holder.GetEndTime() - holder.GetStartTime()).GetPeriod() << std::endl; } else if(*ii == "validityLeft") { std::cout << ((nownow)?(voms_attributes[n].till-now):Arc::Period(0)).GetPeriod(); } std::cout << std::endl; } else if(*ii == "proxyPolicy") { std::cout << holder.GetProxyPolicy() << std::endl; } else if(*ii == "keybits") { std::cout << holder.GetKeybits() << std::endl; } else if(*ii == "signingAlgorithm") { std::cout << signTypeToString(holder.GetSigningAlgorithm()) << std::endl; } else { logger.msg(Arc::ERROR, "Information item '%s' is not known",*ii); unknownInfo = true; } } if (unknownInfo) return EXIT_FAILURE; return EXIT_SUCCESS; } if ((cert_path.empty() || key_path.empty()) && (Arc::lower(myproxy_command) == "put")) { if (cert_path.empty()) logger.msg(Arc::ERROR, "Cannot find the user certificate path, " "please setup environment X509_USER_CERT, " "or certificatepath in a configuration file"); if (key_path.empty()) logger.msg(Arc::ERROR, "Cannot find the user private key path, " "please setup environment X509_USER_KEY, " "or keypath in a configuration file"); return EXIT_FAILURE; } std::map constraints; for (std::list::iterator it = constraintlist.begin(); it != constraintlist.end(); ++it) { std::string::size_type pos = it->find('='); if (pos != std::string::npos) constraints[it->substr(0, pos)] = it->substr(pos + 1); else constraints[*it] = ""; } std::map > passprompts; passprompts[pass_private_key] = std::pair("private key",false); passprompts[pass_myproxy] = std::pair("MyProxy server",false); 
passprompts[pass_myproxy_new] = std::pair("MyProxy server (new)",true); for (std::list::iterator it = passsourcelist.begin(); it != passsourcelist.end(); ++it) { std::string::size_type pos = it->find('='); if (pos == std::string::npos) { logger.msg(Arc::ERROR, "Cannot parse password source expression %s " "it must be of type=source format", *it); return EXIT_FAILURE; } std::string dest = it->substr(0, pos); pass_destination_type pass_dest; if(dest == "key") { pass_dest = pass_private_key; } else if(dest == "myproxy") { pass_dest = pass_myproxy; } else if(dest == "myproxynew") { pass_dest = pass_myproxy_new; } else if(dest == "nss") { pass_dest = pass_nss; } else if(dest == "all") { pass_dest = pass_all; } else { logger.msg(Arc::ERROR, "Cannot parse password type %s. " "Currently supported values are 'key','myproxy','myproxynew' and 'all'.", dest); return EXIT_FAILURE; } Arc::PasswordSource* pass_source; std::string pass = it->substr(pos + 1); if((pass[0] == '"') && (pass[pass.length()-1] == '"')) { pass_source = new Arc::PasswordSourceString(pass.substr(1,pass.length()-2)); } else if(pass == "int") { pass_source = new Arc::PasswordSourceInteractive(passprompts[pass_private_key].first,passprompts[pass_private_key].second); } else if(pass == "stdin") { pass_source = new Arc::PasswordSourceStream(&std::cin); } else { pos = pass.find(':'); if(pos == std::string::npos) { logger.msg(Arc::ERROR, "Cannot parse password source %s " "it must be of source_type or source_type:data format. " "Supported source types are int,stdin,stream,file.", pass); return EXIT_FAILURE; } std::string data = pass.substr(pos + 1); pass.resize(pos); if(pass == "file") { pass_source = new PasswordSourceFile(data); // TODO: combine same files } else if(pass == "stream") { if(data == "0") { pass_source = new Arc::PasswordSourceStream(&std::cin); } else { logger.msg(Arc::ERROR, "Only standard input is currently supported " "for password source."); return EXIT_FAILURE; } } else { logger.msg(Arc::ERROR, "Cannot parse password source type %s. " "Supported source types are int,stdin,stream,file.", pass); return EXIT_FAILURE; } } if(pass_source) { if(pass_dest != pass_all) { passsources[pass_dest] = pass_source; } else { passsources[pass_private_key] = pass_source; passsources[pass_myproxy] = pass_source; passsources[pass_myproxy_new] = pass_source; passsources[pass_nss] = pass_source; } } } for(std::map >::iterator p = passprompts.begin(); p != passprompts.end();++p) { if(passsources.find(p->first) == passsources.end()) { passsources[p->first] = new Arc::PasswordSourceInteractive(p->second.first,p->second.second); } } //proxy validity period //Set the default proxy validity lifetime to 12 hours if there is //no validity lifetime provided by command caller // Set default values first // TODO: Is default validityPeriod since now or since validityStart? Arc::Time validityStart = now; // now by default Arc::Period validityPeriod(12*60*60); if (Arc::lower(myproxy_command) == "put") { //For myproxy PUT operation, the proxy should be 7 days according to the default //definition in myproxy implementation. validityPeriod = 7*24*60*60; } // Acquire constraints. Check for valid values and conflicts. 
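// A simplified sketch of the constraint arithmetic implemented below: any two
// of validityStart, validityEnd and validityPeriod may be given and the third
// is derived (all three together are rejected). Plain seconds are used here
// instead of Arc::Time/Arc::Period; derive_validity is a hypothetical name.
static void derive_validity(long& start, long& end, long& period,
                            bool have_end, bool have_period) {
  if (have_end && have_period)
    start = end - period;   // start derived from end and period
  else if (have_end)
    period = end - start;   // period derived; the real code rejects end < start
  else
    end = start + period;   // end derived; start defaults to now, period to 12 hours
}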
if((!constraints["validityStart"].empty()) && (!constraints["validityEnd"].empty()) && (!constraints["validityPeriod"].empty())) { std::cerr << Arc::IString("The start, end and period can't be set simultaneously") << std::endl; return EXIT_FAILURE; } if(!constraints["validityStart"].empty()) { validityStart = Arc::Time(constraints["validityStart"]); if (validityStart == Arc::Time(Arc::Time::UNDEFINED)) { std::cerr << Arc::IString("The start time that you set: %s can't be recognized.", (std::string)constraints["validityStart"]) << std::endl; return EXIT_FAILURE; } } if(!constraints["validityPeriod"].empty()) { validityPeriod = Arc::Period(constraints["validityPeriod"]); if (validityPeriod.GetPeriod() <= 0) { std::cerr << Arc::IString("The period that you set: %s can't be recognized.", (std::string)constraints["validityPeriod"]) << std::endl; return EXIT_FAILURE; } } if(!constraints["validityEnd"].empty()) { Arc::Time validityEnd = Arc::Time(constraints["validityEnd"]); if (validityEnd == Arc::Time(Arc::Time::UNDEFINED)) { std::cerr << Arc::IString("The end time that you set: %s can't be recognized.", (std::string)constraints["validityEnd"]) << std::endl; return EXIT_FAILURE; } if(!constraints["validityPeriod"].empty()) { // If period is explicitly set then start is derived from end and period validityStart = validityEnd - validityPeriod; } else { // otherwise start - optionally - and end are set, period is derived if(validityEnd < validityStart) { std::cerr << Arc::IString("The end time that you set: %s is before start time: %s.", (std::string)validityEnd,(std::string)validityStart) << std::endl; // error return EXIT_FAILURE; } validityPeriod = validityEnd - validityStart; } } // Here we have validityStart and validityPeriod defined Arc::Time validityEnd = validityStart + validityPeriod; // Warn user about strange times but do not prevent user from doing anything legal if(validityStart < now) { std::cout << Arc::IString("WARNING: The start time that you set: %s is before current time: %s", (std::string)validityStart, (std::string)now) << std::endl; } if(validityEnd < now) { std::cout << Arc::IString("WARNING: The end time that you set: %s is before current time: %s", (std::string)validityEnd, (std::string)now) << std::endl; } //voms AC valitity period //Set the default voms AC validity lifetime to 12 hours if there is //no validity lifetime provided by command caller Arc::Period vomsACvalidityPeriod(12*60*60); if(!constraints["vomsACvalidityPeriod"].empty()) { vomsACvalidityPeriod = Arc::Period(constraints["vomsACvalidityPeriod"]); if (vomsACvalidityPeriod.GetPeriod() == 0) { std::cerr << Arc::IString("The VOMS AC period that you set: %s can't be recognized.", (std::string)constraints["vomsACvalidityPeriod"]) << std::endl; return EXIT_FAILURE; } } else { if(validityPeriod < vomsACvalidityPeriod) vomsACvalidityPeriod = validityPeriod; // It is strange that VOMS AC may be valid less than proxy itself. // Maybe it would be more correct to have it valid by default from // now till validityEnd. } std::string voms_period = Arc::tostring(vomsACvalidityPeriod.GetPeriod()); //myproxy validity period. 
//Set the default myproxy validity lifetime to 12 hours if there is //no validity lifetime provided by command caller Arc::Period myproxyvalidityPeriod(12*60*60); if(!constraints["myproxyvalidityPeriod"].empty()) { myproxyvalidityPeriod = Arc::Period(constraints["myproxyvalidityPeriod"]); if (myproxyvalidityPeriod.GetPeriod() == 0) { std::cerr << Arc::IString("The MyProxy period that you set: %s can't be recognized.", (std::string)constraints["myproxyvalidityPeriod"]) << std::endl; return EXIT_FAILURE; } } else { if(validityPeriod < myproxyvalidityPeriod) myproxyvalidityPeriod = validityPeriod; // see vomsACvalidityPeriod } std::string myproxy_period = Arc::tostring(myproxyvalidityPeriod.GetPeriod()); std::string signing_algorithm = constraints["signingAlgorithm"]; int keybits = 0; if(!constraints["keybits"].empty()) { if(constraints["keybits"] == "inherit") { keybits = -1; } else if((!Arc::stringto(constraints["keybits"],keybits)) || (keybits <= 0)) { std::cerr << Arc::IString("The keybits constraint is wrong: %s.", (std::string)constraints["keybits"]) << std::endl; return EXIT_FAILURE; } } #ifdef HAVE_NSS // TODO: move to spearate file //Using nss db dominate other option if(use_nssdb) { // Get the nss db paths from firefox's profile.ini file std::vector nssdb_paths; get_default_nssdb_path(nssdb_paths); if(nssdb_paths.empty()) { std::cout << Arc::IString("The NSS database can not be detected in the Firefox profile") << std::endl; return EXIT_FAILURE; } // Let user to choose which profile to use // if multiple profiles exist bool res; std::string configdir; if(nssdb_paths.size() > 1) { std::cout<=1)) { configdir = nssdb_paths[num-1]; break; } } } else { configdir = nssdb_paths[0]; } res = ArcAuthNSS::nssInit(configdir); std::cout<< Arc::IString("NSS database to be accessed: %s\n", configdir.c_str()); //The nss db under firefox profile seems to not be protected by any passphrase by default bool ascii = true; const char* trusts = "u,u,u"; // Generate CSR std::string proxy_csrfile = "proxy.csr"; std::string proxy_keyname = "proxykey"; std::string proxy_privk_str; res = ArcAuthNSS::nssGenerateCSR(proxy_keyname, "CN=Test,OU=ARC,O=EMI", *passsources[pass_nss], proxy_csrfile, proxy_privk_str, ascii); if(!res) return EXIT_FAILURE; // Create a temporary proxy and contact voms server std::string issuername; std::string vomsacseq; if (!vomslist.empty()) { std::string tmp_proxy_path; if(!Arc::TmpFileCreate(tmp_proxy_path,"")) return EXIT_FAILURE; get_nss_certname(issuername, logger); // Create tmp proxy cert int duration = 12; res = ArcAuthNSS::nssCreateCert(proxy_csrfile, issuername, NULL, duration, "", tmp_proxy_path, ascii); if(!res) { remove_proxy_file(tmp_proxy_path); return EXIT_FAILURE; } // TODO: Use FileUtils std::string tmp_proxy_cred_str; std::ifstream tmp_proxy_cert_s(tmp_proxy_path.c_str()); std::getline(tmp_proxy_cert_s, tmp_proxy_cred_str,'\0'); tmp_proxy_cert_s.close(); // Export EEC std::string cert_file; if(!Arc::TmpFileCreate(cert_file,"")) { remove_proxy_file(tmp_proxy_path); return EXIT_FAILURE; } res = ArcAuthNSS::nssExportCertificate(issuername, cert_file); if(!res) { remove_cert_file(cert_file); remove_proxy_file(tmp_proxy_path); return EXIT_FAILURE; } std::string eec_cert_str; std::ifstream eec_s(cert_file.c_str()); std::getline(eec_s, eec_cert_str,'\0'); eec_s.close(); remove_cert_file(cert_file); // Compose tmp proxy file tmp_proxy_cred_str.append(proxy_privk_str).append(eec_cert_str); write_proxy_file(tmp_proxy_path, tmp_proxy_cred_str); 
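// The PEM material above is read in one go with std::getline using a '\0'
// delimiter, which consumes the whole stream because PEM files contain no NUL
// bytes. A standalone sketch of that pattern as a helper (read_pem_file is a
// hypothetical name; the TODO above suggests switching to FileUtils).
#include <fstream>
#include <string>

static bool read_pem_file(const std::string& path, std::string& content) {
  std::ifstream in(path.c_str());
  if (!in) return false;             // missing or unreadable file
  std::getline(in, content, '\0');   // '\0' never appears in PEM, so this reads to EOF
  return !content.empty();
}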
if(!contact_voms_servers(vomscmdlist, orderlist, vomses_path, use_gsi_comm, use_http_comm || !use_old_comm, voms_period, usercfg, logger, tmp_proxy_path, vomsacseq)) { remove_proxy_file(tmp_proxy_path); return EXIT_FAILURE; } remove_proxy_file(tmp_proxy_path); } // Create proxy with VOMS AC std::string proxy_certfile = "myproxy.pem"; // Let user to choose which credential to use if(issuername.empty()) get_nss_certname(issuername, logger); std::cout< voms_attributes; bool r = parseVOMSAC(holder, ca_dir, "", voms_dir, voms_trust_dn, voms_attributes, true, true); if (!r) logger.msg(Arc::ERROR, "VOMS attribute parsing failed"); if(voms_attributes.size() == 0) { logger.msg(Arc::INFO, "Myproxy server did not return proxy with VOMS AC included"); std::string vomsacseq; contact_voms_servers(vomscmdlist, orderlist, vomses_path, use_gsi_comm, use_http_comm || !use_old_comm, voms_period, usercfg, logger, proxy_path, vomsacseq); if(!vomsacseq.empty()) { Arc::Credential signer(proxy_path, proxy_path, "", ""); std::string proxy_cert; create_proxy(proxy_cert, signer, policy, proxy_start, proxy_period, vomsacseq, keybits, signing_algorithm); write_proxy_file(proxy_path, proxy_cert); } } return EXIT_SUCCESS; } else return EXIT_FAILURE; } //Create proxy or voms proxy try { Arc::Credential signer(cert_path, key_path, "", "", *passsources[pass_private_key]); if (signer.GetIdentityName().empty()) { std::cerr << Arc::IString("Proxy generation failed: No valid certificate found.") << std::endl; return EXIT_FAILURE; } EVP_PKEY* pkey = signer.GetPrivKey(); if(!pkey) { std::cerr << Arc::IString("Proxy generation failed: No valid private key found.") << std::endl; return EXIT_FAILURE; } if(pkey) EVP_PKEY_free(pkey); std::cout << Arc::IString("Your identity: %s", signer.GetIdentityName()) << std::endl; if (now > signer.GetEndTime()) { std::cerr << Arc::IString("Proxy generation failed: Certificate has expired.") << std::endl; return EXIT_FAILURE; } else if (now < signer.GetStartTime()) { std::cerr << Arc::IString("Proxy generation failed: Certificate is not valid yet.") << std::endl; return EXIT_FAILURE; } std::string vomsacseq; if (!vomslist.empty()) { //Generate a temporary self-signed proxy certificate //to contact the voms server std::string tmp_proxy_path; std::string tmp_proxy; if(!Arc::TmpFileCreate(tmp_proxy_path,"")) { std::cerr << Arc::IString("Proxy generation failed: Failed to create temporary file.") << std::endl; return EXIT_FAILURE; } create_tmp_proxy(tmp_proxy, signer); write_proxy_file(tmp_proxy_path, tmp_proxy); if(!contact_voms_servers(vomscmdlist, orderlist, vomses_path, use_gsi_comm, use_http_comm || !use_old_comm, voms_period, usercfg, logger, tmp_proxy_path, vomsacseq)) { remove_proxy_file(tmp_proxy_path); std::cerr << Arc::IString("Proxy generation failed: Failed to retrieve VOMS information.") << std::endl; return EXIT_FAILURE; } remove_proxy_file(tmp_proxy_path); } std::string proxy_cert; create_proxy(proxy_cert, signer, policy, proxy_start, proxy_period, vomsacseq, keybits, signing_algorithm); //If myproxy command is "Put", then the proxy path is set to /tmp/myproxy-proxy.uid.pid if (Arc::lower(myproxy_command) == "put") proxy_path = Glib::build_filename(Glib::get_tmp_dir(), "myproxy-proxy." 
+ Arc::tostring(user.get_uid()) + Arc::tostring((int)(getpid()))); write_proxy_file(proxy_path,proxy_cert); Arc::Credential proxy_cred(proxy_path, proxy_path, "", ""); Arc::Time left = proxy_cred.GetEndTime(); std::cout << Arc::IString("Proxy generation succeeded") << std::endl; std::cout << Arc::IString("Your proxy is valid until: %s", left.str(Arc::UserTime)) << std::endl; //return EXIT_SUCCESS; } catch (std::exception& err) { logger.msg(Arc::ERROR, err.what()); return EXIT_FAILURE; } //Delegate the former self-delegated credential to //myproxy server if (Arc::lower(myproxy_command) == "put") { bool res = contact_myproxy_server( myproxy_server, myproxy_command, user_name, use_empty_passphrase, myproxy_period, retrievable_by_cert, proxy_start, proxy_period, vomslist, vomses_path, proxy_path, usercfg, logger); if(res) return EXIT_SUCCESS; else return EXIT_FAILURE; } return EXIT_SUCCESS; } int main(int argc, char **argv) { int xr = runmain(argc,argv); _exit(xr); return 0; } nordugrid-arc-6.14.0/src/clients/credentials/PaxHeaders.30264/arcproxy_myproxy.cpp0000644000000000000000000000013214152153376026253 xustar000000000000000030 mtime=1638455038.339644982 30 atime=1638455038.473646996 30 ctime=1638455101.049587228 nordugrid-arc-6.14.0/src/clients/credentials/arcproxy_myproxy.cpp0000644000175000002070000003043314152153376026243 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #ifdef HAVE_NSS #include #endif #include "arcproxy.h" using namespace ArcCredential; typedef enum { pass_all, pass_private_key, pass_myproxy, pass_myproxy_new, pass_nss } pass_destination_type; extern std::map passsources; static std::string get_cert_dn(const std::string& cert_file) { std::string dn_str; Arc::Credential cert(cert_file, "", "", ""); dn_str = cert.GetIdentityName(); return dn_str; } bool contact_myproxy_server(const std::string& myproxy_server, const std::string& myproxy_command, const std::string& myproxy_user_name, bool use_empty_passphrase, const std::string& myproxy_period, const std::string& retrievable_by_cert, Arc::Time& proxy_start, Arc::Period& proxy_period, std::list& vomslist, std::string& vomses_path, const std::string& proxy_path, Arc::UserConfig& usercfg, Arc::Logger& logger) { std::string user_name = myproxy_user_name; std::string key_path, cert_path, ca_dir; key_path = usercfg.KeyPath(); cert_path = usercfg.CertificatePath(); ca_dir = usercfg.CACertificatesDirectory(); if(user_name.empty()) { user_name = get_cert_dn(proxy_path); } if(user_name.empty() && !cert_path.empty()) { user_name = get_cert_dn(cert_path); } //If the "INFO" myproxy command is given, try to get the //information about the existence of stored credentials //on the myproxy server. 
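// Each MyProxy operation handled below (INFO, NEWPASS, DESTROY, GET, PUT)
// follows the same pattern: fill a string-to-string option map and call the
// matching Arc::CredentialStore method. A condensed sketch of the INFO case;
// the header paths and the helper name myproxy_info_sketch are assumptions,
// not taken from this file.
#include <iostream>
#include <map>
#include <string>
#include <arc/URL.h>
#include <arc/UserConfig.h>
#include <arc/credentialstore/CredentialStore.h>

static bool myproxy_info_sketch(Arc::UserConfig& usercfg,
                                const std::string& server,
                                const std::string& username) {
  Arc::CredentialStore cstore(usercfg, Arc::URL("myproxy://" + server));
  std::map<std::string, std::string> options;
  options["username"] = username;        // account or DN registered on the server
  std::string response;
  if (!cstore.Info(options, response))   // network call; fails if nothing is stored
    return false;
  std::cout << response << std::endl;
  return true;
}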
try { if (Arc::lower(myproxy_command) == "info") { if (myproxy_server.empty()) throw std::invalid_argument("URL of MyProxy server is missing"); if (user_name.empty()) throw std::invalid_argument("Username to MyProxy server is missing"); std::string respinfo; //if(usercfg.CertificatePath().empty()) usercfg.CertificatePath(cert_path); //if(usercfg.KeyPath().empty()) usercfg.KeyPath(key_path); if(usercfg.ProxyPath().empty() && !proxy_path.empty()) usercfg.ProxyPath(proxy_path); else { if(usercfg.CertificatePath().empty() && !cert_path.empty()) usercfg.CertificatePath(cert_path); if(usercfg.KeyPath().empty() && !key_path.empty()) usercfg.KeyPath(key_path); } if(usercfg.CACertificatesDirectory().empty()) usercfg.CACertificatesDirectory(ca_dir); Arc::CredentialStore cstore(usercfg,Arc::URL("myproxy://"+myproxy_server)); std::map myproxyopt; myproxyopt["username"] = user_name; if(!cstore.Info(myproxyopt,respinfo)) throw std::invalid_argument("Failed to get info from MyProxy service"); std::cout << Arc::IString("Succeeded to get info from MyProxy server") << std::endl; std::cout << respinfo << std::endl; return true; } } catch (std::exception& err) { logger.msg(Arc::ERROR, err.what()); return false; } //If the "NEWPASS" myproxy command is given, try to get the //information about the existence of stored credentials //on the myproxy server. try { if (Arc::lower(myproxy_command) == "newpass") { if (myproxy_server.empty()) throw std::invalid_argument("URL of MyProxy server is missing"); if (user_name.empty()) throw std::invalid_argument("Username to MyProxy server is missing"); std::string passphrase; if(passsources[pass_myproxy]->Get(passphrase, 4, 256) != Arc::PasswordSource::PASSWORD) throw std::invalid_argument("Error entering passphrase"); std::string newpassphrase; if(passsources[pass_myproxy_new]->Get(newpassphrase, 4, 256) != Arc::PasswordSource::PASSWORD) throw std::invalid_argument("Error entering passphrase"); if(usercfg.ProxyPath().empty() && !proxy_path.empty()) usercfg.ProxyPath(proxy_path); else { if(usercfg.CertificatePath().empty() && !cert_path.empty()) usercfg.CertificatePath(cert_path); if(usercfg.KeyPath().empty() && !key_path.empty()) usercfg.KeyPath(key_path); } if(usercfg.CACertificatesDirectory().empty()) usercfg.CACertificatesDirectory(ca_dir); Arc::CredentialStore cstore(usercfg,Arc::URL("myproxy://"+myproxy_server)); std::map myproxyopt; myproxyopt["username"] = user_name; myproxyopt["password"] = passphrase; myproxyopt["newpassword"] = newpassphrase; if(!cstore.ChangePassword(myproxyopt)) throw std::invalid_argument("Failed to change password MyProxy service"); std::cout << Arc::IString("Succeeded to change password on MyProxy server") << std::endl; return true; } } catch (std::exception& err) { logger.msg(Arc::ERROR, err.what()); return false; } //If the "DESTROY" myproxy command is given, try to get the //information about the existence of stored credentials //on the myproxy server. 
try { if (Arc::lower(myproxy_command) == "destroy") { if (myproxy_server.empty()) throw std::invalid_argument("URL of MyProxy server is missing"); if (user_name.empty()) throw std::invalid_argument("Username to MyProxy server is missing"); std::string passphrase; if(passsources[pass_myproxy]->Get(passphrase, 4, 256) != Arc::PasswordSource::PASSWORD) throw std::invalid_argument("Error entering passphrase"); std::string respinfo; if(usercfg.ProxyPath().empty() && !proxy_path.empty()) usercfg.ProxyPath(proxy_path); else { if(usercfg.CertificatePath().empty() && !cert_path.empty()) usercfg.CertificatePath(cert_path); if(usercfg.KeyPath().empty() && !key_path.empty()) usercfg.KeyPath(key_path); } if(usercfg.CACertificatesDirectory().empty()) usercfg.CACertificatesDirectory(ca_dir); Arc::CredentialStore cstore(usercfg,Arc::URL("myproxy://"+myproxy_server)); std::map myproxyopt; myproxyopt["username"] = user_name; myproxyopt["password"] = passphrase; if(!cstore.Destroy(myproxyopt)) throw std::invalid_argument("Failed to destroy credential on MyProxy service"); std::cout << Arc::IString("Succeeded to destroy credential on MyProxy server") << std::endl; return true; } } catch (std::exception& err) { logger.msg(Arc::ERROR, err.what()); return false; } //If the "GET" myproxy command is given, try to get a delegated //certificate from the myproxy server. //For "GET" command, certificate and key are not needed, and //anonymous GSSAPI is used (GSS_C_ANON_FLAG) try { if (Arc::lower(myproxy_command) == "get") { if (myproxy_server.empty()) throw std::invalid_argument("URL of MyProxy server is missing"); if (user_name.empty()) throw std::invalid_argument("Username to MyProxy server is missing"); std::string passphrase; if(!use_empty_passphrase) { if(passsources[pass_myproxy]->Get(passphrase, 4, 256) != Arc::PasswordSource::PASSWORD) throw std::invalid_argument("Error entering passphrase"); } std::string proxy_cred_str_pem; Arc::initializeCredentialsType cred_type(Arc::initializeCredentialsType::SkipCredentials); Arc::UserConfig usercfg_tmp(cred_type); usercfg_tmp.CACertificatesDirectory(usercfg.CACertificatesDirectory()); Arc::CredentialStore cstore(usercfg_tmp,Arc::URL("myproxy://"+myproxy_server)); std::map myproxyopt; myproxyopt["username"] = user_name; myproxyopt["password"] = passphrase; myproxyopt["lifetime"] = myproxy_period; // According to the protocol of myproxy, the "Get" command can // include the information about vo name, so that myproxy server // can contact voms server to retrieve AC for myproxy client // See 2.4 of http://grid.ncsa.illinois.edu/myproxy/protocol/ // "When VONAME appears in the message, the server will generate VOMS // proxy certificate using VONAME and VOMSES, or the server's VOMS server information." class vomses_match: public Arc::VOMSConfig::filter { private: int seq_; const std::list& vomses_; std::map& opts_; public: bool match(const Arc::VOMSConfigLine& line) { for(std::list::const_iterator voms = vomses_.begin(); voms != vomses_.end(); ++voms) { if((line.Name() == *voms) || (line.Alias() == *voms)) { opts_["vomsname"+Arc::tostring(seq_)] = *voms; opts_["vomses"+Arc::tostring(seq_)] = line.Str(); ++seq_; break; }; }; // Because rsult is stored imeediately there is no sense to keep matched lines in // VOMSConfig object. 
return false; }; vomses_match(const std::list& vomses, std::map opts): seq_(0),vomses_(vomses),opts_(opts) { }; }; Arc::VOMSConfig voms_config(vomses_path, vomses_match(vomslist,myproxyopt)); if(!cstore.Retrieve(myproxyopt,proxy_cred_str_pem)) throw std::invalid_argument("Failed to retrieve proxy from MyProxy service"); write_proxy_file(proxy_path,proxy_cred_str_pem); //Assign proxy_path to cert_path and key_path, //so the later voms functionality can use the proxy_path //to create proxy with voms AC extension. In this //case, "--cert" and "--key" is not needed. cert_path = proxy_path; key_path = proxy_path; std::cout << Arc::IString("Succeeded to get a proxy in %s from MyProxy server %s", proxy_path, myproxy_server) << std::endl; return true; } } catch (std::exception& err) { logger.msg(Arc::ERROR, err.what()); return false; } //Delegate the former self-delegated credential to //myproxy server try { if (Arc::lower(myproxy_command) == "put") { if (myproxy_server.empty()) throw std::invalid_argument("URL of MyProxy server is missing"); if (user_name.empty()) throw std::invalid_argument("Username to MyProxy server is missing"); std::string prompt1 = "MyProxy server"; std::string passphrase; if(retrievable_by_cert.empty()) { if(passsources[pass_myproxy_new]->Get(passphrase, 4, 256) != Arc::PasswordSource::PASSWORD) throw std::invalid_argument("Error entering passphrase"); } std::string proxy_cred_str_pem; std::ifstream proxy_cred_file(proxy_path.c_str()); if(!proxy_cred_file) throw std::invalid_argument("Failed to read proxy file "+proxy_path); std::getline(proxy_cred_file,proxy_cred_str_pem,'\0'); if(proxy_cred_str_pem.empty()) throw std::invalid_argument("Failed to read proxy file "+proxy_path); proxy_cred_file.close(); usercfg.ProxyPath(proxy_path); if(usercfg.CACertificatesDirectory().empty()) { usercfg.CACertificatesDirectory(ca_dir); } Arc::CredentialStore cstore(usercfg,Arc::URL("myproxy://"+myproxy_server)); std::map myproxyopt; myproxyopt["username"] = user_name; myproxyopt["password"] = passphrase; myproxyopt["lifetime"] = myproxy_period; if(!retrievable_by_cert.empty()) { myproxyopt["retriever_trusted"] = retrievable_by_cert; } if(!cstore.Store(myproxyopt,proxy_cred_str_pem,true,proxy_start,proxy_period)) throw std::invalid_argument("Failed to delegate proxy to MyProxy service"); remove_proxy_file(proxy_path); std::cout << Arc::IString("Succeeded to put a proxy onto MyProxy server") << std::endl; return true; } } catch (std::exception& err) { logger.msg(Arc::ERROR, err.what()); remove_proxy_file(proxy_path); return false; } return true; } nordugrid-arc-6.14.0/src/clients/credentials/PaxHeaders.30264/arcproxy.1.in0000644000000000000000000000013214152153376024427 xustar000000000000000030 mtime=1638455038.339644982 30 atime=1638455038.473646996 30 ctime=1638455101.045587168 nordugrid-arc-6.14.0/src/clients/credentials/arcproxy.1.in0000644000175000002070000002717714152153376024432 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH APPROXY 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcproxy \- ARC Credentials Proxy generation utility .SH SYNOPSIS .B arcproxy [\fIOPTION\fR] .SH DESCRIPTION .\" Add any additional description here .PP arcproxy generates proxy credentials (general proxy certificate, or proxy certificate with VOMS AC extension) from private key and certificate of user. 
.SH OPTIONS .TP \fB\-h\fR prints short usage description .TP \fB\-P\fR \fIfilename\fR location of the generated proxy file .TP \fB\-C\fR location of X509 certificate file, the file can be either pem, der, or pkcs12 formatted; if this option is not set, then env X509_USER_CERT will be searched; if X509_USER_CERT env is not set, then certificatepath item in client.conf will be searched; if the location still is not found, then ~/.arc/, ~/.globus/, ./etc/arc, and ./ will be searched. .TP \fB\-K\fR location of private key file, if the certificate is in pkcs12 format, then no need to give private key; if this option is not set, then env X509_USER_KEY will be searched; if X509_USER_KEY env is not set, then keypath item in client.conf will be searched; if the location still is not found, then ~/.arc/, ~/.globus/, ./etc/arc, and ./ will be searched. .TP \fB\-T\fR path to trusted certificate directory, only needed for VOMS client functionality; if this option is not set, then env X509_CERT_DIR will be searched; if X509_CERT_DIR env is not set, then cacertificatesdirectory item in client.conf will be searched. .TP \fB\-s\fR path to top directory of VOMS *.lsc files, only needed for VOMS client functionality .TP \fB\-V\fR path to VOMS server configuration file, only needed for VOMS client functionality; if the path is a directory rather than a file, all of the files under this directory will be searched. .TP \fB\-S\fR voms<:command>. Specify VOMS server. :command is optional, and is used to ask for specific attributes (e.g. roles); command options are: all --- put all of this DN's attributes into AC; list --- list all of the DN's attributes, will not create AC extension; /Role=yourRole --- specify the role, if this DN has such a role, the role will be put into AC; /voname/groupname/Role=yourRole --- specify the VO, group and role; if this DN has such a role, the role will be put into AC .TP \fB\-o\fR group<:role>. Specify ordering of attributes. Example: --order /knowarc.eu/coredev:Developer,/knowarc.eu/testers:Tester or: --order /knowarc.eu/coredev:Developer --order /knowarc.eu/testers:Tester Note that it does not make sense to specify the order if you have two or more different VOMS servers specified .TP \fB\-G\fR use GSI wire protocol for contacting VOMS services instead of SSL/TLS .TP \fB\-H\fR use HTTP communication protocol for contacting VOMS services that provide RESTful access. Note for RESTful access, 'list' command and multiple VOMS servers are not supported. This protocol is now the default communication protocol and you do not need to specify this option. .TP \fB\-B\fR use old communication protocol for contacting VOMS services instead of RESTful. .TP \fB\-O\fR this option is not functional anymore (old GSI proxies are not supported) .TP \fB\-I\fR print all information about this proxy. In order to show the Identity (DN without CN as suffix for proxy) of the certificate, the 'trusted certdir' is needed. .TP \fB\-i\fR print selected information about this proxy. Currently the following information items are supported: \fBsubject\fR - subject name of proxy certificate. \fBidentity\fR - identity subject name of proxy certificate. \fBissuer\fR - issuer subject name of proxy certificate. \fBca\fR - subject name of CA which issued initial certificate. \fBpath\fR - file system path to file containing proxy. \fBtype\fR - type of proxy certificate. \fBvalidityStart\fR - timestamp when proxy validity starts. \fBvalidityEnd\fR - timestamp when proxy validity ends. 
\fBvalidityPeriod\fR - duration of proxy validity in seconds. \fBvalidityLeft\fR - duration of proxy validity left in seconds. \fBvomsVO\fR - VO name represented by VOMS attribute. \fBvomsSubject\fR - subject of certificate for which VOMS attribute is issued. \fBvomsIssuer\fR - subject of service which issued VOMS certificate. \fBvomsACvalidityStart\fR - timestamp when VOMS attribute validity starts. \fBvomsACvalidityEnd\fR - timestamp when VOMS attribute validity ends. \fBvomsACvalidityPeriod\fR - duration of VOMS attribute validity in seconds. \fBvomsACvalidityLeft\fR - duration of VOMS attribute validity left in seconds. \fBproxyPolicy\fR - content of the policy assigned to the proxy certificate. \fBkeybits\fR - size of proxy certificate key in bits. \fBsigningAlgorithm\fR - algorithm used to sign proxy certificate. Items are printed in the requested order and are separated by newlines. If an item has multiple values they are printed on the same line separated by |. .TP \fB\-r\fR Remove the proxy file. .TP \fB\-U\fR Username to the MyProxy server. .TP \fB\-N\fR don't prompt for a credential passphrase when retrieving a credential from a MyProxy server. The precondition of this choice is that the credential was PUT onto the MyProxy server without a passphrase, by using the -R (--retrievable_by_cert) option. This option is specific to the GET command when contacting the MyProxy server. .TP \fB\-R\fR Allow specified entity to retrieve credential without passphrase. This option is specific to the PUT command when contacting the MyProxy server. .TP \fB\-L\fR hostname of the MyProxy server, optionally followed by a colon and port number, e.g. example.org:7512. If the port number is not specified, 7512 is used by default. .TP \fB\-M\fR command to the MyProxy server. The command can be PUT or GET. PUT/put -- put a delegated credential to the MyProxy server; GET/get -- get a delegated credential from the MyProxy server, a local credential (certificate and key) is not needed in this case; MyProxy functionality can be used together with VOMS functionality. --voms and --vomses can be used with the GET command if VOMS attributes are required to be included in the proxy. .TP \fB\-F\fR use NSS credential DB in default Mozilla profiles, including Firefox, Seamonkey and Thunderbird. .TP \fB\-c\fR constraints of proxy certificate. Currently the following constraints are supported: \fBvalidityStart=time\fR - time when certificate becomes valid. Default is now. \fBvalidityEnd=time\fR - time when certificate becomes invalid. Default is 43200 (12 hours) from start for a local proxy and 7 days for a proxy delegated to MyProxy. \fBvalidityPeriod=time\fR - for how long the certificate is valid. Default is 43200 (12 hours) for a local proxy and 7 days for a proxy delegated to MyProxy. \fBvomsACvalidityPeriod=time\fR - for how long the AC is valid. Default is the shorter of validityPeriod and 12 hours. \fBmyproxyvalidityPeriod=time\fR - lifetime of proxies delegated by the MyProxy server. Default is the shorter of validityPeriod and 12 hours. \fBproxyPolicy=policy content\fR - assigns the specified string to the proxy policy to limit its functionality. \fBkeybits=number\fR - length of the key to generate. Default is 2048 bits. Special value 'inherit' means to use the key length of the signing certificate. \fBsigningAlgorithm=name\fR - signing algorithm to use for signing the public key of the proxy. Default is sha1. Possible values are sha1, sha2 (alias for sha256), sha224, sha256, sha384, sha512 and inherit (use algorithm of signing certificate). .TP \fB\-p\fR password destination=password source. 
Supported password destinations are: \fBkey\fR - for reading private key \fBmyproxy\fR - for accessing credentials at MyProxy service \fBmyproxynew\fR - for creating credentials at MyProxy service \fBall\fR - for any purpose. Supported password sources are: \fBquoted string ("password")\fR - explicitly specified password \fBint\fR - interactively request password from console \fBstdin\fR - read password from standard input delimited by newline \fBfile:filename\fR - read password from file named filename \fBstream:#\fR - read password from input stream number #. Currently only 0 (standard input) is supported. .TP \fB\-t\fR timeout in seconds (default 20) .TP \fB\-z\fR configuration file (default ~/.arc/client.conf) .TP \fB\-d\fR level of information printed. Possible values are DEBUG, VERBOSE, INFO, WARNING, ERROR and FATAL. .TP \fB\-v\fR print version information .PP If the locations of the certificate and key are not explicitly specified, they are looked for in the following locations, in order: Key/certificate paths specified by the environment variables X509_USER_KEY and X509_USER_CERT respectively. Paths specified in the configuration file. ~/.arc/usercert.pem and ~/.arc/userkey.pem for certificate and key respectively. ~/.globus/usercert.pem and ~/.globus/userkey.pem for certificate and key respectively. If the destination location of the proxy file is not specified, the value of the X509_USER_PROXY environment variable is used. If no value is provided, the default location <TEMPORARY DIRECTORY>/x509up_u<UID> is used. Here TEMPORARY DIRECTORY is derived from the environment variables TMPDIR, TMP or TEMP, or the default location /tmp is used. .SH "REPORTING BUGS" Report bugs to http://bugzilla.nordugrid.org/ .SH ENVIRONMENT VARIABLES .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH FILES .TP .B /etc/vomses Common file containing a list of selected VO contact points, one VO per line, for example: .RS .IP """gin"" ""kuiken.nikhef.nl"" ""15050"" ""/O=dutchgrid/O=hosts/OU=nikhef.nl/CN=kuiken.nikhef.nl"" ""gin.ggf.org""" .IP """nordugrid.org"" ""voms.uninett.no"" ""15015"" ""/O=Grid/O=NorduGrid/CN=host/voms.ndgf.org"" ""nordugrid.org""" .RE .TP .B ~/.voms/vomses Same as .B /etc/vomses but located in the user's home area. If it exists, it has precedence over .B /etc/vomses .RS The order of parsing of vomses locations is: .RS 1. command line options .RE .RS 2. client configuration file ~/.arc/client.conf .RE .RS 3. $X509_VOMSES or $X509_VOMS_FILE .RE .RS 4. ~/.arc/vomses .RE .RS 5. ~/.voms/vomses .RE .RS 6. $ARC_LOCATION/etc/vomses (this is for Windows environment) .RE .RS 7. $ARC_LOCATION/etc/grid-security/vomses (this is for Windows environment) .RE .RS 8. $PWD/vomses .RE .RS 9. /etc/vomses .RE .RS 10. /etc/grid-security/vomses .RE .RE .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. By using the .B --conffile option a different configuration file can be used instead of the default. 
.SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arcclean (1), .BR arccp (1), .BR arcget (1), .BR arcinfo (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcrenew (1), .BR arcresub (1), .BR arcresume (1), .BR arcrm (1), .BR arcstat (1), .BR arcsub (1), .BR arcsync (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/credentials/PaxHeaders.30264/arcproxy_voms.cpp0000644000000000000000000000013214152153376025510 xustar000000000000000030 mtime=1638455038.340644998 30 atime=1638455038.474647011 30 ctime=1638455101.048587213 nordugrid-arc-6.14.0/src/clients/credentials/arcproxy_voms.cpp0000644000175000002070000002247714152153376025511 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include bool contact_voms_servers(std::map >& vomscmdlist, std::list& orderlist, std::string& vomses_path, bool use_gsi_comm, bool use_http_comm, const std::string& voms_period, Arc::UserConfig& usercfg, Arc::Logger& logger, const std::string& tmp_proxy_path, std::string& vomsacseq) { std::string ca_dir; ca_dir = usercfg.CACertificatesDirectory(); class voms_match: public Arc::VOMSConfig::filter { private: const std::string& voms_; public: virtual bool match(const Arc::VOMSConfigLine& line) const { return ((line.Name() == voms_) || (line.Alias() == voms_)); }; voms_match(const std::string& voms):voms_(voms) { }; }; class vomses_match: public Arc::VOMSConfig::filter { private: const std::map >& vomses_; public: virtual bool match(const Arc::VOMSConfigLine& line) const { // TODO: this will not scale for many voms servers specified at command line for(std::map >::const_iterator voms = vomses_.begin(); voms != vomses_.end(); ++voms) { if((line.Name() == voms->first) || (line.Alias() == voms->first)) return true; }; return false; }; vomses_match(const std::map >& vomses):vomses_(vomses) { }; }; Arc::VOMSConfig voms_config(vomses_path, vomses_match(vomscmdlist)); if(!voms_config) { // logger logger.msg(Arc::ERROR, "Failed to process VOMS configuration or no suitable configuration lines found."); return false; } //Contact the voms servers to retrieve attribute certificate Arc::MCCConfig cfg; cfg.AddProxy(tmp_proxy_path); cfg.AddCADir(ca_dir); Arc::Period lifetime; if(!voms_period.empty()) { time_t voms_period_sec; if(!Arc::stringto(voms_period,voms_period_sec)) { logger.msg(Arc::ERROR, "Failed to parse requested VOMS lifetime: %s", voms_period); return false; } lifetime = voms_period_sec; } // TODO: group commands by server. Is it really useful? Can it really be used effectively. 
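// In the legacy (non-RESTful) branch further below, each requested attribute
// string is turned into an Arc::VOMSCommand. A condensed sketch of that
// mapping; to_voms_command is a hypothetical name and the ClientVOMS header
// path is an assumption.
#include <string>
#include <arc/communication/ClientVOMS.h>

static Arc::VOMSCommand to_voms_command(const std::string& req, const std::string& vo) {
  if (req.empty())
    return Arc::VOMSCommand().GetGroup("/" + vo);   // no command given: ask for the whole VO
  if (req == "all")
    return Arc::VOMSCommand().GetEverything();      // all attributes of this DN
  std::string::size_type pos = req.find("/Role=");
  if (pos == 0)
    return Arc::VOMSCommand().GetRole(req.substr(6));                 // bare "/Role=..."
  if (pos != std::string::npos)
    return Arc::VOMSCommand().GetRoleInGroup(req.substr(0, pos),      // "/vo/group/Role=..."
                                             req.substr(pos + 6));
  return Arc::VOMSCommand().GetGroup(req);  // plain "/vo/group"; the real code also
                                            // rejects strings not starting with '/'
}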
// Loop through commands for (std::map >::iterator vomscmd = vomscmdlist.begin(); vomscmd != vomscmdlist.end(); ++vomscmd) { const std::string& voms_server = vomscmd->first; // server name const std::list& voms_commands = vomscmd->second; // command to send bool succeeded = false; int voms_lines_num = 0; // Loop through suitable voms configuration lines for (Arc::VOMSConfig::iterator vomsline = voms_config.First(voms_match(voms_server)); (bool)vomsline; vomsline = vomsline.Next(voms_match(voms_server))) { if(vomsline->Host().empty()) { logger.msg(Arc::ERROR, "Cannot get VOMS server address information from vomses line: \"%s\"", vomsline->Str()); throw std::runtime_error("Cannot get VOMS server address information from vomses line: \"" + vomsline->Str() + "\""); } ++voms_lines_num; logger.msg(Arc::INFO, "Contacting VOMS server (named %s): %s on port: %s", vomsline->Name(), vomsline->Host(), vomsline->Port()); std::cout << Arc::IString("Contacting VOMS server (named %s): %s on port: %s", vomsline->Name(), vomsline->Host(), vomsline->Port()) << std::endl; unsigned int port_num = 0; if(!vomsline->Port().empty()) { if(!Arc::stringto(vomsline->Port(),port_num)) { // Failed to parse port logger.msg(Arc::ERROR, "Failed to parse requested VOMS server port number: %s", vomsline->Port()); continue; } } else { port_num = 8443; // default VOMS port ? } if(use_http_comm) { // RESTful interface std::list fqans; for(std::list::const_iterator c_it = voms_commands.begin(); c_it != voms_commands.end(); ++c_it) { if (c_it->empty()) { // ?? fqans.push_back("/"+voms_name); } else if (Arc::lower(*c_it) == "all") { // ?? fqans.push_back("/"+voms_name); } else if (Arc::lower(*c_it) == "list") { // N // not supported logger.msg(Arc::ERROR, "List functionality is not supported for RESTful VOMS interface"); continue; } else { fqans.push_back(*c_it); // ?? } } Arc::ClientVOMSRESTful client(cfg, vomsline->Host(), port_num, Arc::TLSSec, usercfg.Timeout()/* todo: , proxy_host, proxy_port*/); std::string response; Arc::MCC_Status status = client.process(fqans, lifetime, response); if (!status) { std::cout << Arc::IString("The VOMS server with the information:\n\t%s\ncan not be reached, please make sure it is available.", vomsline->Str()) << std::endl; std::cout << Arc::IString("Collected error is:\n\t%s",(std::string)status) << std::endl; continue; //There could be another voms replicated server with the same name exists } if (response.empty()) { logger.msg(Arc::ERROR, "No valid response from VOMS server: %s", vomsline->Name()); std::cout << Arc::IString("Collected error is:\n\t%s",(std::string)status) << std::endl; continue; } vomsacseq.append(response); succeeded = true; break; // ?? 
} else { // old interface std::list commands; for(std::list::const_iterator c_it = voms_commands.begin(); c_it != voms_commands.end(); ++c_it) { if (c_it->empty()) { commands.push_back(Arc::VOMSCommand().GetGroup("/"+vomsline->Name())); } else if (Arc::lower(*c_it) == "all") { commands.push_back(Arc::VOMSCommand().GetEverything()); } else if (Arc::lower(*c_it) == "list") { // N // not supported logger.msg(Arc::ERROR, "List functionality is not supported for legacy VOMS interface"); continue; } else { std::string::size_type pos = c_it->find("/Role="); if (pos == 0) { commands.push_back(Arc::VOMSCommand().GetRole(c_it->substr(pos+6))); } else if((pos != std::string::npos) && (pos > 0)) { commands.push_back(Arc::VOMSCommand().GetRoleInGroup(c_it->substr(0, pos),c_it->substr(pos + 6))); } else if((*c_it)[0] == '/') { commands.push_back(Arc::VOMSCommand().GetGroup(*c_it)); } else { // unexpected logger.msg(Arc::ERROR, "Failed to parse VOMS command: %s",*c_it); continue; } } } std::list > ordering; for(std::list::iterator o_it = orderlist.begin(); o_it != orderlist.end(); ++o_it) { std::string::size_type pos = o_it->find(':'); if(pos == std::string::npos) { ordering.push_back(std::pair(*o_it,"")); } else { ordering.push_back(std::pair(o_it->substr(0,pos),o_it->substr(pos+1))); } } //logger.msg(Arc::VERBOSE, "Try to get attribute from VOMS server with order: %s", ordering); //logger.msg(Arc::VERBOSE, "Message sent to VOMS server %s is: %s", voms_name, send_msg); Arc::ClientVOMS client(cfg, vomsline->Host(), port_num, use_gsi_comm ? Arc::GSISec : Arc::TLSSec, usercfg.Timeout()); std::string response; Arc::MCC_Status status = client.process(commands, ordering, lifetime, response); if (!status) { std::cout << Arc::IString("The VOMS server with the information:\n\t%s\ncan not be reached, please make sure it is available.", vomsline->Str()) << std::endl; std::cout << Arc::IString("Collected error is:\n\t%s",(std::string)status) << std::endl; continue; //There could be another voms replicated server with the same name exists } if (response.empty()) { logger.msg(Arc::ERROR, "No valid response from VOMS server: %s", vomsline->Name()); std::cout << Arc::IString("Collected error is:\n\t%s",(std::string)status) << std::endl; continue; } vomsacseq.append(response); succeeded = true; break; } } // voms lines if(succeeded == false) { if(voms_lines_num > 1) { std::cout << Arc::IString("There are %d servers with the same name: %s in your vomses file, but none of them can be reached, or can return a valid message.", voms_lines_num, voms_server) << std::endl; } return false; } } // voms servers return true; } nordugrid-arc-6.14.0/src/clients/credentials/PaxHeaders.30264/README0000644000000000000000000000013214152153376022751 xustar000000000000000030 mtime=1638455038.339644982 30 atime=1638455038.473646996 30 ctime=1638455101.046587183 nordugrid-arc-6.14.0/src/clients/credentials/README0000644000175000002070000000006014152153376022732 0ustar00mockbuildmock00000000000000User tools for manipulating user credentials. nordugrid-arc-6.14.0/src/clients/PaxHeaders.30264/README0000644000000000000000000000013214152153376020454 xustar000000000000000030 mtime=1638455038.336644937 30 atime=1638455038.472646981 30 ctime=1638455100.990586342 nordugrid-arc-6.14.0/src/clients/README0000644000175000002070000000013514152153376020440 0ustar00mockbuildmock00000000000000ARC provides a number of command line clients that implement interfaces to various services. 
nordugrid-arc-6.14.0/src/clients/PaxHeaders.30264/compute0000644000000000000000000000013214152153475021173 xustar000000000000000030 mtime=1638455101.100587995 30 atime=1638455103.999631554 30 ctime=1638455101.100587995 nordugrid-arc-6.14.0/src/clients/compute/0000755000175000002070000000000014152153475021235 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcresub.cpp0000644000000000000000000000013214152153376023562 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 30 ctime=1638455101.094587904 nordugrid-arc-6.14.0/src/clients/compute/arcresub.cpp0000644000175000002070000002050214152153376023546 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include "utils.h" #include "submit.h" int RUNMAIN(arcresub)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::Logger logger(Arc::Logger::getRootLogger(), "arcresub"); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_RESUB, istring("[job ...]")); std::list jobidentifiers = opt.Parse(argc, argv); if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arcresub", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); Arc::UserConfig usercfg(opt.conffile, opt.joblist); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if (opt.show_plugins) { std::list types; types.push_back("HED:SubmitterPlugin"); types.push_back("HED:ServiceEndpointRetrieverPlugin"); types.push_back("HED:TargetInformationRetrieverPlugin"); types.push_back("HED:BrokerPlugin"); showplugins("arcresub", types, logger, usercfg.Broker().first); return 0; } // TODO: proper check for each job must be implemented if(usercfg.OToken().empty()) { if (!checkproxy(usercfg)) { return 1; } } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); if (opt.same && opt.notsame) { logger.msg(Arc::ERROR, "--same and --not-same cannot be specified together."); return 1; } for (std::list::const_iterator it = opt.jobidinfiles.begin(); it != opt.jobidinfiles.end(); ++it) { if (!Arc::Job::ReadJobIDsFromFile(*it, jobidentifiers)) { logger.msg(Arc::WARNING, "Cannot read specified jobid file: %s", *it); } } if (opt.timeout > 0) usercfg.Timeout(opt.timeout); if (!opt.broker.empty()) usercfg.Broker(opt.broker); if ((!opt.joblist.empty() || !opt.status.empty()) && jobidentifiers.empty() && opt.clusters.empty()) opt.all = true; if (jobidentifiers.empty() && opt.clusters.empty() && !opt.all) { logger.msg(Arc::ERROR, "No jobs given"); return 1; } std::list selectedURLs = getSelectedURLsFromUserConfigAndCommandLine(usercfg, opt.clusters); std::list rejectManagementURLs = getRejectManagementURLsFromUserConfigAndCommandLine(usercfg, opt.rejectmanagement); std::list jobs; Arc::JobInformationStorage *jobstore = createJobInformationStorage(usercfg); if (jobstore != NULL && !jobstore->IsStorageExisting()) { 
logger.msg(Arc::ERROR, "Job list file (%s) doesn't exist", usercfg.JobListFile()); delete jobstore; return 1; } if (jobstore == NULL || ( opt.all && !jobstore->ReadAll(jobs, rejectManagementURLs)) || (!opt.all && !jobstore->Read(jobs, jobidentifiers, selectedURLs, rejectManagementURLs))) { logger.msg(Arc::ERROR, "Unable to read job information from file (%s)", usercfg.JobListFile()); delete jobstore; return 1; } if (!opt.all) { for (std::list::const_iterator itJIDAndName = jobidentifiers.begin(); itJIDAndName != jobidentifiers.end(); ++itJIDAndName) { std::cout << Arc::IString("Warning: Job not found in job list: %s", *itJIDAndName) << std::endl; } } Arc::JobSupervisor jobmaster(usercfg, jobs); jobmaster.Update(); jobmaster.SelectValid(); if (!opt.status.empty()) { jobmaster.SelectByStatus(opt.status); } if (jobmaster.GetSelectedJobs().empty()) { std::cout << Arc::IString("No jobs") << std::endl; delete jobstore; return 1; } std::list rejectDiscoveryURLs = getRejectDiscoveryURLsFromUserConfigAndCommandLine(usercfg, opt.rejectdiscovery); // select endpoints to resubmit to std::list services; if ( opt.isARC6TargetSelectionOptions(logger, true) ) { // canonicalize endpoint types (allow -c that is used for filtering here) if (!opt.canonicalizeARC6InterfaceTypes(logger)) return 1; // get endpoint batches according to ARC6 target selection logic std::list > endpoint_batches; bool info_discovery = prepare_submission_endpoint_batches(usercfg, opt, endpoint_batches); // JobSupervisor::Resubmit code only works with brokering if (!info_discovery) { logger.msg(Arc::ERROR,"It is not possible to resubmit jobs without new target information discovery"); return 1; } // resubmit only to priority submission interface, no fallbacks services = endpoint_batches.front(); } else { services = getServicesFromUserConfigAndCommandLine(usercfg, opt.indexurls, opt.qlusters, opt.requestedSubmissionInterfaceName, opt.infointerface); } std::list resubmittedJobs; // same + 2*notsame in {0,1,2}. same and notsame cannot both be true, see above. 
int retval = (int)!jobmaster.Resubmit((int)opt.same + 2*(int)opt.notsame, services, resubmittedJobs, rejectDiscoveryURLs); if (retval == 0 && resubmittedJobs.empty()) { std::cout << Arc::IString("No jobs to resubmit with the specified status") << std::endl; delete jobstore; return 0; } for (std::list::const_iterator it = resubmittedJobs.begin(); it != resubmittedJobs.end(); ++it) { std::cout << Arc::IString("Job submitted with jobid: %s", it->JobID) << std::endl; } if (!resubmittedJobs.empty() && !jobstore->Write(resubmittedJobs)) { std::cout << Arc::IString("Warning: Failed to write job information to file (%s)", usercfg.JobListFile()) << std::endl; std::cout << Arc::IString(" To recover missing jobs, run arcsync") << std::endl; retval = 1; } if (!opt.jobidoutfile.empty() && !Arc::Job::WriteJobIDsToFile(resubmittedJobs, opt.jobidoutfile)) { logger.msg(Arc::WARNING, "Cannot write jobids to file (%s)", opt.jobidoutfile); retval = 1; } std::list notresubmitted = jobmaster.GetIDsNotProcessed(); if (!jobmaster.Cancel()) { retval = 1; } for (std::list::const_iterator it = jobmaster.GetIDsNotProcessed().begin(); it != jobmaster.GetIDsNotProcessed().end(); ++it) { logger.msg(Arc::WARNING, "Resubmission of job (%s) succeeded, but killing the job failed - it will still appear in the job list", *it); } if (!opt.keep) { if (!jobmaster.Clean()) { retval = 1; } for (std::list::const_iterator it = jobmaster.GetIDsNotProcessed().begin(); it != jobmaster.GetIDsNotProcessed().end(); ++it) { logger.msg(Arc::WARNING, "Resubmission of job (%s) succeeded, but cleaning the job failed - it will still appear in the job list", *it); } if (!jobstore->Remove(jobmaster.GetIDsProcessed())) { std::cout << Arc::IString("Warning: Failed removing jobs from file (%s)", usercfg.JobListFile()) << std::endl; std::cout << Arc::IString(" Use arcclean to remove non-existing jobs") << std::endl; retval = 1; } } delete jobstore; if ((resubmittedJobs.size() + notresubmitted.size()) > 1) { std::cout << std::endl << Arc::IString("Job resubmission summary:") << std::endl; std::cout << "-----------------------" << std::endl; std::cout << Arc::IString("%d of %d jobs were resubmitted", resubmittedJobs.size(), resubmittedJobs.size() + notresubmitted.size()) << std::endl; if (!notresubmitted.empty()) { std::cout << Arc::IString("The following %d were not resubmitted", notresubmitted.size()) << std::endl; for (std::list::const_iterator it = notresubmitted.begin(); it != notresubmitted.end(); ++it) { std::cout << *it << std::endl; } } } return retval; } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376023304 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.472646981 30 ctime=1638455101.074587604 nordugrid-arc-6.14.0/src/clients/compute/Makefile.am0000644000175000002070000000751214152153376023276 0ustar00mockbuildmock00000000000000bin_PROGRAMS = arcsub arcget arcstat arcinfo arckill arcclean arccat arcsync \ arcresub arcresume arcrenew arctest man_MANS = arcsub.1 arcget.1 arcstat.1 arcinfo.1 arckill.1 arcclean.1 arccat.1 \ arcresub.1 arcsync.1 arcresume.1 arcrenew.1 arctest.1 CLILIBS = \ $(top_builddir)/src/hed/libs/compute/libarccompute.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la arcsub_SOURCES = arcsub.cpp utils.cpp utils.h 
submit.cpp submit.h arcsub_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcsub_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arctest_SOURCES = arctest.cpp utils.cpp utils.h submit.cpp submit.h arctest_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arctest_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcget_SOURCES = arcget.cpp utils.cpp utils.h arcget_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcget_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcstat_SOURCES = arcstat.cpp utils.cpp utils.h arcstat_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcstat_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcinfo_SOURCES = arcinfo.cpp utils.cpp utils.h arcinfo_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcinfo_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) arckill_SOURCES = arckill.cpp utils.cpp utils.h arckill_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arckill_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcclean_SOURCES = arcclean.cpp utils.cpp utils.h arcclean_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcclean_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arccat_SOURCES = arccat.cpp utils.cpp utils.h arccat_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arccat_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcsync_SOURCES = arcsync.cpp utils.cpp utils.h arcsync_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcsync_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcresub_SOURCES = arcresub.cpp utils.cpp utils.h submit.cpp submit.h arcresub_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcresub_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcrenew_SOURCES = arcrenew.cpp utils.cpp utils.h arcrenew_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcrenew_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcresume_SOURCES = arcresume.cpp utils.cpp utils.h arcresume_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcresume_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arctest.1.in0000644000000000000000000000013214152153376023404 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 30 ctime=1638455101.085587769 nordugrid-arc-6.14.0/src/clients/compute/arctest.1.in0000644000175000002070000001012114152153376023364 
0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCTEST 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arctest \- ARC Test Suite .SH DESCRIPTION The .B arctest command tests basic ARC client and server setup. .SH SYNOPSIS .B arctest [options] .SH OPTIONS .IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR" select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-g\fR, \fB--index\fR=\fIname\fR" select one or more registries: \fIname\fR can be an alias for a single registry, a group of registries or a URL .IP "\fB-R\fR, \fB--rejectdiscovery\fR=\fIURL\fR" skip the service with the given URL during service discovery .IP "\fB-S\fR, \fB--submissioninterface\fR=\fIInterfaceName\fR" only use this interface for submitting (e.g. org.nordugrid.gridftpjob, org.ogf.glue.emies.activitycreation, org.ogf.bes) .IP "\fB-I\fR, \fB--infointerface\fR=\fIInterfaceName\fR" the computing element specified by URL at the command line should be queried using this information interface (possible options: org.nordugrid.ldapng, org.nordugrid.ldapglue2, org.nordugrid.wsrfglue2, org.ogf.glue.emies.resourceinfo) .IP "\fB-J\fR \fIjobid\fR, \fB--jobid\fR=\fIjobid\fR" submits testjob given by the jobid. .IP "\fB-r\fR \fItime\fR, \fB--runtime\fR=\fItime\fR" test job runtime specified in case of the 1st test job. .IP "\fB-j\fR, \fB--joblist\fR=\fIfilename\fR" the file storing information about active jobs (default ~/.arc/jobs.xml) .IP "\fB-o\fR, \fB--jobids-to-file\fR=\fIfilename\fR" the IDs of the submitted jobs will be appended to this file .IP "\fB-D\fR, \fB--dryrun\fR" submit jobs as dry run (no submission to batch system) .IP "\fB --direct\fR" submit directly - no resource discovery or matchmaking .IP "\fB-x\fR, \fB--dumpdescription\fR" do not submit - dump job description in the language accepted by the target .IP "\fB-E\fR, \fB--certificate\fR" prints information about installed user- and CA-certificates .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-b\fR, \fB--broker\fR=\fIbroker\fR" selected broker: Random (default), FastestQueue or custom .IP "\fB-v\fR, \fB-version\fR" print version information .IP "\fB-h\fR, \fB-help\fR" print usage information .LP .SH EXTENDED DESCRIPTION The .B arctest command is used for basic testing of ARC client and server installation through submitting various test jobs. It can also print basic information about user's certificate. The command is complementary to .B arcinfo and .B arcproxy -I The test jobs available in this version of arctest are: Test job 1: This test-job calculates prime-numbers for a number of minutes given by .B -r (default 5) and outputs the list to stderr. The source-code for the prime-number program, the Makefile and the executable are downloaded to the cluster from HTTP and FTP servers and the program is compiled before running. In this way, the test job constitutes a fairly comprehensive test of the basic setup of a grid cluster. Test job 2: attempts to list all environment variables at the remote site Test job 3: copies a remote file from an HTTP server into a local file .SH EXAMPLES arctest -J 1 -c will submit test job number 1 to the specified cluster belonging to the flavor of it. 
arctest --certificate will print basic information about the user's certificate. .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arcclean (1), .BR arccp (1), .BR arcget (1), .BR arcinfo (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcrenew (1), .BR arcresub (1), .BR arcresume (1), .BR arcrm (1), .BR arcstat (1), .BR arcsub (1), .BR arcsync (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153421023304 xustar000000000000000030 mtime=1638455057.836937938 30 atime=1638455091.291440608 30 ctime=1638455101.073587589 nordugrid-arc-6.14.0/src/clients/compute/Makefile.in0000644000175000002070000027003414152153421023277 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ bin_PROGRAMS = arcsub$(EXEEXT) arcget$(EXEEXT) arcstat$(EXEEXT) \ arcinfo$(EXEEXT) arckill$(EXEEXT) arcclean$(EXEEXT) \ arccat$(EXEEXT) arcsync$(EXEEXT) arcresub$(EXEEXT) \ arcresume$(EXEEXT) arcrenew$(EXEEXT) arctest$(EXEEXT) subdir = src/clients/compute DIST_COMMON = $(srcdir)/Makefile.in 
$(srcdir)/Makefile.am \ $(srcdir)/arcstat.1.in $(srcdir)/arcinfo.1.in \ $(srcdir)/arcsub.1.in $(srcdir)/arcclean.1.in \ $(srcdir)/arckill.1.in $(srcdir)/arcget.1.in \ $(srcdir)/arccat.1.in $(srcdir)/arcresub.1.in \ $(srcdir)/arcsync.1.in $(srcdir)/arcrenew.1.in \ $(srcdir)/arcresume.1.in $(srcdir)/arctest.1.in \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = arcstat.1 arcinfo.1 arcsub.1 arcclean.1 arckill.1 \ arcget.1 arccat.1 arcresub.1 arcsync.1 arcrenew.1 arcresume.1 \ arctest.1 CONFIG_CLEAN_VPATH_FILES = am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)" PROGRAMS = $(bin_PROGRAMS) am_arccat_OBJECTS = arccat-arccat.$(OBJEXT) arccat-utils.$(OBJEXT) arccat_OBJECTS = $(am_arccat_OBJECTS) am__DEPENDENCIES_1 = arccat_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = arccat_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arccat_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcclean_OBJECTS = arcclean-arcclean.$(OBJEXT) \ arcclean-utils.$(OBJEXT) arcclean_OBJECTS = $(am_arcclean_OBJECTS) arcclean_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) arcclean_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcclean_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcget_OBJECTS = arcget-arcget.$(OBJEXT) arcget-utils.$(OBJEXT) arcget_OBJECTS = $(am_arcget_OBJECTS) arcget_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) arcget_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcget_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcinfo_OBJECTS = arcinfo-arcinfo.$(OBJEXT) arcinfo-utils.$(OBJEXT) arcinfo_OBJECTS = $(am_arcinfo_OBJECTS) arcinfo_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) arcinfo_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcinfo_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arckill_OBJECTS = arckill-arckill.$(OBJEXT) arckill-utils.$(OBJEXT) arckill_OBJECTS = $(am_arckill_OBJECTS) arckill_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) arckill_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX 
$(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arckill_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcrenew_OBJECTS = arcrenew-arcrenew.$(OBJEXT) \ arcrenew-utils.$(OBJEXT) arcrenew_OBJECTS = $(am_arcrenew_OBJECTS) arcrenew_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) arcrenew_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcrenew_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcresub_OBJECTS = arcresub-arcresub.$(OBJEXT) \ arcresub-utils.$(OBJEXT) arcresub-submit.$(OBJEXT) arcresub_OBJECTS = $(am_arcresub_OBJECTS) arcresub_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) arcresub_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcresub_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcresume_OBJECTS = arcresume-arcresume.$(OBJEXT) \ arcresume-utils.$(OBJEXT) arcresume_OBJECTS = $(am_arcresume_OBJECTS) arcresume_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) arcresume_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcresume_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcstat_OBJECTS = arcstat-arcstat.$(OBJEXT) arcstat-utils.$(OBJEXT) arcstat_OBJECTS = $(am_arcstat_OBJECTS) arcstat_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) arcstat_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcstat_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcsub_OBJECTS = arcsub-arcsub.$(OBJEXT) arcsub-utils.$(OBJEXT) \ arcsub-submit.$(OBJEXT) arcsub_OBJECTS = $(am_arcsub_OBJECTS) arcsub_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) arcsub_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcsub_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arcsync_OBJECTS = arcsync-arcsync.$(OBJEXT) arcsync-utils.$(OBJEXT) arcsync_OBJECTS = $(am_arcsync_OBJECTS) arcsync_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) arcsync_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcsync_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_arctest_OBJECTS = arctest-arctest.$(OBJEXT) arctest-utils.$(OBJEXT) \ arctest-submit.$(OBJEXT) arctest_OBJECTS = $(am_arctest_OBJECTS) arctest_DEPENDENCIES = $(CLILIBS) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \ $(am__DEPENDENCIES_1) arctest_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arctest_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) 
$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(arccat_SOURCES) $(arcclean_SOURCES) $(arcget_SOURCES) \ $(arcinfo_SOURCES) $(arckill_SOURCES) $(arcrenew_SOURCES) \ $(arcresub_SOURCES) $(arcresume_SOURCES) $(arcstat_SOURCES) \ $(arcsub_SOURCES) $(arcsync_SOURCES) $(arctest_SOURCES) DIST_SOURCES = $(arccat_SOURCES) $(arcclean_SOURCES) $(arcget_SOURCES) \ $(arcinfo_SOURCES) $(arckill_SOURCES) $(arcrenew_SOURCES) \ $(arcresub_SOURCES) $(arcresume_SOURCES) $(arcstat_SOURCES) \ $(arcsub_SOURCES) $(arcsync_SOURCES) $(arctest_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } man1dir = $(mandir)/man1 NROFF = nroff MANS = $(man_MANS) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. 
am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = 
@GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ 
SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ man_MANS = arcsub.1 arcget.1 arcstat.1 arcinfo.1 arckill.1 arcclean.1 arccat.1 \ arcresub.1 arcsync.1 arcresume.1 arcrenew.1 arctest.1 CLILIBS = \ $(top_builddir)/src/hed/libs/compute/libarccompute.la \ $(top_builddir)/src/hed/libs/data/libarcdata.la \ $(top_builddir)/src/hed/libs/credential/libarccredential.la \ $(top_builddir)/src/hed/libs/message/libarcmessage.la \ $(top_builddir)/src/hed/libs/loader/libarcloader.la \ $(top_builddir)/src/hed/libs/common/libarccommon.la arcsub_SOURCES = arcsub.cpp utils.cpp utils.h submit.cpp submit.h arcsub_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcsub_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) 
$(OPENSSL_LIBS) $(DBCXX_LIBS) arctest_SOURCES = arctest.cpp utils.cpp utils.h submit.cpp submit.h arctest_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arctest_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcget_SOURCES = arcget.cpp utils.cpp utils.h arcget_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcget_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcstat_SOURCES = arcstat.cpp utils.cpp utils.h arcstat_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcstat_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcinfo_SOURCES = arcinfo.cpp utils.cpp utils.h arcinfo_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(AM_CXXFLAGS) arcinfo_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) arckill_SOURCES = arckill.cpp utils.cpp utils.h arckill_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arckill_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcclean_SOURCES = arcclean.cpp utils.cpp utils.h arcclean_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcclean_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arccat_SOURCES = arccat.cpp utils.cpp utils.h arccat_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arccat_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcsync_SOURCES = arcsync.cpp utils.cpp utils.h arcsync_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcsync_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcresub_SOURCES = arcresub.cpp utils.cpp utils.h submit.cpp submit.h arcresub_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcresub_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcrenew_SOURCES = arcrenew.cpp utils.cpp utils.h arcrenew_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcrenew_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) arcresume_SOURCES = arcresume.cpp utils.cpp utils.h arcresume_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(OPENSSL_CFLAGS) $(DBCXX_CPPFLAGS) $(AM_CXXFLAGS) arcresume_LDADD = $(CLILIBS) $(GLIBMM_LIBS) $(LIBXML2_LIBS) $(OPENSSL_LIBS) $(DBCXX_LIBS) all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/clients/compute/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/clients/compute/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in 
$(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): arcstat.1: $(top_builddir)/config.status $(srcdir)/arcstat.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcinfo.1: $(top_builddir)/config.status $(srcdir)/arcinfo.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcsub.1: $(top_builddir)/config.status $(srcdir)/arcsub.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcclean.1: $(top_builddir)/config.status $(srcdir)/arcclean.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arckill.1: $(top_builddir)/config.status $(srcdir)/arckill.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcget.1: $(top_builddir)/config.status $(srcdir)/arcget.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arccat.1: $(top_builddir)/config.status $(srcdir)/arccat.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcresub.1: $(top_builddir)/config.status $(srcdir)/arcresub.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcsync.1: $(top_builddir)/config.status $(srcdir)/arcsync.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcrenew.1: $(top_builddir)/config.status $(srcdir)/arcrenew.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcresume.1: $(top_builddir)/config.status $(srcdir)/arcresume.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arctest.1: $(top_builddir)/config.status $(srcdir)/arctest.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-binPROGRAMS: $(bin_PROGRAMS) @$(NORMAL_INSTALL) @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ fi; \ for p in $$list; do echo "$$p $$p"; done | \ sed 's/$(EXEEXT)$$//' | \ while read p p1; do if test -f $$p \ || test -f $$p1 \ ; then echo "$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n;h' \ -e 's|.*|.|' \ -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) files[d] = files[d] " " $$1; \ else { print "f", $$3 "/" $$4, $$1; } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ } \ ; done uninstall-binPROGRAMS: @$(NORMAL_UNINSTALL) @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ files=`for p in $$list; do echo "$$p"; done | \ sed 
-e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ -e 's/$$/$(EXEEXT)/' \ `; \ test -n "$$list" || exit 0; \ echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(bindir)" && rm -f $$files clean-binPROGRAMS: @list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list arccat$(EXEEXT): $(arccat_OBJECTS) $(arccat_DEPENDENCIES) $(EXTRA_arccat_DEPENDENCIES) @rm -f arccat$(EXEEXT) $(AM_V_CXXLD)$(arccat_LINK) $(arccat_OBJECTS) $(arccat_LDADD) $(LIBS) arcclean$(EXEEXT): $(arcclean_OBJECTS) $(arcclean_DEPENDENCIES) $(EXTRA_arcclean_DEPENDENCIES) @rm -f arcclean$(EXEEXT) $(AM_V_CXXLD)$(arcclean_LINK) $(arcclean_OBJECTS) $(arcclean_LDADD) $(LIBS) arcget$(EXEEXT): $(arcget_OBJECTS) $(arcget_DEPENDENCIES) $(EXTRA_arcget_DEPENDENCIES) @rm -f arcget$(EXEEXT) $(AM_V_CXXLD)$(arcget_LINK) $(arcget_OBJECTS) $(arcget_LDADD) $(LIBS) arcinfo$(EXEEXT): $(arcinfo_OBJECTS) $(arcinfo_DEPENDENCIES) $(EXTRA_arcinfo_DEPENDENCIES) @rm -f arcinfo$(EXEEXT) $(AM_V_CXXLD)$(arcinfo_LINK) $(arcinfo_OBJECTS) $(arcinfo_LDADD) $(LIBS) arckill$(EXEEXT): $(arckill_OBJECTS) $(arckill_DEPENDENCIES) $(EXTRA_arckill_DEPENDENCIES) @rm -f arckill$(EXEEXT) $(AM_V_CXXLD)$(arckill_LINK) $(arckill_OBJECTS) $(arckill_LDADD) $(LIBS) arcrenew$(EXEEXT): $(arcrenew_OBJECTS) $(arcrenew_DEPENDENCIES) $(EXTRA_arcrenew_DEPENDENCIES) @rm -f arcrenew$(EXEEXT) $(AM_V_CXXLD)$(arcrenew_LINK) $(arcrenew_OBJECTS) $(arcrenew_LDADD) $(LIBS) arcresub$(EXEEXT): $(arcresub_OBJECTS) $(arcresub_DEPENDENCIES) $(EXTRA_arcresub_DEPENDENCIES) @rm -f arcresub$(EXEEXT) $(AM_V_CXXLD)$(arcresub_LINK) $(arcresub_OBJECTS) $(arcresub_LDADD) $(LIBS) arcresume$(EXEEXT): $(arcresume_OBJECTS) $(arcresume_DEPENDENCIES) $(EXTRA_arcresume_DEPENDENCIES) @rm -f arcresume$(EXEEXT) $(AM_V_CXXLD)$(arcresume_LINK) $(arcresume_OBJECTS) $(arcresume_LDADD) $(LIBS) arcstat$(EXEEXT): $(arcstat_OBJECTS) $(arcstat_DEPENDENCIES) $(EXTRA_arcstat_DEPENDENCIES) @rm -f arcstat$(EXEEXT) $(AM_V_CXXLD)$(arcstat_LINK) $(arcstat_OBJECTS) $(arcstat_LDADD) $(LIBS) arcsub$(EXEEXT): $(arcsub_OBJECTS) $(arcsub_DEPENDENCIES) $(EXTRA_arcsub_DEPENDENCIES) @rm -f arcsub$(EXEEXT) $(AM_V_CXXLD)$(arcsub_LINK) $(arcsub_OBJECTS) $(arcsub_LDADD) $(LIBS) arcsync$(EXEEXT): $(arcsync_OBJECTS) $(arcsync_DEPENDENCIES) $(EXTRA_arcsync_DEPENDENCIES) @rm -f arcsync$(EXEEXT) $(AM_V_CXXLD)$(arcsync_LINK) $(arcsync_OBJECTS) $(arcsync_LDADD) $(LIBS) arctest$(EXEEXT): $(arctest_OBJECTS) $(arctest_DEPENDENCIES) $(EXTRA_arctest_DEPENDENCIES) @rm -f arctest$(EXEEXT) $(AM_V_CXXLD)$(arctest_LINK) $(arctest_OBJECTS) $(arctest_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arccat-arccat.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arccat-utils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcclean-arcclean.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcclean-utils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcget-arcget.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcget-utils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcinfo-arcinfo.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcinfo-utils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arckill-arckill.Po@am__quote@ @AMDEP_TRUE@@am__include@ 
@am__quote@./$(DEPDIR)/arckill-utils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcrenew-arcrenew.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcrenew-utils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcresub-arcresub.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcresub-submit.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcresub-utils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcresume-arcresume.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcresume-utils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcstat-arcstat.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcstat-utils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcsub-arcsub.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcsub-submit.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcsub-utils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcsync-arcsync.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcsync-utils.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arctest-arctest.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arctest-submit.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arctest-utils.Po@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< arccat-arccat.o: arccat.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccat_CXXFLAGS) $(CXXFLAGS) -MT arccat-arccat.o -MD -MP -MF $(DEPDIR)/arccat-arccat.Tpo -c -o arccat-arccat.o `test -f 'arccat.cpp' || echo '$(srcdir)/'`arccat.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arccat-arccat.Tpo $(DEPDIR)/arccat-arccat.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arccat.cpp' object='arccat-arccat.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccat_CXXFLAGS) $(CXXFLAGS) -c -o arccat-arccat.o `test -f 
'arccat.cpp' || echo '$(srcdir)/'`arccat.cpp arccat-arccat.obj: arccat.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccat_CXXFLAGS) $(CXXFLAGS) -MT arccat-arccat.obj -MD -MP -MF $(DEPDIR)/arccat-arccat.Tpo -c -o arccat-arccat.obj `if test -f 'arccat.cpp'; then $(CYGPATH_W) 'arccat.cpp'; else $(CYGPATH_W) '$(srcdir)/arccat.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arccat-arccat.Tpo $(DEPDIR)/arccat-arccat.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arccat.cpp' object='arccat-arccat.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccat_CXXFLAGS) $(CXXFLAGS) -c -o arccat-arccat.obj `if test -f 'arccat.cpp'; then $(CYGPATH_W) 'arccat.cpp'; else $(CYGPATH_W) '$(srcdir)/arccat.cpp'; fi` arccat-utils.o: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccat_CXXFLAGS) $(CXXFLAGS) -MT arccat-utils.o -MD -MP -MF $(DEPDIR)/arccat-utils.Tpo -c -o arccat-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arccat-utils.Tpo $(DEPDIR)/arccat-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arccat-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccat_CXXFLAGS) $(CXXFLAGS) -c -o arccat-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arccat-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccat_CXXFLAGS) $(CXXFLAGS) -MT arccat-utils.obj -MD -MP -MF $(DEPDIR)/arccat-utils.Tpo -c -o arccat-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arccat-utils.Tpo $(DEPDIR)/arccat-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arccat-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arccat_CXXFLAGS) $(CXXFLAGS) -c -o arccat-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arcclean-arcclean.o: arcclean.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcclean_CXXFLAGS) $(CXXFLAGS) -MT arcclean-arcclean.o -MD -MP -MF $(DEPDIR)/arcclean-arcclean.Tpo -c -o arcclean-arcclean.o `test -f 'arcclean.cpp' || echo '$(srcdir)/'`arcclean.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcclean-arcclean.Tpo $(DEPDIR)/arcclean-arcclean.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcclean.cpp' object='arcclean-arcclean.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcclean_CXXFLAGS) $(CXXFLAGS) -c 
-o arcclean-arcclean.o `test -f 'arcclean.cpp' || echo '$(srcdir)/'`arcclean.cpp arcclean-arcclean.obj: arcclean.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcclean_CXXFLAGS) $(CXXFLAGS) -MT arcclean-arcclean.obj -MD -MP -MF $(DEPDIR)/arcclean-arcclean.Tpo -c -o arcclean-arcclean.obj `if test -f 'arcclean.cpp'; then $(CYGPATH_W) 'arcclean.cpp'; else $(CYGPATH_W) '$(srcdir)/arcclean.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcclean-arcclean.Tpo $(DEPDIR)/arcclean-arcclean.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcclean.cpp' object='arcclean-arcclean.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcclean_CXXFLAGS) $(CXXFLAGS) -c -o arcclean-arcclean.obj `if test -f 'arcclean.cpp'; then $(CYGPATH_W) 'arcclean.cpp'; else $(CYGPATH_W) '$(srcdir)/arcclean.cpp'; fi` arcclean-utils.o: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcclean_CXXFLAGS) $(CXXFLAGS) -MT arcclean-utils.o -MD -MP -MF $(DEPDIR)/arcclean-utils.Tpo -c -o arcclean-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcclean-utils.Tpo $(DEPDIR)/arcclean-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcclean-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcclean_CXXFLAGS) $(CXXFLAGS) -c -o arcclean-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arcclean-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcclean_CXXFLAGS) $(CXXFLAGS) -MT arcclean-utils.obj -MD -MP -MF $(DEPDIR)/arcclean-utils.Tpo -c -o arcclean-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcclean-utils.Tpo $(DEPDIR)/arcclean-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcclean-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcclean_CXXFLAGS) $(CXXFLAGS) -c -o arcclean-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arcget-arcget.o: arcget.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcget_CXXFLAGS) $(CXXFLAGS) -MT arcget-arcget.o -MD -MP -MF $(DEPDIR)/arcget-arcget.Tpo -c -o arcget-arcget.o `test -f 'arcget.cpp' || echo '$(srcdir)/'`arcget.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcget-arcget.Tpo $(DEPDIR)/arcget-arcget.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcget.cpp' object='arcget-arcget.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) 
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcget_CXXFLAGS) $(CXXFLAGS) -c -o arcget-arcget.o `test -f 'arcget.cpp' || echo '$(srcdir)/'`arcget.cpp arcget-arcget.obj: arcget.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcget_CXXFLAGS) $(CXXFLAGS) -MT arcget-arcget.obj -MD -MP -MF $(DEPDIR)/arcget-arcget.Tpo -c -o arcget-arcget.obj `if test -f 'arcget.cpp'; then $(CYGPATH_W) 'arcget.cpp'; else $(CYGPATH_W) '$(srcdir)/arcget.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcget-arcget.Tpo $(DEPDIR)/arcget-arcget.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcget.cpp' object='arcget-arcget.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcget_CXXFLAGS) $(CXXFLAGS) -c -o arcget-arcget.obj `if test -f 'arcget.cpp'; then $(CYGPATH_W) 'arcget.cpp'; else $(CYGPATH_W) '$(srcdir)/arcget.cpp'; fi` arcget-utils.o: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcget_CXXFLAGS) $(CXXFLAGS) -MT arcget-utils.o -MD -MP -MF $(DEPDIR)/arcget-utils.Tpo -c -o arcget-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcget-utils.Tpo $(DEPDIR)/arcget-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcget-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcget_CXXFLAGS) $(CXXFLAGS) -c -o arcget-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arcget-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcget_CXXFLAGS) $(CXXFLAGS) -MT arcget-utils.obj -MD -MP -MF $(DEPDIR)/arcget-utils.Tpo -c -o arcget-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcget-utils.Tpo $(DEPDIR)/arcget-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcget-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcget_CXXFLAGS) $(CXXFLAGS) -c -o arcget-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arcinfo-arcinfo.o: arcinfo.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcinfo_CXXFLAGS) $(CXXFLAGS) -MT arcinfo-arcinfo.o -MD -MP -MF $(DEPDIR)/arcinfo-arcinfo.Tpo -c -o arcinfo-arcinfo.o `test -f 'arcinfo.cpp' || echo '$(srcdir)/'`arcinfo.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcinfo-arcinfo.Tpo $(DEPDIR)/arcinfo-arcinfo.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcinfo.cpp' object='arcinfo-arcinfo.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) 
$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcinfo_CXXFLAGS) $(CXXFLAGS) -c -o arcinfo-arcinfo.o `test -f 'arcinfo.cpp' || echo '$(srcdir)/'`arcinfo.cpp arcinfo-arcinfo.obj: arcinfo.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcinfo_CXXFLAGS) $(CXXFLAGS) -MT arcinfo-arcinfo.obj -MD -MP -MF $(DEPDIR)/arcinfo-arcinfo.Tpo -c -o arcinfo-arcinfo.obj `if test -f 'arcinfo.cpp'; then $(CYGPATH_W) 'arcinfo.cpp'; else $(CYGPATH_W) '$(srcdir)/arcinfo.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcinfo-arcinfo.Tpo $(DEPDIR)/arcinfo-arcinfo.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcinfo.cpp' object='arcinfo-arcinfo.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcinfo_CXXFLAGS) $(CXXFLAGS) -c -o arcinfo-arcinfo.obj `if test -f 'arcinfo.cpp'; then $(CYGPATH_W) 'arcinfo.cpp'; else $(CYGPATH_W) '$(srcdir)/arcinfo.cpp'; fi` arcinfo-utils.o: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcinfo_CXXFLAGS) $(CXXFLAGS) -MT arcinfo-utils.o -MD -MP -MF $(DEPDIR)/arcinfo-utils.Tpo -c -o arcinfo-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcinfo-utils.Tpo $(DEPDIR)/arcinfo-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcinfo-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcinfo_CXXFLAGS) $(CXXFLAGS) -c -o arcinfo-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arcinfo-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcinfo_CXXFLAGS) $(CXXFLAGS) -MT arcinfo-utils.obj -MD -MP -MF $(DEPDIR)/arcinfo-utils.Tpo -c -o arcinfo-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcinfo-utils.Tpo $(DEPDIR)/arcinfo-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcinfo-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcinfo_CXXFLAGS) $(CXXFLAGS) -c -o arcinfo-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arckill-arckill.o: arckill.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arckill_CXXFLAGS) $(CXXFLAGS) -MT arckill-arckill.o -MD -MP -MF $(DEPDIR)/arckill-arckill.Tpo -c -o arckill-arckill.o `test -f 'arckill.cpp' || echo '$(srcdir)/'`arckill.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arckill-arckill.Tpo $(DEPDIR)/arckill-arckill.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arckill.cpp' object='arckill-arckill.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) 
@AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arckill_CXXFLAGS) $(CXXFLAGS) -c -o arckill-arckill.o `test -f 'arckill.cpp' || echo '$(srcdir)/'`arckill.cpp arckill-arckill.obj: arckill.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arckill_CXXFLAGS) $(CXXFLAGS) -MT arckill-arckill.obj -MD -MP -MF $(DEPDIR)/arckill-arckill.Tpo -c -o arckill-arckill.obj `if test -f 'arckill.cpp'; then $(CYGPATH_W) 'arckill.cpp'; else $(CYGPATH_W) '$(srcdir)/arckill.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arckill-arckill.Tpo $(DEPDIR)/arckill-arckill.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arckill.cpp' object='arckill-arckill.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arckill_CXXFLAGS) $(CXXFLAGS) -c -o arckill-arckill.obj `if test -f 'arckill.cpp'; then $(CYGPATH_W) 'arckill.cpp'; else $(CYGPATH_W) '$(srcdir)/arckill.cpp'; fi` arckill-utils.o: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arckill_CXXFLAGS) $(CXXFLAGS) -MT arckill-utils.o -MD -MP -MF $(DEPDIR)/arckill-utils.Tpo -c -o arckill-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arckill-utils.Tpo $(DEPDIR)/arckill-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arckill-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arckill_CXXFLAGS) $(CXXFLAGS) -c -o arckill-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arckill-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arckill_CXXFLAGS) $(CXXFLAGS) -MT arckill-utils.obj -MD -MP -MF $(DEPDIR)/arckill-utils.Tpo -c -o arckill-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arckill-utils.Tpo $(DEPDIR)/arckill-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arckill-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arckill_CXXFLAGS) $(CXXFLAGS) -c -o arckill-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arcrenew-arcrenew.o: arcrenew.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrenew_CXXFLAGS) $(CXXFLAGS) -MT arcrenew-arcrenew.o -MD -MP -MF $(DEPDIR)/arcrenew-arcrenew.Tpo -c -o arcrenew-arcrenew.o `test -f 'arcrenew.cpp' || echo '$(srcdir)/'`arcrenew.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcrenew-arcrenew.Tpo $(DEPDIR)/arcrenew-arcrenew.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcrenew.cpp' object='arcrenew-arcrenew.o' libtool=no @AMDEPBACKSLASH@ 
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrenew_CXXFLAGS) $(CXXFLAGS) -c -o arcrenew-arcrenew.o `test -f 'arcrenew.cpp' || echo '$(srcdir)/'`arcrenew.cpp arcrenew-arcrenew.obj: arcrenew.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrenew_CXXFLAGS) $(CXXFLAGS) -MT arcrenew-arcrenew.obj -MD -MP -MF $(DEPDIR)/arcrenew-arcrenew.Tpo -c -o arcrenew-arcrenew.obj `if test -f 'arcrenew.cpp'; then $(CYGPATH_W) 'arcrenew.cpp'; else $(CYGPATH_W) '$(srcdir)/arcrenew.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcrenew-arcrenew.Tpo $(DEPDIR)/arcrenew-arcrenew.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcrenew.cpp' object='arcrenew-arcrenew.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrenew_CXXFLAGS) $(CXXFLAGS) -c -o arcrenew-arcrenew.obj `if test -f 'arcrenew.cpp'; then $(CYGPATH_W) 'arcrenew.cpp'; else $(CYGPATH_W) '$(srcdir)/arcrenew.cpp'; fi` arcrenew-utils.o: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrenew_CXXFLAGS) $(CXXFLAGS) -MT arcrenew-utils.o -MD -MP -MF $(DEPDIR)/arcrenew-utils.Tpo -c -o arcrenew-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcrenew-utils.Tpo $(DEPDIR)/arcrenew-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcrenew-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrenew_CXXFLAGS) $(CXXFLAGS) -c -o arcrenew-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arcrenew-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrenew_CXXFLAGS) $(CXXFLAGS) -MT arcrenew-utils.obj -MD -MP -MF $(DEPDIR)/arcrenew-utils.Tpo -c -o arcrenew-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcrenew-utils.Tpo $(DEPDIR)/arcrenew-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcrenew-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcrenew_CXXFLAGS) $(CXXFLAGS) -c -o arcrenew-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arcresub-arcresub.o: arcresub.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -MT arcresub-arcresub.o -MD -MP -MF $(DEPDIR)/arcresub-arcresub.Tpo -c -o arcresub-arcresub.o `test -f 'arcresub.cpp' || echo '$(srcdir)/'`arcresub.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcresub-arcresub.Tpo 
$(DEPDIR)/arcresub-arcresub.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcresub.cpp' object='arcresub-arcresub.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -c -o arcresub-arcresub.o `test -f 'arcresub.cpp' || echo '$(srcdir)/'`arcresub.cpp arcresub-arcresub.obj: arcresub.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -MT arcresub-arcresub.obj -MD -MP -MF $(DEPDIR)/arcresub-arcresub.Tpo -c -o arcresub-arcresub.obj `if test -f 'arcresub.cpp'; then $(CYGPATH_W) 'arcresub.cpp'; else $(CYGPATH_W) '$(srcdir)/arcresub.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcresub-arcresub.Tpo $(DEPDIR)/arcresub-arcresub.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcresub.cpp' object='arcresub-arcresub.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -c -o arcresub-arcresub.obj `if test -f 'arcresub.cpp'; then $(CYGPATH_W) 'arcresub.cpp'; else $(CYGPATH_W) '$(srcdir)/arcresub.cpp'; fi` arcresub-utils.o: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -MT arcresub-utils.o -MD -MP -MF $(DEPDIR)/arcresub-utils.Tpo -c -o arcresub-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcresub-utils.Tpo $(DEPDIR)/arcresub-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcresub-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -c -o arcresub-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arcresub-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -MT arcresub-utils.obj -MD -MP -MF $(DEPDIR)/arcresub-utils.Tpo -c -o arcresub-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcresub-utils.Tpo $(DEPDIR)/arcresub-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcresub-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -c -o arcresub-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arcresub-submit.o: submit.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -MT arcresub-submit.o -MD -MP -MF $(DEPDIR)/arcresub-submit.Tpo -c -o arcresub-submit.o `test -f 
'submit.cpp' || echo '$(srcdir)/'`submit.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcresub-submit.Tpo $(DEPDIR)/arcresub-submit.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='submit.cpp' object='arcresub-submit.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -c -o arcresub-submit.o `test -f 'submit.cpp' || echo '$(srcdir)/'`submit.cpp arcresub-submit.obj: submit.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -MT arcresub-submit.obj -MD -MP -MF $(DEPDIR)/arcresub-submit.Tpo -c -o arcresub-submit.obj `if test -f 'submit.cpp'; then $(CYGPATH_W) 'submit.cpp'; else $(CYGPATH_W) '$(srcdir)/submit.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcresub-submit.Tpo $(DEPDIR)/arcresub-submit.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='submit.cpp' object='arcresub-submit.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresub_CXXFLAGS) $(CXXFLAGS) -c -o arcresub-submit.obj `if test -f 'submit.cpp'; then $(CYGPATH_W) 'submit.cpp'; else $(CYGPATH_W) '$(srcdir)/submit.cpp'; fi` arcresume-arcresume.o: arcresume.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresume_CXXFLAGS) $(CXXFLAGS) -MT arcresume-arcresume.o -MD -MP -MF $(DEPDIR)/arcresume-arcresume.Tpo -c -o arcresume-arcresume.o `test -f 'arcresume.cpp' || echo '$(srcdir)/'`arcresume.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcresume-arcresume.Tpo $(DEPDIR)/arcresume-arcresume.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcresume.cpp' object='arcresume-arcresume.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresume_CXXFLAGS) $(CXXFLAGS) -c -o arcresume-arcresume.o `test -f 'arcresume.cpp' || echo '$(srcdir)/'`arcresume.cpp arcresume-arcresume.obj: arcresume.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresume_CXXFLAGS) $(CXXFLAGS) -MT arcresume-arcresume.obj -MD -MP -MF $(DEPDIR)/arcresume-arcresume.Tpo -c -o arcresume-arcresume.obj `if test -f 'arcresume.cpp'; then $(CYGPATH_W) 'arcresume.cpp'; else $(CYGPATH_W) '$(srcdir)/arcresume.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcresume-arcresume.Tpo $(DEPDIR)/arcresume-arcresume.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcresume.cpp' object='arcresume-arcresume.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresume_CXXFLAGS) $(CXXFLAGS) -c -o arcresume-arcresume.obj `if test -f 'arcresume.cpp'; then $(CYGPATH_W) 'arcresume.cpp'; else $(CYGPATH_W) '$(srcdir)/arcresume.cpp'; fi` arcresume-utils.o: utils.cpp @am__fastdepCXX_TRUE@ 
$(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresume_CXXFLAGS) $(CXXFLAGS) -MT arcresume-utils.o -MD -MP -MF $(DEPDIR)/arcresume-utils.Tpo -c -o arcresume-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcresume-utils.Tpo $(DEPDIR)/arcresume-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcresume-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresume_CXXFLAGS) $(CXXFLAGS) -c -o arcresume-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arcresume-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresume_CXXFLAGS) $(CXXFLAGS) -MT arcresume-utils.obj -MD -MP -MF $(DEPDIR)/arcresume-utils.Tpo -c -o arcresume-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcresume-utils.Tpo $(DEPDIR)/arcresume-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcresume-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcresume_CXXFLAGS) $(CXXFLAGS) -c -o arcresume-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arcstat-arcstat.o: arcstat.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcstat_CXXFLAGS) $(CXXFLAGS) -MT arcstat-arcstat.o -MD -MP -MF $(DEPDIR)/arcstat-arcstat.Tpo -c -o arcstat-arcstat.o `test -f 'arcstat.cpp' || echo '$(srcdir)/'`arcstat.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcstat-arcstat.Tpo $(DEPDIR)/arcstat-arcstat.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcstat.cpp' object='arcstat-arcstat.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcstat_CXXFLAGS) $(CXXFLAGS) -c -o arcstat-arcstat.o `test -f 'arcstat.cpp' || echo '$(srcdir)/'`arcstat.cpp arcstat-arcstat.obj: arcstat.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcstat_CXXFLAGS) $(CXXFLAGS) -MT arcstat-arcstat.obj -MD -MP -MF $(DEPDIR)/arcstat-arcstat.Tpo -c -o arcstat-arcstat.obj `if test -f 'arcstat.cpp'; then $(CYGPATH_W) 'arcstat.cpp'; else $(CYGPATH_W) '$(srcdir)/arcstat.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcstat-arcstat.Tpo $(DEPDIR)/arcstat-arcstat.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcstat.cpp' object='arcstat-arcstat.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcstat_CXXFLAGS) $(CXXFLAGS) -c -o arcstat-arcstat.obj `if test -f 'arcstat.cpp'; then $(CYGPATH_W) 'arcstat.cpp'; 
else $(CYGPATH_W) '$(srcdir)/arcstat.cpp'; fi` arcstat-utils.o: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcstat_CXXFLAGS) $(CXXFLAGS) -MT arcstat-utils.o -MD -MP -MF $(DEPDIR)/arcstat-utils.Tpo -c -o arcstat-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcstat-utils.Tpo $(DEPDIR)/arcstat-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcstat-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcstat_CXXFLAGS) $(CXXFLAGS) -c -o arcstat-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arcstat-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcstat_CXXFLAGS) $(CXXFLAGS) -MT arcstat-utils.obj -MD -MP -MF $(DEPDIR)/arcstat-utils.Tpo -c -o arcstat-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcstat-utils.Tpo $(DEPDIR)/arcstat-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcstat-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcstat_CXXFLAGS) $(CXXFLAGS) -c -o arcstat-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arcsub-arcsub.o: arcsub.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -MT arcsub-arcsub.o -MD -MP -MF $(DEPDIR)/arcsub-arcsub.Tpo -c -o arcsub-arcsub.o `test -f 'arcsub.cpp' || echo '$(srcdir)/'`arcsub.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcsub-arcsub.Tpo $(DEPDIR)/arcsub-arcsub.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcsub.cpp' object='arcsub-arcsub.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -c -o arcsub-arcsub.o `test -f 'arcsub.cpp' || echo '$(srcdir)/'`arcsub.cpp arcsub-arcsub.obj: arcsub.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -MT arcsub-arcsub.obj -MD -MP -MF $(DEPDIR)/arcsub-arcsub.Tpo -c -o arcsub-arcsub.obj `if test -f 'arcsub.cpp'; then $(CYGPATH_W) 'arcsub.cpp'; else $(CYGPATH_W) '$(srcdir)/arcsub.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcsub-arcsub.Tpo $(DEPDIR)/arcsub-arcsub.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcsub.cpp' object='arcsub-arcsub.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -c -o arcsub-arcsub.obj `if test -f 'arcsub.cpp'; then $(CYGPATH_W) 
'arcsub.cpp'; else $(CYGPATH_W) '$(srcdir)/arcsub.cpp'; fi` arcsub-utils.o: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -MT arcsub-utils.o -MD -MP -MF $(DEPDIR)/arcsub-utils.Tpo -c -o arcsub-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcsub-utils.Tpo $(DEPDIR)/arcsub-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcsub-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -c -o arcsub-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arcsub-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -MT arcsub-utils.obj -MD -MP -MF $(DEPDIR)/arcsub-utils.Tpo -c -o arcsub-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcsub-utils.Tpo $(DEPDIR)/arcsub-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcsub-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -c -o arcsub-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arcsub-submit.o: submit.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -MT arcsub-submit.o -MD -MP -MF $(DEPDIR)/arcsub-submit.Tpo -c -o arcsub-submit.o `test -f 'submit.cpp' || echo '$(srcdir)/'`submit.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcsub-submit.Tpo $(DEPDIR)/arcsub-submit.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='submit.cpp' object='arcsub-submit.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -c -o arcsub-submit.o `test -f 'submit.cpp' || echo '$(srcdir)/'`submit.cpp arcsub-submit.obj: submit.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -MT arcsub-submit.obj -MD -MP -MF $(DEPDIR)/arcsub-submit.Tpo -c -o arcsub-submit.obj `if test -f 'submit.cpp'; then $(CYGPATH_W) 'submit.cpp'; else $(CYGPATH_W) '$(srcdir)/submit.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcsub-submit.Tpo $(DEPDIR)/arcsub-submit.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='submit.cpp' object='arcsub-submit.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsub_CXXFLAGS) $(CXXFLAGS) -c -o arcsub-submit.obj `if test -f 'submit.cpp'; then $(CYGPATH_W) 
'submit.cpp'; else $(CYGPATH_W) '$(srcdir)/submit.cpp'; fi` arcsync-arcsync.o: arcsync.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsync_CXXFLAGS) $(CXXFLAGS) -MT arcsync-arcsync.o -MD -MP -MF $(DEPDIR)/arcsync-arcsync.Tpo -c -o arcsync-arcsync.o `test -f 'arcsync.cpp' || echo '$(srcdir)/'`arcsync.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcsync-arcsync.Tpo $(DEPDIR)/arcsync-arcsync.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcsync.cpp' object='arcsync-arcsync.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsync_CXXFLAGS) $(CXXFLAGS) -c -o arcsync-arcsync.o `test -f 'arcsync.cpp' || echo '$(srcdir)/'`arcsync.cpp arcsync-arcsync.obj: arcsync.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsync_CXXFLAGS) $(CXXFLAGS) -MT arcsync-arcsync.obj -MD -MP -MF $(DEPDIR)/arcsync-arcsync.Tpo -c -o arcsync-arcsync.obj `if test -f 'arcsync.cpp'; then $(CYGPATH_W) 'arcsync.cpp'; else $(CYGPATH_W) '$(srcdir)/arcsync.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcsync-arcsync.Tpo $(DEPDIR)/arcsync-arcsync.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcsync.cpp' object='arcsync-arcsync.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsync_CXXFLAGS) $(CXXFLAGS) -c -o arcsync-arcsync.obj `if test -f 'arcsync.cpp'; then $(CYGPATH_W) 'arcsync.cpp'; else $(CYGPATH_W) '$(srcdir)/arcsync.cpp'; fi` arcsync-utils.o: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsync_CXXFLAGS) $(CXXFLAGS) -MT arcsync-utils.o -MD -MP -MF $(DEPDIR)/arcsync-utils.Tpo -c -o arcsync-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcsync-utils.Tpo $(DEPDIR)/arcsync-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcsync-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsync_CXXFLAGS) $(CXXFLAGS) -c -o arcsync-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arcsync-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsync_CXXFLAGS) $(CXXFLAGS) -MT arcsync-utils.obj -MD -MP -MF $(DEPDIR)/arcsync-utils.Tpo -c -o arcsync-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcsync-utils.Tpo $(DEPDIR)/arcsync-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arcsync-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcsync_CXXFLAGS) $(CXXFLAGS) -c -o 
arcsync-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arctest-arctest.o: arctest.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -MT arctest-arctest.o -MD -MP -MF $(DEPDIR)/arctest-arctest.Tpo -c -o arctest-arctest.o `test -f 'arctest.cpp' || echo '$(srcdir)/'`arctest.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arctest-arctest.Tpo $(DEPDIR)/arctest-arctest.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arctest.cpp' object='arctest-arctest.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -c -o arctest-arctest.o `test -f 'arctest.cpp' || echo '$(srcdir)/'`arctest.cpp arctest-arctest.obj: arctest.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -MT arctest-arctest.obj -MD -MP -MF $(DEPDIR)/arctest-arctest.Tpo -c -o arctest-arctest.obj `if test -f 'arctest.cpp'; then $(CYGPATH_W) 'arctest.cpp'; else $(CYGPATH_W) '$(srcdir)/arctest.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arctest-arctest.Tpo $(DEPDIR)/arctest-arctest.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arctest.cpp' object='arctest-arctest.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -c -o arctest-arctest.obj `if test -f 'arctest.cpp'; then $(CYGPATH_W) 'arctest.cpp'; else $(CYGPATH_W) '$(srcdir)/arctest.cpp'; fi` arctest-utils.o: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -MT arctest-utils.o -MD -MP -MF $(DEPDIR)/arctest-utils.Tpo -c -o arctest-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arctest-utils.Tpo $(DEPDIR)/arctest-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arctest-utils.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -c -o arctest-utils.o `test -f 'utils.cpp' || echo '$(srcdir)/'`utils.cpp arctest-utils.obj: utils.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -MT arctest-utils.obj -MD -MP -MF $(DEPDIR)/arctest-utils.Tpo -c -o arctest-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arctest-utils.Tpo $(DEPDIR)/arctest-utils.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='utils.cpp' object='arctest-utils.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) 
$(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -c -o arctest-utils.obj `if test -f 'utils.cpp'; then $(CYGPATH_W) 'utils.cpp'; else $(CYGPATH_W) '$(srcdir)/utils.cpp'; fi` arctest-submit.o: submit.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -MT arctest-submit.o -MD -MP -MF $(DEPDIR)/arctest-submit.Tpo -c -o arctest-submit.o `test -f 'submit.cpp' || echo '$(srcdir)/'`submit.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arctest-submit.Tpo $(DEPDIR)/arctest-submit.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='submit.cpp' object='arctest-submit.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -c -o arctest-submit.o `test -f 'submit.cpp' || echo '$(srcdir)/'`submit.cpp arctest-submit.obj: submit.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -MT arctest-submit.obj -MD -MP -MF $(DEPDIR)/arctest-submit.Tpo -c -o arctest-submit.obj `if test -f 'submit.cpp'; then $(CYGPATH_W) 'submit.cpp'; else $(CYGPATH_W) '$(srcdir)/submit.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arctest-submit.Tpo $(DEPDIR)/arctest-submit.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='submit.cpp' object='arctest-submit.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arctest_CXXFLAGS) $(CXXFLAGS) -c -o arctest-submit.obj `if test -f 'submit.cpp'; then $(CYGPATH_W) 'submit.cpp'; else $(CYGPATH_W) '$(srcdir)/submit.cpp'; fi` mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man1: $(man_MANS) @$(NORMAL_INSTALL) @list1=''; \ list2='$(man_MANS)'; \ test -n "$(man1dir)" \ && test -n "`echo $$list1$$list2`" \ || exit 0; \ echo " $(MKDIR_P) '$(DESTDIR)$(man1dir)'"; \ $(MKDIR_P) "$(DESTDIR)$(man1dir)" || exit 1; \ { for i in $$list1; do echo "$$i"; done; \ if test -n "$$list2"; then \ for i in $$list2; do echo "$$i"; done \ | sed -n '/\.1[a-z]*$$/p'; \ fi; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man1dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man1dir)" || exit $$?; }; \ done; } uninstall-man1: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man1dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.1[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ dir='$(DESTDIR)$(man1dir)'; 
$(am__uninstall_files_from_dir) ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(PROGRAMS) $(MANS) installdirs: for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-binPROGRAMS clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-binPROGRAMS install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man1 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-binPROGRAMS uninstall-man uninstall-man: uninstall-man1 .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \ clean-binPROGRAMS clean-generic clean-libtool cscopelist-am \ ctags ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-binPROGRAMS \ install-data install-data-am install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-man install-man1 \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags tags-am uninstall uninstall-am \ uninstall-binPROGRAMS uninstall-man uninstall-man1 # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arckill.1.in0000644000000000000000000000013214152153376023360 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.473646996 30 ctime=1638455101.079587679 nordugrid-arc-6.14.0/src/clients/compute/arckill.1.in0000644000175000002070000001106714152153376023352 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCKILL 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arckill \- ARC Kill .SH DESCRIPTION The .B arckill command is used to kill running jobs. .SH SYNOPSIS .B arckill [options] [job ...] 
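.SH EXAMPLE
.\" Illustrative sketch only: the job ID, host and jobname below are placeholders,
.\" not output from a real computing element.
A job may be killed by the job ID printed by
.B arcsub
at submission time, by the jobname given in the job description, or all jobs at once
(the job ID, host and jobname shown here are placeholders):
.PP
.nf
arckill https://example.org:443/arex/placeholder-job-id
arckill myjob
arckill --all
.fi
.LP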
.SH OPTIONS .IP "\fB-a\fR, \fB--all\fR" all jobs .IP "\fB-j\fR, \fB--joblist\fR=\fIfilename\fR" the file storing information about active jobs (default ~/.arc/jobs.xml) .IP "\fB-i\fR, \fB--jobids-from-file\fR=\fIfilename\fR" a file containing a list of jobIDs .IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR" select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-r\fR, \fB--rejectmanagement\fR=\fIURL\fR" skip jobs which are on a computing element with a given URL .IP "\fB-s\fR, \fB--status\fR=\fIstatusstr\fR" only select jobs whose status is statusstr .IP "\fB-k\fR, \fB--keep\fR" keep files on the remote cluster (do not clean) .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .LP .SH ARGUMENTS .IP "\fBjob\fR ..." list of jobids and/or jobnames .LP .SH EXTENDED DESCRIPTION The .B arckill command kills a running job on an ARC enabled resource. The job can be referred to either by the jobid that was returned by .BR arcsub (1) at submission time or by its jobname if the job description that was submitted contained a jobname attribute. More than one jobid and/or jobname can be given. If several jobs were submitted with the same jobname all those jobs are killed. If the .B --joblist option is used the list of jobs is read from a file with the specified filename. By specifying the .B --all option, all jobs can be killed. The .B --cluster option can be used to select or reject jobs at specific clusters. See .BR arcsub (1) for a discussion of the format of arguments to this option. The .B --status option can be used to select jobs in a specific state. These options can be repeated several times. See .BR arstat (1) for possible state values. If the job was successfully killed the attepmt to remove the job from the remote cluster will be made unless the .B --keep option was specified. Depending on functionality of service job killing procedure may take time and it may be impossible to clean job immediately. In that case .B arckill will report number of cleaned jobs smaller than processed ones. Cleaning of leftover jobs may be performed by running .B arcclean later. .SH FILES .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. By using the .B --conffile option a different configuration file can be used than the default. .TP .B ~/.arc/jobs.xml This a local list of the user's active jobs. When a job is successfully submitted it is added to this list and when it is removed from the remote cluster it is removed from this list. This list is used as the list of all active jobs when the user specifies the .B --all option to the various NorduGrid ARC user interface commands. By using the .B --joblist option a different file can be used than the default. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. 
If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arcclean (1), .BR arccp (1), .BR arcget (1), .BR arcinfo (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcrenew (1), .BR arcresub (1), .BR arcresume (1), .BR arcrm (1), .BR arcstat (1), .BR arcsub (1), .BR arcsync (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcsub.cpp0000644000000000000000000000013214152153376023233 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 30 ctime=1638455101.098587964 nordugrid-arc-6.14.0/src/clients/compute/arcsub.cpp0000644000175000002070000001721014152153376023221 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "utils.h" #include "submit.h" static Arc::Logger logger(Arc::Logger::getRootLogger(), "arcsub"); int RUNMAIN(arcsub)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_SUB, istring("[filename ...]"), istring("The arcsub command is used for " "submitting jobs to Grid enabled " "computing\nresources.")); std::list params = opt.Parse(argc, argv); if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arcsub", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. 
if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); Arc::UserConfig usercfg(opt.conffile, opt.joblist); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if (opt.show_plugins) { std::list types; types.push_back("HED:SubmitterPlugin"); types.push_back("HED:ServiceEndpointRetrieverPlugin"); types.push_back("HED:TargetInformationRetrieverPlugin"); types.push_back("HED:JobDescriptionParserPlugin"); types.push_back("HED:BrokerPlugin"); showplugins("arcsub", types, logger, usercfg.Broker().first); return 0; } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); if (opt.timeout > 0) usercfg.Timeout(opt.timeout); if (!opt.broker.empty()) usercfg.Broker(opt.broker); opt.jobdescriptionfiles.insert(opt.jobdescriptionfiles.end(), params.begin(), params.end()); if (opt.jobdescriptionfiles.empty() && opt.jobdescriptionstrings.empty()) { logger.msg(Arc::ERROR, "No job description input specified"); return 1; } std::list jobdescriptionlist; // Loop over input job description files for (std::list::iterator it = opt.jobdescriptionfiles.begin(); it != opt.jobdescriptionfiles.end(); ++it) { std::ifstream descriptionfile(it->c_str()); if (!descriptionfile) { logger.msg(Arc::ERROR, "Can not open job description file: %s", *it); return 1; } descriptionfile.seekg(0, std::ios::end); std::streamsize length = descriptionfile.tellg(); descriptionfile.seekg(0, std::ios::beg); char *buffer = new char[length + 1]; descriptionfile.read(buffer, length); descriptionfile.close(); buffer[length] = '\0'; std::list jobdescs; Arc::JobDescriptionResult parseres = Arc::JobDescription::Parse((std::string)buffer, jobdescs); if (parseres) { for (std::list::iterator itJ = jobdescs.begin(); itJ != jobdescs.end(); ++itJ) { itJ->Application.DryRun = opt.dryrun; for (std::list::iterator itJAlt = itJ->GetAlternatives().begin(); itJAlt != itJ->GetAlternatives().end(); ++itJAlt) { itJAlt->Application.DryRun = opt.dryrun; } } jobdescriptionlist.insert(jobdescriptionlist.end(), jobdescs.begin(), jobdescs.end()); } else { logger.msg(Arc::ERROR, "Invalid JobDescription:"); std::cout << buffer << std::endl; delete[] buffer; std::cerr << parseres.str() << std::endl; return 1; } delete[] buffer; } //Loop over job description input strings for (std::list::iterator it = opt.jobdescriptionstrings.begin(); it != opt.jobdescriptionstrings.end(); ++it) { std::list jobdescs; Arc::JobDescriptionResult parseres = Arc::JobDescription::Parse(*it, jobdescs); if (parseres) { for (std::list::iterator itJ = jobdescs.begin(); itJ != jobdescs.end(); ++itJ) { itJ->Application.DryRun = opt.dryrun; for (std::list::iterator itJAlt = itJ->GetAlternatives().begin(); itJAlt != itJ->GetAlternatives().end(); ++itJAlt) { itJAlt->Application.DryRun = opt.dryrun; } } jobdescriptionlist.insert(jobdescriptionlist.end(), jobdescs.begin(), jobdescs.end()); } else { logger.msg(Arc::ERROR, "Invalid JobDescription:"); std::cout << *it << std::endl; std::cerr << parseres.str() << std::endl; return 1; } } // Check if X.509 credentials are needed for data staging bool need_proxy = false; for (std::list::iterator jobIt = jobdescriptionlist.begin(); jobIt != jobdescriptionlist.end(); ++jobIt) { if(jobneedsproxy(*jobIt)) { need_proxy = true; break; } } if(need_proxy || usercfg.OToken().empty()) { if (!checkproxy(usercfg)) { return 1; 
} } if ( opt.isARC6TargetSelectionOptions(logger) ) { // canonicalize endpoint types if (!opt.canonicalizeARC6InterfaceTypes(logger)) return 1; // get endpoint batches according to ARC6 target selection logic std::list > endpoint_batches; bool info_discovery = prepare_submission_endpoint_batches(usercfg, opt, endpoint_batches); // add rejectdiscovery if defined if (!opt.rejectdiscovery.empty()) usercfg.AddRejectDiscoveryURLs(opt.rejectdiscovery); // action: dumpjobdescription if (opt.dumpdescription) { if (!info_discovery) { logger.msg(Arc::ERROR,"Cannot adapt job description to the submission target when information discovery is turned off"); return 1; } // dump description only for priority submission interface, no fallbacks std::list services = endpoint_batches.front(); std::string req_sub_iface; if (!opt.submit_types.empty()) req_sub_iface = opt.submit_types.front(); return dumpjobdescription(usercfg, jobdescriptionlist, services, req_sub_iface); } // default action: start submission cycle return submit_jobs(usercfg, endpoint_batches, info_discovery, opt.jobidoutfile, jobdescriptionlist); } else { // Legacy target selection submission logic std::list services = getServicesFromUserConfigAndCommandLine(usercfg, opt.indexurls, opt.clusters, opt.requestedSubmissionInterfaceName, opt.infointerface); if (!opt.direct_submission) { usercfg.AddRejectDiscoveryURLs(opt.rejectdiscovery); } if (opt.dumpdescription) { return dumpjobdescription(usercfg, jobdescriptionlist, services, opt.requestedSubmissionInterfaceName); } return legacy_submit(usercfg, jobdescriptionlist, services, opt.requestedSubmissionInterfaceName, opt.jobidoutfile, opt.direct_submission); } } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arccat.cpp0000644000000000000000000000013214152153376023211 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.472646981 30 ctime=1638455101.087587799 nordugrid-arc-6.14.0/src/clients/compute/arccat.cpp0000644000175000002070000002017114152153376023177 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include "utils.h" int RUNMAIN(arccat)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::Logger logger(Arc::Logger::getRootLogger(), "arccat"); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_CAT, istring("[job ...]"), istring("The arccat command performs the cat " "command on the stdout, stderr or grid\n" "manager's error log of the job.")); std::list jobidentifiers = opt.Parse(argc, argv); if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arccat", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. 
if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); if (opt.show_plugins) { std::list types; types.push_back("HED:JobControllerPlugin"); showplugins("arccat", types, logger); return 0; } Arc::UserConfig usercfg(opt.conffile, opt.joblist); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if(usercfg.OToken().empty()) { if (!checkproxy(usercfg)) { return 1; } } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); for (std::list::const_iterator it = opt.jobidinfiles.begin(); it != opt.jobidinfiles.end(); it++) { if (!Arc::Job::ReadJobIDsFromFile(*it, jobidentifiers)) { logger.msg(Arc::WARNING, "Cannot read specified jobid file: %s", *it); } } if (opt.timeout > 0) usercfg.Timeout(opt.timeout); if ((!opt.joblist.empty() || !opt.status.empty()) && jobidentifiers.empty() && opt.clusters.empty()) opt.all = true; if (jobidentifiers.empty() && opt.clusters.empty() && !opt.all) { logger.msg(Arc::ERROR, "No jobs given"); return 1; } std::list selectedURLs; if (!opt.clusters.empty()) { selectedURLs = getSelectedURLsFromUserConfigAndCommandLine(usercfg, opt.clusters); } std::list rejectManagementURLs = getRejectManagementURLsFromUserConfigAndCommandLine(usercfg, opt.rejectmanagement); std::list jobs; Arc::JobInformationStorage *jobstore = createJobInformationStorage(usercfg); if (jobstore != NULL && !jobstore->IsStorageExisting()) { logger.msg(Arc::ERROR, "Job list file (%s) doesn't exist", usercfg.JobListFile()); delete jobstore; return 1; } if (jobstore == NULL || ( opt.all && !jobstore->ReadAll(jobs, rejectManagementURLs)) || (!opt.all && !jobstore->Read(jobs, jobidentifiers, selectedURLs, rejectManagementURLs))) { logger.msg(Arc::ERROR, "Unable to read job information from file (%s)", usercfg.JobListFile()); delete jobstore; return 1; } delete jobstore; if (!opt.all) { for (std::list::const_iterator itJIDAndName = jobidentifiers.begin(); itJIDAndName != jobidentifiers.end(); ++itJIDAndName) { std::cout << Arc::IString("Warning: Job not found in job list: %s", *itJIDAndName) << std::endl; } } Arc::JobSupervisor jobmaster(usercfg, jobs); jobmaster.Update(); jobmaster.SelectValid(); if (!opt.status.empty()) { jobmaster.SelectByStatus(opt.status); } jobs = jobmaster.GetSelectedJobs(); if (jobs.empty()) { std::cout << Arc::IString("No jobs") << std::endl; return 1; } std::string resourceName; if (opt.show_joblog) { resourceName = "joblog"; } else if (opt.show_stderr) { resourceName = "stderr"; } else if (!opt.show_file.empty()) { resourceName = "session file"; } else { resourceName = "stdout"; } // saving to a temp file is necessary because chunks from server // may arrive out of order std::string filename = Glib::build_filename(Glib::get_tmp_dir(), "arccat.XXXXXX"); int tmp_h = Glib::mkstemp(filename); if (tmp_h == -1) { logger.msg(Arc::INFO, "Could not create temporary file \"%s\"", filename); logger.msg(Arc::ERROR, "Cannot create output of %s for any jobs", resourceName); return 1; } Arc::URL dst("stdio:///"+Arc::tostring(tmp_h)); if (!dst) { logger.msg(Arc::ERROR, "Cannot create output of %s for any jobs", resourceName); logger.msg(Arc::INFO, "Invalid destination URL %s", dst.str()); close(tmp_h); unlink(filename.c_str()); return 1; } Arc::URL stdoutdst("stdio:///stdout"); int retval = 0; for (std::list::const_iterator it = jobs.begin(); it != 
jobs.end(); ++it) { if (!it->State || (!opt.status.empty() && std::find(opt.status.begin(), opt.status.end(), it->State()) == opt.status.end() && std::find(opt.status.begin(), opt.status.end(), it->State.GetGeneralState()) == opt.status.end())) { continue; } if (it->State == Arc::JobState::DELETED) { logger.msg(Arc::WARNING, "Job deleted: %s", it->JobID); retval = 1; continue; } // The job-log might be available before the job has started (middleware dependent). if (!opt.show_joblog && !it->State.IsFinished() && it->State != Arc::JobState::RUNNING && it->State != Arc::JobState::FINISHING) { logger.msg(Arc::WARNING, "Job has not started yet: %s", it->JobID); retval = 1; continue; } //if ((opt.show_joblog && it->LogDir.empty()) || // (!opt.show_joblog && opt.show_stderr && it->StdErr.empty()) || // (!opt.show_joblog && !opt.show_stderr && it->StdOut.empty())) { // logger.msg(Arc::ERROR, "Cannot determine the %s location: %s", resourceName, it->JobID); // retval = 1; // continue; //} Arc::Job::ResourceType resource; if (opt.show_joblog) { resource = Arc::Job::JOBLOG; } else if (opt.show_stderr) { resource = Arc::Job::STDERR; } else if (!opt.show_file.empty()) { switch((Arc::JobState::StateType)it->State) { case Arc::JobState::ACCEPTED: case Arc::JobState::PREPARING: case Arc::JobState::SUBMITTING: case Arc::JobState::HOLD: resource = Arc::Job::STAGEINDIR; break; case Arc::JobState::QUEUING: case Arc::JobState::RUNNING: case Arc::JobState::OTHER: default: resource = Arc::Job::SESSIONDIR; break; case Arc::JobState::FINISHING: case Arc::JobState::FINISHED: case Arc::JobState::FAILED: case Arc::JobState::KILLED: resource = Arc::Job::STAGEOUTDIR; break; } } else { resource = Arc::Job::STDOUT; } Arc::URL src; if(!it->GetURLToResource(resource, src)) { logger.msg(Arc::ERROR, "Cannot determine the %s location: %s", resourceName, it->JobID); retval = 1; continue; } if (!src) { logger.msg(Arc::ERROR, "Cannot create output of %s for job (%s): Invalid source %s", resourceName, it->JobID, src.str()); retval = 1; continue; } if (!opt.show_file.empty()) { src.ChangePath(src.Path()+"/"+opt.show_file); } if (!it->CopyJobFile(usercfg, src, dst)) { retval = 1; continue; } logger.msg(Arc::VERBOSE, "Catting %s for job %s", resourceName, it->JobID); // Use File DMC in order to handle proper writing to stdout (e.g. supporting redirection and piping from shell). if (!it->CopyJobFile(usercfg, dst, stdoutdst)) { retval = 1; continue; } } close(tmp_h); unlink(filename.c_str()); return retval; } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcsync.1.in0000644000000000000000000000013214152153376023401 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 30 ctime=1638455101.082587724 nordugrid-arc-6.14.0/src/clients/compute/arcsync.1.in0000644000175000002070000001055414152153376023373 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCSYNC 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcsync \- ARC Synchronize .SH DESCRIPTION The \fBarcsync\fR command synchronizes your local jobs list with the information at a given cluster or index server. 
.SH SYNOPSIS .B arcsync [options] .SH OPTIONS .IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR" select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-g\fR, \fB--index\fR=\fIname\fR" select one or more registries: \fIname\fR can be an alias for a single registry, a group of registries or a URL .IP "\fB-j\fR, \fB--joblist\fR=\fIfilename\fR" the file storing information about active jobs (default ~/.arc/jobs.xml) .IP "\fB-f\fR, \fB--force\fR" don't ask for verification .IP "\fB-T\fR, \fB--truncate\fR" shrinks the joblist before synchronizing .IP "\fB-C\fR, \fB--convert\fR" instead of pulling job information from the server, read it from the jobs storage and write it back. This option is mostly for converting the format of the jobs storage file. .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .LP .LP .SH EXTENDED DESCRIPTION The ARC user interface keeps a local list of active jobs in the user's home directory (see \fBarcsub\fR(1)). If this file is lost, or the user wants to recreate the file on a different computer, the \fBarcsync\fR command can be used to recreate the file from the information available at the specified cluster or index server. Since the information about a job retrieved from a cluster can be slightly out of date if the user very recently submitted or removed a job, a warning is issued when this command is run. The \fB--force\fR option disables this warning. The .B --cluster option can be used to select or reject clusters that should be considered in the synchronization. This option can be repeated several times. See .BR arcsub (1) for a discussion of the format of arguments to this option. If the joblist is not empty when invoking synchronization, the old jobs will be merged with the new jobs, unless the .B --truncate option is given, in which case the joblist will first be cleaned of old jobs and then the new jobs will be added. .SH FILES .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. By using the .B --conffile option a different configuration file can be used than the default. .TP .B ~/.arc/jobs.xml This is a local list of the user's active jobs. When a job is successfully submitted it is added to this list and when it is removed from the remote cluster it is removed from this list. This list is used as the list of all active jobs when the user specifies the .B --all option to the various NorduGrid ARC user interface commands. By using the .B --joblist option a different file can be used than the default. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows).
The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arcclean (1), .BR arccp (1), .BR arcget (1), .BR arcinfo (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcrenew (1), .BR arcresub (1), .BR arcresume (1), .BR arcrm (1), .BR arcstat (1), .BR arcsub (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcresume.cpp0000644000000000000000000000013214152153376023742 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 30 ctime=1638455101.096587934 nordugrid-arc-6.14.0/src/clients/compute/arcresume.cpp0000644000175000002070000001013514152153376023727 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include "utils.h" int RUNMAIN(arcresume)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::Logger logger(Arc::Logger::getRootLogger(), "arcresume"); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_RESUME, istring("[job ...]")); std::list jobidentifiers = opt.Parse(argc, argv); if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arcresume", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. 
if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); Arc::UserConfig usercfg(opt.conffile, opt.joblist); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if(usercfg.OToken().empty()) { if (!checkproxy(usercfg)) { return 1; } } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); if (opt.show_plugins) { std::list types; types.push_back("HED:JobControllerPlugin"); showplugins("arcresume", types, logger); return 0; } for (std::list::const_iterator it = opt.jobidinfiles.begin(); it != opt.jobidinfiles.end(); ++it) { if (!Arc::Job::ReadJobIDsFromFile(*it, jobidentifiers)) { logger.msg(Arc::WARNING, "Cannot read specified jobid file: %s", *it); } } if (opt.timeout > 0) usercfg.Timeout(opt.timeout); if ((!opt.joblist.empty() || !opt.status.empty()) && jobidentifiers.empty() && opt.clusters.empty()) opt.all = true; if (jobidentifiers.empty() && opt.clusters.empty() && !opt.all) { logger.msg(Arc::ERROR, "No jobs given"); return 1; } std::list selectedURLs; if (!opt.clusters.empty()) { selectedURLs = getSelectedURLsFromUserConfigAndCommandLine(usercfg, opt.clusters); } std::list rejectManagementURLs = getRejectDiscoveryURLsFromUserConfigAndCommandLine(usercfg, opt.rejectdiscovery); std::list jobs; Arc::JobInformationStorage *jobstore = createJobInformationStorage(usercfg); if (jobstore != NULL && !jobstore->IsStorageExisting()) { logger.msg(Arc::ERROR, "Job list file (%s) doesn't exist", usercfg.JobListFile()); delete jobstore; return 1; } if (jobstore == NULL || ( opt.all && !jobstore->ReadAll(jobs, rejectManagementURLs)) || (!opt.all && !jobstore->Read(jobs, jobidentifiers, selectedURLs, rejectManagementURLs))) { logger.msg(Arc::ERROR, "Unable to read job information from file (%s)", usercfg.JobListFile()); delete jobstore; return 1; } delete jobstore; if (!opt.all) { for (std::list::const_iterator itJIDAndName = jobidentifiers.begin(); itJIDAndName != jobidentifiers.end(); ++itJIDAndName) { std::cout << Arc::IString("Warning: Job not found in job list: %s", *itJIDAndName) << std::endl; } } Arc::JobSupervisor jobmaster(usercfg, jobs); jobmaster.Update(); jobmaster.SelectValid(); if (!opt.status.empty()) { jobmaster.SelectByStatus(opt.status); } if (jobmaster.GetSelectedJobs().empty()) { std::cout << Arc::IString("No jobs") << std::endl; return 1; } int retval = (int)!jobmaster.Resume(); std::cout << Arc::IString("Jobs processed: %d, resumed: %d", jobmaster.GetIDsProcessed().size()+jobmaster.GetIDsNotProcessed().size(), jobmaster.GetIDsProcessed().size()) << std::endl; return retval; } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcstat.cpp0000644000000000000000000000013114152153376023414 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 29 ctime=1638455101.09758795 nordugrid-arc-6.14.0/src/clients/compute/arcstat.cpp0000644000175000002070000001470714152153376023413 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include "utils.h" int RUNMAIN(arcstat)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::Logger logger(Arc::Logger::getRootLogger(), "arcstat"); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); 
Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_STAT, istring("[job ...]"), istring("The arcstat command is used for " "obtaining the status of jobs that have\n" "been submitted to Grid enabled resources.")); std::list jobidentifiers = opt.Parse(argc, argv); if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arcstat", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); if (opt.show_plugins) { std::list types; types.push_back("HED:JobControllerPlugin"); showplugins("arcstat", types, logger); return 0; } Arc::UserConfig usercfg(opt.conffile, opt.joblist); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); for (std::list::const_iterator it = opt.jobidinfiles.begin(); it != opt.jobidinfiles.end(); ++it) { if (!Arc::Job::ReadJobIDsFromFile(*it, jobidentifiers)) { logger.msg(Arc::WARNING, "Cannot read specified jobid file: %s", *it); } } if (opt.timeout > 0) usercfg.Timeout(opt.timeout); if (!opt.sort.empty() && !opt.rsort.empty()) { logger.msg(Arc::ERROR, "The 'sort' and 'rsort' flags cannot be specified at the same time."); return 1; } if (!opt.rsort.empty()) { opt.sort = opt.rsort; } typedef bool (*JobSorting)(const Arc::Job&, const Arc::Job&); std::map orderings; orderings["jobid"] = &Arc::Job::CompareJobID; orderings["submissiontime"] = &Arc::Job::CompareSubmissionTime; orderings["jobname"] = &Arc::Job::CompareJobName; if (!opt.sort.empty() && orderings.find(opt.sort) == orderings.end()) { std::cerr << "Jobs cannot be sorted by \"" << opt.sort << "\", the following orderings are supported:" << std::endl; for (std::map::const_iterator it = orderings.begin(); it != orderings.end(); ++it) std::cerr << it->first << std::endl; return 1; } if ((!opt.joblist.empty() || !opt.status.empty()) && jobidentifiers.empty() && opt.clusters.empty()) opt.all = true; if (jobidentifiers.empty() && opt.clusters.empty() && !opt.all) { logger.msg(Arc::ERROR, "No jobs given"); return 1; } std::list selectedURLs; if (!opt.clusters.empty()) { selectedURLs = getSelectedURLsFromUserConfigAndCommandLine(usercfg, opt.clusters); } std::list rejectManagementURLs = getRejectManagementURLsFromUserConfigAndCommandLine(usercfg, opt.rejectmanagement); std::list jobs; Arc::JobInformationStorage *jobstore = createJobInformationStorage(usercfg); if (jobstore != NULL && !jobstore->IsStorageExisting()) { logger.msg(Arc::ERROR, "Job list file (%s) doesn't exist", usercfg.JobListFile()); delete jobstore; return 1; } if (jobstore == NULL || ( opt.all && !jobstore->ReadAll(jobs, rejectManagementURLs)) || (!opt.all && !jobstore->Read(jobs, jobidentifiers, selectedURLs, rejectManagementURLs))) { logger.msg(Arc::ERROR, "Unable to read job information from file (%s)", usercfg.JobListFile()); delete jobstore; return 1; } delete jobstore; if (!opt.all) { for (std::list::const_iterator itJIDAndName = jobidentifiers.begin(); itJIDAndName != jobidentifiers.end(); ++itJIDAndName) { std::cout << Arc::IString("Warning: Job not found in job list: %s", *itJIDAndName) << 
std::endl; } } Arc::JobSupervisor jobmaster(usercfg, jobs); jobmaster.Update(); unsigned int queried_num = jobmaster.GetAllJobs().size(); if (!opt.status.empty()) { jobmaster.SelectByStatus(opt.status); } if (!opt.show_unavailable) { jobmaster.SelectValid(); } jobs = jobmaster.GetSelectedJobs(); if (queried_num == 0) { std::cout << Arc::IString("No jobs found, try later") << std::endl; return 1; } std::vector jobsSortable(jobs.begin(), jobs.end()); if (!opt.sort.empty()) { opt.rsort.empty() ? std::sort(jobsSortable.begin(), jobsSortable.end(), orderings[opt.sort]) : std::sort(jobsSortable.rbegin(), jobsSortable.rend(), orderings[opt.sort]); } if (!opt.show_json) { for (std::vector::const_iterator it = jobsSortable.begin(); it != jobsSortable.end(); ++it) { // Option 'long' (longlist) takes precedence over option 'print-jobids' (printids) if (opt.longlist || !opt.printids) { it->SaveToStream(std::cout, opt.longlist); } else { std::cout << it->JobID << std::endl; } } } else { std::cout << "\"jobs\": ["; for (std::vector::const_iterator it = jobsSortable.begin(); it != jobsSortable.end(); ++it) { std::cout << (it==jobsSortable.begin()?"":",") << std::endl; if (opt.longlist || !opt.printids) { it->SaveToStreamJSON(std::cout, opt.longlist); } else { std::cout << "\"" << it->JobID << "\""; } } std::cout << std::endl; std::cout << "]" << std::endl; } if (opt.show_unavailable) { jobmaster.SelectValid(); } unsigned int returned_info_num = jobmaster.GetSelectedJobs().size(); if (!opt.show_json) { std::cout << Arc::IString("Status of %d jobs was queried, %d jobs returned information", queried_num, returned_info_num) << std::endl; } return 0; } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcget.cpp0000644000000000000000000000013214152153376023221 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.473646996 30 ctime=1638455101.090587844 nordugrid-arc-6.14.0/src/clients/compute/arcget.cpp0000644000175000002070000001505314152153376023212 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include "utils.h" int RUNMAIN(arcget)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::Logger logger(Arc::Logger::getRootLogger(), "arcget"); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_GET, istring("[job ...]"), istring("The arcget command is used for " "retrieving the results from a job.")); std::list jobidentifiers = opt.Parse(argc, argv); if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arcget", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. 
if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); if (opt.show_plugins) { std::list types; types.push_back("HED:JobControllerPlugin"); showplugins("arcget", types, logger); return 0; } Arc::UserConfig usercfg(opt.conffile, opt.joblist); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if(usercfg.OToken().empty()) { if (!checkproxy(usercfg)) { return 1; } } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); if (opt.downloaddir.empty()) { if (!usercfg.JobDownloadDirectory().empty()) { opt.downloaddir = usercfg.JobDownloadDirectory(); logger.msg(Arc::INFO, "Job download directory from user configuration file: %s", opt.downloaddir); } else { logger.msg(Arc::INFO, "Job download directory will be created in present working directory."); } } else { logger.msg(Arc::INFO, "Job download directory: %s", opt.downloaddir); } for (std::list::const_iterator it = opt.jobidinfiles.begin(); it != opt.jobidinfiles.end(); ++it) { if (!Arc::Job::ReadJobIDsFromFile(*it, jobidentifiers)) { logger.msg(Arc::WARNING, "Cannot read specified jobid file: %s", *it); } } if (opt.timeout > 0) usercfg.Timeout(opt.timeout); if ((!opt.joblist.empty() || !opt.status.empty()) && jobidentifiers.empty() && opt.clusters.empty()) opt.all = true; if (jobidentifiers.empty() && opt.clusters.empty() && !opt.all) { logger.msg(Arc::ERROR, "No jobs given"); return 1; } std::list selectedURLs; if (!opt.clusters.empty()) { selectedURLs = getSelectedURLsFromUserConfigAndCommandLine(usercfg, opt.clusters); } std::list rejectManagementURLs = getRejectManagementURLsFromUserConfigAndCommandLine(usercfg, opt.rejectmanagement); std::list jobs; Arc::JobInformationStorage *jobstore = createJobInformationStorage(usercfg); if (jobstore != NULL && !jobstore->IsStorageExisting()) { logger.msg(Arc::ERROR, "Job list file (%s) doesn't exist", usercfg.JobListFile()); delete jobstore; return 1; } if (jobstore == NULL || ( opt.all && !jobstore->ReadAll(jobs, rejectManagementURLs)) || (!opt.all && !jobstore->Read(jobs, jobidentifiers, selectedURLs, rejectManagementURLs))) { logger.msg(Arc::ERROR, "Unable to read job information from file (%s)", usercfg.JobListFile()); delete jobstore; return 1; } if (!opt.all) { for (std::list::const_iterator itJIdentifier = jobidentifiers.begin(); itJIdentifier != jobidentifiers.end(); ++itJIdentifier) { std::cout << Arc::IString("Warning: Job not found in job list: %s", *itJIdentifier) << std::endl; } } Arc::JobSupervisor jobmaster(usercfg, jobs); jobmaster.Update(); jobmaster.SelectValid(); if (!opt.status.empty()) { jobmaster.SelectByStatus(opt.status); } if (jobmaster.GetSelectedJobs().empty()) { std::cout << Arc::IString("No jobs") << std::endl; delete jobstore; return 1; } if(!opt.downloaddir.empty()) { Arc::URL dirpath(opt.downloaddir); if(dirpath.Protocol() == "file") { if(!Arc::DirCreate(dirpath.Path(),S_IRWXU,true)) { std::string errstr = Arc::StrError(); logger.msg(Arc::ERROR, "Unable to create directory for storing results (%s) - %s", dirpath.Path(), errstr); return 1; } } } std::list downloaddirectories; int retval = (int)!jobmaster.Retrieve(opt.downloaddir, opt.usejobname, opt.forcedownload, downloaddirectories); for (std::list::const_iterator it = downloaddirectories.begin(); it != downloaddirectories.end(); ++it) { std::cout << Arc::IString("Results 
stored at: %s", *it) << std::endl; } unsigned int processed_num = jobmaster.GetIDsProcessed().size(); unsigned int retrieved_num = downloaddirectories.size(); unsigned int cleaned_num = 0; if (!opt.keep) { std::list retrieved = jobmaster.GetIDsProcessed(); // No need to clean selection because retrieved is subset of selected jobmaster.SelectByID(retrieved); if(!jobmaster.Clean()) { std::cout << Arc::IString("Warning: Some jobs were not removed from server") << std::endl; std::cout << Arc::IString(" Use arcclean to remove retrieved jobs from job list", usercfg.JobListFile()) << std::endl; retval = 1; } cleaned_num = jobmaster.GetIDsProcessed().size(); if (!jobstore->Remove(jobmaster.GetIDsProcessed())) { std::cout << Arc::IString("Warning: Failed removing jobs from file (%s)", usercfg.JobListFile()) << std::endl; std::cout << Arc::IString(" Use arcclean to remove retrieved jobs from job list", usercfg.JobListFile()) << std::endl; retval = 1; } std::cout << Arc::IString("Jobs processed: %d, successfully retrieved: %d, successfully cleaned: %d", processed_num, retrieved_num, cleaned_num) << std::endl; } else { std::cout << Arc::IString("Jobs processed: %d, successfully retrieved: %d", processed_num, retrieved_num) << std::endl; } delete jobstore; return retval; } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/submit.cpp0000644000000000000000000000013214152153376023257 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 30 ctime=1638455101.095587919 nordugrid-arc-6.14.0/src/clients/compute/submit.cpp0000644000175000002070000004476514152153376023264 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "submit.h" static Arc::Logger logger(Arc::Logger::getRootLogger(), "submit"); void HandleSubmittedJobs::addEntity(const Arc::Job& j) { std::cout << Arc::IString("Job submitted with jobid: %s", j.JobID) << std::endl; submittedJobs.push_back(j); } void HandleSubmittedJobs::write() const { if (!jobidfile.empty() && !Arc::Job::WriteJobIDsToFile(submittedJobs, jobidfile)) { logger.msg(Arc::WARNING, "Cannot write job IDs to file (%s)", jobidfile); } Arc::JobInformationStorage* jobStore = createJobInformationStorage(uc); if (jobStore == NULL || !jobStore->Write(submittedJobs)) { if (jobStore == NULL) { logger.msg(Arc::WARNING, "Unable to open job list file (%s), unknown format", uc.JobListFile()); } else { logger.msg(Arc::WARNING, "Failed to write job information to database (%s)", uc.JobListFile()); } logger.msg(Arc::WARNING, "To recover missing jobs, run arcsync"); } logger.msg(Arc::DEBUG, "Record about new job successfully added to the database (%s)", uc.JobListFile()); delete jobStore; } void HandleSubmittedJobs::printsummary(const std::list& originalDescriptions, const std::list& notsubmitted) const { if (originalDescriptions.size() > 1) { std::cout << std::endl << Arc::IString("Job submission summary:") << std::endl; std::cout << "-----------------------" << std::endl; std::cout << Arc::IString("%d of %d jobs were submitted", submittedJobs.size(), submittedJobs.size()+notsubmitted.size()) << std::endl; if (!notsubmitted.empty()) { std::cout << std::endl << Arc::IString("The following jobs were not submitted:") << std::endl; int jobnr = 1; for (std::list::const_iterator it = notsubmitted.begin(); it != notsubmitted.end(); ++it) { 
std::cout << " * " << Arc::IString("Job nr.") << " " << jobnr << ":" << std::endl; (*it)->SaveToStream(std::cout, "userlong"); jobnr++; } } } } int process_submission_status(Arc::SubmissionStatus status, const Arc::UserConfig& usercfg) { if (status.isSet(Arc::SubmissionStatus::BROKER_PLUGIN_NOT_LOADED)) { std::cerr << Arc::IString("ERROR: Unable to load broker %s", usercfg.Broker().first) << std::endl; return 2; } if (status.isSet(Arc::SubmissionStatus::NO_SERVICES)) { std::cerr << Arc::IString("ERROR: Job submission aborted because no resource returned any information") << std::endl; return 2; } if (status.isSet(Arc::SubmissionStatus::DESCRIPTION_NOT_SUBMITTED)) { std::cerr << Arc::IString("ERROR: One or multiple job descriptions was not submitted.") << std::endl; return 1; } return 0; } void check_missing_plugins(Arc::Submitter s, int is_error) { bool gridFTPJobPluginFailed = false; for (std::map::const_iterator it = s.GetEndpointSubmissionStatuses().begin(); it != s.GetEndpointSubmissionStatuses().end(); ++it) { if (it->first.InterfaceName == "org.nordugrid.gridftpjob" && it->second == Arc::EndpointSubmissionStatus::NOPLUGIN) { gridFTPJobPluginFailed = true; } } if (gridFTPJobPluginFailed) { Arc::LogLevel level = (is_error ? Arc::ERROR : Arc::INFO); std::string indent = (is_error ? " " : " "); logger.msg(level, "A computing resource using the GridFTP interface was requested, but\n" "%sthe corresponding plugin could not be loaded. Is the plugin installed?\n" "%sIf not, please install the package 'nordugrid-arc-plugins-globus'.\n" "%sDepending on your type of installation the package name might differ.", indent, indent, indent); } // TODO: What to do when failing to load other plugins. } int legacy_submit(const Arc::UserConfig& usercfg, const std::list& jobdescriptionlist, std::list& services, const std::string& requestedSubmissionInterface, const std::string& jobidfile, bool direct_submission) { HandleSubmittedJobs hsj(jobidfile, usercfg); Arc::Submitter s(usercfg); s.addConsumer(hsj); Arc::SubmissionStatus status; if (!direct_submission) { std::list rsi; if (!requestedSubmissionInterface.empty()) rsi.push_back(requestedSubmissionInterface); status = s.BrokeredSubmit(services, jobdescriptionlist, rsi); } else { if (!requestedSubmissionInterface.empty()) { for (std::list::iterator it = services.begin(); it != services.end();) { // Remove endpoint - it has an unrequested interface name. 
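      // (std::list::erase returns the iterator following the removed element, so erasing
      //  while iterating is safe here; endpoints that are kept get the requested submission
      //  interface name assigned just below before submission.)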
if (!it->InterfaceName.empty() && it->InterfaceName != requestedSubmissionInterface) { logger.msg(Arc::INFO, "Removing endpoint %s: It has an unrequested interface (%s).", it->URLString, it->InterfaceName); it = services.erase(it); continue; } it->InterfaceName = requestedSubmissionInterface; ++it; } } status = s.Submit(services, jobdescriptionlist); } hsj.write(); int error_check = process_submission_status(status, usercfg); if (error_check == 2) return 1; if (status.isSet(Arc::SubmissionStatus::SUBMITTER_PLUGIN_NOT_LOADED)) check_missing_plugins(s, error_check); hsj.printsummary(jobdescriptionlist, s.GetDescriptionsNotSubmitted()); return error_check; } int dumpjobdescription(const Arc::UserConfig& usercfg, const std::list& jobdescriptionlist, const std::list& services, const std::string& requestedSubmissionInterface) { int retval = 0; std::set preferredInterfaceNames; if (usercfg.InfoInterface().empty()) { preferredInterfaceNames.insert("org.nordugrid.ldapglue2"); } else { preferredInterfaceNames.insert(usercfg.InfoInterface()); } Arc::ComputingServiceUniq csu; Arc::ComputingServiceRetriever csr(usercfg, std::list(), usercfg.RejectDiscoveryURLs(), preferredInterfaceNames); csr.addConsumer(csu); for (std::list::const_iterator it = services.begin(); it != services.end(); ++it) { csr.addEndpoint(*it); } csr.wait(); std::list CEs = csu.getServices(); if (CEs.empty()) { std::cout << Arc::IString("Unable to adapt job description to any resource, no resource information could be obtained.") << std::endl; std::cout << Arc::IString("Original job description is listed below:") << std::endl; for (std::list::const_iterator it = jobdescriptionlist.begin(); it != jobdescriptionlist.end(); ++it) { std::string descOutput; it->UnParse(descOutput, it->GetSourceLanguage()); std::cout << descOutput << std::endl; } return 1; } Arc::Broker broker(usercfg, usercfg.Broker().first); if (!broker.isValid(false)) { logger.msg(Arc::ERROR, "Dumping job description aborted: Unable to load broker %s", usercfg.Broker().first); return 1; } Arc::ExecutionTargetSorter ets(broker, CEs); std::list::const_iterator itJAlt; // Iterator to use for alternative job descriptions. 
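  // The loop below walks every input job description; for each one the broker-sorted targets
  // are tried in order, and if the description cannot be prepared or unparsed for any target
  // while alternative descriptions are declared, those alternatives are tried next before a
  // failure is reported.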
for (std::list::const_iterator itJ = jobdescriptionlist.begin(); itJ != jobdescriptionlist.end(); ++itJ) { const Arc::JobDescription* currentJobDesc = &*itJ; bool descriptionDumped = false; do { Arc::JobDescription jobdescdump(*currentJobDesc); ets.set(jobdescdump); for (ets.reset(); !ets.endOfList(); ets.next()) { if(!requestedSubmissionInterface.empty() && ets->ComputingEndpoint->InterfaceName != requestedSubmissionInterface) continue; if (!jobdescdump.Prepare(*ets)) { logger.msg(Arc::INFO, "Unable to prepare job description according to needs of the target resource (%s).", ets->ComputingEndpoint->URLString); continue; } std::string jobdesclang = "emies:adl"; if (ets->ComputingEndpoint->InterfaceName == "org.nordugrid.gridftpjob") { jobdesclang = "nordugrid:xrsl"; } else if (ets->ComputingEndpoint->InterfaceName == "org.ogf.glue.emies.activitycreation") { jobdesclang = "emies:adl"; } else if (ets->ComputingEndpoint->InterfaceName == "org.nordugrid.internal") { jobdesclang = "emies:adl"; } std::string jobdesc; if (!jobdescdump.UnParse(jobdesc, jobdesclang)) { logger.msg(Arc::INFO, "An error occurred during the generation of job description to be sent to %s", ets->ComputingEndpoint->URLString); continue; } std::cout << Arc::IString("Job description to be sent to %s:", ets->AdminDomain->Name) << std::endl; std::cout << jobdesc << std::endl; descriptionDumped = true; break; } if (!descriptionDumped && itJ->HasAlternatives()) { // Alternative job descriptions. if (currentJobDesc == &*itJ) { itJAlt = itJ->GetAlternatives().begin(); } else { ++itJAlt; } currentJobDesc = &*itJAlt; } } while (!descriptionDumped && itJ->HasAlternatives() && itJAlt != itJ->GetAlternatives().end()); if (ets.endOfList()) { std::cout << Arc::IString("Unable to prepare job description according to needs of the target resource.") << std::endl; retval = 1; } } //end loop over all job descriptions return retval; } bool prepare_submission_endpoint_batches(const Arc::UserConfig& usercfg, const ClientOptions& opt, std::list >& endpoint_batches) { bool info_discovery = true; // Computing element direct targets for (std::list::const_iterator it = opt.computing_elements.begin(); it != opt.computing_elements.end(); ++it) { if (opt.info_types.empty()) { std::list endpoints; // any interfaces can be used: start with discovery if (opt.submit_types.empty()) { Arc::Endpoint service(*it); service.Capability.insert(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::COMPUTINGINFO)); service.RequestedSubmissionInterfaceName = ""; endpoints.push_back(service); } else { // discovery is disabled - submit directly in the defined interface order info_discovery = false; for (std::list::const_iterator sit = opt.submit_types.begin(); sit != opt.submit_types.end(); ++sit) { Arc::Endpoint service(*it); service.Capability.insert(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::JOBSUBMIT)); service.Capability.insert(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::JOBCREATION)); service.InterfaceName = *sit; endpoints.push_back(service); } } endpoint_batches.push_back(endpoints); // add infointerfaces of all defined types when discovery is used } else { for (std::list::const_iterator sit = opt.submit_types.begin(); sit != opt.submit_types.end(); ++sit) { std::list endpoints; for (std::list::const_iterator iit = opt.info_types.begin(); iit != opt.info_types.end(); ++iit) { Arc::Endpoint service(*it); service.Capability.insert(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::COMPUTINGINFO)); service.InterfaceName = *iit; 
service.RequestedSubmissionInterfaceName = *sit; endpoints.push_back(service); } endpoint_batches.push_back(endpoints); } } } // Query the registries for available endpoints if (!opt.registries.empty()) { Arc::EntityContainer registry_endpoints; // Get all service endpoints regardless of capabilities std::list rejectDiscoveryURLs = getRejectDiscoveryURLsFromUserConfigAndCommandLine(usercfg, opt.rejectdiscovery); std::list capabilityFilter; Arc::ServiceEndpointRetriever ser(usercfg, Arc::EndpointQueryOptions( true, capabilityFilter, rejectDiscoveryURLs)); ser.addConsumer(registry_endpoints); for (std::list::const_iterator it = opt.registries.begin(); it != opt.registries.end(); ++it) { Arc::Endpoint registry(*it); registry.Capability.insert(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::REGISTRY)); ser.addEndpoint(registry); } ser.wait(); // Loop over endpoints returned by registry and match against interface types if ( !opt.info_types.empty() ) { for (std::list::const_iterator sit = opt.submit_types.begin(); sit != opt.submit_types.end(); ++sit) { std::list endpoints; for (Arc::EntityContainer::iterator eit = registry_endpoints.begin(); eit != registry_endpoints.end(); ++eit) { for (std::list::const_iterator iit = opt.info_types.begin(); iit != opt.info_types.end(); ++iit) { if ( eit->InterfaceName == *iit ) { Arc::Endpoint service(*eit); logger.msg(Arc::INFO, "Service endpoint %s (type %s) added to the list for resource discovery", eit->URLString, eit->InterfaceName); service.RequestedSubmissionInterfaceName = *sit; endpoints.push_back(service); } } } if (!endpoints.empty()) { endpoint_batches.push_back(endpoints); } else { logger.msg(Arc::WARNING, "There are no endpoints in registry that match requested info endpoint type"); } } // endpoint types was not requested at all } else if ( opt.submit_types.empty() ) { // try all infodiscovery endpoints but prioritize the interfaces in the following order std::list info_priority; info_priority.push_back("org.ogf.glue.emies.resourceinfo"); info_priority.push_back("org.nordugrid.arcrest"); info_priority.push_back("org.nordugrid.ldapglue2"); info_priority.push_back("org.nordugrid.ldapng"); for (std::list::const_iterator iit = info_priority.begin(); iit != info_priority.end(); ++iit) { std::list endpoints; for (Arc::EntityContainer::iterator eit = registry_endpoints.begin(); eit != registry_endpoints.end(); ++eit) { if ( eit->InterfaceName == *iit ) { Arc::Endpoint service(*eit); service.RequestedSubmissionInterfaceName = ""; endpoints.push_back(service); logger.msg(Arc::INFO, "Service endpoint %s (type %s) added to the list for resource discovery", eit->URLString, eit->InterfaceName); } } if (!endpoints.empty()) endpoint_batches.push_back(endpoints); } // it was requested to disable infodiscovery for targets } else { info_discovery = false; std::list endpoints; for (std::list::const_iterator sit = opt.submit_types.begin(); sit != opt.submit_types.end(); ++sit) { for (Arc::EntityContainer::iterator eit = registry_endpoints.begin(); eit != registry_endpoints.end(); ++eit) { if ( eit->InterfaceName == *sit ) { Arc::Endpoint service(*eit); service.Capability.clear(); service.Capability.insert(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::JOBSUBMIT)); service.Capability.insert(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::JOBCREATION)); service.InterfaceName = *sit; endpoints.push_back(service); logger.msg(Arc::INFO, "Service endpoint %s (type %s) added to the list for direct submission", eit->URLString, eit->InterfaceName); } } } if 
(!endpoints.empty()) { endpoint_batches.push_back(endpoints); } else { logger.msg(Arc::WARNING, "There are no endpoints in registry that match requested submission endpoint type"); } } } return info_discovery; } int submit_jobs(const Arc::UserConfig& usercfg, const std::list >& endpoint_batches, bool info_discovery, const std::string& jobidfile, const std::list& jobdescriptionlist) { HandleSubmittedJobs hsj(jobidfile, usercfg); Arc::Submitter submitter(usercfg); submitter.addConsumer(hsj); std::list w_jobdescriptionlist(jobdescriptionlist); int error_check = 0; for (std::list >::const_iterator it = endpoint_batches.begin(); it != endpoint_batches.end(); ++it) { Arc::SubmissionStatus status; if (info_discovery) { status = submitter.BrokeredSubmit(*it, w_jobdescriptionlist); } else { status = submitter.Submit(*it, w_jobdescriptionlist); } hsj.write(); error_check = process_submission_status(status, usercfg); if (error_check == 2) return 1; if (submitter.GetDescriptionsNotSubmitted().empty()) break; if (status.isSet(Arc::SubmissionStatus::SUBMITTER_PLUGIN_NOT_LOADED)) check_missing_plugins(submitter, error_check); // remove already submitted jobs from description list std::list failedjd = submitter.GetDescriptionsNotSubmitted(); std::list::iterator itOrig = w_jobdescriptionlist.begin(); while ( itOrig != w_jobdescriptionlist.end() ) { bool is_failedjd = false; for (std::list::const_iterator itFailed = failedjd.begin(); itFailed != failedjd.end(); ++itFailed) { if (&(*itOrig) == *itFailed) { is_failedjd = true; break; } } if (is_failedjd) { ++itOrig; continue; } w_jobdescriptionlist.erase(itOrig++); } } hsj.printsummary(jobdescriptionlist, submitter.GetDescriptionsNotSubmitted()); return error_check; } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcrenew.1.in0000644000000000000000000000013214152153376023545 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.473646996 30 ctime=1638455101.083587739 nordugrid-arc-6.14.0/src/clients/compute/arcrenew.1.in0000644000175000002070000001026714152153376023540 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCRENEW 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcrenew \- ARC Proxy Renewal .SH DESCRIPTION The .B arcrenew command is used for renewing the proxy of jobs that have been submitted to grid enabled resources. .SH SYNOPSIS .B arcrenew [options] [job ...] .SH OPTIONS .IP "\fB-a\fR, \fB--all\fR" all jobs .IP "\fB-j\fR, \fB--joblist\fR=\fIfilename\fR" the file storing information about active jobs (default ~/.arc/jobs.xml) .IP "\fB-i\fR, \fB--jobids-from-file\fR=\fIfilename\fR" a file containing a list of jobIDs .IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR" select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-r\fR, \fB--rejectmanagement\fR=\fIURL\fR" skip jobs which are on a computing element with a given URL .IP "\fB-s\fR, \fB--status\fR=\fIstatusstr\fR" only select jobs whose status is statusstr .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .LP .SH ARGUMENTS .IP "\fBjob\fR ..." 
list of jobids and/or jobnames .LP .SH EXTENDED DESCRIPTION The .B arcrenew command renews the proxy of a job submitted to an ARC enabled resource. The job can be referred to either by the jobid that was returned by .BR arcsub (1) at submission time or by its jobname if the job description that was submitted contained a jobname attribute. More than one jobid and/or jobname can be given. If several jobs were submitted with the same jobname, the proxies of all those jobs are renewed. If the .B --joblist option is used, the list of jobs is read from a file with the specified filename. By specifying the .B --all option, the proxies of all active jobs will be renewed. The .B --cluster option can be used to select or reject jobs at specific clusters. See .BR arcsub (1) for a discussion of the format of arguments to this option. The .B --status option can be used to select jobs in a specific state. These options can be repeated several times. See .BR arcstat (1) for possible state values. .SH FILES .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. By using the .B --conffile option a different configuration file can be used than the default. .TP .B ~/.arc/jobs.xml This is a local list of the user's active jobs. When a job is successfully submitted it is added to this list and when it is removed from the remote cluster it is removed from this list. This list is used as the list of all active jobs when the user specifies the .B --all option to the various NorduGrid ARC user interface commands. By using the .B --joblist option a different file can be used than the default. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC.
Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arcclean (1), .BR arccp (1), .BR arcget (1), .BR arcinfo (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcresub (1), .BR arcresume (1), .BR arcrm (1), .BR arcstat (1), .BR arcsub (1), .BR arcsync (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arctest.cpp0000644000000000000000000000013214152153376023421 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 30 ctime=1638455101.100587995 nordugrid-arc-6.14.0/src/clients/compute/arctest.cpp0000644000175000002070000003564214152153376023420 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "utils.h" #include "submit.h" static Arc::Logger logger(Arc::Logger::getRootLogger(), "arcsub"); int test(const Arc::UserConfig& usercfg, Arc::ExecutionTargetSorter& ets, const Arc::JobDescription& testJob, const std::string& jobidfile); int dumpjobdescription_arctest_legacy(const Arc::UserConfig& usercfg, Arc::ExecutionTargetSorter& ets, const Arc::JobDescription& testJob); static bool get_hash_value(const Arc::Credential& c, std::string& hash_str); int RUNMAIN(arctest)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_TEST, istring(" "), istring("The arctest command is used for " "testing clusters as resources.")); std::list params = opt.Parse(argc, argv); if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arctest", VERSION) << std::endl; return 0; } if ((opt.testjobid == -1) && (!opt.show_credentials) && (!opt.show_plugins)) { std::cout << Arc::IString("Nothing to do:\n" "you have to either specify a test job id with -J (--job)\n" "or query information about the certificates with -E (--certificate)\n"); return 0; } if ((opt.testjobid == 1) && (!opt.runtime)) { std::cout << Arc::IString("For the 1st test job " "you also have to specify a runtime value with -r (--runtime) option."); return 0; } // If debug is specified as argument, it should be set before loading the configuration. 
if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); Arc::UserConfig usercfg(opt.conffile, opt.joblist); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if (opt.show_plugins) { std::list types; types.push_back("HED:SubmitterPlugin"); types.push_back("HED:ServiceEndpointRetrieverPlugin"); types.push_back("HED:TargetInformationRetrieverPlugin"); types.push_back("HED:JobDescriptionParserPlugin"); types.push_back("HED:BrokerPlugin"); showplugins("arctest", types, logger, usercfg.Broker().first); return 0; } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); if (opt.show_credentials) { const Arc::Time now; std::cout << Arc::IString("Certificate information:") << std::endl; std::string certificate_issuer = ""; if (usercfg.CertificatePath().empty()) { std::cout << " " << Arc::IString("No user-certificate found") << std::endl << std::endl; } else { Arc::Credential holder(usercfg.CertificatePath(), "", usercfg.CACertificatesDirectory(), ""); std::cout << " " << Arc::IString("Certificate: %s", usercfg.CertificatePath()) << std::endl; if (!holder.GetDN().empty()) { std::cout << " " << Arc::IString("Subject name: %s", holder.GetDN()) << std::endl; std::cout << " " << Arc::IString("Valid until: %s", (std::string) holder.GetEndTime() ) << std::endl << std::endl; certificate_issuer = holder.GetIssuerName(); } else { std::cout << " " << Arc::IString("Unable to determine certificate information") << std::endl << std::endl; } } std::cout << Arc::IString("Proxy certificate information:") << std::endl; if (usercfg.ProxyPath().empty()) { std::cout << " " << Arc::IString("No proxy found") << std::endl << std::endl; } else { Arc::Credential holder(usercfg.ProxyPath(), "", usercfg.CACertificatesDirectory(), ""); std::cout << " " << Arc::IString("Proxy: %s", usercfg.ProxyPath()) << std::endl; std::cout << " " << Arc::IString("Proxy-subject: %s", holder.GetDN()) << std::endl; if (holder.GetEndTime() < now) { std::cout << " " << Arc::IString("Valid for: Proxy expired") << std::endl << std::endl; } else if (!holder.GetVerification()) { std::cout << " " << Arc::IString("Valid for: Proxy not valid") << std::endl << std::endl; } else { std::cout << " " << Arc::IString("Valid for: %s", (holder.GetEndTime() - now).istr()) << std::endl << std::endl; } } if (!certificate_issuer.empty()) { std::cout << Arc::IString("Certificate issuer: %s", certificate_issuer) << std::endl << std::endl; } bool issuer_certificate_found = false; std::cout << Arc::IString("CA-certificates installed:") << std::endl; Glib::Dir cadir(usercfg.CACertificatesDirectory()); for (Glib::DirIterator it = cadir.begin(); it != cadir.end(); ++it) { std::string cafile = Glib::build_filename(usercfg.CACertificatesDirectory(), *it); // Assume certificates have file ending ".0", ".1" or ".2". Very OpenSSL specific. if (Glib::file_test(cafile, Glib::FILE_TEST_IS_REGULAR) && (*it)[(*it).size()-2] == '.' && ((*it)[(*it).size()-1] == '0' || (*it)[(*it).size()-1] == '1' || (*it)[(*it).size()-1] == '2')) { Arc::Credential cred(cafile, "", "", ""); std::string dn = cred.GetDN(); if (dn.empty()) continue; std::string hash; // Only accept certificates with correct hash. 
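        // i.e. the file name minus its trailing ".N" suffix must equal the hash computed from
        // the certificate itself, matching the hash-based naming (hash.0, hash.1, ...) used in
        // OpenSSL-style CA certificate directories.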
if (!get_hash_value(cred, hash) || hash != (*it).substr(0, (*it).size()-2)) continue; if (dn == certificate_issuer) issuer_certificate_found = true; std::cout << " " << dn << std::endl; } } if (certificate_issuer.empty()) { std::cout << std::endl << Arc::IString("Unable to detect if issuer certificate is installed.") << std::endl; } else if (!issuer_certificate_found) { logger.msg(Arc::WARNING, "Your issuer's certificate is not installed"); } return EXIT_SUCCESS; } if (opt.timeout > 0) usercfg.Timeout(opt.timeout); if (!opt.broker.empty()) usercfg.Broker(opt.broker); Arc::JobDescription testJob; if (!Arc::JobDescription::GetTestJob(opt.testjobid, testJob)) { std::cout << Arc::IString("No test-job, with ID \"%d\"", opt.testjobid) << std::endl; return 1; } if(usercfg.OToken().empty() || jobneedsproxy(testJob)) { if (!checkproxy(usercfg)) { return 1; } } // Set user input variables into job description if (opt.testjobid == 1) { testJob.Application.Executable.Argument.back() = Arc::tostring(opt.runtime); testJob.Resources.TotalCPUTime = (opt.runtime+3)*60; for ( std::map::iterator iter = testJob.OtherAttributes.begin(); iter != testJob.OtherAttributes.end(); ++iter ) { char buffer [iter->second.length()+255]; sprintf(buffer, iter->second.c_str(), opt.runtime, opt.runtime+3); iter->second = (std::string) buffer; } } // ARC6 target selection submission logic (same as arcsub) if ( opt.isARC6TargetSelectionOptions(logger) ) { // arctest only works with single test job in jobdescription list std::list jobdescriptionlist; jobdescriptionlist.push_back(testJob); // canonicalize endpoint types if (!opt.canonicalizeARC6InterfaceTypes(logger)) return 1; // get endpoint batches according to ARC6 target selection logic std::list > endpoint_batches; bool info_discovery = prepare_submission_endpoint_batches(usercfg, opt, endpoint_batches); // add rejectdiscovery if defined if (!opt.rejectdiscovery.empty()) usercfg.AddRejectDiscoveryURLs(opt.rejectdiscovery); // action: dumpjobdescription if (opt.dumpdescription) { if (!info_discovery) { logger.msg(Arc::ERROR,"Cannot adapt job description to the submission target when information discovery is turned off"); return 1; } // dump description only for priority submission interface, no fallbacks std::list services = endpoint_batches.front(); std::string req_sub_iface; if (!opt.submit_types.empty()) req_sub_iface = opt.submit_types.front(); return dumpjobdescription(usercfg, jobdescriptionlist, services, req_sub_iface); } // default action: start submission cycle return submit_jobs(usercfg, endpoint_batches, info_discovery, opt.jobidoutfile, jobdescriptionlist); // legacy code that implements submission logic in arctest } else { Arc::Broker broker(usercfg, testJob, usercfg.Broker().first); if (!broker.isValid()) { logger.msg(Arc::ERROR, "Unable to load broker %s", usercfg.Broker().first); return 1; } logger.msg(Arc::INFO, "Broker %s loaded", usercfg.Broker().first); std::list services = getServicesFromUserConfigAndCommandLine(usercfg, opt.indexurls, opt.clusters, opt.requestedSubmissionInterfaceName, opt.infointerface); std::set preferredInterfaceNames; if (usercfg.InfoInterface().empty()) { preferredInterfaceNames.insert("org.nordugrid.ldapglue2"); } else { preferredInterfaceNames.insert(usercfg.InfoInterface()); } Arc::ExecutionTargetSorter ets(broker); std::list rejectDiscoveryURLs = getRejectDiscoveryURLsFromUserConfigAndCommandLine(usercfg, opt.rejectdiscovery); Arc::ComputingServiceRetriever csr(usercfg, std::list(), rejectDiscoveryURLs, 
preferredInterfaceNames); csr.addConsumer(ets); for (std::list::const_iterator it = services.begin(); it != services.end(); ++it) { csr.addEndpoint(*it); } csr.wait(); if (csr.empty()) { if (!opt.dumpdescription) { std::cout << Arc::IString("Test aborted because no resource returned any information") << std::endl; } else { std::cout << Arc::IString("Unable to adapt job description to any resource, no resource information could be obtained.") << std::endl; std::cout << Arc::IString("Original job description is listed below:") << std::endl; std::string descOutput; testJob.UnParse(descOutput, testJob.GetSourceLanguage()); std::cout << descOutput << std::endl; } return 1; } if (ets.getMatchingTargets().empty()) { if (!opt.dumpdescription) { std::cout << Arc::IString("ERROR: Test aborted because no suitable resources were found for the test-job") << std::endl; } else { std::cout << Arc::IString("ERROR: Dumping job description aborted because no suitable resources were found for the test-job") << std::endl; } return 1; } if (opt.dumpdescription) { return dumpjobdescription_arctest_legacy(usercfg, ets, testJob); } std::cout << Arc::IString("Submitting test-job %d:", opt.testjobid) << std::endl; std::string testJobXRSL; testJob.UnParse(testJobXRSL, "nordugrid:xrsl"); std::cout << testJobXRSL << std::endl; std::cout << Arc::IString("Client version: nordugrid-arc-%s", VERSION) << std::endl; return test(usercfg, ets, testJob, opt.jobidoutfile); } } void printjobid(const std::string& jobid, const std::string& jobidfile) { if (!jobidfile.empty()) if (!Arc::Job::WriteJobIDToFile(jobid, jobidfile)) logger.msg(Arc::WARNING, "Cannot write jobid (%s) to file (%s)", jobid, jobidfile); std::cout << Arc::IString("Test submitted with jobid: %s", jobid) << std::endl; } int test(const Arc::UserConfig& usercfg, Arc::ExecutionTargetSorter& ets, const Arc::JobDescription& testJob, const std::string& jobidfile) { int retval = 0; std::list jobids; std::list submittedJobs; std::map notsubmitted; submittedJobs.push_back(Arc::Job()); for (ets.reset(); !ets.endOfList(); ets.next()) { if (ets->Submit(usercfg, testJob, submittedJobs.back())) { printjobid(submittedJobs.back().JobID, jobidfile); std::cout << Arc::IString("Computing service: %s", ets->ComputingService->Name) << std::endl; break; } } if (ets.endOfList()) { std::cout << Arc::IString("Test failed, no more possible targets") << std::endl; submittedJobs.pop_back(); retval = 1; } Arc::JobInformationStorage *jobstore = createJobInformationStorage(usercfg); if (jobstore == NULL) { logger.msg(Arc::ERROR, "Unable to read job information from file (%s)", usercfg.JobListFile()); return 1; } if (!jobstore->Write(submittedJobs)) { std::cout << Arc::IString("Warning: Failed to write job information to file (%s)", usercfg.JobListFile()) << std::endl; std::cout << Arc::IString("To recover missing jobs, run arcsync") << std::endl; } delete jobstore; return retval; } int dumpjobdescription_arctest_legacy(const Arc::UserConfig& usercfg, Arc::ExecutionTargetSorter& ets, const Arc::JobDescription& testJob) { for (ets.reset(); !ets.endOfList(); ets.next()) { Arc::JobDescription preparedTestJob(testJob); std::string jobdesc; // Prepare the test jobdescription according to the chosen ExecutionTarget if (!preparedTestJob.Prepare(*ets)) { logger.msg(Arc::INFO, "Unable to prepare job description according to needs of the target resource (%s).", ets->ComputingEndpoint->URLString); continue; } std::string jobdesclang = "emies:adl"; if (ets->ComputingEndpoint->InterfaceName == 
"org.nordugrid.gridftpjob") { jobdesclang = "nordugrid:xrsl"; } if (!preparedTestJob.UnParse(jobdesc, jobdesclang)) { logger.msg(Arc::INFO, "An error occurred during the generation of job description to be sent to %s", ets->ComputingEndpoint->URLString); continue; } std::cout << Arc::IString("Job description to be sent to %s:", ets->AdminDomain->Name) << std::endl; std::cout << jobdesc << std::endl; break; } return (!ets.endOfList()); } static bool get_hash_value(const Arc::Credential& c, std::string& hash_str) { X509* cert = c.GetCert(); if(!cert) return false; X509_NAME* cert_name = X509_get_subject_name(cert); if(!cert_name) return false; char hash[32]; memset(hash, 0, 32); snprintf(hash, 32, "%08lx", X509_NAME_hash(cert_name)); hash_str = hash; X509_free(cert); return true; } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arckill.cpp0000644000000000000000000000013214152153376023375 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.473646996 30 ctime=1638455101.092587874 nordugrid-arc-6.14.0/src/clients/compute/arckill.cpp0000644000175000002070000001252414152153376023366 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include "utils.h" int RUNMAIN(arckill)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::Logger logger(Arc::Logger::getRootLogger(), "arckill"); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_KILL, istring("[job ...]"), istring("The arckill command is used to kill " "running jobs.")); std::list jobidentifiers = opt.Parse(argc, argv); if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arckill", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. 
if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); if (opt.show_plugins) { std::list types; types.push_back("HED:JobControllerPlugin"); showplugins("arckill", types, logger); return 0; } Arc::UserConfig usercfg(opt.conffile, opt.joblist); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if(usercfg.OToken().empty()) { if (!checkproxy(usercfg)) { return 1; } } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); for (std::list::const_iterator it = opt.jobidinfiles.begin(); it != opt.jobidinfiles.end(); ++it) { if (!Arc::Job::ReadJobIDsFromFile(*it, jobidentifiers)) { logger.msg(Arc::WARNING, "Cannot read specified jobid file: %s", *it); } } if (opt.timeout > 0) usercfg.Timeout(opt.timeout); if ((!opt.joblist.empty() || !opt.status.empty()) && jobidentifiers.empty() && opt.clusters.empty()) opt.all = true; if (jobidentifiers.empty() && opt.clusters.empty() && !opt.all) { logger.msg(Arc::ERROR, "No jobs given"); return 1; } std::list selectedURLs; if (!opt.clusters.empty()) { selectedURLs = getSelectedURLsFromUserConfigAndCommandLine(usercfg, opt.clusters); } std::list rejectManagementURLs = getRejectManagementURLsFromUserConfigAndCommandLine(usercfg, opt.rejectmanagement); std::list jobs; Arc::JobInformationStorage *jobstore = createJobInformationStorage(usercfg); if (jobstore != NULL && !jobstore->IsStorageExisting()) { logger.msg(Arc::ERROR, "Job list file (%s) doesn't exist", usercfg.JobListFile()); delete jobstore; return 1; } if (jobstore == NULL || ( opt.all && !jobstore->ReadAll(jobs, rejectManagementURLs)) || (!opt.all && !jobstore->Read(jobs, jobidentifiers, selectedURLs, rejectManagementURLs))) { logger.msg(Arc::ERROR, "Unable to read job information from file (%s)", usercfg.JobListFile()); delete jobstore; return 1; } if (!opt.all) { for (std::list::const_iterator itJIDAndName = jobidentifiers.begin(); itJIDAndName != jobidentifiers.end(); ++itJIDAndName) { std::cout << Arc::IString("Warning: Job not found in job list: %s", *itJIDAndName) << std::endl; } } Arc::JobSupervisor jobmaster(usercfg, jobs); jobmaster.Update(); jobmaster.SelectValid(); if (!opt.status.empty()) { jobmaster.SelectByStatus(opt.status); } if (jobmaster.GetSelectedJobs().empty()) { std::cout << Arc::IString("No jobs") << std::endl; delete jobstore; return 1; } int retval = (int)!jobmaster.Cancel(); unsigned int selected_num = jobmaster.GetSelectedJobs().size(); unsigned int canceled_num = jobmaster.GetIDsProcessed().size(); unsigned int cleaned_num = 0; if (!opt.keep) { std::list canceled = jobmaster.GetIDsProcessed(); // No need to clean selection because retrieved is subset of selected jobmaster.SelectByID(canceled); if(!jobmaster.Clean()) { std::cout << Arc::IString("Warning: Some jobs were not removed from server") << std::endl; std::cout << Arc::IString(" Use arcclean to remove retrieved jobs from job list", usercfg.JobListFile()) << std::endl; retval = 1; } cleaned_num = jobmaster.GetIDsProcessed().size(); if (!jobstore->Remove(jobmaster.GetIDsProcessed())) { std::cout << Arc::IString("Warning: Failed removing jobs from file (%s)", usercfg.JobListFile()) << std::endl; std::cout << Arc::IString(" Run 'arcclean -s Undefined' to remove killed jobs from job list") << std::endl; retval = 1; } std::cout << Arc::IString("Jobs processed: %d, successfully 
killed: %d, successfully cleaned: %d", selected_num, canceled_num, cleaned_num) << std::endl; } else { std::cout << Arc::IString("Jobs processed: %d, successfully killed: %d", selected_num, canceled_num) << std::endl; } delete jobstore; return retval; } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcsub.1.in0000644000000000000000000000013214152153376023216 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 30 ctime=1638455101.077587649 nordugrid-arc-6.14.0/src/clients/compute/arcsub.1.in0000644000175000002070000003402114152153376023203 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCSUB 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcsub \- ARC Submission .SH DESCRIPTION The .B arcsub command is used for submitting jobs to Grid enabled computing resources. .SH SYNOPSIS .B arcsub [options] [filename ...] .SH OPTIONS .IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR" select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-g\fR, \fB--index\fR=\fIname\fR" select one or more registries: \fIname\fR can be an alias for a single registry, a group of registries or a URL .IP "\fB-R\fR, \fB--rejectdiscovery\fR=\fIURL\fR" skip the service with the given URL during service discovery .IP "\fB-S\fR, \fB--submissioninterface\fR=\fIInterfaceName\fR" only use this interface for submitting (e.g. org.nordugrid.gridftpjob, org.ogf.glue.emies.activitycreation, org.ogf.bes) .IP "\fB-I\fR, \fB--infointerface\fR=\fIInterfaceName\fR" the computing element specified by URL at the command line should be queried using this information interface (possible options: org.nordugrid.ldapng, org.nordugrid.ldapglue2, org.nordugrid.wsrfglue2, org.ogf.glue.emies.resourceinfo) .IP "\fB-e\fR, \fB--jobdescrstring=\fIString" jobdescription string describing the job to be submitted .IP "\fB-f\fR, \fB--jobdescrfile=\fIfilename" jobdescription file describing the job to be submitted .IP "\fB-j\fR, \fB--joblist\fR=\fIfilename\fR" the file storing information about active jobs (default ~/.arc/jobs.xml) .IP "\fB-o\fR, \fB--jobids-to-file\fR=\fIfilename\fR" the IDs of the submitted jobs will be appended to this file .IP "\fB-D\fR, \fB--dryrun\fR" submit jobs as dry run (no submission to batch system) .IP "\fB --direct\fR" submit directly - no resource discovery or matchmaking .IP "\fB-x\fR, \fB--dumpdescription\fR" do not submit - dump job description in the language accepted by the target .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-b\fR, \fB--broker\fR=\fIbroker\fR" selected broker: Random (default), FastestQueue or custom. Use -P to find possible options. .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .LP .SH ARGUMENTS .IP "\fBfilename\fR ..." job description files describing the jobs to be submitted .LP .SH EXTENDED DESCRIPTION \fBarcsub\fR is the key command when submitting jobs to Grid enabled computing resources with the ARC client. As default \fBarcsub\fR is able to submit jobs to A-REX, CREAM and EMI ES enabled computing elements (CEs), and as always for successful submission you need to be authenticated at the targeted computing services. 
Since \fBarcsub\fR is built on a modular library, modules can be installed that enable submission to other targets, e.g. the classic ARC CE Grid-Manager. Job submission can be accomplished by specifying a job description file to submit as an argument. \fBarcsub\fR will then by default perform resource discovery on the Grid, and the discovered resources will be matched to the job description and ranked according to the chosen broker (\fB--broker\fR option). If no Grid environment has been configured, please contact your system administrator, or set one up yourself in the client configuration file (see the files section). Another option is to explicitly specify one or more registry services to \fBarcsub\fR using the \fB--index\fR option, which accepts a URL, alias or group. Alternatively one or more specific CEs can be targeted by using the \fB--cluster\fR option. If such a scenario is the most common, it is worthwhile to specify those CEs in the client configuration as default services, which makes it superfluous to specify them as arguments. In the same manner, aliases and groups defined in the configuration file can be used as arguments to the \fB--cluster\fR or \fB--index\fR options. In all of the above scenarios \fBarcsub\fR obtains resource information from the services, which is then used for matchmaking against the job description; however, that step can be avoided by specifying the \fB--direct\fR option, in which case the job description is submitted directly to the first specified endpoint. The format of a classic GRIDFTP-based cluster URL is: .br \fB[ldap://]<hostname>[:2135/nordugrid-cluster-name=<hostname>,Mds-Vo-name=local,o=grid]\fR .br Only the \fBhostname\fR part has to be specified; the rest of the URL is automatically generated. The format of an A-REX URL is: .br \fB[https://]<hostname>[:<port>][/<path>]\fR .br Here the port is 443 by default, but the path cannot be guessed, so if it is not specified, the service is assumed to live on the root path. Job descriptions can also be specified using the \fB--jobdescrfile\fR option, which expects the file name of the description as argument, or the \fB--jobdescrstring\fR option, which expects the job description itself as a string; both options can be specified multiple times and one does not exclude the other. The default supported job description languages are xRSL and EMIES ADL. If the job description is successfully submitted, a job-ID is returned and printed. This job-ID uniquely identifies the job while it is being executed. It is also possible that no CE matches the constraints defined in the description, in which case no submission will be done. Upon successful submission, the job-ID along with more technical job information is stored in the job-list file (described below). The stored information enables the job management commands of the ARC client to manage jobs easily, and thus the job-ID need not be saved manually. By default the job-list file is stored in the .arc directory in the home directory of the user; another location can be specified using the \fB--joblist\fR option, which takes the location of this file as argument. If the \fB--joblist\fR option was used during submission, it should also be specified in the subsequent commands when managing the job. If a Computing Element has multiple job submission interfaces (e.g. gridftp, EMI-ES, BES), then the brokering algorithm will choose one of them.
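.P As a brief illustration (the hostname and file name here are placeholders, not part of any default configuration), the interface used for submission can be pinned to a specific type with the \fB-S\fR (\fB--submissioninterface\fR) option described in the next paragraph: .br \fBarcsub -c ce.example.com -S org.ogf.glue.emies.activitycreation helloworld.adl\fR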
With the \fB--submissioninterface\fR option the requested interface can be specified, and in that case only those Computing Elements which have that specific interface will be considered, and only that interface will be used to submit the jobs. As mentioned above, registry or index services can be specified with the \fB--index\fR option. Specifying one or more index servers instructs the \fBarcsub\fR command to query the servers for registered CEs; the returned CEs will then be matched against the job description, the matching ones will be ranked by the chosen broker (see below), and submission will be tried in order until it succeeds or the list is exhausted. From the returned list of CEs it might happen that a troublesome or undesirable CE is selected for submission; in that case it is possible to reject that CE using the \fB--rejectdiscovery\fR option and providing the URL (or just the hostname) of the CE, which will disregard that CE as a target for submission. When multiple CEs are targeted for submission, the resource broker will be used to filter out CEs which do not match the job description requirements and then rank the remaining CEs. The broker used by default will rank the CEs randomly; however, a different broker can be chosen by using the \fB--broker\fR option, which takes the name of the broker as argument. The broker type can also be specified in client.conf. The brokers available can be seen using .B arcsub -P. By default the following brokers are available: .IP "Random (default)" Chooses a random CE matching the job requirements. .IP "FastestQueue" Ranks matching CEs according to the length of the job queue at the CEs, ranking those with the shortest queue first/highest. .IP "Benchmark" Ranks matching CEs according to a specified benchmark, which is selected by appending ':' and the name of the benchmark to the broker name. If no option is given to the Benchmark broker, CEs will be ranked according to the 'specint2000' benchmark. .IP "Data" Ranks matching CEs according to the amount of input data cached by each CE, by querying the CE. Only CEs with the A-REX BES interface support this operation. .IP "Null" Chooses a random CE with no filtering of CEs at all. .IP "PythonBroker" User-defined custom brokers can be created in Python. See the example brokers SampleBroker.py or ACIXBroker.py (like the Data broker, but using the ARC Cache Index) that come installed with ARC for more details on how to write your own broker. A PythonBroker is specified by \fB--broker PythonBroker:Filename.Class:args\fR, where Filename is the file containing the class Class which implements the broker interface. The directory containing this file must be in the PYTHONPATH. args is optional and allows specifying arguments to the broker. .P Before submission, \fBarcsub\fR performs an intelligent modification of the job description (adding or modifying attributes, even converting the description language to fit the needs of the CE), ensuring that it is valid. The modified job description can be printed by specifying the \fB--dumpdescription\fR option. The format, i.e. job description language, of the printed job description cannot be specified; it will be the one which is sent to and accepted by the chosen target. Further information from \fBarcsub\fR can be obtained by increasing the verbosity, which is done with the \fB--debug\fR option; the default verbosity level is WARNING. Setting the level to DEBUG will show all messages, while setting it to FATAL will only show fatal log messages.
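.P For example (the registry hostname and job description file are placeholders taken from the examples below), the Benchmark broker is selected with the benchmark name appended after a colon; giving 'specint2000' explicitly is equivalent to that broker's default ranking: .br \fBarcsub -g registry.example.com -b Benchmark:specint2000 helloworld.adl\fR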
To \fBvalidate\fR your job description without actually submitting a job, use the \fB--dryrun\fR option: it will capture possible syntax or other errors, but will instruct the site not to submit the job for execution. Only the grid-manager (ARC0) and A-REX (ARC1) CEs support this feature. .SH EXAMPLES Submission of a job description file "helloworld.adl" to the Grid: .br \fBarcsub helloworld.adl\fR An information index server (registry) can also be queried for CEs to submit to: .br \fBarcsub -g registry.example.com helloworld.adl\fR Submission of a job description file "helloworld.adl" to ce.example.com: .br \fBarcsub -c ce.example.com helloworld.adl\fR Direct submission to a CE is done as: .br \fBarcsub --direct -c ce.example.com helloworld.adl\fR The job description can also be specified directly on the command line, as shown in the following example using the XRSL job description language: .br \fBarcsub -c example.com/arex -e \\ .br \'&(executable="/bin/echo")(arguments="Hello World!")\'\fR When submitting against CEs retrieved from information index servers, it might be useful to do resource brokering: .br \fBarcsub -g registry.example.com -b FastestQueue helloworld.adl\fR If the job has a large input data set, it can be useful to send it to a CE where those files are already cached. The ACIX broker can be used for this: .br \fBarcsub -g registry.example.com -b PythonBroker:ACIXBroker.ACIXBroker:https://cacheindex.ndgf.org:6443/data/index helloworld.adl\fR Disregarding a specific CE when submitting against an information index server: .br \fBarcsub -g registry.example.com -R badcomputingelement.com/arex helloworld.adl\fR Dumping the job description is done as follows: .br \fBarcsub -c example.com/arex -x helloworld.adl\fR .SH FILES .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. Registry and computing element services can be specified in separate sections of the config. The default services can be specified by adding the 'default=yes' attribute to the section of the service; when no \fB--cluster\fR or \fB--index\fR options are given, these will be used for submission. Each service has an alias, and can be a member of any number of groups. Specifying the alias or the name of the group with the \fB--cluster\fR or \fB--index\fR options will then select the given services. By using the \fB--conffile\fR option a different configuration file can be used than the default. Note that some installations also have a system client configuration file; however, attributes in the user's client configuration take precedence, and command line options take precedence over configuration file attributes. .TP .B ~/.arc/jobs.xml This is a local list of the user's active jobs. When a job is successfully submitted it is added to this list, and when it is removed from the remote cluster it is removed from this list. This list is used as the list of all active jobs when the user specifies the \fB--all\fR option to the various NorduGrid ARC user interface commands. By using the \fB--joblist\fR option a different file can be used than the default. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable.
If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arcclean (1), .BR arccp (1), .BR arcget (1), .BR arcinfo (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcrenew (1), .BR arcresub (1), .BR arcresume (1), .BR arcrm (1), .BR arcstat (1), .BR arcsync (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcsync.cpp0000644000000000000000000000013114152153376023415 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 29 ctime=1638455101.09958798 nordugrid-arc-6.14.0/src/clients/compute/arcsync.cpp0000644000175000002070000002202114152153376023400 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "utils.h" class JobSynchronizer : public Arc::EntityConsumer { public: JobSynchronizer( const Arc::UserConfig& uc, const std::list& services, const std::list& rejectedServices = std::list(), const std::set& preferredInterfaceNames = std::set(), const std::list& capabilityFilter = std::list(1, Arc::Endpoint::GetStringForCapability(Arc::Endpoint::COMPUTINGINFO)) ) : uc(uc), ser(uc, Arc::EndpointQueryOptions(true, capabilityFilter, rejectedServices)), jlr(uc, Arc::EndpointQueryOptions(preferredInterfaceNames)) { jlr.needAllResults(); ser.addConsumer(*this); jlr.addConsumer(jobs); for (std::list::const_iterator it = services.begin(); it != services.end(); ++it) { if (it->HasCapability(Arc::Endpoint::REGISTRY)) { ser.addEndpoint(*it); } else { jlr.addEndpoint(*it); } } } void wait() { ser.wait(); jlr.wait(); } void addEntity(const Arc::Endpoint& service) { if (service.HasCapability(Arc::Endpoint::COMPUTINGINFO)) { jlr.addEndpoint(service); } } bool writeJobs(bool truncate) { bool jobsWritten = false; bool jobsReported = false; Arc::JobInformationStorage *jobstore = createJobInformationStorage(uc); if (jobstore == NULL) { std::cerr << Arc::IString("Warning: Unable to open job list file (%s), unknown format", uc.JobListFile()) << std::endl; return false; } // Write extracted job info to joblist if (truncate) { jobstore->Clean(); if ( (jobsWritten = jobstore->Write(jobs)) ) { for (std::list::const_iterator it = jobs.begin(); it != jobs.end(); ++it) { if (!jobsReported) { std::cout << Arc::IString("Found the following jobs:")<Name.empty()) { std::cout << it->Name << " (" << it->JobID << ")" << std::endl; } else { std::cout << it->JobID << std::endl; } } std::cout << Arc::IString("Total number of jobs found: ") << jobs.size() << std::endl; } } else { std::list newJobs; std::set prunedServices; jlr.getServicesWithStatus(Arc::EndpointQueryingStatus::SUCCESSFUL, prunedServices); if ( (jobsWritten = jobstore->Write(jobs, prunedServices, newJobs)) ) { for (std::list::const_iterator it = 
newJobs.begin(); it != newJobs.end(); ++it) { if (!jobsReported) { std::cout << Arc::IString("Found the following new jobs:")<Name.empty()) { std::cout << (*it)->Name << " (" << (*it)->JobID << ")" << std::endl; } else { std::cout << (*it)->JobID << std::endl; } } std::cout << Arc::IString("Total number of new jobs found: ") << newJobs.size() << std::endl; } } delete jobstore; if (!jobsWritten) { std::cout << Arc::IString("ERROR: Failed to write job information to file (%s)", uc.JobListFile()) << std::endl; return false; } return true; } private: const Arc::UserConfig& uc; Arc::ServiceEndpointRetriever ser; Arc::JobListRetriever jlr; Arc::EntityContainer jobs; }; int RUNMAIN(arcsync)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::Logger logger(Arc::Logger::getRootLogger(), "arcsync"); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_SYNC, " ", istring("The arcsync command synchronizes your " "local job list with the information at\n" "the given resources or index servers.")); std::list params = opt.Parse(argc, argv); if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arcsync", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); if (opt.show_plugins) { std::list types; types.push_back("HED:JobListRetrieverPlugin"); showplugins("arcsync", types, logger); return 0; } Arc::UserConfig usercfg(opt.conffile, opt.joblist); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if (opt.convert) { Arc::JobInformationStorage *jobstore = createJobInformationStorage(usercfg); if (jobstore == NULL) { std::cerr << Arc::IString("Warning: Unable to open job list file (%s), unknown format", usercfg.JobListFile()) << std::endl; return 1; } // Read current jobs std::list jobs; if (!jobstore->ReadAll(jobs)) { std::cerr << Arc::IString("Warning: Unable to read local list of jobs from file (%s)", usercfg.JobListFile()) << std::endl; return 1; } // Delete existing database so new on is created with specified format if (!jobstore->Clean()) { std::cerr << Arc::IString("Warning: Unable to truncate local list of jobs in file (%s)", usercfg.JobListFile()) << std::endl; return 1; } delete jobstore; jobstore = createJobInformationStorage(usercfg); if (jobstore == NULL) { std::cerr << Arc::IString("Warning: Unable to create job list file (%s), jobs list is destroyed", usercfg.JobListFile()) << std::endl; return 1; } if (!jobstore->Write(jobs)) { std::cerr << Arc::IString("Warning: Failed to write local list of jobs into file (%s), jobs list is destroyed", usercfg.JobListFile()) << std::endl; return 1; } return 0; } if (opt.timeout > 0) usercfg.Timeout(opt.timeout); if(usercfg.OToken().empty()) { if (!checkproxy(usercfg)) { return 1; } } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); //sanity check if (!opt.forcesync) { std::cout << Arc::IString("Synchronizing the local list of active jobs with the information in the\n" "information system can result in some inconsistencies. 
Very recently submitted\n" "jobs might not yet be present, whereas jobs very recently scheduled for\n" "deletion can still be present." ) << std::endl; std::cout << Arc::IString("Are you sure you want to synchronize your local job list?") << " [" << Arc::IString("y") << "/" << Arc::IString("n") << "] "; std::string response; std::cin >> response; if (Arc::lower(response) != std::string(Arc::FindTrans("y"))) { std::cout << Arc::IString("Cancelling synchronization request") << std::endl; return 0; } } // legacy options => new options for (std::list::const_iterator it = opt.clusters.begin(); it != opt.clusters.end(); ++it) { opt.computing_elements.push_back(*it); } for (std::list::const_iterator it = opt.indexurls.begin(); it != opt.indexurls.end(); ++it) { opt.registries.push_back(*it); } std::list endpoints = getServicesFromUserConfigAndCommandLine(usercfg, opt.registries, opt.computing_elements); std::list rejectDiscoveryURLs = getRejectDiscoveryURLsFromUserConfigAndCommandLine(usercfg, opt.rejectdiscovery); if (endpoints.empty()) { logger.msg(Arc::ERROR, "No services specified. Please configure default services in the client configuration, " "or specify a cluster or index (-c or -g options, see arcsync -h)."); return 1; } std::set preferredInterfaceNames; if (usercfg.InfoInterface().empty()) { preferredInterfaceNames.insert("org.nordugrid.ldapglue2"); } else { preferredInterfaceNames.insert(usercfg.InfoInterface()); } JobSynchronizer js(usercfg, endpoints, rejectDiscoveryURLs, preferredInterfaceNames); js.wait(); return js.writeJobs(opt.truncate)?0:1; // true -> 0, false -> 1. } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcrenew.cpp0000644000000000000000000000013214152153376023562 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.473646996 30 ctime=1638455101.093587889 nordugrid-arc-6.14.0/src/clients/compute/arcrenew.cpp0000644000175000002070000001005614152153376023551 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include "utils.h" int RUNMAIN(arcrenew)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::Logger logger(Arc::Logger::getRootLogger(), "arcrenew"); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_RENEW, istring("[job ...]")); std::list jobidentifiers = opt.Parse(argc, argv); if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arcrenew", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. 
if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); if (opt.show_plugins) { std::list types; types.push_back("HED:JobControllerPlugin"); showplugins("arcrenew", types, logger); return 0; } Arc::UserConfig usercfg(opt.conffile, opt.joblist); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if (!checkproxy(usercfg)) { return 1; } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); for (std::list::const_iterator it = opt.jobidinfiles.begin(); it != opt.jobidinfiles.end(); ++it) { if (!Arc::Job::ReadJobIDsFromFile(*it, jobidentifiers)) { logger.msg(Arc::WARNING, "Cannot read specified jobid file: %s", *it); } } if (opt.timeout > 0) usercfg.Timeout(opt.timeout); if ((!opt.joblist.empty() || !opt.status.empty()) && jobidentifiers.empty() && opt.clusters.empty()) opt.all = true; if (jobidentifiers.empty() && opt.clusters.empty() && !opt.all) { logger.msg(Arc::ERROR, "No jobs given"); return 1; } std::list selectedURLs; if (!opt.clusters.empty()) { selectedURLs = getSelectedURLsFromUserConfigAndCommandLine(usercfg, opt.clusters); } std::list rejectManagementURLs = getRejectManagementURLsFromUserConfigAndCommandLine(usercfg, opt.rejectmanagement); std::list jobs; Arc::JobInformationStorage *jobstore = createJobInformationStorage(usercfg); if (jobstore != NULL && !jobstore->IsStorageExisting()) { logger.msg(Arc::ERROR, "Job list file (%s) doesn't exist", usercfg.JobListFile()); delete jobstore; return 1; } if (jobstore == NULL || ( opt.all && !jobstore->ReadAll(jobs, rejectManagementURLs)) || (!opt.all && !jobstore->Read(jobs, jobidentifiers, selectedURLs, rejectManagementURLs))) { logger.msg(Arc::ERROR, "Unable to read job information from file (%s)", usercfg.JobListFile()); delete jobstore; return 1; } delete jobstore; if (!opt.all) { for (std::list::const_iterator itJIDAndName = jobidentifiers.begin(); itJIDAndName != jobidentifiers.end(); ++itJIDAndName) { std::cout << Arc::IString("Warning: Job not found in job list: %s", *itJIDAndName) << std::endl; } } Arc::JobSupervisor jobmaster(usercfg, jobs); jobmaster.Update(); jobmaster.SelectValid(); if (!opt.status.empty()) { jobmaster.SelectByStatus(opt.status); } if (jobmaster.GetSelectedJobs().empty()) { std::cout << Arc::IString("No jobs") << std::endl; return 1; } int retval = (int)!jobmaster.Renew(); std::cout << Arc::IString("Jobs processed: %d, renewed: %d", jobmaster.GetIDsProcessed().size()+jobmaster.GetIDsNotProcessed().size(), jobmaster.GetIDsProcessed().size()) << std::endl; return retval; } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcresub.1.in0000644000000000000000000000013214152153376023545 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.473646996 30 ctime=1638455101.081587709 nordugrid-arc-6.14.0/src/clients/compute/arcresub.1.in0000644000175000002070000001604714152153376023542 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCRESUB 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcresub \- ARC Resubmission .SH DESCRIPTION The .B arcresub command is used for resubmitting jobs to grid enabled computing resources. .SH SYNOPSIS .B arcresub [options] [jobid ...] 
.SH OPTIONS .IP "\fB-a\fR, \fB--all\fR" all jobs .IP "\fB-j\fR, \fB--joblist\fR=\fIfilename\fR" the file storing information about active jobs (default ~/.arc/jobs.xml) .IP "\fB-i\fR, \fB--jobids-from-file\fR=\fIfilename\fR" a file containing a list of job IDs .IP "\fB-o\fR, \fB--jobids-to-file\fR=\fIfilename\fR" the IDs of the submitted jobs will be appended to this file .IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR" select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-q\fR, \fB--qluster\fR=\fIname\fR" select one or more computing elements for the new jobs: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-g\fR, \fB--index\fR=\fIname\fR" select one or more registries: \fIname\fR can be an alias for a single registry, a group of registries or a URL .IP "\fB-r\fR, \fB--rejectmanagement\fR=\fIURL\fR" skip jobs which are on a computing element with a given URL .IP "\fB-R\fR, \fB--rejectdiscovery\fR=\fIURL\fR" skip the service with the given URL during service discovery .IP "\fB-S\fR, \fB--submissioninterface\fR=\fIInterfaceName\fR" only use this interface for submitting (e.g. org.nordugrid.gridftpjob, org.ogf.glue.emies.activitycreation, org.ogf.bes) .IP "\fB-I\fR, \fB--infointerface\fR=\fIInterfaceName\fR" the computing element specified by URL at the command line should be queried using this information interface (possible options: org.nordugrid.ldapng, org.nordugrid.ldapglue2, org.nordugrid.wsrfglue2, org.ogf.glue.emies.resourceinfo) .IP "\fB-k\fR, \fB--keep\fR" keep the files on the server (do not clean) .IP "\fB-m\fR, \fB--same\fR" resubmit to the same cluster .IP "\fB-M\fR, \fB--not-same\fR" do not resubmit to the same cluster .IP "\fB-s\fR, \fB--status\fR=\fIstatusstr\fR" only select jobs whose status is statusstr .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-b\fR, \fB--broker\fR=\fIbroker\fR" selected broker: Random (default), FastestQueue or custom .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .LP .SH ARGUMENTS .IP "\fBjobid\fR ..." job ID or job name of jobs to be resubmitted .LP .SH EXTENDED DESCRIPTION The .B arcresub command is used for resubmitting jobs on grid enabled computing resources. The job can be referred to either by the job ID that was returned by .BR arcsub (1) at submission time or by its jobname if the job description that was submitted contained a jobname attribute. More than one job ID and/or jobname can be given. If several jobs were submitted with the same jobname, all those jobs will be resubmitted. If the job description of a job to be resubmitted contained any local input files, checksums of these were calculated and stored in the job list, and those will be used to check whether the files have changed. If local input files have changed, the job will not be resubmitted. In case the job description is not found in the job list, it will be retrieved from the cluster holding the job. This description, however, might differ from the one originally used to submit the job, since both the submission client and the cluster may have made modifications to the job description.
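.P For example (the job name here is a placeholder), \fBarcresub myjob\fR resubmits every job that was submitted with the jobname 'myjob', while \fBarcresub -m myjob\fR additionally restricts each resubmission to the cluster currently holding that job (see the \fB--same\fR option above and the discussion below).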
Upon resubmission the job will receive a new job ID, and the old job ID will be stored in the local job list file, enabling future back tracing of the resubmitted job. The name of the local job list file can be specified by the .B --joblist option. By specifying the .B --all option, all active jobs appearing in the job list file will be resubmitted. The .B --cluster option can be used to select or reject jobs at specific clusters to be resubmitted. See .BR arcsub (1) for a discussion of the format of arguments to this option. The .B --qluster option can be used to force the jobs to be resubmitted to a particular cluster, or to reject resubmission to a particular cluster. Again see .BR arcsub (1) for a discussion of the format of arguments to this option. The .B --status option can be used to select jobs in a specific state. These options can be repeated several times. See .BR arcstat (1) for possible state values. Which servers to query can be specified by giving the .B --index option to the command. See .BR arcsub (1) for a discussion of the format of arguments to this option. The default behaviour of .BR arcresub is to resubmit to any cluster. This behaviour can be changed by specifying the \fB--same\fR or \fB--not-same\fR options. The former will resubmit a job to the same cluster where the job resides, and thus the \fB--qluster\fR and \fB--index\fR options are ignored. The latter will resubmit a job to any cluster except the cluster where it resides. Note that the \fB--same\fR and \fB--not-same\fR options cannot be specified together. If the old job was successfully killed, the job will be removed from the remote cluster unless the \fB--keep\fR option was specified. .SH FILES .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. By using the .B --conffile option a different configuration file can be used than the default. .TP .B ~/.arc/jobs.xml This is a local list of the user's active jobs. When a job is successfully submitted it is added to this list, and when it is removed from the remote cluster it is removed from this list. This list is used as the list of all active jobs when the user specifies the \fB--all\fR option. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org); please consult the AUTHORS file distributed with ARC.
Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arcclean (1), .BR arccp (1), .BR arcget (1), .BR arcinfo (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcrenew (1), .BR arcresume (1), .BR arcrm (1), .BR arcstat (1), .BR arcsub (1), .BR arcsync (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/utils.cpp0000644000000000000000000000013214152153376023114 xustar000000000000000030 mtime=1638455038.339644982 30 atime=1638455038.473646996 30 ctime=1638455101.087587799 nordugrid-arc-6.14.0/src/clients/compute/utils.cpp0000644000175000002070000007170214152153376023110 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include "utils.h" #include #include ConsoleRecovery::ConsoleRecovery(void) { ti = new termios; if (tcgetattr(STDIN_FILENO, ti) == 0) return; delete ti; ti = NULL; } ConsoleRecovery::~ConsoleRecovery(void) { if(ti) tcsetattr(STDIN_FILENO, TCSANOW, ti); delete ti; } std::list getSelectedURLsFromUserConfigAndCommandLine(Arc::UserConfig usercfg, std::list computingelements) { std::list endpoints = getServicesFromUserConfigAndCommandLine(usercfg, std::list(), computingelements); std::list serviceURLs; for (std::list::const_iterator it = endpoints.begin(); it != endpoints.end(); ++it) { serviceURLs.push_back(it->URLString); } return serviceURLs; } std::list getRejectDiscoveryURLsFromUserConfigAndCommandLine(Arc::UserConfig usercfg, std::list rejectdiscovery) { std::list rejectDiscoveryURLs = usercfg.RejectDiscoveryURLs(); rejectDiscoveryURLs.insert(rejectDiscoveryURLs.end(), rejectdiscovery.begin(), rejectdiscovery.end()); return rejectDiscoveryURLs; } std::list getRejectManagementURLsFromUserConfigAndCommandLine(Arc::UserConfig usercfg, std::list rejectmanagement) { std::list rejectManagementURLs = usercfg.RejectManagementURLs(); rejectManagementURLs.insert(rejectManagementURLs.end(), rejectmanagement.begin(), rejectmanagement.end()); return rejectManagementURLs; } std::list getServicesFromUserConfigAndCommandLine(Arc::UserConfig usercfg, std::list registries, std::list computingelements, std::string requestedSubmissionInterfaceName, std::string infointerface) { std::list services; if (computingelements.empty() && registries.empty()) { std::list endpoints = usercfg.GetDefaultServices(); for (std::list::const_iterator its = endpoints.begin(); its != endpoints.end(); ++its) { services.push_back(*its); } } else { for (std::list::const_iterator it = computingelements.begin(); it != computingelements.end(); ++it) { // check if the string is a group or alias std::list newServices = usercfg.GetServices(*it, Arc::ConfigEndpoint::COMPUTINGINFO); if (newServices.empty()) { // if it was not an alias or a group, then it should be the URL Arc::Endpoint service(*it); service.Capability.insert(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::COMPUTINGINFO)); if (!infointerface.empty()) { service.InterfaceName = infointerface; } service.RequestedSubmissionInterfaceName = requestedSubmissionInterfaceName; services.push_back(service); } else { // if it was a group (or an alias), add all the services for (std::list::iterator its = newServices.begin(); its != newServices.end(); ++its) { if (!requestedSubmissionInterfaceName.empty()) { // if there was a submission interface requested, this overrides the one from the config its->RequestedSubmissionInterfaceName = 
requestedSubmissionInterfaceName; } services.push_back(*its); } } } for (std::list::const_iterator it = registries.begin(); it != registries.end(); ++it) { // check if the string is a name of a group std::list newServices = usercfg.GetServices(*it, Arc::ConfigEndpoint::REGISTRY); if (newServices.empty()) { // if it was not an alias or a group, then it should be the URL Arc::Endpoint service(*it); service.Capability.insert(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::REGISTRY)); services.push_back(service); } else { // if it was a group (or an alias), add all the services services.insert(services.end(), newServices.begin(), newServices.end()); } } } return services; } void showplugins(const std::string& program, const std::list& types, Arc::Logger& logger, const std::string& chosenBroker) { for (std::list::const_iterator itType = types.begin(); itType != types.end(); ++itType) { if (*itType == "HED:SubmitterPlugin") { std::cout << Arc::IString("Types of execution services that %s is able to submit jobs to:", program) << std::endl; } else if (*itType == "HED:ServiceEndpointRetrieverPlugin") { std::cout << Arc::IString("Types of registry services that %s is able to collect information from:", program) << std::endl; } else if (*itType == "HED:TargetInformationRetrieverPlugin") { std::cout << Arc::IString("Types of local information services that %s is able to collect information from:", program) << std::endl; } else if (*itType == "HED:JobListRetriever") { std::cout << Arc::IString("Types of local information services that %s is able to collect job information from:", program) << std::endl; } else if (*itType == "HED:JobControllerPlugin") { std::cout << Arc::IString("Types of services that %s is able to manage jobs at:", program) << std::endl; } else if (*itType == "HED:JobDescriptionParserPlugin") { std::cout << Arc::IString("Job description languages supported by %s:", program) << std::endl; } else if (*itType == "HED:BrokerPlugin") { std::cout << Arc::IString("Brokers available to %s:", program) << std::endl; } std::list modules; Arc::PluginsFactory pf(Arc::BaseConfig().MakeConfig(Arc::Config()).Parent()); bool isDefaultBrokerLocated = false; pf.scan(Arc::FinderLoader::GetLibrariesList(), modules); Arc::PluginsFactory::FilterByKind(*itType, modules); for (std::list::iterator itMod = modules.begin(); itMod != modules.end(); ++itMod) { for (std::list::iterator itPlug = itMod->plugins.begin(); itPlug != itMod->plugins.end(); ++itPlug) { std::cout << " " << itPlug->name; if (*itType == "HED:BrokerPlugin" && itPlug->name == chosenBroker) { std::cout << " (default)"; isDefaultBrokerLocated = true; } std::cout << " - " << itPlug->description << std::endl; } } if (*itType == "HED:BrokerPlugin" && !isDefaultBrokerLocated) { logger.msg(Arc::WARNING, "Default broker (%s) is not available. When using %s a broker should be specified explicitly (-b option).", chosenBroker, program); } } } bool checkproxy(const Arc::UserConfig& uc) { if (!uc.ProxyPath().empty() ) { Arc::Credential holder(uc.ProxyPath(), "", "", ""); if (holder.GetEndTime() < Arc::Time()){ std::cout << Arc::IString("Proxy expired. Job submission aborted. Please run 'arcproxy'!") << std::endl; return false; } } else { std::cout << Arc::IString("Cannot find any proxy. 
This application currently cannot run without a proxy.\n" " If you have the proxy file in a non-default location,\n" " please make sure the path is specified in the client configuration file.\n" " If you don't have a proxy yet, please run 'arcproxy'!") << std::endl; return false; } return true; } static bool urlisinsecure(Arc::URL const & url) { std::string protocol = url.Protocol(); return protocol.empty() || (protocol == "http") || (protocol == "ftp") || (protocol == "ldap"); } bool jobneedsproxy(const Arc::JobDescription& job) { // Check if X.509 credentials are needed for data staging std::list inputFiles = job.DataStaging.InputFiles; for(std::list::iterator fileIt = inputFiles.begin(); fileIt != inputFiles.end(); ++fileIt) { for(std::list::iterator sourceIt = fileIt->Sources.begin(); sourceIt != fileIt->Sources.end(); ++sourceIt) { if(!urlisinsecure(*sourceIt)) { return true; } } } std::list outputFiles = job.DataStaging.OutputFiles; for(std::list::iterator fileIt = outputFiles.begin(); fileIt != outputFiles.end(); ++fileIt) { for(std::list::iterator targetIt = fileIt->Targets.begin(); targetIt != fileIt->Targets.end(); ++targetIt) { if(!urlisinsecure(*targetIt)) { return true; } } } return false; } void splitendpoints(std::list& selected, std::list& rejected) { // Removes slashes from end of endpoint strings, and put strings with leading '-' into rejected list. for (std::list::iterator it = selected.begin(); it != selected.end();) { if ((*it)[it->length()-1] == '/') { it->erase(it->length()-1); continue; } if (it->empty()) { it = selected.erase(it); continue; } if ((*it)[0] == '-') { rejected.push_back(it->substr(1)); it = selected.erase(it); } else { ++it; } } } Arc::JobInformationStorage* createJobInformationStorage(const Arc::UserConfig& uc) { Arc::JobInformationStorage* jis = NULL; if (Glib::file_test(uc.JobListFile(), Glib::FILE_TEST_EXISTS)) { for (int i = 0; Arc::JobInformationStorage::AVAILABLE_TYPES[i].name != NULL; ++i) { jis = (Arc::JobInformationStorage::AVAILABLE_TYPES[i].instance)(uc.JobListFile()); if (jis && jis->IsValid()) { return jis; } delete jis; } return NULL; } for (int i = 0; Arc::JobInformationStorage::AVAILABLE_TYPES[i].name != NULL; ++i) { if (uc.JobListType() == Arc::JobInformationStorage::AVAILABLE_TYPES[i].name) { jis = (Arc::JobInformationStorage::AVAILABLE_TYPES[i].instance)(uc.JobListFile()); if (jis && jis->IsValid()) { return jis; } delete jis; return NULL; } } if (Arc::JobInformationStorage::AVAILABLE_TYPES[0].instance != NULL) { jis = (Arc::JobInformationStorage::AVAILABLE_TYPES[0].instance)(uc.JobListFile()); if (jis && jis->IsValid()) { return jis; } delete jis; } return NULL; } bool ClientOptions::isARC6TargetSelectionOptions(Arc::Logger& logger, bool allow_cluster) { bool arc6_target_options = false; do { if ( ! computing_elements.empty() ) { arc6_target_options = true; break; } if ( ! registries.empty() ) { arc6_target_options = true; break; } if ( ! requested_submission_endpoint_type.empty() ) { arc6_target_options = true; break; } if ( ! requested_info_endpoint_type.empty() ) arc6_target_options = true; } while (false); bool legacy_target_options = false; do { if ( ! clusters.empty() && !allow_cluster ) { legacy_target_options = true; break; } if ( ! indexurls.empty() ) { legacy_target_options = true; break; } if ( ! requestedSubmissionInterfaceName.empty() ) { legacy_target_options = true; break; } if ( ! 
infointerface.empty() ) { legacy_target_options = true; break; } if ( direct_submission ) legacy_target_options = true; } while (false); if ( legacy_target_options && arc6_target_options ) { logger.msg(Arc::ERROR, "It is impossible to mix ARC6 target selection options with legacy options. All legacy options will be ignored!"); } return arc6_target_options; } bool ClientOptions::canonicalizeARC6InterfaceTypes(Arc::Logger& logger) { std::string s(requested_submission_endpoint_type); std::string i(requested_info_endpoint_type); // canonicalize submission endpoint if ( !s.empty() ) { if (s.find(".") == std::string::npos) { s = "org.nordugrid." + s; } // replace EMI-ES type if (s == "org.nordugrid.emies") { s = "org.ogf.glue.emies.activitycreation"; // allow to use gridftp as gridftpjob } else if ( s == "org.nordugrid.gridftp" ) { s += "job"; } } // canonicalize information endpoint if ( !i.empty() && Arc::lower(i) != "none" ) { if (i.find(".") == std::string::npos ) { i = "org.nordugrid." + i; } else if ( i == "ldap.nordugrid" ) { i = "org.nordugrid.ldapng"; } else if ( i == "ldap.glue2" ) { i = "org.nordugrid.ldapglue2"; } // replace EMI-ES type if (i == "org.nordugrid.emies") { i = "org.ogf.glue.emies.resourceinfo"; // allow to use gridftp as gridftpjob } else if ( s == "org.nordugrid.gridftp" ) { s += "job"; } } // nothing specified - any interface can be used if ( s.empty() && i.empty() ) return true; // define info based on submission (and verify submission type is supported) if ( !s.empty() ) { const std::string notify_template = "Automatically adding %s information endpoint type based on desired submission interface"; if ( s == "org.ogf.glue.emies.activitycreation" ) { if ( i.empty() ) { logger.msg(Arc::VERBOSE, notify_template, "org.ogf.glue.emies.resourceinfo"); info_types.push_back("org.ogf.glue.emies.resourceinfo"); } } else if ( s == "org.nordugrid.arcrest" ) { if ( i.empty() ) { logger.msg(Arc::VERBOSE, notify_template, "org.nordugrid.arcrest"); info_types.push_back("org.nordugrid.arcrest"); } } else if ( s == "org.nordugrid.internal" ) { if ( i.empty() ) { logger.msg(Arc::VERBOSE, notify_template, "org.nordugrid.internal"); info_types.push_back("org.nordugrid.internal"); } } else if ( s == "org.nordugrid.gridftpjob" ) { if ( i.empty() ) { logger.msg(Arc::VERBOSE, notify_template, "org.nordugrid.ldapng"); logger.msg(Arc::VERBOSE, notify_template, "org.nordugrid.ldapglue2"); info_types.push_back("org.nordugrid.ldapng"); info_types.push_back("org.nordugrid.ldapglue2"); } } else { logger.msg(Arc::ERROR, "Unsupported submission endpoint type: %s", s); return false; } submit_types.push_back(s); } // define submission type based on info (and verify info type is supported) if ( !i.empty() ) { const std::string notify_template = "Automatically adding %s submission endpoint type based on desired information interface"; if ( i == "org.ogf.glue.emies.resourceinfo" ) { if ( s.empty() ) { logger.msg(Arc::VERBOSE, notify_template, "org.ogf.glue.emies.activitycreation"); submit_types.push_back("org.ogf.glue.emies.activitycreation"); } } else if ( i == "org.nordugrid.arcrest" ) { if ( s.empty() ) { logger.msg(Arc::VERBOSE, notify_template, "org.nordugrid.arcrest"); submit_types.push_back("org.nordugrid.arcrest"); } } else if ( i == "org.nordugrid.internal" ) { if ( s.empty() ) { logger.msg(Arc::VERBOSE, notify_template, "org.nordugrid.internal"); submit_types.push_back("org.nordugrid.internal"); } } else if ( i == "org.nordugrid.ldapng" ) { if ( s.empty() ) { logger.msg(Arc::VERBOSE, 
notify_template, "org.nordugrid.gridftpjob"); submit_types.push_back("org.nordugrid.gridftpjob"); } } else if ( i == "org.nordugrid.ldapglue2" ) { if ( s.empty() ) { logger.msg(Arc::VERBOSE, notify_template, "org.ogf.glue.emies.activitycreation"); submit_types.push_back("org.ogf.glue.emies.activitycreation"); logger.msg(Arc::VERBOSE, notify_template, "org.nordugrid.gridftpjob"); submit_types.push_back("org.nordugrid.gridftpjob"); } } else if ( Arc::lower(i) == "none" ) { if ( s.empty() ) { logger.msg(Arc::VERBOSE, "Requested to skip resource discovery. Will try direct submission to %s and %s submission endpoint types", "org.ogf.glue.emies.activitycreation", "org.nordugrid.gridftpjob"); submit_types.push_back("org.ogf.glue.emies.activitycreation"); submit_types.push_back("org.nordugrid.gridftpjob"); } return true; } else { logger.msg(Arc::ERROR, "Unsupported information endpoint type: %s", i); return false; } info_types.push_back(i); } return true; } ClientOptions::ClientOptions(Client_t c, const std::string& arguments, const std::string& summary, const std::string& description) : Arc::OptionParser(arguments, summary, description), dryrun(false), dumpdescription(false), show_credentials(false), show_plugins(false), showversion(false), all(false), forcemigration(false), keep(false), forcesync(false), truncate(false), convert(false), longlist(false), printids(false), same(false), notsame(false), forceclean(false), show_stdout(true), show_stderr(false), show_joblog(false), show_json(false), usejobname(false), forcedownload(false), list_configured_services(false), direct_submission(false), show_unavailable(false), testjobid(-1), runtime(5), timeout(-1) { bool cIsJobMan = (c == CO_CAT || c == CO_CLEAN || c == CO_GET || c == CO_KILL || c == CO_RENEW || c == CO_RESUME || c == CO_STAT || c == CO_ACL); DefineOptionsGroup("xaction", istring("Other actions")); DefineOptionsGroup("filtering", istring("Brokering and filtering")); DefineOptionsGroup("format", istring("Output format modifiers")); DefineOptionsGroup("tuning", istring("Behaviour tuning")); DefineOptionsGroup("arc6-target", istring("ARC6 submission endpoint selection")); DefineOptionsGroup("legacy-target", istring("Legacy options set for defining targets")); if ( c == CO_RESUB || c == CO_SUB || c == CO_TEST || c == CO_SYNC ) { GroupAddOption("arc6-target", 'C', "computing-element", istring("specify computing element hostname or a complete endpoint URL"), istring("ce"), computing_elements); GroupAddOption("arc6-target", 'Y', "registry", istring("registry service URL with optional specification of protocol"), istring("registry"), registries); } if ( c == CO_RESUB || c == CO_SUB || c == CO_TEST ) { GroupAddOption("arc6-target", 'T', "submission-endpoint-type", istring("require the specified endpoint type for job submission.\n" "\tAllowed values are: arcrest, emies, gridftp or gridftpjob and internal."), istring("type"), requested_submission_endpoint_type); GroupAddOption("arc6-target", 'Q', "info-endpoint-type", istring("require information query using the specified information endpoint type.\n" "\tSpecial value 'NONE' will disable all resource information queries and the following brokering.\n" "\tAllowed values are: ldap.nordugrid, ldap.glue2, emies, arcrest and internal."), istring("type"), requested_info_endpoint_type); } if (c == CO_SUB || c == CO_TEST || c == CO_SYNC ) { GroupAddOption("legacy-target", 'c', "cluster", istring("select one or more computing elements: " "name can be an alias for a single CE, a group of CEs or a URL"), 
istring("name"), clusters); } else { GroupAddOption("filtering", 'c', "cluster", istring("only select jobs that were submitted to this resource"), istring("name"), clusters); } if (!cIsJobMan && c != CO_SYNC) { GroupAddOption("legacy-target", 'I', "infointerface", istring("the computing element specified by URL at the command line " "should be queried using this information interface.\n" "\tAllowed values are: org.nordugrid.ldapng, org.nordugrid.ldapglue2 and org.ogf.glue.emies.resourceinfo"), istring("interfacename"), infointerface); } if (c == CO_RESUB || c == CO_MIGRATE) { GroupAddOption("legacy-target", 'q', "qluster", istring("selecting a computing element for the new jobs with a URL or an alias, " "or selecting a group of computing elements with the name of the group"), istring("name"), qlusters); } if (c == CO_MIGRATE) { GroupAddOption("tuning", 'f', "force", istring("force migration, ignore kill failure"), forcemigration); } if (c == CO_GET || c == CO_KILL || c == CO_MIGRATE || c == CO_RESUB) { GroupAddOption("tuning", 'k', "keep", istring("keep the files on the server (do not clean)"), keep); } if (c == CO_SYNC) { GroupAddOption("tuning", 'f', "force", istring("do not ask for verification"), forcesync); GroupAddOption("tuning", 'T', "truncate", istring("truncate the joblist before synchronizing"), truncate); GroupAddOption("xaction", 0, "convert", istring("do not collect information, only convert jobs storage format"), convert); } if (c == CO_INFO || c == CO_STAT) { GroupAddOption("format", 'l', "long", istring("long format (more information)"), longlist); } if (c == CO_INFO) { GroupAddOption("xaction", 'L', "list-configured-services", istring("print a list of services configured in the client.conf"), list_configured_services); } if (c == CO_CAT) { GroupAddOption("xaction", 'o', "stdout", istring("show the stdout of the job (default)"), show_stdout); GroupAddOption("xaction", 'e', "stderr", istring("show the stderr of the job"), show_stderr); GroupAddOption("xaction", 'l', "joblog", istring("show the CE's error log of the job"), show_joblog); GroupAddOption("xaction", 'f', "file", istring("show the specified file from job's session directory"), istring("filepath"), show_file); } if (c == CO_GET) { GroupAddOption("tuning", 'D', "dir", istring("download directory (the job directory will" " be created in this directory)"), istring("dirname"), downloaddir); GroupAddOption("tuning", 'J', "usejobname", istring("use the jobname instead of the short ID as" " the job directory name"), usejobname); GroupAddOption("tuning", 'f', "force", istring("force download (overwrite existing job directory)"), forcedownload); } if (c == CO_STAT) { // Option 'long' takes precedence over this option (print-jobids). 
GroupAddOption("xaction", 'p', "print-jobids", istring("instead of the status only the IDs of " "the selected jobs will be printed"), printids); GroupAddOption("tuning", 'S', "sort", istring("sort jobs according to jobid, submissiontime or jobname"), istring("order"), sort); GroupAddOption("tuning", 'R', "rsort", istring("reverse sorting of jobs according to jobid, submissiontime or jobname"), istring("order"), rsort); GroupAddOption("tuning", 'u', "show-unavailable", istring("show jobs where status information is unavailable"), show_unavailable); GroupAddOption("format", 'J', "json", istring("show status information in JSON format"), show_json); } if (c == CO_RESUB) { GroupAddOption("filtering", 'm', "same", istring("resubmit to the same resource"), same); GroupAddOption("filtering", 'M', "not-same", istring("do not resubmit to the same resource"), notsame); } if (c == CO_CLEAN) { GroupAddOption("tuning", 'f', "force", istring("remove the job from the local list of jobs " "even if the job is not found in the infosys"), forceclean); } if (!cIsJobMan) { GroupAddOption("legacy-target", 'g', "index", istring("select one or more registries: " "name can be an alias for a single registry, a group of registries or a URL"), istring("name"), indexurls); } if (c == CO_TEST) { GroupAddOption("xaction", 'J', "job", istring("submit test job given by the number"), istring("int"), testjobid); GroupAddOption("xaction", 'r', "runtime", istring("test job runtime specified by the number"), istring("int"), runtime); } if (cIsJobMan || c == CO_RESUB) { GroupAddOption("filtering", 's', "status", istring("only select jobs whose status is statusstr"), istring("statusstr"), status); } if (cIsJobMan || c == CO_MIGRATE || c == CO_RESUB) { GroupAddOption("filtering", 'a', "all", istring("all jobs"), all); } if (c == CO_SUB) { GroupAddOption("tuning", 'e', "jobdescrstring", istring("jobdescription string describing the job to " "be submitted"), istring("string"), jobdescriptionstrings); GroupAddOption("tuning", 'f', "jobdescrfile", istring("jobdescription file describing the job to " "be submitted"), istring("string"), jobdescriptionfiles); } if (c == CO_MIGRATE || c == CO_RESUB || c == CO_SUB || c == CO_TEST) { GroupAddOption("filtering", 'b', "broker", istring("select broker method (list available brokers with --listplugins flag)"), istring("broker"), broker); GroupAddOption("tuning", 'o', "jobids-to-file", istring("the IDs of the submitted jobs will be appended to this file"), istring("filename"), jobidoutfile); GroupAddOption("legacy-target", 'S', "submissioninterface", istring("only use this interface for submitting.\n" "\tAllowed values are: org.nordugrid.gridftpjob or org.nordugrid.gridftp, org.ogf.glue.emies.activitycreation and org.nordugrid.internal"), istring("InterfaceName"), requestedSubmissionInterfaceName); } if (c == CO_MIGRATE || c == CO_RESUB || c == CO_SUB || c == CO_TEST || c == CO_INFO) { GroupAddOption("filtering", 'R', "rejectdiscovery", istring("skip the service with the given URL during service discovery"), istring("URL"), rejectdiscovery); } if (cIsJobMan || c == CO_MIGRATE || c == CO_RESUB) { GroupAddOption("tuning", 'i', "jobids-from-file", istring("a file containing a list of jobIDs"), istring("filename"), jobidinfiles); GroupAddOption("filtering", 'r', "rejectmanagement", istring("skip jobs that are on a computing element with a given URL"), istring("URL"), rejectmanagement); } if (c == CO_SUB || c == CO_TEST) { GroupAddOption("xaction", 'D', "dryrun", istring("submit jobs as dry run (no 
submission to batch system)"), dryrun); GroupAddOption("legacy-target", 0, "direct", istring("submit directly - no resource discovery or matchmaking"), direct_submission); GroupAddOption("xaction", 'x', "dumpdescription", istring("do not submit - dump job description " "in the language accepted by the target"), dumpdescription); } if (c == CO_INFO) { GroupAddOption("legacy-target", 'S', "submissioninterface", istring("only get information about executon targets that support this job submission interface.\n" "\tAllowed values are org.nordugrid.gridftpjob or org.nordugrid.gridftp, org.ogf.glue.emies.activitycreation and org.nordugrid.internal"), istring("InterfaceName"), requestedSubmissionInterfaceName); } if (c == CO_TEST) { GroupAddOption("xaction", 'E', "certificate", istring("prints info about installed user- and CA-certificates"), show_credentials); } if (c != CO_INFO) { GroupAddOption("tuning", 'j', "joblist", Arc::IString("the file storing information about active jobs (default %s)", Arc::UserConfig::JOBLISTFILE()).str(), istring("filename"), joblist); } /* --- Standard options below --- */ AddOption('z', "conffile", istring("configuration file (default ~/.arc/client.conf)"), istring("filename"), conffile); AddOption('t', "timeout", istring("timeout in seconds (default 20)"), istring("seconds"), timeout); GroupAddOption("xaction", 'P', "listplugins", istring("list the available plugins"), show_plugins); AddOption('d', "debug", istring("FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG"), istring("debuglevel"), debug); AddOption('v', "version", istring("print version information"), showversion); } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/utils.h0000644000000000000000000000013214152153376022561 xustar000000000000000030 mtime=1638455038.339644982 30 atime=1638455038.473646996 30 ctime=1638455101.088587814 nordugrid-arc-6.14.0/src/clients/compute/utils.h0000644000175000002070000002104014152153376022543 0ustar00mockbuildmock00000000000000#ifndef __ARC_CLEINT_COMPUTE_UTILS_H_ #define __ARC_CLEINT_COMPUTE_UTILS_H_ #include #include #include #include #include #include #include #include #include #include struct termios; // This class records current state of console // when created and recovers it when destroyed. // Its main purpose is to recover console in // case application had to cancel any UI actions // involving changing console state like // password input. class ConsoleRecovery { private: ConsoleRecovery(ConsoleRecovery const&); ConsoleRecovery& operator=(ConsoleRecovery const&); struct termios * ti; public: ConsoleRecovery(void); ~ConsoleRecovery(void); }; #ifdef TEST #define RUNMAIN(X) test_##X##_main #else #define RUNMAIN(X) X(int argc, char **argv); \ int main(int argc, char **argv) { int xr = 0; { ConsoleRecovery cr; xr = X(argc,argv); }; _exit(xr); return 0; } \ int X #endif /// Returns the URLs of computing elements selected by alias, group name, URL or the default ones /** This helper method gets a list of string representing computing elements. Each item of the list is either an alias of service configured in the UserConfig, a name of a group configured in the UserConfig, or a URL of service not configured in the UserConfig. If the list is empty, the default services will be selected from the UserConfig. The method returns the URLs of the selected services. This is meant to be used by the command line programs where the user is specifying a list of computing elements by alias, group name (which has to be looked up in the UserConfig), or by URL. 
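An illustrative sketch of how a caller might use it (the alias and the URL below are
hypothetical, and error handling is omitted):
\code
Arc::UserConfig usercfg("");                       // default client configuration
std::list<std::string> ces;
ces.push_back("my-ce");                            // hypothetical alias defined in client.conf
ces.push_back("https://ce.example.org:443/arex");  // plain URL, used as given
std::list<std::string> urls =
    getSelectedURLsFromUserConfigAndCommandLine(usercfg, ces);
\endcode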
\param[in] usercfg is the UserConfig object containing information about configured services
\param[in] computingelements is a list of strings containing aliases, group names, or URLs of computing elements
\return a list of URL strings, the endpoints of the selected services, or the default ones if none was selected
*/
std::list<std::string> getSelectedURLsFromUserConfigAndCommandLine(Arc::UserConfig usercfg, std::list<std::string> computingelements);

/// Combine the list of rejected discovery URLs from the UserConfig with the ones specified in a list
/**
This is a helper method for the command line programs to combine the list of rejected discovery URLs
specified by the user at the command line with the ones configured in the UserConfig.
The rejected discovery URLs are supposed to cause the service discovery not to discover
computing elements whose URL matches any of these strings.
\param[in] usercfg is the UserConfig object containing information about configured services
\param[in] rejectdiscovery is a list of strings, which will also be added to the resulting list besides the ones from the UserConfig
\return a list of strings which are the rejected URLs from the UserConfig and the ones given as the second argument combined
*/
std::list<std::string> getRejectDiscoveryURLsFromUserConfigAndCommandLine(Arc::UserConfig usercfg, std::list<std::string> rejectdiscovery);

/// Combine the list of rejected management URLs from the UserConfig with the ones specified in a list
/**
This is a helper method for the command line programs to combine the list of rejected management URLs
specified by the user at the command line with the ones configured in the UserConfig.
The rejected management URLs are supposed to cause the job management commands not to manage
jobs which reside on computing elements whose URL matches any of the items in the list.
\param[in] usercfg is the UserConfig object containing information about configured services
\param[in] rejectmanagement is a list of strings, which will also be added to the resulting list besides the ones from the UserConfig
\return a list of strings which are the rejected URLs from the UserConfig and the ones given as the second argument combined
*/
std::list<std::string> getRejectManagementURLsFromUserConfigAndCommandLine(Arc::UserConfig usercfg, std::list<std::string> rejectmanagement);

/// Looks up or creates Endpoints from strings specified at the command line using the information from the UserConfig
/**
This helper method gets a list of strings representing service registries and computing elements,
along with a requested submission interface, looks up all the services from the UserConfig, and
returns the Endpoints found there, or creates new Endpoints for services not found in the UserConfig.
If there are no registries or computing elements given, then the default services will be returned.
This is meant to be used by the command line programs where the user is specifying service registries
and/or computing elements with several strings, which could refer to services configured in the
UserConfig (aliases or groups), or they can be URLs referring to services which are not configured
in the UserConfig. This method looks up the aliases and group names, and if a string is not an alias
or a group name, then it is assumed to be a URL.
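A brief sketch along the same lines (the registry alias and CE URL are illustrative only):
\code
Arc::UserConfig usercfg("");
std::list<std::string> registries;
registries.push_back("my-registry");               // hypothetical registry alias from client.conf
std::list<std::string> ces;
ces.push_back("https://ce.example.org:443/arex");  // computing element given directly as a URL
std::list<Arc::Endpoint> endpoints =
    getServicesFromUserConfigAndCommandLine(usercfg, registries, ces);
\endcode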
\param[in] usercfg is the UserConfig object containing information about configured services \param[in] registries is a list of strings containing aliases, group names, or URLs of service registries \param[in] computingelements is a list of strings containing aliases, group names, or URLs of computing elements \return a list of Endpoint objects containing the services corresponding the given strings or the default services. */ std::list getServicesFromUserConfigAndCommandLine(Arc::UserConfig usercfg, std::list registries, std::list computingelements, std::string requestedSubmissionInterfaceName = "", std::string infointerface = ""); void showplugins(const std::string& program, const std::list& types, Arc::Logger& logger, const std::string& chosenBroker = ""); bool checkproxy(const Arc::UserConfig& uc); bool jobneedsproxy(const Arc::JobDescription& job); void splitendpoints(std::list& selected, std::list& rejected); /** * Creates a new JobInformationStorage object. Caller has responsibility of * deleting returned object. */ Arc::JobInformationStorage* createJobInformationStorage(const Arc::UserConfig& uc); class ClientOptions : public Arc::OptionParser { public: enum Client_t { CO_SUB, CO_MIGRATE, CO_RESUB, CO_TEST, CO_CAT, CO_CLEAN, CO_GET, CO_KILL, CO_RENEW, CO_RESUME, CO_STAT, CO_SYNC, CO_INFO, CO_ACL }; ClientOptions(Client_t c, const std::string& arguments = "", const std::string& summary = "", const std::string& description = ""); /// Returns the boolean value indication whether new ARC6 set of target selection options are in use bool isARC6TargetSelectionOptions(Arc::Logger& logger, bool allow_cluster = false); /// Implement ARC consistent info/submission endpoint types logic bool canonicalizeARC6InterfaceTypes(Arc::Logger& logger); bool dryrun; bool dumpdescription; bool show_credentials; bool show_plugins; bool showversion; bool all; bool forcemigration; bool keep; bool forcesync; bool truncate; bool convert; bool longlist; bool printids; bool same; bool notsame; bool forceclean; bool show_stdout; bool show_stderr; bool show_joblog; bool show_json; bool usejobname; bool forcedownload; bool list_configured_services; bool direct_submission; bool show_unavailable; int testjobid; int runtime; int timeout; std::string show_file; std::string joblist; std::string jobidoutfile; std::string conffile; std::string debug; std::string broker; std::string sort; std::string rsort; std::string downloaddir; std::string requestedSubmissionInterfaceName; std::string infointerface; std::list clusters; std::list qlusters; std::list indexurls; std::list jobdescriptionstrings; std::list jobdescriptionfiles; std::list jobidinfiles; std::list status; std::list rejectdiscovery; std::list rejectmanagement; // arc6 consistent, intuitive and streamlined target selection: // command line options std::list computing_elements; std::list registries; std::string requested_submission_endpoint_type; std::string requested_info_endpoint_type; // post-processed interface types std::list submit_types; std::list info_types; }; #endif // __ARC_CLEINT_COMPUTE_UTILS_H_ nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/submit.h0000644000000000000000000000013214152153376022724 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 30 ctime=1638455101.095587919 nordugrid-arc-6.14.0/src/clients/compute/submit.h0000644000175000002070000000645314152153376022721 0ustar00mockbuildmock00000000000000#ifndef __ARC_CLEINT_COMPUTE_SUBMIT_COMMON_H_ #define __ARC_CLEINT_COMPUTE_SUBMIT_COMMON_H_ #include 
#include #include #include #include #include #include #include #include #include #include #include "utils.h" int process_submission_status(Arc::SubmissionStatus status, const Arc::UserConfig& usercfg); void check_missing_plugins(Arc::Submitter s, int is_error); int legacy_submit(const Arc::UserConfig& usercfg, const std::list& jobdescriptionlist, std::list& services, const std::string& requestedSubmissionInterface, const std::string& jobidfile, bool direct_submission); int dumpjobdescription(const Arc::UserConfig& usercfg, const std::list& jobdescriptionlist, const std::list& services, const std::string& requestedSubmissionInterface); /// Implements ARC6 logic of targets selection based on info/submit types requested /** This helper method process requested types, computing elements and registry and defines the endpoint batches for submission tries. \param[in] usercfg is the UserConfig object containing information about configured services \param[in] opt ClientOptions object containing request options \param endpoint_batches list of lists of Endpoint objects \return a bool indicating the need of target information lookup versus direct submission. */ bool prepare_submission_endpoint_batches(const Arc::UserConfig& usercfg, const ClientOptions& opt, std::list >& endpoint_batches); /// Submit job using defined endpoint batches and submission type /** This helper method try to submit jobs to the list of endpoint batches with (brokering) or without inforamtion quueries \param[in] usercfg is the UserConfig object containing information about configured services \param[in] endpoint_batches list of lists of Endpoint objects \param[in] info_discovery boolean indicating the need or inforamtion quueries and brokering \param[in] jobidoutfile Path to file to store jobids \param[in] jobdescriptionlist list of job descriptions to submit \return a bool indicating the need of target information lookup versus direct submission. */ int submit_jobs(const Arc::UserConfig& usercfg, const std::list >& endpoint_batches, bool info_discovery, const std::string& jobidfile, const std::list& jobdescriptionlist); /// Class to handle submitted job and present the results to user class HandleSubmittedJobs : public Arc::EntityConsumer { public: HandleSubmittedJobs(const std::string& jobidfile, const Arc::UserConfig& uc) : jobidfile(jobidfile), uc(uc), submittedJobs() {} ~HandleSubmittedJobs() {} void addEntity(const Arc::Job& j); void write() const; void printsummary(const std::list& originalDescriptions, const std::list& notsubmitted) const; void clearsubmittedjobs() { submittedJobs.clear(); } private: const std::string jobidfile; const Arc::UserConfig& uc; std::list submittedJobs; }; #endif // __ARC_CLEINT_COMPUTE_SUBMIT_COMMON_H_ nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arccat.1.in0000644000000000000000000000013214152153376023174 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.472646981 30 ctime=1638455101.080587694 nordugrid-arc-6.14.0/src/clients/compute/arccat.1.in0000644000175000002070000001111314152153376023156 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCCAT 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arccat \- ARC Catenate .SH DESCRIPTION The .B arccat command can be used to view the stdout or stderr of a running job. It can also be used to show A-REX's error log of a job. .SH SYNOPSIS .B arccat [options] [job ...] 
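.SH EXAMPLE
For instance, with a hypothetical job ID, the standard error of a job could be inspected with:
.PP
.B arccat --stderr https://ce.example.org:443/arex/abc123jobid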
.SH OPTIONS .IP "\fB-a\fR, \fB--all\fR" all jobs .IP "\fB-j\fR, \fB--joblist\fR=\fIfilename\fR" the file storing information about active jobs (default ~/.arc/jobs.xml) .IP "\fB-i\fR, \fB--jobids-from-file\fR=\fIfilename\fR" a file containing a list of jobIDs .IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR" select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-r\fR, \fB--rejectmanagement\fR=\fIURL\fR" skip jobs which are on a computing element with a given URL .IP "\fB-s\fR, \fB--status\fR=\fIstatusstr\fR" only select jobs whose status is statusstr .IP "\fB-o\fR, \fB--stdout\fR" show the stdout of the job (default) .IP "\fB-e\fR, \fB--stderr\fR" show the stderr of the job .IP "\fB-l\fR, \fB--joblog\fR" show A-REX's error log of the job .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .LP .SH ARGUMENTS .IP "\fBjob\fR ..." list of jobids and/or jobnames .LP .SH EXTENDED DESCRIPTION The .B arccat command displays the stdout or stderr of running jobs. It can also display A-REX's error log of a job. The job can be referred to either by the jobid that was returned by .BR arcsub (1) at submission time or by its jobname if the job description that was submitted contained a jobname attribute. More than one jobid and/or jobname can be given. If several jobs were submitted with the same jobname the stdout, stderr or A-REX error log of all those jobs are shown. If the .B --joblist option is used the list of jobs is read from a file with the specified filename. By specifying the .B --all option, the stdout, stderr or A-REX error log of all active jobs will be shown. The .B --cluster option can be used to select or reject jobs at specific clusters. See .BR arcsub (1) for a discussion of the format of arguments to this option. The .B --status option can be used to select jobs in a specific state. These options can be repeated several times. See .BR arstat (1) for possible state values. Only jobs where the stdout or stderr argument was given in the job description can display the contents of those files. .SH FILES .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. By using the .B --conffile option a different configuration file can be used than the default. .TP .B ~/.arc/jobs.xml This a local list of the user's active jobs. When a job is successfully submitted it is added to this list and when it is removed from the remote cluster it is removed from this list. This list is used as the list of all active jobs when the user specifies the .B --all option to the various NorduGrid ARC user interface commands. By using the .B --joblist option a different file can be used than the default. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. 
If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arcclean (1), .BR arccp (1), .BR arcget (1), .BR arcinfo (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcrenew (1), .BR arcresub (1), .BR arcresume (1), .BR arcrm (1), .BR arcstat (1), .BR arcsub (1), .BR arcsync (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcclean.1.in0000644000000000000000000000013214152153376023507 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.473646996 30 ctime=1638455101.078587664 nordugrid-arc-6.14.0/src/clients/compute/arcclean.1.in0000644000175000002070000001104314152153376023473 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCCLEAN 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcclean \- ARC Clean .SH DESCRIPTION The .B arcclean command removes a job from the remote cluster. .SH SYNOPSIS .B arcclean [options] [job ...] .SH OPTIONS .IP "\fB-a\fR, \fB--all\fR" all jobs .IP "\fB-j\fR, \fB--joblist\fR=\fIfilename\fR" the file storing information about active jobs (default ~/.arc/jobs.xml) .IP "\fB-i\fR, \fB--jobids-from-file\fR=\fIfilename\fR" a file containing a list of jobIDs .IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR" select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-r\fR, \fB--rejectmanagement\fR=\fIURL\fR" skip jobs which are on a computing element with a given URL .IP "\fB-s\fR, \fB--status\fR=\fIstatusstr\fR" only select jobs whose status is statusstr .IP "\fB-f\fR, \fB--force\fR" removes the job from the local list of jobs even if the job is not found in the cluster's information system .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .LP .SH ARGUMENTS .IP "\fBjob\fR ..." list of jobids and/or jobnames .LP .SH EXTENDED DESCRIPTION The .B arcclean command removes a job from the remote cluster. Only jobs that have finished can be removed. The job can be referred to either by the jobid that was returned by .BR arcsub (1) at submission time or by its jobname if the job description that was submitted contained a jobname attribute. More than one jobid and/or jobname can be given. If several jobs were submitted with the same jobname all those jobs are removed. If the .B --joblist option is used the list of jobs is read from a file with the specified filename. By specifying the .B --all option, all active jobs can be removed. The .B --cluster option can be used to select or reject jobs at specific clusters. 
See .BR arcsub (1) for a discussion of the format of arguments to this option. The .B --status option can be used to select jobs in a specific state. These options can be repeated several times. See .BR arstat (1) for possible state values. The .B --force option removes the job from your local list of jobs even if the job can not be found in the remote information system. Jobs not appearing in the remote information system can also be removed from the local list by specifying the .B --status option with value \fBUndefined\fR. .SH FILES .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. By using the .B --conffile option a different configuration file can be used than the default. .TP .B ~/.arc/jobs.xml This a local list of the user's active jobs. When a job is successfully submitted it is added to this list and when it is removed from the remote cluster it is removed from this list. This list is used as the list of all active jobs when the user specifies the .B --all option to the various NorduGrid ARC user interface commands. By using the .B --joblist option a different file can be used than the default. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. 
Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arccp (1), .BR arcget (1), .BR arcinfo (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcrenew (1), .BR arcresub (1), .BR arcresume (1), .BR arcrm (1), .BR arcstat (1), .BR arcsub (1), .BR arcsync (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcinfo.cpp0000644000000000000000000000013214152153376023375 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.473646996 30 ctime=1638455101.091587859 nordugrid-arc-6.14.0/src/clients/compute/arcinfo.cpp0000644000175000002070000001731414152153376023370 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include "utils.h" int RUNMAIN(arcinfo)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::Logger logger(Arc::Logger::getRootLogger(), "arcinfo"); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_INFO, istring("[resource ...]"), istring("The arcinfo command is used for " "obtaining the status of computing " "resources on the Grid.")); { std::list clusterstmp = opt.Parse(argc, argv); opt.clusters.insert(opt.clusters.end(), clusterstmp.begin(), clusterstmp.end()); } if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arcinfo", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); if (opt.show_plugins) { std::list types; types.push_back("HED:ServiceEndpointRetrieverPlugin"); types.push_back("HED:TargetInformationRetrieverPlugin"); showplugins("arcinfo", types, logger); return 0; } Arc::UserConfig usercfg(opt.conffile); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if (opt.list_configured_services) { std::map allServices = usercfg.GetAllConfiguredServices(); std::cout << "Configured registries:" << std::endl; for (std::map::const_iterator it = allServices.begin(); it != allServices.end(); ++it) { if (it->second.type == Arc::ConfigEndpoint::REGISTRY) { std::cout << " " << it->first << ": " << it->second.URLString; if (!it->second.InterfaceName.empty()) { std::cout << " (" << it->second.InterfaceName << ")"; } std::cout << std::endl; } } std::cout << "Configured computing elements:" << std::endl; for (std::map::const_iterator it = allServices.begin(); it != allServices.end(); ++it) { if (it->second.type == Arc::ConfigEndpoint::COMPUTINGINFO) { std::cout << " " << it->first << ": " << it->second.URLString; if (!it->second.InterfaceName.empty() || !it->second.RequestedSubmissionInterfaceName.empty()) { std::cout << " (" << it->second.InterfaceName; if (!it->second.InterfaceName.empty() && !it->second.RequestedSubmissionInterfaceName.empty()) { std::cout << " / "; } std::cout << it->second.RequestedSubmissionInterfaceName + ")"; } std::cout << std::endl; } } return 0; } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); if (opt.timeout > 
0) usercfg.Timeout(opt.timeout); std::list endpoints = getServicesFromUserConfigAndCommandLine(usercfg, opt.indexurls, opt.clusters, opt.requestedSubmissionInterfaceName, opt.infointerface); std::set preferredInterfaceNames; if (usercfg.InfoInterface().empty()) { preferredInterfaceNames.insert("org.nordugrid.ldapglue2"); } else { preferredInterfaceNames.insert(usercfg.InfoInterface()); } std::list rejectDiscoveryURLs = getRejectDiscoveryURLsFromUserConfigAndCommandLine(usercfg, opt.rejectdiscovery); Arc::ComputingServiceUniq csu; Arc::ComputingServiceRetriever csr(usercfg, std::list(), rejectDiscoveryURLs, preferredInterfaceNames); csr.addConsumer(csu); for (std::list::const_iterator it = endpoints.begin(); it != endpoints.end(); ++it) { csr.addEndpoint(*it); } csr.wait(); std::list services = csu.getServices(); for (std::list::const_iterator it = services.begin(); it != services.end(); ++it) { if (opt.longlist) { if (it != services.begin()) std::cout << std::endl; std::cout << *it; std::cout << std::flush; } else { std::cout << "Computing service: " << (**it).Name; if (!(**it).QualityLevel.empty()) { std::cout << " (" << (**it).QualityLevel << ")"; } std::cout << std::endl; std::stringstream infostream, submissionstream; for (std::map::const_iterator itCE = it->ComputingEndpoint.begin(); itCE != it->ComputingEndpoint.end(); ++itCE) { if (itCE->second->Capability.count(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::COMPUTINGINFO))) { infostream << " " << Arc::IString("Information endpoint") << ": " << itCE->second->URLString; if ( !itCE->second->InterfaceName.empty() ) { infostream << " (" << itCE->second->InterfaceName << ")"; } infostream << std::endl; } if (itCE->second->Capability.empty() || itCE->second->Capability.count(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::JOBSUBMIT)) || itCE->second->Capability.count(Arc::Endpoint::GetStringForCapability(Arc::Endpoint::JOBCREATION))) { submissionstream << " "; submissionstream << Arc::IString("Submission endpoint") << ": "; submissionstream << itCE->second->URLString; submissionstream << " (" << Arc::IString("status") << ": "; submissionstream << itCE->second->HealthState << ", "; submissionstream << Arc::IString("interface") << ": "; submissionstream << itCE->second->InterfaceName << ")" << std::endl; } } std::cout << infostream.str() << submissionstream.str(); } } bool anEndpointFailed = false; // Check if querying endpoint succeeded. 
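// Note on the loop below: every requested endpoint is looked up in the retriever's
// status map, and any endpoint whose query neither succeeded nor was suspended as
// not required is reported to stderr; if at least one endpoint failed in this way
// the command exits with a non-zero status.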
Arc::EndpointStatusMap statusMap = csr.getAllStatuses(); for (std::list::const_iterator it = endpoints.begin(); it != endpoints.end(); ++it) { Arc::EndpointStatusMap::const_iterator itStatus = statusMap.find(*it); if (itStatus != statusMap.end() && itStatus->second != Arc::EndpointQueryingStatus::SUCCESSFUL && itStatus->second != Arc::EndpointQueryingStatus::SUSPENDED_NOTREQUIRED) { if (!anEndpointFailed) { anEndpointFailed = true; std::cerr << Arc::IString("ERROR: Failed to retrieve information from the following endpoints:") << std::endl; } std::cerr << " " << it->URLString; if (!itStatus->second.getDescription().empty()) { std::cerr << " (" << itStatus->second.getDescription() << ")"; } std::cerr << std::endl; } } if (anEndpointFailed) return 1; if (services.empty()) { std::cerr << Arc::IString("ERROR: Failed to retrieve information"); if (!endpoints.empty()) { std::cerr << " " << Arc::IString("from the following endpoints:") << std::endl; for (std::list::const_iterator it = endpoints.begin(); it != endpoints.end(); ++it) { std::cerr << " " << it->URLString << std::endl; } } else { std::cerr << std::endl; } return 1; } return 0; } nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcstat.1.in0000644000000000000000000000013214152153376023400 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 30 ctime=1638455101.075587619 nordugrid-arc-6.14.0/src/clients/compute/arcstat.1.in0000644000175000002070000001536114152153376023373 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCSTAT 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcstat \- ARC Status .SH DESCRIPTION The .B arcstat command is used for obtaining the status of jobs that have been submitted to grid enabled resources. .SH SYNOPSIS .B arcstat [options] [job ...] .SH OPTIONS .IP "\fB-a\fR, \fB--all\fR" all jobs .IP "\fB-j\fR, \fB--joblist\fR=\fIfilename\fR" the file storing information about active jobs (default ~/.arc/jobs.xml) .IP "\fB-i\fR, \fB--jobids-from-file\fR=\fIfilename\fR" a file containing a list of jobIDs .IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR" select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-r\fR, \fB--rejectmanagement\fR=\fIURL\fR" skip jobs which are on a computing element with a given URL .IP "\fB-s\fR, \fB--status\fR=\fIstatusstr\fR" only select jobs whose status is statusstr .IP "\fB-l\fR, \fB--long\fR" long format (more information) .IP "\fB-J\fR, \fB--json\fR" output information about jobs in JSON format .IP "\fB-S\fR, \fB--sort\fR" sort jobs according to jobid, submissiontime or jobname .IP "\fB-R\fR, \fB--rsort\fR" reverse sorting of jobs according to jobid, submissiontime or jobname .IP "\fB-u\fR, \fB--show-unavailable\fR" show jobs where status information is unavailable .IP "\fB-p\fR, \fB--print-jobids\fR" instead of the status only the IDs of the selected jobs will be printed .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .LP .SH ARGUMENTS .IP "\fBjob\fR ..." 
list of jobids and/or jobnames
.LP
.SH EXTENDED DESCRIPTION
The
.B arcstat
command gives the status of a job submitted to an ARC enabled resource.
The job can be referred to either by the jobid that was returned by
.BR arcsub (1)
at submission time or by its jobname if the job description that was submitted
contained a jobname attribute.
More than one jobid and/or jobname can be given. If several jobs were submitted
with the same jobname the status of all those jobs is shown.
If the
.B --joblist
option is used the list of jobs is read from a file with the specified filename.
By specifying the
.B --all
option, the status of all active jobs will be shown.
By default
.B arcstat
presents job states as defined internally followed by the middleware specific
representation of the job state in brackets.
The following internal job states are defined:

.B Accepted
- job accepted on cluster but not being processed yet

.B Preparing
- job is in phase of preparing for submission to batch system

.B Submitting
- communication with batch system is ongoing

.B Hold
- job's processing is suspended due to internal reason or user request

.B Queuing
- job is passed to batch system but not being executed yet

.B Running
- job being executed in batch system

.B Finishing
- job is in phase of post-execution procedures being run

.B Finished
- job successfully completed all processing phases

.B Killed
- job processing was interrupted by user request

.B Failed
- job processing was interrupted due to detected failure

.B Deleted
- job was removed from cluster (usually because it stayed there too long)

.B Other
- middleware specific job state could not be adequately mapped to internal state

Those are also the states which are used by
.BR arccat (1),
.BR arcclean (1),
.BR arcget (1),
.BR arckill (1),
.BR arcrenew (1),
.BR arcresub (1),
.BR arcresume (1)
to perform job filtering.
If the
.B --long
option is given more detailed information is shown.
Jobs can be sorted according to the jobid, submissiontime or jobname,
either in normal or reverse order.
By using the
.B --sort
or
.B --rsort
option followed by the desired ordering ('jobid', 'submissiontime' or 'jobname'),
jobs will be sorted in normal or reverse order.
Note that the options
.B --sort
and
.B --rsort
cannot be used at the same time.
The
.B --cluster
option can be used to select or reject jobs at specific clusters.
See
.BR arcsub (1)
for a discussion of the format of arguments to this option.
The
.B --status
option can be used to select jobs in a specific state.
These options can be repeated several times.
The
.B arcstat
command locates the available clusters by querying information index servers.
Which servers to query can be specified by giving the
.B --index
option to the command.
See
.BR arcsub (1)
for a discussion of the format of arguments to this option.
Also in this case the
.B --long
option can be used to obtain more detailed information.
Previously the
.B arcstat
command was also used to query information of clusters and/or index servers.
This functionality has been moved to the new command
.B arcinfo .
.SH FILES
.TP
.B ~/.arc/client.conf
Some options can be given default values by specifying them in the ARC client
configuration file.
By using the
.B --conffile
option a different configuration file can be used than the default.
.TP
.B ~/.arc/jobs.xml
This is a local list of the user's active jobs.
When a job is successfully submitted it is added to this list and when it is
removed from the remote cluster it is removed from this list.
This list is used as the list of all active jobs when the user specifies the .B --all option to the various NorduGrid ARC user interface commands. By using the .B --joblist option a different file can be used than the default. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arcclean (1), .BR arccp (1), .BR arcget (1), .BR arcinfo (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcrenew (1), .BR arcresub (1), .BR arcresume (1), .BR arcrm (1), .BR arcsub (1), .BR arcsync (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcresume.1.in0000644000000000000000000000013214152153376023725 xustar000000000000000030 mtime=1638455038.338644968 30 atime=1638455038.473646996 30 ctime=1638455101.084587754 nordugrid-arc-6.14.0/src/clients/compute/arcresume.1.in0000644000175000002070000001033514152153376023714 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCRESUME 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcresume \- ARC Resume .SH DESCRIPTION The .B arcresume command is used for resuming a job that was submitted to grid enabled resources and then subsequently failed. The job will be resumed at the last ok state reported by the cluster. .SH SYNOPSIS .B arcresume [options] [job ...] .SH OPTIONS .IP "\fB-a\fR, \fB--all\fR" all jobs .IP "\fB-j\fR, \fB--joblist\fR=\fIfilename\fR" the file storing information about active jobs (default ~/.arc/jobs.xml) .IP "\fB-i\fR, \fB--jobids-from-file\fR=\fIfilename\fR" a file containing a list of jobIDs .IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR" select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-r\fR, \fB--rejectmanagement\fR=\fIURL\fR" skip jobs which are on a computing element with a given URL .IP "\fB-s\fR, \fB--status\fR=\fIstatusstr\fR" only select jobs whose status is statusstr .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .LP .SH ARGUMENTS .IP "\fBjob\fR ..." list of jobids and/or jobnames .LP .SH EXTENDED DESCRIPTION The .B arcresume command resumes a job submitted an ARC enabled resource. 
The job can be referred to either by the jobid that was returned by .BR arcsub (1) at submission time or by its jobname if the job description that was submitted contained a jobname attribute. More than one jobid and/or jobname can be given. If several jobs were submitted with the same jobname all those jobs are resumed. If the .B --joblist option is used the list of jobs is read from a file with the specified filename. By specifying the .B --all option, all active jobs will be resumed. The .B --cluster option can be used to select or reject jobs at specific clusters. See .BR arcsub (1) for a discussion of the format of arguments to this option. The .B --status option can be used to select jobs in a specific state. These options can be repeated several times. See .BR arcstat (1) for possible states values. .SH FILES .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. By using the .B --conffile option a different configuration file can be used than the default. .TP .B ~/.arc/jobs.xml This a local list of the user's active jobs. When a job is successfully submitted it is added to this list and when it is removed from the remote cluster it is removed from this list. This list is used as the list of all active jobs when the user specifies the .B --all option to the various NorduGrid ARC user interface commands. By using the .B --joblist option a different file can be used than the default. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. If not specified the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arcclean (1), .BR arccp (1), .BR arcget (1), .BR arcinfo (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcrenew (1), .BR arcresub (1), .BR arcrm (1), .BR arcstat (1), .BR arcsub (1), .BR arcsync (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcinfo.1.in0000644000000000000000000000013214152153376023360 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.473646996 30 ctime=1638455101.076587634 nordugrid-arc-6.14.0/src/clients/compute/arcinfo.1.in0000644000175000002070000001020114152153376023337 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCINFO 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcinfo \- ARC Info .SH DESCRIPTION The .B arcinfo command is used for obtaining status and information of clusters on the grid. .SH SYNOPSIS .B arcinfo [options] [cluster ...] 
.SH OPTIONS
.IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR"
select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL
.IP "\fB-I\fR, \fB--infointerface\fR=\fIInterfaceName\fR"
the computing element specified by URL at the command line should be queried using this information interface
(possible options: org.nordugrid.ldapng, org.nordugrid.ldapglue2, org.nordugrid.wsrfglue2, org.ogf.glue.emies.resourceinfo)
.IP "\fB-g\fR, \fB--index\fR=\fIname\fR"
select one or more registries: \fIname\fR can be an alias for a single registry, a group of registries or a URL
.IP "\fB-R\fR, \fB--rejectdiscovery\fR=\fIURL\fR"
skip the service with the given URL during service discovery
.IP "\fB-S\fR, \fB--submissioninterface\fR=\fIInterfaceName\fR"
only get information about execution targets which support this job submission interface
(e.g. org.nordugrid.gridftpjob, org.ogf.glue.emies.activitycreation, org.nordugrid.xbes)
.IP "\fB-l\fR, \fB--long\fR"
long format (more information)
.IP "\fB-L\fR, \fB--list-configured-services\fR"
print a list of services configured in the client.conf
.IP "\fB-P\fR, \fB--listplugins\fR"
list the available plugins
.IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR"
timeout in seconds (default 20)
.IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR"
configuration file (default ~/.arc/client.conf)
.IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR"
FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG
.IP "\fB-v\fR, \fB--version\fR"
print version information
.IP "\fB-?\fR, \fB--help\fR"
print help
.LP
.SH EXTENDED DESCRIPTION
The
.B arcinfo
command is used to get the status and information of clusters and queues available on the grid.
You can specify the URLs of clusters with the
.B --cluster
option, or by just listing them as arguments.
The
.B --index
flag can be used to specify an index server which should be queried for clusters.
Both of these flags take a service endpoint as argument.
See
.BR arcsub (1)
for a discussion of this format.
Detailed information about queried computing services can be obtained by specifying the
.B --long
flag.
When specifying the
.B --index
flag, the information about the computing services registered at the index server will be
queried rather than the status of the index server itself.
Currently no command exists to query an index server.
.SH FILES
.TP
.B ~/.arc/client.conf
Some options can be given default values by specifying them in the ARC client configuration file.
By using the
.B --conffile
option a different configuration file can be used than the default.
.SH ENVIRONMENT VARIABLES
.TP
.B X509_USER_PROXY
The location of the user's Grid proxy file.
Shouldn't be set unless the proxy is in a non-standard location.
Note that this could also be set in the client configuration file, however the environment
variable overrides the settings in configuration.
.TP
.B ARC_LOCATION
The location where ARC is installed can be specified by this variable.
If not specified the install location will be determined from the path to the command being
executed, and if this fails a WARNING will be given stating the location which will be used.
.TP
.B ARC_PLUGIN_PATH
The location of ARC plugins can be specified by this variable.
Multiple locations can be specified by separating them by : (; in Windows).
The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows).
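.SH EXAMPLE
A typical invocation queries a single computing element given directly on the command line
(the host name below is only an illustration):
.PP
.B arcinfo ce.example.org
.PP
Adding the
.B --long
option prints more detailed information about each queried service.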
.SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arcclean (1), .BR arccp (1), .BR arcget (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcrenew (1), .BR arcresub (1), .BR arcresume (1), .BR arcrm (1), .BR arcstat (1), .BR arcsub (1), .BR arcsync (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcget.1.in0000644000000000000000000000013214152153376023204 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.473646996 30 ctime=1638455101.080587694 nordugrid-arc-6.14.0/src/clients/compute/arcget.1.in0000644000175000002070000001150514152153376023173 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCGET 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcget \- ARC Get .SH DESCRIPTION The .B arcget command is used for retrieving the results from a job. .SH SYNOPSIS .B arcget [options] [job ...] .SH OPTIONS .IP "\fB-a\fR, \fB--all\fR" all jobs .IP "\fB-j\fR, \fB--joblist\fR=\fIfilename\fR" the file storing information about active jobs (default ~/.arc/jobs.xml) .IP "\fB-i\fR, \fB--jobids-from-file\fR=\fIfilename\fR" a file containing a list of jobIDs .IP "\fB-c\fR, \fB--cluster\fR=\fIname\fR" select one or more computing elements: \fIname\fR can be an alias for a single CE, a group of CEs or a URL .IP "\fB-r\fR, \fB--rejectmanagement\fR=\fIURL\fR" skip jobs which are on a computing element with a given URL .IP "\fB-s\fR, \fB--status\fR=\fIstatusstr\fR" only select jobs whose status is statusstr .IP "\fB-D\fR, \fB--dir\fR=\fIdirname\fR" download directory (the job directory will be created in this directory) .IP "\fB-J\fR, \fB--usejobname\fR" use the jobname instead of the short ID as the job directory name .IP "\fB-k\fR, \fB--keep\fR" keep files on the remote cluster (do not clean) .IP "\fB-f\fR, \fB--force\fR" force download (overwrite existing job directory) .IP "\fB-P\fR, \fB--listplugins\fR" list the available plugins .IP "\fB-t\fR, \fB--timeout\fR=\fIseconds\fR" timeout in seconds (default 20) .IP "\fB-z\fR, \fB--conffile\fR=\fIfilename\fR" configuration file (default ~/.arc/client.conf) .IP "\fB-d\fR, \fB--debug\fR=\fIdebuglevel\fR" FATAL, ERROR, WARNING, INFO, VERBOSE or DEBUG .IP "\fB-v\fR, \fB--version\fR" print version information .IP "\fB-?\fR, \fB--help\fR" print help .LP .SH ARGUMENTS .IP "\fBjob\fR ..." list of jobids and/or jobnames .LP .SH EXTENDED DESCRIPTION The .B arcget command downloads the results after a job has completed on an grid enabled computing resource. Only the results of jobs that have finished can be downloaded. The job can be referred to either by the jobid that was returned by .BR arcsub (1) at submission time or by its jobname if the job description that was submitted contained a jobname attribute. More than one jobid and/or jobname can be given. If several jobs were submitted with the same jobname the results of all those jobs are downloaded. If the .B --joblist option is used the list of jobs is read from a file with the specified filename. By specifying the .B --all option, the results of all active jobs are downloaded. The .B --cluster option can be used to select or reject jobs at specific clusters. See .BR arcsub (1) for a discussion of the format of arguments to this option. 
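.PP
As an illustration (host name and directory are placeholders), the results of all active jobs
on one cluster could be fetched into a common directory with:
.PP
.B arcget -a -c ce.example.org -D results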
The .B --status option can be used to select jobs in a specific state. These options can be repeated several times. See .BR arcstat (1) for possible state values. For each job that is downloaded a subdirectory will be created in the download directory that will contain the downloaded files. If the download was successful the job will be removed from the remote cluster unless the .B --keep option was specified. .SH FILES .TP .B ~/.arc/client.conf Some options can be given default values by specifying them in the ARC client configuration file. By using the .B --conffile option a different configuration file can be used than the default. .TP .B ~/.arc/jobs.xml This is a local list of the user's active jobs. When a job is successfully submitted it is added to this list and when it is removed from the remote cluster it is removed from this list. This list is used as the list of all active jobs when the user specifies the .B --all option to the various NorduGrid ARC user interface commands. By using the .B --joblist option a different file can be used than the default. .SH ENVIRONMENT VARIABLES .TP .B X509_USER_PROXY The location of the user's Grid proxy file. Shouldn't be set unless the proxy is in a non-standard location. .TP .B ARC_LOCATION The location where ARC is installed can be specified by this variable. If not specified, the install location will be determined from the path to the command being executed, and if this fails a WARNING will be given stating the location which will be used. .TP .B ARC_PLUGIN_PATH The location of ARC plugins can be specified by this variable. Multiple locations can be specified by separating them by : (; in Windows). The default location is \fB$ARC_LOCATION\fR/lib/arc (\\ in Windows). .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC.
Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arccat (1), .BR arcclean (1), .BR arccp (1), .BR arcinfo (1), .BR arckill (1), .BR arcls (1), .BR arcmkdir (1), .BR arcproxy (1), .BR arcrenew (1), .BR arcresub (1), .BR arcresume (1), .BR arcrm (1), .BR arcstat (1), .BR arcsub (1), .BR arcsync (1), .BR arctest (1) nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/README0000644000000000000000000000013214152153376022130 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.472646981 30 ctime=1638455101.086587784 nordugrid-arc-6.14.0/src/clients/compute/README0000644000175000002070000000005314152153376022113 0ustar00mockbuildmock00000000000000ARC command line tools for job management nordugrid-arc-6.14.0/src/clients/compute/PaxHeaders.30264/arcclean.cpp0000644000000000000000000000013214152153376023524 xustar000000000000000030 mtime=1638455038.337644953 30 atime=1638455038.473646996 30 ctime=1638455101.089587829 nordugrid-arc-6.14.0/src/clients/compute/arcclean.cpp0000644000175000002070000001400214152153376023506 0ustar00mockbuildmock00000000000000// -*- indent-tabs-mode: nil -*- #ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include #include #include "utils.h" int RUNMAIN(arcclean)(int argc, char **argv) { setlocale(LC_ALL, ""); Arc::Logger logger(Arc::Logger::getRootLogger(), "arcclean"); Arc::LogStream logcerr(std::cerr); logcerr.setFormat(Arc::ShortFormat); Arc::Logger::getRootLogger().addDestination(logcerr); Arc::Logger::getRootLogger().setThreshold(Arc::WARNING); Arc::ArcLocation::Init(argv[0]); ClientOptions opt(ClientOptions::CO_CLEAN, istring("[job ...]"), istring("The arcclean command removes a job " "from the computing resource.")); std::list jobidentifiers = opt.Parse(argc, argv); if (opt.showversion) { std::cout << Arc::IString("%s version %s", "arcclean", VERSION) << std::endl; return 0; } // If debug is specified as argument, it should be set before loading the configuration. 
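// Overview of the remaining flow (summary comments; details are in the code below):
//  - the -d/--debug threshold is applied before Arc::UserConfig is built, so that
//    configuration parsing itself is logged at the requested level;
//  - -P/--listplugins only lists the HED:JobControllerPlugin modules and exits;
//  - Arc::UserConfig is created from --conffile/--joblist and the user proxy is
//    checked unless an OToken is configured;
//  - job IDs from the command line, from --jobids-from-file files and from the
//    local job list are handed to Arc::JobSupervisor, whose Clean() removes the
//    selected jobs from the computing elements;
//  - successfully cleaned entries are finally removed from the local job
//    information storage.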
if (!opt.debug.empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(opt.debug)); logger.msg(Arc::VERBOSE, "Running command: %s", opt.GetCommandWithArguments()); if (opt.show_plugins) { std::list types; types.push_back("HED:JobControllerPlugin"); showplugins("arcclean", types, logger); return 0; } Arc::UserConfig usercfg(opt.conffile, opt.joblist); if (!usercfg) { logger.msg(Arc::ERROR, "Failed configuration initialization"); return 1; } if(usercfg.OToken().empty()) { if (!checkproxy(usercfg)) { return 1; } } if (opt.debug.empty() && !usercfg.Verbosity().empty()) Arc::Logger::getRootLogger().setThreshold(Arc::istring_to_level(usercfg.Verbosity())); for (std::list::const_iterator it = opt.jobidinfiles.begin(); it != opt.jobidinfiles.end(); ++it) { if (!Arc::Job::ReadJobIDsFromFile(*it, jobidentifiers)) { logger.msg(Arc::WARNING, "Cannot read specified jobid file: %s", *it); } } if (opt.timeout > 0) usercfg.Timeout(opt.timeout); if ((!opt.joblist.empty() || !opt.status.empty()) && jobidentifiers.empty() && opt.clusters.empty()) opt.all = true; if (jobidentifiers.empty() && opt.clusters.empty() && !opt.all) { logger.msg(Arc::ERROR, "No jobs given"); return 1; } std::list selectedURLs; if (!opt.clusters.empty()) { selectedURLs = getSelectedURLsFromUserConfigAndCommandLine(usercfg, opt.clusters); } std::list rejectManagementURLs = getRejectManagementURLsFromUserConfigAndCommandLine(usercfg, opt.rejectmanagement); std::list jobs; Arc::JobInformationStorage *jobstore = createJobInformationStorage(usercfg); if (jobstore != NULL && !jobstore->IsStorageExisting()) { logger.msg(Arc::ERROR, "Job list file (%s) doesn't exist", usercfg.JobListFile()); delete jobstore; return 1; } if (jobstore == NULL || ( opt.all && !jobstore->ReadAll(jobs, rejectManagementURLs)) || (!opt.all && !jobstore->Read(jobs, jobidentifiers, selectedURLs, rejectManagementURLs))) { logger.msg(Arc::ERROR, "Unable to read job information from file (%s)", usercfg.JobListFile()); delete jobstore; return 1; } if (!opt.all) { for (std::list::const_iterator itJIDAndName = jobidentifiers.begin(); itJIDAndName != jobidentifiers.end(); ++itJIDAndName) { std::cout << Arc::IString("Warning: Job not found in job list: %s", *itJIDAndName) << std::endl; } } Arc::JobSupervisor jobmaster(usercfg, jobs); jobmaster.Update(); jobmaster.SelectValid(); if (!opt.status.empty()) { jobmaster.SelectByStatus(opt.status); } //if (jobmaster.GetSelectedJobs().empty()) { // std::cout << Arc::IString("No jobs") << std::endl; // return 1; //} int retval = (int)!jobmaster.Clean(); std::list cleaned = jobmaster.GetIDsProcessed(); const std::list& notcleaned = jobmaster.GetIDsNotProcessed(); if ((!opt.status.empty() && std::find(opt.status.begin(), opt.status.end(), "Undefined") != opt.status.end()) || opt.forceclean) { std::string response = ""; if (!opt.forceclean) { std::cout << Arc::IString("You are about to remove jobs from the job list for which no information could be\n" "found. 
NOTE: Recently submitted jobs might not have appeared in the information\n" "system, and this action will also remove such jobs.") << std::endl; std::cout << Arc::IString("Are you sure you want to clean jobs missing information?") << " [" << Arc::IString("y") << "/" << Arc::IString("n") << "] "; std::cin >> response; } if (!opt.forceclean && Arc::lower(response) != std::string(Arc::FindTrans("y"))) { std::cout << Arc::IString("Jobs missing information will not be cleaned!") << std::endl; if (cleaned.empty() && notcleaned.empty()) { return retval; } } else { for (std::list::const_iterator it = jobmaster.GetAllJobs().begin(); it != jobmaster.GetAllJobs().end(); ++it) { if (it->State == Arc::JobState::UNDEFINED) { cleaned.push_back(it->JobID); } } } } if (!jobstore->Remove(cleaned)) { std::cout << Arc::IString("Warning: Failed to write job information to file (%s)", usercfg.JobListFile()) << std::endl; std::cout << Arc::IString(" Run 'arcclean -s Undefined' to remove cleaned jobs from job list", usercfg.JobListFile()) << std::endl; } delete jobstore; if (cleaned.empty() && notcleaned.empty()) { std::cout << Arc::IString("No jobs") << std::endl; return 1; } std::cout << Arc::IString("Jobs processed: %d, deleted: %d", cleaned.size()+notcleaned.size(), cleaned.size()) << std::endl; return retval; } nordugrid-arc-6.14.0/src/PaxHeaders.30264/utils0000644000000000000000000000013214152153475017216 xustar000000000000000030 mtime=1638455101.283590744 30 atime=1638455103.999631554 30 ctime=1638455101.283590744 nordugrid-arc-6.14.0/src/utils/0000755000175000002070000000000014152153475017260 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/utils/PaxHeaders.30264/Makefile.am0000644000000000000000000000013114152153376021326 xustar000000000000000030 mtime=1638455038.449646635 30 atime=1638455038.517647657 29 ctime=1638455101.11958828 nordugrid-arc-6.14.0/src/utils/Makefile.am0000644000175000002070000000022314152153376021311 0ustar00mockbuildmock00000000000000GRIDMAP = gridmap if HED_ENABLED HED = hed else HED = endif SUBDIRS = $(HED) $(GRIDMAP) python archery DIST_SUBDIRS = hed gridmap python archery nordugrid-arc-6.14.0/src/utils/PaxHeaders.30264/gridmap0000644000000000000000000000013214152153475020641 xustar000000000000000030 mtime=1638455101.172589076 30 atime=1638455103.999631554 30 ctime=1638455101.172589076 nordugrid-arc-6.14.0/src/utils/gridmap/0000755000175000002070000000000014152153475020703 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/utils/gridmap/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376022752 xustar000000000000000030 mtime=1638455038.449646635 30 atime=1638455038.517647657 30 ctime=1638455101.169589031 nordugrid-arc-6.14.0/src/utils/gridmap/Makefile.am0000644000175000002070000000050014152153376022732 0ustar00mockbuildmock00000000000000dist_sbin_SCRIPTS = nordugridmap cronddir = @cronddir@ man_MANS = nordugridmap.8 # Not using crond_DATA since we need to rename it. 
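# The install-exec-local/uninstall-local hooks below install the generated
# nordugridmap.cron fragment by hand as $(cronddir)/nordugridmap, i.e. with the
# .cron suffix dropped, which a plain crond_DATA entry could not do.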
install-exec-local: $(MKDIR_P) "$(DESTDIR)$(cronddir)" $(INSTALL_DATA) nordugridmap.cron $(DESTDIR)$(cronddir)/nordugridmap uninstall-local: rm -f $(DESTDIR)$(cronddir)/nordugridmap nordugrid-arc-6.14.0/src/utils/gridmap/PaxHeaders.30264/nordugridmap.8.in0000644000000000000000000000013214152153376024107 xustar000000000000000030 mtime=1638455038.450646651 30 atime=1638455038.517647657 30 ctime=1638455101.171589061 nordugrid-arc-6.14.0/src/utils/gridmap/nordugridmap.8.in0000644000175000002070000000417714152153376024101 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH NORDUGRIDMAP 8 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid ARC" .SH NAME nordugridmap \- ARC grid-mapfile generator .SH SYNOPSIS .B nordugridmap [\fB-t\fR, \fB--test\fR] [\fB-c\fR, \fB--config\fR \fIFILE\fR] [\fB-h\fR, \fB--help\fR] .SH DESCRIPTION .PP The nordugridmap utility is usually run as a crontab entry in order to automatically generate grid-mapfile(s). .PP All information about the mapfiles to generate, the sources of information and other related options is stored in a single configuration file. The common NorduGrid ARC configuration file \fBarc.conf\fP(5) is used if not redefined via a command line option. .SH EXTENDED DESCRIPTION The sources of information supported by nordugridmap are the following: .TP .B http(s):// URL to a plain text file. The file should contain a list of DNs - one DN per line. .TP .B voms(s):// URL to a VOMS-Admin interface. .TP .B nordugrid NorduGrid VO members .TP .B file:// Local file (stand-alone or dynamically generated by nordugridmap). The file should contain a list of DNs with an optional mapped unixid: "user DN" [mapped unixID] .TP .B vo:// Reference to another VO configuration block .PP We STRONGLY RECOMMEND reading the NorduGrid ARC documentation, the \fBarc.conf\fP(5) manual page and/or the \fBarc.conf.reference\fP to learn all details about the usage of the nordugridmap configuration options. .SH OPTIONS .TP \fB\-t\fR, \fB\-\-test\fR Does not actually create grid-mapfile(s), but performs a test run in debug mode. .TP \fB\-c\fR, \fB\-\-config \fIFILE\fR Specifies the configuration file to be used. By default the \fBarc.conf\fP(5) is used. nordugridmap uses the [nordugridmap] section for fine-tuning of general options and processes all the [vo] blocks from the config. .TP \fB\-h\fR, \fB\-\-help\fR Show help options .PP .SH REPORTING BUGS Report bugs to http://bugzilla.nordugrid.org/ .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH FILES .BR /etc/arc.conf .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. Please report bugs and feature requests to http://bugzilla.nordugrid.org .SH SEE ALSO .BR arc.conf (5) nordugrid-arc-6.14.0/src/utils/gridmap/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153436022760 xustar000000000000000030 mtime=1638455070.971135285 30 atime=1638455091.813448451 30 ctime=1638455101.169589031 nordugrid-arc-6.14.0/src/utils/gridmap/Makefile.in0000644000175000002070000005732214152153436022756 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/utils/gridmap DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/nordugridmap.cron.in $(srcdir)/nordugridmap.8.in \ $(dist_sbin_SCRIPTS) ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = nordugridmap.cron nordugridmap.8 CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = 
$(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(sbindir)" "$(DESTDIR)$(man8dir)" SCRIPTS = $(dist_sbin_SCRIPTS) AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac man8dir = $(mandir)/man8 NROFF = nroff MANS = $(man_MANS) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ 
CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = 
@PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = 
@top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ dist_sbin_SCRIPTS = nordugridmap man_MANS = nordugridmap.8 all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/utils/gridmap/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/utils/gridmap/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): nordugridmap.cron: $(top_builddir)/config.status $(srcdir)/nordugridmap.cron.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ nordugridmap.8: $(top_builddir)/config.status $(srcdir)/nordugridmap.8.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-dist_sbinSCRIPTS: $(dist_sbin_SCRIPTS) @$(NORMAL_INSTALL) @list='$(dist_sbin_SCRIPTS)'; test -n "$(sbindir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(sbindir)'"; \ $(MKDIR_P) "$(DESTDIR)$(sbindir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(sbindir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(sbindir)$$dir" || exit $$?; \ } \ ; done uninstall-dist_sbinSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(dist_sbin_SCRIPTS)'; test -n "$(sbindir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(sbindir)'; $(am__uninstall_files_from_dir) mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man8: $(man_MANS) @$(NORMAL_INSTALL) @list1=''; \ list2='$(man_MANS)'; \ test -n "$(man8dir)" \ && test -n "`echo $$list1$$list2`" \ || exit 0; \ echo " $(MKDIR_P) '$(DESTDIR)$(man8dir)'"; \ $(MKDIR_P) "$(DESTDIR)$(man8dir)" || exit 1; \ { for i in $$list1; do echo "$$i"; done; \ if test -n "$$list2"; then \ for i in $$list2; do echo "$$i"; done \ | sed -n '/\.8[a-z]*$$/p'; \ fi; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ 
done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^8][0-9a-z]*$$,8,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man8dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man8dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man8dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man8dir)" || exit $$?; }; \ done; } uninstall-man8: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man8dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.8[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^8][0-9a-z]*$$,8,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ dir='$(DESTDIR)$(man8dir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(SCRIPTS) $(MANS) installdirs: for dir in "$(DESTDIR)$(sbindir)" "$(DESTDIR)$(man8dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-dist_sbinSCRIPTS install-exec-local install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man8 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-dist_sbinSCRIPTS uninstall-local uninstall-man uninstall-man: uninstall-man8 .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am \ install-dist_sbinSCRIPTS install-dvi install-dvi-am \ install-exec install-exec-am install-exec-local install-html \ install-html-am install-info install-info-am install-man \ install-man8 install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags-am uninstall uninstall-am \ uninstall-dist_sbinSCRIPTS uninstall-local uninstall-man \ uninstall-man8 # Not using crond_DATA since we need to rename it. install-exec-local: $(MKDIR_P) "$(DESTDIR)$(cronddir)" $(INSTALL_DATA) nordugridmap.cron $(DESTDIR)$(cronddir)/nordugridmap uninstall-local: rm -f $(DESTDIR)$(cronddir)/nordugridmap # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/utils/gridmap/PaxHeaders.30264/nordugridmap0000644000000000000000000000013214152153376023334 xustar000000000000000030 mtime=1638455038.450646651 30 atime=1638455038.517647657 30 ctime=1638455101.172589076 nordugrid-arc-6.14.0/src/utils/gridmap/nordugridmap0000755000175000002070000012215514152153376023332 0ustar00mockbuildmock00000000000000#!/usr/bin/perl # # nordugridmap - generates grid-mapfile(s) based on configuration # binmode STDIN; binmode STDOUT; use Getopt::Long; use POSIX qw(strftime); use Time::localtime; use File::Temp qw(tempfile); use File::Path; use Storable; use URI; use XML::DOM; use LWP::UserAgent; use LWP::Protocol::https; use SOAP::Lite; use SOAP::Transport::HTTP; # please use this when developing use warnings; use strict; use constant { # logging level constants FATAL => 0, ERROR => 1, WARNING => 2, INFO => 3, VERBOSE => 4, DEBUG => 5, # nordugridmap internals VERSION => "4.0", USERAGENT => "nordugridmap" }; # # GET COMMAND LINE OPTIONS AND SET DEFAULTS # # define configuration flags my %config_flags = ( 'mapuser_processing' => 0, # overwrite = 1, keep = 0 'cache_enabled' => 1, 'log_to_file' => 1, 'voms_use_soap' => 1, # voms_method: soap = 1, get = 0 'allow_empty_unixid' => 1 ); my $log_level = 2; my $fetch_url_timeout = 15; my $opt_help; my $opt_test; my $fileopt = $ENV{ARC_CONFIG}||="/etc/arc.conf"; # get options GetOptions("help" => \$opt_help, "test" => \$opt_test, "config=s" => \$fileopt); if ($opt_help) { &printHelp; exit(1); } # print at DEBUG level to STDERR when using testing mode if ($opt_test) { $log_level = 5; $config_flags{'log_to_file'} = 0; &Logger("Nordugridmap is running in a testing mode. There will be no gridmaps altered.", DEBUG); } # # CONFIG FILE PARSER (ARC.CONF INI FORMAT EXPECTED) # unless (open (CONFIGFILE, "<$fileopt")) { &Logger("Can't open $fileopt configuration file", FATAL); } my %parsedconfig = (); my $blockname; my $lineindex=0; my $parseoptions=0; while (my $line =) { $lineindex++; next if $line =~/^#/; next if $line =~/^$/; next if $line =~/^\s+$/; # parse block name if ($line =~/\[([^\]]+)\]/ ) { $blockname = $1; if ( $blockname =~ /^(common$|nordugridmap$|userlist)/ ) { $parseoptions = 1; if ($blockname =~ /^userlist:\s*([^\[\]]+)\s*$/ ) { my $name = $1; $name =~ s/^\s+|\s+$//g; $blockname = "userlist:$name"; $parsedconfig{$blockname}{'name'} = $name; } elsif ($blockname =~ /^userlist$/ ) { &Logger("Legacy [userlist] block found in $fileopt line $lineindex. Should be [userlist:NAME]. 
Execution halted to prevent unnoticed errors.", FATAL, "ConfigParser"); } next; } $parseoptions = 0; } # skip parsing for not relevant blocks next unless $parseoptions; # parse name/values my $variable_name; my $variable_value; if ( $line =~/^\s*(\w+)\s*=\s*(.*)\s*$/ ) { $variable_name=$1; $variable_value=$2; } else { if ($line =~ /^\s*mapped_unixid\s*$/ ) { $variable_name="mapped_unixid"; $variable_value=""; } else { next; } } # "" is the empty string (valid case for mapped_unixid="") if ( $variable_value =~ /^\s*""\s*$/ ) { $variable_value = ""; } if ( $blockname =~/^userlist:/ ) { # special parsing for the nordugrid VO members: source="nordugrid" if (($variable_name eq "source") && ($variable_value eq "nordugrid")) { $variable_value = "vomss://voms.ndgf.org:8443/voms/nordugrid.org"; } } # store values to hash: $parsedconfig{blockname}{variable_name} unless ($parsedconfig{$blockname}{$variable_name}) { $parsedconfig{$blockname}{$variable_name} = $variable_value; } else { $parsedconfig{$blockname}{$variable_name} .= '[separator]' . $variable_value; } } close CONFIGFILE; # # CHECK CONFIGURATION FOR REQUIRED INFO # # check [userlist:NAME] blocks exists my @blocknames_tmp = (keys %parsedconfig); unless ( grep /^userlist:/, @blocknames_tmp) { &Logger("There are no [userlist:NAME] blocks had been found in the $fileopt configuration file", FATAL, "ConfigParser"); } # general configurable options (order: [nordugridmap] -> [common] -> $ENV -> defaults); my $capath = $parsedconfig{"nordugridmap"}{"x509_cert_dir"} || $parsedconfig{"common"}{"x509_cert_dir"} || $ENV{X509_CERT_DIR} || "/etc/grid-security/certificates/"; my $x509cert = $parsedconfig{'nordugridmap'}{'x509_host_cert'} || $parsedconfig{'common'}{'x509_host_cert'} || $ENV{X509_HOST_CERT} || $ENV{X509_USER_CERT} || "/etc/grid-security/hostcert.pem"; my $x509key = $parsedconfig{'nordugridmap'}{'x509_host_key'} || $parsedconfig{'common'}{'x5security09_host_key'} || $ENV{X509_HOST_KEY} || $ENV{X509_USER_KEY} || "/etc/grid-security/hostkey.pem"; my $default_mapfile = $parsedconfig{'nordugridmap'}{'gridmap'} || $parsedconfig{'mapping'}{'gridmap'} || "/etc/grid-security/grid-mapfile"; my $mapfile_owner = $parsedconfig{'nordugridmap'}{'gridmap_owner'} || "root"; my $mapfile_group = $parsedconfig{'nordugridmap'}{'gridmap_group'} || "root"; my $mapfile_chmod = $parsedconfig{'nordugridmap'}{'gridmap_permissions'} || "0600"; my $logfile = $parsedconfig{'nordugridmap'}{'logfile'} || "/var/log/arc/nordugridmap.log"; my $cachedir = $parsedconfig{'nordugridmap'}{'cachedir'} || "/var/spool/arc/gridmapcache/"; my $cache_maxlife = $parsedconfig{'nordugridmap'}{'cachetime'} || 3 * 24 * 60 * 60; # three days old &set_numeric_value(\$log_level, 'loglevel', '0 to 5') unless $opt_test; &set_numeric_value(\$fetch_url_timeout, 'fetch_timeout', 'numeric integers'); &set_configuration_flag('cache_enabled','cache_enable','yes','no'); &set_configuration_flag('mapuser_processing','mapuser_processing','overwrite','keep'); &set_configuration_flag('allow_empty_unixid','allow_empty_unixid','yes','no'); &set_configuration_flag('voms_use_soap','voms_method','soap','get'); &set_configuration_flag('log_to_file', 'log_to_file', 'yes', 'no') unless $opt_test; # # ENABLE/DISABLE FEATURES DEPEND ON CONFIGURATION # # redirect log to file if ( $config_flags{'log_to_file'} ) { open ( STDERR, ">> $logfile" ) or &Logger("Cannot open logfile '$logfile' for writing. 
Exiting.", FATAL); &Logger("Starting grid-mapfiles processing cycle", INFO); } # if cache enabled ensure cache directory exists and writable if ( $config_flags{'cache_enabled'} ) { # check cachedir exists unless ( -d $cachedir ) { &Logger("Cache directory does not exists. Trying to create...", WARNING); eval { mkpath($cachedir) }; if ($@) { &Logger("Failed to create cache directory $cachedir", FATAL); } &Logger("Cache directory $cachedir has been created", INFO); } &Logger("Cache directory $cachedir is not writable", FATAL) unless -w $cachedir; } # # PROCESS [USERLIST] BLOCKS DEPENDENCIES # # generate a list of all external sources to fetch # generate a list of [userlist] blocks dependencies my %sources_list = (); my %sources_deps = (); # process blocks defined in arc.conf foreach my $block (sort(keys %parsedconfig)) { next unless $block =~ /^userlist:/; my $listname = &get_userlist_name($block); $sources_deps{"userlist://".$listname} = &get_block_sources($block, \%sources_list); } # ensure loop-free configuration my %dryrun_sources_data = %sources_list; &process_userlist_blocks(\%sources_deps, \%dryrun_sources_data, 1); # # FETCH SOURCES AND ASSEMBLE GRIDMAPS # # fetch all sources my %sources_data = (); &fetch_sources(\%sources_list, \%sources_data); # assemble [userlist] blocks gridmap lists &process_userlist_blocks(\%sources_deps, \%sources_data); # assemble gridmapfiles my %mapfile_data = (); &process_mapfiles(\%mapfile_data, \%sources_data); # write mapfiles to disk if ( $opt_test ) { &write_mapfiles_data(\%mapfile_data, 1); } else { &write_mapfiles_data(\%mapfile_data, 0, $mapfile_owner, $mapfile_group, $mapfile_chmod); } # END OF MAIN ROUTINE :-) # # GENERAL CONFIGURATION PARSER SUBROUTINES # # get userlist name for [userlist] block sub get_userlist_name { my $block = shift; if ( $parsedconfig{$block}{'name'} ) { return $parsedconfig{$block}{'name'}; } else { &Logger("Malformed userlist block [$block] found in in $fileopt: failed to get block name", FATAL, "ConfigParser"); } } # set configuration flags in %config_flags based on [nordugridmap] parsed configuration sub set_configuration_flag { my ( $flag_name, $option_name, $value_yes, $value_no ) = @_; if ( defined $parsedconfig{'nordugridmap'}{$option_name} ) { if ( $parsedconfig{'nordugridmap'}{$option_name} eq $value_yes ) { $config_flags{$flag_name} = 1; } elsif ( $parsedconfig{'nordugridmap'}{$option_name} eq $value_no ) { $config_flags{$flag_name} = 0; } else { my $text_def = $config_flags{$flag_name} ? $value_yes : $value_no; &Logger("Unrecognized value for option '$option_name' in [nordugridmap] configuration. Valid valueas are: '$value_yes' or '$value_no'. Using default '$text_def'", WARNING, "ConfigParser"); } } } # return numeric value of [nordugridmap] parsed configuration option sub set_numeric_value { my ( $ref_var, $option_name, $value_valid ) = @_; if ( defined $parsedconfig{'nordugridmap'}{$option_name} ) { if ( $parsedconfig{'nordugridmap'}{$option_name} =~ /^\d+$/ ) { $$ref_var = $parsedconfig{'nordugridmap'}{$option_name}; } else { &Logger("Unrecognized value for option '$option_name' in [nordugridmap] configuration. Valid valueas are: $value_valid. 
Using default value: $$ref_var.", WARNING, "ConfigParser"); } } } # return boolean flag value in specified %options_hash sub get_source_flag { my ( $ref_options_hash, $flag_name, $option_name, $value_yes, $value_no ) = @_; if ( defined $ref_options_hash->{$option_name} ) { return 1 if ( $ref_options_hash->{$option_name} eq $value_yes ); return 0 if ( $ref_options_hash->{$option_name} eq $value_no ); my $text_def = $config_flags{$flag_name} ? $value_yes : $value_no; &Logger("Unrecognized value for source-specific option '$option_name'. Valid valueas are: '$value_yes' or '$value_no'. Using globaly configured value '$text_def'", WARNING, "ConfigParser"); } return $config_flags{$flag_name}; } # # MAPPING PROCESSING SUBROUTINES # # assemble grid-mapfiles data sub process_mapfiles { my ( $ref_mapfile_data, $ref_sources_data ) = @_; foreach my $block (sort(keys %parsedconfig)) { next unless $block =~ /^userlist:/; my $gmf = $parsedconfig{$block}{'outfile'} || $default_mapfile; next if $gmf eq '/dev/null'; next if defined $ref_mapfile_data->{$gmf}; &Logger("Assembling gridmap file: $gmf", INFO, "AssembleGridMapfile"); $ref_mapfile_data->{$gmf} = {}; my @userlist_blocks = &get_file_userlist_sources($gmf); foreach my $source ( @userlist_blocks ) { foreach my $dn ( keys %{$ref_sources_data->{$source}} ) { unless ( defined $ref_mapfile_data->{$gmf}->{$dn} ) { $ref_mapfile_data->{$gmf}->{$dn} = $ref_sources_data->{$source}->{$dn}->{'mapuser'}; } else { &Logger("Entry '$dn' already exists in $gmf gridmapfile. Skiped.", DEBUG, "AssembleGridMapfile"); } } } } } # assemble [userlist] blocks mapping data sub process_userlist_blocks { my ($ref_sources_deps, $ref_sources_data, $dryrun) = @_; my $blocks_unfinished = 1; my $blocks_processed = 1; # loop until all [userlist] blocks are processed while ( $blocks_unfinished ) { if ( $blocks_processed == 0 ) { &Logger("Loop detected in the [userlist] blocks dependencied. Please review you configuration.", FATAL, "AssembleBlockData"); } # initial values $blocks_unfinished = 0; $blocks_processed = 0; foreach my $block (sort(keys %parsedconfig)) { next unless $block =~ /^userlist:/; my $list_name = &get_userlist_name($block); my $userlist_ref = "userlist://" . 
$list_name; next if defined $ref_sources_data->{$userlist_ref}; $blocks_unfinished++; # check all sources fetched or already assembled my $undefined_cnt = 0; foreach my $source ( @{$ref_sources_deps->{$userlist_ref}} ) { $undefined_cnt++ unless defined $ref_sources_data->{$source}; } # assemble [userlist] block gridmap unless ( $undefined_cnt ) { unless ( $dryrun ) { # get [userlist] block parameters my $mapped_user = "nobody"; if ( exists $parsedconfig{$block}{'mapped_unixid'} ) { $mapped_user = $parsedconfig{$block}{'mapped_unixid'}; } # define [userlist] block filter if any my @Rules = (); if ( $parsedconfig{$block}{'filter'} ) { my @filters = split /\[separator\]/, $parsedconfig{$block}{'filter'}; foreach my $filter_entry (@filters) { push @Rules, $filter_entry; } # if we allow certain people, deny becomes last option if ( ($parsedconfig{$block}{'filter'} =~ /allow/) ) { push @Rules, "deny *"; } } else { # no filters - allow all push @Rules, "allow *"; } # print block parameters summary on debug &Logger("Assembling DNs list for the [userlist:$list_name] block", DEBUG, "AssembleBlockData"); # process all sources $ref_sources_data->{$userlist_ref} = {}; foreach my $source ( @{$ref_sources_deps->{$userlist_ref}} ) { foreach my $dn ( keys %{$ref_sources_data->{$source}} ) { my %source_dn_hash = %{$ref_sources_data->{$source}->{$dn}}; unless ( defined $ref_sources_data->{$userlist_ref}->{$dn} ) { # check DN is filtered next unless &rule_match($dn, \@Rules); # check mapping user exists for record if ( $config_flags{'mapuser_processing'} || ! defined $source_dn_hash{'mapuser'} ) { if ( $mapped_user eq "" ) { unless ( $config_flags{'allow_empty_unixid'} ) { &Logger("There is no mapping for DN '$dn' in [userlist:$list_name] block. Skipping record.", WARNING, "AssembleBlockData"); next; } else { &Logger("Using empty mapping for DN '$dn' in [userlist:$list_name] block.", VERBOSE, "AssembleBlockData"); } } } # if we are still here - add entry $ref_sources_data->{$userlist_ref}->{$dn} = \%source_dn_hash; # always map to common user on 'rewrite' mapuser processing if ( $config_flags{'mapuser_processing'} || ! defined $ref_sources_data->{$userlist_ref}->{$dn}->{'mapuser'} ) { $ref_sources_data->{$userlist_ref}->{$dn}->{'mapuser'} = $mapped_user; } &Logger("Adding mapping entry '$dn -> $ref_sources_data->{$userlist_ref}->{$dn}->{'mapuser'}' for the [userlist:$list_name] block.", DEBUG, "AssembleBlockData"); # maintain information about where record is come from unless ( defined $ref_sources_data->{$userlist_ref}->{$dn}->{'source'} ) { $ref_sources_data->{$userlist_ref}->{$dn}->{'source'} = $source; } } else { &Logger("Mapping for '$dn' entry already exists for the [userlist:$list_name] block. Skiped.", DEBUG, "AssembleBlockData"); } } } } else { $ref_sources_data->{$userlist_ref} = 1; } $blocks_processed++; } } } } # write mapfiles to disk sub write_mapfiles_data { my ( $ref_mapfile_data, $dryrun, $owner, $group, $chmod ) = @_; foreach my $mapfile ( keys %$ref_mapfile_data ) { unless ( $dryrun ) { my ($gmf, $tmp_mapfile) = tempfile($mapfile . "XXXXX", UNLINK => 1) or &Logger("Cannot open temporary file to write $mapfile data", FATAL, "WriteMapfile"); &Logger("Writting mapfile data to $mapfile", INFO, "WriteMapfile"); while ( my ($dn, $map) = each(%{$ref_mapfile_data->{$mapfile}}) ) { print $gmf "\"$dn\" $map\n" or &Logger("Failed to write gridmap data (not enough disk space?) 
to temporary file $tmp_mapfile", FATAL, "WriteMapfile"); } close($gmf); my $uid = getpwnam($owner); my $gid = getgrnam($group); chown $uid, $gid, $tmp_mapfile; chmod oct($chmod), $tmp_mapfile; rename $tmp_mapfile, $mapfile; } else { my $gmf_string = ""; while ( my ($dn, $map) = each(%{$ref_mapfile_data->{$mapfile}}) ) { $gmf_string .= " \"$dn\" $map\n"; } &Logger("Printing mapfile content for $mapfile:\n$gmf_string", INFO, "WriteMapfile"); } } } # # SOURCES DEPENDENCIES TRACKING SUBROTINES # # return array of [userlist] blocks required to generate gridmapfile sub get_file_userlist_sources { my $file_name = shift; my @file_userlists = (); foreach my $block (sort(keys %parsedconfig)) { next unless $block =~ /^userlist:/; if ( defined $parsedconfig{$block}{'outfile'} ) { next unless $parsedconfig{$block}{'outfile'} eq $file_name; } else { next unless $file_name eq $default_mapfile; } push @file_userlists, "userlist://".get_userlist_name($block); } return @file_userlists; } # extract optional per-source parameters from source string and return hash # optional parameters will be removed from passed source string sub get_source_params { my $ref_source = shift; my ( $source_str, $params_str ) = split '<', $$ref_source; # trim url without optional parameters and return back $source_str =~ s/^\s+//; $source_str =~ s/\s+$//; $$ref_source = $source_str; &Logger("Source URL is: $source_str", DEBUG, "ParseSourceURL"); # create source parameters hash my %source_params = (); if ( defined $params_str ) { foreach my $param_record ( split ' ', $params_str ) { next unless ( $param_record =~/^(\w+)=(.+)$/ ); &Logger("Processing source optional parameter '$1'=$2", DEBUG, "ParseSourceURL"); $source_params{$1}=$2; } } return \%source_params; } # return list of block dependencied and fill external sources list sub get_block_sources { my ($block_id, $ref_sources_list, $ref_confighash) = @_; # parsed arc.conf hash is used by default $ref_confighash = \%parsedconfig unless defined $ref_confighash; # array with block dependencied my @userlist_sources = (); &Logger("Getting sources for [$block_id] block", DEBUG, "GetInfoSources"); my @urls = split /\[separator\]/, $ref_confighash->{$block_id}{'source'}; foreach my $source (@urls) { &Logger("Found mapping source record: $source", DEBUG, "GetInfoSources"); # get optional per-source parameters my $ref_source_params = &get_source_params(\$source); my $source_id = $source; # check sources are already in sources list if ( defined $ref_sources_list->{$source} ) { # if source parameters differ - use block_id prefix if ( &Storable::freeze($ref_source_params) ne &Storable::freeze($ref_sources_list->{$source}) ) { &Logger("Adding block ID prefix for duplicate source URL with different parameters set", DEBUG, "GetInfoSources"); $source_id = "$block_id|$source"; } else { &Logger("Source URL is already defined", DEBUG, "GetInfoSources"); } } # get source protocol my ( $protocol, $uri ) = $source =~ m/([-\w]+):\/\/(.*)/; $protocol = lc $protocol; # process URLs depend on protocol used if ( $protocol =~ /^vomss?$/i ) { # special handling for voms_fqan_map if ( defined $ref_confighash->{$block_id}{'voms_fqan_map'} ) { # FQANs defined for VOMS URL: generate URL for every FQAN my @fqans = split /\[separator\]/, $ref_confighash->{$block_id}{'voms_fqan_map'}; my ( $voms_baseid, $dummy_fqan ) = $source_id =~ m/^([^\?]+)\??(.*)$/; foreach my $fqan_match ( @fqans ) { my ( $fqan, $map_id ) = $fqan_match =~ m/^([^\s]+)\s+(.*)$/; # create URL with specified FQAN my $fqan_source_id = 
$voms_baseid . "?" . $fqan; my ( $dummy_id, $fqan_source_url ) = $fqan_source_id =~ m/(\w+\|)?([^|]+)/; &Logger("Generating FQAN-map source URL: $fqan_source_url (mapped to $map_id)", VERBOSE, "GetInfoSources"); # put mapped_unixid parameter my %fqan_source_params = %$ref_source_params; $fqan_source_params{'mapped_unixid'} = $map_id; # save as [userlist] block source $ref_sources_list->{$fqan_source_id} = \%fqan_source_params; push @userlist_sources, $fqan_source_id; } } # standalone VOMS URL: retreive DNs and use directly as userlist source # FQANs before original URL to apply specific maps first $ref_sources_list->{$source_id} = $ref_source_params; push @userlist_sources, $source_id; } elsif ( $protocol =~ /^(https?|ldap)$/i ) { # external sources: retreive and use directly as userlist source $ref_sources_list->{$source_id} = $ref_source_params; push @userlist_sources, $source_id; } elsif ( $protocol =~ /^file$/i ) { # local file: if created by nordugridmap - use [userlist] blocks as userlist sources # if file is independent source - use directly my @file_userlist_sources = &get_file_userlist_sources($uri); if ( @file_userlist_sources ) { push @userlist_sources, @file_userlist_sources; } else { if ( -e $uri ) { $ref_sources_list->{$source_id} = $ref_source_params; push @userlist_sources, $source_id; } else { &Logger("File source '$uri' does not exist. Ignoring.", WARNING, "GetInfoSources"); } } } elsif ( $protocol =~ /^userlist$/i ) { # [userlist] block: use directly as userlist source if ( defined $ref_confighash->{"userlist:$uri"} ) { push @userlist_sources, $source_id; } else { &Logger("Userlist source '$source_id' does not exist. Ignoring.", WARNING, "GetInfoSources"); } } else { &Logger("Unsupported protocol found: $source", WARNING, "GetInfoSources"); } } return \@userlist_sources; } # # SUBROUTINES TO GET INFORMATION FROM DIFFERENT SOURCES # # fetch data from all sources in sources_list and put them to sources_data hash sub fetch_sources { my ( $ref_sources_list, $ref_sources_data ) = @_; my $exit_code; my $ref_subjects; foreach my $source_id (keys %$ref_sources_list) { # separate optional block_id prefix from source URL my ( $block_id, $source ) = $source_id =~ m/(\w+\|)?([^|]+)/; # get source parameters my ( $protocol, $uri ) = $source =~ m/(\w+):\/\/(.*)/; my $ref_source_params = $ref_sources_list->{$source_id}; # check source-specific cache control my $use_cache = &get_source_flag($ref_source_params, 'cache_enabled','cache_enable','yes','no'); # get subjects from external URL if ( $protocol =~ /^vomss?$/i ) { ($exit_code, $ref_subjects) = &voms_subjects($source, $ref_source_params); } elsif ( $protocol =~ /^https?$/i ) { ($exit_code, $ref_subjects) = &http_subjects($source, $ref_source_params); } elsif ( $protocol =~ /^file$/i ) { ($exit_code, $ref_subjects) = &read_gridmap($uri, $ref_source_params); } else { &Logger("Unsupported protocol to fetch: $protocol", FATAL, "FetchSourcesData"); } # check fetch result and try to save/load cache unless ( $exit_code ) { if ( $use_cache ) { &write_cached_subjects($source_id, $ref_subjects) unless $opt_test; } } else { &Logger("Failed to retreive data from URL: $source", WARNING, "FetchSourcesData"); if ( $use_cache ) { my ($err_code, $cache_ref_subjects) = &read_cached_subjects($source_id); unless ($err_code) { &Logger("Using locally cached data for URL: $source", INFO, "FetchSourcesData"); $ref_subjects = $cache_ref_subjects; } } } # put fetched results to sources_data hash $ref_sources_data->{$source_id} = $ref_subjects; } } # setup 
HTTPS SSL parameters sub setup_https { # For Net::SSL $ENV{HTTPS_CERT_FILE} = $x509cert; $ENV{HTTPS_KEY_FILE} = $x509key; $ENV{HTTPS_CA_DIR} = $capath; # For IO::Socket::SSL (LWP) if ( $IO::Socket::SSL::VERSION ) { IO::Socket::SSL::set_ctx_defaults( ca_path => $capath, use_cert => 1, key_file => $x509key, cert_file => $x509cert, verify_mode => 1 ); } } # get content of HTTP(S) URL sub get_http_url { my $uri = shift; my $scheme = $uri->scheme; &Logger("Unsupported URL ($uri) passed to method", FATAL, "FetchSourcesData.HTTP") unless ( $scheme =~ /^https?$/ ); # handle SSL environment &setup_https() if ($uri->scheme eq 'https'); # create LWP object my $ua = LWP::UserAgent->new( agent => USERAGENT."/".VERSION, timeout => $fetch_url_timeout ); # do GET query my $res = $ua->get($uri, 'Cache-Control' => 'no-cache', 'Pragma' => 'no-cache'); unless ($res->is_success) { &Logger("HTTP request failed for URL $uri:\n\t". $res->message, ERROR, "FetchSourcesData.HTTP"); return 0; } return $res->content; } # HTTP(S) sources: expects plain text list of "DN" sub http_subjects { my ($url, $ref_source_params) = @_; my %Subjects = (); # get subjects from URL specified &Logger("Getting subjects from source: $url", DEBUG, "FetchSourcesData.HTTP"); my $uri = URI->new($url); my $content = get_http_url($uri); unless ($content) { &Logger("Failed to get information from source: $url", ERROR, "FetchSourcesData.HTTP"); return (1, \%Subjects); } my $count = 0; foreach my $line ( split /\n/, $content ) { next if $line =~ /^(\s)*$/; chomp($line); # "subject" should be the first and only one parsed parameter my ($subject, $dummy) = split (/\s+"(.*)"/, $line); $subject =~ s/"(.*)"/$1/g; $Subjects{$subject} = { 'subject' => $subject }; # mapped_unixid can be passed via optional parameters $Subjects{$subject}{'mapuser'} = $ref_source_params->{'mapped_unixid'} if defined $ref_source_params->{'mapped_unixid'}; $count++; } &Logger("No information retreived from URL: $url", WARNING, "FetchSourcesData.HTTP") unless $count; return (0, \%Subjects); } # VOMS(S) methods wrapper sub voms_subjects { my ($url, $ref_source_params) = @_; my $use_soap = &get_source_flag($ref_source_params, 'voms_use_soap', 'voms_method', 'soap', 'get'); if ( $use_soap ) { return &voms_subjects_soap($url, $ref_source_params); } else { return &voms_subjects_get($url, $ref_source_params); } } # VOMS(S) sources: expect VOMS-Admin SOAP responce (SOAP:Lite implementation) sub voms_subjects_soap { my ($url, $ref_source_params) = @_; my %Subjects = (); &Logger("Getting subjects from source: $url", DEBUG, "FetchSourcesData.VOMS"); # get SOAP endpoint URL and container my ( $endpoint, $container ) = split(/\?/, $url, 2); $endpoint =~ s/^voms/http/; # handle SSL environment &setup_https() if $endpoint =~ /^https/; $endpoint .= '/services/VOMSCompatibility'; my $soap_client; eval { $soap_client = SOAP::Lite->proxy($endpoint, agent => USERAGENT."/".VERSION, timeout => $fetch_url_timeout ); }; unless ( $soap_client ) { &Logger("Failed to connect to SOAP endpoint: $url", ERROR, "FetchSourcesData.VOMS"); return (1, \%Subjects); } # call getGridmapUsers method my $soap_req; eval { if ( $container ) { $soap_req = $soap_client->getGridmapUsers($container); } else { $soap_req = $soap_client->getGridmapUsers(); } }; unless ( $soap_client->transport->is_success ) { &Logger("SOAP transport failed for URL: $url. 
Error: ".$soap_client->transport->status, ERROR, "FetchSourcesData.VOMS"); return (1, \%Subjects); } unless ($soap_req) { &Logger("SOAP responce parsing failed for URL: $url", ERROR, "FetchSourcesData.VOMS"); return (3, \%Subjects); } if ( $soap_req->fault ) { &Logger("SOAP request failed for URL: $url. Returned error: ".$soap_req->faultstring, ERROR, "FetchSourcesData.VOMS"); return (4, \%Subjects); } if ( ref($soap_req->result) ne 'ARRAY' ) { &Logger("SOAP returned non-array result for URL: $url", VERBOSE, "FetchSourcesData.VOMS"); return (0, \%Subjects); } if ( ! @{$soap_req->result} ) { &Logger("SOAP returned empty result for URL: $url", VERBOSE, "FetchSourcesData.VOMS"); return (0, \%Subjects); } foreach my $subject ( @{$soap_req->result} ) { $Subjects{$subject} = { 'subject' => $subject }; # mapped_unixid can be passed via optional parameters $Subjects{$subject}{'mapuser'} = $ref_source_params->{'mapped_unixid'} if defined $ref_source_params->{'mapped_unixid'}; } return (0, \%Subjects); } # VOMS(S) sources: expect VOMS-Admin SOAP responce (GET+XML manual parser implementation) sub voms_subjects_get { my ($url, $ref_source_params) = @_; my %Subjects = (); &Logger("Getting subjects from source: $url", DEBUG, "FetchSourcesData.VOMS"); # create proper HTTP(S) URL my $uri = URI->new($url); my $scheme = $uri->scheme; $scheme =~ s/^voms/http/; $uri->scheme($scheme); # prepare GET query $uri->path($uri->path.'/services/VOMSCompatibility'); if ( $uri->query() ) { $uri->query_form( method => 'getGridmapUsers', container => $uri->query() ); } else { $uri->query_form( method => 'getGridmapUsers'); } # get URI content my $content = get_http_url($uri); return ( 1, \%Subjects) unless $content; # parse result on success my $parser = new XML::DOM::Parser; my $doc; eval { $doc = $parser->parse($content) }; unless ($doc) { &Logger("Parsing VOMS ($url) XML response FAILED", ERROR, "FetchSourcesData.VOMS"); return ( 3, \%Subjects); } my $retval = $doc->getElementsByTagName('soapenv:Body'); my $subject; if ($retval->getLength == 1) { my $returnNode = $doc->getElementsByTagName('getGridmapUsersReturn')->item(0); for my $user ($returnNode->getChildNodes) { if ($user->getNodeType == ELEMENT_NODE) { $subject = undef; eval { $subject = $user->getFirstChild->getData }; if ( defined $subject ) { $Subjects{$subject} = { 'subject' => $subject }; # mapped_unixid can be passed via optional parameters $Subjects{$subject}{'mapuser'} = $ref_source_params->{'mapped_unixid'} if defined $ref_source_params->{'mapped_unixid'}; } else { &Logger("Found subject that cannot be parsed from VOMS XML ($url)", ERROR, "FetchSourcesData.VOMS"); } } } } else { &Logger("VOMS search($uri): No such object", ERROR, "FetchSourcesData.VOMS"); return ( 4, \%Subjects); } $doc->dispose; return (0, \%Subjects); } # Mapfile sources: expect local gridmap-file sub read_gridmap { my ($gridmap_file, $ref_source_params) = @_; my %Subjects = (); &Logger("Getting subjects from source: file://$gridmap_file", DEBUG, "FetchSourcesData.LocalFile"); if (! -e $gridmap_file) { &Logger("File $gridmap_file not found", ERROR, "FetchSourcesData.LocalFile"); return (1, \%Subjects); } if (! 
-T $gridmap_file) { &Logger("File $gridmap_file not in text format", ERROR, "FetchSourcesData.LocalFile"); return (2, \%Subjects); } unless (open(IN, "< $gridmap_file")) { &Logger("Unable to open $gridmap_file", ERROR, "FetchSourcesData.LocalFile"); return (3, \%Subjects); } binmode IN; # mapped_unixid can be passed via optional parameters, overriding is controlled by 'mapuser_processing' option my $def_mapuser = ( defined $ref_source_params->{'mapped_unixid'} ) ? $ref_source_params->{'mapped_unixid'} : 0; my $map_overwrite = &get_source_flag($ref_source_params, 'mapuser_processing','mapuser_processing','overwrite','keep'); while (my $f = <IN>) { chomp($f); if ($f =~ /^\s*\"((\/[^\/]+)+)"\s+([^\s]+)\s*$/) { # record match: "/user/DN" mapping my $subject = $1; my $mapuser = $3; $mapuser = $def_mapuser if ( $def_mapuser && $map_overwrite ); $Subjects{$subject} = { 'subject' => $subject, 'mapuser' => $mapuser }; } elsif ($f =~ /^\s*\"((\/[^\/]+)+)\"\s*$/) { # record match: "/user/DN/only" my $subject = $1; $Subjects{$subject} = { 'subject' => $subject }; $Subjects{$subject}{'mapuser'} = $def_mapuser if ( $def_mapuser ); } elsif ($f =~ /^\s*((\/[^\/\s]+)+)\s+([^\s]+)\s*$/) { # record match: /user/DN/no_spaces mapping my $subject = $1; my $mapuser = $3; $mapuser = $def_mapuser if ( $def_mapuser && $map_overwrite ); $Subjects{$subject} = { 'subject' => $subject, 'mapuser' => $mapuser }; } elsif ($f =~ /^\s*((\/[^\/\s]+)+)\s*$/) { # record match: /user/DN/no_spaces/only my $subject = $1; $Subjects{$subject} = { 'subject' => $subject }; $Subjects{$subject}{'mapuser'} = $def_mapuser if ( $def_mapuser ); } else { &Logger("Skipping malformed record '$f' in file $gridmap_file", WARNING, "FetchSourcesData.LocalFile"); } } close(IN); return (0, \%Subjects); } # # MATCHING AND FILTERING # # check subject match against ACL rules sub rule_match { my ($subj, $ref_Rules) = @_; my @Rules = @$ref_Rules; my $subjReg = $subj; $subjReg =~ s/\@/\\\@/g; foreach my $rule (@Rules) { my ($action, $acl) = split / /, $rule, 2; $acl =~ s/\@/\\\@/g; $acl =~ s/\*/.\*/g; if ($subjReg =~ /$acl/) { if ($action eq "deny") { &Logger("User '$subj' denied by rule 'deny $acl'", DEBUG, "FilterDN"); } else { &Logger("User '$subj' allowed by rule 'allow $acl'", DEBUG, "FilterDN") if ( $acl ne ".*" ); return 1; } last; } } return 0; } # # CACHE OPERATIONS SUBROUTINES # # get source URL hash sub urlhash { my $url = shift; # split the url into substrings of length 8 and run crypt on each substring my @chunks = ( $url =~ /.{1,8}/gs ); my $result; foreach my $c (@chunks) { $result .= crypt $c, "arc"; } $result =~ s/[\/|\.]//g; return $result; } # get cache location for source URL sub get_subject_cache_location { my $url = shift; my $hash = &urlhash($url); my $file_location = $cachedir . "/" .
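# Illustrative note (hypothetical URL, a sketch only). urlhash() above runs crypt()
# on the source URL in 8-character chunks with the fixed salt "arc" and strips
# '/', '|' and '.' characters, so each source URL maps to a stable file name under
# $cachedir. A hypothetical call such as
#   my $cache_file = &get_subject_cache_location('http://example.org/dns.txt');
# therefore always yields the same path for that URL (the exact hash string depends
# on the system crypt() implementation), which is what allows read_cached_subjects()
# to find the data written earlier by write_cached_subjects().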
$hash; return $file_location; } # write cached values for source URL sub write_cached_subjects { my ($url, $ref_subjects) = @_; my %Subjects = %$ref_subjects; my $cache_file = &get_subject_cache_location($url); &Logger("Writing cached subjects for $url to $cache_file", DEBUG, "SourceCaching"); store($ref_subjects, $cache_file) or &Logger("Failed to write to the cache file $cache_file", WARNING, "SourceCaching"); } # read cached values for source URL sub read_cached_subjects { my $url = shift; my $cache_file = &get_subject_cache_location($url); unless ( -e $cache_file ) { &Logger("Cache file does not exist for URL: $url", VERBOSE, "SourceCaching"); return 1; } my $mtime = (stat($cache_file))[9]; if ($mtime + $cache_maxlife < time()) { &Logger("Refusing to use cache, max lifetime exceeded", VERBOSE, "SourceCaching"); eval { unlink($cache_file); }; return 2; } &Logger("Getting subjects for $url from cache", DEBUG, "SourceCaching"); my $ref_subjects; eval { $ref_subjects = retrieve($cache_file); }; if ( defined $ref_subjects ) { return 0, $ref_subjects; } &Logger("Failed to get data from cache file for URL: $url", WARNING, "SourceCaching"); eval { unlink($cache_file); }; return 3; } # # LOGGING FUNCTIONS # # convert debug level to number sub debug_numericv { my $level = shift; return $level if ( $level =~ /\d/ ); return 0 if $level =~ /^FATAL$/i; return 1 if $level =~ /^ERROR$/i; return 2 if $level =~ /^WARNING$/i; return 3 if $level =~ /^INFO$/i; return 4 if $level =~ /^VERBOSE$/i; return 5 if $level =~ /^DEBUG$/i; return 2; # WARNING level on syntax error } # get debug level string value sub debug_stringv { my $level = shift; return "FATAL" if ( $level == 0 ); return "ERROR" if ( $level == 1 ); return "WARNING" if ( $level == 2 ); return "INFO" if ( $level == 3 ); return "VERBOSE" if ( $level == 4 ); return "DEBUG" if ( $level == 5 ); } # show message depending on threshold sub Logger { my ( $text, $threshold, $subsystem ) = @_; $threshold = &debug_numericv($threshold); if ( $threshold <= $log_level ) { my $timestring = strftime("%Y-%m-%d %H:%M:%S", @{ &localtime() } ); $subsystem = ( defined $subsystem ) ? ".$subsystem" : ""; printf STDERR "[%s] [Nordugridmap%s] [%s] [$$] %s\n", $timestring, $subsystem, &debug_stringv($threshold), $text; } # exit nordugridmap on FATAL errors exit (1) unless ( $threshold ); } # # DISPLAY NORDUGRIDMAP HELP # sub printHelp { system("pod2text $0"); } =pod =head1 NAME nordugridmap - generates grid-mapfile(s) =head1 SYNOPSIS B<nordugridmap> [B<-t>, B<--test>] [B<-h>, B<--help>] [ B<-c>, B<--config> FILE ] =head1 DESCRIPTION B<nordugridmap> is usually run as a crontab entry in order to automatically generate mapfile(s). For configuration information consult the NorduGrid ARC documentation and the arc.conf.reference. =head1 OPTIONS =over 4 =item B<-t>, B<--test> Does not actually create grid-mapfile(s), but performs a test run in debug mode. =item B<-h>, B<--help> Print a help screen. =item B<-c>, B<--config> FILE Specifies the configuration file to be used. By default /etc/arc.conf is used. B<nordugridmap> utilizes the [nordugridmap] section for general options fine-tuning and processes all the [userlist:NAME] blocks from the config. =back =head1 CREDITS The early scripts were based on a modified version of the mkgridmap (v 1.6) script written by the DataGrid - authorization team. Since then the script has been considerably rewritten. In Dec 2011 the script logic was completely rewritten and B<nordugridmap> v 2.0 was born.
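=head1 EXAMPLE

An illustrative (not normative) C<arc.conf> fragment; the VO, host and file names below are placeholders, consult the arc.conf.reference for the authoritative option list:

    [userlist:myvo]
    source = vomss://voms.example.org:8443/voms/myvo
    source = file:///etc/grid-security/local.mapfile
    outfile = /etc/grid-security/grid-mapfile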
=head1 COMMENTS balazs.konya@hep.lu.se, waananen@nbi.dk, manf@grid.org.ua =cut nordugrid-arc-6.14.0/src/utils/gridmap/PaxHeaders.30264/nordugridmap.cron.in0000644000000000000000000000013214152153376024701 xustar000000000000000030 mtime=1638455038.450646651 30 atime=1638455038.517647657 30 ctime=1638455101.170589046 nordugrid-arc-6.14.0/src/utils/gridmap/nordugridmap.cron.in0000644000175000002070000000014514152153376024666 0ustar00mockbuildmock00000000000000#NorduGrid VO automatic grid-mapfile creation 11 3,9,12,15,21 * * * root @prefix@/sbin/nordugridmap nordugrid-arc-6.14.0/src/utils/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153436021335 xustar000000000000000030 mtime=1638455070.874133828 30 atime=1638455091.826448647 30 ctime=1638455101.118588265 nordugrid-arc-6.14.0/src/utils/Makefile.in0000644000175000002070000006122014152153436021323 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/utils DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 
$(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = 
@EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = 
@PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ GRIDMAP = gridmap @HED_ENABLED_FALSE@HED = @HED_ENABLED_TRUE@HED = hed SUBDIRS = $(HED) $(GRIDMAP) python archery DIST_SUBDIRS = hed gridmap python archery all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) 
am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/utils/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/utils/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-am uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/utils/PaxHeaders.30264/archery0000644000000000000000000000012414152153475020654 xustar000000000000000027 mtime=1638455101.300591 30 atime=1638455103.999631554 27 ctime=1638455101.300591 nordugrid-arc-6.14.0/src/utils/archery/0000755000175000002070000000000014152153475020715 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/utils/archery/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376022764 xustar000000000000000030 mtime=1638455038.449646635 30 atime=1638455038.517647657 30 ctime=1638455101.299590985 nordugrid-arc-6.14.0/src/utils/archery/Makefile.am0000644000175000002070000000003614152153376022750 0ustar00mockbuildmock00000000000000sbin_SCRIPTS = archery-manage nordugrid-arc-6.14.0/src/utils/archery/PaxHeaders.30264/Makefile.in0000644000000000000000000000013114152153436022771 xustar000000000000000030 mtime=1638455070.922134549 30 atime=1638455091.618445521 29 ctime=1638455101.29859097 nordugrid-arc-6.14.0/src/utils/archery/Makefile.in0000644000175000002070000005251514152153436022767 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
@SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/utils/archery DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/archery-manage.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = archery-manage CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; 
files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(sbindir)" SCRIPTS = $(sbin_SCRIPTS) AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = 
@DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ 
PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ sbin_SCRIPTS = archery-manage all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then 
exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/utils/archery/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/utils/archery/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): archery-manage: $(top_builddir)/config.status $(srcdir)/archery-manage.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-sbinSCRIPTS: $(sbin_SCRIPTS) @$(NORMAL_INSTALL) @list='$(sbin_SCRIPTS)'; test -n "$(sbindir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(sbindir)'"; \ $(MKDIR_P) "$(DESTDIR)$(sbindir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(sbindir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(sbindir)$$dir" || exit $$?; \ } \ ; done uninstall-sbinSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(sbin_SCRIPTS)'; test -n "$(sbindir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(sbindir)'; $(am__uninstall_files_from_dir) mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(SCRIPTS) installdirs: for dir in "$(DESTDIR)$(sbindir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-sbinSCRIPTS install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-sbinSCRIPTS .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-sbinSCRIPTS install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags-am uninstall uninstall-am uninstall-sbinSCRIPTS # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/utils/archery/PaxHeaders.30264/archery-manage.in0000644000000000000000000000012714152153376024147 xustar000000000000000030 mtime=1638455038.449646635 30 atime=1638455038.517647657 27 ctime=1638455101.300591 nordugrid-arc-6.14.0/src/utils/archery/archery-manage.in0000644000175000002070000025013014152153376024131 0ustar00mockbuildmock00000000000000#!@PYTHON@ from __future__ import print_function # general puprose import os import sys import time import logging import argparse import hashlib import re import json # Connectivity testing import socket # DATA fetching import ldap try: import http.client as httplib except ImportError: import httplib import ssl import xml.etree.ElementTree as ElementTree # DNS processing import dns.rdatatype import dns.resolver import dns.update import dns.query import dns.tsig import dns.tsigkeyring from dns.exception import DNSException # Multithreading from threading import Thread, Lock try: from queue import Queue except ImportError: from Queue import Queue # Software import subprocess # gpg import base64 import tempfile try: from urllib.parse import quote, unquote except ImportError: from urllib import quote, unquote # GLOBAL VARIABLES _fetch_timeout = 10 # Initialize logger logger = logging.getLogger('ARC.ARCHERY-Manage') logger.setLevel(logging.WARNING) log_handler_stderr = logging.StreamHandler() log_handler_stderr.setFormatter( logging.Formatter('[%(asctime)s] [%(name)s] [%(levelname)s] [%(process)d] [%(message)s]')) logger.addHandler(log_handler_stderr) # # GENERAL HELPERS # class HTTPSInsecureConnection(httplib.HTTPSConnection): """Class to make a HTTPS connection without CA Cert verification (compatible with 2.6+ Python)""" def __init__(self, host, port=443, timeout=30): httplib.HTTPSConnection.__init__(self, host, port) self.timeout = timeout def connect(self): """Redefine the sock without CA check enforcement""" sock = socket.create_connection((self.host, self.port), self.timeout) if self._tunnel_host: self.sock = sock self._tunnel() # Don't force Server Certificate Check self.sock = ssl.wrap_socket(sock, cert_reqs=ssl.CERT_NONE) class TimeoutQueue(Queue): """FIFO Queue with defined timeout to wait for Queue Join""" def join_with_timeout(self, timeout): self.all_tasks_done.acquire() try: endtime = time.time() + timeout while self.unfinished_tasks: remaining = endtime - time.time() if remaining <= 0: raise OSError('Timeout waiting for the Queue Join') self.all_tasks_done.wait(remaining) finally: self.all_tasks_done.release() def LDAPStrError(ldape): """Get string error from LDAP exception""" if str(ldape) == dict: err = str(ldape) elif len(ldape.args) and type(ldape.args[0]) == dict: err = ldape.args[0] else: return str(ldape) errstr = '' if 'desc' in err: errstr += err['desc'] if 'info' in err: errstr += ' ({0})'.format(err['info']) else: errstr += str(ldape) return errstr # # OUTPUT FORMATTING FUNCTIONS # def output_arc_celist(archery_object, cmd_args): """Output the list of ARC CE hostnames (JSON capable)""" arcce_ids = [s['id'] for s in archery_services(archery_object, 'org.nordugrid.arex')] if cmd_args.json: print(json.dumps(arcce_ids)) else: for ce in arcce_ids: print(ce) def json_config_object(archery_object): """Translate internal extended JSON to expanded JSON config that can be used as a source""" # helper function to use with output_json_config() jconf = {} if 'id' in archery_object: jconf['id'] = archery_object['id'] elif 'dns-name' in archery_object: jconf['dns-name'] = archery_object['dns-name'] if 
'type' in archery_object: jconf['type'] = archery_object['type'] if 'object' in archery_object: kind = archery_object['object'] elif 'reftype' in archery_object: kind = archery_object['reftype'] else: kind = 'group' if 'raw-dns' in archery_object: jconf['raw-dns'] = archery_object['raw-dns'] external_object = None if 'pointer_rr_data' in archery_object: external_object = archery_object['pointer_rr_data'].split(' ')[0].replace('u=', '') if kind == 'service': jconf['endpoints'] = [] if 'endpoints' in archery_object: if archery_object['endpoints']: external_object = None # do not mix endpoints and external for e in archery_object['endpoints']: if 's' in e and e['s'] == '0': jconf['endpoints'].append({ 'url': e['u'], 'type': e['t'], 'status': False }) else: jconf['endpoints'].append({e['u']: e['t']}) if kind == 'software': if 'endpoints' in archery_object: external_object = None for e in archery_object['endpoints']: if 't' in e and e['t'] == 'gpg.pubkey': jconf['pubkey_url'] = e['u'] if kind == 'rte': jconf['name'] = archery_object['id'] del jconf['id'] if 'description' in archery_object: jconf['description'] = unquote(archery_object['description']) if 'endpoints' in archery_object: external_object = None for e in archery_object['endpoints']: if 't' in e and e['t'] == 'gpg.signed': jconf['url'] = e['u'] if kind == 'gpg.pubkey.base64': if 'endpoints' in archery_object: for e in archery_object['endpoints']: if 'rr_data' in e: jconf['pubkey'] = e['rr_data'] if kind == 'gpg.signed.base64': if 'endpoints' in archery_object: for e in archery_object['endpoints']: if 'rr_data' in e: jconf['data'] = e['rr_data'] if 'contains' in archery_object: if archery_object['contains']: external_object = None # do not mix natively nested objects and external for child in archery_object['contains']: ckind, cobj = json_config_object(child) if ckind == 'service': if 'services' not in jconf: jconf['services'] = [] jconf['services'].append(cobj) elif ckind == 'software': jconf['software'] = cobj elif ckind == 'rte': if 'rtes' not in jconf: jconf['rtes'] = [] jconf['rtes'].append(cobj) elif ckind == 'gpg.pubkey.base64': if 'pubkey' in cobj: jconf['pubkey'] = cobj['pubkey'] elif ckind == 'gpg.signed.base64': if 'data' in cobj: jconf['data'] = cobj['data'] elif ckind == 'group': if 'groups' not in jconf: jconf['groups'] = [] jconf['groups'].append(cobj) if external_object is not None: jconf['external-archery-object'] = external_object # do not put leftovers to external object config for accidental_key in ['id', 'type', 'endpoints', 'contains']: if accidental_key in jconf: del jconf[accidental_key] return kind, jconf def output_json_config(archery_object, cmd_args): """Output the entire ARCHERY internal object in JSON to stdout (for debugging purposes)""" _, jconf = json_config_object(archery_object) print(json.dumps(jconf, indent=2)) def output_internal_object(archery_object, cmd_args): """Output the entire ARCHERY internal object in JSON to stdout (for debugging purposes)""" print(json.dumps(archery_object, indent=2)) def output_endpoints(archery_object, cmd_args): """Output the list of endpoints with types (JSON capable)""" elist = archery_endpoints(archery_object) if cmd_args.json: print(json.dumps(elist)) else: for e in elist: if 's' in e and e['s'] != '1': if not cmd_args.output_all: continue print('{u:<60} : {t}'.format(**e)) def output_services(archery_object, cmd_args): """Output the list of services with types (JSON capable)""" slist = archery_services(archery_object) if cmd_args.json: print(json.dumps(slist)) 
else: for s in slist: if 's' in s and s['s'] != '1': if not cmd_args.output_all: continue print('{id:<60} : {type}'.format(**s)) def txt_255(txt, getlist=False): """TXT records have a 255-byte limit and should be split into subsequent strings if longer""" txtlen = len(txt) if txtlen <= 255: if getlist: return [txt] return '"' + txt + '"' # split by 255 clen = 0 parts = [] while clen < txtlen: parts.append(txt[clen:clen + 254]) clen += 254 if getlist: return parts return '"' + '" "'.join(parts) + '"' def output_zonefile(archery_object, cmd_args): """Output the content of BIND zone file""" if cmd_args.json: logger.error('JSON format is not supported by DNS zone file formatter') if cmd_args.output_all: logger.debug('ARCHERY zone file formatter includes all endpoints by default and ignores the --output-all option') ttl = cmd_args.ttl rrset = list(archery_txt_rrset(archery_object)) rrset.sort() for rr in rrset: rr_mod = rr.split(' ', 1) txtdata = txt_255(rr_mod[1]) print('{0:<64}{1:>6} TXT {2}'.format(rr_mod[0], ttl, txtdata)) if 'raw-dns' in archery_object: if archery_object['rr_owner']: print('$ORIGIN {0}'.format(archery_object['rr_owner'])) for rdns in archery_object['raw-dns']: if not rdns['rdata']: continue if 'ttl' not in rdns: rdns['ttl'] = ttl if isinstance(rdns['rdata'], list): for rd in rdns['rdata']: srdns = rdns.copy() if srdns['type'] in ['TXT', 'SRV']: srdns['rdata'] = txt_255(rd) else: srdns['rdata'] = rd print('{name:<64}{ttl:>6} {type} {rdata}'.format(**srdns)) else: if rdns['type'] in ['TXT', 'SRV']: rdns['rdata'] = txt_255(rdns['rdata']) print('{name:<64}{ttl:>6} {type} {rdata}'.format(**rdns)) _output_formatters = { 'arc-CEs': output_arc_celist, 'services': output_services, 'endpoints': output_endpoints, 'zonefile': output_zonefile, 'json': output_json_config, '_debug': output_internal_object } # # ENDPOINT FILTERING CLASSES # class EndpointFilter(object): """Base interface class for implementing endpoint filters""" def __init__(self): self.filter_on_fetch = False def filter(self, endpoint_dict): raise NotImplementedError('Filter function should be implemented (return True means filtering)') def set_on_fetch(self): self.filter_on_fetch = True def on_fetch(self): return self.filter_on_fetch def help(self): raise NotImplementedError('Help function should be implemented') class EndpointFilterType(EndpointFilter): """Filter endpoints by type""" _resourseinfo_endpoint_types = [ 'org.nordugrid.ldapglue2', 'org.nordugrid.ldapng', 'org.ogf.glue.emies.resourceinfo', 'org.nordugrid.arcrest' ] @staticmethod def type(): return 'type' def __init__(self, args='arc-resourceinfo'): super(EndpointFilterType, self).__init__() self._allowed_endpoint_types = args.split(',') # handle resourceinfo alias for all nordugrid information endpoint types if 'arc-resourceinfo' in self._allowed_endpoint_types: self._allowed_endpoint_types += self._resourseinfo_endpoint_types self._allowed_endpoint_types.remove('arc-resourceinfo') logger.debug('Applying endpoints filtering with the following allowed types: %s', ','.join(self._allowed_endpoint_types)) def filter(self, endpoint_dict): if endpoint_dict['t'] not in self._allowed_endpoint_types: logger.info('Endpoint %s (type %s) filtered (type filter)', endpoint_dict['u'], endpoint_dict['t']) return True logger.debug('Endpoint %s (type %s) is allowed by defined type filter', endpoint_dict['u'], endpoint_dict['t']) return False def help(self): print('Endpoint type filter: \'-f type:<type>[,<type>[...]]\'') class EndpointFilterPortscan(EndpointFilter): """Filter endpoints by port 
connectivity check""" __uri_re = re.compile(r'^(?P(?:[^:]+)://(?P[^:/]+):(?P[0-9]+))/*.*') def __init__(self, args=None): super(EndpointFilterPortscan, self).__init__() self.__args = args self.timeout = _fetch_timeout @staticmethod def type(): return 'portscan' def filter(self, endpoint_dict): uri_data = self.__uri_re.match(endpoint_dict['u']) if uri_data: uri_parms = uri_data.groupdict() s = socket.socket() address = uri_parms['host'] port = int(uri_parms['port']) try: logger.debug('Testing connectivity to %s:%s network endpoint', address, port) s.settimeout(self.timeout) s.connect((address, port)) except Exception as err: logger.info('Endpoint %s (type %s) filtered (port connectivity filter) %s', endpoint_dict['u'], endpoint_dict['t'], str(err)) return True finally: s.close() logger.debug('Endpoint %s (type %s) is allowed by port connectivity filter', endpoint_dict['u'], endpoint_dict['t']) return False else: logger.error('Endpoint %s (type %s) filtered (port connectivity filter). Failed to parse URI.', endpoint_dict['u'], endpoint_dict['t']) return True def help(self): print('Endpoint port connectivity filter: \'-f portscan\'') class EndpointFilterAllowedVO(EndpointFilter): """Filter endpoints by allowed VO information in LDAP""" __uri_re = re.compile(r'^(?P(?P[^:/]+)://(?P[^:/]+)(?P:[0-9]+))/*.*') __ldap_uri_re = re.compile(r'^(?Pldap://(?P[^:/]+)(?::[0-9]+))/(?P.*)') def __init__(self, args=''): super(EndpointFilterAllowedVO, self).__init__() self.filter_on_fetch = True self._allowed_vos = args.split(',') self.timeout = _fetch_timeout @staticmethod def type(): return 'vo' def filter(self, endpoint_dict): if 'vos' not in endpoint_dict: logger.debug('No VO policy defined for endpoint %s (type %s). Filter will not block it.', endpoint_dict['u'], endpoint_dict['t']) return False for vo in self._allowed_vos: if vo not in endpoint_dict['vos']: logger.info('Endpoint %s (type %s) filtered (allowed VO filter)', endpoint_dict['u'], endpoint_dict['t']) return True logger.debug('Endpoint %s (type %s) is allowed by defined VO filter', endpoint_dict['u'], endpoint_dict['t']) return False def help(self): print('Endpoint allowed VO filter: \'-f vo:[,[...]]\'') _filters = { 'type': EndpointFilterType, 'vo': EndpointFilterAllowedVO, 'portscan': EndpointFilterPortscan } def filter_endpoints(archery_object, filters): """Recursively loop over archery onject and apply filters""" if archery_object['object'] == 'service': if 'endpoints' in archery_object and archery_object['endpoints']: filtered_endpoints = [] for e in archery_object['endpoints']: filtered = False for fo in filters: if not fo.on_fetch(): if fo.filter(e): filtered = True break if not filtered: filtered_endpoints.append(e) archery_object['endpoints'] = filtered_endpoints if not filtered_endpoints: return False else: if 'contains' in archery_object: filtered_contains = [] for c in archery_object['contains']: if filter_endpoints(c, filters): filtered_contains.append(c) archery_object['contains'] = filtered_contains return True # # ARCHERY DATA PROCESSING # def archery_endpoints(archery_object, etype=None): """Return list of endpoint data from ARCHERY object tree""" endpoints = [] # add endpoint records if 'endpoints' in archery_object: for edata in archery_object['endpoints']: # filter by endtpoint type if requested if etype is not None: if edata['t'] != etype: continue endpoints.append(edata) # process child records if 'contains' in archery_object: for cdata in archery_object['contains']: endpoints += archery_endpoints(cdata, etype) return 
endpoints def archery_services(archery_object, stype=None): """Return list of services data from ARCHERY object tree""" services = [] # detect type of object if 'object' in archery_object: object_kind = archery_object['object'] elif 'contains' in archery_object and archery_object['contains']: object_kind = 'group' else: object_kind = 'service' # process data if object_kind == 'group': if 'contains' in archery_object: for cdata in archery_object['contains']: services += archery_services(cdata, stype) elif object_kind == 'service': # filter by service type if stype is not None: if 'type' not in archery_object: logger.debug('There is no service type defined for service object at %s. Skipping.', archery_object['rr_owner']) return services if archery_object['type'] != stype: logger.debug('Skipping service object at %s (type %s does not match requested %s).', archery_object['rr_owner'], archery_object['type'], stype) return services # check for service id if 'id' not in archery_object: logger.debug('There is no ID defined for service object at %s. Skipping.', archery_object['rr_owner']) return services # append service services.append({ 'id': archery_object['id'], 'type': archery_object['type'] if 'type' in archery_object else None, }) return services def archery_txt_rrset(archery_object, parent_rr_owner=''): """Return set of TXT RRs for ARCHERY object tree""" rrset = set() if 'rr_owner' not in archery_object: logger.error('Malformed archery object to generate RRSet data. Execution aborted.') sys.exit(1) rr_owner = archery_object['rr_owner'] if parent_rr_owner: rr_owner += '.' + parent_rr_owner # construct object record (if not already exists) if 'rr_data' not in archery_object and 'object' in archery_object: rr = 'o=' + archery_object['object'] if 'type' in archery_object: rr += ' t=' + archery_object['type'].replace(' ', '-') if 'id' in archery_object: rr += ' id=' + archery_object['id'].replace(' ', '-') if 'description' in archery_object: rr += ' d=' + archery_object['description'].replace(' ', '-') # group object without type and id is the default behaviour (no RRSet needed) if rr != 'o=group': archery_object['rr_data'] = rr # add endpoint records has_endpoints = False if 'endpoints' in archery_object and archery_object['endpoints']: for edata in archery_object['endpoints']: # construct TXT rendering for endpoint record (if not already exists) if 'rr_data' not in edata: estatus = '' if 's' in edata and edata['s'] != 1: estatus = ' s={0}'.format(edata['s']) edata['rr_data'] = 'u={0} t={1}{2}'.format(edata['u'], edata['t'].replace(' ', '-'), estatus) # add service endpoints RRSet rrset.add('{0} {1}'.format(rr_owner, edata['rr_data'])) has_endpoints = True # add object id-record RRSet (if defined and not dummy service) if 'rr_data' in archery_object: if has_endpoints or archery_object['rr_data'] != 'o=service': rrset.add('{0} {1}'.format(rr_owner, archery_object['rr_data'])) # add child records if 'contains' in archery_object: for cdata in archery_object['contains']: # construct TXT rendering for pointer record (if not already exists) if 'pointer_rr_data' not in cdata: # status estatus = '' if 'status' in cdata and cdata['status'] != 1: estatus = ' s={0}'.format(cdata['status']) # child type if 'reftype' in cdata: ctype = cdata['reftype'] elif 'object' in cdata: ctype = 'archery.' 
+ cdata['object'] else: if 'contains' in cdata and len(cdata['contains']) > 0: ctype = 'archery.group' else: ctype = 'archery.service' cdata['pointer_rr_data'] = 'u=dns://{0}.{1} t={2}{3}'.format( cdata['rr_owner'], rr_owner, ctype, estatus ) # add pointed record RRSet prr_owner = rr_owner if not parent_rr_owner: # predefined entry point prr_owner = '_archery' if rr_owner: prr_owner += '.' + rr_owner rrset.add('{0} {1}'.format(prr_owner, cdata['pointer_rr_data'])) # add child object data RRSet rrset |= archery_txt_rrset(cdata, rr_owner) return rrset # # INFORMATION SOURCES PROCESSING FUNCTIONS # def get_file_celist(fpath): """Load hostnames from static list stored in file""" ce_list = [] try: with open(fpath, 'r') as fd: ce_list = [line.strip() for line in fd] return ce_list except EnvironmentError: logger.error('Failed to open file %s to read AEC CE list', fpath) return ce_list def get_egiis_celist(egiis_uri, ldap_timeout=_fetch_timeout): """Fetch CE hostnames from EGIIS (for migration)""" ce_list = [] ldap_uri_re = re.compile(r'^(?Pldap://[^:/]+(?::[0-9]+))/(?P.*)') parse_egiis_uri = ldap_uri_re.match(egiis_uri) if parse_egiis_uri: egiis_params = parse_egiis_uri.groupdict() ldap_uri = egiis_params['uri'] ldap_basedn = egiis_params['basedn'] else: logger.error('Failed to parse provided EGIIS URL %s. ' 'Expected format ldap://:/mds-vo-name=,o=grid. ', egiis_uri) return ce_list try: ldap_conn = ldap.initialize(ldap_uri) ldap_conn.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_timeout) ldap_conn.set_option(ldap.OPT_TIMEOUT, ldap_timeout) ldap_conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION3) logger.debug('Querying EGIIS: %s', egiis_uri) egiis_entries = ldap_conn.search_s(ldap_basedn, ldap.SCOPE_BASE) if egiis_entries is None: logger.error('EGIIS %s query returns empty result set.', egiis_uri) return ce_list for egiis_dn, egiis_entry in egiis_entries: if egiis_dn.startswith('nordugrid-cluster-name='): ce_list.append(egiis_entry['Mds-Service-hn'][0].decode()) else: ce_list += get_egiis_celist('ldap://{Mds-Service-hn[0]}:2135/' '{Mds-Service-Ldap-suffix[0]}'.format(**egiis_entry), ldap_timeout) except ldap.LDAPError as err: logger.warning('Failed to query EGIIS %s. Error: %s', egiis_uri, LDAPStrError(err)) return list(set(ce_list)) # # INFO ENDPOINTS PROCESSING FUNCTION # def get_arc_ce_endpoints_ldapglue2(hostname, port=2135, ldap_timeout=_fetch_timeout, filters=None): """Get ARC CE endpoints by querying LDAP GLUE2 (fallback to LDAP NG query)""" # define filters fetch_vos = False if filters is None: filters = [] else: for f in filters: if f.type() == 'vo': fetch_vos = True break endpoints = [] ldap_uri = 'ldap://{0}:{1}'.format(hostname, port) ldap_basedn = 'o=glue' ldap_filter = '(objectClass=GLUE2ComputingEndpoint)' ldap_attrs = ['GLUE2EndpointID', 'GLUE2EndpointURL', 'GLUE2EndpointHealthState', 'GLUE2EndpointInterfaceName'] try: ldap_conn = ldap.initialize(ldap_uri) ldap_conn.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_timeout) ldap_conn.set_option(ldap.OPT_TIMEOUT, ldap_timeout) ldap_conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION3) ldap_endpoints_list = ldap_conn.search_s(ldap_basedn, ldap.SCOPE_SUBTREE, ldap_filter, ldap_attrs) if ldap_endpoints_list is None: logger.error('LDAP GLUE2 query for %s returns empty result set.', hostname) return endpoints for ldap_dn, ldap_ee in ldap_endpoints_list: if 'GLUE2EndpointURL' not in ldap_ee: logger.warning( 'Failed to find endpoint URL in LDAP response for DN %s. 
' 'It seams GLUE2 rendering is broken for %s.', ldap_dn.decode(), ldap_uri) continue # get endpoint data e_id = ldap_ee['GLUE2EndpointID'][0].decode() e_url = ldap_ee['GLUE2EndpointURL'][0].decode() e_type = ldap_ee['GLUE2EndpointInterfaceName'][0].decode() e_entry = {'id': e_id, 'u': e_url, 't': e_type} if ldap_ee['GLUE2EndpointHealthState'][0].decode().upper() != 'OK': e_entry['s'] = '0' # fetch access policy if VO filtering is requested if fetch_vos: ldap_vo_filter = '(&(objectClass=GLUE2AccessPolicy)' \ '(GLUE2AccessPolicyEndpointForeignKey={0}))'.format(e_id) logger.debug('Querying AccessPolicy for endpoint %s (type %s)', e_url, e_type) vo_q_res = ldap_conn.search_s(ldap_basedn, ldap.SCOPE_SUBTREE, ldap_vo_filter, ['GLUE2PolicyRule']) if vo_q_res: for(_, policy_list) in vo_q_res: if 'GLUE2PolicyRule' in policy_list: e_vos = [v.decode().split(':', 1)[1] for v in policy_list['GLUE2PolicyRule']] if e_vos: e_entry['vos'] = e_vos # apply filters if any for fo in filters: if fo.on_fetch(): if fo.filter(e_entry): break else: # add endpoint if not filtered logger.debug('Found endpoint %s (type %s) for ARC CE %s', e_url, e_type, hostname) endpoints.append(e_entry) except (ldap.SERVER_DOWN, ldap.CONNECT_ERROR, ldap.TIMEOUT) as err: logger.error('Failed to connect to LDAP server for %s CE. Error: %s', hostname, LDAPStrError(err)) return endpoints except ldap.LDAPError as err: logger.error('Failed to query LDAP GLUE2 for %s. Error: %s', hostname, LDAPStrError(err)) # fallback to LDAP NG for classic legacy ARC CEs without GLUE2 support if not endpoints: logger.warning('There are no endpoints fetched for %s using LDAP GLUE2. Falling back to LDAP NG.', hostname) endpoints = get_arc_ce_endpoints_ldapng(hostname, port, ldap_timeout, filters) return endpoints def get_arc_ce_endpoints_ldapng(hostname, port=2135, ldap_timeout=_fetch_timeout, filters=None): """Get ARC CE endpoints by querying Legacy LDAP NorduGrid Schema""" if filters is None: filters = [] endpoints = [] ldap_uri = 'ldap://{0}:{1}'.format(hostname, port) ldap_basedn = 'Mds-Vo-name=local,o=grid' ldap_filter = '(objectClass=nordugrid-cluster)' ldap_attrs = ['nordugrid-cluster-contactstring', 'nordugrid-cluster-name', 'nordugrid-cluster-acl'] try: ldap_conn = ldap.initialize(ldap_uri) ldap_conn.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_timeout) ldap_conn.set_option(ldap.OPT_TIMEOUT, ldap_timeout) ldap_conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION3) ldap_endpoints_list = ldap_conn.search_s(ldap_basedn, ldap.SCOPE_SUBTREE, ldap_filter, ldap_attrs) if ldap_endpoints_list is None: logger.error('LDAP NG query for %s returns empty result set.', hostname) return endpoints for ldap_dn, ldap_ee in ldap_endpoints_list: if 'nordugrid-cluster-contactstring' not in ldap_ee: logger.warning( 'Failed to find endpoint URL (contactstring) in LDAP response for DN %s. 
' 'It seems NG rendering is broken for %s.', ldap_dn.decode(), ldap_uri) continue # get endpoint data e_id = ldap_ee['nordugrid-cluster-name'][0].decode() e_url = ldap_ee['nordugrid-cluster-contactstring'][0].decode() e_type = 'org.nordugrid.gridftpjob' e_entry = {'id': e_id, 'u': e_url, 't': e_type} # get authorized VOs if available if 'nordugrid-cluster-acl' in ldap_ee: e_vos = [v.decode().split(':', 1)[1] for v in ldap_ee['nordugrid-cluster-acl']] if e_vos: e_entry['vos'] = e_vos # apply filters if any for fo in filters: if fo.on_fetch(): if fo.filter(e_entry): break else: # add endpoint if not filtered logger.debug('Found endpoint %s (type %s) for ARC CE %s', e_url, e_type, hostname) endpoints.append(e_entry) # also add the ldapng endpoint to accompany the gridftpjob endpoint ldapng_uri = '{0}/{1}'.format(ldap_uri, ldap_basedn) ldapng_entry = {'id': ldapng_uri, 'u': ldapng_uri, 't': 'org.nordugrid.ldapng'} endpoints.append(ldapng_entry) except ldap.LDAPError as err: logger.error('Failed to query LDAP NG for %s. Error: %s', hostname, LDAPStrError(err)) return endpoints def _ldap_uri_dict(uri): """Parse LDAP URI and return the dict of URI components""" __ldap_uri_re = re.compile(r'^(?P<uri>ldap://(?P<host>[^:/]+)(?::[0-9]+))/(?P<basedn>.*)') ldap_uri_match = __ldap_uri_re.match(uri) if ldap_uri_match: ldap_uri_dict = ldap_uri_match.groupdict() else: logger.error('Cannot parse URI %s as LDAP URI. Skipping information fetching.', uri) return None return ldap_uri_dict def get_sitebdii_endpoints_ldapglue1(uri, ldap_timeout=_fetch_timeout, filters=None): """Get services and their endpoints by querying Site-BDII LDAP GLUE1""" ldap_uri_dict = _ldap_uri_dict(uri) if ldap_uri_dict is None: return [] if filters is None: filters = [] services = {} ldap_uri = ldap_uri_dict['uri'] ldap_basedn = ldap_uri_dict['basedn'] try: ldap_conn = ldap.initialize(ldap_uri) ldap_conn.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_timeout) ldap_conn.set_option(ldap.OPT_TIMEOUT, ldap_timeout) ldap_conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION3) # Query info (3 completely different kinds of objects in Glue1: Service, CE and SE) ldap_service_filter = '(|(objectClass=GlueService)(objectClass=GlueCE)' \ '(objectClass=GlueSE)(objectClass=GlueSEControlProtocol)' \ '(objectClass=GlueSEAccessProtocol))' # Service object attributes ldap_service_attrs = ['GlueServiceEndpoint', 'GlueServiceStatus', 'GlueServiceType', 'GlueServiceName'] # CE attribute ldap_service_attrs += ['GlueCEInfoContactString', 'GlueCEImplementationName', 'GlueInformationServiceURL', 'GlueForeignKey'] # SE attributes ldap_service_attrs += ['GlueSEImplementationName', 'GlueSEUniqueID', 'GlueChunkKey', 'GlueSEControlProtocolEndpoint', 'GlueSEControlProtocolType', 'GlueSEAccessProtocolEndpoint', 'GlueSEAccessProtocolType'] glue1_data = ldap_conn.search_s(ldap_basedn, ldap.SCOPE_SUBTREE, ldap_service_filter, ldap_service_attrs) if glue1_data is None: logger.error('Site-BDII LDAP GLUE1.3 query for %s/%s returns empty result set.', ldap_uri, ldap_basedn) return [] if filters: logger.warning('On-fetch filters are not supported for legacy GLUE1.3') for ldap_dn, ldap_data in glue1_data: if 'GlueServiceEndpoint' in ldap_data: # General service object parsing (both service and endpoint) s_id = ldap_data['GlueServiceName'][0].decode() se_type = ldap_data['GlueServiceType'][0].decode() if s_id not in services: services[s_id] = {'object': 'service', 'type': se_type, 'id': s_id, 'endpoints': []} services[s_id]['rr_owner'] = dns_rr_owner_name(services[s_id], ldap_dn) e_id = ldap_dn.decode() e_url = 
ldap_data['GlueServiceEndpoint'][0].decode() e_entry = {'id': e_id, 'u': e_url, 't': se_type} if ldap_data['GlueServiceStatus'][0].decode().upper() != 'OK': e_entry['s'] = '0' services[s_id]['endpoints'].append(e_entry) elif 'GlueCEInfoContactString' in ldap_data: # CE object parsing (both service and endpoint) s_id = ldap_data['GlueForeignKey'][0].decode() s_id = s_id[20:] # remove GlueClusterUniqueID= s_type = ldap_data['GlueCEImplementationName'][0].decode() se_type = s_type ie_type = s_type if s_type == 'CREAM': se_type = 'org.glite.ce.CREAM' ie_type = 'bdii_site' elif s_type == 'ARC-CE': se_type = 'org.nordugrid.gridftpjob' ie_type = 'org.nordugrid.ldapng' se_url = ldap_data['GlueCEInfoContactString'][0].decode() ie_url = ldap_data['GlueInformationServiceURL'][0].decode() if s_id not in services: services[s_id] = {'object': 'service', 'type': s_type, 'id': s_id, 'endpoints': []} services[s_id]['rr_owner'] = dns_rr_owner_name(services[s_id], ldap_dn) se_entry = {'id': se_url, 'u': se_url, 't': se_type} services[s_id]['endpoints'].append(se_entry) ie_entry = {'id': ie_url, 'u': ie_url, 't': ie_type} services[s_id]['endpoints'].append(ie_entry) elif 'GlueSE' in ldap_data: # SE object (service) s_id = ldap_data['GlueSEUniqueID'][0].decode() s_type = ldap_data['GlueSEImplementationName'][0].decode() services[s_id] = {'object': 'service', 'type': s_type, 'id': s_id, 'endpoints': []} elif 'GlueChunkKey' in ldap_data: # SE endpoint objects s_id = ldap_data['GlueChunkKey'][0].decode() s_id = s_id[15:] # remove GlueSEUniqueID= if 'GlueSEControlProtocolEndpoint' in ldap_data: e_url = ldap_data['GlueSEControlProtocolEndpoint'][0].decode() e_type = ldap_data['GlueSEControlProtocolType'][0].decode() else: e_url = ldap_data['GlueSEAccessProtocolEndpoint'][0].decode() e_type = ldap_data['GlueSEAccessProtocolType'][0].decode() if s_id not in services: continue e_entry = {'id': e_url, 'u': e_url, 't': e_type} services[s_id]['endpoints'].append(e_entry) else: logger.warning( 'Failed to find any known service data in the LDAP response for DN %s. ' 'It seams GLUE1.3 rendering is broken for %s/%s.', ldap_dn.decode(), ldap_uri, ldap_basedn) continue except ldap.LDAPError as err: logger.error('Failed to query LDAP GLUE1.3 for %s/%s. 
Error: %s', ldap_uri, ldap_basedn, LDAPStrError(err)) return services.values() def get_sitebdii_endpoints_ldapglue2(uri, ldap_timeout=_fetch_timeout, filters=None): """Get services and their endpoints by querying Site-BDII LDAP GLUE2""" ldap_uri_dict = _ldap_uri_dict(uri) if ldap_uri_dict is None: return [] # define filters fetch_vos = False if filters is None: filters = [] else: for f in filters: if f.type() == 'vo': fetch_vos = True break services = {} ldap_uri = ldap_uri_dict['uri'] glue1_fallback = False # construct GLUE2 base DN ldap_basedn = ldap_uri_dict['basedn'] if ldap_basedn.endswith('o=grid'): glue1_fallback = True # legacy glue1.3 basedn given: remove suffix, replace mds-vo-name ldap_basedn = ldap_basedn[:-6] + 'o=glue' ldap_basedn = 'GLUE2DomainID' + ldap_basedn[11:] try: ldap_conn = ldap.initialize(ldap_uri) ldap_conn.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_timeout) ldap_conn.set_option(ldap.OPT_TIMEOUT, ldap_timeout) ldap_conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION3) # Query services info ldap_service_filter = '(objectClass=GLUE2Service)' ldap_service_attrs = ['GLUE2ServiceID', 'GLUE2ServiceType'] service_info = ldap_conn.search_s(ldap_basedn, ldap.SCOPE_SUBTREE, ldap_service_filter, ldap_service_attrs) if service_info is None: logger.error('Site-BDII LDAP GLUE2 query for %s/%s returns empty result set.', ldap_uri, ldap_basedn) return [] for ldap_dn, ldap_s in service_info: if 'GLUE2ServiceID' not in ldap_s: logger.warning( 'Failed to find service ID in the LDAP response for DN %s. ' 'It seams GLUE2 rendering is broken for %s/%s.', ldap_dn.decode(), ldap_uri, ldap_basedn) continue # get service data s_id = ldap_s['GLUE2ServiceID'][0].decode() s_type = ldap_s['GLUE2ServiceType'][0].decode() services[s_id] = {'object': 'service', 'type': s_type, 'id': s_id, 'endpoints': []} logger.debug('Found service %s (type %s)', s_id, s_type) # Query endpoints info ldap_endpoints_filter = '(objectClass=GLUE2Endpoint)' ldap_endpoints_arrts = ['GLUE2EndpointID', 'GLUE2EndpointURL', 'GLUE2EndpointInterfaceName', 'GLUE2EndpointHealthState', 'GLUE2EndpointServiceForeignKey'] endpoints_info = ldap_conn.search_s(ldap_basedn, ldap.SCOPE_SUBTREE, ldap_endpoints_filter, ldap_endpoints_arrts) for ldap_dn, ldap_ee in endpoints_info: if 'GLUE2EndpointURL' not in ldap_ee: logger.warning( 'Failed to find endpoint URL in LDAP response for DN %s. 
' 'It seems GLUE2 rendering is broken for %s/%s.', ldap_dn.decode(), ldap_uri, ldap_basedn) continue e_id = ldap_ee['GLUE2EndpointID'][0].decode() e_url = ldap_ee['GLUE2EndpointURL'][0].decode() e_type = ldap_ee['GLUE2EndpointInterfaceName'][0].decode() e_entry = {'id': e_id, 'u': e_url, 't': e_type} if ldap_ee['GLUE2EndpointHealthState'][0].decode().upper() != 'OK': e_entry['s'] = '0' e_service = ldap_ee['GLUE2EndpointServiceForeignKey'][0].decode() # fetch access policy if VO filtering is requested if fetch_vos: ldap_vo_filter = '(&(objectClass=GLUE2AccessPolicy)' \ '(GLUE2AccessPolicyEndpointForeignKey={0}))'.format(e_id) logger.debug('Querying AccessPolicy for endpoint %s (type %s)', e_url, e_type) vo_q_res = ldap_conn.search_s(ldap_basedn, ldap.SCOPE_SUBTREE, ldap_vo_filter, ['GLUE2PolicyRule']) if vo_q_res: for(_, policy_list) in vo_q_res: if 'GLUE2PolicyRule' in policy_list: e_vos = [v.decode().split(':', 1)[1] for v in policy_list['GLUE2PolicyRule'] if v.decode().lower().startswith('vo:')] if e_vos: e_entry['vos'] = e_vos # apply filters if any for fo in filters: if fo.on_fetch(): if fo.filter(e_entry): break else: # add endpoint if not filtered logger.debug('Found endpoint %s (type %s) for %s service', e_url, e_type, e_service) if e_service not in services: logger.error('Found endpoint %s (type %s) for service ID %s, ' 'but service itself is missing in the rendering.', e_url, e_type, e_service) continue services[e_service]['endpoints'].append(e_entry) except (ldap.SERVER_DOWN, ldap.CONNECT_ERROR, ldap.TIMEOUT) as err: logger.error('Failed to connect to LDAP server %s. Error: %s', ldap_uri, LDAPStrError(err)) return services.values() except ldap.LDAPError as err: logger.error('Failed to query LDAP GLUE2 for %s/%s. Error: %s', ldap_uri, ldap_basedn, LDAPStrError(err)) # fallback to LDAP GLUE1 for legacy Site-BDII without GLUE2 support if not services and glue1_fallback: logger.warning('There are no service endpoints fetched for LDAP GLUE2 URI %s. ' 'Falling back to LDAP GLUE1 site-bdii query.', ldap_uri) return get_sitebdii_endpoints_ldapglue1(uri, ldap_timeout, filters) return services.values() _fetch_data_map = { 'arc-ldapglue2': get_arc_ce_endpoints_ldapglue2, 'arc-ldapng': get_arc_ce_endpoints_ldapng, 'sitebdii': get_sitebdii_endpoints_ldapglue2, 'sitebdii-glue1': get_sitebdii_endpoints_ldapglue1 } def _worker_info_fetch(fetch_queue, lock): """Worker thread to fetch enqueued data and add it to the ARCHERY object""" # { method, uri, obj, obj_attr, filters } while True: pdata = fetch_queue.get() logger.debug('Processing %s data fetching', pdata['uri']) fetch_f = _fetch_data_map[pdata['method']] fetch_data = fetch_f(pdata['uri'], filters=pdata['filters']) with lock: # add fetched data to the list if fetch_data: # generate rr_owner for nested objects if pdata['obj_attr'] == 'contains': for fobj in fetch_data: if 'endpoints' in fobj and not fobj['endpoints']: logger.warning('Service %s (type %s) contains no valid endpoints. 
Skipping.', fobj['id'], fobj['type']) continue fobj['rr_owner'] = dns_rr_owner_name(fobj, pdata['obj']['rr_owner']) pdata['obj']['contains'].append(fobj) else: pdata['obj'][pdata['obj_attr']] += fetch_data # handle status (mark as inactive if no endpoints are fetched) if not pdata['obj'][pdata['obj_attr']]: pdata['obj']['status'] = 0 elif 'status' in pdata['obj']: del pdata['obj']['status'] fetch_queue.task_done() def enqueue_object_data_fetch(fetch_queue, archery_object, applied_filters=None): """Process topology tree and enqueue object data to be fetched""" if 'endpoints' in archery_object: if 'endpoints_fetch' in archery_object: for fetch_method in archery_object['endpoints_fetch'].keys(): fetch_uri = archery_object['endpoints_fetch'][fetch_method] logger.debug('Enqueueing endpoints data fetch from %s using %s method.', fetch_uri, fetch_method) fetch_queue.put({ 'method': fetch_method, 'uri': fetch_uri, 'obj': archery_object, 'obj_attr': 'endpoints', 'filters': applied_filters }) if 'contains' in archery_object: if 'contains_fetch' in archery_object: # per-source filters from config source_filters = [] if 'filters' in archery_object['contains_fetch']: source_filters.extend(get_configured_fillters(archery_object['contains_fetch']['filters'], True)) if applied_filters is not None: source_filters.extend(applied_filters) for fetch_method in archery_object['contains_fetch'].keys(): if fetch_method == 'filters': continue fetch_uri = archery_object['contains_fetch'][fetch_method] logger.debug('Enqueueing group data fetch from %s using %s method.', fetch_uri, fetch_method) fetch_queue.put({ 'method': fetch_method, 'uri': fetch_uri, 'obj': archery_object, 'obj_attr': 'contains', 'filters': source_filters }) # recursively process tree for child_object in archery_object['contains']: enqueue_object_data_fetch(fetch_queue, child_object, applied_filters) def get_configured_fillters(filters_list=None, force_on_fetch=False): """Return list of filtering objects from """ applied_filters = [] if filters_list is not None: for f in filters_list: if f == 'help': print('Supported filters are:') for _, fclass in _filters.items(): fci = fclass() fci.help() sys.exit(0) fdef = f.split(':', 1) ftype = fdef[0] fargs = fdef[1] if len(fdef) > 1 else '' if ftype not in list(_filters.keys()): logger.error('Ignoring bad filter definition: %s', f) continue fclass = _filters[ftype] fobj = fclass(fargs) if force_on_fetch: fobj.set_on_fetch() applied_filters.append(fobj) return applied_filters def fetch_infosys_data(archery_object, applied_filters=None, threads=10): """Fetch infosys data to be added into the ARCHERY""" # create queue and object lock fetch_queue = TimeoutQueue() object_lock = Lock() # recursively add fetch tasks to the fetch queue enqueue_object_data_fetch(fetch_queue, archery_object, applied_filters) # start worker threads for i in range(threads): logger.debug('Staring worker thread %s to fetch infosys data.', i) worker = Thread(target=_worker_info_fetch, args=(fetch_queue, object_lock,)) worker.setDaemon(True) worker.start() # wait for parallel fetch to complete logger.info('Waiting for endpoint data fetching completion...') # make it killable while not fetch_queue.empty(): time.sleep(0.3) # join with timeout (in case of some stuck ldap connections) try: fetch_queue.join_with_timeout(_fetch_timeout*3) except OSError as e: logger.error(str(e)) sys.exit(1) # # ARCHERY TOPOLOGY PROCESSING # # FLAT ARC-CE LIST def get_arcce_topology(ce_list, rr_owner=''): """Create ARCHERY data object that represent ARC CE 
flat CE list topology""" archery_object = { 'object': 'group', 'rr_owner': rr_owner, 'contains': [], } for ce in ce_list: service_object = { 'object': 'service', 'type': 'org.nordugrid.arex', 'id': ce, 'endpoints': [] } service_object['rr_owner'] = dns_rr_owner_name(service_object, rr_owner) service_object['endpoints_fetch'] = { 'arc-ldapglue2': ce } archery_object['contains'].append(service_object) return archery_object # JSON CONFIG def get_json_topology(json_file, rr_owner='', timeout=_fetch_timeout): """Create ARCHERY data object that represent arbitrary topology defined in JSON config file""" try: with open(json_file, 'r') as jconf_f: jconf = json.load(jconf_f) except IOError as err: logger.error('Failed to open JSON config file %s. Error: %s', json_file, str(err)) sys.exit(1) except ValueError as err: logger.error('Failed to parse JSON config file %s. Error: %s', json_file, str(err)) sys.exit(1) # process groups recursively and return archery-manage internal object archery_object = group_object_from_json(jconf, rr_owner) # support raw DNS records only on the top-level if 'raw-dns' in jconf: archery_object['raw-dns'] = jconf['raw-dns'] return archery_object # JSON: helpers for groups/services def service_object_from_json(sconf): """Define service object content based on JSON config""" sobj = { 'object': 'service', 'endpoints': [] } if 'external-archery-object' in sconf: sobj['pointer_rr_data'] = 'u={0} t=archery.service'.format(sconf['external-archery-object']) return sobj if 'id' in sconf: sobj['id'] = sconf['id'] else: logger.error('Service description in config is missing mandatory "id" attribute. Service will be skipped. ' 'Provided JSON service description: %s', json.dumps(sconf)) return None if 'type' in sconf: sobj['type'] = sconf['type'] else: logger.error('Service description in config is missing mandatory "type" attribute. Service will be skipped. 
' 'Provided JSON service description: %s', json.dumps(sconf)) return None if 'endpoints' in sconf: for edict in sconf['endpoints']: erecord = {} for ekey in edict.keys(): if ekey == 'url': erecord['u'] = edict[ekey] elif ekey == 'type': erecord['t'] = edict[ekey] elif ekey == 'status': if not edict[ekey]: erecord['s'] = '0' else: erecord['u'] = ekey erecord['t'] = edict[ekey] sobj['endpoints'].append(erecord) return sobj def group_object_from_json(jconf, rr_owner): """Define service object content based on JSON config""" archery_object = { 'object': 'group', 'rr_owner': rr_owner, 'contains': [], } if 'external-archery-object' in jconf: archery_object['pointer_rr_data'] = 'u={0} t=archery.group'.format(jconf['external-archery-object']) return archery_object if 'arc-services' in jconf: archery_arcces_object = get_arcce_topology(jconf['arc-services'], rr_owner) archery_object['contains'].extend(archery_arcces_object['contains']) if 'services' in jconf: for sconf in jconf['services']: sobj = service_object_from_json(sconf) if sobj is not None: sobj['rr_owner'] = dns_rr_owner_name(sobj, rr_owner) archery_object['contains'].append(sobj) if 'id' in jconf: archery_object['id'] = jconf['id'] if 'dns-name' in jconf: archery_object['dns-name'] = jconf['dns-name'] if 'type' in jconf: archery_object['type'] = jconf['type'] if 'external-source' in jconf: archery_object['contains_fetch'] = jconf['external-source'] if 'software' in jconf: swobj = software_object_from_json(jconf['software']) if swobj is not None: archery_object['contains'].append(swobj) if 'groups' in jconf: g_idx = 0 for gconf in jconf['groups']: g_idx += 1 if 'dns-name' in gconf: g_rr_owner = gconf['dns-name'] elif 'id' in gconf: g_rr_owner = gconf['id'].replace(' ', '-') else: gconf['dns-name'] = dns_rr_owner_name(gconf, 'group{0}.{1}'.format(g_idx, rr_owner)) g_rr_owner = gconf['dns-name'] archery_object['contains'].append(group_object_from_json(gconf, g_rr_owner)) return archery_object # JSON: helpers for software objects def __get_rte_description(rte_path): """Extract embedded RTE description from RTE file""" with open(rte_path) as rte_f: max_lines = 10 description = None for line in rte_f: descr_re = re.match(r'^#+\s*description:\s*(.*)\s*$', line, flags=re.IGNORECASE) if descr_re: description = descr_re.group(1) max_lines -= 1 if not max_lines: break return description def __get_dir_rtes(rtedir): """Get all RTEs defined by classic directory structure""" rtes = {} for path, _, files in os.walk(rtedir): rtebase = path.lstrip(rtedir + '/') for f in files: rtename = rtebase + '/' + f if rtebase else f rtepath = path + '/' + f if os.path.islink(rtepath): rtepath = os.readlink(rtepath) rtes[rtename] = rtepath return rtes def software_object_from_json(jconf): """Get necessary data and define software object based on JSON config""" sconf = { 'object': 'software', 'rr_owner': '_software', 'endpoints': [], 'contains': [] } # set gpg options gpg_home = [] warn_gpg_home = True if 'gpg_home' in jconf: gpg_home = ['--homedir', jconf['gpg_home']] warn_gpg_home = False keyid = [] if 'gpg_keyid' in jconf: keyid.append(jconf['gpg_keyid']) # set directory to hold signed RTE files signed_dir = 'signed' if 'signed_rtes_dir' in jconf: signed_dir = jconf['signed_rtes_dir'] # public key data for archery.software object if 'pubkey_url' in jconf: sconf['endpoints'].append({ 'u': jconf['pubkey_url'], 't': 'gpg.pubkey' }) else: if 'pubkey' not in jconf: # if there is no defined public key, export from GPG automatically if warn_gpg_home: logger.warning('There 
is no GPG home defined in the configuration. Using default GPG path.') warn_gpg_home = False keyout = tempfile.mkstemp(suffix='.key', prefix='pubkey-')[1] os.unlink(keyout) gpgcmd = ['gpg'] + gpg_home + ['--output', keyout, '--export'] + keyid logger.info('Exporting public key from GPG database using: %s', ' '.join(gpgcmd)) gpgproc = subprocess.Popen(gpgcmd) gpgproc.wait() if gpgproc.returncode != 0 or not os.path.exists(keyout): logger.error('Failed to export public key from GPG database') sys.exit(1) with open(keyout, 'rb') as key_f: jconf['pubkey'] = base64.b64encode(key_f.read()).decode() os.unlink(keyout) # add child object with key in the DNS sconf['contains'].append({ 'reftype': 'gpg.pubkey.base64', 'rr_owner': '_pubkey', 'endpoints': [{ 'rr_data': jconf['pubkey'] }] }) # generate RTEs from directory (if defined) if 'rtes_dir' in jconf: if not os.path.exists(jconf['rtes_dir']): logger.error('Path to RTEs directory (%s) does not exists.', jconf['rtes_dir']) else: dirrtes = __get_dir_rtes(jconf['rtes_dir']) if dirrtes and 'rtes' not in jconf: jconf['rtes'] = [] for rte in dirrtes: logger.debug('Adding RTE %s to software registry', rte) jconf['rtes'].append({ 'name': rte, 'path': dirrtes[rte] }) # process RTE objects if 'rtes' not in jconf: logger.warning('No RTEs defined in the software object. Nothing to do.') return sconf for rte in jconf['rtes']: # rte object info if 'name' not in rte: logger.error('Malformed RTE definition. Name is missing in %s', json.dumps(rte)) continue rtename = rte['name'] rteobj = { 'object': 'rte', 'id': rtename, 'endpoints': [] } rteobj['rr_owner'] = dns_rr_owner_name(rteobj, sconf['rr_owner']) if 'description' in rte: rteobj['description'] = quote(rte['description']) # rte content endpoint if 'url' in rte: rteobj['endpoints'].append({ 'u': rte['url'], 't': 'gpg.signed' }) elif 'data' in rte: try: base64.b64decode(rte['data']) except TypeError: logger.error('Cannon parse RTE %s data as base64 encoded. Skipping.') continue if 'contains' not in rteobj: rteobj['contains'] = [] rteobj['contains'].append({ 'reftype': 'gpg.signed.base64', 'rr_owner': '_data', 'endpoints': [{ 'rr_data': rte['data'] }] }) elif 'path' in rte: rtepath = rte['path'] if not os.path.exists(rtepath): logger.error('Malformed RTE %s definition. RTE path %s does not exists.', rtename, rte['path']) continue if 'description' not in rteobj: logger.debug('Trying to fetch description from RTE file at %s', rtepath) filedescr = __get_rte_description(rtepath) if filedescr: rteobj['description'] = quote(filedescr) if not os.path.exists(signed_dir): try: os.mkdir(signed_dir, 0o755) except IOError as e: logger.error('Failed to create directory for signed RTEs in %s. Error: %s', signed_dir, str(e)) # signed RTE path srtename = rtename.replace('/', '-') + '.signed' srtepath = os.path.join(signed_dir, srtename) sign_needed = True if os.path.exists(srtepath): rte_mtime = os.path.getmtime(rtepath) srte_mtime = os.path.getmtime(srtepath) if rte_mtime > srte_mtime: logger.info('Signed RTE file for %s is already exist (%s). ' 'But the RTE file updated more recently. Going to recreate signed RTE.', rtename, srtename) os.unlink(srtepath) else: logger.info('Signed RTE file for %s is already exist (%s). Skipping signing.', rtename, srtename) sign_needed = False # sign rtes if sign_needed: if warn_gpg_home: logger.warning('There is no GPG home defined in the configuration. 
Using default GPG path.') warn_gpg_home = False gpgcmd = ['gpg'] + gpg_home + ['--output', srtepath, '--sign', rtepath] logger.info('Signing RunTimeEnvironment %s with GPG using %s', rtename, ' '.join(gpgcmd)) gpgproc = subprocess.Popen(gpgcmd) gpgproc.wait() if gpgproc.returncode != 0 or not os.path.exists(srtepath): logger.error('Failed to sign RunTimeEnvironment %s', rtename) sys.exit(1) # if URL is defined, just add endpoint if 'signed_rtes_url' in jconf: rteurl = jconf['signed_rtes_url'].rstrip('/') + '/' rteobj['endpoints'].append({ 'u': rteurl + srtename, 't': 'gpg.signed' }) else: # or embedd RTE into the DNS with open(srtepath, 'rb') as srte_f: if 'contains' not in rteobj: rteobj['contains'] = [] rteobj['contains'].append({ 'reftype': 'gpg.signed.base64', 'rr_owner': '_data', 'endpoints': [{ 'rr_data': base64.b64encode(srte_f.read()).decode() }] }) # add rte object to software object sconf['contains'].append(rteobj) # remind about RTEs upload if were configured if 'signed_rtes_url' in jconf: logger.info('NOTE! According to configuration signed RTEs should be uploaded to %s ' 'from "%s" directory to be accessible.', jconf['signed_rtes_url'], signed_dir) return sconf # CONFIG FROM GOCDB def get_gocdb_topology(rr_owner='', timeout=_fetch_timeout): """Create ARCHERY data object that represent GOCDB-defined EGI topology""" gocdb_host = 'goc.egi.eu' gocdb_path = '/gocdbpi/public/?method=get_site_list' # fetch EGI topology data topology = {} conn = HTTPSInsecureConnection(gocdb_host, timeout=timeout) try: # fetch the data conn.request('GET', gocdb_path) response = conn.getresponse() if response.status != 200: logger.error('Failed to get sites list from GOCDB PI at http://%s%s. HTTP reason: %s', gocdb_host, gocdb_path, response.reason) sys.exit(1) # parse the XML gocdb_xml = ElementTree.fromstring(response.read()) for site in gocdb_xml: ngi = site.attrib['ROC'] if str(site.attrib['GIIS_URL']).strip() == '': logger.warning('Site %s in %s NGI contains no Site-BDII information. Skipping.', site.attrib['NAME'], ngi) continue if ngi not in topology: topology[ngi] = {} topology[ngi][site.attrib['NAME']] = site.attrib['GIIS_URL'] except Exception as e: logger.error('Failed to query GOCDB PI at http://%s%s. 
Error: %s', gocdb_host, gocdb_path, e) # create archery object skeleton archery_object = { 'object': 'group', 'type': 'org.egi.infrastructure', 'id': 'EGI', 'rr_owner': rr_owner, 'contains': [], } for ngi in topology.keys(): ngi_object = { 'object': 'group', 'type': 'org.egi.ngi', 'id': ngi, 'rr_owner': ngi.replace(' ', '-'), 'contains': [], } for site in topology[ngi].keys(): site_object = { 'object': 'group', 'type': 'org.egi.site', 'id': site, 'rr_owner': site.replace(' ', '-'), 'contains': [], 'contains_fetch': { 'sitebdii': topology[ngi][site] } } ngi_object['contains'].append(site_object) archery_object['contains'].append(ngi_object) return archery_object # # ARCHERY DNS PROCESSING # def dns_rr_owner_name(archery_object, parent_owner): """Generate RR owner name based on the object content and parent owner name""" # TODO: consider to add another naming schemes # concatenate different object attribute values strid = parent_owner if 'object' in archery_object: strid += archery_object['object'] else: strid += 'group' if 'type' in archery_object: strid += archery_object['type'] if 'id' in archery_object: strid += archery_object['id'] # produce SHA1 hash (SHA1 selected for best speed) and shorten it return hashlib.sha1(strid.encode()).hexdigest()[:10] def parse_archery_txt(txtstr): """Get data dict from ARCHERY DNS TXT string representation""" rrdata = {} for kv in txtstr.split(' '): # in case of broken records if len(kv) < 3: logger.warning('Malformed archery TXT entry "%s" ("%s" too short for k=v)', txtstr, kv) continue # only one letter keys and 'id' is supported now if kv[1] == '=': rrdata[kv[0]] = kv[2:] elif kv.startswith('id='): rrdata['id'] = kv[3:] else: logger.warning('Malformed archery TXT entry "%s" (%s does not match k=value)', txtstr, kv) return rrdata def fetch_archery_dns_data(dns_name, nameserver=None, threads=1): """Get ARCHERY data object from DNS endpoint""" archery_object = { 'contains': [], 'endpoints': [], 'rr_owner': '' } req_queue = Queue() req_queue.put({ 'name': dns_name, 'obj': archery_object, 'parent_name': '', }) # start worker threads for i in range(threads): logger.debug('Staring worker thread %s to fetch DNS data.', i) worker = Thread(target=_worker_resolver, args=(req_queue, nameserver,)) worker.setDaemon(True) worker.start() # wait for parallel fetch to complete logger.info('Waiting for DNS queries completion...') req_queue.join() return archery_object def _worker_resolver(req_queue, nameserver=None): """Worker thread to fetch DNS data""" # thread DNS resolver resolver = dns.resolver.Resolver() if nameserver is not None: resolver.nameservers = [nameserver] # request while True: req = req_queue.get() __fetch_archery_dns_data(req_queue, req['obj'], req['name'], resolver, req['parent_name']) req_queue.task_done() def __fetch_archery_dns_data(req_queue, archery_object, dns_name, resolver, parent_name=''): """Process ARCHERY data from DNS RRSet""" # construct archery exact domain name to query (ensure the dot is at the end) if dns_name[0:6] == 'dns://': dns_name = dns_name[6:].rstrip('.') + '.' else: dns_name = dns_name.rstrip('.') + '.' pdns_name = dns_name qdns_name = dns_name if not parent_name: # default entry point qdns_name = '_archery.' 
+ dns_name # query TXT RRSet logger.debug('Querying ARCHERY data from: %s', dns_name) try: archery_rrs = resolver.query(qdns_name, 'TXT') # get owner name (without full DNS suffix including dot) rrset_name = pdns_name if parent_name and rrset_name.endswith(parent_name): rrset_name = rrset_name[:-(len(parent_name)+1)] archery_object['rr_owner'] = rrset_name for rr in archery_rrs: # fetch all records txt = '' for rri in rr.strings: txt += rri.decode() # special cases for '_pubkey' and '_data' that contains raw data if rrset_name in ['_pubkey', '_data']: archery_object['endpoints'].append({'rr_data': txt}) continue # parse object data rrdata = parse_archery_txt(txt) # object description resource record found if 'o' in rrdata: archery_object['rr_data'] = txt archery_object['object'] = rrdata['o'] # type and id for the object if available if 't' in rrdata: archery_object['type'] = rrdata['t'] if 'id' in rrdata: archery_object['id'] = rrdata['id'] # description for archery.rte object if 'd' in rrdata: archery_object['description'] = rrdata['d'] # other records that contains endpoint/grouping data elif 'u' in rrdata: if 't' in rrdata: if rrdata['t'] in ['archery.group', 'archery.service', 'org.nordugrid.archery', 'archery.software', 'archery.rte', 'gpg.pubkey.base64', 'gpg.signed.base64']: # fetch the data from DNS child_object = { 'contains': [], 'endpoints': [], 'pointer_rr_data': txt, 'rr_owner': '' } # add reftype for raw data referenced objects if rrdata['t'] in ['gpg.pubkey.base64', 'gpg.signed.base64']: child_object['reftype'] = rrdata['t'] # add status for child object if defined if 's' in rrdata and rrdata['s'] != '1': child_object['status'] = 0 archery_object['contains'].append(child_object) # enqueue request to fetch child data (child object is already created) req_queue.put({ 'name': rrdata['u'], 'obj': child_object, 'parent_name': pdns_name, }) else: rrdata['rr_data'] = txt archery_object['endpoints'].append(rrdata) else: logger.error('ARCHERY data in %s contains broken endpoint record without type: %s', dns_name, txt) continue # check for objects with no endpoints if not archery_object['contains'] and not archery_object['endpoints']: logger.warning('ARCHERY service object defined by %s has no endpoints.', dns_name) except DNSException as err: logger.warning('Failed to query ARCHERY data from %s (Error: %s)', dns_name, err) # if query failed (leftover objects in DNS) - still provide RR owner for the pointer record rrset_name = dns_name if rrset_name.endswith(parent_name): rrset_name = rrset_name[:-len(parent_name)] archery_object['rr_owner'] = rrset_name def _raw_dns_fetch(dnsdata, domain, nameserver=None): resolver = dns.resolver.Resolver() if nameserver is not None: resolver.nameservers = [nameserver] for rdns in dnsdata: # format configured data rdns['config_data'] = set() if rdns['rdata']: if isinstance(rdns['rdata'], list): for rdata in rdns['rdata']: rdns['config_data'].add(rdata) else: rdns['config_data'].add(rdns['rdata']) # fetch and construct server data rdns['server_data'] = set() resolve_name = domain if rdns['name']: resolve_name = rdns['name'] + '.' 
+ resolve_name try: handle_no_answer = True if rdns['type'] == 'NS' else False logger.debug('Querying raw DNS data (type %s) from %s', rdns['type'], resolve_name) rrs = resolver.query(resolve_name, rdns['type'], raise_on_no_answer=(not handle_no_answer)) # handle NS records if not rrs.response.answer: for rr in rrs.response.authority: for lrr in rr.to_text().split('\n'): rdns['server_data'].add(lrr.split(' ')[-1]) except dns.resolver.NXDOMAIN as e: logger.debug('NXDOMAIN received for %s DNS query for domain %s.', rdns['type'], resolve_name) except dns.resolver.NoAnswer as e: logger.warning('No answer for %s DNS query for domain %s. Error: %s', rdns['type'], resolve_name, str(e)) except dns.resolver.NoNameservers as e: logger.warning('No nameservers received for %s DNS query for domain %s. Error: %s', rdns['type'], resolve_name, str(e)) else: for rr in rrs: rdns['server_data'].add(rr.to_text().strip('"')) # # HANDLE DDNS UPDATE # _tsig_algorithms = { 'HMAC-MD5': dns.tsig.HMAC_MD5, 'HMAC-SHA1': dns.tsig.HMAC_SHA1, 'HMAC-SHA224': dns.tsig.HMAC_SHA224, 'HMAC-SHA256': dns.tsig.HMAC_SHA256, 'HMAC-SHA384': dns.tsig.HMAC_SHA384, 'HMAC-SHA512': dns.tsig.HMAC_SHA512, } def archery_ddns_update(domain, nameserver, keyring_dict, new_archery_object, ttl=3600, fetch_threads=1, keyalgorithm=dns.tsig.default_algorithm): """Incrementally updates ARCHERY data records in DNS""" keyring = dns.tsigkeyring.from_text(keyring_dict) main_rr_owner = domain.rstrip('.') + '.' # new ARCHERY TXT data according to provided data object new_dns_rrset = archery_txt_rrset(new_archery_object) # old endpoints from querying the ARCHERY DNS zone dns_archery_object = fetch_archery_dns_data(main_rr_owner, nameserver=nameserver, threads=fetch_threads) old_dns_rrset = archery_txt_rrset(dns_archery_object) # print(json.dumps(list(new_dns_rrset), indent=2)) # print(json.dumps(list(old_dns_rrset), indent=2)) remove_rrs = old_dns_rrset - new_dns_rrset add_rrs = new_dns_rrset - old_dns_rrset logger.info('DNS incremental update includes %s records to add and %s records to remove', len(add_rrs), len(remove_rrs)) # print(json.dumps(list(add_rrs), indent=2)) # print(json.dumps(list(remove_rrs), indent=2)) try: update = dns.update.Update(domain, keyring=keyring, keyalgorithm=keyalgorithm) for r in remove_rrs: logger.debug('Going to REMOVE record by means of DDNS update: %s', r) rr = r.split(' ', 1) txts = txt_255(rr[1].replace(' ', r'\ '), getlist=True) update.delete(rr[0], 'txt', ' '.join(txts)) if len(update.to_wire()) > 65000: logger.info('Size limit reached. Sending partial DDNS update.') dns.query.tcp(update, nameserver) update = dns.update.Update(domain, keyring=keyring, keyalgorithm=keyalgorithm) for a in add_rrs: logger.debug('Going to ADD record by means of DDNS update: %s', a) ar = a.split(' ', 1) txts = txt_255(ar[1].replace(' ', r'\ '), getlist=True) update.add(ar[0], ttl, 'txt', ' '.join(txts)) if len(update.to_wire()) > 65000: logger.info('Size limit reached. Sending partial DDNS update.') dns.query.tcp(update, nameserver) update = dns.update.Update(domain, keyring=keyring, keyalgorithm=keyalgorithm) # if exception is not raised we have succeeded with update dns.query.tcp(update, nameserver) logger.info('ARCHERY information has been updated for zone %s', domain) except DNSException as e: logger.error('Failed in ARCHERY data DDNS update. Error: %s', e) # check raw DNS records are in sync in the zone if 'raw-dns' in archery_object: logger.info('Raw DNS data is defined in the config. 
Going to check defined records consistency.') _raw_dns_fetch(archery_object['raw-dns'], domain, nameserver) try: rawupdate = dns.update.Update(domain, keyring=keyring, keyalgorithm=keyalgorithm) needs_rawupdate = False for rr in archery_object['raw-dns']: for cr in list(rr['server_data'] - rr['config_data']): logger.debug('Going to REMOVE raw DNS record by means of DDNS update: %s %s %s', rr['name'], rr['type'], cr) needs_rawupdate = True rawupdate.delete(rr['name'], dns.rdatatype.from_text(rr['type']), cr) for cr in list(rr['config_data'] - rr['server_data']): logger.debug('Going to ADD raw DNS record by means of DDNS update: %s %s %s', rr['name'], rr['type'], cr) needs_rawupdate = True rawupdate.add(rr['name'], ttl, dns.rdatatype.from_text(rr['type']), cr) if needs_rawupdate: dns.query.tcp(rawupdate, nameserver) logger.info('Defined raw DNS data has been updated for zone %s', domain) else: logger.info('Defined raw DNS data is in sync for zone %s', domain) except DNSException as e: logger.error('Failed in raw DNS data DDNS update. Error: %s', e) # # MAIN EXECUTION CYCLE # def get_parser(): """Command line arguments parser""" parser = argparse.ArgumentParser(description='The archery-manage tool is used to simplify common operations with ARCHERY, including registry initial bootstrap, integration with topology databases and keeping dynamic information up to date.') parser.add_argument('-d', '--debug', action='store', default='INFO', choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG']) parser.add_argument('-s', '--source', action='store', required=True, help='Services topology source (use \'help\' value to print available sources)') parser.add_argument('-f', '--filter', action='append', help='Add endpoints filter (use \'help\' value to print available filters)') parser.add_argument('-o', '--output', choices=list(_output_formatters.keys()), help='Write requested data to stdout') parser.add_argument('--json', action='store_true', help='Change output format from plaintext to JSON') parser.add_argument('--output-all', action='store_true', help='Output all services/endpoints including inactive (filters are still applied)') parser.add_argument('-u', '--ddns-update', action='store_true', help='Invoke DNS zone incremental DDNS update secured by TSIG key') parser.add_argument('--domain', help='Domain name of the ARCHERY endpoint to use (required for DDNS update)') parser.add_argument('--ddns-master-ip', help='Master DNS IP address (required for DDNS update)') parser.add_argument('--ddns-tsig-keyfile', help='TSIG keyfile (required for DDNS update)') parser.add_argument('--ddns-tsig-algorithm', help='Cryptographic algorithm for TSIG', choices=list(_tsig_algorithms.keys()), default='HMAC-MD5') parser.add_argument('--ttl', action='store', default='3600', type=int, help='DNS resource records TTL value to use (default is %(default)s)') parser.add_argument('--threads', action='store', default='8', type=int, help='Number of threads to fetch information in parallel (default is %(default)s)') parser.add_argument('--timeout', action='store', default='10', type=int, help='Per-source information fetching timeout (default is %(default)s seconds)') return parser if __name__ == '__main__': # Process command line arguments parser = get_parser() cmd_args = parser.parse_args() # Set requested logging level logger.setLevel(getattr(logging, cmd_args.debug, 20)) # Set per-source fetch timeout value _fetch_timeout = cmd_args.timeout # Domain name to work with domain = cmd_args.domain # Check DDNS update required options 
before doing anything if cmd_args.ddns_update: # check for domain if domain is None: logger.error('Domain name (--domain) is required to use DDNS update') sys.exit(1) # check for master nameserver IP if cmd_args.ddns_master_ip is None: logger.error('DNS master IP (--ddns-master-ip) is required to use DDNS update') sys.exit(1) nameserver = cmd_args.ddns_master_ip # check for keyring if cmd_args.ddns_tsig_keyfile is None: logger.error('TSIG keyfile (--ddns-tsig-keyfile) is required to use DDNS update') sys.exit(1) else: try: logger.debug('Reading TSIG key from %s', cmd_args.ddns_tsig_keyfile) with open(cmd_args.ddns_tsig_keyfile, 'r') as tsig_f: keyring_str = tsig_f.readline() keyring_s = keyring_str.split(':') if len(keyring_s) != 2: logger.error('Failed to parse TSIG keyfile %s. Expected format is keyname:secret', cmd_args.ddns_tsig_keyfile) sys.exit(1) logger.debug('TSIG key %s has been read successfully', keyring_s[0]) keyring_dict = {keyring_s[0]: keyring_s[1]} except EnvironmentError as err: logger.error('Failed to read TSIG keyfile %s. Error: %s', cmd_args.ddns_tsig_keyfile, err) sys.exit(1) # Parse filters for fetching endpoints applied_filters = get_configured_fillters(cmd_args.filter) # Base domain name to work with rr_owner = '' if domain is not None: rr_owner += domain.rstrip('.') + '.' # Define services topology logger.info('Constructing ARCHERY objects topology according to configuration.') source = cmd_args.source if source.startswith('json:'): logger.info('Obtaining services topology from JSON configuration file: %s', source[5:]) archery_object = get_json_topology(source[5:], rr_owner) elif source.startswith('file:'): logger.error('The \'file:\' source type is deprecated. Use \'arcce-list:\' for the same behavior.') sys.exit(1) elif source.startswith('arcce-list:'): logger.info('Obtaining ARC CEs list from file: %s', source[11:]) ce_list = get_file_celist(source[11:]) archery_object = get_arcce_topology(ce_list, rr_owner) elif source.startswith('egiis:'): logger.info('Obtaining ARC CEs list from EGIIS: %s', source[6:]) ce_list = get_egiis_celist(source[6:]) logger.debug('Fetched EGIIS CEs list to work with: %s', ', '.join(ce_list)) archery_object = get_arcce_topology(ce_list, rr_owner) elif source.startswith('archery:'): logger.info('Obtaining services topology from ARCHERY DNS endpoint: %s', source[8:]) archery_object = fetch_archery_dns_data(source[8:], threads=cmd_args.threads) elif source.startswith('gocdb'): logger.info('Obtaining services topology from EGI GOCDB.') archery_object = get_gocdb_topology(rr_owner) elif source == 'help': sources_types = { 'json': 'Topology defined in JSON configuration file', 'arcce-list': 'List of ARC CE hostnames stored in file', 'archery': 'ARCHERY endpoint', 'egiis': 'Legacy EGIIS LDAP URI', 'gocdb': 'EGI GOCDB', } print('Supported source types:') for st, sd in sources_types.items(): print(' {0:>12}: {1}'.format(st, sd)) sys.exit(0) else: logger.error('Unsupported source: %s', source) sys.exit(1) # Fetch topology data from defined infosys services logger.info('Fetching endpoints data from information system.') fetch_infosys_data(archery_object, applied_filters, threads=cmd_args.threads) # Post-fetch endpoint filtering do_filtering = False for f in applied_filters: # at least one non-on-fetch filter should be defined if not f.on_fetch(): do_filtering = True break if do_filtering: logger.info('Starting endpoint filtering loop') filter_endpoints(archery_object, applied_filters) # Invoke DDNS update if requested if cmd_args.ddns_update: 
logger.info('Sending update to DNS master %s via DDNS protocol (using TSIG key %s)', nameserver, list(keyring_dict.keys())[0]) archery_ddns_update(domain, nameserver, keyring_dict, archery_object, ttl=cmd_args.ttl, fetch_threads=cmd_args.threads, keyalgorithm=_tsig_algorithms[cmd_args.ddns_tsig_algorithm]) # Output information if requested if cmd_args.output: formatter_f = _output_formatters[cmd_args.output] formatter_f(archery_object, cmd_args) nordugrid-arc-6.14.0/src/utils/PaxHeaders.30264/hed0000644000000000000000000000013214152153475017756 xustar000000000000000030 mtime=1638455101.151588761 30 atime=1638455103.999631554 30 ctime=1638455101.151588761 nordugrid-arc-6.14.0/src/utils/hed/0000755000175000002070000000000014152153475020020 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/utils/hed/PaxHeaders.30264/wsdl2hed.cpp0000644000000000000000000000013214152153376022253 xustar000000000000000030 mtime=1638455038.450646651 30 atime=1638455038.517647657 30 ctime=1638455101.147588701 nordugrid-arc-6.14.0/src/utils/hed/wsdl2hed.cpp0000644000175000002070000002606114152153376022245 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include #include "schemaconv.h" Arc::NS ns; struct to_upper { int operator() (int ch) { return std::toupper(ch); } }; struct to_lower { int operator() (int ch) { return std::tolower(ch); } }; static void std_h_header(std::string &name, std::ofstream &h) { std::string uname = name; std::transform(name.begin(), name.end(), uname.begin(), to_upper()); h << "// Generated by wsdl2hed " << std::endl; h << "#ifndef __ARC_" << uname << "_H__" << std::endl; h << "#define __ARC_" << uname << "_H__" << std::endl; h << std::endl; h << "#include " << std::endl; h << "#include " << std::endl; h << "#include " << std::endl; h << std::endl; h << "namespace " << name << " {" << std::endl; h << std::endl; h << "class " << name << "Service: public Arc::Service" << std::endl; h << "{" << std::endl; h << std::endl; } static void h_public_part(std::string &name, std::ofstream &h) { h << " public:" << std::endl; h << " " << name << "Service(Arc::Config *cfg);" << std::endl; h << " virtual ~" << name << "Service(void);" << std::endl; h << " virtual Arc::MCC_Status process(Arc::Message &inmsg, Arc::Message &outmsg);" << std::endl; } static void h_private_part(std::string &/*name*/, std::ofstream &h, Arc::XMLNode &xml) { h << " private:" << std::endl; h << " Arc::NS ns;" << std::endl; h << " Arc::Logger logger;" << std::endl; h << " Arc::DelegationContainerSOAP delegation;" << std::endl; h << " Arc::InformationContainer infodoc;" << std::endl; h << " Arc::MCC_Status make_soap_fault(Arc::Message &outmsg);" << std::endl; h << " // Operations from WSDL" << std::endl; Arc::XMLNode op; for (int i = 0; (op = xml["wsdl:portType"]["wsdl:operation"][i]) == true; i++) { std::string n = (std::string) op.Attribute("name"); if (!n.empty()) { h << " Arc::MCC_Status " << n << "(Arc::XMLNode &in, Arc::XMLNode &out);" << std::endl; } } } static void std_cpp_header(std::string &name, std::ofstream &cpp) { std::string lname = name; std::transform(name.begin(), name.end(), lname.begin(), to_lower()); cpp << "// Generated by wsdl2hed" << std::endl; cpp << "#ifdef HAVE_CONFIG_H" << std::endl; cpp << "#include " << std::endl; cpp << "#endif" << std::endl; cpp << std::endl; cpp << "#include " << std::endl; cpp << "#include " << std::endl; cpp << "#include " << std::endl; cpp << "#include " << std::endl; cpp << std::endl; 
cpp << "#include \"" << lname << ".h\"" << std::endl; cpp << std::endl; cpp << "namespace " << name << " {" << std::endl; cpp << std::endl; cpp << "static Arc::Service *get_service(Arc::Config *cfg, Arc::ChainContext *) { " << std::endl; cpp << " return new " << name << "Service(cfg);" << std::endl; cpp << "}" << std::endl; } static void cpp_public_part(std::string &name, std::ofstream &cpp, Arc::XMLNode &xml) { cpp << std::endl; cpp << name << "Service::" << name << "Service(Arc::Config *cfg):Service(cfg),logger(Arc::Logger::rootLogger, \"" << name << "\")" << std::endl; cpp << "{" << std::endl; cpp << " // Define supported namespaces" << std::endl; Arc::NS n = xml.Namespaces(); Arc::NS::iterator it; for (it = n.begin(); it != n.end(); it++) { // Ignore some default namespace if (it->first != "soap" && it->first != "SOAP-ENV" && it->first != "SOAP-ENC" && it->first != "wsdl" && it->first != "xsd") { cpp << " ns[\"" << it->first << "\"]=\"" << it->second << "\";" << std::endl; } } cpp << "}" << std::endl; cpp << std::endl; cpp << name << "Service::~" << name << "Service(void)" << std::endl; cpp << "{" << std::endl; cpp << "}" << std::endl; cpp << std::endl; cpp << "Arc::MCC_Status " << name << "Service::process(Arc::Message &inmsg, Arc::Message &outmsg)" << std::endl; cpp << "{\n\ // Both input and output are supposed to be SOAP\n\ // Extracting payload\n\ Arc::PayloadSOAP* inpayload = NULL;\n\ try {\n\ inpayload = dynamic_cast(inmsg.Payload());\n\ } catch(std::exception& e) { };\n\ if(!inpayload) {\n\ logger.msg(Arc::ERROR, \"input is not SOAP\");\n\ return make_soap_fault(outmsg);\n\ };\n\ // Analyzing request\n\ Arc::XMLNode op = inpayload->Child(0);\n\ if(!op) {\n\ logger.msg(Arc::ERROR, \"input does not define operation\");\n\ return make_soap_fault(outmsg);\n\ }; \n\ logger.msg(Arc::VERBOSE,\"process: operation: %s\", op.Name());\n\ Arc::PayloadSOAP* outpayload = new Arc::PayloadSOAP(ns);\n\ Arc::PayloadSOAP& res = *outpayload;\n\ Arc::MCC_Status ret = Arc::STATUS_OK;" << std::endl; cpp << " "; // just becuase good indent of following if section Arc::XMLNode op; for (int i = 0; (op = xml["wsdl:portType"]["wsdl:operation"][i]) == true; i++) { std::string n = (std::string) op.Attribute("name"); std::string msg = (std::string) op["output"].Attribute("message"); cpp << "if(MatchXMLName(op, \"" << n << "\")) {" << std::endl; cpp << " Arc::XMLNode r = res.NewChild(\"" << msg << "\");" << std::endl; cpp << " ret = " << n << "(op, r);" << std::endl; cpp << " } else "; } cpp << "if(MatchXMLName(op, \"DelegateCredentialsInit\")) {\n\ if(!delegation.DelegateCredentialsInit(*inpayload,*outpayload)) {\n\ delete inpayload;\n\ return make_soap_fault(outmsg);\n\ }\n\ // WS-Property\n\ } else if(MatchXMLNamespace(op,\"http://docs.oasis-open.org/wsrf/rp-2\")) {\n\ Arc::SOAPEnvelope* out_ = infodoc.Process(*inpayload);\n\ if(out_) {\n\ *outpayload=*out_;\n\ delete out_;\n\ } else {\n\ delete inpayload; delete outpayload;\n\ return make_soap_fault(outmsg);\n\ };\n\ } else {\n\ logger.msg(Arc::ERROR,\"SOAP operation is not supported: %s\", op.Name());\n\ return make_soap_fault(outmsg);\n\ };\n\ // Set output\n\ outmsg.Payload(outpayload);\n\ return Arc::MCC_Status(ret);\n\ }" << std::endl; cpp << std::endl; } static void cpp_private_part(std::string &name, std::ostream &cpp, Arc::XMLNode &xml) { cpp << "Arc::MCC_Status "<< name << "Service::make_soap_fault(Arc::Message& outmsg)\n\ {\n\ Arc::PayloadSOAP* outpayload = new Arc::PayloadSOAP(ns,true);\n\ Arc::SOAPFault* fault = 
outpayload?outpayload->Fault():NULL;\n\ if(fault) {\n\ fault->Code(Arc::SOAPFault::Sender);\n\ fault->Reason(\"Failed processing request\");\n\ };\n\ outmsg.Payload(outpayload);\n\ return Arc::MCC_Status(Arc::STATUS_OK);\n\ }" << std::endl << std::endl; Arc::XMLNode op; for (int i = 0; (op = xml["wsdl:portType"]["wsdl:operation"][i]) == true; i++) { std::string n = (std::string) op.Attribute("name"); if (!n.empty()) { cpp << "Arc::MCC_Status " << name << "Service::" << n << "(Arc::XMLNode &in, Arc::XMLNode &out)" << std::endl; cpp << "{" << std::endl; cpp << " return Arc::MCC_Status();" << std::endl; cpp << "}" << std::endl; cpp << std::endl; } } } static void std_h_footer(std::string &name, std::ofstream &h) { std::string uname = name; std::transform(name.begin(), name.end(), uname.begin(), to_upper()); h << std::endl; h << "}; // class " << name << std::endl; h << "}; // namespace " << name << std::endl; h << "#endif // __ARC_" << uname << "_H__" << std::endl; } static void std_cpp_footer(std::string &name, std::ofstream &cpp) { std::string lname = name; std::transform(name.begin(), name.end(), lname.begin(), to_lower()); cpp << "}; // namespace " << name << std::endl; cpp << std::endl; cpp << "service_descriptors ARC_SERVICE_LOADER = {" << std::endl; cpp << " { \"" << lname << "\", 0, &" << name << "::get_service }," << std::endl; cpp << " { NULL, 0, NULL }" << std::endl; cpp << "};" << std::endl; } static void gen_makefile_am(std::string &name) { std::string lname = name; std::transform(name.begin(), name.end(), lname.begin(), to_lower()); std::ofstream m("Makefile.am"); m << "pkglib_LTLIBRARIES = lib" << lname << ".la" << std::endl; m << "lib" << lname << "_la_SOURCES = " << lname << ".cpp " << lname << ".h" << std::endl; m << "lib" << lname << "_la_CXXFLAGS = $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) -I$(top_srcdir)/include" << std::endl; m << "lib" << lname << "_la_LIBADD = $(top_srcdir)/src/hed/libs/loader/libarcloader.la $(top_srcdir)/src/hed/libs/message/libarcmessage.la $(top_srcdir)/src/hed/libs/security/libarcsecurity.la $(top_srcdir)/src/hed/libs/ws/libarcws.la $(top_srcdir)/src/hed/libs/common/libarccommon.la" << std::endl; m << "lib" << lname << "_la_LDFLAGS = -no-undefined -avoid-version -module" << std::endl; m.close(); } int main(int argc, char **argv) { bool parse_schema = false; if ((argc > 1) && (strcmp(argv[1],"-s") == 0)) { parse_schema = true; --argc; ++argv; } if (argc < 3) { std::cerr << "Invalid arguments" << std::endl; return -1; } ns["wsdl"] = "http://schemas.xmlsoap.org/wsdl/"; std::string xml_str = Glib::file_get_contents(argv[1]); Arc::XMLNode xml(xml_str); if (xml == false) { std::cerr << "Failed parse XML! 
" << std::endl; return -1; } /* { std::string str; xml.GetXML(str); std::cout << str << std::endl; }; */ // xml.Namespaces(ns); std::string name = argv[2]; std::string lname = name; std::transform(name.begin(), name.end(), lname.begin(), to_lower()); std::string header_path = lname; header_path += ".h"; std::string cpp_path = lname; cpp_path += ".cpp"; std::ofstream h(header_path.c_str()); if (!h) { std::cerr << "Cannot create: " << header_path << std::endl; exit(1); } std::ofstream cpp(cpp_path.c_str()); if (!cpp) { unlink (header_path.c_str()); std::cerr << "Cannot create: " << cpp_path << std::endl; } if(parse_schema) { if(!schemaconv(xml,h,cpp,lname)) return 1; return 0; } std_h_header(name, h); h_public_part(name, h); h_private_part(name, h, xml); std_h_footer(name, h); std_cpp_header(name, cpp); cpp_public_part(name, cpp, xml); cpp_private_part(name, cpp, xml); std_cpp_footer(name, cpp); h.close(); cpp.close(); gen_makefile_am(name); return 0; } nordugrid-arc-6.14.0/src/utils/hed/PaxHeaders.30264/complextype.cpp0000644000000000000000000000013214152153376023110 xustar000000000000000030 mtime=1638455038.450646651 30 atime=1638455038.517647657 30 ctime=1638455101.151588761 nordugrid-arc-6.14.0/src/utils/hed/complextype.cpp0000644000175000002070000002200114152153376023070 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include "schemaconv.h" using namespace Arc; // -------------- Complex type -------------- // 1 - class/parent name (C++,XML) static const char* complex_type_header_pattern_h = "\ class %1$s: public Arc::XMLNode {\n\ public:\n\ "; // 1 - class/parent name (C++,XML) static const char* complex_type_footer_pattern_h = "\ static %1$s New(Arc::XMLNode parent);\n\ %1$s(Arc::XMLNode node);\n\ };\n\ \n\ "; // 1 - class/parent name (C++,XML) // 2 - class namespace (XML) static const char* complex_type_constructor_header_pattern_cpp = "\ %3$s%1$s::%1$s(Arc::XMLNode node) {\n\ Arc::NS ns;\n\ ns[\"ns\"]=\"%2$s\";\n\ Namespaces(ns);\n\ "; // 1 - class/parent name (C++,XML) // 2 - class namespace (XML) static const char* complex_type_constructor_footer_pattern_cpp = "\ }\n\ \n\ %3$s%1$s %3$s%1$s::New(Arc::XMLNode parent) {\n\ Arc::NS ns;\n\ ns[\"ns\"]=\"%2$s\";\n\ %1$s el(parent.NewChild(\"ns:%1$s\",ns));\n\ return el;\n\ }\n\ "; // 1 - element name (C++,XML) // 2 - element type (C++) static const char* mandatory_element_pattern_h = "\ %2$s %1$s(void);\n\ "; // 1 - element name (C++,XML) // 2 - element type (C++) static const char* optional_element_pattern_h = "\ %2$s %1$s(bool create = false);\n\ "; // 1 - element name (C++,XML) // 2 - element type (C++) static const char* array_element_pattern_h = "\ %2$s %1$s(int index,bool create = false);\n\ "; // 1 - element name (C++,XML) static const char* mandatory_element_constructor_pattern_cpp = "\ (void)%1$s();\n\ "; // 1 - element name (C++,XML) // 2 - element type (C++) // 3 - class/parent name (C++,XML) // 4 - element namespace prefix (XML) // 5 - element type (XML) static const char* mandatory_element_method_pattern_cpp = "\ %2$s %3$s::%1$s(void) {\n\ Arc::XMLNode node = operator[](\"%4$s:%5$s\");\n\ if(!node) node = NewChild(\"%4$s:%5$s\");\n\ return node;\n\ }\n\ "; // 1 - element name (C++,XML) static const char* optional_element_constructor_pattern_cpp = "\ "; // 1 - element name (C++,XML) // 2 - element type (C++) // 3 - class/parent name (C++,XML) // 4 - element namespace prefix (XML) // 5 - element type (XML) static const char* optional_element_method_pattern_cpp = "\ 
%2$s %3$s::%1$s(bool create) {\n\ Arc::XMLNode node = operator[](\"%4$s:%5$s\");\n\ if(create && !node) node = NewChild(\"%4$s:%5$s\");\n\ return node;\n\ }\n\ "; // 1 - element name (C++,XML) // 2 - minimal number of elements static const char* array_element_constructor_pattern_cpp = "\ if(%2$s > 0) (void)%1$s(%2$s - 1);\n\ "; // 1 - element name (C++,XML) // 2 - element type (C++) // 3 - class/parent name (C++,XML) // 4 - element namespace prefix (XML) // 5 - element type (XML) // 6 - minimal number of elements static const char* array_element_method_pattern_cpp = "\ %2$s %3$s::%1$s(int index,bool create) {\n\ if(index < %6$s) create = true;\n\ Arc::XMLNode node = operator[](\"%4$s:%5$s\")[index];\n\ if(create && !node) {\n\ for(int n = 0;n((std::string)n); }; n=(std::string)(element.Attribute("maxOccurs")); if(!n.empty()) { if(n == "unbounded") { maxoccurs=-1; } else { maxoccurs=stringto((std::string)n); }; }; if(maxoccurs != -1) { if(maxoccurs < minoccurs) { std::cout<<" maxOccurs is smaller than minOccurs"<&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ bin_PROGRAMS = wsdl2hed$(EXEEXT) arcplugin$(EXEEXT) subdir = src/utils/hed DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/wsdl2hed.1.in $(srcdir)/arcplugin.1.in \ $(top_srcdir)/depcomp README ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d 
CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = wsdl2hed.1 arcplugin.1 CONFIG_CLEAN_VPATH_FILES = am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)" PROGRAMS = $(bin_PROGRAMS) am_arcplugin_OBJECTS = arcplugin-arcplugin.$(OBJEXT) arcplugin_OBJECTS = $(am_arcplugin_OBJECTS) am__DEPENDENCIES_1 = arcplugin_DEPENDENCIES = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = arcplugin_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(arcplugin_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ am_wsdl2hed_OBJECTS = wsdl2hed-wsdl2hed.$(OBJEXT) \ wsdl2hed-schemaconv.$(OBJEXT) wsdl2hed-common.$(OBJEXT) \ wsdl2hed-simpletype.$(OBJEXT) wsdl2hed-complextype.$(OBJEXT) wsdl2hed_OBJECTS = $(am_wsdl2hed_OBJECTS) wsdl2hed_DEPENDENCIES = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) wsdl2hed_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(wsdl2hed_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(arcplugin_SOURCES) $(wsdl2hed_SOURCES) DIST_SOURCES = $(arcplugin_SOURCES) $(wsdl2hed_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; 
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } man1dir = $(mandir)/man1 NROFF = nroff MANS = $(man_MANS) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ 
ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ 
OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ 
pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ man_MANS = wsdl2hed.1 arcplugin.1 wsdl2hed_SOURCES = wsdl2hed.cpp schemaconv.cpp schemaconv.h \ common.cpp simpletype.cpp complextype.cpp wsdl2hed_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) wsdl2hed_LDADD = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLIBMM_LIBS) $(LIBXML2_LIBS) arcplugin_SOURCES = arcplugin.cpp arcplugin_CXXFLAGS = -I$(top_srcdir)/include \ $(GLIBMM_CFLAGS) $(LIBXML2_CFLAGS) $(AM_CXXFLAGS) arcplugin_LDADD = \ $(top_builddir)/src/hed/libs/common/libarccommon.la \ $(GLIBMM_LIBS) $(LIBXML2_LIBS) all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/utils/hed/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/utils/hed/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): wsdl2hed.1: $(top_builddir)/config.status $(srcdir)/wsdl2hed.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ arcplugin.1: $(top_builddir)/config.status $(srcdir)/arcplugin.1.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-binPROGRAMS: $(bin_PROGRAMS) @$(NORMAL_INSTALL) @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ fi; \ for p in $$list; do echo "$$p $$p"; done | \ sed 's/$(EXEEXT)$$//' | \ while read p p1; do if test -f $$p \ || test -f $$p1 \ ; then echo "$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n;h' \ -e 's|.*|.|' \ -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) files[d] = files[d] " " $$1; \ else { print "f", $$3 "/" $$4, $$1; } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install 
$(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ } \ ; done uninstall-binPROGRAMS: @$(NORMAL_UNINSTALL) @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ -e 's/$$/$(EXEEXT)/' \ `; \ test -n "$$list" || exit 0; \ echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(bindir)" && rm -f $$files clean-binPROGRAMS: @list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list arcplugin$(EXEEXT): $(arcplugin_OBJECTS) $(arcplugin_DEPENDENCIES) $(EXTRA_arcplugin_DEPENDENCIES) @rm -f arcplugin$(EXEEXT) $(AM_V_CXXLD)$(arcplugin_LINK) $(arcplugin_OBJECTS) $(arcplugin_LDADD) $(LIBS) wsdl2hed$(EXEEXT): $(wsdl2hed_OBJECTS) $(wsdl2hed_DEPENDENCIES) $(EXTRA_wsdl2hed_DEPENDENCIES) @rm -f wsdl2hed$(EXEEXT) $(AM_V_CXXLD)$(wsdl2hed_LINK) $(wsdl2hed_OBJECTS) $(wsdl2hed_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/arcplugin-arcplugin.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/wsdl2hed-common.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/wsdl2hed-complextype.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/wsdl2hed-schemaconv.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/wsdl2hed-simpletype.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/wsdl2hed-wsdl2hed.Po@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< arcplugin-arcplugin.o: arcplugin.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcplugin_CXXFLAGS) $(CXXFLAGS) -MT arcplugin-arcplugin.o -MD -MP -MF $(DEPDIR)/arcplugin-arcplugin.Tpo -c -o arcplugin-arcplugin.o `test -f 'arcplugin.cpp' || echo '$(srcdir)/'`arcplugin.cpp @am__fastdepCXX_TRUE@ 
$(AM_V_at)$(am__mv) $(DEPDIR)/arcplugin-arcplugin.Tpo $(DEPDIR)/arcplugin-arcplugin.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcplugin.cpp' object='arcplugin-arcplugin.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcplugin_CXXFLAGS) $(CXXFLAGS) -c -o arcplugin-arcplugin.o `test -f 'arcplugin.cpp' || echo '$(srcdir)/'`arcplugin.cpp arcplugin-arcplugin.obj: arcplugin.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcplugin_CXXFLAGS) $(CXXFLAGS) -MT arcplugin-arcplugin.obj -MD -MP -MF $(DEPDIR)/arcplugin-arcplugin.Tpo -c -o arcplugin-arcplugin.obj `if test -f 'arcplugin.cpp'; then $(CYGPATH_W) 'arcplugin.cpp'; else $(CYGPATH_W) '$(srcdir)/arcplugin.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/arcplugin-arcplugin.Tpo $(DEPDIR)/arcplugin-arcplugin.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='arcplugin.cpp' object='arcplugin-arcplugin.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(arcplugin_CXXFLAGS) $(CXXFLAGS) -c -o arcplugin-arcplugin.obj `if test -f 'arcplugin.cpp'; then $(CYGPATH_W) 'arcplugin.cpp'; else $(CYGPATH_W) '$(srcdir)/arcplugin.cpp'; fi` wsdl2hed-wsdl2hed.o: wsdl2hed.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -MT wsdl2hed-wsdl2hed.o -MD -MP -MF $(DEPDIR)/wsdl2hed-wsdl2hed.Tpo -c -o wsdl2hed-wsdl2hed.o `test -f 'wsdl2hed.cpp' || echo '$(srcdir)/'`wsdl2hed.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/wsdl2hed-wsdl2hed.Tpo $(DEPDIR)/wsdl2hed-wsdl2hed.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='wsdl2hed.cpp' object='wsdl2hed-wsdl2hed.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -c -o wsdl2hed-wsdl2hed.o `test -f 'wsdl2hed.cpp' || echo '$(srcdir)/'`wsdl2hed.cpp wsdl2hed-wsdl2hed.obj: wsdl2hed.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -MT wsdl2hed-wsdl2hed.obj -MD -MP -MF $(DEPDIR)/wsdl2hed-wsdl2hed.Tpo -c -o wsdl2hed-wsdl2hed.obj `if test -f 'wsdl2hed.cpp'; then $(CYGPATH_W) 'wsdl2hed.cpp'; else $(CYGPATH_W) '$(srcdir)/wsdl2hed.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/wsdl2hed-wsdl2hed.Tpo $(DEPDIR)/wsdl2hed-wsdl2hed.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='wsdl2hed.cpp' object='wsdl2hed-wsdl2hed.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -c -o wsdl2hed-wsdl2hed.obj `if test -f 'wsdl2hed.cpp'; then $(CYGPATH_W) 'wsdl2hed.cpp'; else $(CYGPATH_W) '$(srcdir)/wsdl2hed.cpp'; fi` wsdl2hed-schemaconv.o: schemaconv.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) 
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -MT wsdl2hed-schemaconv.o -MD -MP -MF $(DEPDIR)/wsdl2hed-schemaconv.Tpo -c -o wsdl2hed-schemaconv.o `test -f 'schemaconv.cpp' || echo '$(srcdir)/'`schemaconv.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/wsdl2hed-schemaconv.Tpo $(DEPDIR)/wsdl2hed-schemaconv.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='schemaconv.cpp' object='wsdl2hed-schemaconv.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -c -o wsdl2hed-schemaconv.o `test -f 'schemaconv.cpp' || echo '$(srcdir)/'`schemaconv.cpp wsdl2hed-schemaconv.obj: schemaconv.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -MT wsdl2hed-schemaconv.obj -MD -MP -MF $(DEPDIR)/wsdl2hed-schemaconv.Tpo -c -o wsdl2hed-schemaconv.obj `if test -f 'schemaconv.cpp'; then $(CYGPATH_W) 'schemaconv.cpp'; else $(CYGPATH_W) '$(srcdir)/schemaconv.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/wsdl2hed-schemaconv.Tpo $(DEPDIR)/wsdl2hed-schemaconv.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='schemaconv.cpp' object='wsdl2hed-schemaconv.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -c -o wsdl2hed-schemaconv.obj `if test -f 'schemaconv.cpp'; then $(CYGPATH_W) 'schemaconv.cpp'; else $(CYGPATH_W) '$(srcdir)/schemaconv.cpp'; fi` wsdl2hed-common.o: common.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -MT wsdl2hed-common.o -MD -MP -MF $(DEPDIR)/wsdl2hed-common.Tpo -c -o wsdl2hed-common.o `test -f 'common.cpp' || echo '$(srcdir)/'`common.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/wsdl2hed-common.Tpo $(DEPDIR)/wsdl2hed-common.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='common.cpp' object='wsdl2hed-common.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -c -o wsdl2hed-common.o `test -f 'common.cpp' || echo '$(srcdir)/'`common.cpp wsdl2hed-common.obj: common.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -MT wsdl2hed-common.obj -MD -MP -MF $(DEPDIR)/wsdl2hed-common.Tpo -c -o wsdl2hed-common.obj `if test -f 'common.cpp'; then $(CYGPATH_W) 'common.cpp'; else $(CYGPATH_W) '$(srcdir)/common.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/wsdl2hed-common.Tpo $(DEPDIR)/wsdl2hed-common.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='common.cpp' object='wsdl2hed-common.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) 
$(CXXFLAGS) -c -o wsdl2hed-common.obj `if test -f 'common.cpp'; then $(CYGPATH_W) 'common.cpp'; else $(CYGPATH_W) '$(srcdir)/common.cpp'; fi` wsdl2hed-simpletype.o: simpletype.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -MT wsdl2hed-simpletype.o -MD -MP -MF $(DEPDIR)/wsdl2hed-simpletype.Tpo -c -o wsdl2hed-simpletype.o `test -f 'simpletype.cpp' || echo '$(srcdir)/'`simpletype.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/wsdl2hed-simpletype.Tpo $(DEPDIR)/wsdl2hed-simpletype.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='simpletype.cpp' object='wsdl2hed-simpletype.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -c -o wsdl2hed-simpletype.o `test -f 'simpletype.cpp' || echo '$(srcdir)/'`simpletype.cpp wsdl2hed-simpletype.obj: simpletype.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -MT wsdl2hed-simpletype.obj -MD -MP -MF $(DEPDIR)/wsdl2hed-simpletype.Tpo -c -o wsdl2hed-simpletype.obj `if test -f 'simpletype.cpp'; then $(CYGPATH_W) 'simpletype.cpp'; else $(CYGPATH_W) '$(srcdir)/simpletype.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/wsdl2hed-simpletype.Tpo $(DEPDIR)/wsdl2hed-simpletype.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='simpletype.cpp' object='wsdl2hed-simpletype.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -c -o wsdl2hed-simpletype.obj `if test -f 'simpletype.cpp'; then $(CYGPATH_W) 'simpletype.cpp'; else $(CYGPATH_W) '$(srcdir)/simpletype.cpp'; fi` wsdl2hed-complextype.o: complextype.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -MT wsdl2hed-complextype.o -MD -MP -MF $(DEPDIR)/wsdl2hed-complextype.Tpo -c -o wsdl2hed-complextype.o `test -f 'complextype.cpp' || echo '$(srcdir)/'`complextype.cpp @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/wsdl2hed-complextype.Tpo $(DEPDIR)/wsdl2hed-complextype.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='complextype.cpp' object='wsdl2hed-complextype.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -c -o wsdl2hed-complextype.o `test -f 'complextype.cpp' || echo '$(srcdir)/'`complextype.cpp wsdl2hed-complextype.obj: complextype.cpp @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -MT wsdl2hed-complextype.obj -MD -MP -MF $(DEPDIR)/wsdl2hed-complextype.Tpo -c -o wsdl2hed-complextype.obj `if test -f 'complextype.cpp'; then $(CYGPATH_W) 'complextype.cpp'; else $(CYGPATH_W) '$(srcdir)/complextype.cpp'; fi` @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/wsdl2hed-complextype.Tpo $(DEPDIR)/wsdl2hed-complextype.Po 
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='complextype.cpp' object='wsdl2hed-complextype.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(wsdl2hed_CXXFLAGS) $(CXXFLAGS) -c -o wsdl2hed-complextype.obj `if test -f 'complextype.cpp'; then $(CYGPATH_W) 'complextype.cpp'; else $(CYGPATH_W) '$(srcdir)/complextype.cpp'; fi` mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man1: $(man_MANS) @$(NORMAL_INSTALL) @list1=''; \ list2='$(man_MANS)'; \ test -n "$(man1dir)" \ && test -n "`echo $$list1$$list2`" \ || exit 0; \ echo " $(MKDIR_P) '$(DESTDIR)$(man1dir)'"; \ $(MKDIR_P) "$(DESTDIR)$(man1dir)" || exit 1; \ { for i in $$list1; do echo "$$i"; done; \ if test -n "$$list2"; then \ for i in $$list2; do echo "$$i"; done \ | sed -n '/\.1[a-z]*$$/p'; \ fi; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man1dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man1dir)" || exit $$?; }; \ done; } uninstall-man1: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man1dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.1[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ dir='$(DESTDIR)$(man1dir)'; $(am__uninstall_files_from_dir) ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do 
echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(PROGRAMS) $(MANS) installdirs: for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-binPROGRAMS clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-binPROGRAMS install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man1 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-binPROGRAMS uninstall-man uninstall-man: uninstall-man1 .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \ clean-binPROGRAMS clean-generic clean-libtool cscopelist-am \ ctags ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-binPROGRAMS \ install-data install-data-am install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-man install-man1 \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags tags-am uninstall uninstall-am \ uninstall-binPROGRAMS uninstall-man uninstall-man1 # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: nordugrid-arc-6.14.0/src/utils/hed/PaxHeaders.30264/arcplugin.cpp0000644000000000000000000000013214152153376022523 xustar000000000000000030 mtime=1638455038.450646651 30 atime=1638455038.517647657 30 ctime=1638455101.146588686 nordugrid-arc-6.14.0/src/utils/hed/arcplugin.cpp0000644000175000002070000002031214152153376022506 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include #include #include #include std::list< std::pair > priorities_map; static uint32_t map_priority(const std::string& str1, const std::string& str2) { for(std::list< std::pair >::iterator p = priorities_map.begin(); p != priorities_map.end(); ++p) { if(p->first.match(str1) || p->first.match(str2)) return p->second; } return ARC_PLUGIN_DEFAULT_PRIORITY; } static std::string encode_for_var(uint32_t v) { return "\"" + Arc::tostring(v) + "\""; } static std::string encode_for_var(const char* str) { std::string stro = "\""; stro += str; std::string::size_type p = 1; for(;;++p) { p = stro.find_first_of("\"\\",p); if(p == std::string::npos) break; stro.insert(p, "\\"); ++p; } stro += "\""; return stro; } static std::string replace_file_suffix(const std::string& path,const std::string& newsuffix) { std::string newpath = path; std::string::size_type name_p = newpath.rfind(G_DIR_SEPARATOR_S); if(name_p == std::string::npos) { name_p = 0; } else { ++name_p; } std::string::size_type suffix_p = newpath.find('.',name_p); if(suffix_p != std::string::npos) { newpath.resize(suffix_p); } newpath += "." 
+ newsuffix; return newpath; } static bool process_module(const std::string& plugin_filename, bool create_apd) { Arc::PluginDescriptor dummy_desc[2]; memset(dummy_desc,0,sizeof(dummy_desc)); dummy_desc[0].name = ""; dummy_desc[0].kind = ""; dummy_desc[0].description = ""; dummy_desc[0].version = 0; dummy_desc[0].instance = (Arc::get_plugin_instance)dummy_desc; std::string descriptor_filename = replace_file_suffix(plugin_filename,"apd"); Glib::ModuleFlags flags = Glib::ModuleFlags(0); flags|=Glib::MODULE_BIND_LAZY; Glib::Module *module = new Glib::Module(plugin_filename,flags); if ((!module) || (!(*module))) { std::cerr << "Failed to load module " << plugin_filename << ": " << Glib::Module::get_last_error() << std::endl; return false; } std::cout << "Loaded module " << plugin_filename << std::endl; std::cout << std::endl; void *ptr = NULL; if(!module->get_symbol(ARC_PLUGINS_TABLE_SYMB,ptr)) { std::cerr << "Module " << plugin_filename << " is not an ARC plugin: " << Glib::Module::get_last_error() << std::endl; if(create_apd) { std::cerr << "Dummy descriptor file will be created to avoid loading this module at all" << std::endl; // This is needed to make rpmlint happy. ptr = dummy_desc; } //delete module; //return -1; }; Arc::PluginDescriptor* desc = (Arc::PluginDescriptor*)ptr; std::ofstream apd; if(create_apd) { apd.open(descriptor_filename.c_str()); if(!apd) { std::cerr << "Failed to create descriptor file " << descriptor_filename << std::endl; return false; }; }; for(;desc;++desc) { if(desc->name == NULL) break; if(desc->kind == NULL) break; if(desc->instance == NULL) break; if(create_apd) { uint32_t priority = map_priority(desc->name, desc->kind); apd << "name=" << encode_for_var(desc->name) << std::endl; apd << "kind=" << encode_for_var(desc->kind) << std::endl; if (desc->description != NULL) { apd << "description=" << encode_for_var(desc->description) << std::endl; } apd << "version=" << encode_for_var(desc->version) << std::endl; apd << "priority=" << encode_for_var(priority) << std::endl; apd << std::endl; // end of description mark } else { std::cout << "name: " << desc->name << std::endl; std::cout << "kind: " << desc->kind << std::endl; if (desc->description != NULL) { std::cout << "description: " << desc->description << std::endl; } std::cout << "version: " << desc->version << std::endl; std::cout << std::endl; }; }; if(create_apd) { apd.close(); std::cout << "Created descriptor " << descriptor_filename << std::endl; }; // We are not unloading module because it may be not suitable // for unloading or it may be library which may fail unloading // after it was loaded with dlopen(). //delete module; return true; } int main(int argc, char **argv) { const std::string modsuffix("." 
G_MODULE_SUFFIX); bool create_apd = false; bool recursive = false; while (argc > 1) { if (strcmp(argv[1],"-c") == 0) { create_apd = true; --argc; ++argv; } else if(strcmp(argv[1],"-r") == 0) { recursive = true; --argc; ++argv; } else if(strcmp(argv[1],"-p") == 0) { if(argc <= 2) { std::cerr << "Missing option for -p" << std::endl; return -1; } uint32_t priority; std::string option = argv[2]; std::string::size_type comma = option.find(','); if(comma == std::string::npos) { std::cerr << "Missing , in -p option" << std::endl; return -1; } if(!Arc::stringto(option.substr(0,comma),priority)) { std::cerr << "Can't parse prority number " << option.substr(0,comma) << std::endl; return -1; } std::cerr<<"+++ "<(Arc::RegularExpression(option.substr(comma+1)),priority)); --argc; ++argv; --argc; ++argv; } else if (strcmp(argv[1],"-h") == 0) { std::cout << "arcplugin [-c] [-r] [-p priority,regex] [-h] plugin_path [plugin_path [...]]" << std::endl; std::cout << " -c If specified then APD file is created using same name" << std::endl; std::cout << " as ARC plugin with suffix replaced with .apd." << std::endl; std::cout << " -r If specified operation is fully recursive." << std::endl; std::cout << " -p Defines which priority to be assigned for each plugin." << std::endl; std::cout << " Each plugin's kind and name attributes are matched" << std::endl; std::cout << " specified regex. One which matches gets specified" << std::endl; std::cout << " This option can be specified multiple times." << std::endl; std::cout << " -h prints this help and exits." << std::endl; std::cout << " plugin_path is full path to ARC plugin loadable module" << std::endl; std::cout << " file or directory containing such modules." << std::endl; return 0; } else { break; }; }; if (argc < 2) { std::cerr << "Missing arguments" << std::endl; return -1; }; std::list paths; for(int n = 1; n < argc; ++n) paths.push_back(argv[n]); int user_paths = paths.size(); int num = 0; for(std::list::iterator path = paths.begin(); path != paths.end(); ++path) { try { Glib::Dir dir(*path); if((!recursive) && (num >= user_paths)) continue; for (Glib::DirIterator file = dir.begin(); file != dir.end(); file++) { std::string name = *file; if(name == ".") continue; if(name == "..") continue; paths.push_back(Glib::build_filename(*path, name)); } } catch (Glib::FileError&) { if(path->length() <= modsuffix.length()) continue; if(path->substr(path->length()-modsuffix.length()) != modsuffix) continue; process_module(*path, create_apd); } ++num; } //return 0; // Do quick exit to avoid possible problems with module unloading _exit(0); } nordugrid-arc-6.14.0/src/utils/hed/PaxHeaders.30264/common.cpp0000644000000000000000000000013214152153376022027 xustar000000000000000030 mtime=1638455038.450646651 30 atime=1638455038.517647657 30 ctime=1638455101.150588746 nordugrid-arc-6.14.0/src/utils/hed/common.cpp0000644000175000002070000000303614152153376022016 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include "schemaconv.h" using namespace Arc; void strprintf(std::ostream& out,const char* fmt, const std::string& arg1,const std::string& arg2, const std::string& arg3,const std::string& arg4, const std::string& arg5,const std::string& arg6, const std::string& arg7,const std::string& arg8, const std::string& arg9,const std::string& arg10) { char buf[65536]; buf[0]=0; snprintf(buf,sizeof(buf)-1,fmt,arg1.c_str(),arg2.c_str(),arg3.c_str(), arg4.c_str(),arg5.c_str(),arg6.c_str(), arg7.c_str(),arg8.c_str(),arg9.c_str(), arg10.c_str()); 
buf[sizeof(buf)-1]=0; out< #include // common void strprintf(std::ostream& out,const char* fmt, const std::string& arg1 = "",const std::string& arg2 = "", const std::string& arg3 = "",const std::string& arg4 = "", const std::string& arg5 = "",const std::string& arg6 = "", const std::string& arg7 = "",const std::string& arg8 = "", const std::string& arg9 = "",const std::string& arg10 = ""); void strprintf(std::string& out,const char* fmt, const std::string& arg1 = "",const std::string& arg2 = "", const std::string& arg3 = "",const std::string& arg4 = "", const std::string& arg5 = "",const std::string& arg6 = "", const std::string& arg7 = "",const std::string& arg8 = "", const std::string& arg9 = "",const std::string& arg10 = ""); // simple type void simpletypeprint(Arc::XMLNode stype,const std::string& ns,std::ostream& h_file,std::ostream& cpp_file); void simpletypeprintnamed(const std::string& cppspace,const std::string& ntype,Arc::XMLNode stype,const std::string& ns,std::ostream& h_file,std::ostream& cpp_file); // complex type void complextypeprint(Arc::XMLNode ctype,const std::string& ns,std::ostream& h_file,std::ostream& cpp_file); // entry point bool schemaconv(Arc::XMLNode wsdl,std::ostream& h_file,std::ostream& cpp_file,const std::string& name); nordugrid-arc-6.14.0/src/utils/hed/PaxHeaders.30264/arcplugin.1.in0000644000000000000000000000013214152153376022506 xustar000000000000000030 mtime=1638455038.450646651 30 atime=1638455038.517647657 30 ctime=1638455101.145588671 nordugrid-arc-6.14.0/src/utils/hed/arcplugin.1.in0000644000175000002070000000263714152153376022503 0ustar00mockbuildmock00000000000000.\" -*- nroff -*- .TH ARCPLUGIN 1 "@DATE@" "NorduGrid ARC @VERSION@" "NorduGrid Users Manual" .SH NAME arcplugin \- ARC plugin management utility .SH DESCRIPTION The .B arcplugin command prints description of ARC plugin or creates ARC Plugin Descriptor (APD) file. .SH SYNOPSIS .B arcplugin [-c] [-r] [-p priority,regex] [-h] plugin_path [plugin_path [...]] .SH OPTIONS .IP "\fB\ -h \fR" Prints help message and exits. .IP "\fB\ -c \fR" If specified then APD file is created using same name as ARC plugin with suffix. replaced with .apd. .IP "\fB\ -r \fR" If specified operation is fully recursive. .IP "\fB\ -p \fR" Defines which priority to be assigned for each plugin. Each plugin's kind and name attributes are matched specified regex. One which matches gets specified This option can be specified multiple times. Priority is 32 bit positive integer. Default value is 128. .IP "\fB\ plugin_path \fR" full path to ARC plugin loadable module file or directory containing such modules. In last case operation will recurse once into that directory. For fully recursive operation use -r. .PP .SH REPORTING BUGS Report bugs to http://bugzilla.nordugrid.org .SH COPYRIGHT APACHE LICENSE Version 2.0 .SH AUTHOR ARC software is developed by the NorduGrid Collaboration (http://www.nordugrid.org), please consult the AUTHORS file distributed with ARC. 
Please report bugs and feature requests to http://bugzilla.nordugrid.org nordugrid-arc-6.14.0/src/utils/hed/PaxHeaders.30264/schemaconv.cpp0000644000000000000000000000013214152153376022665 xustar000000000000000030 mtime=1638455038.450646651 30 atime=1638455038.517647657 30 ctime=1638455101.148588716 nordugrid-arc-6.14.0/src/utils/hed/schemaconv.cpp0000644000175000002070000000333714152153376022660 0ustar00mockbuildmock00000000000000#ifdef HAVE_CONFIG_H #include #endif #include #include #include #include #include "schemaconv.h" using namespace Arc; bool schemaconv(XMLNode wsdl,std::ostream& h_file,std::ostream& cpp_file,const std::string& name) { h_file<<"\ #include \n\ \n\ namespace "< #endif #include #include #include #include #include "schemaconv.h" using namespace Arc; // -------- Simple type ------------ // 1 - class/parent name (C++,XML) static const char* simple_type_pattern_h = "\ class %1$s: public Arc::XMLNode {\n\ public:\n\ static %1$s New(Arc::XMLNode parent);\n\ %1$s(Arc::XMLNode node);\n\ };\n\ \n\ "; // 1 - class/parent name (C++,XML) // 2 - class namespace (XML) static const char* simple_type_pattern_cpp = "\ %1$s %1$s::New(Arc::XMLNode parent) {\n\ Arc::NS ns;\n\ ns[\"ns\"]=\"%2$s\";\n\ %1$s el(parent.NewChild(\"ns:%1$s\",ns));\n\ return el;\n\ }\n\ \n\ %1$s::%1$s(Arc::XMLNode node):Arc::XMLNode(node){\n\ Arc::NS ns;\n\ ns[\"ns\"]=\"%2$s\";\n\ Namespaces(ns);\n\ }\n\ \n\ "; void simpletypeprintnamed(const std::string& /* cppspace */,const std::string& ntype,XMLNode /* stype */,const std::string& ns,std::ostream& h_file,std::ostream& cpp_file) { strprintf(h_file,simple_type_pattern_h,ntype); strprintf(cpp_file,simple_type_pattern_cpp,ntype,ns); } void simpletypeprint(XMLNode stype,const std::string& ns,std::ostream& h_file,std::ostream& cpp_file) { std::string ntype; if(stype.Name() == "simpleType") { ntype = (std::string)(stype.Attribute("name")); h_file<<"//simple type: "< $@ CLEANFILES = paths_dist.py SUBDIRS = utils control DIST_SUBDIRS = utils control nordugrid-arc-6.14.0/src/utils/python/arc/PaxHeaders.30264/Makefile.in0000644000000000000000000000013014152153437023422 xustar000000000000000028 mtime=1638455071.1651382 30 atime=1638455091.706446843 30 ctime=1638455101.215589723 nordugrid-arc-6.14.0/src/utils/python/arc/Makefile.in0000644000175000002070000007452114152153437023422 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/utils/python/arc DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(srcdir)/gen_paths_dist.sh.in $(am__pkgpython_PYTHON_DIST) \ $(top_srcdir)/py-compile ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = gen_paths_dist.sh CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ 
*) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__py_compile = PYTHON=$(PYTHON) $(SHELL) $(py_compile) am__installdirs = "$(DESTDIR)$(pkgpythondir)" \ "$(DESTDIR)$(pkgpythondir)" am__pep3147_tweak = \ sed -e 's|\.py$$||' -e 's|[^/]*$$|__pycache__/&.*.py|' am__pkgpython_PYTHON_DIST = paths.py __init__.py py_compile = $(top_srcdir)/py-compile RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = 
@EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = @PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = 
@PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ @PYTHON_SWIG_ENABLED_FALSE@INITPY = __init__.py @PYTHON_SWIG_ENABLED_TRUE@INITPY = pkgpythondir = $(PYTHON_SITE_ARCH)/arc pkgpython_PYTHON = paths.py $(INITPY) nodist_pkgpython_PYTHON = paths_dist.py CLEANFILES = paths_dist.py SUBDIRS = utils control DIST_SUBDIRS = utils control all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am 
$(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/utils/python/arc/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/utils/python/arc/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): gen_paths_dist.sh: $(top_builddir)/config.status $(srcdir)/gen_paths_dist.sh.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-nodist_pkgpythonPYTHON: $(nodist_pkgpython_PYTHON) @$(NORMAL_INSTALL) @list='$(nodist_pkgpython_PYTHON)'; dlist=; list2=; test -n "$(pkgpythondir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgpythondir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgpythondir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then b=; else b="$(srcdir)/"; fi; \ if test -f $$b$$p; then \ $(am__strip_dir) \ dlist="$$dlist $$f"; \ list2="$$list2 $$b$$p"; \ else :; fi; \ done; \ for file in $$list2; do echo $$file; done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pkgpythondir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(pkgpythondir)" || exit $$?; \ done || exit $$?; \ if test -n "$$dlist"; then \ $(am__py_compile) --destdir "$(DESTDIR)" \ --basedir "$(pkgpythondir)" $$dlist; \ else :; fi uninstall-nodist_pkgpythonPYTHON: @$(NORMAL_UNINSTALL) @list='$(nodist_pkgpython_PYTHON)'; test -n "$(pkgpythondir)" || list=; \ py_files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ test -n "$$py_files" || exit 0; \ dir='$(DESTDIR)$(pkgpythondir)'; \ pyc_files=`echo "$$py_files" | sed 's|$$|c|'`; \ pyo_files=`echo "$$py_files" | sed 's|$$|o|'`; \ py_files_pep3147=`echo "$$py_files" | $(am__pep3147_tweak)`; \ echo "$$py_files_pep3147";\ pyc_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|c|'`; \ pyo_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|o|'`; \ st=0; \ for files in \ "$$py_files" \ "$$pyc_files" \ "$$pyo_files" \ "$$pyc_files_pep3147" \ "$$pyo_files_pep3147" \ ; do \ $(am__uninstall_files_from_dir) || st=$$?; \ done; \ exit $$st install-pkgpythonPYTHON: $(pkgpython_PYTHON) @$(NORMAL_INSTALL) @list='$(pkgpython_PYTHON)'; dlist=; list2=; test -n "$(pkgpythondir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgpythondir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgpythondir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then b=; else b="$(srcdir)/"; fi; \ if test -f $$b$$p; then \ $(am__strip_dir) \ dlist="$$dlist $$f"; \ list2="$$list2 $$b$$p"; \ else :; fi; \ done; \ for file in $$list2; do echo $$file; done | $(am__base_list) | \ while read files; do \ echo " 
$(INSTALL_DATA) $$files '$(DESTDIR)$(pkgpythondir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(pkgpythondir)" || exit $$?; \ done || exit $$?; \ if test -n "$$dlist"; then \ $(am__py_compile) --destdir "$(DESTDIR)" \ --basedir "$(pkgpythondir)" $$dlist; \ else :; fi uninstall-pkgpythonPYTHON: @$(NORMAL_UNINSTALL) @list='$(pkgpython_PYTHON)'; test -n "$(pkgpythondir)" || list=; \ py_files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ test -n "$$py_files" || exit 0; \ dir='$(DESTDIR)$(pkgpythondir)'; \ pyc_files=`echo "$$py_files" | sed 's|$$|c|'`; \ pyo_files=`echo "$$py_files" | sed 's|$$|o|'`; \ py_files_pep3147=`echo "$$py_files" | $(am__pep3147_tweak)`; \ echo "$$py_files_pep3147";\ pyc_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|c|'`; \ pyo_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|o|'`; \ st=0; \ for files in \ "$$py_files" \ "$$pyc_files" \ "$$pyo_files" \ "$$pyc_files_pep3147" \ "$$pyo_files_pep3147" \ ; do \ $(am__uninstall_files_from_dir) || st=$$?; \ done; \ exit $$st # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(pkgpythondir)" "$(DESTDIR)$(pkgpythondir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES) distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-nodist_pkgpythonPYTHON \ install-pkgpythonPYTHON install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-nodist_pkgpythonPYTHON \ uninstall-pkgpythonPYTHON .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-nodist_pkgpythonPYTHON install-pdf install-pdf-am \ install-pkgpythonPYTHON install-ps install-ps-am install-strip \ installcheck installcheck-am installdirs installdirs-am \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am \ uninstall-nodist_pkgpythonPYTHON uninstall-pkgpythonPYTHON paths_dist.py: gen_paths_dist.sh $(SHELL) $(builddir)/gen_paths_dist.sh > $@ # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/utils/python/arc/PaxHeaders.30264/__init__.py0000644000000000000000000000013214152153376023472 xustar000000000000000030 mtime=1638455038.451646665 30 atime=1638455038.451646665 30 ctime=1638455101.219589783 nordugrid-arc-6.14.0/src/utils/python/arc/__init__.py0000644000175000002070000000000014152153376023445 0ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/utils/python/arc/PaxHeaders.30264/paths.py0000644000000000000000000000013214152153376023052 xustar000000000000000030 mtime=1638455038.453646696 30 atime=1638455038.518647672 30 ctime=1638455101.218589768 nordugrid-arc-6.14.0/src/utils/python/arc/paths.py0000644000175000002070000000125314152153376023040 0ustar00mockbuildmock00000000000000from __future__ import absolute_import import os try: # try to import -generated file from .paths_dist import * except ImportError: # use defaults if 'ARC_LOCATION' in os.environ: ARC_LOCATION = os.environ['ARC_LOCATION'] else: ARC_LOCATION = '/usr' ARC_LIBEXEC_DIR = ARC_LOCATION + '/libexec/arc' ARC_DATA_DIR = ARC_LOCATION + '/share/arc' ARC_LIB_DIR = ARC_LOCATION + '/lib64/arc' ARC_RUN_DIR = '/run/arc' ARC_DOC_DIR = ARC_LOCATION + '/share/doc/nordugrid-arc/' ARC_CONF = '/etc/arc.conf' ARC_VERSION = 'devel' # define ARC_LOCATION to be use by tools like gm-jobs os.environ['ARC_LOCATION'] = ARC_LOCATION nordugrid-arc-6.14.0/src/utils/python/arc/PaxHeaders.30264/utils0000644000000000000000000000013214152153475022444 xustar000000000000000030 mtime=1638455101.242590128 30 atime=1638455103.999631554 30 ctime=1638455101.242590128 nordugrid-arc-6.14.0/src/utils/python/arc/utils/0000755000175000002070000000000014152153475022506 5ustar00mockbuildmock00000000000000nordugrid-arc-6.14.0/src/utils/python/arc/utils/PaxHeaders.30264/Makefile.am0000644000000000000000000000013214152153376024555 xustar000000000000000030 mtime=1638455038.453646696 30 atime=1638455038.518647672 30 ctime=1638455101.240590098 nordugrid-arc-6.14.0/src/utils/python/arc/utils/Makefile.am0000644000175000002070000000014314152153376024540 0ustar00mockbuildmock00000000000000pkgpythondir = $(PYTHON_SITE_ARCH)/arc/utils pkgpython_PYTHON = __init__.py config.py reference.py nordugrid-arc-6.14.0/src/utils/python/arc/utils/PaxHeaders.30264/Makefile.in0000644000000000000000000000013214152153437024564 xustar000000000000000030 mtime=1638455071.271139793 30 atime=1638455091.729447189 30 ctime=1638455101.239590083 nordugrid-arc-6.14.0/src/utils/python/arc/utils/Makefile.in0000644000175000002070000005332314152153437024557 0ustar00mockbuildmock00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/utils/python/arc/utils DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(pkgpython_PYTHON) $(top_srcdir)/py-compile ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = \ $(top_srcdir)/m4/ac_cxx_have_dbdeadlockexception.m4 \ $(top_srcdir)/m4/ac_cxx_have_sstream.m4 \ $(top_srcdir)/m4/ac_cxx_namespaces.m4 \ $(top_srcdir)/m4/arc_api.m4 $(top_srcdir)/m4/arc_paths.m4 \ $(top_srcdir)/m4/fsusage.m4 $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/gpt.m4 $(top_srcdir)/m4/iconv.m4 \ $(top_srcdir)/m4/intlmacosx.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/m4/nls.m4 \ $(top_srcdir)/m4/po.m4 $(top_srcdir)/m4/progtest.m4 \ $(top_srcdir)/VERSION $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" 
am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__py_compile = PYTHON=$(PYTHON) $(SHELL) $(py_compile) am__installdirs = "$(DESTDIR)$(pkgpythondir)" am__pep3147_tweak = \ sed -e 's|\.py$$||' -e 's|[^/]*$$|__pycache__/&.*.py|' py_compile = $(top_srcdir)/py-compile am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) pkgdatadir = @pkgdatadir@ pkgincludedir = @pkgincludedir@ pkglibdir = @pkglibdir@ pkglibexecdir = @pkglibexecdir@ ACLOCAL = @ACLOCAL@ ALTPYTHON = @ALTPYTHON@ ALTPYTHON_CFLAGS = @ALTPYTHON_CFLAGS@ ALTPYTHON_EXT_SUFFIX = @ALTPYTHON_EXT_SUFFIX@ ALTPYTHON_LIBS = @ALTPYTHON_LIBS@ ALTPYTHON_SITE_ARCH = @ALTPYTHON_SITE_ARCH@ ALTPYTHON_SITE_LIB = @ALTPYTHON_SITE_LIB@ ALTPYTHON_VERSION = @ALTPYTHON_VERSION@ AMTAR = @AMTAR@ AM_CXXFLAGS = @AM_CXXFLAGS@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ ARCCLIENT_CFLAGS = @ARCCLIENT_CFLAGS@ ARCCLIENT_LIBS = @ARCCLIENT_LIBS@ ARCCOMMON_CFLAGS = @ARCCOMMON_CFLAGS@ ARCCOMMON_LIBS = @ARCCOMMON_LIBS@ ARCCREDENTIAL_CFLAGS = @ARCCREDENTIAL_CFLAGS@ ARCCREDENTIAL_LIBS = @ARCCREDENTIAL_LIBS@ ARCDATA_CFLAGS = @ARCDATA_CFLAGS@ ARCDATA_LIBS = @ARCDATA_LIBS@ ARCINFOSYS_CFLAGS = @ARCINFOSYS_CFLAGS@ ARCINFOSYS_LIBS = @ARCINFOSYS_LIBS@ ARCJOB_CFLAGS = @ARCJOB_CFLAGS@ ARCJOB_LIBS = @ARCJOB_LIBS@ ARCLOADER_CFLAGS = @ARCLOADER_CFLAGS@ ARCLOADER_LIBS = @ARCLOADER_LIBS@ ARCMESSAGE_CFLAGS = @ARCMESSAGE_CFLAGS@ ARCMESSAGE_LIBS = @ARCMESSAGE_LIBS@ ARCOTOKENS_CFLAGS = @ARCOTOKENS_CFLAGS@ ARCOTOKENS_LIBS = @ARCOTOKENS_LIBS@ ARCSECURITY_CFLAGS = @ARCSECURITY_CFLAGS@ ARCSECURITY_LIBS = @ARCSECURITY_LIBS@ ARCWSADDRESSING_CFLAGS = @ARCWSADDRESSING_CFLAGS@ ARCWSADDRESSING_LIBS = @ARCWSADDRESSING_LIBS@ ARCWSSECURITY_CFLAGS = @ARCWSSECURITY_CFLAGS@ ARCWSSECURITY_LIBS = @ARCWSSECURITY_LIBS@ ARCXMLSEC_CFLAGS = @ARCXMLSEC_CFLAGS@ ARCXMLSEC_LIBS = @ARCXMLSEC_LIBS@ ARC_VERSION = @ARC_VERSION@ ARC_VERSION_MAJOR = @ARC_VERSION_MAJOR@ ARC_VERSION_MINOR = @ARC_VERSION_MINOR@ ARC_VERSION_NUM = @ARC_VERSION_NUM@ ARC_VERSION_PATCH = @ARC_VERSION_PATCH@ ARGUS_CFLAGS = @ARGUS_CFLAGS@ ARGUS_LIBS = @ARGUS_LIBS@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ BASH_COMPLETION_CFLAGS = @BASH_COMPLETION_CFLAGS@ BASH_COMPLETION_LIBS = @BASH_COMPLETION_LIBS@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CPPUNIT_CFLAGS = @CPPUNIT_CFLAGS@ CPPUNIT_CONFIG = @CPPUNIT_CONFIG@ CPPUNIT_LIBS = @CPPUNIT_LIBS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DATE = @DATE@ DATER = @DATER@ DBCXX_CPPFLAGS = @DBCXX_CPPFLAGS@ DBCXX_LIBS = @DBCXX_LIBS@ DEFAULT_GLOBUS_LOCATION = @DEFAULT_GLOBUS_LOCATION@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DLOPEN_LIBS = @DLOPEN_LIBS@ DOT = @DOT@ DOXYGEN = @DOXYGEN@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ 
ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GFAL2_CFLAGS = @GFAL2_CFLAGS@ GFAL2_LIBS = @GFAL2_LIBS@ GLIBMM_CFLAGS = @GLIBMM_CFLAGS@ GLIBMM_LIBS = @GLIBMM_LIBS@ GLOBUS_COMMON_CFLAGS = @GLOBUS_COMMON_CFLAGS@ GLOBUS_COMMON_LIBS = @GLOBUS_COMMON_LIBS@ GLOBUS_FTP_CLIENT_CFLAGS = @GLOBUS_FTP_CLIENT_CFLAGS@ GLOBUS_FTP_CLIENT_LIBS = @GLOBUS_FTP_CLIENT_LIBS@ GLOBUS_FTP_CONTROL_CFLAGS = @GLOBUS_FTP_CONTROL_CFLAGS@ GLOBUS_FTP_CONTROL_LIBS = @GLOBUS_FTP_CONTROL_LIBS@ GLOBUS_GSI_CALLBACK_CFLAGS = @GLOBUS_GSI_CALLBACK_CFLAGS@ GLOBUS_GSI_CALLBACK_LIBS = @GLOBUS_GSI_CALLBACK_LIBS@ GLOBUS_GSI_CERT_UTILS_CFLAGS = @GLOBUS_GSI_CERT_UTILS_CFLAGS@ GLOBUS_GSI_CERT_UTILS_LIBS = @GLOBUS_GSI_CERT_UTILS_LIBS@ GLOBUS_GSI_CREDENTIAL_CFLAGS = @GLOBUS_GSI_CREDENTIAL_CFLAGS@ GLOBUS_GSI_CREDENTIAL_LIBS = @GLOBUS_GSI_CREDENTIAL_LIBS@ GLOBUS_GSSAPI_GSI_CFLAGS = @GLOBUS_GSSAPI_GSI_CFLAGS@ GLOBUS_GSSAPI_GSI_LIBS = @GLOBUS_GSSAPI_GSI_LIBS@ GLOBUS_GSS_ASSIST_CFLAGS = @GLOBUS_GSS_ASSIST_CFLAGS@ GLOBUS_GSS_ASSIST_LIBS = @GLOBUS_GSS_ASSIST_LIBS@ GLOBUS_IO_CFLAGS = @GLOBUS_IO_CFLAGS@ GLOBUS_IO_LIBS = @GLOBUS_IO_LIBS@ GLOBUS_MAKEFILE_HEADER = @GLOBUS_MAKEFILE_HEADER@ GLOBUS_OPENSSL_CFLAGS = @GLOBUS_OPENSSL_CFLAGS@ GLOBUS_OPENSSL_LIBS = @GLOBUS_OPENSSL_LIBS@ GLOBUS_OPENSSL_MODULE_CFLAGS = @GLOBUS_OPENSSL_MODULE_CFLAGS@ GLOBUS_OPENSSL_MODULE_LIBS = @GLOBUS_OPENSSL_MODULE_LIBS@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPT_FLAVOR_CONFIGURATION = @GPT_FLAVOR_CONFIGURATION@ GPT_QUERY = @GPT_QUERY@ GREP = @GREP@ GTHREAD_CFLAGS = @GTHREAD_CFLAGS@ GTHREAD_LIBS = @GTHREAD_LIBS@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LCAS_CFLAGS = @LCAS_CFLAGS@ LCAS_LIBS = @LCAS_LIBS@ LCAS_LOCATION = @LCAS_LOCATION@ LCMAPS_CFLAGS = @LCMAPS_CFLAGS@ LCMAPS_LIBS = @LCMAPS_LIBS@ LCMAPS_LOCATION = @LCMAPS_LOCATION@ LD = @LD@ LDAP_LIBS = @LDAP_LIBS@ LDFLAGS = @LDFLAGS@ LDNS_CFLAGS = @LDNS_CFLAGS@ LDNS_CONFIG = @LDNS_CONFIG@ LDNS_LIBS = @LDNS_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBRESOLV = @LIBRESOLV@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIBXML2_CFLAGS = @LIBXML2_CFLAGS@ LIBXML2_LIBS = @LIBXML2_LIBS@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ MYSQL_CFLAGS = @MYSQL_CFLAGS@ MYSQL_LIBS = @MYSQL_LIBS@ NM = @NM@ NMEDIT = @NMEDIT@ NSS_CFLAGS = @NSS_CFLAGS@ NSS_LIBS = @NSS_LIBS@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OPENSSL_1_1_CFLAGS = @OPENSSL_1_1_CFLAGS@ OPENSSL_1_1_LIBS = @OPENSSL_1_1_LIBS@ OPENSSL_CFLAGS = @OPENSSL_CFLAGS@ OPENSSL_LIBS = @OPENSSL_LIBS@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PDFLATEX = @PDFLATEX@ PERL = @PERL@ PERL5LIB_INLINE_PYTHON = @PERL5LIB_INLINE_PYTHON@ PERL_TEST_DIR = @PERL_TEST_DIR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYLINT = @PYLINT@ PYLINT_ARGS = @PYLINT_ARGS@ PYLINT_ARGS_ARGUMENTS_DIFFER = 
@PYLINT_ARGS_ARGUMENTS_DIFFER@ PYTHON = @PYTHON@ PYTHON_CFLAGS = @PYTHON_CFLAGS@ PYTHON_EXT_SUFFIX = @PYTHON_EXT_SUFFIX@ PYTHON_LIBS = @PYTHON_LIBS@ PYTHON_SITE_ARCH = @PYTHON_SITE_ARCH@ PYTHON_SITE_LIB = @PYTHON_SITE_LIB@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ S3_CPPFLAGS = @S3_CPPFLAGS@ S3_LIBS = @S3_LIBS@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ SPECDATE = @SPECDATE@ SQLITE_CFLAGS = @SQLITE_CFLAGS@ SQLITE_LIBS = @SQLITE_LIBS@ STRIP = @STRIP@ SWIG = @SWIG@ SWIG2 = @SWIG2@ SWIG_PYTHON_NAMING = @SWIG_PYTHON_NAMING@ SYSTEMD_DAEMON_LIBS = @SYSTEMD_DAEMON_LIBS@ TEST_DIR = @TEST_DIR@ TRIAL = @TRIAL@ TWISTD = @TWISTD@ USE_NLS = @USE_NLS@ UUID_LIBS = @UUID_LIBS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ XMLSEC_CFLAGS = @XMLSEC_CFLAGS@ XMLSEC_LIBS = @XMLSEC_LIBS@ XMLSEC_OPENSSL_CFLAGS = @XMLSEC_OPENSSL_CFLAGS@ XMLSEC_OPENSSL_LIBS = @XMLSEC_OPENSSL_LIBS@ XROOTD_CPPFLAGS = @XROOTD_CPPFLAGS@ XROOTD_LIBS = @XROOTD_LIBS@ ZLIB_CFLAGS = @ZLIB_CFLAGS@ ZLIB_LIBS = @ZLIB_LIBS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ arc_location = @arc_location@ baseversion = @baseversion@ bashcompdir = @bashcompdir@ bindir = @bindir@ bindir_rel_to_pkglibexecdir = @bindir_rel_to_pkglibexecdir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ cronddir = @cronddir@ datadir = @datadir@ datarootdir = @datarootdir@ debianversion = @debianversion@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ extpkglibdir = @extpkglibdir@ fedorarelease = @fedorarelease@ fedorasetupopts = @fedorasetupopts@ gnu_time = @gnu_time@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ initddir = @initddir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libsubdir = @libsubdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ monitor_prefix = @monitor_prefix@ nodename = @nodename@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ pkgdatadir_rel_to_pkglibexecdir = @pkgdatadir_rel_to_pkglibexecdir@ pkgdatasubdir = @pkgdatasubdir@ pkglibdir_rel_to_pkglibexecdir = @pkglibdir_rel_to_pkglibexecdir@ pkglibexecsubdir = @pkglibexecsubdir@ pkglibsubdir = @pkglibsubdir@ posix_shell = @posix_shell@ prefix = @prefix@ preversion = @preversion@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sbindir_rel_to_pkglibexecdir = @sbindir_rel_to_pkglibexecdir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp_dir = @tmp_dir@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ unitsdir = @unitsdir@ pkgpythondir = $(PYTHON_SITE_ARCH)/arc/utils pkgpython_PYTHON = __init__.py config.py reference.py all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) 
am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/utils/python/arc/utils/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/utils/python/arc/utils/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-pkgpythonPYTHON: $(pkgpython_PYTHON) @$(NORMAL_INSTALL) @list='$(pkgpython_PYTHON)'; dlist=; list2=; test -n "$(pkgpythondir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgpythondir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgpythondir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then b=; else b="$(srcdir)/"; fi; \ if test -f $$b$$p; then \ $(am__strip_dir) \ dlist="$$dlist $$f"; \ list2="$$list2 $$b$$p"; \ else :; fi; \ done; \ for file in $$list2; do echo $$file; done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pkgpythondir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(pkgpythondir)" || exit $$?; \ done || exit $$?; \ if test -n "$$dlist"; then \ $(am__py_compile) --destdir "$(DESTDIR)" \ --basedir "$(pkgpythondir)" $$dlist; \ else :; fi uninstall-pkgpythonPYTHON: @$(NORMAL_UNINSTALL) @list='$(pkgpython_PYTHON)'; test -n "$(pkgpythondir)" || list=; \ py_files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ test -n "$$py_files" || exit 0; \ dir='$(DESTDIR)$(pkgpythondir)'; \ pyc_files=`echo "$$py_files" | sed 's|$$|c|'`; \ pyo_files=`echo "$$py_files" | sed 's|$$|o|'`; \ py_files_pep3147=`echo "$$py_files" | $(am__pep3147_tweak)`; \ echo "$$py_files_pep3147";\ pyc_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|c|'`; \ pyo_files_pep3147=`echo "$$py_files_pep3147" | sed 's|$$|o|'`; \ st=0; \ for files in \ "$$py_files" \ "$$pyc_files" \ "$$pyo_files" \ "$$pyc_files_pep3147" \ "$$pyo_files_pep3147" \ ; do \ $(am__uninstall_files_from_dir) || st=$$?; \ done; \ exit $$st tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile installdirs: for dir in "$(DESTDIR)$(pkgpythondir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-pkgpythonPYTHON install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-pkgpythonPYTHON .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-pkgpythonPYTHON install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags-am uninstall uninstall-am \ uninstall-pkgpythonPYTHON # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: nordugrid-arc-6.14.0/src/utils/python/arc/utils/PaxHeaders.30264/config.py0000644000000000000000000000013214152153376024340 xustar000000000000000030 mtime=1638455038.453646696 30 atime=1638455038.518647672 30 ctime=1638455101.241590113 nordugrid-arc-6.14.0/src/utils/python/arc/utils/config.py0000644000175000002070000005066514152153376024331 0ustar00mockbuildmock00000000000000import logging
import json
import re
import sys
import subprocess

from arc.paths import *

# init module __logger
__logger = logging.getLogger('ARC.Config')

# module-wise data structures to store parsed configs
__parsed_config = {}
__parsed_config_admin_defined = {}
__parsed_blocks = []
__default_config = {}
__default_blocks = []

# processing constants and regexes
__def_path_arcconf = ARC_CONF
__def_path_defaults = ARC_DATA_DIR + '/arc.parser.defaults'
__def_path_runconf = ARC_RUN_DIR + '/arc.runtime.conf'

# defaults parsing constants
__no_default = 'undefined'
__var_re = re.compile(r'\$VAR\{(?:\[(?P[^\[\]]+)\])?(?P
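
The module-level constants above wire up the arc.conf parser, and the truncated __var_re pattern is evidently meant to match $VAR{[block]option} references used when resolving parser defaults. The following is a hedged, self-contained illustration of that substitution idea; the regex, group names and helper are hypothetical reconstructions by the editor, not code from the nordugrid-arc sources.

# Editor's illustrative sketch -- not part of the nordugrid-arc sources.
import re

# Hypothetical reconstruction of a $VAR{[block]option} reference pattern;
# the actual pattern and group names in arc.utils.config may differ.
example_var_re = re.compile(r'\$VAR\{(?:\[(?P<block>[^\[\]]+)\])?(?P<option>[a-zA-Z0-9_]+)\}')

def resolve_var_refs(value, lookup):
    """Replace $VAR{[block]option} references in 'value' using lookup(block, option)."""
    def _substitute(match):
        return str(lookup(match.group('block'), match.group('option')))
    return example_var_re.sub(_substitute, value)

# Example: a default such as '$VAR{[common]hostname}:2811' expands to the
# 'hostname' value taken from the [common] block.
if __name__ == '__main__':
    options = {('common', 'hostname'): 'ce.example.org'}
    print(resolve_var_refs('$VAR{[common]hostname}:2811',
                           lambda block, option: options[(block or 'common', option)]))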
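
Likewise, arc/paths.py earlier in this archive falls back to defaults derived from ARC_LOCATION when no build-generated paths_dist module can be imported. Below is a minimal sketch of that fallback, assuming only the defaults shown in that file; the helper name is hypothetical and not part of the project.

# Editor's illustrative sketch -- not part of the nordugrid-arc sources.
import os

def arc_default_paths(environ=os.environ):
    """Mimic the fallback in arc/paths.py: derive paths from ARC_LOCATION (default /usr)."""
    location = environ.get('ARC_LOCATION', '/usr')
    return {
        'ARC_LOCATION': location,
        'ARC_LIBEXEC_DIR': location + '/libexec/arc',
        'ARC_DATA_DIR': location + '/share/arc',
        'ARC_LIB_DIR': location + '/lib64/arc',
        'ARC_RUN_DIR': '/run/arc',
        'ARC_CONF': '/etc/arc.conf',
    }

# Example: with ARC_LOCATION=/opt/arc, the defaults file used by the config
# parser above would resolve to /opt/arc/share/arc/arc.parser.defaults.
if __name__ == '__main__':
    paths = arc_default_paths({'ARC_LOCATION': '/opt/arc'})
    print(paths['ARC_DATA_DIR'] + '/arc.parser.defaults')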